author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
commit     6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
tree       a68f146d7fa01f0134297619fbe7e33db084e0aa /third_party/libwebrtc/build/android/pylib/utils
parent     Initial commit. (diff)
Adding upstream version 1:115.7.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/build/android/pylib/utils')
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/__init__.py  0
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/app_bundle_utils.py  169
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/argparse_utils.py  52
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils.py  171
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils_test.py  235
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/decorators.py  37
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/decorators_test.py  104
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/device_dependencies.py  136
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/device_dependencies_test.py  52
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/dexdump.py  136
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/dexdump_test.py  141
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/gold_utils.py  78
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/gold_utils_test.py  123
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/google_storage_helper.py  129
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/instrumentation_tracing.py  204
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/local_utils.py  19
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/logdog_helper.py  96
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/logging_utils.py  136
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/maven_downloader.py  140
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/proguard.py  285
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/proguard_test.py  495
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/repo_utils.py  22
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/shared_preference_utils.py  116
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/simpleperf.py  260
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/test_filter.py  148
-rwxr-xr-x third_party/libwebrtc/build/android/pylib/utils/test_filter_test.py  247
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/time_profile.py  45
-rw-r--r-- third_party/libwebrtc/build/android/pylib/utils/xvfb.py  58
28 files changed, 3834 insertions, 0 deletions
diff --git a/third_party/libwebrtc/build/android/pylib/utils/__init__.py b/third_party/libwebrtc/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/__init__.py
diff --git a/third_party/libwebrtc/build/android/pylib/utils/app_bundle_utils.py b/third_party/libwebrtc/build/android/pylib/utils/app_bundle_utils.py
new file mode 100644
index 0000000000..986e12688e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/app_bundle_utils.py
@@ -0,0 +1,169 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+
+from util import build_utils
+from util import md5_check
+from util import resource_utils
+import bundletool
+
+# List of valid modes for GenerateBundleApks()
+BUILD_APKS_MODES = ('default', 'universal', 'system', 'system_compressed')
+OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
+ 'TEXTURE_COMPRESSION_FORMAT')
+_SYSTEM_MODES = ('system_compressed', 'system')
+
+_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
+
+
+def _CreateDeviceSpec(bundle_path, sdk_version, locales):
+ if not sdk_version:
+ manifest_data = bundletool.RunBundleTool(
+ ['dump', 'manifest', '--bundle', bundle_path])
+ sdk_version = int(
+ re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
+
+ # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
+ # files from being created within the .apks file.
+ return {
+ 'screenDensity': 1000, # Ignored since we don't split on density.
+ 'sdkVersion': sdk_version,
+ 'supportedAbis': _ALL_ABIS, # Our .aab files are already split on abi.
+ 'supportedLocales': locales,
+ }
+
+
+def GenerateBundleApks(bundle_path,
+ bundle_apks_path,
+ aapt2_path,
+ keystore_path,
+ keystore_password,
+ keystore_alias,
+ mode=None,
+ local_testing=False,
+ minimal=False,
+ minimal_sdk_version=None,
+ check_for_noop=True,
+ system_image_locales=None,
+ optimize_for=None):
+ """Generate an .apks archive from a an app bundle if needed.
+
+ Args:
+ bundle_path: Input bundle file path.
+ bundle_apks_path: Output bundle .apks archive path. Name must end with
+ '.apks' or this operation will fail.
+ aapt2_path: Path to aapt2 build tool.
+ keystore_path: Path to keystore.
+ keystore_password: Keystore password, as a string.
+ keystore_alias: Keystore signing key alias.
+    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+    local_testing: Whether to pass the --local-testing flag to bundletool.
+ minimal: Create the minimal set of apks possible (english-only).
+ minimal_sdk_version: Use this sdkVersion when |minimal| or
+ |system_image_locales| args are present.
+ check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+ system_image_locales: Locales to package in the APK when mode is "system"
+ or "system_compressed".
+ optimize_for: Overrides split configuration, which must be None or
+ one of OPTIMIZE_FOR_OPTIONS.
+ """
+ device_spec = None
+ if minimal_sdk_version:
+ assert minimal or system_image_locales, (
+ 'minimal_sdk_version is only used when minimal or system_image_locales '
+ 'is specified')
+ if minimal:
+ # Measure with one language split installed. Use Hindi because it is
+ # popular. resource_size.py looks for splits/base-hi.apk.
+ # Note: English is always included since it's in base-master.apk.
+ device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
+ elif mode in _SYSTEM_MODES:
+ if not system_image_locales:
+ raise Exception('system modes require system_image_locales')
+ # Bundletool doesn't seem to understand device specs with locales in the
+ # form of "<lang>-r<region>", so just provide the language code instead.
+ locales = [
+ resource_utils.ToAndroidLocaleName(l).split('-')[0]
+ for l in system_image_locales
+ ]
+ device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
+
+ def rebuild():
+ logging.info('Building %s', bundle_apks_path)
+ with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_apks_file:
+ cmd_args = [
+ 'build-apks',
+ '--aapt2=%s' % aapt2_path,
+ '--output=%s' % tmp_apks_file.name,
+ '--bundle=%s' % bundle_path,
+ '--ks=%s' % keystore_path,
+ '--ks-pass=pass:%s' % keystore_password,
+ '--ks-key-alias=%s' % keystore_alias,
+ '--overwrite',
+ ]
+
+ if local_testing:
+ cmd_args += ['--local-testing']
+
+ if mode is not None:
+ if mode not in BUILD_APKS_MODES:
+ raise Exception('Invalid mode parameter %s (should be in %s)' %
+ (mode, BUILD_APKS_MODES))
+ cmd_args += ['--mode=' + mode]
+
+ if optimize_for:
+ if optimize_for not in OPTIMIZE_FOR_OPTIONS:
+          raise Exception('Invalid optimize_for parameter %s '
+                          '(should be in %s)' %
+                          (optimize_for, OPTIMIZE_FOR_OPTIONS))
+ cmd_args += ['--optimize-for=' + optimize_for]
+
+ with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as spec_file:
+ if device_spec:
+ json.dump(device_spec, spec_file)
+ spec_file.flush()
+ cmd_args += ['--device-spec=' + spec_file.name]
+ bundletool.RunBundleTool(cmd_args)
+
+ # Make the resulting .apks file hermetic.
+ with build_utils.TempDir() as temp_dir, \
+ build_utils.AtomicOutput(bundle_apks_path, only_if_changed=False) as f:
+ files = build_utils.ExtractAll(tmp_apks_file.name, temp_dir)
+ build_utils.DoZip(files, f, base_dir=temp_dir)
+
+ if check_for_noop:
+ # NOTE: BUNDLETOOL_JAR_PATH is added to input_strings, rather than
+ # input_paths, to speed up MD5 computations by about 400ms (the .jar file
+ # contains thousands of class files which are checked independently,
+ # resulting in an .md5.stamp of more than 60000 lines!).
+ input_paths = [bundle_path, aapt2_path, keystore_path]
+ input_strings = [
+ keystore_password,
+ keystore_alias,
+ bundletool.BUNDLETOOL_JAR_PATH,
+ # NOTE: BUNDLETOOL_VERSION is already part of BUNDLETOOL_JAR_PATH, but
+ # it's simpler to assume that this may not be the case in the future.
+ bundletool.BUNDLETOOL_VERSION,
+ device_spec,
+ ]
+ if mode is not None:
+ input_strings.append(mode)
+
+ # Avoid rebuilding (saves ~20s) when the input files have not changed. This
+ # is essential when calling the apk_operations.py script multiple times with
+ # the same bundle (e.g. out/Debug/bin/monochrome_public_bundle run).
+ md5_check.CallAndRecordIfStale(
+ rebuild,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=[bundle_apks_path])
+ else:
+ rebuild()
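
A minimal sketch of how GenerateBundleApks() might be invoked; every path and
keystore value below is a placeholder, not a value from this change:

    from pylib.utils import app_bundle_utils

    app_bundle_utils.GenerateBundleApks(
        bundle_path='out/Release/apks/Example.aab',        # placeholder
        bundle_apks_path='out/Release/apks/Example.apks',  # placeholder
        aapt2_path='third_party/android_build_tools/aapt2/aapt2',
        keystore_path='build/android/chromium-debug.keystore',
        keystore_password='chromium',
        keystore_alias='chromiumdebugkey',
        mode='universal')  # One of BUILD_APKS_MODES, or None.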
diff --git a/third_party/libwebrtc/build/android/pylib/utils/argparse_utils.py b/third_party/libwebrtc/build/android/pylib/utils/argparse_utils.py
new file mode 100644
index 0000000000..bd603c9d5a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/argparse_utils.py
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+
+import argparse
+
+
+class CustomHelpAction(argparse.Action):
+ '''Allows defining custom help actions.
+
+ Help actions can run even when the parser would otherwise fail on missing
+ arguments. The first help or custom help command mentioned on the command
+ line will have its help text displayed.
+
+ Usage:
+ parser = argparse.ArgumentParser(...)
+ CustomHelpAction.EnableFor(parser)
+ parser.add_argument('--foo-help',
+ action='custom_help',
+ custom_help_text='this is the help message',
+ help='What this helps with')
+ '''
+ # Derived from argparse._HelpAction from
+ # https://github.com/python/cpython/blob/master/Lib/argparse.py
+
+ # pylint: disable=redefined-builtin
+ # (complains about 'help' being redefined)
+ def __init__(self,
+ option_strings,
+ dest=argparse.SUPPRESS,
+ default=argparse.SUPPRESS,
+ custom_help_text=None,
+ help=None):
+ super(CustomHelpAction, self).__init__(option_strings=option_strings,
+ dest=dest,
+ default=default,
+ nargs=0,
+ help=help)
+
+ if not custom_help_text:
+ raise ValueError('custom_help_text is required')
+ self._help_text = custom_help_text
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ print(self._help_text)
+ parser.exit()
+
+ @staticmethod
+ def EnableFor(parser):
+ parser.register('action', 'custom_help', CustomHelpAction)
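
The docstring's usage snippet, expanded into a self-contained sketch; the
option name and help text below are illustrative only:

    import argparse

    from pylib.utils import argparse_utils

    parser = argparse.ArgumentParser()
    argparse_utils.CustomHelpAction.EnableFor(parser)
    parser.add_argument('--env-help',
                        action='custom_help',
                        custom_help_text='Environment variables honored: ...',
                        help='Show environment variable help.')
    parser.add_argument('required_arg')
    # `prog --env-help` prints the custom text and exits, even though
    # `required_arg` is missing.
    parser.parse_args()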
diff --git a/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils.py b/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils.py
new file mode 100644
index 0000000000..149d0b9c8c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils.py
@@ -0,0 +1,171 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for setting up and tear down WPR and TsProxy service."""
+
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+from devil.android import forwarder
+
+PROXY_HOST_IP = '127.0.0.1'
+# From Catapult/WebPageReplay document.
+IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I='
+PROXY_SERVER = 'socks5://localhost'
+DEFAULT_DEVICE_PORT = 1080
+DEFAULT_ROUND_TRIP_LATENCY_MS = 100
+DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000
+DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000
+
+
+class WPRServer(object):
+ """Utils to set up a webpagereplay_go_server instance."""
+
+ def __init__(self):
+ self._archive_path = None
+ self._host_http_port = 0
+ self._host_https_port = 0
+ self._record_mode = False
+ self._server = None
+
+ def StartServer(self, wpr_archive_path):
+ """Starts a webpagereplay_go_server instance."""
+ if wpr_archive_path == self._archive_path and self._server:
+ # Reuse existing webpagereplay_go_server instance.
+ return
+
+ if self._server:
+ self.StopServer()
+
+ replay_options = []
+ if self._record_mode:
+ replay_options.append('--record')
+
+ ports = {}
+ if not self._server:
+ self._server = webpagereplay_go_server.ReplayServer(
+ wpr_archive_path,
+ PROXY_HOST_IP,
+ http_port=self._host_http_port,
+ https_port=self._host_https_port,
+ replay_options=replay_options)
+ self._archive_path = wpr_archive_path
+ ports = self._server.StartServer()
+
+ self._host_http_port = ports['http']
+ self._host_https_port = ports['https']
+
+ def StopServer(self):
+ """Stops the webpagereplay_go_server instance and resets archive."""
+ self._server.StopServer()
+ self._server = None
+ self._host_http_port = 0
+ self._host_https_port = 0
+
+ @staticmethod
+ def SetServerBinaryPath(go_binary_path):
+ """Sets the go_binary_path for webpagereplay_go_server.ReplayServer."""
+ webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path)
+
+ @property
+ def record_mode(self):
+ return self._record_mode
+
+ @record_mode.setter
+ def record_mode(self, value):
+ self._record_mode = value
+
+ @property
+ def http_port(self):
+ return self._host_http_port
+
+ @property
+ def https_port(self):
+ return self._host_https_port
+
+ @property
+ def archive_path(self):
+ return self._archive_path
+
+
+class ChromeProxySession(object):
+ """Utils to help set up a Chrome Proxy."""
+
+ def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT):
+ self._device_proxy_port = device_proxy_port
+ self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP)
+ self._wpr_server = WPRServer()
+
+ @property
+ def wpr_record_mode(self):
+ """Returns whether this proxy session was running in record mode."""
+ return self._wpr_server.record_mode
+
+ @wpr_record_mode.setter
+ def wpr_record_mode(self, value):
+ self._wpr_server.record_mode = value
+
+ @property
+ def wpr_replay_mode(self):
+ """Returns whether this proxy session was running in replay mode."""
+ return not self._wpr_server.record_mode
+
+ @property
+ def wpr_archive_path(self):
+ """Returns the wpr archive file path used in this proxy session."""
+ return self._wpr_server.archive_path
+
+ @property
+ def device_proxy_port(self):
+ return self._device_proxy_port
+
+ def GetFlags(self):
+ """Gets the chrome command line flags to be needed by ChromeProxySession."""
+ extra_flags = []
+
+ extra_flags.append('--ignore-certificate-errors-spki-list=%s' %
+ IGNORE_CERT_ERROR_SPKI_LIST)
+ extra_flags.append('--proxy-server=%s:%s' %
+ (PROXY_SERVER, self._device_proxy_port))
+ return extra_flags
+
+ @staticmethod
+ def SetWPRServerBinary(go_binary_path):
+ """Sets the WPR server go_binary_path."""
+ WPRServer.SetServerBinaryPath(go_binary_path)
+
+ def Start(self, device, wpr_archive_path):
+ """Starts the wpr_server as well as the ts_proxy server and setups env.
+
+ Args:
+ device: A DeviceUtils instance.
+ wpr_archive_path: A abs path to the wpr archive file.
+
+ """
+ self._wpr_server.StartServer(wpr_archive_path)
+ self._ts_proxy_server.StartServer()
+
+ # Maps device port to host port
+ forwarder.Forwarder.Map(
+ [(self._device_proxy_port, self._ts_proxy_server.port)], device)
+ # Maps tsProxy port to wpr http/https ports
+ self._ts_proxy_server.UpdateOutboundPorts(
+ http_port=self._wpr_server.http_port,
+ https_port=self._wpr_server.https_port)
+ self._ts_proxy_server.UpdateTrafficSettings(
+ round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS,
+ download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS,
+ upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS)
+
+ def Stop(self, device):
+ """Stops the wpr_server, and ts_proxy server and tears down env.
+
+ Note that Stop does not reset wpr_record_mode, wpr_replay_mode,
+ wpr_archive_path property.
+
+ Args:
+ device: A DeviceUtils instance.
+ """
+ self._wpr_server.StopServer()
+ self._ts_proxy_server.StopServer()
+ forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device)
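
A rough usage sketch for ChromeProxySession, assuming an attached device and
an existing WPR archive; the WPR binary and archive paths are placeholders:

    from devil.android import device_utils
    from pylib.utils import chrome_proxy_utils

    device = device_utils.DeviceUtils.HealthyDevices()[0]
    session = chrome_proxy_utils.ChromeProxySession()
    session.wpr_record_mode = False  # Replay an existing archive.
    chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary(
        '/path/to/wpr')  # Placeholder WPR go binary path.
    session.Start(device, '/tmp/example.wprgo')  # Placeholder archive path.
    try:
      flags = session.GetFlags()  # Pass these to the browser under test.
    finally:
      session.Stop(device)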
diff --git a/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils_test.py b/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils_test.py
new file mode 100755
index 0000000000..7a52024661
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/chrome_proxy_utils_test.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for chrome_proxy_utils."""
+
+#pylint: disable=protected-access
+
+import os
+import unittest
+
+from pylib.utils import chrome_proxy_utils
+
+from devil.android import forwarder
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+from py_utils import ts_proxy_server
+from py_utils import webpagereplay_go_server
+
+import mock # pylint: disable=import-error
+
+
+def _DeviceUtilsMock(test_serial, is_ready=True):
+ """Returns a DeviceUtils instance based on given serial."""
+ adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+ adb.__str__ = mock.Mock(return_value=test_serial)
+ adb.GetDeviceSerial.return_value = test_serial
+ adb.is_ready = is_ready
+ return device_utils.DeviceUtils(adb)
+
+
+class ChromeProxySessionTest(unittest.TestCase):
+ """Unittest for ChromeProxySession."""
+
+ #pylint: disable=no-self-use
+
+ @mock.patch.object(forwarder.Forwarder, 'Map')
+ @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings')
+ @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port',
+ new_callable=mock.PropertyMock)
+ def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock,
+ start_server_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._host_http_port = 1
+ chrome_proxy._wpr_server._host_https_port = 2
+ port_mock.return_value = 3
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.Start(device, 'abc')
+
+ forwarder_mock.assert_called_once_with([(4, 3)], device)
+ wpr_mock.assert_called_once_with('abc')
+ start_server_mock.assert_called_once()
+ outboundport_mock.assert_called_once_with(http_port=1, https_port=2)
+ traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000,
+ round_trip_latency_ms=100,
+ upload_bandwidth_kbps=72000)
+ port_mock.assert_called_once()
+
+ @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+ @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+ def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.wpr_record_mode = True
+ chrome_proxy._wpr_server._archive_path = 'abc'
+ chrome_proxy.Stop(device)
+
+ forwarder_mock.assert_called_once_with(4, device)
+ wpr_mock.assert_called_once_with()
+ ts_proxy_mock.assert_called_once_with()
+
+ #pylint: enable=no-self-use
+
+ @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+ @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
+ def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, [])
+ chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__)
+ device = _DeviceUtilsMock('01234')
+ chrome_proxy.wpr_record_mode = True
+ chrome_proxy.Stop(device)
+
+ forwarder_mock.assert_called_once_with(4, device)
+ wpr_mock.assert_called_once_with()
+ ts_proxy_mock.assert_called_once_with()
+ self.assertFalse(chrome_proxy.wpr_replay_mode)
+ self.assertEqual(chrome_proxy.wpr_archive_path, os.path.abspath(__file__))
+
+ def test_SetWPRRecordMode(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy.wpr_record_mode = True
+ self.assertTrue(chrome_proxy._wpr_server.record_mode)
+ self.assertTrue(chrome_proxy.wpr_record_mode)
+ self.assertFalse(chrome_proxy.wpr_replay_mode)
+
+ chrome_proxy.wpr_record_mode = False
+ self.assertFalse(chrome_proxy._wpr_server.record_mode)
+ self.assertFalse(chrome_proxy.wpr_record_mode)
+ self.assertTrue(chrome_proxy.wpr_replay_mode)
+
+ def test_SetWPRArchivePath(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
+ chrome_proxy._wpr_server._archive_path = 'abc'
+ self.assertEqual(chrome_proxy.wpr_archive_path, 'abc')
+
+ def test_UseDefaultDeviceProxyPort(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession()
+ expected_flags = [
+ '--ignore-certificate-errors-spki-list='
+ 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+ '--proxy-server=socks5://localhost:1080'
+ ]
+ self.assertEqual(chrome_proxy.device_proxy_port, 1080)
+ self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+ def test_UseNewDeviceProxyPort(self):
+ chrome_proxy = chrome_proxy_utils.ChromeProxySession(1)
+ expected_flags = [
+ '--ignore-certificate-errors-spki-list='
+ 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
+ '--proxy-server=socks5://localhost:1'
+ ]
+ self.assertEqual(chrome_proxy.device_proxy_port, 1)
+ self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
+
+
+class WPRServerTest(unittest.TestCase):
+ @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_replaymode(self, wpr_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_archive_file = os.path.abspath(__file__)
+ wpr_server.StartServer(wpr_archive_file)
+
+ wpr_mock.assert_called_once_with(wpr_archive_file,
+ '127.0.0.1',
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+
+ self.assertEqual(wpr_server._archive_path, wpr_archive_file)
+ self.assertTrue(wpr_server._server)
+
+ @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_recordmode(self, wpr_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server.record_mode = True
+ wpr_server.StartServer(os.path.abspath(__file__))
+ wpr_archive_file = os.path.abspath(__file__)
+
+ wpr_mock.assert_called_once_with(wpr_archive_file,
+ '127.0.0.1',
+ http_port=0,
+ https_port=0,
+ replay_options=['--record'])
+
+ self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+ self.assertTrue(wpr_server._server)
+
+ #pylint: disable=no-self-use
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_recordmode(self, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ start_server_mock.return_value = {'http': 1, 'https': 2}
+ wpr_server.StartServer(os.path.abspath(__file__))
+
+ start_server_mock.assert_called_once()
+ self.assertEqual(wpr_server._host_http_port, 1)
+ self.assertEqual(wpr_server._host_https_port, 2)
+ self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
+ self.assertTrue(wpr_server._server)
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_reuseServer(self, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server._archive_path = os.path.abspath(__file__)
+ wpr_server.StartServer(os.path.abspath(__file__))
+ start_server_mock.assert_not_called()
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server._archive_path = ''
+ wpr_server.StartServer(os.path.abspath(__file__))
+ start_server_mock.assert_called_once()
+ stop_server_mock.assert_called_once()
+
+ #pylint: enable=no-self-use
+
+ @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+ def test_StopServer(self, stop_server_mock):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server._server = webpagereplay_go_server.ReplayServer(
+ os.path.abspath(__file__),
+ chrome_proxy_utils.PROXY_HOST_IP,
+ http_port=0,
+ https_port=0,
+ replay_options=[])
+ wpr_server.StopServer()
+ stop_server_mock.assert_called_once()
+ self.assertFalse(wpr_server._server)
+ self.assertFalse(wpr_server._archive_path)
+ self.assertFalse(wpr_server.http_port)
+ self.assertFalse(wpr_server.https_port)
+
+ def test_SetWPRRecordMode(self):
+ wpr_server = chrome_proxy_utils.WPRServer()
+ wpr_server.record_mode = True
+ self.assertTrue(wpr_server.record_mode)
+ wpr_server.record_mode = False
+ self.assertFalse(wpr_server.record_mode)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/third_party/libwebrtc/build/android/pylib/utils/decorators.py b/third_party/libwebrtc/build/android/pylib/utils/decorators.py
new file mode 100644
index 0000000000..8eec1d1e58
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/decorators.py
@@ -0,0 +1,37 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+
+
+def Memoize(f):
+ """Decorator to cache return values of function."""
+ memoize_dict = {}
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ key = repr((args, kwargs))
+ if key not in memoize_dict:
+ memoize_dict[key] = f(*args, **kwargs)
+ return memoize_dict[key]
+ return wrapper
+
+
+def NoRaiseException(default_return_value=None, exception_message=''):
+ """Returns decorator that catches and logs uncaught Exceptions.
+
+ Args:
+ default_return_value: Value to return in the case of uncaught Exception.
+ exception_message: Message for uncaught exceptions.
+ """
+ def decorator(f):
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except Exception: # pylint: disable=broad-except
+ logging.exception(exception_message)
+ return default_return_value
+ return wrapper
+ return decorator
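
A short sketch of both decorators in use; the function names are illustrative:

    from pylib.utils import decorators

    @decorators.Memoize
    def expensive_lookup(serial):
      # Cached per (args, kwargs) key; computed only once per serial.
      return serial.upper()

    @decorators.NoRaiseException(default_return_value=[],
                                 exception_message='listing failed')
    def list_devices():
      raise RuntimeError('adb not found')  # Logged, not propagated.

    assert expensive_lookup('abc') == 'ABC'
    assert list_devices() == []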
diff --git a/third_party/libwebrtc/build/android/pylib/utils/decorators_test.py b/third_party/libwebrtc/build/android/pylib/utils/decorators_test.py
new file mode 100755
index 0000000000..5d39846824
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/decorators_test.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env vpython3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for decorators.py."""
+
+import unittest
+
+from pylib.utils import decorators
+
+
+class NoRaiseExceptionDecoratorTest(unittest.TestCase):
+
+ def testFunctionDoesNotRaiseException(self):
+ """Tests that the |NoRaiseException| decorator catches exception."""
+
+ @decorators.NoRaiseException()
+ def raiseException():
+ raise Exception()
+
+ try:
+ raiseException()
+ except Exception: # pylint: disable=broad-except
+ self.fail('Exception was not caught by |NoRaiseException| decorator')
+
+ def testFunctionReturnsCorrectValues(self):
+ """Tests that the |NoRaiseException| decorator returns correct values."""
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def raiseException():
+ raise Exception()
+
+ @decorators.NoRaiseException(default_return_value=111)
+ def doesNotRaiseException():
+ return 999
+
+ self.assertEqual(raiseException(), 111)
+ self.assertEqual(doesNotRaiseException(), 999)
+
+
+class MemoizeDecoratorTest(unittest.TestCase):
+
+ def testFunctionExceptionNotMemoized(self):
+ """Tests that |Memoize| decorator does not cache exception results."""
+
+ class ExceptionType1(Exception):
+ pass
+
+ class ExceptionType2(Exception):
+ pass
+
+ @decorators.Memoize
+ def raiseExceptions():
+ if raiseExceptions.count == 0:
+ raiseExceptions.count += 1
+ raise ExceptionType1()
+
+ if raiseExceptions.count == 1:
+ raise ExceptionType2()
+ raiseExceptions.count = 0
+
+ with self.assertRaises(ExceptionType1):
+ raiseExceptions()
+ with self.assertRaises(ExceptionType2):
+ raiseExceptions()
+
+ def testFunctionResultMemoized(self):
+ """Tests that |Memoize| decorator caches results."""
+
+ @decorators.Memoize
+ def memoized():
+ memoized.count += 1
+ return memoized.count
+ memoized.count = 0
+
+ def notMemoized():
+ notMemoized.count += 1
+ return notMemoized.count
+ notMemoized.count = 0
+
+ self.assertEqual(memoized(), 1)
+ self.assertEqual(memoized(), 1)
+ self.assertEqual(memoized(), 1)
+
+ self.assertEqual(notMemoized(), 1)
+ self.assertEqual(notMemoized(), 2)
+ self.assertEqual(notMemoized(), 3)
+
+ def testFunctionMemoizedBasedOnArgs(self):
+ """Tests that |Memoize| caches results based on args and kwargs."""
+
+ @decorators.Memoize
+ def returnValueBasedOnArgsKwargs(a, k=0):
+ return a + k
+
+ self.assertEqual(returnValueBasedOnArgsKwargs(1, 1), 2)
+ self.assertEqual(returnValueBasedOnArgsKwargs(1, 2), 3)
+ self.assertEqual(returnValueBasedOnArgsKwargs(2, 1), 3)
+ self.assertEqual(returnValueBasedOnArgsKwargs(3, 3), 6)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/third_party/libwebrtc/build/android/pylib/utils/device_dependencies.py b/third_party/libwebrtc/build/android/pylib/utils/device_dependencies.py
new file mode 100644
index 0000000000..9cb5bd892a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/device_dependencies.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+from pylib import constants
+
+
+_EXCLUSIONS = [
+ re.compile(r'.*OWNERS'), # Should never be included.
+ re.compile(r'.*\.crx'), # Chrome extension zip files.
+ re.compile(os.path.join('.*',
+ r'\.git.*')), # Any '.git*' directories/files.
+ re.compile(r'.*\.so'), # Libraries packed into .apk.
+ re.compile(r'.*Mojo.*manifest\.json'), # Some source_set()s pull these in.
+ re.compile(r'.*\.py'), # Some test_support targets include python deps.
+ re.compile(r'.*\.apk'), # Should be installed separately.
+ re.compile(r'.*lib.java/.*'), # Never need java intermediates.
+
+ # Test filter files:
+ re.compile(r'.*/testing/buildbot/filters/.*'),
+
+ # Chrome external extensions config file.
+ re.compile(r'.*external_extensions\.json'),
+
+ # Exists just to test the compile, not to be run.
+ re.compile(r'.*jni_generator_tests'),
+
+ # v8's blobs and icu data get packaged into APKs.
+ re.compile(r'.*snapshot_blob.*\.bin'),
+ re.compile(r'.*icudtl.bin'),
+
+ # Scripts that are needed by swarming, but not on devices:
+ re.compile(r'.*llvm-symbolizer'),
+ re.compile(r'.*md5sum_bin'),
+ re.compile(os.path.join('.*', 'development', 'scripts', 'stack')),
+
+ # Required for java deobfuscation on the host:
+ re.compile(r'.*build/android/stacktrace/.*'),
+ re.compile(r'.*third_party/jdk/.*'),
+ re.compile(r'.*third_party/proguard/.*'),
+
+ # Build artifacts:
+ re.compile(r'.*\.stamp'),
+ re.compile(r'.*.pak\.info'),
+ re.compile(r'.*\.incremental\.json'),
+]
+
+
+def _FilterDataDeps(abs_host_files):
+ exclusions = _EXCLUSIONS + [
+ re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))
+ ]
+ return [p for p in abs_host_files if not any(r.match(p) for r in exclusions)]
+
+
+def DevicePathComponentsFor(host_path, output_directory):
+ """Returns the device path components for a given host path.
+
+ This returns the device path as a list of joinable path components,
+ with None as the first element to indicate that the path should be
+ rooted at $EXTERNAL_STORAGE.
+
+ e.g., given
+
+ '$RUNTIME_DEPS_ROOT_DIR/foo/bar/baz.txt'
+
+ this would return
+
+ [None, 'foo', 'bar', 'baz.txt']
+
+  This handles a couple of classes of paths differently than it otherwise
+  would:
+    - All .pak files get mapped to the top-level paks/ directory.
+    - All other dependencies get mapped to the top-level directory.
+    - If a file is not in the output directory, then its relative path to
+      the output directory will start with .. components, so we remove those
+      and the path gets mapped to the top-level directory.
+    - If a file is in the output directory, then its relative path to the
+      output directory gets mapped to the top-level directory.
+
+ e.g. given
+
+ '$RUNTIME_DEPS_ROOT_DIR/out/Release/icu_fake_dir/icudtl.dat'
+
+ this would return
+
+ [None, 'icu_fake_dir', 'icudtl.dat']
+
+ Args:
+    host_path: The absolute path to the host file.
+    output_directory: The absolute path to the build output directory.
+ Returns:
+ A list of device path components.
+ """
+ if (host_path.startswith(output_directory) and
+ os.path.splitext(host_path)[1] == '.pak'):
+ return [None, 'paks', os.path.basename(host_path)]
+
+ rel_host_path = os.path.relpath(host_path, output_directory)
+
+ device_path_components = [None]
+ p = rel_host_path
+ while p:
+ p, d = os.path.split(p)
+ # The relative path from the output directory to a file under the runtime
+ # deps root directory may start with multiple .. strings, so they need to
+ # be skipped.
+ if d and d != os.pardir:
+ device_path_components.insert(1, d)
+ return device_path_components
+
+
+def GetDataDependencies(runtime_deps_path):
+ """Returns a list of device data dependencies.
+
+ Args:
+ runtime_deps_path: A str path to the .runtime_deps file.
+ Returns:
+ A list of (host_path, device_path) tuples.
+ """
+ if not runtime_deps_path:
+ return []
+
+ with open(runtime_deps_path, 'r') as runtime_deps_file:
+ rel_host_files = [l.strip() for l in runtime_deps_file if l]
+
+ output_directory = constants.GetOutDirectory()
+ abs_host_files = [
+ os.path.abspath(os.path.join(output_directory, r))
+ for r in rel_host_files]
+ filtered_abs_host_files = _FilterDataDeps(abs_host_files)
+ # TODO(crbug.com/752610): Filter out host executables, and investigate
+ # whether other files could be filtered as well.
+ return [(f, DevicePathComponentsFor(f, output_directory))
+ for f in filtered_abs_host_files]
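
The mapping described in the DevicePathComponentsFor() docstring, illustrated
with placeholder paths:

    from pylib.utils import device_dependencies

    out_dir = '/src/out/Release'
    device_dependencies.DevicePathComponentsFor(
        '/src/out/Release/icu_fake_dir/icudtl.dat', out_dir)
    # -> [None, 'icu_fake_dir', 'icudtl.dat']
    device_dependencies.DevicePathComponentsFor(
        '/src/out/Release/resources.pak', out_dir)
    # -> [None, 'paks', 'resources.pak']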
diff --git a/third_party/libwebrtc/build/android/pylib/utils/device_dependencies_test.py b/third_party/libwebrtc/build/android/pylib/utils/device_dependencies_test.py
new file mode 100755
index 0000000000..35879882b7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/device_dependencies_test.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env vpython3
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from pylib import constants
+from pylib.utils import device_dependencies
+
+
+class DevicePathComponentsForTest(unittest.TestCase):
+
+ def testCheckedInFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEqual([None, 'foo', 'bar', 'baz.txt'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEqual([None, 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectorySubdirFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'test_dir', 'icudtl.dat')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEqual([None, 'test_dir', 'icudtl.dat'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+ def testOutputDirectoryPakFile(self):
+ test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
+ 'foo.pak')
+ output_directory = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
+ self.assertEqual([None, 'paks', 'foo.pak'],
+ device_dependencies.DevicePathComponentsFor(
+ test_path, output_directory))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/pylib/utils/dexdump.py b/third_party/libwebrtc/build/android/pylib/utils/dexdump.py
new file mode 100644
index 0000000000..f81ac603d4
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/dexdump.py
@@ -0,0 +1,136 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import shutil
+import sys
+import tempfile
+from xml.etree import ElementTree
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
+from util import build_utils
+
+DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+
+def Dump(apk_path):
+ """Dumps class and method information from a APK into a dict via dexdump.
+
+ Args:
+ apk_path: An absolute path to an APK file to dump.
+ Returns:
+    A list of dicts, one per dex file, each in the following format:
+ {
+ <package_name>: {
+ 'classes': {
+ <class_name>: {
+ 'methods': [<method_1>, <method_2>]
+ }
+ }
+ }
+ }
+ """
+ try:
+ dexfile_dir = tempfile.mkdtemp()
+ parsed_dex_files = []
+ for dex_file in build_utils.ExtractAll(apk_path,
+ dexfile_dir,
+ pattern='*classes*.dex'):
+ output_xml = cmd_helper.GetCmdOutput(
+ [DEXDUMP_PATH, '-l', 'xml', dex_file])
+ # Dexdump doesn't escape its XML output very well; decode it as utf-8 with
+ # invalid sequences replaced, then remove forbidden characters and
+ # re-encode it (as etree expects a byte string as input so it can figure
+ # out the encoding itself from the XML declaration)
+ BAD_XML_CHARS = re.compile(
+ u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' +
+ u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
+ if sys.version_info[0] < 3:
+ decoded_xml = output_xml.decode('utf-8', 'replace')
+ clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml)
+ else:
+ # Line duplicated to avoid pylint redefined-variable-type error.
+ clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml)
+ parsed_dex_files.append(
+ _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8'))))
+ return parsed_dex_files
+ finally:
+ shutil.rmtree(dexfile_dir)
+
+
+def _ParseRootNode(root):
+ """Parses the XML output of dexdump. This output is in the following format.
+
+ This is a subset of the information contained within dexdump output.
+
+ <api>
+ <package name="foo.bar">
+ <class name="Class" extends="foo.bar.SuperClass">
+ <field name="Field">
+ </field>
+ <constructor name="Method">
+ <parameter name="Param" type="int">
+ </parameter>
+ </constructor>
+ <method name="Method">
+ <parameter name="Param" type="int">
+ </parameter>
+ </method>
+ </class>
+ </package>
+ </api>
+ """
+ results = {}
+ for child in root:
+ if child.tag == 'package':
+ package_name = child.attrib['name']
+ parsed_node = _ParsePackageNode(child)
+ if package_name in results:
+ results[package_name]['classes'].update(parsed_node['classes'])
+ else:
+ results[package_name] = parsed_node
+ return results
+
+
+def _ParsePackageNode(package_node):
+ """Parses a <package> node from the dexdump xml output.
+
+ Returns:
+ A dict in the format:
+ {
+ 'classes': {
+ <class_1>: {
+ 'methods': [<method_1>, <method_2>]
+ },
+ <class_2>: {
+ 'methods': [<method_1>, <method_2>]
+ },
+ }
+ }
+ """
+ classes = {}
+ for child in package_node:
+ if child.tag == 'class':
+ classes[child.attrib['name']] = _ParseClassNode(child)
+ return {'classes': classes}
+
+
+def _ParseClassNode(class_node):
+ """Parses a <class> node from the dexdump xml output.
+
+ Returns:
+ A dict in the format:
+ {
+ 'methods': [<method_1>, <method_2>]
+ }
+ """
+ methods = []
+ for child in class_node:
+ if child.tag == 'method':
+ methods.append(child.attrib['name'])
+ return {'methods': methods, 'superclass': class_node.attrib['extends']}
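
A rough sketch of consuming Dump()'s output, assuming dexdump from the Android
SDK is available; the APK path is a placeholder:

    from pylib.utils import dexdump

    parsed = dexdump.Dump('/abs/path/to/Example.apk')  # One dict per .dex file.
    for dex in parsed:
      for package, package_info in dex.items():
        for class_name, class_info in package_info['classes'].items():
          print(package, class_name, class_info['methods'])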
diff --git a/third_party/libwebrtc/build/android/pylib/utils/dexdump_test.py b/third_party/libwebrtc/build/android/pylib/utils/dexdump_test.py
new file mode 100755
index 0000000000..fc2914a4e5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/dexdump_test.py
@@ -0,0 +1,141 @@
+#! /usr/bin/env vpython3
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from xml.etree import ElementTree
+
+from pylib.utils import dexdump
+
+# pylint: disable=protected-access
+
+
+class DexdumpXMLParseTest(unittest.TestCase):
+
+ def testParseRootXmlNode(self):
+ example_xml_string = (
+ '<api>'
+ '<package name="com.foo.bar1">'
+ '<class'
+ ' name="Class1"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class1Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '<method'
+ ' name="class1Method2"'
+        ' return="void"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '<class'
+ ' name="Class2"'
+ ' extends="java.lang.Object"'
+ ' abstract="false"'
+ ' static="false"'
+ ' final="true"'
+ ' visibility="public">'
+ '<method'
+ ' name="class2Method1"'
+ ' return="java.lang.String"'
+ ' abstract="false"'
+ ' native="false"'
+ ' synchronized="false"'
+ ' static="false"'
+ ' final="false"'
+ ' visibility="public">'
+ '</method>'
+ '</class>'
+ '</package>'
+ '<package name="com.foo.bar2">'
+ '</package>'
+ '<package name="com.foo.bar3">'
+ '</package>'
+ '</api>')
+
+ actual = dexdump._ParseRootNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'com.foo.bar1' : {
+ 'classes': {
+ 'Class1': {
+ 'methods': ['class1Method1', 'class1Method2'],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': ['class2Method1'],
+ 'superclass': 'java.lang.Object',
+ }
+ },
+ },
+ 'com.foo.bar2' : {'classes': {}},
+ 'com.foo.bar3' : {'classes': {}},
+ }
+ self.assertEqual(expected, actual)
+
+ def testParsePackageNode(self):
+ example_xml_string = (
+ '<package name="com.foo.bar">'
+ '<class name="Class1" extends="java.lang.Object">'
+ '</class>'
+ '<class name="Class2" extends="java.lang.Object">'
+ '</class>'
+ '</package>')
+
+
+ actual = dexdump._ParsePackageNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'classes': {
+ 'Class1': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ 'Class2': {
+ 'methods': [],
+ 'superclass': 'java.lang.Object',
+ },
+ },
+ }
+ self.assertEqual(expected, actual)
+
+ def testParseClassNode(self):
+ example_xml_string = (
+ '<class name="Class1" extends="java.lang.Object">'
+ '<method name="method1">'
+ '</method>'
+ '<method name="method2">'
+ '</method>'
+ '</class>')
+
+ actual = dexdump._ParseClassNode(
+ ElementTree.fromstring(example_xml_string))
+
+ expected = {
+ 'methods': ['method1', 'method2'],
+ 'superclass': 'java.lang.Object',
+ }
+ self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/pylib/utils/gold_utils.py b/third_party/libwebrtc/build/android/pylib/utils/gold_utils.py
new file mode 100644
index 0000000000..0b79a6d7cb
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/gold_utils.py
@@ -0,0 +1,78 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""//build/android implementations of //testing/skia_gold_common.
+
+Used for interacting with the Skia Gold image diffing service.
+"""
+
+import os
+import shutil
+
+from devil.utils import cmd_helper
+from pylib.base.output_manager import Datatype
+from pylib.constants import host_paths
+from pylib.utils import repo_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+ from skia_gold_common import skia_gold_session
+ from skia_gold_common import skia_gold_session_manager
+ from skia_gold_common import skia_gold_properties
+
+
+class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+ def _StoreDiffLinks(self, image_name, output_manager, output_dir):
+ """See SkiaGoldSession._StoreDiffLinks for general documentation.
+
+ |output_manager| must be a build.android.pylib.base.OutputManager instance.
+ """
+ given_path = closest_path = diff_path = None
+ # The directory should contain "input-<hash>.png", "closest-<hash>.png",
+ # and "diff.png".
+ for f in os.listdir(output_dir):
+ filepath = os.path.join(output_dir, f)
+ if f.startswith('input-'):
+ given_path = filepath
+ elif f.startswith('closest-'):
+ closest_path = filepath
+ elif f == 'diff.png':
+ diff_path = filepath
+ results = self._comparison_results.setdefault(image_name,
+ self.ComparisonResults())
+ if given_path:
+ with output_manager.ArchivedTempfile('given_%s.png' % image_name,
+ 'gold_local_diffs',
+ Datatype.PNG) as given_file:
+ shutil.move(given_path, given_file.name)
+ results.local_diff_given_image = given_file.Link()
+ if closest_path:
+ with output_manager.ArchivedTempfile('closest_%s.png' % image_name,
+ 'gold_local_diffs',
+ Datatype.PNG) as closest_file:
+ shutil.move(closest_path, closest_file.name)
+ results.local_diff_closest_image = closest_file.Link()
+ if diff_path:
+ with output_manager.ArchivedTempfile('diff_%s.png' % image_name,
+ 'gold_local_diffs',
+ Datatype.PNG) as diff_file:
+ shutil.move(diff_path, diff_file.name)
+ results.local_diff_diff_image = diff_file.Link()
+
+ @staticmethod
+ def _RunCmdForRcAndOutput(cmd):
+ rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd,
+ merge_stderr=True)
+ return rc, stdout
+
+
+class AndroidSkiaGoldSessionManager(
+ skia_gold_session_manager.SkiaGoldSessionManager):
+ @staticmethod
+ def GetSessionClass():
+ return AndroidSkiaGoldSession
+
+
+class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties):
+ @staticmethod
+ def _GetGitOriginMasterHeadSha1():
+ return repo_utils.GetGitOriginMasterHeadSHA1(host_paths.DIR_SOURCE_ROOT)
diff --git a/third_party/libwebrtc/build/android/pylib/utils/gold_utils_test.py b/third_party/libwebrtc/build/android/pylib/utils/gold_utils_test.py
new file mode 100755
index 0000000000..cc1da043fc
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/gold_utils_test.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for gold_utils."""
+
+#pylint: disable=protected-access
+
+import contextlib
+import os
+import tempfile
+import unittest
+
+from pylib.constants import host_paths
+from pylib.utils import gold_utils
+
+with host_paths.SysPath(host_paths.BUILD_PATH):
+ from skia_gold_common import unittest_utils
+
+import mock # pylint: disable=import-error
+from pyfakefs import fake_filesystem_unittest # pylint: disable=import-error
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+def assertArgWith(test, arg_list, arg, value):
+ i = arg_list.index(arg)
+ test.assertEqual(arg_list[i + 1], value)
+
+
+class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
+ def setUp(self):
+ self.setUpPyfakefs()
+ self._working_dir = tempfile.mkdtemp()
+ self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+ @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput')
+ def test_commandCommonArgs(self, cmd_mock):
+ cmd_mock.return_value = (None, None)
+ args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
+ sgp = gold_utils.AndroidSkiaGoldProperties(args)
+ session = gold_utils.AndroidSkiaGoldSession(self._working_dir,
+ sgp,
+ self._json_keys,
+ 'corpus',
+ instance='instance')
+ session.Diff('name', 'png_file', None)
+ call_args = cmd_mock.call_args[0][0]
+ self.assertIn('diff', call_args)
+ assertArgWith(self, call_args, '--corpus', 'corpus')
+ # TODO(skbug.com/10610): Remove the -public once we go back to using the
+ # non-public instance, or add a second test for testing that the correct
+ # instance is chosen if we decide to support both depending on what the
+ # user is authenticated for.
+ assertArgWith(self, call_args, '--instance', 'instance-public')
+ assertArgWith(self, call_args, '--input', 'png_file')
+ assertArgWith(self, call_args, '--test', 'name')
+ # TODO(skbug.com/10611): Re-add this assert and remove the check for the
+ # absence of the directory once we switch back to using the proper working
+ # directory.
+ # assertArgWith(self, call_args, '--work-dir', self._working_dir)
+ self.assertNotIn(self._working_dir, call_args)
+ i = call_args.index('--out-dir')
+ # The output directory should be a subdirectory of the working directory.
+ self.assertIn(self._working_dir, call_args[i + 1])
+
+
+class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase):
+ class FakeArchivedFile(object):
+ def __init__(self, path):
+ self.name = path
+
+ def Link(self):
+ return 'file://' + self.name
+
+ class FakeOutputManager(object):
+ def __init__(self):
+ self.output_dir = tempfile.mkdtemp()
+
+ @contextlib.contextmanager
+ def ArchivedTempfile(self, image_name, _, __):
+ filepath = os.path.join(self.output_dir, image_name)
+ yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath)
+
+ def setUp(self):
+ self.setUpPyfakefs()
+ self._working_dir = tempfile.mkdtemp()
+ self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
+
+ def test_outputManagerUsed(self):
+ args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
+ sgp = gold_utils.AndroidSkiaGoldProperties(args)
+ session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp,
+ self._json_keys, None, None)
+ with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f:
+ f.write('input')
+ with open(os.path.join(self._working_dir, 'closest-closesthash.png'),
+ 'w') as f:
+ f.write('closest')
+ with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f:
+ f.write('diff')
+
+ output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager()
+ session._StoreDiffLinks('foo', output_manager, self._working_dir)
+
+ copied_input = os.path.join(output_manager.output_dir, 'given_foo.png')
+ copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png')
+ copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png')
+ with open(copied_input) as f:
+ self.assertEqual(f.read(), 'input')
+ with open(copied_closest) as f:
+ self.assertEqual(f.read(), 'closest')
+ with open(copied_diff) as f:
+ self.assertEqual(f.read(), 'diff')
+
+ self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input)
+ self.assertEqual(session.GetClosestImageLink('foo'),
+ 'file://' + copied_closest)
+ self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/third_party/libwebrtc/build/android/pylib/utils/google_storage_helper.py b/third_party/libwebrtc/build/android/pylib/utils/google_storage_helper.py
new file mode 100644
index 0000000000..94efe33f85
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/google_storage_helper.py
@@ -0,0 +1,129 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to Google Storage.
+
+Text data should be streamed to logdog using the |logdog_helper| module.
+Because logdog has no image or HTML viewer, such data should instead be
+uploaded directly to Google Storage using this module.
+"""
+
+import logging
+import os
+import sys
+import time
+try:
+ from urllib.parse import urlparse
+except ImportError:
+ from urlparse import urlparse
+
+from pylib.constants import host_paths
+from pylib.utils import decorators
+
+if host_paths.DEVIL_PATH not in sys.path:
+ sys.path.append(host_paths.DEVIL_PATH)
+from devil.utils import cmd_helper
+
+_GSUTIL_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'catapult',
+ 'third_party', 'gsutil', 'gsutil.py')
+_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
+_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
+
+
+@decorators.NoRaiseException(default_return_value='')
+def upload(name, filepath, bucket, gs_args=None, command_args=None,
+ content_type=None, authenticated_link=True):
+ """Uploads data to Google Storage.
+
+ Args:
+ name: Name of the file on Google Storage.
+ filepath: Path to file you want to upload.
+    bucket: Bucket to upload file to.
+    gs_args: Additional top-level arguments to pass to gsutil.
+    command_args: Additional arguments to pass to the gsutil cp command.
+ content_type: Content type to upload as. If not specified, Google storage
+ will attempt to infer content type from file extension.
+ authenticated_link: Whether to return a link that requires user to
+ authenticate with a Google account. Setting this to false will return
+ a link that does not require user to be signed into Google account but
+ will only work for completely public storage buckets.
+ Returns:
+ Web link to item uploaded to Google Storage bucket.
+ """
+ bucket = _format_bucket_name(bucket)
+
+ gs_path = 'gs://%s/%s' % (bucket, name)
+ logging.info('Uploading %s to %s', filepath, gs_path)
+
+ cmd = [_GSUTIL_PATH, '-q']
+ cmd.extend(gs_args or [])
+ if content_type:
+ cmd.extend(['-h', 'Content-Type:%s' % content_type])
+ cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
+
+ cmd_helper.RunCmd(cmd)
+
+ return get_url_link(name, bucket, authenticated_link)
+
+
+@decorators.NoRaiseException(default_return_value='')
+def read_from_link(link):
+ # Note that urlparse returns the path with an initial '/', so we only need to
+  # add one more after the 'gs:'.
+ gs_path = 'gs:/%s' % urlparse(link).path
+ cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
+ return cmd_helper.GetCmdOutput(cmd)
+
+
+@decorators.NoRaiseException(default_return_value=False)
+def exists(name, bucket):
+ bucket = _format_bucket_name(bucket)
+ gs_path = 'gs://%s/%s' % (bucket, name)
+
+ cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
+ return_code = cmd_helper.RunCmd(cmd)
+ return return_code == 0
+
+
+# TODO(jbudorick): Delete this function. Only one user of it.
+def unique_name(basename, suffix='', timestamp=True, device=None):
+ """Helper function for creating a unique name for a file to store in GS.
+
+ Args:
+ basename: Base of the unique filename.
+ suffix: Suffix of filename.
+    timestamp: Whether or not to add a timestamp to the name.
+    device: Device whose serial should be appended to the name.
+ """
+ return '%s%s%s%s' % (
+ basename,
+ '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
+ if timestamp else '',
+ '_%s' % device.serial if device else '',
+ suffix)
+
+
+def get_url_link(name, bucket, authenticated_link=True):
+ """Get url link before/without uploading.
+
+ Args:
+ name: Name of the file on Google Storage.
+ bucket: Bucket to upload file to.
+ authenticated_link: Whether to return a link that requires user to
+ authenticate with a Google account. Setting this to false will return
+ a link that does not require user to be signed into Google account but
+ will only work for completely public storage buckets.
+ Returns:
+ Web link to item to be uploaded to Google Storage bucket
+ """
+ bucket = _format_bucket_name(bucket)
+ url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
+ return os.path.join(url_template % bucket, name)
+
+
+def _format_bucket_name(bucket):
+ if bucket.startswith('gs://'):
+ bucket = bucket[len('gs://'):]
+ if bucket.endswith('/'):
+ bucket = bucket[:-1]
+ return bucket
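
For reference, a minimal usage sketch of the google_storage_helper module above, assuming the pylib imports resolve and the bundled gsutil.py is present in the checkout; the bucket name and local file path below are hypothetical:

from pylib.utils import google_storage_helper

# Hypothetical bucket and local file; both must exist for the upload to work.
bucket = 'gs://my-test-bucket'
name = google_storage_helper.unique_name('render_test', suffix='.png')
link = google_storage_helper.upload(
    name, '/tmp/render_test.png', bucket,
    content_type='image/png', authenticated_link=True)
print(link)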
diff --git a/third_party/libwebrtc/build/android/pylib/utils/instrumentation_tracing.py b/third_party/libwebrtc/build/android/pylib/utils/instrumentation_tracing.py
new file mode 100644
index 0000000000..f1d03a0dcf
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/instrumentation_tracing.py
@@ -0,0 +1,204 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions to instrument all Python function calls.
+
+This generates a JSON file readable by Chrome's about:tracing. To use it,
+either call start_instrumenting and stop_instrumenting at the appropriate times,
+or use the Instrument context manager.
+
+A function is only traced if it is from a Python module that matches at least
+one regular expression object in to_include, and does not match any in
+to_exclude. In between the start and stop events, every function call of a
+function from such a module will be added to the trace.
+"""
+
+import contextlib
+import functools
+import inspect
+import os
+import re
+import sys
+import threading
+
+from py_trace_event import trace_event
+
+
+# Modules to exclude by default (to avoid problems like infinite loops)
+DEFAULT_EXCLUDE = [r'py_trace_event\..*']
+
+class _TraceArguments(object):
+ def __init__(self):
+ """Wraps a dictionary to ensure safe evaluation of repr()."""
+ self._arguments = {}
+
+ @staticmethod
+ def _safeStringify(item):
+ try:
+ item_str = repr(item)
+ except Exception: # pylint: disable=broad-except
+ try:
+ item_str = str(item)
+ except Exception: # pylint: disable=broad-except
+ item_str = "<ERROR>"
+ return item_str
+
+ def add(self, key, val):
+ key_str = _TraceArguments._safeStringify(key)
+ val_str = _TraceArguments._safeStringify(val)
+
+ self._arguments[key_str] = val_str
+
+ def __repr__(self):
+ return repr(self._arguments)
+
+
+saved_thread_ids = set()
+
+def _shouldTrace(frame, to_include, to_exclude, included, excluded):
+ """
+ Decides whether or not the function called in frame should be traced.
+
+ Args:
+ frame: The Python frame object of this function call.
+ to_include: Set of regex objects for modules which should be traced.
+ to_exclude: Set of regex objects for modules which should not be traced.
+ included: Set of module names we've determined should be traced.
+ excluded: Set of module names we've determined should not be traced.
+ """
+ if not inspect.getmodule(frame):
+ return False
+
+ module_name = inspect.getmodule(frame).__name__
+
+ if module_name in included:
+ includes = True
+ elif to_include:
+ includes = any([pattern.match(module_name) for pattern in to_include])
+ else:
+ includes = True
+
+ if includes:
+ included.add(module_name)
+ else:
+ return False
+
+ # Find the modules of every function in the stack trace.
+ frames = inspect.getouterframes(frame)
+ calling_module_names = [inspect.getmodule(fr[0]).__name__ for fr in frames]
+
+ # Return False for anything with an excluded module's function anywhere in the
+ # stack trace (even if the function itself is in an included module).
+ if to_exclude:
+ for calling_module in calling_module_names:
+ if calling_module in excluded:
+ return False
+ for pattern in to_exclude:
+ if pattern.match(calling_module):
+ excluded.add(calling_module)
+ return False
+
+ return True
+
+def _generate_trace_function(to_include, to_exclude):
+ to_include = {re.compile(item) for item in to_include}
+ to_exclude = {re.compile(item) for item in to_exclude}
+ to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
+
+ included = set()
+ excluded = set()
+
+ tracing_pid = os.getpid()
+
+ def traceFunction(frame, event, arg):
+ del arg
+
+ # Don't try to trace in subprocesses.
+ if os.getpid() != tracing_pid:
+ sys.settrace(None)
+ return None
+
+ # pylint: disable=unused-argument
+ if event not in ("call", "return"):
+ return None
+
+ function_name = frame.f_code.co_name
+ filename = frame.f_code.co_filename
+ line_number = frame.f_lineno
+
+ if _shouldTrace(frame, to_include, to_exclude, included, excluded):
+ if event == "call":
+ # This function is beginning; we save the thread name (if that hasn't
+ # been done), record the Begin event, and return this function to be
+ # used as the local trace function.
+
+ thread_id = threading.current_thread().ident
+
+ if thread_id not in saved_thread_ids:
+ thread_name = threading.current_thread().name
+
+ trace_event.trace_set_thread_name(thread_name)
+
+ saved_thread_ids.add(thread_id)
+
+ arguments = _TraceArguments()
+ # The function's argument values are stored in the frame's
+ # |co_varnames| as the first |co_argcount| elements. (Following that
+ # are local variables.)
+ for idx in range(frame.f_code.co_argcount):
+ arg_name = frame.f_code.co_varnames[idx]
+ arguments.add(arg_name, frame.f_locals[arg_name])
+ trace_event.trace_begin(function_name, arguments=arguments,
+ module=inspect.getmodule(frame).__name__,
+ filename=filename, line_number=line_number)
+
+ # Return this function, so it gets used as the "local trace function"
+ # within this function's frame (and in particular, gets called for this
+ # function's "return" event).
+ return traceFunction
+
+ if event == "return":
+ trace_event.trace_end(function_name)
+ return None
+
+ return traceFunction
+
+
+def no_tracing(f):
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ trace_func = sys.gettrace()
+ try:
+ sys.settrace(None)
+ threading.settrace(None)
+ return f(*args, **kwargs)
+ finally:
+ sys.settrace(trace_func)
+ threading.settrace(trace_func)
+ return wrapper
+
+
+def start_instrumenting(output_file, to_include=(), to_exclude=()):
+ """Enable tracing of all function calls (from specified modules)."""
+ trace_event.trace_enable(output_file)
+
+ traceFunc = _generate_trace_function(to_include, to_exclude)
+ sys.settrace(traceFunc)
+ threading.settrace(traceFunc)
+
+
+def stop_instrumenting():
+ trace_event.trace_disable()
+
+ sys.settrace(None)
+ threading.settrace(None)
+
+
+@contextlib.contextmanager
+def Instrument(output_file, to_include=(), to_exclude=()):
+ try:
+ start_instrumenting(output_file, to_include, to_exclude)
+ yield None
+ finally:
+ stop_instrumenting()
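
A short sketch of how the Instrument context manager above might be used, assuming py_trace_event is importable; the module filter, output path, and workload function are illustrative only:

from pylib.utils import instrumentation_tracing

def run_my_tests():  # hypothetical workload to be traced
  pass

# Trace only functions defined in modules whose names start with 'pylib.'.
with instrumentation_tracing.Instrument('trace.json',
                                        to_include=[r'pylib\..*']):
  run_my_tests()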
diff --git a/third_party/libwebrtc/build/android/pylib/utils/local_utils.py b/third_party/libwebrtc/build/android/pylib/utils/local_utils.py
new file mode 100644
index 0000000000..027cca3925
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/local_utils.py
@@ -0,0 +1,19 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Utilities for determining if a test is being run locally or not."""
+
+import os
+
+
+def IsOnSwarming():
+ """Determines whether we are on swarming or not.
+
+ Returns:
+ True if the test is being run on swarming, otherwise False.
+ """
+ # Look for the presence of the SWARMING_SERVER environment variable as a
+ # heuristic to determine whether we're running on a workstation or a bot.
+ # This should always be set on swarming, but would be strange to be set on
+ # a workstation.
+ return 'SWARMING_SERVER' in os.environ
diff --git a/third_party/libwebrtc/build/android/pylib/utils/logdog_helper.py b/third_party/libwebrtc/build/android/pylib/utils/logdog_helper.py
new file mode 100644
index 0000000000..3000a2f7cb
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/logdog_helper.py
@@ -0,0 +1,96 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to upload data to logdog."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import decorators
+
+sys.path.insert(
+ 0,
+ os.path.abspath(
+ os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'logdog')))
+from logdog import bootstrap # pylint: disable=import-error
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def text(name, data, content_type=None):
+ """Uploads text to logdog.
+
+ Args:
+ name: Name of the logdog stream.
+ data: String with data you want to upload.
+ content_type: The optional content type of the stream. If None, a
+ default content type will be chosen.
+
+ Returns:
+ Link to view uploaded text in logdog viewer.
+ """
+ logging.info('Writing text to logdog stream, %s', name)
+ with get_logdog_client().text(name, content_type=content_type) as stream:
+ stream.write(data)
+ return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value=None,
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def open_text(name):
+ """Returns a file like object which you can write to.
+
+ Args:
+ name: Name of the logdog stream.
+
+ Returns:
+ A file like object. close() file when done.
+ """
+ logging.info('Opening text logdog stream, %s', name)
+ return get_logdog_client().open_text(name)
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def binary(name, binary_path):
+ """Uploads binary to logdog.
+
+ Args:
+ name: Name of the logdog stream.
+ binary_path: Path to binary you want to upload.
+
+ Returns:
+ Link to view uploaded binary in logdog viewer.
+ """
+ logging.info('Writing binary to logdog stream, %s', name)
+ with get_logdog_client().binary(name) as stream:
+    with open(binary_path, 'rb') as f:
+ stream.write(f.read())
+ return stream.get_viewer_url()
+
+
+@decorators.NoRaiseException(default_return_value='',
+ exception_message=('Ignore this exception. '
+ 'crbug.com/675666'))
+def get_viewer_url(name):
+ """Get Logdog viewer URL.
+
+ Args:
+ name: Name of the logdog stream.
+
+ Returns:
+    Link to view the stream in the logdog viewer.
+ """
+ return get_logdog_client().get_viewer_url(name)
+
+
+@decorators.Memoize
+def get_logdog_client():
+ logging.info('Getting logdog client.')
+ return bootstrap.ButlerBootstrap.probe().stream_client()
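
A hedged sketch of the logdog_helper API above; it only does real work inside a LogDog/butler environment (e.g. on a bot), so the stream names and file path here are purely illustrative:

from pylib.utils import logdog_helper

# Both calls return a viewer URL, or '' if the NoRaiseException decorator
# swallowed an error (e.g. when not running under a LogDog butler).
text_url = logdog_helper.text('my_test/logcat', 'some log output')
binary_url = logdog_helper.binary('my_test/screenshot', '/tmp/screenshot.png')
print(text_url, binary_url)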
diff --git a/third_party/libwebrtc/build/android/pylib/utils/logging_utils.py b/third_party/libwebrtc/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000000..846d336c2c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+
+from pylib.constants import host_paths
+
+_COLORAMA_PATH = os.path.join(
+ host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
+
+with host_paths.SysPath(_COLORAMA_PATH, position=0):
+ import colorama
+
+BACK = colorama.Back
+FORE = colorama.Fore
+STYLE = colorama.Style
+
+
+class _ColorFormatter(logging.Formatter):
+ # pylint does not see members added dynamically in the constructor.
+ # pylint: disable=no-member
+ color_map = {
+ logging.DEBUG: (FORE.CYAN),
+ logging.WARNING: (FORE.YELLOW),
+ logging.ERROR: (FORE.RED),
+ logging.CRITICAL: (BACK.RED),
+ }
+
+ def __init__(self, wrapped_formatter=None):
+ """Wraps a |logging.Formatter| and adds color."""
+    super(_ColorFormatter, self).__init__()
+ self._wrapped_formatter = wrapped_formatter or logging.Formatter()
+
+ #override
+ def format(self, record):
+ message = self._wrapped_formatter.format(record)
+ return self.Colorize(message, record.levelno)
+
+ def Colorize(self, message, log_level):
+ try:
+ return (''.join(self.color_map[log_level]) + message +
+ colorama.Style.RESET_ALL)
+ except KeyError:
+ return message
+
+
+class ColorStreamHandler(logging.StreamHandler):
+ """Handler that can be used to colorize logging output.
+
+ Example using a specific logger:
+
+ logger = logging.getLogger('my_logger')
+ logger.addHandler(ColorStreamHandler())
+ logger.info('message')
+
+ Example using the root logger:
+
+ ColorStreamHandler.MakeDefault()
+ logging.info('message')
+
+ """
+ def __init__(self, force_color=False):
+ super(ColorStreamHandler, self).__init__()
+ self.force_color = force_color
+ self.setFormatter(logging.Formatter())
+
+ @property
+ def is_tty(self):
+ isatty = getattr(self.stream, 'isatty', None)
+ return isatty and isatty()
+
+ #override
+ def setFormatter(self, formatter):
+ if self.force_color or self.is_tty:
+ formatter = _ColorFormatter(formatter)
+ super(ColorStreamHandler, self).setFormatter(formatter)
+
+ @staticmethod
+ def MakeDefault(force_color=False):
+ """
+ Replaces the default logging handlers with a coloring handler. To use
+ a colorizing handler at the same time as others, either register them
+ after this call, or add the ColorStreamHandler on the logger using
+ Logger.addHandler()
+
+ Args:
+ force_color: Set to True to bypass the tty check and always colorize.
+ """
+ # If the existing handlers aren't removed, messages are duplicated
+ logging.getLogger().handlers = []
+ logging.getLogger().addHandler(ColorStreamHandler(force_color))
+
+
+@contextlib.contextmanager
+def OverrideColor(level, color):
+ """Temporarily override the logging color for a specified level.
+
+ Args:
+ level: logging level whose color gets overridden.
+ color: tuple of formats to apply to log lines.
+ """
+ prev_colors = {}
+ for handler in logging.getLogger().handlers:
+ if isinstance(handler.formatter, _ColorFormatter):
+ prev_colors[handler.formatter] = handler.formatter.color_map[level]
+ handler.formatter.color_map[level] = color
+ try:
+ yield
+ finally:
+ for formatter, prev_color in prev_colors.items():
+ formatter.color_map[level] = prev_color
+
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+ """Momentarilly suppress logging events from all loggers.
+
+ TODO(jbudorick): This is not thread safe. Log events from other threads might
+ also inadvertently disappear.
+
+ Example:
+
+ with logging_utils.SuppressLogging():
+ # all but CRITICAL logging messages are suppressed
+ logging.info('just doing some thing') # not shown
+ logging.critical('something really bad happened') # still shown
+
+ Args:
+ level: logging events with this or lower levels are suppressed.
+ """
+ logging.disable(level)
+ yield
+ logging.disable(logging.NOTSET)
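
A minimal sketch combining the logging_utils helpers above; colors only appear when the stream is a tty unless force_color=True is passed to the handler:

import logging

from pylib.utils import logging_utils

logging_utils.ColorStreamHandler.MakeDefault()
logging.warning('shown in yellow when attached to a tty')

with logging_utils.OverrideColor(logging.WARNING,
                                 (logging_utils.FORE.GREEN,)):
  logging.warning('temporarily shown in green')

with logging_utils.SuppressLogging():
  logging.error('suppressed')       # not shown
  logging.critical('still shown')   # CRITICAL is above the default level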
diff --git a/third_party/libwebrtc/build/android/pylib/utils/maven_downloader.py b/third_party/libwebrtc/build/android/pylib/utils/maven_downloader.py
new file mode 100755
index 0000000000..7247f7c88c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/maven_downloader.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env vpython3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+import devil_chromium # pylint: disable=unused-import
+from devil.utils import cmd_helper
+from devil.utils import parallelizer
+
+
+def _MakeDirsIfAbsent(path):
+ try:
+ os.makedirs(path)
+ except OSError as err:
+ if err.errno != errno.EEXIST or not os.path.isdir(path):
+ raise
+
+
+class MavenDownloader(object):
+  '''
+  Downloads and installs the requested artifacts from the Google Maven repo.
+  The artifacts are expected to be specified in the format
+  "group_id:artifact_id:version:file_type", since the default file type is JAR
+  but most Android libraries are provided as AARs, which would otherwise fail
+  to download. See Install().
+  '''
+
+ # Remote repository to download the artifacts from. The support library and
+ # Google Play service are only distributed there, but third party libraries
+ # could use Maven Central or JCenter for example. The default Maven remote
+ # is Maven Central.
+ _REMOTE_REPO = 'https://maven.google.com'
+
+ # Default Maven repository.
+ _DEFAULT_REPO_PATH = os.path.join(
+ os.path.expanduser('~'), '.m2', 'repository')
+
+ def __init__(self, debug=False):
+ self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
+ self._remote_url = MavenDownloader._REMOTE_REPO
+ self._debug = debug
+
+ def Install(self, target_repo, artifacts, include_poms=False):
+ logging.info('Installing %d artifacts...', len(artifacts))
+ downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
+ for artifact in artifacts]
+ if self._debug:
+ for downloader in downloaders:
+ downloader.Run(include_poms)
+ else:
+ parallelizer.SyncParallelizer(downloaders).Run(include_poms)
+ logging.info('%d artifacts installed to %s', len(artifacts), target_repo)
+
+ @property
+ def repo_path(self):
+ return self._repo_path
+
+ @property
+ def remote_url(self):
+ return self._remote_url
+
+ @property
+ def debug(self):
+ return self._debug
+
+
+class _SingleArtifactDownloader(object):
+ '''Handles downloading and installing a single Maven artifact.'''
+
+ _POM_FILE_TYPE = 'pom'
+
+ def __init__(self, download_manager, artifact, target_repo):
+ self._download_manager = download_manager
+ self._artifact = artifact
+ self._target_repo = target_repo
+
+ def Run(self, include_pom=False):
+ parts = self._artifact.split(':')
+ if len(parts) != 4:
+ raise Exception('Artifacts expected as '
+ '"group_id:artifact_id:version:file_type".')
+ group_id, artifact_id, version, file_type = parts
+ self._InstallArtifact(group_id, artifact_id, version, file_type)
+
+ if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
+ self._InstallArtifact(group_id, artifact_id, version,
+ _SingleArtifactDownloader._POM_FILE_TYPE)
+
+ def _InstallArtifact(self, group_id, artifact_id, version, file_type):
+ logging.debug('Processing %s', self._artifact)
+
+ download_relpath = self._DownloadArtifact(
+ group_id, artifact_id, version, file_type)
+ logging.debug('Downloaded.')
+
+ install_path = self._ImportArtifact(download_relpath)
+ logging.debug('Installed %s', os.path.relpath(install_path))
+
+ def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
+    '''
+    Downloads the specified artifact using Maven to its standard location; see
+    MavenDownloader._DEFAULT_REPO_PATH.
+    '''
+ cmd = ['mvn',
+ 'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
+ '-DremoteRepositories={}'.format(self._download_manager.remote_url),
+ '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
+ file_type)]
+
+ stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
+
+ try:
+ ret_code = cmd_helper.Call(cmd, stdout=stdout)
+ if ret_code != 0:
+ raise Exception('Command "{}" failed'.format(' '.join(cmd)))
+ except OSError as e:
+      if e.errno == errno.ENOENT:
+ raise Exception('mvn command not found. Please install Maven.')
+ raise
+
+ return os.path.join(os.path.join(*group_id.split('.')),
+ artifact_id,
+ version,
+ '{}-{}.{}'.format(artifact_id, version, file_type))
+
+ def _ImportArtifact(self, artifact_path):
+ src_dir = os.path.join(self._download_manager.repo_path, artifact_path)
+ dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
+
+ _MakeDirsIfAbsent(dst_dir)
+ shutil.copy(src_dir, dst_dir)
+
+ return dst_dir
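
A usage sketch for MavenDownloader above, assuming mvn is on PATH and the machine can reach maven.google.com; the artifact coordinates and target directory are examples only:

from pylib.utils import maven_downloader

downloader = maven_downloader.MavenDownloader(debug=False)
# Artifact coordinates follow "group_id:artifact_id:version:file_type".
downloader.Install(
    '/tmp/my_local_repo',
    ['com.android.support:support-annotations:27.0.0:jar'],
    include_poms=True)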
diff --git a/third_party/libwebrtc/build/android/pylib/utils/proguard.py b/third_party/libwebrtc/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000000..9d5bae285a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/proguard.py
@@ -0,0 +1,285 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from devil.utils import cmd_helper
+from pylib import constants
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+ r'^(Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+ r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'^(\s*?)- Annotation \[L(\S*);\]:$')
+_ELEMENT_PRIMITIVE = 0
+_ELEMENT_ARRAY = 1
+_ELEMENT_ANNOTATION = 2
+_PROGUARD_ELEMENT_RES = [
+ (_ELEMENT_PRIMITIVE,
+ re.compile(r'^(\s*?)- Constant element value \[(\S*) .*\]$')),
+ (_ELEMENT_ARRAY,
+ re.compile(r'^(\s*?)- Array element value \[(\S*)\]:$')),
+ (_ELEMENT_ANNOTATION,
+ re.compile(r'^(\s*?)- Annotation element value \[(\S*)\]:$'))
+]
+_PROGUARD_INDENT_WIDTH = 2
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'^(\s*?)- \S+? \[(.*)\]$')
+
+
+def _GetProguardPath():
+ return os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'proguard',
+ 'lib', 'proguard603.jar')
+
+
+def Dump(jar_path):
+ """Dumps class and method information from a JAR into a dict via proguard.
+
+ Args:
+ jar_path: An absolute path to the JAR file to dump.
+ Returns:
+ A dict in the following format:
+ {
+ 'classes': [
+ {
+ 'class': '',
+ 'superclass': '',
+ 'annotations': {/* dict -- see below */},
+ 'methods': [
+ {
+ 'method': '',
+ 'annotations': {/* dict -- see below */},
+ },
+ ...
+ ],
+ },
+ ...
+ ],
+ }
+
+ Annotations dict format:
+ {
+ 'empty-annotation-class-name': None,
+ 'annotation-class-name': {
+ 'field': 'primitive-value',
+ 'field': [ 'array-item-1', 'array-item-2', ... ],
+ 'field': {
+ /* Object value */
+ 'field': 'primitive-value',
+ 'field': [ 'array-item-1', 'array-item-2', ... ],
+ 'field': { /* Object value */ }
+ }
+ }
+ }
+
+ Note that for top-level annotations their class names are used for
+ identification, whereas for any nested annotations the corresponding
+ field names are used.
+
+ One drawback of this approach is that an array containing empty
+ annotation classes will be represented as an array of 'None' values,
+ thus it will not be possible to find out annotation class names.
+ On the other hand, storing both annotation class name and the field name
+ would produce a very complex JSON.
+ """
+
+ with tempfile.NamedTemporaryFile() as proguard_output:
+ cmd_helper.GetCmdStatusAndOutput([
+ 'java',
+ '-jar', _GetProguardPath(),
+ '-injars', jar_path,
+ '-dontshrink', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
+ '-dump', proguard_output.name])
+ return Parse(proguard_output)
+
+class _AnnotationElement(object):
+ def __init__(self, name, ftype, depth):
+ self.ref = None
+ self.name = name
+ self.ftype = ftype
+ self.depth = depth
+
+class _ParseState(object):
+ _INITIAL_VALUES = (lambda: None, list, dict)
+ # Empty annotations are represented as 'None', not as an empty dictionary.
+ _LAZY_INITIAL_VALUES = (lambda: None, list, lambda: None)
+
+ def __init__(self):
+ self._class_result = None
+ self._method_result = None
+ self._parse_annotations = False
+ self._annotation_stack = []
+
+ def ResetPerSection(self, section_name):
+ self.InitMethod(None)
+ self._parse_annotations = (
+ section_name in ['Class file attributes', 'Methods'])
+
+ def ParseAnnotations(self):
+ return self._parse_annotations
+
+ def CreateAndInitClass(self, class_name):
+ self.InitMethod(None)
+ self._class_result = {
+ 'class': class_name,
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [],
+ }
+ return self._class_result
+
+ def HasCurrentClass(self):
+ return bool(self._class_result)
+
+ def SetSuperClass(self, superclass):
+ assert self.HasCurrentClass()
+ self._class_result['superclass'] = superclass
+
+ def InitMethod(self, method_name):
+ self._annotation_stack = []
+ if method_name:
+ self._method_result = {
+ 'method': method_name,
+ 'annotations': {},
+ }
+ self._class_result['methods'].append(self._method_result)
+ else:
+ self._method_result = None
+
+ def InitAnnotation(self, annotation, depth):
+ if not self._annotation_stack:
+ # Add a fake parent element comprising 'annotations' dictionary,
+ # so we can work uniformly with both top-level and nested annotations.
+ annotations = _AnnotationElement(
+ '<<<top level>>>', _ELEMENT_ANNOTATION, depth - 1)
+ if self._method_result:
+ annotations.ref = self._method_result['annotations']
+ else:
+ annotations.ref = self._class_result['annotations']
+ self._annotation_stack = [annotations]
+ self._BacktrackAnnotationStack(depth)
+ if not self.HasCurrentAnnotation():
+ self._annotation_stack.append(
+ _AnnotationElement(annotation, _ELEMENT_ANNOTATION, depth))
+ self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+ def HasCurrentAnnotation(self):
+ return len(self._annotation_stack) > 1
+
+ def InitAnnotationField(self, field, field_type, depth):
+ self._BacktrackAnnotationStack(depth)
+ # Create the parent representation, if needed. E.g. annotations
+ # are represented with `None`, not with `{}` until they receive the first
+ # field.
+ self._CreateAnnotationPlaceHolder(self._INITIAL_VALUES)
+ if self._annotation_stack[-1].ftype == _ELEMENT_ARRAY:
+ # Nested arrays are not allowed in annotations.
+ assert not field_type == _ELEMENT_ARRAY
+ # Use array index instead of bogus field name.
+ field = len(self._annotation_stack[-1].ref)
+ self._annotation_stack.append(_AnnotationElement(field, field_type, depth))
+ self._CreateAnnotationPlaceHolder(self._LAZY_INITIAL_VALUES)
+
+ def UpdateCurrentAnnotationFieldValue(self, value, depth):
+ self._BacktrackAnnotationStack(depth)
+ self._InitOrUpdateCurrentField(value)
+
+ def _CreateAnnotationPlaceHolder(self, constructors):
+ assert self.HasCurrentAnnotation()
+ field = self._annotation_stack[-1]
+ if field.ref is None:
+ field.ref = constructors[field.ftype]()
+ self._InitOrUpdateCurrentField(field.ref)
+
+ def _BacktrackAnnotationStack(self, depth):
+ stack = self._annotation_stack
+ while len(stack) > 0 and stack[-1].depth >= depth:
+ stack.pop()
+
+ def _InitOrUpdateCurrentField(self, value):
+ assert self.HasCurrentAnnotation()
+ parent = self._annotation_stack[-2]
+ assert not parent.ref is None
+ # There can be no nested constant element values.
+ assert parent.ftype in [_ELEMENT_ARRAY, _ELEMENT_ANNOTATION]
+ field = self._annotation_stack[-1]
+ if isinstance(value, str) and not field.ftype == _ELEMENT_PRIMITIVE:
+ # The value comes from the output parser via
+ # UpdateCurrentAnnotationFieldValue, and should be a value of a constant
+ # element. If it isn't, just skip it.
+ return
+ if parent.ftype == _ELEMENT_ARRAY and field.name >= len(parent.ref):
+ parent.ref.append(value)
+ else:
+ parent.ref[field.name] = value
+
+
+def _GetDepth(prefix):
+ return len(prefix) // _PROGUARD_INDENT_WIDTH
+
+def Parse(proguard_output):
+ results = {
+ 'classes': [],
+ }
+
+ state = _ParseState()
+
+ for line in proguard_output:
+ line = line.strip('\r\n')
+
+ m = _PROGUARD_CLASS_RE.match(line)
+ if m:
+ results['classes'].append(
+ state.CreateAndInitClass(m.group(1).replace('/', '.')))
+ continue
+
+ if not state.HasCurrentClass():
+ continue
+
+ m = _PROGUARD_SUPERCLASS_RE.match(line)
+ if m:
+ state.SetSuperClass(m.group(1).replace('/', '.'))
+ continue
+
+ m = _PROGUARD_SECTION_RE.match(line)
+ if m:
+ state.ResetPerSection(m.group(1))
+ continue
+
+ m = _PROGUARD_METHOD_RE.match(line)
+ if m:
+ state.InitMethod(m.group(1))
+ continue
+
+ if not state.ParseAnnotations():
+ continue
+
+ m = _PROGUARD_ANNOTATION_RE.match(line)
+ if m:
+ # Ignore the annotation package.
+ state.InitAnnotation(m.group(2).split('/')[-1], _GetDepth(m.group(1)))
+ continue
+
+ if state.HasCurrentAnnotation():
+ m = None
+ for (element_type, element_re) in _PROGUARD_ELEMENT_RES:
+ m = element_re.match(line)
+ if m:
+ state.InitAnnotationField(
+ m.group(2), element_type, _GetDepth(m.group(1)))
+ break
+ if m:
+ continue
+ m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+ if m:
+ state.UpdateCurrentAnnotationFieldValue(
+ m.group(2), _GetDepth(m.group(1)))
+ else:
+ state.InitMethod(None)
+
+ return results
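
A small sketch of driving the proguard module above end to end, assuming a JDK on PATH and the bundled proguard603.jar; the jar path is hypothetical (the tests below exercise Parse() directly on canned output):

from pylib.utils import proguard

# Dump() shells out to java -jar proguard603.jar and parses the text dump.
info = proguard.Dump('/abs/path/to/instrumentation_tests.jar')
for clazz in info['classes']:
  print(clazz['class'], sorted(clazz['annotations']))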
diff --git a/third_party/libwebrtc/build/android/pylib/utils/proguard_test.py b/third_party/libwebrtc/build/android/pylib/utils/proguard_test.py
new file mode 100755
index 0000000000..775bbbac35
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/proguard_test.py
@@ -0,0 +1,495 @@
+#! /usr/bin/env vpython3
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.utils import proguard
+
+class TestParse(unittest.TestCase):
+
+ def setUp(self):
+ self.maxDiff = None
+
+ def testClass(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ ' Superclass: java/lang/Object'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': 'java.lang.Object',
+ 'annotations': {},
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testMethod(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: <init>()V'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': '<init>',
+ 'annotations': {}
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testClassAnnotation(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Annotation [Lorg/example/Annotation;]:',
+ ' - Annotation [Lorg/example/AnnotationWithValue;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+ ' - Constant element value [attr1 \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [attr2 \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'Annotation': None,
+ 'AnnotationWithValue': {'attr': 'val'},
+ 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testClassAnnotationWithArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'AnnotationWithEmptyArray': {'arrayAttr': []},
+ 'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+ 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testNestedClassAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 1):',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Constant element value [outerAttr \'13\']',
+ ' - Utf8 [outerVal]',
+ ' - Array element value [outerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal2]',
+ ' - Annotation element value [emptyAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [ann]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'13\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [innerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal2]',
+ ' - Annotation element value [emptyInnerAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'outerAttr': 'outerVal',
+ 'outerArr': ['outerArrVal1', 'outerArrVal2'],
+ 'emptyAnn': None,
+ 'ann': {
+ 'innerAttr': 'innerVal',
+ 'innerArr': ['innerArrVal1', 'innerArrVal2'],
+ 'emptyInnerAnn': None
+ }
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testClassArraysOfAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 1):',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Array element value [arrayWithEmptyAnnotations]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Array element value [outerArray]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'115\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [arguments]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg1Attr \'115\']',
+ ' - Utf8 [arg1Val]',
+ ' - Array element value [arg1Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [11]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [12]',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg2Attr \'115\']',
+ ' - Utf8 [arg2Val]',
+ ' - Array element value [arg2Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [21]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [22]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'arrayWithEmptyAnnotations': [None, None],
+ 'outerArray': [
+ {
+ 'innerAttr': 'innerVal',
+ 'arguments': [
+ {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+ {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+ ]
+ }
+ ]
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testReadFullClassFileAttributes(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Class file attributes (count = 3):',
+ ' - Source file attribute:',
+ ' - Utf8 [Class.java]',
+ ' - Runtime visible annotations attribute:',
+ ' - Annotation [Lorg/example/IntValueAnnotation;]:',
+ ' - Constant element value [value \'73\']',
+ ' - Integer [19]',
+ ' - Inner classes attribute (count = 1)',
+ ' - InnerClassesInfo:',
+ ' Access flags: 0x9 = public static',
+ ' - Class [org/example/Class1]',
+ ' - Class [org/example/Class2]',
+ ' - Utf8 [OnPageFinishedHelper]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {
+ 'IntValueAnnotation': {
+ 'value': '19',
+ }
+ },
+ 'methods': []
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testMethodAnnotation(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/Annotation;]:',
+ ' - Annotation [Lorg/example/AnnotationWithValue;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoValues;]:',
+ ' - Constant element value [attr1 \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [attr2 \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'Annotation': None,
+ 'AnnotationWithValue': {'attr': 'val'},
+ 'AnnotationWithTwoValues': {'attr1': 'val1', 'attr2': 'val2'}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testMethodAnnotationWithArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/AnnotationWithEmptyArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Annotation [Lorg/example/AnnotationWithOneElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationWithTwoElemArray;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'AnnotationWithEmptyArray': {'arrayAttr': []},
+ 'AnnotationWithOneElemArray': {'arrayAttr': ['val']},
+ 'AnnotationWithTwoElemArray': {'arrayAttr': ['val1', 'val2']}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testMethodAnnotationWithPrimitivesAndArrays(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/AnnotationPrimitiveThenArray;]:',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationArrayThenPrimitive;]:',
+ ' - Array element value [arrayAttr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val]',
+ ' - Constant element value [attr \'13\']',
+ ' - Utf8 [val]',
+ ' - Annotation [Lorg/example/AnnotationTwoArrays;]:',
+ ' - Array element value [arrayAttr1]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val1]',
+ ' - Array element value [arrayAttr2]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [val2]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'AnnotationPrimitiveThenArray': {'attr': 'val',
+ 'arrayAttr': ['val']},
+ 'AnnotationArrayThenPrimitive': {'arrayAttr': ['val'],
+ 'attr': 'val'},
+ 'AnnotationTwoArrays': {'arrayAttr1': ['val1'],
+ 'arrayAttr2': ['val2']}
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testNestedMethodAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Constant element value [outerAttr \'13\']',
+ ' - Utf8 [outerVal]',
+ ' - Array element value [outerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [outerArrVal2]',
+ ' - Annotation element value [emptyAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [ann]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'13\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [innerArr]:',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal1]',
+ ' - Constant element value [(default) \'13\']',
+ ' - Utf8 [innerArrVal2]',
+ ' - Annotation element value [emptyInnerAnn]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'outerAttr': 'outerVal',
+ 'outerArr': ['outerArrVal1', 'outerArrVal2'],
+ 'emptyAnn': None,
+ 'ann': {
+ 'innerAttr': 'innerVal',
+ 'innerArr': ['innerArrVal1', 'innerArrVal2'],
+ 'emptyInnerAnn': None
+ }
+ }
+ },
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+ def testMethodArraysOfAnnotations(self):
+ actual = proguard.Parse(
+ ['- Program class: org/example/Test',
+ 'Methods (count = 1):',
+ '- Method: Test()V',
+ ' - Annotation [Lorg/example/OuterAnnotation;]:',
+ ' - Array element value [arrayWithEmptyAnnotations]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/EmptyAnnotation;]:',
+ ' - Array element value [outerArray]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation;]:',
+ ' - Constant element value [innerAttr \'115\']',
+ ' - Utf8 [innerVal]',
+ ' - Array element value [arguments]:',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg1Attr \'115\']',
+ ' - Utf8 [arg1Val]',
+ ' - Array element value [arg1Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [11]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [12]',
+ ' - Annotation element value [(default)]:',
+ ' - Annotation [Lorg/example/InnerAnnotation$Argument;]:',
+ ' - Constant element value [arg2Attr \'115\']',
+ ' - Utf8 [arg2Val]',
+ ' - Array element value [arg2Array]:',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [21]',
+ ' - Constant element value [(default) \'73\']',
+ ' - Integer [22]'])
+ expected = {
+ 'classes': [
+ {
+ 'class': 'org.example.Test',
+ 'superclass': '',
+ 'annotations': {},
+ 'methods': [
+ {
+ 'method': 'Test',
+ 'annotations': {
+ 'OuterAnnotation': {
+ 'arrayWithEmptyAnnotations': [None, None],
+ 'outerArray': [
+ {
+ 'innerAttr': 'innerVal',
+ 'arguments': [
+ {'arg1Attr': 'arg1Val', 'arg1Array': ['11', '12']},
+ {'arg2Attr': 'arg2Val', 'arg2Array': ['21', '22']}
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/pylib/utils/repo_utils.py b/third_party/libwebrtc/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000000..f9d300a214
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from devil.utils import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+ """Returns the git hash tag for the given directory.
+
+ Args:
+ in_directory: The directory where git is to be run.
+ """
+ command_line = ['git', 'log', '-1', '--pretty=format:%H']
+ output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+ return output[0:40]
+
+
+def GetGitOriginMasterHeadSHA1(in_directory):
+ command_line = ['git', 'rev-parse', 'origin/master']
+ output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+ return output.strip()
diff --git a/third_party/libwebrtc/build/android/pylib/utils/shared_preference_utils.py b/third_party/libwebrtc/build/android/pylib/utils/shared_preference_utils.py
new file mode 100644
index 0000000000..64c4c3f919
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/shared_preference_utils.py
@@ -0,0 +1,116 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for modifying an app's settings file using JSON."""
+
+import json
+import logging
+
+
+def UnicodeToStr(data):
+ """Recursively converts any Unicode to Python strings.
+
+ Args:
+ data: The data to be converted.
+
+  Returns:
+ A copy of the given data, but with instances of Unicode converted to Python
+ strings.
+ """
+ if isinstance(data, dict):
+ return {
+ UnicodeToStr(key): UnicodeToStr(value)
+ for key, value in data.items()
+ }
+ elif isinstance(data, list):
+ return [UnicodeToStr(element) for element in data]
+ try:
+ # Python-2 compatibility.
+ if isinstance(data, unicode):
+ return data.encode('utf-8')
+ except NameError:
+ # Strings are already unicode in python3.
+ pass
+ return data
+
+
+def ExtractSettingsFromJson(filepath):
+ """Extracts the settings data from the given JSON file.
+
+ Args:
+ filepath: The path to the JSON file to read.
+
+  Returns:
+ The data read from the JSON file with strings converted to Python strings.
+ """
+ # json.load() loads strings as unicode, which causes issues when trying
+ # to edit string values in preference files, so convert to Python strings
+ with open(filepath) as prefs_file:
+ return UnicodeToStr(json.load(prefs_file))
+
+
+def ApplySharedPreferenceSetting(shared_pref, setting):
+ """Applies the given app settings to the given device.
+
+ Modifies an installed app's settings by modifying its shared preference
+ settings file. Provided settings data must be a settings dictionary,
+  which is in the following format:
+ {
+ "package": "com.example.package",
+ "filename": "AppSettingsFile.xml",
+ "supports_encrypted_path": true,
+ "set": {
+ "SomeBoolToSet": true,
+ "SomeStringToSet": "StringValue",
+ },
+ "remove": [
+ "list",
+ "of",
+ "keys",
+ "to",
+ "remove",
+ ]
+ }
+
+ Example JSON files that can be read with ExtractSettingsFromJson and passed to
+ this function are in //chrome/android/shared_preference_files/test/.
+
+ Args:
+ shared_pref: The devil SharedPrefs object for the device the settings will
+ be applied to.
+ setting: A settings dictionary to apply.
+ """
+ shared_pref.Load()
+ for key in setting.get('remove', []):
+ try:
+ shared_pref.Remove(key)
+ except KeyError:
+ logging.warning("Attempted to remove non-existent key %s", key)
+ for key, value in setting.get('set', {}).items():
+ is_set = False
+ if not is_set and isinstance(value, bool):
+ shared_pref.SetBoolean(key, value)
+ is_set = True
+ try:
+ # Python-2 compatibility.
+ if not is_set and isinstance(value, basestring):
+ shared_pref.SetString(key, value)
+ is_set = True
+ if not is_set and (isinstance(value, long) or isinstance(value, int)):
+ shared_pref.SetLong(key, value)
+ is_set = True
+ except NameError:
+ if not is_set and isinstance(value, str):
+ shared_pref.SetString(key, value)
+ is_set = True
+ if not is_set and isinstance(value, int):
+ shared_pref.SetLong(key, value)
+ is_set = True
+ if not is_set and isinstance(value, list):
+ shared_pref.SetStringSet(key, value)
+ is_set = True
+ if not is_set:
+ raise ValueError("Given invalid value type %s for key %s" % (
+ str(type(value)), key))
+ shared_pref.Commit()
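
A hedged end-to-end sketch for the shared_preference_utils helpers above. It assumes devil's SharedPrefs wrapper (devil.android.sdk.shared_prefs), an attached device, and a JSON file containing a list of settings dictionaries in the format documented above; the JSON path is hypothetical:

from devil.android import device_utils
from devil.android.sdk import shared_prefs
from pylib.utils import shared_preference_utils

device = device_utils.DeviceUtils.HealthyDevices()[0]
# Hypothetical settings file in the documented format.
settings = shared_preference_utils.ExtractSettingsFromJson(
    '/tmp/example_prefs.json')
for setting in settings:
  pref = shared_prefs.SharedPrefs(
      device, setting['package'], setting['filename'])
  shared_preference_utils.ApplySharedPreferenceSetting(pref, setting)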
diff --git a/third_party/libwebrtc/build/android/pylib/utils/simpleperf.py b/third_party/libwebrtc/build/android/pylib/utils/simpleperf.py
new file mode 100644
index 0000000000..b3ba00e6c2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/simpleperf.py
@@ -0,0 +1,260 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from devil import devil_env
+from devil.android import device_signal
+from devil.android.sdk import version_codes
+from pylib import constants
+
+
+def _ProcessType(proc):
+ _, _, suffix = proc.name.partition(':')
+ if not suffix:
+ return 'browser'
+ if suffix.startswith('sandboxed_process'):
+ return 'renderer'
+ if suffix.startswith('privileged_process'):
+ return 'gpu'
+ return None
+
+
+def _GetSpecifiedPID(device, package_name, process_specifier):
+ if process_specifier is None:
+ return None
+
+ # Check for numeric PID
+ try:
+ pid = int(process_specifier)
+ return pid
+ except ValueError:
+ pass
+
+ # Check for exact process name; can be any of these formats:
+ # <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+ # :<process name>, i.e. ':sandboxed_process0'
+ # <process name>, i.e. 'sandboxed_process0'
+ full_process_name = process_specifier
+ if process_specifier.startswith(':'):
+ full_process_name = package_name + process_specifier
+ elif ':' not in process_specifier:
+ full_process_name = '%s:%s' % (package_name, process_specifier)
+ matching_processes = device.ListProcesses(full_process_name)
+ if len(matching_processes) == 1:
+ return matching_processes[0].pid
+ if len(matching_processes) > 1:
+ raise RuntimeError('Found %d processes with name "%s".' % (
+ len(matching_processes), process_specifier))
+
+ # Check for process type (i.e. 'renderer')
+ package_processes = device.ListProcesses(package_name)
+ matching_processes = [p for p in package_processes if (
+ _ProcessType(p) == process_specifier)]
+ if process_specifier == 'renderer' and len(matching_processes) > 1:
+ raise RuntimeError('Found %d renderer processes; please re-run with only '
+ 'one open tab.' % len(matching_processes))
+ if len(matching_processes) != 1:
+ raise RuntimeError('Found %d processes of type "%s".' % (
+ len(matching_processes), process_specifier))
+ return matching_processes[0].pid
+
+
+def _ThreadsForProcess(device, pid):
+ # The thread list output format for 'ps' is the same regardless of version.
+ # Here's the column headers, and a sample line for a thread belonging to
+ # pid 12345 (note that the last few columns are not aligned with headers):
+ #
+ # USER PID TID PPID VSZ RSS WCHAN ADDR S CMD
+ # u0_i101 12345 24680 567 1357902 97531 futex_wait_queue_me e85acd9c S \
+ # CrRendererMain
+ if device.build_version_sdk >= version_codes.OREO:
+ pid_regex = (
+ r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
+ ps_cmd = "ps -T -e | grep '%s'" % pid_regex
+ ps_output_lines = device.RunShellCommand(
+ ps_cmd, shell=True, check_return=True)
+ else:
+ ps_cmd = ['ps', '-p', str(pid), '-t']
+ ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
+ result = []
+ for l in ps_output_lines:
+ fields = l.split()
+ # fields[2] is tid, fields[-1] is thread name. Output may include an entry
+ # for the process itself with tid=pid; omit that one.
+ if fields[2] == str(pid):
+ continue
+ result.append((int(fields[2]), fields[-1]))
+ return result
+
+
+def _ThreadType(thread_name):
+ if not thread_name:
+ return 'unknown'
+ if (thread_name.startswith('Chrome_ChildIO') or
+ thread_name.startswith('Chrome_IO')):
+ return 'io'
+ if thread_name.startswith('Compositor'):
+ return 'compositor'
+ if (thread_name.startswith('ChildProcessMai') or
+ thread_name.startswith('CrGpuMain') or
+ thread_name.startswith('CrRendererMain')):
+ return 'main'
+ if thread_name.startswith('RenderThread'):
+ return 'render'
+
+
+def _GetSpecifiedTID(device, pid, thread_specifier):
+ if thread_specifier is None:
+ return None
+
+ # Check for numeric TID
+ try:
+ tid = int(thread_specifier)
+ return tid
+ except ValueError:
+ pass
+
+ # Check for thread type
+ if pid is not None:
+ matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
+ _ThreadType(t[1]) == thread_specifier)]
+ if len(matching_threads) != 1:
+ raise RuntimeError('Found %d threads of type "%s".' % (
+ len(matching_threads), thread_specifier))
+ return matching_threads[0][0]
+
+ return None
+
+
+def PrepareDevice(device):
+ if device.build_version_sdk < version_codes.NOUGAT:
+ raise RuntimeError('Simpleperf profiling is only supported on Android N '
+ 'and later.')
+
+ # Necessary for profiling
+ # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
+ device.SetProp('security.perf_harden', '0')
+
+
+def InstallSimpleperf(device, package_name):
+ package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
+ host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
+ if not host_simpleperf_path:
+ raise Exception('Could not get path to simpleperf executable on host.')
+ device_simpleperf_path = '/'.join(
+ ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
+ device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
+ return device_simpleperf_path
+
+
+@contextlib.contextmanager
+def RunSimpleperf(device, device_simpleperf_path, package_name,
+ process_specifier, thread_specifier, profiler_args,
+ host_out_path):
+ pid = _GetSpecifiedPID(device, package_name, process_specifier)
+ tid = _GetSpecifiedTID(device, pid, thread_specifier)
+ if pid is None and tid is None:
+ raise RuntimeError('Could not find specified process/thread running on '
+ 'device. Make sure the apk is already running before '
+ 'attempting to profile.')
+ profiler_args = list(profiler_args)
+ if profiler_args and profiler_args[0] == 'record':
+ profiler_args.pop(0)
+ if '--call-graph' not in profiler_args and '-g' not in profiler_args:
+ profiler_args.append('-g')
+ if '-f' not in profiler_args:
+ profiler_args.extend(('-f', '1000'))
+ device_out_path = '/data/local/tmp/perf.data'
+ if '-o' in profiler_args:
+ device_out_path = profiler_args[profiler_args.index('-o') + 1]
+ else:
+ profiler_args.extend(('-o', device_out_path))
+
+ if tid:
+ profiler_args.extend(('-t', str(tid)))
+ else:
+ profiler_args.extend(('-p', str(pid)))
+
+ adb_shell_simpleperf_process = device.adb.StartShell(
+ [device_simpleperf_path, 'record'] + profiler_args)
+
+ completed = False
+ try:
+ yield
+ completed = True
+
+ finally:
+ device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
+ quiet=True)
+ if completed:
+ adb_shell_simpleperf_process.wait()
+ device.PullFile(device_out_path, host_out_path)
+
+
+def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
+ pprof_out_path):
+ # The simpleperf scripts require the unstripped libs to be installed in the
+ # same directory structure as the libs on the device. Much of the logic here
+ # is just figuring out and creating the necessary directory structure, and
+ # symlinking the unstripped shared libs.
+
+ # Get the set of libs that we can symbolize
+ unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
+ unstripped_libs = set(
+ f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
+
+ # report.py will show the directory structure above the shared libs;
+ # that is the directory structure we need to recreate on the host.
+ script_dir = devil_env.config.LocalPath('simpleperf_scripts')
+ report_path = os.path.join(script_dir, 'report.py')
+ report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
+ device_lib_path = None
+ for line in subprocess.check_output(
+ report_cmd, stderr=subprocess.STDOUT).splitlines():
+ fields = line.split()
+ if len(fields) < 5:
+ continue
+ shlib_path = fields[4]
+ shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
+ if shlib_basename in unstripped_libs:
+ device_lib_path = shlib_dirname
+ break
+ if not device_lib_path:
+ raise RuntimeError('No chrome-related symbols in profiling data in %s. '
+ 'Either the process was idle for the entire profiling '
+ 'period, or something went very wrong (and you should '
+ 'file a bug at crbug.com/new with component '
+ 'Speed>Tracing, and assign it to szager@chromium.org).'
+ % simpleperf_out_path)
+
+ # Recreate the directory structure locally, and symlink unstripped libs.
+ processing_dir = tempfile.mkdtemp()
+ try:
+ processing_lib_dir = os.path.join(
+ processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
+ os.makedirs(processing_lib_dir)
+ for lib in unstripped_libs:
+ unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
+ processing_lib_path = os.path.join(processing_lib_dir, lib)
+ os.symlink(unstripped_lib_path, processing_lib_path)
+
+ # Run the script to annotate symbols and convert from simpleperf format to
+ # pprof format.
+ pprof_converter_script = os.path.join(
+ script_dir, 'pprof_proto_generator.py')
+ pprof_converter_cmd = [
+ sys.executable, pprof_converter_script, '-i', simpleperf_out_path, '-o',
+ os.path.abspath(pprof_out_path), '--ndk_path',
+ constants.ANDROID_NDK_ROOT
+ ]
+ subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
+ cwd=processing_dir)
+ finally:
+ shutil.rmtree(processing_dir, ignore_errors=True)
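
A rough sketch of the profiling flow above, assuming a devil DeviceUtils handle, a Chrome package already running on the device, and a local Chromium output directory; the package name, paths, and workload function are illustrative:

from devil.android import device_utils
from pylib.utils import simpleperf

def run_scenario_on_device():  # hypothetical workload exercised while profiling
  pass

device = device_utils.DeviceUtils.HealthyDevices()[0]
package = 'org.chromium.chrome'  # hypothetical package under test

simpleperf.PrepareDevice(device)
simpleperf_bin = simpleperf.InstallSimpleperf(device, package)
with simpleperf.RunSimpleperf(device, simpleperf_bin, package,
                              'renderer', 'main', ['-f', '1000'],
                              '/tmp/perf.data'):
  run_scenario_on_device()

simpleperf.ConvertSimpleperfToPprof('/tmp/perf.data',
                                    '/path/to/out/Release',
                                    '/tmp/profile.pprof')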
diff --git a/third_party/libwebrtc/build/android/pylib/utils/test_filter.py b/third_party/libwebrtc/build/android/pylib/utils/test_filter.py
new file mode 100644
index 0000000000..7bafd002e3
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/test_filter.py
@@ -0,0 +1,148 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+
+
+_CMDLINE_NAME_SEGMENT_RE = re.compile(
+ r' with(?:out)? \{[^\}]*\}')
+
+class ConflictingPositiveFiltersException(Exception):
+ """Raised when both filter file and filter argument have positive filters."""
+
+
+def ParseFilterFile(input_lines):
+ """Converts test filter file contents to positive and negative pattern lists.
+
+  See //testing/buildbot/filters/README.md for a description of the
+  syntax that |input_lines| are expected to follow.
+
+ See
+ https://github.com/google/googletest/blob/master/docs/advanced.md#running-a-subset-of-the-tests
+  for a description of the syntax the --gtest_filter argument should follow.
+
+ Args:
+ input_lines: An iterable (e.g. a list or a file) containing input lines.
+ Returns:
+    A tuple containing the lists of positive and negative patterns.
+ """
+ # Strip comments and whitespace from each line and filter non-empty lines.
+ stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
+ filter_lines = [l for l in stripped_lines if l]
+
+ # Split the tests into positive and negative patterns (gtest treats
+ # every pattern after the first '-' sign as an exclusion).
+ positive_patterns = [l for l in filter_lines if l[0] != '-']
+ negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
+ return positive_patterns, negative_patterns
+
+
+def AddFilterOptions(parser):
+ """Adds filter command-line options to the provided parser.
+
+ Args:
+ parser: an argparse.ArgumentParser instance.
+ """
+ parser.add_argument(
+ # Deprecated argument.
+ '--gtest-filter-file',
+ # New argument.
+ '--test-launcher-filter-file',
+ action='append',
+ dest='test_filter_files',
+ help='Path to file that contains googletest-style filter strings. '
+ 'See also //testing/buildbot/filters/README.md.')
+
+ filter_group = parser.add_mutually_exclusive_group()
+ filter_group.add_argument(
+ '-f', '--test-filter', '--gtest_filter', '--gtest-filter',
+ dest='test_filter',
+ help='googletest-style filter string.',
+ default=os.environ.get('GTEST_FILTER'))
+ filter_group.add_argument(
+ '--isolated-script-test-filter',
+      help='Isolated script filter string. '
+ 'Like gtest filter strings, but with :: separators instead of :')
+
+
+def AppendPatternsToFilter(test_filter, positive_patterns=None,
+ negative_patterns=None):
+ """Returns a test-filter string with additional patterns.
+
+ Args:
+ test_filter: test filter string
+ positive_patterns: list of positive patterns to add to string
+ negative_patterns: list of negative patterns to add to string
+ """
+ positives = []
+ negatives = []
+ positive = ''
+ negative = ''
+
+ split_filter = test_filter.split('-', 1)
+ if len(split_filter) == 1:
+ positive = split_filter[0]
+ else:
+ positive, negative = split_filter
+
+ positives += [f for f in positive.split(':') if f]
+ negatives += [f for f in negative.split(':') if f]
+
+ positives += positive_patterns if positive_patterns else []
+ negatives += negative_patterns if negative_patterns else []
+
+ final_filter = ':'.join([p.replace('#', '.') for p in positives])
+ if negatives:
+ final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
+ return final_filter
+
+
+def HasPositivePatterns(test_filter):
+ """Returns True if test_filter contains a positive pattern, else False
+
+ Args:
+ test_filter: test-filter style string
+ """
+ return bool(len(test_filter) > 0 and test_filter[0] != '-')
+
+
+def InitializeFilterFromArgs(args):
+ """Returns a filter string from the command-line option values.
+
+ Args:
+    args: an argparse.Namespace instance resulting from using a parser
+          to which the filter options above were added.
+
+ Raises:
+ ConflictingPositiveFiltersException if both filter file and command line
+ specify positive filters.
+ """
+ test_filter = ''
+ if args.isolated_script_test_filter:
+ args.test_filter = args.isolated_script_test_filter.replace('::', ':')
+ if args.test_filter:
+ test_filter = _CMDLINE_NAME_SEGMENT_RE.sub(
+ '', args.test_filter.replace('#', '.'))
+
+ if not args.test_filter_files:
+ return test_filter
+
+  # args.test_filter_files is a list of paths; each entry may itself be a
+  # ';'-separated list of filter file paths.
+  for test_filter_files_entry in args.test_filter_files:
+    # Process each individual filter file named in the entry.
+    for test_filter_file in test_filter_files_entry.split(';'):
+ with open(test_filter_file, 'r') as f:
+ positive_file_patterns, negative_file_patterns = ParseFilterFile(f)
+ if positive_file_patterns and HasPositivePatterns(test_filter):
+ raise ConflictingPositiveFiltersException(
+ 'Cannot specify positive pattern in both filter file and ' +
+ 'filter command line argument')
+ test_filter = AppendPatternsToFilter(
+ test_filter,
+ positive_patterns=positive_file_patterns,
+ negative_patterns=negative_file_patterns)
+
+ return test_filter
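
Illustrative usage sketch, not part of the patch itself; the filter values are made up.

    import argparse

    from pylib.utils import test_filter

    parser = argparse.ArgumentParser()
    test_filter.AddFilterOptions(parser)
    args = parser.parse_args(['--test-filter', 'FooTest.*-FooTest.testSlow'])
    # With no filter files supplied, the command-line filter is returned as-is:
    # 'FooTest.*-FooTest.testSlow'.
    gtest_filter = test_filter.InitializeFilterFromArgs(args)

    # Filter-file contents can also be parsed and merged by hand:
    positives, negatives = test_filter.ParseFilterFile(
        ['FooTest.*', '-FooTest.testSlow'])
    merged = test_filter.AppendPatternsToFilter('', positives, negatives)
    # merged == 'FooTest.*-FooTest.testSlow'
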
diff --git a/third_party/libwebrtc/build/android/pylib/utils/test_filter_test.py b/third_party/libwebrtc/build/android/pylib/utils/test_filter_test.py
new file mode 100755
index 0000000000..3f1f21e4cb
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/test_filter_test.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env vpython3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import sys
+import tempfile
+import unittest
+
+from pylib.utils import test_filter
+
+class ParseFilterFileTest(unittest.TestCase):
+
+ def testParseFilterFile_commentsAndBlankLines(self):
+ input_lines = [
+ 'positive1',
+ '# comment',
+ 'positive2 # Another comment',
+        '',
+ 'positive3'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2', 'positive3'], []
+ self.assertEqual(expected, actual)
+
+ def testParseFilterFile_onlyPositive(self):
+ input_lines = [
+ 'positive1',
+ 'positive2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], []
+ self.assertEqual(expected, actual)
+
+ def testParseFilterFile_onlyNegative(self):
+ input_lines = [
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = [], ['negative1', 'negative2']
+ self.assertEqual(expected, actual)
+
+ def testParseFilterFile_positiveAndNegative(self):
+ input_lines = [
+ 'positive1',
+ 'positive2',
+ '-negative1',
+ '-negative2'
+ ]
+ actual = test_filter.ParseFilterFile(input_lines)
+ expected = ['positive1', 'positive2'], ['negative1', 'negative2']
+ self.assertEqual(expected, actual)
+
+
+class InitializeFilterFromArgsTest(unittest.TestCase):
+
+ def testInitializeBasicFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest.testFoo:BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+ def testInitializeJavaStyleFilter(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--test-filter',
+ 'FooTest#testFoo:BarTest#testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+ def testInitializeBasicIsolatedScript(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ args = parser.parse_args([
+ '--isolated-script-test-filter',
+ 'FooTest.testFoo::BarTest.testBar'])
+ expected = 'FooTest.testFoo:BarTest.testBar'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+ def testFilterArgWithPositiveFilterInFilterFile(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+ tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+ def testFilterFileWithPositiveFilterInFilterArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+ tmp_file.write('-negative2\n-negative3\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive1:positive2-negative1',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = 'positive1:positive2-negative1:negative2:negative3'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+ def testPositiveFilterInBothFileAndArg(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+ tmp_file.write('positive1\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter',
+ 'positive2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ with self.assertRaises(test_filter.ConflictingPositiveFiltersException):
+ test_filter.InitializeFilterFromArgs(args)
+
+ def testFilterArgWithFilterFileAllNegative(self):
+ parser = argparse.ArgumentParser()
+ test_filter.AddFilterOptions(parser)
+ with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+ tmp_file.write('-negative3\n-negative4\n')
+ tmp_file.seek(0)
+ args = parser.parse_args([
+ '--test-filter=-negative1:negative2',
+ '--test-launcher-filter-file',
+ tmp_file.name])
+ expected = '-negative1:negative2:negative3:negative4'
+ actual = test_filter.InitializeFilterFromArgs(args)
+ self.assertEqual(actual, expected)
+
+
+class AppendPatternsToFilter(unittest.TestCase):
+ def testAllEmpty(self):
+ expected = ''
+ actual = test_filter.AppendPatternsToFilter('', [], [])
+ self.assertEqual(actual, expected)
+
+ def testAppendOnlyPositiveToEmptyFilter(self):
+ expected = 'positive'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'])
+ self.assertEqual(actual, expected)
+
+ def testAppendOnlyNegativeToEmptyFilter(self):
+ expected = '-negative'
+ actual = test_filter.AppendPatternsToFilter('',
+ negative_patterns=['negative'])
+ self.assertEqual(actual, expected)
+
+ def testAppendToEmptyFilter(self):
+ expected = 'positive-negative'
+ actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
+ self.assertEqual(actual, expected)
+
+ def testAppendToPositiveOnlyFilter(self):
+ expected = 'positive1:positive2-negative'
+ actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
+ ['negative'])
+ self.assertEqual(actual, expected)
+
+ def testAppendToNegativeOnlyFilter(self):
+ expected = 'positive-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
+ ['negative2'])
+ self.assertEqual(actual, expected)
+
+ def testAppendPositiveToFilter(self):
+ expected = 'positive1:positive2-negative1'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'])
+ self.assertEqual(actual, expected)
+
+ def testAppendNegativeToFilter(self):
+ expected = 'positive1-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ negative_patterns=['negative2'])
+ self.assertEqual(actual, expected)
+
+ def testAppendBothToFilter(self):
+ expected = 'positive1:positive2-negative1:negative2'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ positive_patterns=['positive2'],
+ negative_patterns=['negative2'])
+ self.assertEqual(actual, expected)
+
+ def testAppendMultipleToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ actual = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2', 'positive3'],
+ ['negative2', 'negative3'])
+ self.assertEqual(actual, expected)
+
+ def testRepeatedAppendToFilter(self):
+ expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
+ filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
+ ['positive2'],
+ ['negative2'])
+ actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
+ ['negative3'])
+ self.assertEqual(actual, expected)
+
+ def testAppendHashSeparatedPatternsToFilter(self):
+ expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
+ actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+ ['positive#test2'],
+ ['negative#test2'])
+ self.assertEqual(actual, expected)
+
+
+class HasPositivePatterns(unittest.TestCase):
+ def testEmpty(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('')
+ self.assertEqual(actual, expected)
+
+ def testHasOnlyPositive(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive')
+ self.assertEqual(actual, expected)
+
+ def testHasOnlyNegative(self):
+ expected = False
+ actual = test_filter.HasPositivePatterns('-negative')
+ self.assertEqual(actual, expected)
+
+ def testHasBoth(self):
+ expected = True
+ actual = test_filter.HasPositivePatterns('positive-negative')
+ self.assertEqual(actual, expected)
+
+
+if __name__ == '__main__':
+ sys.exit(unittest.main())
diff --git a/third_party/libwebrtc/build/android/pylib/utils/time_profile.py b/third_party/libwebrtc/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000000..094799c4f2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+ """Class for simple profiling of action, with logging of cost."""
+
+ def __init__(self, description='operation'):
+ self._starttime = None
+ self._endtime = None
+ self._description = description
+ self.Start()
+
+ def Start(self):
+ self._starttime = time.time()
+ self._endtime = None
+
+ def GetDelta(self):
+ """Returns the rounded delta.
+
+ Also stops the timer if Stop() has not already been called.
+ """
+ if self._endtime is None:
+ self.Stop(log=False)
+ delta = self._endtime - self._starttime
+ delta = round(delta, 2) if delta < 10 else round(delta, 1)
+ return delta
+
+ def LogResult(self):
+ """Logs the result."""
+ logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
+
+ def Stop(self, log=True):
+ """Stop profiling.
+
+ Args:
+ log: Log the delta (defaults to true).
+ """
+ self._endtime = time.time()
+ if log:
+ self.LogResult()
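
Illustrative usage sketch, not part of the patch itself; the operation name is made up.

    from pylib.utils import time_profile

    timer = time_profile.TimeProfile('apk install')  # timing starts immediately
    # ... perform the operation being measured ...
    timer.Stop()  # logs e.g. '12.3 seconds to perform apk install'
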
diff --git a/third_party/libwebrtc/build/android/pylib/utils/xvfb.py b/third_party/libwebrtc/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000000..cb9d50e8fd
--- /dev/null
+++ b/third_party/libwebrtc/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+ """Return True if on Linux; else False."""
+ return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+ """Class to start and stop Xvfb if relevant. Nop if not Linux."""
+
+ def __init__(self):
+ self._pid = 0
+
+ def Start(self):
+ """Start Xvfb and set an appropriate DISPLAY environment. Linux only.
+
+ Copied from tools/code_coverage/coverage_posix.py
+ """
+ if not _IsLinux():
+ return
+ proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+ '-ac'],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ self._pid = proc.pid
+ if not self._pid:
+ raise Exception('Could not start Xvfb')
+ os.environ['DISPLAY'] = ':9'
+
+ # Now confirm, giving a chance for it to start if needed.
+ for _ in range(10):
+ proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+ _, retcode = os.waitpid(proc.pid, 0)
+ if retcode == 0:
+ break
+ time.sleep(0.25)
+ if retcode != 0:
+ raise Exception('Could not confirm Xvfb happiness')
+
+ def Stop(self):
+ """Stop Xvfb if needed. Linux only."""
+ if self._pid:
+ try:
+ os.kill(self._pid, signal.SIGKILL)
+ except:
+ pass
+ del os.environ['DISPLAY']
+ self._pid = 0
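
Illustrative usage sketch, not part of the patch itself, assuming a headless Linux host that needs an X display:

    from pylib.utils import xvfb

    vdisplay = xvfb.Xvfb()
    vdisplay.Start()  # no-op off Linux; otherwise starts Xvfb and sets DISPLAY=:9
    try:
      pass  # ... run code that needs an X display ...
    finally:
      vdisplay.Stop()  # kills Xvfb and clears DISPLAY if it was started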