summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/build/android/gyp/util
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
commit43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree620249daf56c0258faa40cbdcf9cfba06de2a846 /third_party/libwebrtc/build/android/gyp/util
parentInitial commit. (diff)
downloadfirefox-upstream.tar.xz
firefox-upstream.zip
Adding upstream version 110.0.1.upstream/110.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/build/android/gyp/util')
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/__init__.py3
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/build_utils.py725
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/build_utils_test.py48
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/diff_utils.py127
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py59
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py194
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/manifest_utils.py321
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py128
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/md5_check.py471
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/md5_check_test.py178
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/parallel.py214
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/protoresources.py308
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/resource_utils.py1078
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/resource_utils_test.py275
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/resources_parser.py142
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/server_utils.py41
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/zipalign.py97
17 files changed, 4409 insertions, 0 deletions
diff --git a/third_party/libwebrtc/build/android/gyp/util/__init__.py b/third_party/libwebrtc/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/third_party/libwebrtc/build/android/gyp/util/build_utils.py b/third_party/libwebrtc/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000000..6469f762cc
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/build_utils.py
@@ -0,0 +1,725 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
import atexit
import collections
import contextlib
import filecmp
import fnmatch
import json
import logging
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import time
import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
# Use relative paths to improve the hermetic property of build scripts.
DIR_SOURCE_ROOT = os.path.relpath(
    os.environ.get(
        'CHECKOUT_SOURCE_ROOT',
        os.path.join(
            os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
            os.pardir)))
# Local JDK checkout used by the Java build steps below.
JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
# Java 8 runtime classes from the JDK "extras" checkout.
RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras',
                           'java_8', 'jre', 'lib', 'rt.jar')

# Python 2/3 compatibility: basestring only exists on Python 2.
try:
  string_types = basestring
except NameError:
  string_types = (str, bytes)
+
+
def JavaCmd(verify=True, xmx='1G'):
  """Returns the command prefix for invoking java from the local JDK.

  Args:
    verify: Whether to keep JVM bytecode verification enabled.
    xmx: Maximum heap size, passed via -Xmx.
  """
  cmd = [os.path.join(JAVA_HOME, 'bin', 'java')]
  # Limit heap to avoid Java not GC'ing when it should, and causing
  # bots to OOM when many java commands are running at the same time
  # https://crbug.com/1098333
  cmd.append('-Xmx' + xmx)

  # Disabling bytecode verification for local builds gives a ~2% speed-up.
  if not verify:
    cmd.append('-noverify')

  return cmd
+
+
@contextlib.contextmanager
def TempDir(**kwargs):
  """Context manager yielding a new temporary directory, deleted on exit.

  Args:
    **kwargs: Forwarded to tempfile.mkdtemp().
  """
  path = tempfile.mkdtemp(**kwargs)
  try:
    yield path
  finally:
    shutil.rmtree(path)
+
+
def MakeDirectory(dir_path):
  """Best-effort recursive mkdir.

  Any OSError (e.g. the directory already existing, or an empty path) is
  deliberately ignored, matching callers that treat this as idempotent.
  """
  with contextlib.suppress(OSError):
    os.makedirs(dir_path)
+
+
def DeleteDirectory(dir_path):
  """Recursively deletes |dir_path| if it exists; no-op otherwise."""
  if not os.path.exists(dir_path):
    return
  shutil.rmtree(dir_path)
+
+
def Touch(path, fail_if_missing=False):
  """Creates |path| (and parent dirs) if needed and updates its mtime.

  Args:
    path: File to touch.
    fail_if_missing: If True, raise instead of creating a missing file.
  """
  missing = not os.path.exists(path)
  if fail_if_missing and missing:
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  with open(path, 'a'):
    os.utime(path, None)
+
+
def FindInDirectory(directory, filename_filter='*'):
  """Recursively finds files under |directory| whose basenames match a pattern.

  Args:
    directory: Root of the directory walk.
    filename_filter: fnmatch-style pattern applied to basenames.

  Returns:
    List of full paths of matching files, in os.walk() order.
  """
  return [
      os.path.join(root, name)
      for root, _dirs, names in os.walk(directory)
      for name in fnmatch.filter(names, filename_filter)
  ]
+
+
def ParseGnList(value):
  """Converts a "GN-list" command-line parameter into a Python list.

  Conversions handled:
    * None -> []
    * '' -> []
    * 'asdf' -> ['asdf']
    * '["a", "b"]' -> ['a', 'b']
    * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list)

  The common use for this behavior is in the Android build where things can
  take lists of @FileArg references that are expanded via ExpandFileArgs.
  """
  # None and '' both become the empty list.
  if not value:
    return []
  # A list of GN lists is flattened recursively.
  if isinstance(value, list):
    flattened = []
    for element in value:
      flattened += ParseGnList(element)
    return flattened
  # A GN-formatted list string is parsed properly.
  if value.startswith('['):
    return gn_helpers.GNValueParser(value).ParseList()
  # A bare string becomes a single-item list.
  return [value]
+
+
def CheckOptions(options, parser, required=None):
  """Calls parser.error() for every required option that is unset (None).

  Args:
    options: Parsed options object (argparse/optparse namespace).
    parser: The parser, used to report missing options.
    required: Iterable of option attribute names that must not be None.
  """
  for option_name in required or ():
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
def WriteJson(obj, path, only_if_changed=False):
  """Serializes |obj| as pretty-printed, key-sorted JSON to |path|.

  Args:
    obj: JSON-serializable object.
    path: Destination file path.
    only_if_changed: If True, skip the write when the serialized text already
        matches the file contents (preserves the output's mtime).
  """
  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  if only_if_changed and old_dump == new_dump:
    return
  with open(path, 'w') as outfile:
    outfile.write(new_dump)
+
+
@contextlib.contextmanager
def AtomicOutput(path, only_if_changed=True, mode='w+b'):
  """Helper to prevent half-written outputs.

  Args:
    path: Path to the final output file, which will be written atomically.
    only_if_changed: If True (the default), do not touch the filesystem
      if the content has not changed.
    mode: The mode to open the file in (str).
  Returns:
    A python context manager that yields a NamedTemporaryFile instance
    that must be used by clients to write the data to. On exit, the
    manager will try to replace the final output file with the
    temporary one if necessary. The temporary file is always destroyed
    on exit.
  Example:
    with build_utils.AtomicOutput(output_path) as tmp_file:
      subprocess.check_call(['prog', '--output', tmp_file.name])
  """
  # Create in same directory to ensure same filesystem when moving.
  dirname = os.path.dirname(path)
  if not os.path.exists(dirname):
    MakeDirectory(dirname)
  with tempfile.NamedTemporaryFile(
      mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f:
    try:
      yield f

      # file should be closed before comparison/move.
      f.close()
      # Skipping the move when contents match preserves the output's mtime,
      # so downstream build steps are not re-triggered needlessly.
      if not (only_if_changed and os.path.exists(path) and
              filecmp.cmp(f.name, path)):
        shutil.move(f.name, path)
    finally:
      # On success shutil.move() already removed the temp file; unlink only
      # when it is still present (exception raised, or unchanged output).
      if os.path.exists(f.name):
        os.unlink(f.name)
+
+
class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code.

  Attributes:
    cwd: Working directory the command ran in.
    args: The command's argument list.
    output: Combined stdout/stderr captured from the command.
  """

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell. shlex.quote() replaces pipes.quote(): the pipes
    # module is deprecated since Python 3.11 and removed in 3.13, and
    # pipes.quote was an alias of shlex.quote anyway.
    copyable_command = '( cd {}; {} )'.format(
        os.path.abspath(self.cwd), ' '.join(map(shlex.quote, self.args)))
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
def FilterLines(output, filter_string):
  """Output filter from build_utils.CheckOutput.

  Args:
    output: Executable output as from build_utils.CheckOutput.
    filter_string: An RE string; lines matching it are removed.

  Returns:
    The filtered output, as a single string.
  """
  pattern = re.compile(filter_string)
  kept = [line for line in output.split('\n') if not pattern.search(line)]
  return '\n'.join(kept)
+
+
def FilterReflectiveAccessJavaWarnings(output):
  """Filters out warnings about illegal reflective access operations.

  These warnings were introduced in Java 9, and generally mean that
  dependencies need to be updated.
  """
  # Drops the five warning lines the JVM prints:
  #   WARNING: An illegal reflective access operation has occurred
  #   WARNING: Illegal reflective access by ...
  #   WARNING: Please consider reporting this to the maintainers of ...
  #   WARNING: Use --illegal-access=warn to enable warnings of further ...
  #   WARNING: All illegal access operations will be denied in a future release
  warning_re = re.compile(r'WARNING: ('
                          'An illegal reflective|'
                          'Illegal reflective access|'
                          'Please consider reporting this to|'
                          'Use --illegal-access=warn|'
                          'All illegal access operations)')
  return '\n'.join(line for line in output.split('\n')
                   if not warning_re.search(line))
+
+
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
def CheckOutput(args,
                cwd=None,
                env=None,
                print_stdout=False,
                print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_on_output=True,
                fail_func=lambda returncode, stderr: returncode != 0):
  """Runs |args| and returns its stdout; raises CalledProcessError on failure.

  Args:
    args: Command argument list.
    cwd: Working directory for the command (defaults to os.getcwd()).
    env: Environment dict for the child process.
    print_stdout: Whether to forward the child's stdout to sys.stdout.
    print_stderr: Whether to forward the child's stderr to sys.stderr.
    stdout_filter: Optional callable applied to the captured stdout.
    stderr_filter: Optional callable applied to the captured stderr.
    fail_on_output: If True, any output on a *forwarded* stream is a failure.
    fail_func: Predicate on (returncode, stderr) that decides failure.

  Returns:
    The (filtered) stdout of the command, as text.

  Raises:
    CalledProcessError: If fail_func returns True, or if fail_on_output is
      set and the command produced forwarded output.
  """
  if not cwd:
    cwd = os.getcwd()

  logging.info('CheckOutput: %s', ' '.join(args))
  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
  stdout, stderr = child.communicate()

  # For Python3 only: communicate() returned bytes; decode to text.
  if isinstance(stdout, bytes) and sys.version_info >= (3, ):
    stdout = stdout.decode('utf-8')
    stderr = stderr.decode('utf-8')

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  # Failure is checked before the streams are forwarded, so the combined
  # output ends up in the exception message.
  if fail_func and fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  # Only output that survived the filters (and is being forwarded) counts
  # towards fail_on_output.
  has_stdout = print_stdout and stdout
  has_stderr = print_stderr and stderr
  if fail_on_output and (has_stdout or has_stderr):
    MSG = """\
Command failed because it wrote to {}.
You can often set treat_warnings_as_errors=false to not treat output as \
failure (useful when developing locally)."""
    if has_stdout and has_stderr:
      stream_string = 'stdout and stderr'
    elif has_stdout:
      stream_string = 'stdout'
    else:
      stream_string = 'stderr'
    raise CalledProcessError(cwd, args, MSG.format(stream_string))

  return stdout
+
+
def GetModifiedTime(path):
  """Returns the newest mtime among |path| itself and, for symlinks, its target."""
  # For a symlink, the modified time should be the greater of the link's
  # modified time and the modified time of the target.
  link_mtime = os.lstat(path).st_mtime
  target_mtime = os.stat(path).st_mtime
  return max(link_mtime, target_mtime)
+
+
def IsTimeStale(output, inputs):
  """Returns True if |output| is missing or older than any path in |inputs|."""
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  return any(GetModifiedTime(i) > output_time for i in inputs)
+
+
+def _CheckZipPath(name):
+ if os.path.normpath(name) != name:
+ raise Exception('Non-canonical zip path: %s' % name)
+ if os.path.isabs(name):
+ raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+ zi = zip_file.getinfo(name)
+
+ # The two high-order bytes of ZipInfo.external_attr represent
+ # UNIX permissions and file type bits.
+ return stat.S_ISLNK(zi.external_attr >> 16)
+
+
def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
               predicate=None):
  """Extracts all entries of a zip file, with symlink support.

  Args:
    zip_path: Path of the zip archive to extract.
    path: Output directory (defaults to the current directory).
    no_clobber: If True, raise when an output path already exists.
    pattern: Optional fnmatch pattern; non-matching entries are skipped.
    predicate: Optional callable on the entry name; falsy return skips it.

  Returns:
    List of the extracted file paths (directory entries are not included).
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  if not zipfile.is_zipfile(zip_path):
    raise Exception('Invalid zip file: %s' % zip_path)

  extracted = []
  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      # Directory entries end with '/'; create them but don't record them.
      if name.endswith('/'):
        MakeDirectory(os.path.join(path, name))
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      if predicate and not predicate(name):
        continue
      # Refuse entries that could escape the extraction directory.
      _CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      if _IsSymlink(z, name):
        # ZipFile.extract() would write the link target bytes as a regular
        # file; create an actual symlink instead.
        dest = os.path.join(path, name)
        MakeDirectory(os.path.dirname(dest))
        os.symlink(z.read(name), dest)
        extracted.append(dest)
      else:
        z.extract(name, path)
        extracted.append(os.path.join(path, name))

  return extracted
+
+
def HermeticDateTime(timestamp=None):
  """Returns a constant ZipInfo.date_time tuple.

  Args:
    timestamp: Unix timestamp to use for files in the archive.

  Returns:
    A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp.
  """
  if not timestamp:
    return (2001, 1, 1, 0, 0, 0)
  # struct_time's first six fields are exactly (year, mon, mday, hour,
  # min, sec) -- the shape ZipInfo.date_time expects.
  return tuple(time.gmtime(timestamp))[:6]
+
+
def HermeticZipInfo(*args, **kwargs):
  """Creates a zipfile.ZipInfo with a constant timestamp and external_attr.

  If a date_time value is not provided in the positional or keyword
  arguments, the hermetic default (Jan 1, 2001) is used.

  Args:
    See zipfile.ZipInfo.

  Returns:
    A zipfile.ZipInfo.
  """
  # date_time may arrive positionally (args[1]) or as a keyword argument.
  date_time = args[1] if len(args) >= 2 else kwargs.get('date_time')
  if not date_time:
    # Same constant that HermeticDateTime() returns with no timestamp.
    kwargs['date_time'] = (2001, 1, 1, 0, 0, 0)
  info = zipfile.ZipInfo(*args, **kwargs)
  # rw-r--r-- regular file, independent of the build machine's umask.
  info.external_attr = (0o644 << 16)
  return info
+
+
def AddToZipHermetic(zip_file,
                     zip_path,
                     src_path=None,
                     data=None,
                     compress=None,
                     date_time=None):
  """Adds a file to the given ZipFile with a hard-coded modified time.

  Args:
    zip_file: ZipFile instance to add the file to.
    zip_path: Destination path within the zip file (or ZipInfo instance).
    src_path: Path of the source file. Mutually exclusive with |data|.
    data: File data as a string.
    compress: Whether to enable compression. Default is taken from ZipFile
        constructor.
    date_time: The last modification date and time for the archive member.
  """
  assert (src_path is None) != (data is None), (
      '|src_path| and |data| are mutually exclusive.')
  if isinstance(zip_path, zipfile.ZipInfo):
    zipinfo = zip_path
    zip_path = zipinfo.filename
  else:
    zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time)

  _CheckZipPath(zip_path)

  # Symlinks are stored as entries whose data is the link target.
  if src_path and os.path.islink(src_path):
    zipinfo.filename = zip_path
    zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
    zip_file.writestr(zipinfo, os.readlink(src_path))
    return

  # zipfile.write() does
  #     external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
  # but we want to use _HERMETIC_FILE_ATTR, so manually set
  # the few attr bits we care about. Only the executable bits are kept so
  # that outputs stay reproducible across machines/umasks.
  if src_path:
    st = os.stat(src_path)
    for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
      if st.st_mode & mode:
        zipinfo.external_attr |= mode << 16

  if src_path:
    with open(src_path, 'rb') as f:
      data = f.read()

  # zipfile will deflate even when it makes the file bigger. To avoid
  # growing files, disable compression at an arbitrary cut off point.
  if len(data) < 16:
    compress = False

  # None converts to ZIP_STORED, when passed explicitly rather than the
  # default passed to the ZipFile constructor.
  compress_type = zip_file.compression
  if compress is not None:
    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
  zip_file.writestr(zipinfo, data, compress_type)
+
+
def DoZip(inputs,
          output,
          base_dir=None,
          compress_fn=None,
          zip_prefix_path=None,
          timestamp=None):
  """Creates a zip file from a list of files.

  Args:
    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
    output: Path, fileobj, or ZipFile instance to add files to.
    base_dir: Prefix to strip from inputs.
    compress_fn: Applied to each input to determine whether or not to compress.
        By default, items will be |zipfile.ZIP_STORED|.
    zip_prefix_path: Path prepended to file path in zip file.
    timestamp: Unix timestamp to use for files in the archive.
  """
  if base_dir is None:
    base_dir = '.'
  input_tuples = []
  for tup in inputs:
    if isinstance(tup, string_types):
      # A bare path: derive the in-zip path by stripping base_dir.
      tup = (os.path.relpath(tup, base_dir), tup)
    if tup[0].startswith('..'):
      raise Exception('Invalid zip_path: ' + tup[0])
    input_tuples.append(tup)

  # Sort by zip path to ensure stable zip ordering.
  input_tuples.sort(key=lambda tup: tup[0])

  out_zip = output
  if not isinstance(output, zipfile.ZipFile):
    out_zip = zipfile.ZipFile(output, 'w')

  # A single constant timestamp keeps the archive bit-for-bit reproducible.
  date_time = HermeticDateTime(timestamp)
  try:
    for zip_path, fs_path in input_tuples:
      if zip_prefix_path:
        zip_path = os.path.join(zip_prefix_path, zip_path)
      compress = compress_fn(zip_path) if compress_fn else None
      AddToZipHermetic(out_zip,
                       zip_path,
                       src_path=fs_path,
                       compress=compress,
                       date_time=date_time)
  finally:
    # Only close the ZipFile when this function created it.
    if output is not out_zip:
      out_zip.close()
+
+
def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
  """Creates a zip file from a directory.

  Args:
    output: Path, fileobj, or ZipFile instance to write to.
    base_dir: Directory to zip; stored paths are relative to it.
    compress_fn: Applied to each zip path to decide whether to compress it.
    zip_prefix_path: Path prepended to each file's path within the zip.
  """
  inputs = []
  for root, _, files in os.walk(base_dir):
    for f in files:
      inputs.append(os.path.join(root, f))

  if isinstance(output, zipfile.ZipFile):
    DoZip(
        inputs,
        output,
        base_dir,
        compress_fn=compress_fn,
        zip_prefix_path=zip_prefix_path)
  else:
    # Write via a temp file so a failed zip never clobbers a good output.
    with AtomicOutput(output) as f:
      DoZip(
          inputs,
          f,
          base_dir,
          compress_fn=compress_fn,
          zip_prefix_path=zip_prefix_path)
+
+
def MatchesGlob(path, filters):
  """Returns whether |path| matches any of the given fnmatch glob patterns."""
  # Preserve the original falsy return value (None/[]/'') for empty filters.
  if not filters:
    return filters
  return any(fnmatch.fnmatch(path, pattern) for pattern in filters)
+
+
def MergeZips(output, input_zips, path_transform=None, compress=None):
  """Combines all files from |input_zips| into |output|.

  Args:
    output: Path, fileobj, or ZipFile instance to add files to.
    input_zips: Iterable of paths to zip files to merge.
    path_transform: Called for each entry path. Returns a new path, or None to
      skip the file.
    compress: Overrides compression setting from origin zip entries.
  """
  path_transform = path_transform or (lambda p: p)
  added_names = set()

  out_zip = output
  if not isinstance(output, zipfile.ZipFile):
    out_zip = zipfile.ZipFile(output, 'w')

  try:
    for in_file in input_zips:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for info in in_zip.infolist():
          # Ignore directories.
          if info.filename[-1] == '/':
            continue
          dst_name = path_transform(info.filename)
          if not dst_name:
            continue
          # The first zip to provide a name wins; later duplicates dropped.
          already_added = dst_name in added_names
          if not already_added:
            if compress is not None:
              compress_entry = compress
            else:
              # Preserve the entry's original stored/deflated choice.
              compress_entry = info.compress_type != zipfile.ZIP_STORED
            AddToZipHermetic(
                out_zip,
                dst_name,
                data=in_zip.read(info),
                compress=compress_entry)
            added_names.add(dst_name)
  finally:
    # Only close the ZipFile when this function created it.
    if output is not out_zip:
      out_zip.close()
+
+
def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (crashes if cycles exist).

  Args:
    top: A list of the top level nodes
    deps_func: A function that takes a node and returns a list of its direct
        dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # Depth-first post-order traversal. OrderedDict keeps discovery order
  # (also on Python 2) so ties break by original input order.
  visited = collections.OrderedDict()

  def _visit(node):
    if node in visited:
      return
    children = deps_func(node)
    for child in children:
      _visit(child)
    # Recorded only after its dependencies, giving post-order placement.
    visited[node] = children

  for node in top:
    _visit(node)
  return list(visited)
+
+
def InitLogging(enabling_env):
  """Configures logging and logs script start/finish.

  Args:
    enabling_env: Name of an environment variable; when it is set (to any
      value), DEBUG-level logging is enabled, otherwise only WARNING.
  """
  logging.basicConfig(
      level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
      format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
  script_name = os.path.basename(sys.argv[0])
  logging.info('Started (%s)', script_name)

  # Captured so the atexit hook can tell parent from fork'ed children.
  my_pid = os.getpid()

  def log_exit():
    # Do not log for fork'ed processes.
    if os.getpid() == my_pid:
      logging.info("Job's done (%s)", script_name)

  atexit.register(log_exit)
+
+
def AddDepfileOption(parser):
  """Adds a --depfile flag to an optparse or argparse parser.

  TODO(agrieve): Get rid of this once we've moved to argparse.
  """
  # optparse parsers expose add_option(); argparse exposes add_argument().
  add = getattr(parser, 'add_option', None) or parser.add_argument
  add('--depfile',
      help='Path to depfile (refer to `gn help depfile`)')
+
+
def WriteDepfile(depfile_path, first_gn_output, inputs=None):
  """Writes a ninja-format depfile listing |inputs| as deps of the output.

  Args:
    depfile_path: Path of the depfile to write.
    first_gn_output: The action's first output; ninja keys the depfile off a
      single output path.
    inputs: Iterable of input paths (must not be a single string).
  """
  assert depfile_path != first_gn_output  # http://crbug.com/646165
  assert not isinstance(inputs, string_types)  # Easy mistake to make
  inputs = inputs or []
  MakeDirectory(os.path.dirname(depfile_path))
  # Ninja does not support multiple outputs in depfiles.
  with open(depfile_path, 'w') as depfile:
    # Spaces in paths must be escaped for ninja to parse the depfile.
    depfile.write(first_gn_output.replace(' ', '\\ '))
    depfile.write(': \\\n ')
    depfile.write(' \\\n '.join(i.replace(' ', '\\ ') for i in inputs))
    depfile.write('\n')
+
+
def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json.
  And then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
  suffix the (intermediate) value will be interpreted as a single item list
  and the single item will be returned or used for further traversal.

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  file_jsons = dict()
  # Raw string: '\(' is an invalid escape sequence in a plain string literal
  # (SyntaxWarning on modern Python).
  r = re.compile(r'@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    def get_key(key):
      # A 'foo[]' key means: unwrap the value as a single-item list.
      if key.endswith('[]'):
        return key[:-2], True
      return key, False

    lookup_path = match.group(1).split(':')
    file_path, _ = get_key(lookup_path[0])
    # Cache parsed files since several args may reference the same file.
    if file_path not in file_jsons:
      with open(file_path) as f:
        file_jsons[file_path] = json.load(f)

    # The first lookup key is the file path itself, indexing into the cache.
    expansion = file_jsons
    for k in lookup_path:
      k, flatten = get_key(k)
      expansion = expansion[k]
      if flatten:
        if not isinstance(expansion, list) or not len(expansion) == 1:
          raise Exception('Expected single item list but got %s' % expansion)
        expansion = expansion[0]

    # This should match ParseGnList. The output is either a GN-formatted list
    # or a literal (with no quotes).
    if isinstance(expansion, list):
      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
                     arg[match.end():])
    else:
      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]

  return new_args
+
+
def ReadSourcesList(sources_list_file_name):
  """Reads a GN-written file containing list of file names and returns a list.

  Note that this function should not be used to parse response files.

  Returns:
    A list with one stripped entry per line of the file.
  """
  with open(sources_list_file_name) as f:
    return [line.strip() for line in f.readlines()]
diff --git a/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000000..008ea11748
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
# Test dependency graph: maps node -> list of direct dependencies.
# An OrderedDict so iteration (and thus tie-breaking) is deterministic.
_DEPS = collections.OrderedDict()
_DEPS['a'] = []
_DEPS['b'] = []
_DEPS['c'] = ['a']
_DEPS['d'] = ['a']
_DEPS['e'] = ['f']
_DEPS['f'] = ['a', 'd']
_DEPS['g'] = []
_DEPS['h'] = ['d', 'b', 'f']
_DEPS['i'] = ['f']
+
+
class BuildUtilsTest(unittest.TestCase):
  """Tests the ordering guarantees of GetSortedTransitiveDependencies."""

  def testGetSortedTransitiveDependencies_all(self):
    # Every node is given as input; output must still be dependency-sorted.
    TOP = _DEPS.keys()
    EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)

  def testGetSortedTransitiveDependencies_leaves(self):
    # Only sink nodes given; transitive deps must be discovered and ordered.
    TOP = ['c', 'e', 'g', 'h', 'i']
    EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)

  def testGetSortedTransitiveDependencies_leavesReverse(self):
    # Input order affects tie-breaking, never dependency ordering.
    TOP = ['i', 'h', 'g', 'e', 'c']
    EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
    actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
    self.assertEqual(EXPECTED, actual)
+
+
# Allows running the tests directly: `python3 build_utils_test.py`.
if __name__ == '__main__':
  unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/diff_utils.py b/third_party/libwebrtc/build/android/gyp/util/diff_utils.py
new file mode 100644
index 0000000000..530a688191
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,127 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import difflib
+from util import build_utils
+
+
+def _SkipOmitted(line):
+ """
+ Skip lines that are to be intentionally omitted from the expectations file.
+
+ This is required when the file to be compared against expectations contains
+ a line that changes from build to build because - for instance - it contains
+ version information.
+ """
+ if line.rstrip().endswith('# OMIT FROM EXPECTATIONS'):
+ return '# THIS LINE WAS OMITTED\n'
+ return line
+
+
+def _GenerateDiffWithOnlyAdditons(expected_path, actual_data):
+ """Generate a diff that only contains additions"""
+ # Ignore blank lines when creating the diff to cut down on whitespace-only
+ # lines in the diff. Also remove trailing whitespaces and add the new lines
+ # manually (ndiff expects new lines but we don't care about trailing
+ # whitespace).
+ with open(expected_path) as expected:
+ expected_lines = [l for l in expected.readlines() if l.strip()]
+ actual_lines = [
+ '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip()
+ ]
+
+ diff = difflib.ndiff(expected_lines, actual_lines)
+ filtered_diff = (l for l in diff if l.startswith('+'))
+ return ''.join(filtered_diff)
+
+
def _DiffFileContents(expected_path, actual_data):
  """Check file contents for equality and return the diff or None.

  Args:
    expected_path: Path to the file holding the expected contents.
    actual_data: The actual contents, as a single string.

  Returns:
    None when the contents match, otherwise a zero-context unified diff with
    'before/' and 'after/' headers relative to the source root.
  """
  # Remove all trailing whitespace and add it explicitly in the end.
  with open(expected_path) as f_expected:
    expected_lines = [l.rstrip() for l in f_expected.readlines()]
  actual_lines = [
      _SkipOmitted(line).rstrip() for line in actual_data.splitlines()
  ]

  if expected_lines == actual_lines:
    return None

  # Present the path relative to the source root for readable diff headers.
  expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)

  diff = difflib.unified_diff(
      expected_lines,
      actual_lines,
      fromfile=os.path.join('before', expected_path),
      tofile=os.path.join('after', expected_path),
      n=0,
      lineterm='',
  )

  return '\n'.join(diff)
+
+
def AddCommandLineFlags(parser):
  """Registers the expectation-checking flags used by CheckExpectations()."""
  group = parser.add_argument_group('Expectations')
  group.add_argument(
      '--expected-file',
      help='Expected contents for the check. If --expected-file-base is set, '
      'this is a diff of --actual-file and --expected-file-base.')
  group.add_argument(
      '--expected-file-base',
      help='File to diff against before comparing to --expected-file.')
  group.add_argument('--actual-file',
                     help='Path to write actual file (for reference).')
  group.add_argument('--failure-file',
                     help='Write to this file if expectations fail.')
  group.add_argument('--fail-on-expectations',
                     action="store_true",
                     help='Fail on expectation mismatches.')
  group.add_argument('--only-verify-expectations',
                     action='store_true',
                     help='Verify the expectation and exit.')
+
+
def CheckExpectations(actual_data, options, custom_msg=''):
  """Compares |actual_data| against the expectation file and reports diffs.

  Args:
    actual_data: Text produced by the build step under check.
    options: Parsed arguments as registered by AddCommandLineFlags().
    custom_msg: Extra text to include in the failure message.
  """
  if options.actual_file:
    with build_utils.AtomicOutput(options.actual_file) as f:
      f.write(actual_data.encode('utf8'))
  if options.expected_file_base:
    # Two-stage expectations: only additions relative to the base are
    # compared against --expected-file.
    actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base,
                                                actual_data)
  diff_text = _DiffFileContents(options.expected_file, actual_data)

  if not diff_text:
    fail_msg = ''
  else:
    fail_msg = """
Expectations need updating:
https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expectations/README.md

LogDog tip: Use "Raw log" or "Switch to lite mode" before copying:
https://bugs.chromium.org/p/chromium/issues/detail?id=984616

{}

To update expectations, run:
########### START ###########
 patch -p1 <<'END_DIFF'
{}
END_DIFF
############ END ############
""".format(custom_msg, diff_text)

    sys.stderr.write(fail_msg)

  if fail_msg and options.fail_on_expectations:
    # Don't write failure file when failing on expectations or else the target
    # will not be re-run on subsequent ninja invocations.
    sys.exit(1)

  if options.failure_file:
    with open(options.failure_file, 'w') as f:
      f.write(fail_msg)
diff --git a/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py b/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000000..975945510e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,59 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+# Utilities to read and write .jar.info files.
+#
+# A .jar.info file contains a simple mapping from fully-qualified Java class
+# names to the source file that actually defines it.
+#
+# For APKs, the .jar.info maps the class names to the .jar file that
+# contains its .class definition instead.
+
+
def ReadAarSourceInfo(info_path):
  """Returns the source= path from an .aar's source.info file."""
  # The file contents look like: "source=path/to/.aar\n".
  with open(info_path) as info_file:
    contents = info_file.read().rstrip()
  return contents.split('=', 1)[1]
+
+
def ParseJarInfoFile(info_path):
  """Parse a given .jar.info file as a dictionary.

  Args:
    info_path: input .jar.info file path.
  Returns:
    A new dictionary mapping fully-qualified Java class names to file paths.
    Empty when the file does not exist.
  """
  info_data = {}
  if not os.path.exists(info_path):
    return info_data
  with open(info_path, 'r') as info_file:
    for raw_line in info_file:
      entry = raw_line.strip()
      if not entry:
        continue
      fully_qualified_name, path = entry.split(',', 1)
      info_data[fully_qualified_name] = path
  return info_data
+
+
def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
  """Generate a .jar.info file from a given dictionary.

  Args:
    output_obj: output file object (opened in binary mode).
    info_data: a mapping of fully qualified Java class names to filepaths.
    source_file_map: an optional mapping from java source file paths to the
      corresponding source .srcjar. This is because info_data may contain the
      path of Java source files that were extracted from an .srcjar into a
      temporary location.
  """
  # Entries are sorted by class name so output is deterministic.
  for fully_qualified_name, path in sorted(info_data.items()):
    if source_file_map and path in source_file_map:
      path = source_file_map[path]
      assert not path.startswith('/tmp'), (
          'Java file path should not be in temp dir: {}'.format(path))
    line = '{},{}\n'.format(fully_qualified_name, path)
    output_obj.write(line.encode('utf8'))
diff --git a/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py b/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py
new file mode 100644
index 0000000000..5180400d61
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,194 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
def GetScriptName():
  """Returns the basename of the currently running script."""
  script_path = os.path.abspath(sys.argv[0])
  return os.path.basename(script_path)
+
+
def GetJavaFilePath(java_package, class_name):
  """Returns the relative path of the .java file for |class_name|.

  The package dots become directory separators, e.g.
  ('org.foo', 'Bar') -> org/foo/Bar.java.
  """
  package_dir = java_package.replace('.', os.path.sep)
  return os.path.join(package_dir, '{}.java'.format(class_name))
+
+
def KCamelToShouty(s):
  """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.

  kFooBar -> FOO_BAR
  FooBar -> FOO_BAR
  FooBAR9 -> FOO_BAR9
  FooBARBaz -> FOO_BAR_BAZ

  Strings not matching the expected camel-case shape are returned unchanged.
  """
  if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s):
    return s
  substitutions = (
      # Strip the leading k.
      (r'^k', ''),
      # Treat "WebView" like one word.
      (r'WebView', r'Webview'),
      # Add _ between title words and anything else.
      (r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2'),
      # Add _ between lower -> upper transitions.
      (r'([^A-Z_0-9])([A-Z])', r'\1_\2'),
  )
  result = s
  for pattern, replacement in substitutions:
    result = re.sub(pattern, replacement, result)
  return result.upper()
+
+
class JavaString(object):
  """Java-side representation of a single C++ string constant."""

  def __init__(self, name, value, comments):
    # Names are normalized from (k)CamelCase to SHOUTY_CASE.
    self.name = KCamelToShouty(name)
    self.value = value
    self.comments = '\n'.join(' ' + comment for comment in comments)

  def Format(self):
    """Renders this constant as a commented Java field declaration."""
    declaration = '%s\n public static final String %s = %s;'
    return declaration % (self.comments, self.name, self.value)
+
+
def ParseTemplateFile(lines):
  """Scans a Java template for its package and class name.

  Args:
    lines: iterable of template lines.
  Returns:
    (package, class_name) tuple. Either element is '' when not found before
    the class declaration line.
  """
  package_pattern = re.compile(r'^package (.*);')
  class_pattern = re.compile(r'.*class (.*) {')
  package = ''
  class_name = ''
  for line in lines:
    package_match = package_pattern.match(line)
    if package_match:
      package = package_match.group(1)
    class_match = class_pattern.match(line)
    if class_match:
      class_name = class_match.group(1)
      # Stop scanning once the class declaration is reached.
      break
  return package, class_name
+
+
+# TODO(crbug.com/937282): Work will be needed if we want to annotate specific
+# constants in the file to be parsed.
class CppConstantParser(object):
  """Parses C++ constants, retaining their comments.

  The Delegate subclass is responsible for matching and extracting the
  constant's variable name and value, as well as generating an object to
  represent the Java representation of this value.
  """
  # Matches a whole-line C++ "// ..." comment (leading whitespace allowed).
  SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')

  class Delegate(object):
    def ExtractConstantName(self, line):
      """Extracts a constant's name from line or None if not a match."""
      raise NotImplementedError()

    def ExtractValue(self, line):
      """Extracts a constant's value from line or None if not a match."""
      raise NotImplementedError()

    def CreateJavaConstant(self, name, value, comments):
      """Creates an object representing the Java analog of a C++ constant.

      CppConstantParser will not interact with the object created by this
      method. Instead, it will store this value in a list and return a list of
      all objects from the Parse() method. In this way, the caller may define
      whatever class suits their need.

      Args:
        name: the constant's variable name, as extracted by
          ExtractConstantName()
        value: the constant's value, as extracted by ExtractValue()
        comments: the code comments describing this constant
      """
      raise NotImplementedError()

  def __init__(self, delegate, lines):
    self._delegate = delegate
    self._lines = lines
    # True while inside a (possibly multi-line) constant declaration.
    self._in_variable = False
    # True while inside a run of // comment lines.
    self._in_comment = False
    self._package = ''
    self._current_comments = []
    self._current_name = ''
    self._current_value = ''
    self._constants = []

  def _ExtractVariable(self, line):
    # Fixed: previously referenced StringFileParser.STRING_RE, which is not
    # defined in this module and raised NameError if ever called. Name
    # extraction is the delegate's job.
    return self._delegate.ExtractConstantName(line)

  def _ExtractValue(self, line):
    # Fixed: previously referenced StringFileParser.VALUE_RE (also undefined
    # in this module). Value extraction is the delegate's job.
    return self._delegate.ExtractValue(line)

  def _Reset(self):
    """Clears all in-progress parsing state."""
    self._current_comments = []
    self._current_name = ''
    self._current_value = ''
    self._in_variable = False
    self._in_comment = False

  def _AppendConstant(self):
    """Finalizes the current constant via the delegate, then resets state."""
    self._constants.append(
        self._delegate.CreateJavaConstant(self._current_name,
                                          self._current_value,
                                          self._current_comments))
    self._Reset()

  def _ParseValue(self, line):
    """Continues a multi-line declaration; completes it if a value appears."""
    current_value = self._delegate.ExtractValue(line)
    if current_value is not None:
      self._current_value = current_value
      self._AppendConstant()
    else:
      # No value found where one was expected: abandon the declaration.
      self._Reset()

  def _ParseComment(self, line):
    """Accumulates a comment line. Returns True iff |line| was a comment."""
    comment_line = CppConstantParser.SINGLE_LINE_COMMENT_RE.match(line)
    if comment_line:
      self._current_comments.append(comment_line.groups()[0])
      self._in_comment = True
      self._in_variable = True
      return True
    self._in_comment = False
    return False

  def _ParseVariable(self, line):
    """Starts a declaration. Returns True iff |line| declared a constant."""
    current_name = self._delegate.ExtractConstantName(line)
    if current_name is not None:
      self._current_name = current_name
      current_value = self._delegate.ExtractValue(line)
      if current_value is not None:
        # Single-line declaration: name and value on the same line.
        self._current_value = current_value
        self._AppendConstant()
      else:
        # Value must appear on a following line.
        self._in_variable = True
      return True
    self._in_variable = False
    return False

  def _ParseLine(self, line):
    """Dispatches |line| to the appropriate handler for the current state."""
    if not self._in_variable:
      if not self._ParseVariable(line):
        self._ParseComment(line)
      return

    if self._in_comment:
      if self._ParseComment(line):
        return
      if not self._ParseVariable(line):
        self._Reset()
      return

    # Mid-declaration: the value should appear on this line.
    if self._in_variable:
      self._ParseValue(line)

  def Parse(self):
    """Returns a list of objects representing C++ constants.

    Each object in the list was created by Delegate.CreateJavaConstant().
    """
    for line in self._lines:
      self._ParseLine(line)
    return self._constants
diff --git a/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py b/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py
new file mode 100644
index 0000000000..a517708b59
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py
@@ -0,0 +1,321 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for working with Android manifests."""
+
+import hashlib
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+from xml.etree import ElementTree
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution'
+EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml'))
+# When normalizing for expectation matching, wrap these tags when they are long
+# or else they become very hard to read.
+_WRAP_CANDIDATES = (
+ '<manifest',
+ '<application',
+ '<activity',
+ '<provider',
+ '<receiver',
+ '<service',
+)
+# Don't wrap lines shorter than this.
+_WRAP_LINE_LENGTH = 100
+
+_xml_namespace_initialized = False
+
+
def _RegisterElementTreeNamespaces():
  """Registers the manifest XML namespace prefixes with ElementTree.

  Idempotent: only the first call has any effect.
  """
  global _xml_namespace_initialized
  if _xml_namespace_initialized:
    return
  _xml_namespace_initialized = True
  for prefix, uri in (('android', ANDROID_NAMESPACE),
                      ('tools', TOOLS_NAMESPACE), ('dist', DIST_NAMESPACE)):
    ElementTree.register_namespace(prefix, uri)
+
+
def ParseManifest(path):
  """Parses an AndroidManifest.xml using ElementTree.

  Registers required namespaces, creates application node if missing, adds any
  missing namespaces for 'android', 'tools' and 'dist'.

  Returns tuple of:
    doc: Root xml document.
    manifest_node: the <manifest> node.
    app_node: the <application> node.
  """
  _RegisterElementTreeNamespaces()
  doc = ElementTree.parse(path)
  root = doc.getroot()
  # ElementTree.find does not work if the required tag is the root.
  manifest_node = root if root.tag == 'manifest' else doc.find('manifest')

  app_node = doc.find('application')
  if app_node is None:
    # Guarantee callers a usable <application> node.
    app_node = ElementTree.SubElement(manifest_node, 'application')

  return doc, manifest_node, app_node
+
+
def SaveManifest(doc, path):
  """Atomically writes |doc| to |path| as UTF-8 encoded XML."""
  serialized = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
  with build_utils.AtomicOutput(path) as out_file:
    out_file.write(serialized)
+
+
def GetPackage(manifest_node):
  """Returns the <manifest> node's package attribute, or None if unset."""
  package = manifest_node.get('package')
  return package
+
+
def AssertUsesSdk(manifest_node,
                  min_sdk_version=None,
                  target_sdk_version=None,
                  max_sdk_version=None,
                  fail_if_not_exist=False):
  """Asserts values of attributes of <uses-sdk> element.

  Unless |fail_if_not_exist| is true, will only assert if both the passed value
  is not None and the value of attribute exist. If |fail_if_not_exist| is true
  will fail if passed value is not None but attribute does not exist.
  """
  uses_sdk_node = manifest_node.find('./uses-sdk')
  if uses_sdk_node is None:
    return
  expectations = (('min', min_sdk_version), ('target', target_sdk_version),
                  ('max', max_sdk_version))
  for prefix, sdk_version in expectations:
    attr_name = '{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix)
    value = uses_sdk_node.get(attr_name)
    if fail_if_not_exist and not value and sdk_version:
      assert False, (
          '%sSdkVersion in Android manifest does not exist but we expect %s' %
          (prefix, sdk_version))
    if not value or not sdk_version:
      # Nothing to compare for this attribute.
      continue
    assert value == sdk_version, (
        '%sSdkVersion in Android manifest is %s but we expect %s' %
        (prefix, value, sdk_version))
+
+
def AssertPackage(manifest_node, package):
  """Asserts that manifest package has desired value.

  Will only assert if both |package| is not None and the package is set in the
  manifest.
  """
  actual_package = GetPackage(manifest_node)
  if actual_package is None or package is None:
    return
  assert actual_package == package, (
      'Package in Android manifest is %s but we expect %s' % (actual_package,
                                                              package))
+
+
def _SortAndStripElementTree(root):
  """Recursively sorts children/attributes and strips whitespace-only text.

  Sort alphabetically with two exceptions:
    1) Put <application> node last (since it's giant).
    2) Put android:name before other attributes.
  """
  name_attr = '{%s}name' % ANDROID_NAMESPACE

  def element_sort_key(node):
    if node.tag == 'application':
      return 'z'
    serialized = ElementTree.tostring(node)
    # ElementTree.tostring inserts namespace attributes for any that are needed
    # for the node or any of its descendants. Remove them so as to prevent a
    # change to a child that adds/removes a namespace usage from changing sort
    # order.
    return re.sub(r' xmlns:.*?".*?"', '', serialized.decode('utf8'))

  def attribute_sort_key(tup):
    # android:name always sorts first.
    return ('', '') if tup[0] == name_attr else tup

  def _visit(node):
    for child in node:
      if child.text and child.text.isspace():
        child.text = None
      _visit(child)

    # Sort attributes (requires Python 3.8+).
    node.attrib = dict(sorted(node.attrib.items(), key=attribute_sort_key))

    # Sort nodes
    node[:] = sorted(node, key=element_sort_key)

  _visit(root)
+
+
+def _SplitElement(line):
+ """Parses a one-line xml node into ('<tag', ['a="b"', ...]], '/>')."""
+
+ # Shlex splits nicely, but removes quotes. Need to put them back.
+ def restore_quotes(value):
+ return value.replace('=', '="', 1) + '"'
+
+ # Simplify restore_quotes by separating />.
+ assert line.endswith('>'), line
+ end_tag = '>'
+ if line.endswith('/>'):
+ end_tag = '/>'
+ line = line[:-len(end_tag)]
+
+ # Use shlex to avoid having to re-encode &quot;, etc.
+ parts = shlex.split(line)
+ start_tag = parts[0]
+ attrs = parts[1:]
+
+ return start_tag, [restore_quotes(x) for x in attrs], end_tag
+
+
+def _CreateNodeHash(lines):
+ """Computes a hash (md5) for the first XML node found in |lines|.
+
+ Args:
+ lines: List of strings containing pretty-printed XML.
+
+ Returns:
+ Positive 32-bit integer hash of the node (including children).
+ """
+ target_indent = lines[0].find('<')
+ tag_closed = False
+ for i, l in enumerate(lines[1:]):
+ cur_indent = l.find('<')
+ if cur_indent != -1 and cur_indent <= target_indent:
+ tag_lines = lines[:i + 1]
+ break
+ elif not tag_closed and 'android:name="' in l:
+ # To reduce noise of node tags changing, use android:name as the
+ # basis the hash since they usually unique.
+ tag_lines = [l]
+ break
+ tag_closed = tag_closed or '>' in l
+ else:
+ assert False, 'Did not find end of node:\n' + '\n'.join(lines)
+
+ # Insecure and truncated hash as it only needs to be unique vs. its neighbors.
+ return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
+
+
+def _IsSelfClosing(lines):
+ """Given pretty-printed xml, returns whether first node is self-closing."""
+ for l in lines:
+ idx = l.find('>')
+ if idx != -1:
+ return l[idx - 1] == '/'
+ assert False, 'Did not find end of tag:\n' + '\n'.join(lines)
+
+
def _AddDiffTags(lines):
  """Appends "# DIFF-ANCHOR: <hash>" markers to tag lines, in place.

  When multiple identical tags appear sequentially, XML diffs can look like:
    + </tag>
    + <tag>
  rather than:
    + <tag>
    + </tag>
  To reduce confusion, add hashes to tags.
  This also ensures changed tags show up with outer <tag> elements rather than
  showing only changed attributes.
  """
  hash_stack = []
  for i, line in enumerate(lines):
    stripped = line.lstrip()
    # Ignore non-indented tags and lines that are not the start/end of a node.
    if line[0] != ' ' or stripped[0] != '<':
      continue
    # Ignore self-closing nodes that fit on one line.
    if line[-2:] == '/>':
      continue
    # Ignore <application> since diff tag changes with basically any change.
    if stripped.lstrip('</').startswith('application'):
      continue

    if stripped[1] != '/':
      # Opening tag: compute the node's hash, and when the node is not
      # self-closing, remember it for the matching closing tag.
      node_hash = _CreateNodeHash(lines[i:])
      if not _IsSelfClosing(lines[i:]):
        hash_stack.append(node_hash)
    else:
      # Closing tag: reuse the hash pushed by the matching opening tag.
      node_hash = hash_stack.pop()
    lines[i] += ' # DIFF-ANCHOR: {}'.format(node_hash)
  assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
+
+
def NormalizeManifest(manifest_contents):
  """Returns a canonical, diff-friendly rendering of an AndroidManifest.

  Strips noise (android:debuggable, static-library version numbers, non-root
  package-name occurrences), sorts nodes and attributes, re-indents, wraps
  long tags, and appends DIFF-ANCHOR hashes to open/close tags.

  Args:
    manifest_contents: AndroidManifest.xml contents as a string.
  Returns:
    The normalized manifest as a single string with a trailing newline.
  """
  _RegisterElementTreeNamespaces()
  # This also strips comments and sorts node attributes alphabetically.
  root = ElementTree.fromstring(manifest_contents)
  package = GetPackage(root)

  app_node = root.find('application')
  if app_node is not None:
    # android:debuggable is added when !is_official_build. Strip it out to avoid
    # expectation diffs caused by not adding is_official_build. Play store
    # blocks uploading apps with it set, so there's no risk of it slipping in.
    debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE
    if debuggable_name in app_node.attrib:
      del app_node.attrib[debuggable_name]

    # Trichrome's static library version number is updated daily. To avoid
    # frequent manifest check failures, we remove the exact version number
    # during normalization.
    for node in app_node:
      if (node.tag in ['uses-static-library', 'static-library']
          and '{%s}version' % ANDROID_NAMESPACE in node.keys()
          and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
        node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')

  # We also remove the exact package name (except the one at the root level)
  # to avoid noise during manifest comparison.
  # NOTE(review): assumes the root <manifest> carries a package attribute;
  # str.replace would raise TypeError if |package| were None — confirm callers.
  def blur_package_name(node):
    for key in node.keys():
      node.set(key, node.get(key).replace(package, '$PACKAGE'))

    for child in node:
      blur_package_name(child)

  # We only blur the package names of non-root nodes because they generate a lot
  # of diffs when doing manifest checks for upstream targets. We still want to
  # have 1 piece of package name not blurred just in case the package name is
  # mistakenly changed.
  for child in root:
    blur_package_name(child)

  _SortAndStripElementTree(root)

  # Fix up whitespace/indentation.
  dom = minidom.parseString(ElementTree.tostring(root))
  out_lines = []
  for l in dom.toprettyxml(indent=' ').splitlines():
    if not l or l.isspace():
      continue
    if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES):
      # Long tag: put each attribute on its own line.
      indent = ' ' * l.find('<')
      start_tag, attrs, end_tag = _SplitElement(l)
      out_lines.append('{}{}'.format(indent, start_tag))
      for attribute in attrs:
        out_lines.append('{} {}'.format(indent, attribute))
      out_lines[-1] += '>'
      # Heuristic: Do not allow multi-line tags to be self-closing since these
      # can generally be allowed to have nested elements. When diffing, it adds
      # noise if the base file is self-closing and the non-base file is not
      # self-closing.
      if end_tag == '/>':
        out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
    else:
      out_lines.append(l)

  # Make output more diff-friendly.
  _AddDiffTags(out_lines)

  return '\n'.join(out_lines) + '\n'
diff --git a/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py
new file mode 100755
index 0000000000..52bf458a59
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+from util import manifest_utils
+
+_TEST_MANIFEST = """\
+<?xml version="1.0" ?>
+<manifest package="test.pkg"
+ tools:ignore="MissingVersion"
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools">
+ <!-- Should be one line. -->
+ <uses-sdk android:minSdkVersion="24"
+ android:targetSdkVersion="30"/>
+ <!-- Should have attrs sorted-->
+ <uses-feature android:required="false" android:version="1"
+ android:name="android.hardware.vr.headtracking" />
+ <!-- Should not be wrapped since < 100 chars. -->
+ <application
+ android:name="testname">
+ <activity
+ {extra_activity_attr}
+ android:icon="@drawable/ic_devices_48dp"
+ android:label="label with spaces"
+ android:name="to be hashed"
+ android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+ <intent-filter>
+ {extra_intent_filter_elem}
+ <action android:name="android.intent.action.SEND"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <data android:mimeType="text/plain"/>
+ </intent-filter>
+ </activity>
+ <!-- Should be made non-self-closing. -->
+ <receiver android:exported="false" android:name="\
+org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"/>
+ </application>
+</manifest>
+"""
+
+_TEST_MANIFEST_NORMALIZED = """\
+<?xml version="1.0" ?>
+<manifest
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="test.pkg"
+ tools:ignore="MissingVersion">
+ <uses-feature android:name="android.hardware.vr.headtracking" \
+android:required="false" android:version="1"/>
+ <uses-sdk android:minSdkVersion="24" android:targetSdkVersion="30"/>
+ <application android:name="testname">
+ <activity # DIFF-ANCHOR: {activity_diff_anchor}
+ android:name="to be hashed"
+ {extra_activity_attr}android:icon="@drawable/ic_devices_48dp"
+ android:label="label with spaces"
+ android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+ <intent-filter> # DIFF-ANCHOR: {intent_filter_diff_anchor}
+ {extra_intent_filter_elem}\
+<action android:name="android.intent.action.SEND"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <data android:mimeType="text/plain"/>
+ </intent-filter> # DIFF-ANCHOR: {intent_filter_diff_anchor}
+ </activity> # DIFF-ANCHOR: {activity_diff_anchor}
+ <receiver # DIFF-ANCHOR: ddab3320
+ android:name=\
+"org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"
+ android:exported="false">
+ </receiver> # DIFF-ANCHOR: ddab3320
+ </application>
+</manifest>
+"""
+
+_ACTIVITY_DIFF_ANCHOR = '32b3a641'
+_INTENT_FILTER_DIFF_ANCHOR = '4ee601b7'
+
+
def _CreateTestData(intent_filter_diff_anchor=_INTENT_FILTER_DIFF_ANCHOR,
                    extra_activity_attr='',
                    extra_intent_filter_elem=''):
  """Returns (input manifest, expected normalized manifest) test strings."""
  # Non-empty extras get a continuation so they join the template cleanly.
  if extra_activity_attr:
    extra_activity_attr += '\n '
  if extra_intent_filter_elem:
    extra_intent_filter_elem += '\n '
  extras = dict(extra_activity_attr=extra_activity_attr,
                extra_intent_filter_elem=extra_intent_filter_elem)
  test_manifest = _TEST_MANIFEST.format(**extras)
  expected = _TEST_MANIFEST_NORMALIZED.format(
      activity_diff_anchor=_ACTIVITY_DIFF_ANCHOR,
      intent_filter_diff_anchor=intent_filter_diff_anchor,
      **extras)
  return test_manifest, expected
+
+
class ManifestUtilsTest(unittest.TestCase):
  """Tests for manifest_utils.NormalizeManifest()."""
  # Enable diff output.
  maxDiff = None

  def testNormalizeManifest_golden(self):
    manifest, expected = _CreateTestData()
    self.assertMultiLineEqual(expected,
                              manifest_utils.NormalizeManifest(manifest))

  def testNormalizeManifest_nameUsedForActivity(self):
    manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
    # Checks that the DIFF-ANCHOR does not change with the added attribute.
    self.assertMultiLineEqual(expected,
                              manifest_utils.NormalizeManifest(manifest))

  def testNormalizeManifest_nameNotUsedForIntentFilter(self):
    manifest, expected = _CreateTestData(
        extra_intent_filter_elem='<a/>', intent_filter_diff_anchor='5f5c8a70')
    # Checks that the DIFF-ANCHOR does change with the added element despite
    # having a nested element with an android:name set.
    self.assertMultiLineEqual(expected,
                              manifest_utils.NormalizeManifest(manifest))
+
+
# Allow running the tests directly: python3 manifest_utils_test.py
if __name__ == '__main__':
  unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/md5_check.py b/third_party/libwebrtc/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000000..87ee723c85
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/md5_check.py
@@ -0,0 +1,471 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
def CallAndWriteDepfileIfStale(on_stale_md5,
                               options,
                               record_path=None,
                               input_paths=None,
                               input_strings=None,
                               output_paths=None,
                               force=False,
                               pass_changes=False,
                               track_subpaths_allowlist=None,
                               depfile_deps=None):
  """Wraps CallAndRecordIfStale() and writes a depfile if applicable.

  Depfiles are automatically added to output_paths when present in the
  |options| argument. They are then created after |on_stale_md5| is called.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be
  listed in depfile_deps. It's important to write paths to the depfile that
  are already captured by GN deps since GN args can cause GN deps to change,
  and such changes are not immediately reflected in depfiles
  (http://crbug.com/589311).
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])

  # The running script's own python imports count as inputs.
  input_paths += print_python_deps.ComputePythonDependencies()

  CallAndRecordIfStale(on_stale_md5,
                       record_path=record_path,
                       input_paths=input_paths,
                       input_strings=input_strings,
                       output_paths=output_paths,
                       force=force,
                       pass_changes=pass_changes,
                       track_subpaths_allowlist=track_subpaths_allowlist)

  # Write depfile even when inputs have not changed to ensure build correctness
  # on bots that build with & without patch, and the patch changes the depfile
  # location.
  if getattr(options, 'depfile', None):
    build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
+
+
def CallAndRecordIfStale(function,
                         record_path=None,
                         input_paths=None,
                         input_strings=None,
                         output_paths=None,
                         force=False,
                         pass_changes=False,
                         track_subpaths_allowlist=None):
  """Calls function if outputs are stale.

  Outputs are considered stale if:
  - any output_paths are missing, or
  - the contents of any file within input_paths has changed, or
  - the contents of input_strings has changed.

  To debug which files are out-of-date, set the environment variable:
    PRINT_BUILD_EXPLANATIONS=1

  Args:
    function: The function to call.
    record_path: Path to record metadata.
      Defaults to output_paths[0] + '.md5.stamp'
    input_paths: List of paths to calculate an md5 sum on.
    input_strings: List of strings to record verbatim.
    output_paths: List of output paths.
    force: Whether to treat outputs as missing regardless of whether they
      actually are.
    pass_changes: Whether to pass a Changes instance to |function|.
    track_subpaths_allowlist: Relevant only when pass_changes=True. List of .zip
      files from |input_paths| to make subpath information available for.
  """
  assert record_path or output_paths
  input_paths = input_paths or []
  input_strings = input_strings or []
  output_paths = output_paths or []
  record_path = record_path or output_paths[0] + '.md5.stamp'

  assert record_path.endswith('.stamp'), (
      'record paths must end in \'.stamp\' so that they are easy to find '
      'and delete')

  # Per-entry tracking is only needed when a Changes object will be consulted.
  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
  new_metadata.AddStrings(input_strings)

  zip_allowlist = set(track_subpaths_allowlist or [])
  for path in input_paths:
    # It's faster to md5 an entire zip file than it is to just locate & hash
    # its central directory (which is what this used to do).
    if path in zip_allowlist:
      entries = _ExtractZipEntries(path)
      new_metadata.AddZipFile(path, entries)
    else:
      new_metadata.AddFile(path, _ComputeTagForPath(path))

  old_metadata = None
  force = force or _FORCE_REBUILD
  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
  too_new = []
  # When outputs are missing, don't bother gathering change information.
  if not missing_outputs and os.path.exists(record_path):
    record_mtime = os.path.getmtime(record_path)
    # Outputs newer than the change information must have been modified outside
    # of the build, and should be considered stale.
    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
    if not too_new:
      with open(record_path, 'r') as jsonfile:
        try:
          old_metadata = _Metadata.FromFile(jsonfile)
        except: # pylint: disable=bare-except
          pass # Not yet using new file format.

  changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
  if not changes.HasChanges():
    return

  if PRINT_EXPLANATIONS:
    print('=' * 80)
    print('Target is stale: %s' % record_path)
    print(changes.DescribeDifference())
    print('=' * 80)

  # Only stale targets reach this point: run the build action.
  args = (changes,) if pass_changes else ()
  function(*args)

  # Record the new state only after |function| succeeds.
  with open(record_path, 'w') as f:
    new_metadata.ToFile(f)
+
+
class Changes(object):
  """Provides an API for querying what changed between runs.

  Fixes vs. previous revision: docstring typo ("and API"), and removal of
  stray trailing semicolons in IterAllPaths()/IterAllSubpaths().

  Attributes:
    old_metadata: _Metadata from the previous run (None on first run or when
      the stamp could not be parsed).
    new_metadata: _Metadata for the current run.
    force: Whether a rebuild was forced regardless of changes.
    missing_outputs: Output paths that do not exist.
    too_new: Output paths newer than the stamp file.
  """

  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
               too_new):
    self.old_metadata = old_metadata
    self.new_metadata = new_metadata
    self.force = force
    self.missing_outputs = missing_outputs
    self.too_new = too_new

  def _GetOldTag(self, path, subpath=None):
    # Returns None when there is no previous metadata.
    return self.old_metadata and self.old_metadata.GetTag(path, subpath)

  def HasChanges(self):
    """Returns whether any changes exist."""
    # HasStringChanges() is True whenever old_metadata is None, so the
    # FilesMd5() comparison is only reached with valid old metadata.
    return (self.HasStringChanges()
            or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())

  def HasStringChanges(self):
    """Returns whether string metadata changed."""
    return (self.force or not self.old_metadata
            or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())

  def AddedOrModifiedOnly(self):
    """Returns whether the only changes were from added or modified (sub)files.

    No missing outputs, no removed paths/subpaths.
    """
    if self.HasStringChanges():
      return False
    if any(self.IterRemovedPaths()):
      return False
    for path in self.IterModifiedPaths():
      if any(self.IterRemovedSubpaths(path)):
        return False
    return True

  def IterAllPaths(self):
    """Generator for paths."""
    return self.new_metadata.IterPaths()

  def IterAllSubpaths(self, path):
    """Generator for subpaths."""
    return self.new_metadata.IterSubpaths(path)

  def IterAddedPaths(self):
    """Generator for paths that were added."""
    for path in self.new_metadata.IterPaths():
      if self._GetOldTag(path) is None:
        yield path

  def IterAddedSubpaths(self, path):
    """Generator for paths that were added within the given zip file."""
    for subpath in self.new_metadata.IterSubpaths(path):
      if self._GetOldTag(path, subpath) is None:
        yield subpath

  def IterRemovedPaths(self):
    """Generator for paths that were removed."""
    if self.old_metadata:
      for path in self.old_metadata.IterPaths():
        if self.new_metadata.GetTag(path) is None:
          yield path

  def IterRemovedSubpaths(self, path):
    """Generator for paths that were removed within the given zip file."""
    if self.old_metadata:
      for subpath in self.old_metadata.IterSubpaths(path):
        if self.new_metadata.GetTag(path, subpath) is None:
          yield subpath

  def IterModifiedPaths(self):
    """Generator for paths whose contents have changed."""
    for path in self.new_metadata.IterPaths():
      old_tag = self._GetOldTag(path)
      new_tag = self.new_metadata.GetTag(path)
      if old_tag is not None and old_tag != new_tag:
        yield path

  def IterModifiedSubpaths(self, path):
    """Generator for paths within a zip file whose contents have changed."""
    for subpath in self.new_metadata.IterSubpaths(path):
      old_tag = self._GetOldTag(path, subpath)
      new_tag = self.new_metadata.GetTag(path, subpath)
      if old_tag is not None and old_tag != new_tag:
        yield subpath

  def IterChangedPaths(self):
    """Generator for all changed paths (added/removed/modified)."""
    return itertools.chain(self.IterRemovedPaths(),
                           self.IterModifiedPaths(),
                           self.IterAddedPaths())

  def IterChangedSubpaths(self, path):
    """Generator for paths within a zip that were added/removed/modified."""
    return itertools.chain(self.IterRemovedSubpaths(path),
                           self.IterModifiedSubpaths(path),
                           self.IterAddedSubpaths(path))

  def DescribeDifference(self):
    """Returns a human-readable description of what changed."""
    if self.force:
      return 'force=True'
    elif self.missing_outputs:
      return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs)
    elif self.too_new:
      return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new)
    elif self.old_metadata is None:
      return 'Previous stamp file not found.'

    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
                            self.new_metadata.GetStrings())
      changed = [s for s in ndiff if not s.startswith(' ')]
      return 'Input strings changed:\n ' + '\n '.join(changed)

    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
      return "There's no difference."

    lines = []
    lines.extend('Added: ' + p for p in self.IterAddedPaths())
    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
    for path in self.IterModifiedPaths():
      lines.append('Modified: ' + path)
      lines.extend(' -> Subpath added: ' + p
                   for p in self.IterAddedSubpaths(path))
      lines.extend(' -> Subpath removed: ' + p
                   for p in self.IterRemovedSubpaths(path))
      lines.extend(' -> Subpath modified: ' + p
                   for p in self.IterModifiedSubpaths(path))
    if lines:
      return 'Input files changed:\n ' + '\n '.join(lines)
    return 'I have no idea what changed (there is a bug).'
+
+
class _Metadata(object):
  """Data model for tracking change metadata.

  Args:
    track_entries: Enables per-file change tracking. Slower, but required for
      Changes functionality.
  """
  # Schema:
  # {
  #   "files-md5": "VALUE",
  #   "strings-md5": "VALUE",
  #   "input-files": [
  #     {
  #       "path": "path.jar",
  #       "tag": "{MD5 of entries}",
  #       "entries": [
  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
  #       ]
  #     }, {
  #       "path": "path.txt",
  #       "tag": "{MD5}",
  #     }
  #   ],
  #   "input-strings": ["a", "b", ...],
  # }
  def __init__(self, track_entries=False):
    self._track_entries = track_entries
    # Aggregate digests are computed lazily and cached (FilesMd5/StringsMd5).
    self._files_md5 = None
    self._strings_md5 = None
    self._files = []
    self._strings = []
    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
    self._file_map = None

  @classmethod
  def FromFile(cls, fileobj):
    """Returns a _Metadata initialized from a file object."""
    # File/string lists are optional: they are only present in stamps written
    # with track_entries enabled.
    ret = cls()
    obj = json.load(fileobj)
    ret._files_md5 = obj['files-md5']
    ret._strings_md5 = obj['strings-md5']
    ret._files = obj.get('input-files', [])
    ret._strings = obj.get('input-strings', [])
    return ret

  def ToFile(self, fileobj):
    """Serializes metadata to the given file object."""
    obj = {
        'files-md5': self.FilesMd5(),
        'strings-md5': self.StringsMd5(),
    }
    if self._track_entries:
      # Sort by path so the emitted stamp is deterministic.
      obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
      obj['input-strings'] = self._strings

    json.dump(obj, fileobj, indent=2)

  def _AssertNotQueried(self):
    # Adding inputs after any digest/lookup has been computed would silently
    # invalidate the caches, so treat it as a programming error.
    assert self._files_md5 is None
    assert self._strings_md5 is None
    assert self._file_map is None

  def AddStrings(self, values):
    """Records input strings (stringified; order is significant)."""
    self._AssertNotQueried()
    self._strings.extend(str(v) for v in values)

  def AddFile(self, path, tag):
    """Adds metadata for a non-zip file.

    Args:
      path: Path to the file.
      tag: A short string representative of the file contents.
    """
    self._AssertNotQueried()
    self._files.append({
        'path': path,
        'tag': tag,
    })

  def AddZipFile(self, path, entries):
    """Adds metadata for a zip file.

    Args:
      path: Path to the file.
      entries: List of (subpath, tag) tuples for entries within the zip.
    """
    self._AssertNotQueried()
    # The zip's own tag digests all subpaths followed by all subtags.
    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
                                            (e[1] for e in entries)))
    self._files.append({
        'path': path,
        'tag': tag,
        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
    })

  def GetStrings(self):
    """Returns the list of input strings."""
    return self._strings

  def FilesMd5(self):
    """Lazily computes and returns the aggregate md5 of input files."""
    if self._files_md5 is None:
      # Omit paths from md5 since temporary files have random names.
      self._files_md5 = _ComputeInlineMd5(
          self.GetTag(p) for p in sorted(self.IterPaths()))
    return self._files_md5

  def StringsMd5(self):
    """Lazily computes and returns the aggregate md5 of input strings."""
    if self._strings_md5 is None:
      self._strings_md5 = _ComputeInlineMd5(self._strings)
    return self._strings_md5

  def _GetEntry(self, path, subpath=None):
    """Returns the JSON entry for the given path / subpath, or None."""
    if self._file_map is None:
      # Build the lookup map on first use; _AssertNotQueried() guarantees the
      # underlying lists no longer change once this exists.
      self._file_map = {}
      for entry in self._files:
        self._file_map[(entry['path'], None)] = entry
        for subentry in entry.get('entries', ()):
          self._file_map[(entry['path'], subentry['path'])] = subentry
    return self._file_map.get((path, subpath))

  def GetTag(self, path, subpath=None):
    """Returns the tag for the given path / subpath (None if unknown)."""
    ret = self._GetEntry(path, subpath)
    return ret and ret['tag']

  def IterPaths(self):
    """Returns a generator for all top-level paths."""
    return (e['path'] for e in self._files)

  def IterSubpaths(self, path):
    """Returns a generator for all subpaths in the given zip.

    If the given path is not a zip file or doesn't exist, returns an empty
    iterable.
    """
    outer_entry = self._GetEntry(path)
    if not outer_entry:
      return ()
    subentries = outer_entry.get('entries', [])
    return (entry['path'] for entry in subentries)
+
+
def _ComputeTagForPath(path):
  """Returns a content fingerprint for |path|.

  Small files get an md5 hexdigest of their bytes; files over 1MiB fall back
  to their mtime so that stamp computation stays fast.
  """
  file_stat = os.stat(path)
  if file_stat.st_size > 1024 * 1024:
    # Fallback to mtime for large files so that md5_check does not take too
    # long to run.
    return file_stat.st_mtime
  hasher = hashlib.md5()
  with open(path, 'rb') as fh:
    hasher.update(fh.read())
  return hasher.hexdigest()
+
+
def _ComputeInlineMd5(iterable):
  """Returns the hex md5 of all items, stringified and concatenated."""
  digest = hashlib.md5()
  for element in map(str, iterable):
    digest.update(element.encode('ascii'))
  return digest.hexdigest()
+
+
def _ExtractZipEntries(path):
  """Returns a list of (path, CRC32) of all files within |path|.

  Directories and empty files have a CRC of 0 and are skipped.
  """
  with zipfile.ZipFile(path) as zip_file:
    # compress_type is folded into the tag so a recompressed-but-identical
    # entry still counts as changed.
    return [(info.filename, info.CRC + info.compress_type)
            for info in zip_file.infolist()
            if info.CRC]
diff --git a/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py b/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000000..e11bbd50ed
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
def _WriteZipFile(path, entries):
  """Creates a zip at |path| containing the given (subpath, data) entries."""
  with zipfile.ZipFile(path, 'w') as archive:
    for name, contents in entries:
      archive.writestr(name, contents)
+
+
class TestMd5Check(unittest.TestCase):
  """Integration tests for md5_check.CallAndRecordIfStale()."""

  def setUp(self):
    # Set by MarkCalled() within the helper closure below.
    self.called = False
    self.changes = None

  def testCallAndRecordIfStale(self):
    """Walks through every staleness trigger against one shared stamp file."""
    input_strings = ['string1', 'string2']
    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
    file1_contents = b'input file 1'
    input_file1.write(file1_contents)
    input_file1.flush()
    # Test out empty zip file to start.
    _WriteZipFile(input_file2.name, [])
    input_files = [input_file1.name, input_file2.name]
    zip_paths = [input_file2.name]

    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')

    def CheckCallAndRecord(should_call,
                           message,
                           force=False,
                           outputs_specified=False,
                           outputs_missing=False,
                           expected_changes=None,
                           added_or_modified_only=None,
                           track_subentries=False,
                           output_newer_than_record=False):
      # Runs CallAndRecordIfStale() once and asserts whether the callback
      # fired, optionally matching the Changes description via fnmatch.
      output_paths = None
      if outputs_specified:
        output_file1 = tempfile.NamedTemporaryFile()
        if outputs_missing:
          output_file1.close()  # Gets deleted on close().
        output_paths = [output_file1.name]
      if output_newer_than_record:
        # NOTE: only meaningful together with outputs_specified.
        output_mtime = os.path.getmtime(output_file1.name)
        os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
      else:
        # Touch the record file so it doesn't look like it's older than
        # the output we've just created.
        os.utime(record_path.name, None)

      self.called = False
      self.changes = None
      if expected_changes or added_or_modified_only is not None:
        # pass_changes=True: callback receives a Changes object.
        def MarkCalled(changes):
          self.called = True
          self.changes = changes
      else:
        def MarkCalled():
          self.called = True

      md5_check.CallAndRecordIfStale(
          MarkCalled,
          record_path=record_path.name,
          input_paths=input_files,
          input_strings=input_strings,
          output_paths=output_paths,
          force=force,
          pass_changes=(expected_changes or added_or_modified_only) is not None,
          track_subpaths_allowlist=zip_paths if track_subentries else None)
      self.assertEqual(should_call, self.called, message)
      if expected_changes:
        description = self.changes.DescribeDifference()
        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
                        'Expected %s to match %s' % (
                        repr(description), repr(expected_changes)))
      if should_call and added_or_modified_only is not None:
        self.assertEqual(added_or_modified_only,
                         self.changes.AddedOrModifiedOnly())

    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
                       expected_changes='Previous stamp file not found.',
                       added_or_modified_only=False)
    CheckCallAndRecord(False, 'should not call when nothing changed')
    input_files = input_files[::-1]
    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')

    CheckCallAndRecord(False, 'should not call when nothing changed #2',
                       outputs_specified=True, outputs_missing=False)
    CheckCallAndRecord(True, 'should call when output missing',
                       outputs_specified=True, outputs_missing=True,
                       expected_changes='Outputs do not exist:*',
                       added_or_modified_only=False)
    CheckCallAndRecord(True,
                       'should call when output is newer than record',
                       expected_changes='Outputs newer than stamp file:*',
                       outputs_specified=True,
                       outputs_missing=False,
                       added_or_modified_only=False,
                       output_newer_than_record=True)
    CheckCallAndRecord(True, force=True, message='should call when forced',
                       expected_changes='force=True',
                       added_or_modified_only=False)

    input_file1.write(b'some more input')
    input_file1.flush()
    CheckCallAndRecord(True, 'changed input file should trigger call',
                       expected_changes='*Modified: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_files = input_files[:1]
    CheckCallAndRecord(True, 'removing file should trigger call',
                       expected_changes='*Removed: %s' % input_file1.name,
                       added_or_modified_only=False)

    input_files.append(input_file1.name)
    CheckCallAndRecord(True, 'added input file should trigger call',
                       expected_changes='*Added: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_strings[0] = input_strings[0] + ' a bit longer'
    CheckCallAndRecord(True, 'changed input string should trigger call',
                       expected_changes='*Input strings changed*',
                       added_or_modified_only=False)

    input_strings = input_strings[::-1]
    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
                       expected_changes='*Input strings changed*')

    input_strings = input_strings[:1]
    CheckCallAndRecord(True, 'removing a string should trigger call')

    input_strings.append('a brand new string')
    CheckCallAndRecord(
        True,
        'added input string should trigger call',
        added_or_modified_only=False)

    # Subpath tracking only kicks in with track_subentries=True.
    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
    CheckCallAndRecord(
        True,
        'added subpath should trigger call',
        expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
                                                             'path/1.txt'),
        added_or_modified_only=True,
        track_subentries=True)
    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
    CheckCallAndRecord(
        True,
        'changed subpath should trigger call',
        expected_changes='*Modified: %s*Subpath modified: %s' %
        (input_file2.name, 'path/1.txt'),
        added_or_modified_only=True,
        track_subentries=True)

    _WriteZipFile(input_file2.name, [])
    CheckCallAndRecord(True, 'removed subpath should trigger call',
                       expected_changes='*Modified: %s*Subpath removed: %s' % (
                           input_file2.name, 'path/1.txt'),
                       added_or_modified_only=False)
+
+
# Allows running this suite directly: `python3 md5_check_test.py`.
if __name__ == '__main__':
  unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/parallel.py b/third_party/libwebrtc/build/android/gyp/util/parallel.py
new file mode 100644
index 0000000000..c26875a71c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
+DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
+if DISABLE_ASYNC:
+ logging.warning('Running in synchronous mode.')
+
+_all_pools = None
+_is_child_process = False
+_silence_exceptions = False
+
+# Used to pass parameters to forked processes without pickling.
+_fork_params = None
+_fork_kwargs = None
+
+
class _ImmediateResult(object):
  """Mimics multiprocessing's AsyncResult API for an already-computed value.

  Used by the synchronous (DISABLE_ASYNC) code path.
  """

  def __init__(self, value):
    self._stored_value = value

  def get(self):
    """Returns the precomputed value."""
    return self._stored_value

  def wait(self):
    """No-op: the value is already available."""

  def ready(self):
    """Always True: there is nothing pending."""
    return True

  def successful(self):
    """Always True: the value was computed without error."""
    return True
+
+
class _ExceptionWrapper(object):
  """Used to marshal exception messages back to main process.

  Attributes:
    msg: The formatted traceback from the child process.
    exception_type: Name of a builtin exception type, or None if the original
        type could not be preserved safely.
  """

  def __init__(self, msg, exception_type=None):
    self.msg = msg
    self.exception_type = exception_type

  def MaybeThrow(self):
    """Re-raises the child's exception (as its builtin type) if one is set."""
    if self.exception_type:
      # Fix: use the real |builtins| module. The legacy |__builtins__| name is
      # a plain dict in imported modules (a CPython implementation detail), so
      # getattr() on it raised AttributeError instead of the intended type.
      import builtins  # Local import keeps the module's deps unchanged.
      raise getattr(builtins,
                    self.exception_type)('Originally caused by: ' + self.msg)
+
+
class _FuncWrapper(object):
  """Runs on the fork()'ed side to catch exceptions and spread *args.

  Arguments are read from the module-level |_fork_params| / |_fork_kwargs|,
  which the child inherits at fork() time (avoids pickling them).
  """

  def __init__(self, func):
    global _is_child_process
    _is_child_process = True
    self._func = func

  def __call__(self, index, _=None):
    # Fix: use the real |builtins| module. |__builtins__| is a plain dict in
    # imported modules (CPython detail), so the original hasattr() check was
    # always False there and the exception type was silently dropped.
    import builtins
    try:
      return self._func(*_fork_params[index], **_fork_kwargs)
    except Exception as e:
      # Only keep the exception type for builtin exception types or else risk
      # further marshalling exceptions.
      exception_type = None
      if hasattr(builtins, type(e).__name__):
        exception_type = type(e).__name__
      # multiprocessing is supposed to catch and return exceptions automatically
      # but it doesn't seem to work properly :(.
      return _ExceptionWrapper(traceback.format_exc(), exception_type)
    except:  # pylint: disable=bare-except
      return _ExceptionWrapper(traceback.format_exc())
+
+
class _WrappedResult(object):
  """Allows for host-side logic to be run after child process has terminated.

  * Unregisters associated pool from _all_pools.
  * Raises exception caught by _FuncWrapper.
  """

  def __init__(self, result, pool=None):
    self._result = result
    self._pool = pool

  def get(self):
    """Blocks for the value, re-raising any marshalled child exception."""
    self.wait()
    value = self._result.get()
    _CheckForException(value)
    return value

  def wait(self):
    """Blocks until done, then unregisters the pool (at most once)."""
    self._result.wait()
    finished_pool, self._pool = self._pool, None
    if finished_pool:
      _all_pools.remove(finished_pool)

  def ready(self):
    """Whether the underlying result is available."""
    return self._result.ready()

  def successful(self):
    """Whether the underlying call completed without raising."""
    return self._result.successful()
+
+
def _TerminatePools():
  """Calls .terminate() on all active process pools.

  Not supposed to be necessary according to the docs, but seems to be required
  when child process throws an exception or Ctrl-C is hit.

  Registered via atexit by _MakeProcessPool().
  """
  global _silence_exceptions
  # Set the flag before anything else so _CheckForException() stops
  # reporting during teardown (avoids duplicate/noisy errors).
  _silence_exceptions = True
  # Child processes cannot have pools, but atexit runs this function because
  # it was registered before fork()ing.
  if _is_child_process:
    return

  def close_pool(pool):
    try:
      pool.terminate()
    except:  # pylint: disable=bare-except
      # Best-effort: the pool may already be closed or broken.
      pass

  for i, pool in enumerate(_all_pools):
    # Without calling terminate() on a separate thread, the call can block
    # forever.
    thread = threading.Thread(name='Pool-Terminate-{}'.format(i),
                              target=close_pool,
                              args=(pool, ))
    # Daemon threads: do not keep the interpreter alive during exit.
    thread.daemon = True
    thread.start()
+
+
def _CheckForException(value):
  """Exits the process if |value| is a marshalled child exception.

  The first wrapper seen is re-raised (or logged); later ones are silenced.
  """
  if not isinstance(value, _ExceptionWrapper):
    return
  global _silence_exceptions
  if not _silence_exceptions:
    value.MaybeThrow()
    _silence_exceptions = True
    logging.error('Subprocess raised an exception:\n%s', value.msg)
  sys.exit(1)
+
+
def _MakeProcessPool(job_params, **job_kwargs):
  """Creates a multiprocessing.Pool whose workers inherit |job_params|.

  Parameters are handed to forked workers through the module globals
  |_fork_params| / |_fork_kwargs| (snapshotted at fork() time) instead of
  being pickled, so the statement ordering below is load-bearing: the globals
  must be set before Pool() forks and cleared immediately after.
  """
  global _all_pools
  global _fork_params
  global _fork_kwargs
  # Only one pool may be mid-construction at a time.
  assert _fork_params is None
  assert _fork_kwargs is None
  pool_size = min(len(job_params), multiprocessing.cpu_count())
  _fork_params = job_params
  _fork_kwargs = job_kwargs
  # Pool() forks here; children capture the globals set above.
  ret = multiprocessing.Pool(pool_size)
  _fork_params = None
  _fork_kwargs = None
  if _all_pools is None:
    _all_pools = []
    # Register the teardown hook only once, on first pool creation.
    atexit.register(_TerminatePools)
  _all_pools.append(ret)
  return ret
+
+
def ForkAndCall(func, args):
  """Runs |func| in a fork'ed process.

  Returns:
    A Result object (call .get() to get the return value)
  """
  # Synchronous fallback for debugging (DISABLE_ASYNC=1 in the environment).
  if DISABLE_ASYNC:
    return _WrappedResult(_ImmediateResult(func(*args)))
  pool = _MakeProcessPool([args])  # Omit |kwargs|.
  async_result = pool.apply_async(_FuncWrapper(func), (0, ))
  pool.close()
  return _WrappedResult(async_result, pool=pool)
+
+
def BulkForkAndCall(func, arg_tuples, **kwargs):
  """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.

  Args:
    kwargs: Common keyword arguments to be passed to |func|.

  Yields the return values in order.
  """
  # Materialize so we can len() it and return early on empty input
  # (multiprocessing.Pool(0) would raise).
  arg_tuples = list(arg_tuples)
  if not arg_tuples:
    return

  if DISABLE_ASYNC:
    # Synchronous fallback for debugging (DISABLE_ASYNC=1).
    for args in arg_tuples:
      yield func(*args, **kwargs)
    return

  pool = _MakeProcessPool(arg_tuples, **kwargs)
  wrapped_func = _FuncWrapper(func)
  try:
    # imap preserves input ordering; workers index into |_fork_params|.
    for result in pool.imap(wrapped_func, range(len(arg_tuples))):
      _CheckForException(result)
      yield result
  finally:
    # Runs even if the consumer abandons this generator early.
    pool.close()
    pool.join()
    _all_pools.remove(pool)
diff --git a/third_party/libwebrtc/build/android/gyp/util/protoresources.py b/third_party/libwebrtc/build/android/gyp/util/protoresources.py
new file mode 100644
index 0000000000..272574f117
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/protoresources.py
@@ -0,0 +1,308 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions that modify resources in protobuf format.
+
+Format reference:
+https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto
+"""
+
+import logging
+import os
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+from util import resource_utils
+
+sys.path[1:1] = [
+ # `Resources_pb2` module imports `descriptor`, which imports `six`.
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'),
+ # Make sure the pb2 files are able to import google.protobuf
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
+ 'python'),
+]
+
+from proto import Resources_pb2
+
+# First bytes in an .flat.arsc file.
+# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0)
+_FLAT_ARSC_HEADER = b'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
+
+# The package ID hardcoded for shared libraries. See
+# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value
+# changes make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java.
+SHARED_LIBRARY_HARDCODED_ID = 36
+
+
def _ProcessZip(zip_path, process_func):
  """Filters a .zip file via: new_bytes = process_func(filename, data).

  The archive is rewritten in place only if at least one entry changed.
  """
  dirty = False
  zip_entries = []
  with zipfile.ZipFile(zip_path) as src_zip:
    for info in src_zip.infolist():
      original = src_zip.read(info)
      processed = process_func(info.filename, original)
      # Object identity (not equality) signals "unchanged".
      if processed is not original:
        dirty = True
      zip_entries.append((info, processed))

  if dirty:
    # Overwrite the original zip file with the filtered contents.
    with zipfile.ZipFile(zip_path, 'w') as dst_zip:
      for info, payload in zip_entries:
        dst_zip.writestr(info, payload)
+
+
def _ProcessProtoItem(item):
  """Hardcodes the package ID of |item|'s reference, if it is dynamic."""
  if not item.HasField('ref'):
    return

  ref = item.ref
  # A dynamic attribute has type ATTRIBUTE and a zero package ID (top byte).
  is_dynamic_attr = (ref.type == Resources_pb2.Reference.ATTRIBUTE
                     and not ref.id & 0xff000000)
  if is_dynamic_attr:
    # Package ID lives in the top byte of the resource ID.
    ref.id |= SHARED_LIBRARY_HARDCODED_ID * 0x01000000
    ref.ClearField('is_dynamic')
+
+
def _ProcessProtoValue(value):
  """Applies _ProcessProtoItem to every Item contained in |value|."""
  if value.HasField('item'):
    _ProcessProtoItem(value.item)
    return

  # Otherwise it is a compound value; only these three kinds contain Items.
  compound = value.compound_value
  if compound.HasField('style'):
    contained_items = (entry.item for entry in compound.style.entry)
  elif compound.HasField('array'):
    contained_items = (element.item for element in compound.array.element)
  elif compound.HasField('plural'):
    contained_items = (entry.item for entry in compound.plural.entry)
  else:
    contained_items = ()
  for item in contained_items:
    _ProcessProtoItem(item)
+
+
def _ProcessProtoXmlNode(xml_node):
  """Recursively hardcodes compiled attribute items in an XML proto tree."""
  if not xml_node.HasField('element'):
    # Text/CDATA nodes carry no attributes or children.
    return

  element = xml_node.element
  for attribute in element.attribute:
    _ProcessProtoItem(attribute.compiled_item)
  for child in element.child:
    _ProcessProtoXmlNode(child)
+
+
def _SplitLocaleResourceType(_type, allowed_resource_names):
  """Splits locale specific resources out of |_type| and returns them.

  Any locale specific resources will be removed from |_type|, and a new
  Resources_pb2.Type value will be returned which contains those resources.

  Args:
    _type: A Resources_pb2.Type value
    allowed_resource_names: Names of locale resources that should be kept in the
      main type.

  Returns:
    A new Resources_pb2.Type holding the extracted locale resources, or None
    if |_type| contained no extractable locale resources.
  """
  locale_entries = []
  for entry in _type.entry:
    if entry.name in allowed_resource_names:
      continue

    # First collect all resources values with a locale set.
    # (Collect first, remove afterwards: removing from a repeated field while
    # iterating over it would skip elements.)
    config_values_with_locale = []
    for config_value in entry.config_value:
      if config_value.config.locale:
        config_values_with_locale.append(config_value)

    if config_values_with_locale:
      # Remove the locale resources from the original entry
      for value in config_values_with_locale:
        entry.config_value.remove(value)

      # Add locale resources to a new Entry, and save for later.
      locale_entry = Resources_pb2.Entry()
      locale_entry.CopyFrom(entry)
      del locale_entry.config_value[:]
      locale_entry.config_value.extend(config_values_with_locale)
      locale_entries.append(locale_entry)

  if not locale_entries:
    return None

  # Copy the original type and replace the entries with |locale_entries|.
  locale_type = Resources_pb2.Type()
  locale_type.CopyFrom(_type)
  del locale_type.entry[:]
  locale_type.entry.extend(locale_entries)
  return locale_type
+
+
def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist):
  """Hardcodes dynamic attribute package IDs throughout |table|, in place.

  For bundle modules, additionally splits locale-specific resources into a
  separate "<package>_translations" package with a hardcoded package ID.

  Args:
    table: A Resources_pb2.ResourceTable, modified in place.
    is_bundle_module: True for bundle modules.
    shared_resources_allowlist: Optional path to an R.txt listing resource
        names that must remain in the main package.
  """
  translations_package = None
  if is_bundle_module:
    # A separate top level package will be added to the resources, which
    # contains only locale specific resources. The package ID of the locale
    # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes
    # resources in locale splits to all get assigned
    # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug
    # in shared library bundles where each split APK gets a separate dynamic
    # ID, and cannot be accessed by the main APK.
    translations_package = Resources_pb2.Package()
    translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID
    translations_package.package_name = (table.package[0].package_name +
                                         '_translations')

    # These resources are allowed in the base resources, since they are needed
    # by WebView.
    allowed_resource_names = set()
    if shared_resources_allowlist:
      allowed_resource_names = set(
          resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist))

  for package in table.package:
    for _type in package.type:
      for entry in _type.entry:
        for config_value in entry.config_value:
          _ProcessProtoValue(config_value.value)

      if translations_package is not None:
        locale_type = _SplitLocaleResourceType(_type, allowed_resource_names)
        if locale_type:
          translations_package.type.add().CopyFrom(locale_type)

  # Append the translations package last, once fully populated.
  if translations_package is not None:
    table.package.add().CopyFrom(translations_package)
+
+
def HardcodeSharedLibraryDynamicAttributes(zip_path,
                                           is_bundle_module,
                                           shared_resources_allowlist=None):
  """Hardcodes the package IDs of dynamic attributes and locale resources.

  Hardcoding dynamic attribute package IDs is a workaround for b/147674078,
  which affects Android versions pre-N. Hardcoding locale resource package IDs
  is a workaround for b/155437035, which affects resources built with
  --shared-lib on all Android versions

  Args:
    zip_path: Path to proto APK file.
    is_bundle_module: True for bundle modules.
    shared_resources_allowlist: Set of resource names to not extract out of the
        main package.
  """

  def process_func(filename, data):
    # resources.pb holds the ResourceTable; other .xml entries are compiled
    # proto XML that can also carry attribute references.
    if filename == 'resources.pb':
      table = Resources_pb2.ResourceTable()
      table.ParseFromString(data)
      _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist)
      data = table.SerializeToString()
    elif filename.endswith('.xml') and not filename.startswith('res/raw'):
      # NOTE(review): res/raw entries are skipped — presumably they are not
      # proto-encoded XML; confirm against aapt2 output format.
      xml_node = Resources_pb2.XmlNode()
      xml_node.ParseFromString(data)
      _ProcessProtoXmlNode(xml_node)
      data = xml_node.SerializeToString()
    return data

  _ProcessZip(zip_path, process_func)
+
+
class _ResourceStripper(object):
  """Strips unwanted resources (and style references to them) from a table.

  Args:
    partial_path: Path of the .zip being processed (used only for log lines).
    keep_predicate: Given "type/name" (or "type/name/attr" for style entries),
        returns whether to keep the resource.
  """

  def __init__(self, partial_path, keep_predicate):
    self.partial_path = partial_path
    self.keep_predicate = keep_predicate
    self._has_changes = False

  @staticmethod
  def _IterStyles(entry):
    """Yields every Style message within |entry|'s config values."""
    for config_value in entry.config_value:
      value = config_value.value
      if value.HasField('compound_value'):
        compound_value = value.compound_value
        if compound_value.HasField('style'):
          yield compound_value.style

  def _ReplaceEntries(self, repeated, kept):
    """Replaces repeated field |repeated| with |kept| in place if it shrank."""
    if len(kept) != len(repeated):
      self._has_changes = True
      del repeated[:]
      repeated.extend(kept)

  def _StripStyles(self, entry, type_and_name):
    """Strips style entries that refer to attributes that have been stripped.

    Fix: the inner loop previously reused the name |entry|, shadowing this
    method's parameter; renamed to |style_entry| for clarity.
    """
    for style in self._IterStyles(entry):
      style_entries = style.entry
      kept = []
      for style_entry in style_entries:
        full_name = '{}/{}'.format(type_and_name, style_entry.key.name)
        if not self.keep_predicate(full_name):
          logging.debug('Stripped %s/%s', self.partial_path, full_name)
        else:
          kept.append(style_entry)
      self._ReplaceEntries(style_entries, kept)

  def _StripEntries(self, entries, type_name):
    """Strips entries whose "type/name" fails |keep_predicate|."""
    kept = []
    for entry in entries:
      type_and_name = '{}/{}'.format(type_name, entry.name)
      if not self.keep_predicate(type_and_name):
        logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
      else:
        kept.append(entry)
        # Kept entries may still contain styles referencing stripped attrs.
        self._StripStyles(entry, type_and_name)
    self._ReplaceEntries(entries, kept)

  def StripTable(self, table):
    """Strips |table| in place; returns whether anything was removed."""
    self._has_changes = False
    for package in table.package:
      for resource_type in package.type:
        self._StripEntries(resource_type.entry, resource_type.name)
    return self._has_changes
+
+
def _TableFromFlatBytes(data):
  """Parses a ResourceTable out of an aapt2 container (.arsc.flat) blob.

  Container format reference:
  https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp

  Raises:
    Exception: If |data| does not start with the expected container header.
  """
  size_idx = len(_FLAT_ARSC_HEADER)
  proto_idx = size_idx + 8
  if data[:size_idx] != _FLAT_ARSC_HEADER:
    # Fix: the original message referenced undefined names (|info|,
    # |zip_path|), so a header mismatch raised NameError instead of the
    # intended parse error.
    raise Exception('Invalid aapt2 container header in .arsc.flat data')
  # Size is stored as uint64 (little-endian) immediately after the header.
  size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
  table = Resources_pb2.ResourceTable()
  table.ParseFromString(data[proto_idx:proto_idx + size])
  return table
+
+
def _FlatBytesFromTable(table):
  """Serializes |table| into aapt2 container format: header, size, payload."""
  proto_bytes = table.SerializeToString()
  size_field = struct.pack('<Q', len(proto_bytes))
  # The proto payload is padded out to a 4-byte boundary.
  pad_len = -len(proto_bytes) % 4
  return _FLAT_ARSC_HEADER + size_field + proto_bytes + b'\0' * pad_len
+
+
def StripUnwantedResources(partial_path, keep_predicate):
  """Removes resources from .arsc.flat files inside of a .zip.

  Args:
    partial_path: Path to a .zip containing .arsc.flat entries
    keep_predicate: Given "$partial_path/$res_type/$res_name", returns
      whether to keep the resource.
  """
  stripper = _ResourceStripper(partial_path, keep_predicate)

  def _MaybeStripEntry(filename, data):
    if not filename.endswith('.arsc.flat'):
      return data
    table = _TableFromFlatBytes(data)
    # Reserialize only when something was actually removed, so _ProcessZip
    # can detect "unchanged" via object identity.
    if stripper.StripTable(table):
      return _FlatBytesFromTable(table)
    return data

  _ProcessZip(partial_path, _MaybeStripEntry)
diff --git a/third_party/libwebrtc/build/android/gyp/util/resource_utils.py b/third_party/libwebrtc/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000000..4f64174193
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,1078 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
# A variation of these maps also exists in:
# //base/android/java/src/org/chromium/base/LocaleUtils.java
# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
# Chromium locale names that must be spelled differently for Android
# (legacy codes required by Android releases before 5.0/Lollipop).
_CHROME_TO_ANDROID_LOCALE_MAP = {
    'es-419': 'es-rUS',
    'sr-Latn': 'b+sr+Latn',
    'fil': 'tl',
    'he': 'iw',
    'id': 'in',
    'yi': 'ji',
}
# Maps obsolete Android language codes back to modern Chromium ones.
_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
    'tl': 'fil',
    'iw': 'he',
    'in': 'id',
    'ji': 'yi',
    'no': 'nb',  # 'no' is not a real language. http://crbug.com/920960
}

# All Android resource type names recognized when generating R.java files.
_ALL_RESOURCE_TYPES = {
    'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable',
    'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'macro',
    'menu', 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable',
    'transition', 'xml'
}

# Colon-separated aapt ignore-assets pattern of files to skip within res/.
AAPT_IGNORE_PATTERN = ':'.join([
    '*OWNERS',  # Allow OWNERS files within res/
    'DIR_METADATA',  # Allow DIR_METADATA files within res/
    '*.py',  # PRESUBMIT.py sometimes exist.
    '*.pyc',
    '*~',  # Some editors create these as temp files.
    '.*',  # Never makes sense to include dot(files/dirs).
    '*.d.stamp',  # Ignore stamp files
    '*.backup',  # Some tools create temporary backup files.
])

# Zip comment marking archives that contain multiple res/ directories
# (see _HasMultipleResDirs).
MULTIPLE_RES_MAGIC_STRING = b'magic'
+
+
def ToAndroidLocaleName(chromium_locale):
  """Convert a Chromium locale name into a corresponding Android one."""
  # Should be in sync with build/config/locales.gni.
  # Handle locales that must keep their legacy Android spelling first; these
  # exist to support Android releases *before* 5.0/Lollipop.
  special_case = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
  if special_case:
    return special_case

  # A Chromium locale name is '<lang>' or '<lang>-<region>', where <lang> is
  # a 2- or 3-letter ISO 639-1/639-2 code and <region> is capitalized.
  lang, _, region = chromium_locale.partition('-')
  if not region:
    return lang

  # With a region present, translate newer language tags into obsolete ones
  # (e.g. 'he-IL' -> 'iw-rIL').
  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)

  # The '<lang>-r<region>' form is accepted by every Android version.
  return '{}-r{}'.format(lang, region)
+
+
# ISO 639 language code + optional ("-r" + capitalized region code).
# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
# are supported.
# Group 1 captures the language; group 3 the region (group 2 spans '-r<REG>').
_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')

# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
# be prefixed with 'b+', and may include optional tags.
# e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+
def ToChromiumLocaleName(android_locale):
  """Convert an Android locale name into a Chromium one.

  Args:
    android_locale: Locale in either the legacy '<lang>' / '<lang>-r<REGION>'
      qualifier form or the BCP-47 'b+<lang>[+<script>][+<region>]' form.
  Returns:
    The corresponding Chromium locale name, or None if no language tag could
    be extracted.
  """
  lang = None
  region = None
  script = None
  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
  if m:
    lang = m.group(1)
    if m.group(2):
      region = m.group(3)
  elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
    # Split an Android BCP-47 locale (e.g. b+sr+Latn+RS)
    tags = android_locale.split('+')

    # The Lang tag is always the first tag.
    lang = tags[1]

    # The optional region tag is 2ALPHA or 3DIGIT tag in pos 1 or 2.
    # The optional script tag is 4ALPHA and always in pos 1.
    optional_tags = iter(tags[2:])

    next_tag = next(optional_tags, None)
    if next_tag and len(next_tag) == 4:
      script = next_tag
      next_tag = next(optional_tags, None)
    if next_tag and len(next_tag) < 4:
      region = next_tag

  if not lang:
    return None

  # Special case for es-rUS -> es-419
  if lang == 'es' and region == 'US':
    return 'es-419'

  # Translate obsolete Android language codes (iw, in, ...) back to modern.
  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)

  if script:
    lang = '%s-%s' % (lang, script)

  if not region:
    return lang

  return '%s-%s' % (lang, region)
+
+
def IsAndroidLocaleQualifier(string):
  """Returns true if |string| is a valid Android resource locale qualifier."""
  # Truthy when either the legacy form or the BCP-47 'b+...' form matches.
  legacy_match = _RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
  return legacy_match or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string)
+
+
def FindLocaleInStringResourceFilePath(file_path):
  """Return Android locale name of a string resource file path.

  Args:
    file_path: A file path.
  Returns:
    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
    the value of <locale> (and Android locale qualifier). Otherwise return None.
  """
  if not file_path.endswith('.xml'):
    return None
  parent_dir = os.path.basename(os.path.dirname(file_path))
  prefix = 'values-'
  if not parent_dir.startswith(prefix):
    return None
  candidate = parent_dir[len(prefix):]
  if IsAndroidLocaleQualifier(candidate):
    return candidate
  return None
+
+
def ToAndroidLocaleList(locale_list):
  """Convert a list of Chromium locales into the corresponding Android list."""
  return sorted(map(ToAndroidLocaleName, locale_list))
+
# Represents a line from a R.txt file.
# Fields:
#   java_type: 'int' or 'int[]' (see the regex in _ParseTextSymbolsFile).
#   resource_type: e.g. 'string', 'drawable', 'styleable'.
#   name: The resource name.
#   value: ID literal such as '0x7f010001', or '{ ... }' for arrays.
_TextSymbolEntry = collections.namedtuple('RTextEntry',
    ('java_type', 'resource_type', 'name', 'value'))
+
+
def _GenerateGlobs(pattern):
  """Turns an aapt ignore-assets pattern into a list of globs.

  The pattern is a ':'-separated list usable with build_utils.MatchesGlob.
  A leading '!' on an entry tells aapt to be 'not chatty' (skip logging when
  the file is ignored); since nothing is logged here anyway, the '!' is simply
  stripped. The <dir> and <file> prefixes that aapt supports are assumed to be
  absent from |pattern|.
  """
  stripped = pattern.replace('!', '')
  return stripped.split(':')
+
+
def DeduceResourceDirsFromFileList(resource_files):
  """Return a list of resource directories from a list of resource files."""
  # Order matters and must stay deterministic: same-named files in multiple
  # res/ directories ellide one another (the last one passed wins), so a set
  # or other order-changing container cannot be used here.
  resource_dirs = []
  for resource_path in resource_files:
    # Resources always live exactly one directory below res/.
    root = os.path.dirname(os.path.dirname(resource_path))
    if root not in resource_dirs:
      resource_dirs.append(root)

  # A deduced dir nested inside another indicates some file was not exactly
  # one directory deep under res/. E.g.:
  #   sources = ["java/res/values/foo.xml", "java/res/README.md"]
  #   ^^ makes "java" get detected as a resource directory.
  for inner, outer in itertools.permutations(resource_dirs, 2):
    if not os.path.relpath(inner, outer).startswith('..'):
      bad_sources = (s for s in resource_files
                     if os.path.dirname(os.path.dirname(s)) == outer)
      msg = """\
Resource(s) found that are not in a proper directory structure:
  {}
All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE"."""
      raise Exception(msg.format('\n  '.join(bad_sources)))

  return resource_dirs
+
+
def IterResourceFilesInDirectories(directories,
                                   ignore_pattern=AAPT_IGNORE_PATTERN):
  """Yields (file_path, archive_path) pairs for files under |directories|.

  Files whose archive path matches |ignore_pattern| (an aapt ignore-assets
  pattern) are skipped.
  """
  globs = _GenerateGlobs(ignore_pattern)
  for directory in directories:
    for dirpath, _, filenames in os.walk(directory):
      # Archive paths are relative to the resource directory root.
      rel_dir = os.path.relpath(dirpath, directory)
      for filename in filenames:
        if rel_dir == '.':
          archive_path = filename
        else:
          archive_path = os.path.join(rel_dir, filename)
        if build_utils.MatchesGlob(archive_path, globs):
          continue
        yield os.path.join(dirpath, filename), archive_path
+
+
class ResourceInfoFile(object):
  """Helper for building up .res.info files."""

  def __init__(self):
    # Dict of archive_path -> source_path for the current target.
    self._entries = {}
    # List of (old_archive_path, new_archive_path) tuples.
    self._renames = []
    # We don't currently support using both AddMapping and MergeInfoFile.
    self._add_mapping_was_called = False

  def AddMapping(self, archive_path, source_path):
    """Adds a single |archive_path| -> |source_path| entry.

    Raises:
      Exception: If |archive_path| was already added with a different
        |source_path|.
    """
    self._add_mapping_was_called = True
    # "values/" files do not end up in the apk except through resources.arsc.
    if archive_path.startswith('values'):
      return
    source_path = os.path.normpath(source_path)
    # setdefault returns the existing value when the key is already present.
    new_value = self._entries.setdefault(archive_path, source_path)
    if new_value != source_path:
      raise Exception('Duplicate AddMapping for "{}". old={} new={}'.format(
          archive_path, new_value, source_path))

  def RegisterRename(self, old_archive_path, new_archive_path):
    """Records an archive_path rename.

    |old_archive_path| does not need to currently exist in the mappings. Renames
    are buffered and replayed only when Write() is called.
    """
    # "values/" paths were never recorded by AddMapping, so skip them here too.
    if not old_archive_path.startswith('values'):
      self._renames.append((old_archive_path, new_archive_path))

  def MergeInfoFile(self, info_file_path):
    """Merges the mappings from |info_file_path| into this object.

    Any existing entries are overridden.
    """
    assert not self._add_mapping_was_called
    # Allows clobbering, which is used when overriding resources.
    with open(info_file_path) as f:
      # Each line is "<archive_path>\t<source_path>".
      self._entries.update(l.rstrip().split('\t') for l in f)

  def _ApplyRenames(self):
    """Replays buffered renames onto the entry dict and returns it.

    Leaves the object unusable afterwards (state is cleared).
    """
    applied_renames = set()
    ret = self._entries
    for rename_tup in self._renames:
      # Duplicate entries happen for resource overrides.
      # Use a "seen" set to ensure we still error out if multiple renames
      # happen for the same old_archive_path with different new_archive_paths.
      if rename_tup in applied_renames:
        continue
      applied_renames.add(rename_tup)
      old_archive_path, new_archive_path = rename_tup
      ret[new_archive_path] = ret[old_archive_path]
      del ret[old_archive_path]

    self._entries = None
    self._renames = None
    return ret

  def Write(self, info_file_path):
    """Applies renames and writes out the file.

    No other methods may be called after this.
    """
    entries = self._ApplyRenames()
    lines = []
    for archive_path, source_path in entries.items():
      lines.append('{}\t{}\n'.format(archive_path, source_path))
    with open(info_file_path, 'w') as info_file:
      info_file.writelines(sorted(lines))
+
+
def _ParseTextSymbolsFile(path, fix_package_ids=False):
  """Given an R.txt file, returns a list of _TextSymbolEntry.

  Args:
    path: Input file path.
    fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file
      will be fixed to 0x7f.
  Returns:
    A list of _TextSymbolEntry instances.
  Raises:
    Exception: An unexpected line was detected in the input.
  """
  # Lines look like: "int drawable icon 0x7f010001" or
  # "int[] styleable Foo { 0x..., 0x... }".
  line_re = re.compile(r'(int(?:\[\])?) (\w+) (\w+) (.+)$')
  entries = []
  with open(path) as f:
    for line in f:
      m = line_re.match(line)
      if not m:
        raise Exception('Unexpected line in R.txt: %s' % line)
      java_type, resource_type, name, value = m.groups()
      if fix_package_ids:
        value = _FixPackageIds(value)
      entries.append(_TextSymbolEntry(java_type, resource_type, name, value))
  return entries
+
+
def _FixPackageIds(resource_value):
  """Normalizes 0x00 package-ID bytes in |resource_value| to 0x7f.

  Resource IDs in regular APKs have 0x7f as their first (package id) byte.
  WebView, however, is built as a shared library, so aapt runs with the
  --shared-resources flag, which turns some package ids into 0x00. The
  generated R.java code rewrites the normalized ids to the correct package id
  at runtime.

  Args:
    resource_value: Either a single value such as '0x12345678', or an array
      of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }'.
  Returns:
    The value with every '0x00' package byte replaced by '0x7f'.
  """
  return resource_value.replace('0x00', '0x7f')
+
+
def _GetRTxtResourceNames(r_txt_path):
  """Parse an R.txt file and extract the set of resource names from it."""
  return {e.name for e in _ParseTextSymbolsFile(r_txt_path)}
+
+
def GetRTxtStringResourceNames(r_txt_path):
  """Parse an R.txt file and the list of its string resource names."""
  string_names = {e.name
                  for e in _ParseTextSymbolsFile(r_txt_path)
                  if e.resource_type == 'string'}
  return sorted(string_names)
+
+
def GenerateStringResourcesAllowList(module_r_txt_path, allowlist_r_txt_path):
  """Generate a allowlist of string resource IDs.

  Args:
    module_r_txt_path: Input base module R.txt path.
    allowlist_r_txt_path: Input allowlist R.txt path.
  Returns:
    A dictionary mapping numerical resource IDs to the corresponding
    string resource names. The ID values are taken from string resources in
    |module_r_txt_path| that are also listed by name in |allowlist_r_txt_path|.
  """
  allowlisted_names = set()
  for entry in _ParseTextSymbolsFile(allowlist_r_txt_path):
    if entry.resource_type == 'string':
      allowlisted_names.add(entry.name)

  result = {}
  for entry in _ParseTextSymbolsFile(module_r_txt_path):
    if entry.resource_type == 'string' and entry.name in allowlisted_names:
      # int(value, 0) handles the '0x...' hex prefix.
      result[int(entry.value, 0)] = entry.name
  return result
+
+
class RJavaBuildOptions:
  """A class used to model the various ways to build an R.java file.

  This is used to control which resource ID variables will be final or
  non-final, and whether an onResourcesLoaded() method will be generated
  to adjust the non-final ones, when the corresponding library is loaded
  at runtime.

  Note that by default, all resources are final, and there is no
  method generated, which corresponds to calling ExportNoResources().
  """
  def __init__(self):
    # When True, resources not in the allowlist get final, constant IDs.
    self.has_constant_ids = True
    # Optional set of resource names allowed to be non-final.
    self.resources_allowlist = None
    # Whether to emit an onResourcesLoaded() method.
    self.has_on_resources_loaded = False
    # Whether non-int[] styleable constants are exported as non-final.
    self.export_const_styleable = False
    # Optional package ID byte used for final resources.
    self.final_package_id = None
    # Whether onResourcesLoaded() should have an empty body.
    self.fake_on_resources_loaded = False

  def ExportNoResources(self):
    """Make all resource IDs final, and don't generate a method."""
    self.has_constant_ids = True
    self.resources_allowlist = None
    self.has_on_resources_loaded = False
    self.export_const_styleable = False

  def ExportAllResources(self):
    """Make all resource IDs non-final in the R.java file."""
    self.has_constant_ids = False
    self.resources_allowlist = None

  def ExportSomeResources(self, r_txt_file_path):
    """Only select specific resource IDs to be non-final.

    Args:
      r_txt_file_path: The path to an R.txt file. All resources named
        int it will be non-final in the generated R.java file, all others
        will be final.
    """
    self.has_constant_ids = True
    self.resources_allowlist = _GetRTxtResourceNames(r_txt_file_path)

  def ExportAllStyleables(self):
    """Make all styleable constants non-final, even non-resources ones.

    Resources that are styleable but not of int[] type are not actually
    resource IDs but constants. By default they are always final. Call this
    method to make them non-final anyway in the final R.java file.
    """
    self.export_const_styleable = True

  def GenerateOnResourcesLoaded(self, fake=False):
    """Generate an onResourcesLoaded() method.

    This Java method will be called at runtime by the framework when
    the corresponding library (which includes the R.java source file)
    will be loaded at runtime. This corresponds to the --shared-resources
    or --app-as-shared-lib flags of 'aapt package'.

    if |fake|, then the method will be empty bodied to compile faster. This
    useful for dummy R.java files that will eventually be replaced by real
    ones.
    """
    self.has_on_resources_loaded = True
    self.fake_on_resources_loaded = fake

  def SetFinalPackageId(self, package_id):
    """Sets a package ID to be used for resources marked final."""
    self.final_package_id = package_id

  def _MaybeRewriteRTxtPackageIds(self, r_txt_path):
    """Rewrites package IDs in the R.txt file if necessary.

    If SetFinalPackageId() was called, some of the resource IDs may have had
    their package ID changed. This function rewrites the R.txt file to match
    those changes.
    """
    if self.final_package_id is None:
      return

    entries = _ParseTextSymbolsFile(r_txt_path)
    with open(r_txt_path, 'w') as f:
      for entry in entries:
        value = entry.value
        if self._IsResourceFinal(entry):
          # Replace the default 0x00/0x7f package byte with the configured
          # final package id.
          value = re.sub(r'0x(?:00|7f)',
                         '0x{:02x}'.format(self.final_package_id), value)
        f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type,
                                       entry.name, value))

  def _IsResourceFinal(self, entry):
    """Determines whether a resource should be final or not.

    Args:
      entry: A _TextSymbolEntry instance.
    Returns:
      True iff the corresponding entry should be final.
    """
    if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
      # A styleable constant may be exported as non-final after all.
      return not self.export_const_styleable
    elif not self.has_constant_ids:
      # Every resource is non-final
      return False
    elif not self.resources_allowlist:
      # Constant IDs with no allowlist: every resource stays final.
      return True
    else:
      # Otherwise, only resources named in the allowlist are non-final.
      return entry.name not in self.resources_allowlist
+
+
def CreateRJavaFiles(srcjar_dir,
                     package,
                     main_r_txt_file,
                     extra_res_packages,
                     rjava_build_options,
                     srcjar_out,
                     custom_root_package_name=None,
                     grandparent_custom_package_name=None,
                     extra_main_r_text_files=None,
                     ignore_mismatched_values=False):
  """Create all R.java files for a set of packages and R.txt files.

  Args:
    srcjar_dir: The top-level output directory for the generated files.
    package: Package name for R java source files which will inherit
      from the root R java file.
    main_r_txt_file: The main R.txt file containing the valid values
      of _all_ resource IDs.
    extra_res_packages: A list of extra package names.
    rjava_build_options: An RJavaBuildOptions instance that controls how
      exactly the R.java file is generated.
    srcjar_out: Path of desired output srcjar.
    custom_root_package_name: Custom package name for module root R.java file,
      (eg. vr for gen.vr package).
    grandparent_custom_package_name: Custom root package name for the root
      R.java file to inherit from. DFM root R.java files will have "base"
      as the grandparent_custom_package_name. The format of this package name
      is identical to custom_root_package_name.
      (eg. for vr grandparent_custom_package_name would be "base")
    extra_main_r_text_files: R.txt files to be added to the root R.java file.
    ignore_mismatched_values: If True, ignores if a resource appears multiple
      times with different entry values (useful when all the values are
      dummy anyways).
  Raises:
    Exception if a package name appears several times in |extra_res_packages|
  """
  rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file)

  packages = list(extra_res_packages)

  if package and package not in packages:
    # Sometimes, an apk target and a resources target share the same
    # AndroidManifest.xml and thus |package| will already be in |packages|.
    packages.append(package)

  # Map of (resource_type, name) -> Entry.
  # Contains the correct values for resources.
  all_resources = {}
  all_resources_by_type = collections.defaultdict(list)

  main_r_text_files = [main_r_txt_file]
  if extra_main_r_text_files:
    main_r_text_files.extend(extra_main_r_text_files)
  for r_txt_file in main_r_text_files:
    for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True):
      entry_key = (entry.resource_type, entry.name)
      if entry_key in all_resources:
        if not ignore_mismatched_values:
          assert entry == all_resources[entry_key], (
              'Input R.txt %s provided a duplicate resource with a different '
              'entry value. Got %s, expected %s.' %
              (r_txt_file, entry, all_resources[entry_key]))
      else:
        all_resources[entry_key] = entry
        all_resources_by_type[entry.resource_type].append(entry)
        assert entry.resource_type in _ALL_RESOURCE_TYPES, (
            'Unknown resource type: %s, add to _ALL_RESOURCE_TYPES!' %
            entry.resource_type)

  if custom_root_package_name:
    # Custom package name is available, thus use it for root_r_java_package.
    root_r_java_package = GetCustomPackagePath(custom_root_package_name)
  else:
    # Create a unique name using srcjar_out. Underscores are added to ensure
    # no reserved keywords are used for directory names.
    # Fix: the pattern must be a raw string -- '\w' is an invalid escape
    # sequence in a regular string literal (DeprecationWarning in Python 3).
    root_r_java_package = re.sub(r'[^\w\.]', '', srcjar_out.replace('/', '._'))

  root_r_java_dir = os.path.join(srcjar_dir, *root_r_java_package.split('.'))
  build_utils.MakeDirectory(root_r_java_dir)
  root_r_java_path = os.path.join(root_r_java_dir, 'R.java')
  root_java_file_contents = _RenderRootRJavaSource(
      root_r_java_package, all_resources_by_type, rjava_build_options,
      grandparent_custom_package_name)
  with open(root_r_java_path, 'w') as f:
    f.write(root_java_file_contents)

  for package in packages:
    _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
                           rjava_build_options)
+
+
def _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
                           rjava_build_options):
  """Generates an R.java source file."""
  # One directory level per java package component.
  out_dir = os.path.join(srcjar_dir, *package.split('.'))
  build_utils.MakeDirectory(out_dir)
  contents = _RenderRJavaSource(package, root_r_java_package,
                                rjava_build_options)
  with open(os.path.join(out_dir, 'R.java'), 'w') as f:
    f.write(contents)
+
+
# Resource IDs inside resource arrays are sorted. Application resource IDs
# start with 0x7f but system resource IDs start with 0x01, so system resource
# ids always come first in the array. This function finds the index of the
# first non-system resource id, which is where package ID rewriting must start
# (system resource ids must not be rewritten).
def _GetNonSystemIndex(entry):
  """Get the index of the first application resource ID within a resource
  array."""
  res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
  return next((i for i, res_id in enumerate(res_ids)
               if res_id.startswith('0x7f')), len(res_ids))
+
+
def _RenderRJavaSource(package, root_r_java_package, rjava_build_options):
  """Generates the contents of a R.java file.

  The generated class only subclasses the root package's R inner classes, so
  all resource values live in a single place (the root R.java).
  """
  template = Template(
      """/* AUTO-GENERATED FILE. DO NOT MODIFY. */

package {{ package }};

public final class R {
    {% for resource_type in resource_types %}
    public static final class {{ resource_type }} extends
            {{ root_package }}.R.{{ resource_type }} {}
    {% endfor %}
    {% if has_on_resources_loaded %}
    public static void onResourcesLoaded(int packageId) {
        {{ root_package }}.R.onResourcesLoaded(packageId);
    }
    {% endif %}
}
""",
      trim_blocks=True,
      lstrip_blocks=True)

  return template.render(
      package=package,
      resource_types=sorted(_ALL_RESOURCE_TYPES),
      root_package=root_r_java_package,
      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded)
+
+
def GetCustomPackagePath(package_name):
  """Returns the synthesized java package 'gen.<package_name>_module'."""
  return ''.join(('gen.', package_name, '_module'))
+
+
def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
                           grandparent_custom_package_name):
  """Render an R.java source file. See _CreateRJavaSourceFile for args info."""
  # Partition resources into final and non-final according to the build
  # options; only non-final ones can be patched by onResourcesLoaded().
  final_resources_by_type = collections.defaultdict(list)
  non_final_resources_by_type = collections.defaultdict(list)
  for res_type, resources in all_resources_by_type.items():
    for entry in resources:
      # Entries in stylable that are not int[] are not actually resource ids
      # but constants.
      if rjava_build_options._IsResourceFinal(entry):
        final_resources_by_type[res_type].append(entry)
      else:
        non_final_resources_by_type[res_type].append(entry)

  # Here we diverge from what aapt does. Because we have so many
  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
  # Java imposes. For this reason we split onResourcesLoaded into different
  # methods for each resource type.
  extends_string = ''
  dep_path = ''
  if grandparent_custom_package_name:
    extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
    dep_path = GetCustomPackagePath(grandparent_custom_package_name)

  template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */

package {{ package }};

public final class R {
    {% for resource_type in resource_types %}
    public static class {{ resource_type }} """ + extends_string + """ {
        {% for e in final_resources[resource_type] %}
        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
        {% endfor %}
        {% for e in non_final_resources[resource_type] %}
            {% if e.value != '0' %}
        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
            {% else %}
        public static {{ e.java_type }} {{ e.name }};
            {% endif %}
        {% endfor %}
    }
    {% endfor %}
    {% if has_on_resources_loaded %}
      {% if fake_on_resources_loaded %}
    public static void onResourcesLoaded(int packageId) {
    }
      {% else %}
    private static boolean sResourcesDidLoad;

    private static void patchArray(
            int[] arr, int startIndex, int packageIdTransform) {
        for (int i = startIndex; i < arr.length; ++i) {
            arr[i] ^= packageIdTransform;
        }
    }

    public static void onResourcesLoaded(int packageId) {
        if (sResourcesDidLoad) {
            return;
        }
        sResourcesDidLoad = true;
        int packageIdTransform = (packageId ^ 0x7f) << 24;
        {# aapt2 makes int[] resources refer to other resources by reference
           rather than by value. Thus, need to transform the int[] resources
           first, before the referenced resources are transformed in order to
           ensure the transform applies exactly once.
           See https://crbug.com/1237059 for context.
        #}
        {% for resource_type in resource_types %}
            {% for e in non_final_resources[resource_type] %}
                {% if e.java_type == 'int[]' %}
        patchArray({{ e.resource_type }}.{{ e.name }}, {{ startIndex(e) }}, \
packageIdTransform);
                {% endif %}
            {% endfor %}
        {% endfor %}
        {% for resource_type in resource_types %}
        onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
        {% endfor %}
    }
      {% for res_type in resource_types %}
    private static void onResourcesLoaded{{ res_type|title }} (
            int packageIdTransform) {
        {% for e in non_final_resources[res_type] %}
        {% if res_type != 'styleable' and e.java_type != 'int[]' %}
        {{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;
        {% endif %}
        {% endfor %}
    }
      {% endfor %}
      {% endif %}
    {% endif %}
}
""",
                      trim_blocks=True,
                      lstrip_blocks=True)
  return template.render(
      package=package,
      resource_types=sorted(_ALL_RESOURCE_TYPES),
      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
      fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded,
      final_resources=final_resources_by_type,
      non_final_resources=non_final_resources_by_type,
      startIndex=_GetNonSystemIndex,
      parent_path=dep_path)
+
+
def ExtractBinaryManifestValues(aapt2_path, apk_path):
  """Returns (version_code, version_name, package_name) for the given apk."""
  cmd = [
      aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
  ]
  xmltree = subprocess.check_output(cmd).decode('utf-8')
  version_code = re.search(r'versionCode.*?=(\d*)', xmltree).group(1)
  version_name = re.search(r'versionName.*?="(.*?)"', xmltree).group(1)
  package_name = re.search(r'package.*?="(.*?)"', xmltree).group(1)
  return version_code, version_name, package_name
+
+
def ExtractArscPackage(aapt2_path, apk_path):
  """Returns (package_name, package_id) of resources.arsc from apk_path.

  When the apk does not have any entries in its resources file, in recent aapt2
  versions it will not contain a "Package" line. The package is not even in the
  actual resources.arsc/resources.pb file (which itself is mostly empty). Thus
  return (None, None) when dump succeeds and there are no errors to indicate
  that the package name does not exist in the resources file.
  """
  proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  for line in proc.stdout:
    line = line.decode('utf-8')
    # Package name=org.chromium.webview_shell id=7f
    if line.startswith('Package'):
      # Stop aapt2 as soon as the line of interest is found; the remainder of
      # the (potentially large) dump is not needed.
      proc.kill()
      parts = line.split()
      package_name = parts[1].split('=')[1]
      package_id = parts[2][3:]  # Strip the 'id=' prefix; value is hex.
      return package_name, int(package_id, 16)

  # aapt2 currently crashes when dumping webview resources, but not until after
  # it prints the "Package" line (b/130553900).
  stderr_output = proc.stderr.read().decode('utf-8')
  if stderr_output:
    sys.stderr.write(stderr_output)
    raise Exception('Failed to find arsc package name')
  return None, None
+
+
def _RenameSubdirsWithPrefix(dir_path, prefix):
  """Renames each immediate subdirectory of |dir_path| to '<prefix>_<name>'.

  Non-directory entries are left untouched.

  Returns:
    The list of new (renamed) subdirectory paths.
  """
  renamed = []
  for entry in os.listdir(dir_path):
    src = os.path.join(dir_path, entry)
    if not os.path.isdir(src):
      continue
    dst = os.path.join(dir_path, '{}_{}'.format(prefix, entry))
    os.rename(src, dst)
    renamed.append(dst)
  return renamed
+
+
def _HasMultipleResDirs(zip_path):
  """Checks for magic comment set by prepare_resources.py

  Returns: True iff the zipfile has the magic comment that means it contains
  multiple res/ dirs inside instead of just contents of a single res/ dir
  (without a wrapping res/).
  """
  with zipfile.ZipFile(zip_path) as archive:
    return archive.comment == MULTIPLE_RES_MAGIC_STRING
+
+
def ExtractDeps(dep_zips, deps_dir):
  """Extract a list of resource dependency zip files.

  Args:
    dep_zips: A list of zip file paths, each one will be extracted to
      a subdirectory of |deps_dir|, named after the zip file's path (e.g.
      '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
    deps_dir: Top-level extraction directory.
  Returns:
    The list of all sub-directory paths, relative to |deps_dir|.
  Raises:
    Exception: If a sub-directory already exists with the same name before
      extraction.
  """
  dep_subdirs = []
  for zip_path in dep_zips:
    # Flatten the zip's path into a single directory name.
    flat_name = zip_path.replace(os.path.sep, '_')
    subdir = os.path.join(deps_dir, flat_name)
    if os.path.exists(subdir):
      raise Exception('Resource zip name conflict: ' + flat_name)
    build_utils.ExtractAll(zip_path, path=subdir)
    if _HasMultipleResDirs(zip_path):
      # basename of the directory is used to create a zip during resource
      # compilation, include the path in the basename to help blame errors on
      # the correct target. For example directory 0_res may be renamed
      # chrome_android_chrome_app_java_resources_0_res pointing to the name and
      # path of the android_resources target from whence it came.
      dep_subdirs.extend(_RenameSubdirsWithPrefix(subdir, flat_name))
    else:
      dep_subdirs.append(subdir)
  return dep_subdirs
+
+
class _ResourceBuildContext(object):
  """A temporary directory for packaging and compiling Android resources.

  Args:
    temp_dir: Optional root build directory path. If None, a temporary
      directory will be created, and removed in Close().
  """

  def __init__(self, temp_dir=None, keep_files=False):
    """Initialized the context."""
    # The top-level temporary directory.
    if temp_dir:
      self.temp_dir = temp_dir
      os.makedirs(temp_dir)
    else:
      self.temp_dir = tempfile.mkdtemp()
    # Only auto-created temp dirs (or when keep_files is False) are removed.
    self.remove_on_exit = not keep_files

    # A location to store resources extracted form dependency zip files.
    self.deps_dir = os.path.join(self.temp_dir, 'deps')
    os.mkdir(self.deps_dir)
    # A location to place aapt-generated files.
    self.gen_dir = os.path.join(self.temp_dir, 'gen')
    os.mkdir(self.gen_dir)
    # A location to place generated R.java files.
    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
    os.mkdir(self.srcjar_dir)
    # Temporary file locations.
    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
    self.srcjar_path = os.path.join(self.temp_dir, 'R.srcjar')
    self.info_path = os.path.join(self.temp_dir, 'size.info')
    self.stable_ids_path = os.path.join(self.temp_dir, 'in_ids.txt')
    self.emit_ids_path = os.path.join(self.temp_dir, 'out_ids.txt')
    self.proguard_path = os.path.join(self.temp_dir, 'keeps.flags')
    self.proguard_main_dex_path = os.path.join(self.temp_dir, 'maindex.flags')
    self.arsc_path = os.path.join(self.temp_dir, 'out.ap_')
    self.proto_path = os.path.join(self.temp_dir, 'out.proto.ap_')
    self.optimized_arsc_path = os.path.join(self.temp_dir, 'out.opt.ap_')
    self.optimized_proto_path = os.path.join(self.temp_dir, 'out.opt.proto.ap_')

  def Close(self):
    """Close the context and destroy all temporary files."""
    if self.remove_on_exit:
      shutil.rmtree(self.temp_dir)
+
+
@contextlib.contextmanager
def BuildContext(temp_dir=None, keep_files=False):
  """Context manager yielding a fresh _ResourceBuildContext instance.

  The context's temporary files are destroyed on exit (unless |keep_files|
  is set), even if the managed block raises.
  """
  build_context = None
  try:
    build_context = _ResourceBuildContext(temp_dir, keep_files)
    yield build_context
  finally:
    if build_context is not None:
      build_context.Close()
+
+
def ResourceArgsParser():
  """Create an argparse.ArgumentParser instance with common argument groups.

  Returns:
    A tuple of (parser, in_group, out_group) corresponding to the parser
    instance, and the input and output argument groups for it, respectively.
  """
  arg_parser = argparse.ArgumentParser(description=__doc__)

  # Group creation order matters for --help output, so keep input first.
  in_group = arg_parser.add_argument_group('Input options')
  out_group = arg_parser.add_argument_group('Output options')

  build_utils.AddDepfileOption(out_group)

  in_group.add_argument(
      '--include-resources',
      required=True,
      action="append",
      help='Paths to arsc resource files used to link '
      'against. Can be specified multiple times.')
  in_group.add_argument(
      '--dependencies-res-zips',
      required=True,
      help='Resources zip archives from dependents. Required to '
      'resolve @type/foo references into dependent '
      'libraries.')
  in_group.add_argument(
      '--extra-res-packages',
      help='Additional package names to generate R.java files for.')

  return (arg_parser, in_group, out_group)
+
+
def HandleCommonOptions(options):
  """Handle common command-line options after parsing.

  Args:
    options: the result of parse_args() on the parser returned by
      ResourceArgsParser(). This function updates a few common fields.
  """
  # Each --include-resources value may itself be a GN list; parse every one,
  # then flatten into a single list of paths for easier downstream use.
  parsed_lists = [
      build_utils.ParseGnList(value) for value in options.include_resources
  ]
  options.include_resources = [
      path for gn_list in parsed_lists for path in gn_list
  ]

  options.dependencies_res_zips = build_utils.ParseGnList(
      options.dependencies_res_zips)

  # Don't use [] as default value since some script explicitly pass "".
  if options.extra_res_packages:
    options.extra_res_packages = build_utils.ParseGnList(
        options.extra_res_packages)
  else:
    options.extra_res_packages = []
+
+
def ParseAndroidResourceStringsFromXml(xml_data):
  """Parse an Android xml resource file and extract strings from it.

  Args:
    xml_data: XML file data.
  Returns:
    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
    corresponding to namespaces declared in the <resources> element.
  """
  # NOTE: This uses regular expression matching because parsing with something
  # like ElementTree makes it tedious to properly parse some of the structured
  # text found in string resources, e.g.:
  #   <string msgid="3300176832234831527" \
  #           name="abc_shareactionprovider_share_with_application">\
  #       "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
  #   </string>
  result = {}

  # Find <resources> start tag and extract namespaces from it.
  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
  if not m:
    raise Exception('<resources> start tag expected: ' + xml_data)
  input_data = xml_data[m.end():]
  resource_attrs = m.group(1)
  # Raw string: \s and \w are regex escapes, not Python string escapes
  # (the non-raw form triggers a DeprecationWarning on Python >= 3.6 and is
  # slated to become a SyntaxError).
  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
  namespaces = {}
  while resource_attrs:
    m = re_namespace.match(resource_attrs)
    if not m:
      break
    namespaces[m.group(2)] = m.group(3)
    resource_attrs = resource_attrs[m.end(1):]

  # Find each string element now.
  re_string_element_start = re.compile(
      r'<string ([^>]* )?name="([^">]+)"[^>]*>')
  re_string_element_end = re.compile('</string>')
  while input_data:
    m = re_string_element_start.search(input_data)
    if not m:
      break
    name = m.group(2)
    input_data = input_data[m.end():]
    m2 = re_string_element_end.search(input_data)
    if not m2:
      raise Exception('Expected closing string tag: ' + input_data)
    text = input_data[:m2.start()]
    input_data = input_data[m2.end():]
    # Strip the optional surrounding double-quotes from the resource value.
    if len(text) and text[0] == '"' and text[-1] == '"':
      text = text[1:-1]
    result[name] = text

  return result, namespaces
+
+
def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
  """Generate an XML text corresponding to an Android resource strings map.

  Args:
    names_to_utf8_text: A dictionary mapping resource names to localized
      text (encoded as UTF-8).
    namespaces: A map of namespace prefix to URL.
  Returns:
    New non-Unicode string containing an XML data structure describing the
    input as an Android resource .xml file.
  """
  # Build the document as a list of fragments and join once at the end.
  chunks = ['<?xml version="1.0" encoding="utf-8"?>\n']
  root_tag = '<resources'
  if namespaces:
    for prefix, url in sorted(namespaces.items()):
      root_tag += ' xmlns:%s="%s"' % (prefix, url)
  chunks.append(root_tag + '>\n')
  if not names_to_utf8_text:
    chunks.append('<!-- this file intentionally empty -->\n')
  else:
    for name, utf8_text in sorted(names_to_utf8_text.items()):
      chunks.append('<string name="%s">"%s"</string>\n' % (name, utf8_text))
  chunks.append('</resources>\n')
  return ''.join(chunks).encode('utf8')
+
+
def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
  """Remove unwanted localized strings from an Android resource .xml file.

  This function takes a |string_predicate| callable object that will
  receive a resource string name, and should return True iff the
  corresponding <string> element should be kept in the file.

  Args:
    xml_file_path: Android resource strings xml file path.
    string_predicate: A predicate function which will receive the string name
      and shall return True iff the corresponding <string> element should be
      kept. The file is only rewritten (in place) if at least one string was
      removed.
  """
  with open(xml_file_path) as f:
    xml_data = f.read()
  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)

  string_deletion = False
  for name in list(strings_map.keys()):
    if not string_predicate(name):
      del strings_map[name]
      string_deletion = True

  # Avoid touching the file (and its timestamp) when nothing changed.
  if string_deletion:
    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
    with open(xml_file_path, 'wb') as f:
      f.write(new_xml_data)
diff --git a/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000000..62d5b431e9
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<!-- this file intentionally empty -->
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+ 'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+ 'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+ 'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+ 'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_ALLOWLIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test allowlist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_ALLOWLIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string ThisStringDoesNotAppear 0x7f0fffff
+'''
+
+_TEST_R_TEXT_RESOURCES_IDS = {
+ 0x7f0c0105: 'AllowedDomainsForAppsDesc',
+ 0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
+}
+
+# Names of string resources in _TEST_R_TXT, should be sorted!
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
+ 'AllowedDomainsForAppsDesc',
+ 'AllowedDomainsForAppsTitle',
+ 'AlternateErrorPagesEnabledDesc',
+ 'AlternateErrorPagesEnabledTitle',
+ 'AuthAndroidNegotiateAccountTypeDesc',
+])
+
+
def _CreateTestFile(tmp_dir, file_name, file_data):
  """Writes |file_data| to a file named |file_name| inside |tmp_dir|.

  Returns the path of the created file.
  """
  destination = os.path.join(tmp_dir, file_name)
  with open(destination, 'wt') as out_file:
    out_file.write(file_data)
  return destination
+
+
+
class ResourceUtilsTest(unittest.TestCase):
  """Unit tests for the standalone helpers in util/resource_utils.py."""

  def test_GetRTxtStringResourceNames(self):
    # Only 'string' entries from the R.txt should be returned, sorted.
    with build_utils.TempDir() as tmp_dir:
      tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
      self.assertListEqual(
          resource_utils.GetRTxtStringResourceNames(tmp_file),
          _TEST_R_TXT_STRING_RESOURCE_NAMES)

  def test_GenerateStringResourcesAllowList(self):
    # Allowlist entries of non-'string' types and names absent from the
    # module R.txt must be ignored (see _TEST_ALLOWLIST_R_TXT comment).
    with build_utils.TempDir() as tmp_dir:
      tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
      tmp_allowlist_rtxt_file = _CreateTestFile(tmp_dir, "test_allowlist_R.txt",
                                                _TEST_ALLOWLIST_R_TXT)
      self.assertDictEqual(
          resource_utils.GenerateStringResourcesAllowList(
              tmp_module_rtxt_file, tmp_allowlist_rtxt_file),
          _TEST_R_TEXT_RESOURCES_IDS)

  def test_IsAndroidLocaleQualifier(self):
    # Covers both legacy ('en-rUS') and BCP-47 ('b+en+US') qualifier forms.
    good_locales = [
        'en',
        'en-rUS',
        'fil',
        'fil-rPH',
        'iw',
        'iw-rIL',
        'b+en',
        'b+en+US',
        'b+ja+Latn',
        'b+ja+JP+Latn',
        'b+cmn+Hant-TW',
    ]
    bad_locales = [
        'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
    ]
    for locale in good_locales:
      self.assertTrue(
          resource_utils.IsAndroidLocaleQualifier(locale),
          msg="'%s' should be a good locale!" % locale)

    for locale in bad_locales:
      self.assertFalse(
          resource_utils.IsAndroidLocaleQualifier(locale),
          msg="'%s' should be a bad locale!" % locale)

  def test_ToAndroidLocaleName(self):
    # Chromium -> Android conversion, including legacy aliases
    # (fil->tl, he->iw, id->in, yi->ji).
    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
        'en': 'en',
        'en-US': 'en-rUS',
        'en-FOO': 'en-rFOO',
        'fil': 'tl',
        'tl': 'tl',
        'he': 'iw',
        'he-IL': 'iw-rIL',
        'id': 'in',
        'id-BAR': 'in-rBAR',
        'nb': 'nb',
        'yi': 'ji'
    }
    for chromium_locale, android_locale in \
        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.items():
      result = resource_utils.ToAndroidLocaleName(chromium_locale)
      self.assertEqual(result, android_locale)

  def test_ToChromiumLocaleName(self):
    # Android -> Chromium conversion; None means "no Chromium equivalent".
    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
        'foo': 'foo',
        'foo-rBAR': 'foo-BAR',
        'b+lll': 'lll',
        'b+ll+Extra': 'll',
        'b+ll+RR': 'll-RR',
        'b+lll+RR+Extra': 'lll-RR',
        'b+ll+RRR+Extra': 'll-RRR',
        'b+ll+Ssss': 'll-Ssss',
        'b+ll+Ssss+Extra': 'll-Ssss',
        'b+ll+Ssss+RR': 'll-Ssss-RR',
        'b+ll+Ssss+RRR': 'll-Ssss-RRR',
        'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR',
        'b+ll+Whatever': 'll',
        'en': 'en',
        'en-rUS': 'en-US',
        'en-US': None,
        'en-FOO': None,
        'en-rFOO': 'en-FOO',
        'es-rES': 'es-ES',
        'es-rUS': 'es-419',
        'tl': 'fil',
        'fil': 'fil',
        'iw': 'he',
        'iw-rIL': 'he-IL',
        'b+iw+IL': 'he-IL',
        'in': 'id',
        'in-rBAR': 'id-BAR',
        'id-rBAR': 'id-BAR',
        'nb': 'nb',
        'no': 'nb',  # http://crbug.com/920960
    }
    for android_locale, chromium_locale in \
        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.items():
      result = resource_utils.ToChromiumLocaleName(android_locale)
      self.assertEqual(result, chromium_locale)

  def test_FindLocaleInStringResourceFilePath(self):
    # Locale must come from the values-* directory directly containing the
    # file; deeper sub-directories do not count.
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values/whatever.xml'))
    self.assertEqual(
        'foo',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/whatever.xml'))
    self.assertEqual(
        'foo-rBAR',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo-rBAR/whatever.xml'))
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/ignore-subdirs/whatever.xml'))

  def test_ParseAndroidResourceStringsFromXml(self):
    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
        _TEST_XML_INPUT_1)
    self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)

  def test_GenerateAndroidResourceStringsXml(self):
    # First, an empty strings map, with no namespaces.
    result = resource_utils.GenerateAndroidResourceStringsXml({})
    self.assertEqual(result.decode('utf8'), _TEST_XML_OUTPUT_EMPTY)

    # Round-trip: generating from the parsed map reproduces the input.
    result = resource_utils.GenerateAndroidResourceStringsXml(
        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
    self.assertEqual(result.decode('utf8'), _TEST_XML_INPUT_1)

  @staticmethod
  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
    # Helper: writes a values-<locale>/strings.xml file and returns its path.
    values_dir = os.path.join(output_dir, 'values-' + locale)
    build_utils.MakeDirectory(values_dir)
    file_path = os.path.join(values_dir, 'strings.xml')
    with open(file_path, 'wb') as f:
      file_data = resource_utils.GenerateAndroidResourceStringsXml(
          string_map, namespaces)
      f.write(file_data)
    return file_path

  def _CheckTestResourceFile(self, file_path, expected_data):
    # Helper: asserts that |file_path| holds exactly |expected_data|.
    with open(file_path) as f:
      file_data = f.read()
    self.assertEqual(file_data, expected_data)

  def test_FilterAndroidResourceStringsXml(self):
    # Strings outside the allowlist must be removed from the file in place.
    with build_utils.TempDir() as tmp_path:
      test_file = self._CreateTestResourceFile(
          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
      resource_utils.FilterAndroidResourceStringsXml(
          test_file, lambda x: x in _TEST_RESOURCES_ALLOWLIST_1)
      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
# Allow running the suite directly: `python resource_utils_test.py`.
if __name__ == '__main__':
  unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/resources_parser.py b/third_party/libwebrtc/build/android/gyp/util/resources_parser.py
new file mode 100644
index 0000000000..8d8d69cce8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resources_parser.py
@@ -0,0 +1,142 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import re
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
# One parsed R.txt record, e.g. ('int', 'string', 'app_name', '0x7f010001').
_TextSymbolEntry = collections.namedtuple(
    'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))

# Placeholder values used for every generated entry; consumers of this R.txt
# presumably only rely on the type/name columns (real IDs assigned elsewhere
# in the build) -- see WriteRTxtFile.
_DUMMY_RTXT_ID = '0x7f010001'
_DUMMY_RTXT_INDEX = '1'
+
+
def _ResourceNameToJavaSymbol(resource_name):
  """Returns |resource_name| with '.' and ':' replaced by '_'.

  This turns resource names like 'android:attr' into valid Java identifiers.
  """
  # Raw string; '.' needs no escaping inside a character class (the previous
  # non-raw '[\.:]' form carried a redundant escape).
  return re.sub(r'[.:]', '_', resource_name)
+
+
class RTxtGenerator(object):
  """Generates a placeholder R.txt file by scanning resource directories.

  The emitted entries use dummy ID values (_DUMMY_RTXT_ID/_DUMMY_RTXT_INDEX);
  only the resource types and names are expected to be meaningful to
  consumers of the generated file.
  """

  def __init__(self,
               res_dirs,
               ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN):
    """Args:
      res_dirs: List of resource directory paths to scan.
      ignore_pattern: aapt-style ignore pattern for files to skip.
    """
    self.res_dirs = res_dirs
    self.ignore_pattern = ignore_pattern

  def _ParseDeclareStyleable(self, node):
    """Returns the set of R.txt entries for a <declare-styleable> element."""
    ret = set()
    styleable_name = _ResourceNameToJavaSymbol(node.attrib['name'])
    ret.add(
        _TextSymbolEntry('int[]', 'styleable', styleable_name,
                         '{{{}}}'.format(_DUMMY_RTXT_ID)))
    for child in node:
      if child.tag == 'eat-comment':
        continue
      if child.tag != 'attr':
        # This parser expects everything inside <declare-styleable/> to be
        # either an attr or an eat-comment. If new resource xml files are
        # added that do not conform to this, this parser needs updating.
        raise Exception(
            'Unexpected tag {} inside <declare-styleable/>'.format(child.tag))
      entry_name = '{}_{}'.format(
          styleable_name, _ResourceNameToJavaSymbol(child.attrib['name']))
      ret.add(
          _TextSymbolEntry('int', 'styleable', entry_name, _DUMMY_RTXT_INDEX))
      # android:-namespaced attrs belong to the framework, not this package.
      if not child.attrib['name'].startswith('android:'):
        resource_name = _ResourceNameToJavaSymbol(child.attrib['name'])
        ret.add(_TextSymbolEntry('int', 'attr', resource_name, _DUMMY_RTXT_ID))
      for entry in child:
        if entry.tag not in ('enum', 'flag'):
          # This parser expects everything inside <attr/> to be either an
          # <enum/> or an <flag/>. If new resource xml files are added that do
          # not conform to this, this parser needs updating.
          raise Exception('Unexpected tag {} inside <attr/>'.format(entry.tag))
        resource_name = _ResourceNameToJavaSymbol(entry.attrib['name'])
        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
    return ret

  def _ExtractNewIdsFromNode(self, node):
    """Recursively collects @+id/ declarations from an XML element tree."""
    ret = set()
    # Sometimes there are @+id/ in random attributes (not just in android:id)
    # and apparently that is valid. See:
    # https://developer.android.com/reference/android/widget/RelativeLayout.LayoutParams.html
    for value in node.attrib.values():
      if value.startswith('@+id/'):
        resource_name = value[5:]
        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
    for child in node:
      ret.update(self._ExtractNewIdsFromNode(child))
    return ret

  def _ExtractNewIdsFromXml(self, xml_path):
    """Collects @+id/ declarations from a single XML file."""
    root = ElementTree.parse(xml_path).getroot()
    return self._ExtractNewIdsFromNode(root)

  def _ParseValuesXml(self, xml_path):
    """Returns the R.txt entries declared in a values/*.xml file."""
    ret = set()
    root = ElementTree.parse(xml_path).getroot()
    assert root.tag == 'resources'
    for child in root:
      if child.tag == 'eat-comment':
        # eat-comment is just a dummy documentation element.
        continue
      if child.tag == 'skip':
        # skip is just a dummy element.
        continue
      if child.tag == 'declare-styleable':
        ret.update(self._ParseDeclareStyleable(child))
      else:
        if child.tag == 'item':
          resource_type = child.attrib['type']
        elif child.tag in ('array', 'integer-array', 'string-array'):
          resource_type = 'array'
        else:
          resource_type = child.tag
        name = _ResourceNameToJavaSymbol(child.attrib['name'])
        ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID))
    return ret

  def _CollectResourcesListFromDirectory(self, res_dir):
    """Walks one resource directory and returns all entries found in it."""
    ret = set()
    # NOTE: uses a private resource_utils helper; keep in sync with it.
    globs = resource_utils._GenerateGlobs(self.ignore_pattern)
    for root, _, files in os.walk(res_dir):
      # The resource type is the directory name, minus any -qualifier suffix
      # (e.g. 'values-en-rUS' -> 'values').
      resource_type = os.path.basename(root)
      if '-' in resource_type:
        resource_type = resource_type[:resource_type.index('-')]
      for f in files:
        if build_utils.MatchesGlob(f, globs):
          continue
        if resource_type == 'values':
          ret.update(self._ParseValuesXml(os.path.join(root, f)))
        else:
          if '.' in f:
            resource_name = f[:f.index('.')]
          else:
            resource_name = f
          ret.add(
              _TextSymbolEntry('int', resource_type, resource_name,
                               _DUMMY_RTXT_ID))
          # Other types not just layouts can contain new ids (eg: Menus and
          # Drawables). Just in case, look for new ids in all files.
          if f.endswith('.xml'):
            ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f)))
    return ret

  def _CollectResourcesListFromDirectories(self):
    """Returns the union of entries found across all self.res_dirs."""
    ret = set()
    for res_dir in self.res_dirs:
      ret.update(self._CollectResourcesListFromDirectory(res_dir))
    return ret

  def WriteRTxtFile(self, rtxt_path):
    """Scans self.res_dirs and atomically writes the R.txt to |rtxt_path|."""
    resources = self._CollectResourcesListFromDirectories()
    with build_utils.AtomicOutput(rtxt_path, mode='w') as f:
      for resource in resources:
        line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
            resource)
        f.write(line)
diff --git a/third_party/libwebrtc/build/android/gyp/util/server_utils.py b/third_party/libwebrtc/build/android/gyp/util/server_utils.py
new file mode 100644
index 0000000000..e050ef6552
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/server_utils.py
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
import contextlib
import errno
import json
import os
import socket
+
# Use a unix abstract domain socket:
# https://man7.org/linux/man-pages/man7/unix.7.html#:~:text=abstract:
SOCKET_ADDRESS = '\0chromium_build_server_socket'
BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER'


def MaybeRunCommand(name, argv, stamp_file):
  """Returns True if the command was successfully sent to the build server.

  Args:
    name: Friendly name identifying this task to the server.
    argv: Command line for the server to run.
    stamp_file: Stamp file path forwarded to the server.
  """

  # When the build server runs a command, it sets this environment variable.
  # This prevents infinite recursion where the script sends a request to the
  # build server, then the build server runs the script, and then the script
  # sends another request to the build server.
  if BUILD_SERVER_ENV_VARIABLE in os.environ:
    return False
  with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
    try:
      sock.connect(SOCKET_ADDRESS)
      sock.sendall(
          json.dumps({
              'name': name,
              'cmd': argv,
              'cwd': os.getcwd(),
              'stamp_file': stamp_file,
          }).encode('utf8'))
    except socket.error as e:
      # Connection refused: either the server has not been started or the
      # server is not currently accepting new connections.
      if e.errno == errno.ECONNREFUSED:
        return False
      # Bare raise preserves the original traceback.
      raise
  return True
diff --git a/third_party/libwebrtc/build/android/gyp/util/zipalign.py b/third_party/libwebrtc/build/android/gyp/util/zipalign.py
new file mode 100644
index 0000000000..c5c4ea88c6
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/zipalign.py
@@ -0,0 +1,97 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+
# Size in bytes of the fixed portion of a zip local file header (the
# variable-length filename and extra field follow it).
_FIXED_ZIP_HEADER_LEN = 30
+
+
def _PatchedDecodeExtra(self):
  """Replacement for zipfile.ZipInfo._decodeExtra on Python < 3.4.

  Tolerates the not-quite-valid extra fields written by zipalign by walking
  the field as (header-id, length, data) records and only interpreting the
  ZIP64 record (header id 1). See https://bugs.python.org/issue14315.
  """
  # Try to decode the extra field.
  extra = self.extra
  unpack = struct.unpack
  # Each record: <header-id: 2 bytes><data-length: 2 bytes><data: length>.
  while len(extra) >= 4:
    tp, ln = unpack('<HH', extra[:4])
    if tp == 1:
      # ZIP64 record: up to three little-endian 64-bit counts, present only
      # for the header fields that overflowed their 32-bit slots below.
      if ln >= 24:
        counts = unpack('<QQQ', extra[4:28])
      elif ln == 16:
        counts = unpack('<QQ', extra[4:20])
      elif ln == 8:
        counts = unpack('<Q', extra[4:12])
      elif ln == 0:
        counts = ()
      else:
        raise RuntimeError("Corrupt extra field %s" % (ln, ))

      idx = 0

      # ZIP64 extension (large files and/or large archives)
      if self.file_size in (0xffffffffffffffff, 0xffffffff):
        self.file_size = counts[idx]
        idx += 1

      if self.compress_size == 0xffffffff:
        self.compress_size = counts[idx]
        idx += 1

      if self.header_offset == 0xffffffff:
        self.header_offset = counts[idx]
        idx += 1

    # Skip to the next record, whatever its type.
    extra = extra[ln + 4:]
+
+
def ApplyZipFileZipAlignFix():
  """Fix zipfile.ZipFile() to be able to open zipaligned .zip files.

  Android's zip alignment uses not-quite-valid zip headers to perform
  alignment. Python < 3.4 crashes when trying to load them.
  https://bugs.python.org/issue14315
  """
  if sys.version_info >= (3, 4):
    # Upstream Python already contains the fix.
    return
  # pylint: disable=protected-access
  zipfile.ZipInfo._decodeExtra = _PatchedDecodeExtra
+
+
def _SetAlignment(zip_obj, zip_info, alignment):
  """Sets a ZipInfo's extra field such that the file will be aligned.

  Args:
    zip_obj: The ZipFile object that is being written.
    zip_info: The ZipInfo object about to be written.
    alignment: The amount of alignment (e.g. 4, or 4*1024).
  """
  # The entry's data starts after the fixed local header and the filename.
  data_offset = (
      zip_obj.fp.tell() + _FIXED_ZIP_HEADER_LEN + len(zip_info.filename))
  padding_needed = -data_offset % alignment

  # Python writes |extra| to both the local file header and the central
  # directory's file header. Android's zipalign tool writes only to the
  # local file header, so there is more overhead in using python to align.
  zip_info.extra = b'\0' * padding_needed
+
+
def AddToZipHermetic(zip_file,
                     zip_path,
                     src_path=None,
                     data=None,
                     compress=None,
                     alignment=None):
  """Same as build_utils.AddToZipHermetic(), but with alignment.

  Args:
    alignment: If set, align the data of the entry to this many bytes.
  """
  entry_info = build_utils.HermeticZipInfo(filename=zip_path)
  if alignment:
    # Pad the entry's extra field before handing it to the writer.
    _SetAlignment(zip_file, entry_info, alignment)
  build_utils.AddToZipHermetic(
      zip_file, entry_info, src_path=src_path, data=data, compress=compress)