summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/build/android/gyp
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
commit43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree620249daf56c0258faa40cbdcf9cfba06de2a846 /third_party/libwebrtc/build/android/gyp
parentInitial commit. (diff)
downloadfirefox-upstream.tar.xz
firefox-upstream.zip
Adding upstream version 110.0.1.upstream/110.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/build/android/gyp')
-rw-r--r--third_party/libwebrtc/build/android/gyp/OWNERS4
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/aar.py210
-rw-r--r--third_party/libwebrtc/build/android/gyp/aar.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/aidl.py64
-rw-r--r--third_party/libwebrtc/build/android/gyp/aidl.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/allot_native_libraries.py185
-rw-r--r--third_party/libwebrtc/build/android/gyp/allot_native_libraries.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/apkbuilder.py561
-rw-r--r--third_party/libwebrtc/build/android/gyp/apkbuilder.pydeps9
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/assert_static_initializers.py187
-rw-r--r--third_party/libwebrtc/build/android/gyp/assert_static_initializers.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/bundletool.py46
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/bytecode_processor.py78
-rw-r--r--third_party/libwebrtc/build/android/gyp/bytecode_processor.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/bytecode_rewriter.py37
-rw-r--r--third_party/libwebrtc/build/android/gyp/bytecode_rewriter.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/check_flag_expectations.py132
-rw-r--r--third_party/libwebrtc/build/android/gyp/check_flag_expectations.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/compile_java.py787
-rw-r--r--third_party/libwebrtc/build/android/gyp/compile_java.pydeps30
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/compile_resources.py1032
-rw-r--r--third_party/libwebrtc/build/android/gyp/compile_resources.pydeps39
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/copy_ex.py129
-rw-r--r--third_party/libwebrtc/build/android/gyp/copy_ex.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_apk_operations_script.py88
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_apk_operations_script.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_app_bundle.py543
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_app_bundle.pydeps49
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_app_bundle_apks.py53
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.pydeps37
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.py122
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_java_binary_script.py120
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_java_binary_script.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_r_java.py62
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_r_java.pydeps31
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_r_txt.py31
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_r_txt.pydeps32
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_size_info_files.py195
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_size_info_files.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/create_ui_locale_resources.py87
-rw-r--r--third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.pydeps31
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/desugar.py67
-rw-r--r--third_party/libwebrtc/build/android/gyp/desugar.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/dex.py650
-rw-r--r--third_party/libwebrtc/build/android/gyp/dex.pydeps10
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/dex_jdk_libs.py93
-rw-r--r--third_party/libwebrtc/build/android/gyp/dex_jdk_libs.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/dexsplitter.py132
-rw-r--r--third_party/libwebrtc/build/android/gyp/dexsplitter.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/dist_aar.py159
-rw-r--r--third_party/libwebrtc/build/android/gyp/dist_aar.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/extract_unwind_tables.py283
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/extract_unwind_tables_tests.py120
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/filter_zip.py65
-rw-r--r--third_party/libwebrtc/build/android/gyp/filter_zip.pydeps6
-rw-r--r--third_party/libwebrtc/build/android/gyp/finalize_apk.py78
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/find.py33
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/gcc_preprocess.py63
-rw-r--r--third_party/libwebrtc/build/android/gyp/gcc_preprocess.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/generate_android_wrapper.py42
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/generate_linker_version_script.py82
-rw-r--r--third_party/libwebrtc/build/android/gyp/generate_linker_version_script.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/ijar.py34
-rw-r--r--third_party/libwebrtc/build/android/gyp/ijar.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/jacoco_instr.py242
-rw-r--r--third_party/libwebrtc/build/android/gyp/jacoco_instr.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_enum.py437
-rw-r--r--third_party/libwebrtc/build/android/gyp/java_cpp_enum.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_enum_tests.py783
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_features.py110
-rw-r--r--third_party/libwebrtc/build/android/gyp/java_cpp_features.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_features_tests.py198
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_strings.py103
-rw-r--r--third_party/libwebrtc/build/android/gyp/java_cpp_strings.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_cpp_strings_tests.py151
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_google_api_keys.py123
-rw-r--r--third_party/libwebrtc/build/android/gyp/java_google_api_keys.pydeps7
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/java_google_api_keys_tests.py42
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/javac_output_processor.py198
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/jetify_jar.py68
-rw-r--r--third_party/libwebrtc/build/android/gyp/jetify_jar.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/jinja_template.py160
-rw-r--r--third_party/libwebrtc/build/android/gyp/jinja_template.pydeps43
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/lint.py494
-rw-r--r--third_party/libwebrtc/build/android/gyp/lint.pydeps8
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/merge_manifest.py149
-rw-r--r--third_party/libwebrtc/build/android/gyp/merge_manifest.pydeps7
-rw-r--r--third_party/libwebrtc/build/android/gyp/native_libraries_template.py39
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/nocompile_test.py212
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/optimize_resources.py151
-rw-r--r--third_party/libwebrtc/build/android/gyp/optimize_resources.pydeps0
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/prepare_resources.py207
-rw-r--r--third_party/libwebrtc/build/android/gyp/prepare_resources.pydeps35
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/process_native_prebuilt.py38
-rw-r--r--third_party/libwebrtc/build/android/gyp/process_native_prebuilt.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/proguard.py710
-rw-r--r--third_party/libwebrtc/build/android/gyp/proguard.pydeps16
-rw-r--r--third_party/libwebrtc/build/android/gyp/proto/Configuration_pb2.py697
-rw-r--r--third_party/libwebrtc/build/android/gyp/proto/README.md13
-rw-r--r--third_party/libwebrtc/build/android/gyp/proto/Resources_pb2.py2779
-rw-r--r--third_party/libwebrtc/build/android/gyp/proto/__init__.py0
-rw-r--r--third_party/libwebrtc/build/android/gyp/test/BUILD.gn11
-rw-r--r--third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java15
-rw-r--r--third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java12
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/turbine.py170
-rw-r--r--third_party/libwebrtc/build/android/gyp/turbine.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/unused_resources.py89
-rw-r--r--third_party/libwebrtc/build/android/gyp/unused_resources.pydeps31
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/__init__.py3
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/build_utils.py725
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/build_utils_test.py48
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/diff_utils.py127
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py59
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py194
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/manifest_utils.py321
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py128
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/md5_check.py471
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/md5_check_test.py178
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/parallel.py214
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/protoresources.py308
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/resource_utils.py1078
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/util/resource_utils_test.py275
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/resources_parser.py142
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/server_utils.py41
-rw-r--r--third_party/libwebrtc/build/android/gyp/util/zipalign.py97
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.py93
-rw-r--r--third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.pydeps9
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/write_build_config.py2091
-rw-r--r--third_party/libwebrtc/build/android/gyp/write_build_config.pydeps31
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/write_native_libraries_java.py130
-rw-r--r--third_party/libwebrtc/build/android/gyp/write_native_libraries_java.pydeps6
-rwxr-xr-xthird_party/libwebrtc/build/android/gyp/zip.py72
-rw-r--r--third_party/libwebrtc/build/android/gyp/zip.pydeps6
134 files changed, 22486 insertions, 0 deletions
diff --git a/third_party/libwebrtc/build/android/gyp/OWNERS b/third_party/libwebrtc/build/android/gyp/OWNERS
new file mode 100644
index 0000000000..25557e1fc5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/OWNERS
@@ -0,0 +1,4 @@
+agrieve@chromium.org
+digit@chromium.org
+smaier@chromium.org
+wnwen@chromium.org
diff --git a/third_party/libwebrtc/build/android/gyp/aar.py b/third_party/libwebrtc/build/android/gyp/aar.py
new file mode 100755
index 0000000000..b157cd816f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/aar.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes an Android AAR file."""
+
+import argparse
+import os
+import posixpath
+import re
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir)))
+import gn_helpers
+
+
+_PROGUARD_TXT = 'proguard.txt'
+
+
+def _GetManifestPackage(doc):
+ """Returns the package specified in the manifest.
+
+ Args:
+ doc: an XML tree parsed by ElementTree
+
+ Returns:
+ String representing the package name.
+ """
+ return doc.attrib['package']
+
+
+def _IsManifestEmpty(doc):
+ """Decides whether the given manifest has merge-worthy elements.
+
+ E.g.: <activity>, <service>, etc.
+
+ Args:
+ doc: an XML tree parsed by ElementTree
+
+ Returns:
+ Whether the manifest has merge-worthy elements.
+ """
+ for node in doc:
+ if node.tag == 'application':
+ if list(node):
+ return False
+ elif node.tag != 'uses-sdk':
+ return False
+
+ return True
+
+
+def _CreateInfo(aar_file):
+ """Extracts and return .info data from an .aar file.
+
+ Args:
+ aar_file: Path to an input .aar file.
+
+ Returns:
+ A dict containing .info data.
+ """
+ data = {}
+ data['aidl'] = []
+ data['assets'] = []
+ data['resources'] = []
+ data['subjars'] = []
+ data['subjar_tuples'] = []
+ data['has_classes_jar'] = False
+ data['has_proguard_flags'] = False
+ data['has_native_libraries'] = False
+ data['has_r_text_file'] = False
+ with zipfile.ZipFile(aar_file) as z:
+ manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml'))
+ data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml)
+ manifest_package = _GetManifestPackage(manifest_xml)
+ if manifest_package:
+ data['manifest_package'] = manifest_package
+
+ for name in z.namelist():
+ if name.endswith('/'):
+ continue
+ if name.startswith('aidl/'):
+ data['aidl'].append(name)
+ elif name.startswith('res/'):
+ data['resources'].append(name)
+ elif name.startswith('libs/') and name.endswith('.jar'):
+ label = posixpath.basename(name)[:-4]
+ label = re.sub(r'[^a-zA-Z0-9._]', '_', label)
+ data['subjars'].append(name)
+ data['subjar_tuples'].append([label, name])
+ elif name.startswith('assets/'):
+ data['assets'].append(name)
+ elif name.startswith('jni/'):
+ data['has_native_libraries'] = True
+ if 'native_libraries' in data:
+ data['native_libraries'].append(name)
+ else:
+ data['native_libraries'] = [name]
+ elif name == 'classes.jar':
+ data['has_classes_jar'] = True
+ elif name == _PROGUARD_TXT:
+ data['has_proguard_flags'] = True
+ elif name == 'R.txt':
+ # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs
+ # have no resources as well. We treat empty R.txt as having no R.txt.
+ data['has_r_text_file'] = bool(z.read('R.txt').strip())
+
+ return data
+
+
+def _PerformExtract(aar_file, output_dir, name_allowlist):
+ with build_utils.TempDir() as tmp_dir:
+ tmp_dir = os.path.join(tmp_dir, 'staging')
+ os.mkdir(tmp_dir)
+ build_utils.ExtractAll(
+ aar_file, path=tmp_dir, predicate=name_allowlist.__contains__)
+ # Write a breadcrumb so that SuperSize can attribute files back to the .aar.
+ with open(os.path.join(tmp_dir, 'source.info'), 'w') as f:
+ f.write('source={}\n'.format(aar_file))
+
+ shutil.rmtree(output_dir, ignore_errors=True)
+ shutil.move(tmp_dir, output_dir)
+
+
+def _AddCommonArgs(parser):
+ parser.add_argument(
+ 'aar_file', help='Path to the AAR file.', type=os.path.normpath)
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ command_parsers = parser.add_subparsers(dest='command')
+ subp = command_parsers.add_parser(
+ 'list', help='Output a GN scope describing the contents of the .aar.')
+ _AddCommonArgs(subp)
+ subp.add_argument('--output', help='Output file.', default='-')
+
+ subp = command_parsers.add_parser('extract', help='Extracts the .aar')
+ _AddCommonArgs(subp)
+ subp.add_argument(
+ '--output-dir',
+ help='Output directory for the extracted files.',
+ required=True,
+ type=os.path.normpath)
+ subp.add_argument(
+ '--assert-info-file',
+ help='Path to .info file. Asserts that it matches what '
+ '"list" would output.',
+ type=argparse.FileType('r'))
+ subp.add_argument(
+ '--ignore-resources',
+ action='store_true',
+ help='Whether to skip extraction of res/')
+
+ args = parser.parse_args()
+
+ aar_info = _CreateInfo(args.aar_file)
+ formatted_info = """\
+# Generated by //build/android/gyp/aar.py
+# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen".
+
+""" + gn_helpers.ToGNString(aar_info, pretty=True)
+
+ if args.command == 'extract':
+ if args.assert_info_file:
+ cached_info = args.assert_info_file.read()
+ if formatted_info != cached_info:
+ raise Exception('android_aar_prebuilt() cached .info file is '
+ 'out-of-date. Run gn gen with '
+ 'update_android_aar_prebuilts=true to update it.')
+
+ with zipfile.ZipFile(args.aar_file) as zf:
+ names = zf.namelist()
+ if args.ignore_resources:
+ names = [n for n in names if not n.startswith('res')]
+
+ _PerformExtract(args.aar_file, args.output_dir, set(names))
+
+ elif args.command == 'list':
+ aar_output_present = args.output != '-' and os.path.isfile(args.output)
+ if aar_output_present:
+ # Some .info files are read-only, for examples the cipd-controlled ones
+ # under third_party/android_deps/repositoty. To deal with these, first
+ # that its content is correct, and if it is, exit without touching
+ # the file system.
+ file_info = open(args.output, 'r').read()
+ if file_info == formatted_info:
+ return
+
+ # Try to write the file. This may fail for read-only ones that were
+ # not updated.
+ try:
+ with open(args.output, 'w') as f:
+ f.write(formatted_info)
+ except IOError as e:
+ if not aar_output_present:
+ raise e
+ raise Exception('Could not update output file: %s\n%s\n' %
+ (args.output, e))
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/aar.pydeps b/third_party/libwebrtc/build/android/gyp/aar.pydeps
new file mode 100644
index 0000000000..7e2924b34c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/aar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py
+../../gn_helpers.py
+aar.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/aidl.py b/third_party/libwebrtc/build/android/gyp/aidl.py
new file mode 100755
index 0000000000..b8099aaecd
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/aidl.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def main(argv):
+ option_parser = optparse.OptionParser()
+ option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+ option_parser.add_option('--imports', help='Files to import.')
+ option_parser.add_option('--includes',
+ help='Directories to add as import search paths.')
+ option_parser.add_option('--srcjar', help='Path for srcjar output.')
+ build_utils.AddDepfileOption(option_parser)
+ options, args = option_parser.parse_args(argv[1:])
+
+ options.includes = build_utils.ParseGnList(options.includes)
+
+ with build_utils.TempDir() as temp_dir:
+ for f in args:
+ classname = os.path.splitext(os.path.basename(f))[0]
+ output = os.path.join(temp_dir, classname + '.java')
+ aidl_cmd = [options.aidl_path]
+ aidl_cmd += [
+ '-p' + s for s in build_utils.ParseGnList(options.imports)
+ ]
+ aidl_cmd += ['-I' + s for s in options.includes]
+ aidl_cmd += [
+ f,
+ output
+ ]
+ build_utils.CheckOutput(aidl_cmd)
+
+ with build_utils.AtomicOutput(options.srcjar) as f:
+ with zipfile.ZipFile(f, 'w') as srcjar:
+ for path in build_utils.FindInDirectory(temp_dir, '*.java'):
+ with open(path) as fileobj:
+ data = fileobj.read()
+ pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1)
+ arcname = '%s/%s' % (
+ pkg_name.replace('.', '/'), os.path.basename(path))
+ build_utils.AddToZipHermetic(srcjar, arcname, data=data)
+
+ if options.depfile:
+ include_files = []
+ for include_dir in options.includes:
+ include_files += build_utils.FindInDirectory(include_dir, '*.java')
+ build_utils.WriteDepfile(options.depfile, options.srcjar, include_files)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/third_party/libwebrtc/build/android/gyp/aidl.pydeps b/third_party/libwebrtc/build/android/gyp/aidl.pydeps
new file mode 100644
index 0000000000..11c55ed4b6
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/aidl.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py
+../../gn_helpers.py
+aidl.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/allot_native_libraries.py b/third_party/libwebrtc/build/android/gyp/allot_native_libraries.py
new file mode 100755
index 0000000000..978b173403
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/allot_native_libraries.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Allots libraries to modules to be packaged into.
+
+All libraries that are depended on by a single module will be allotted to this
+module. All other libraries will be allotted to the closest ancestor.
+
+Example:
+ Given the module dependency structure
+
+ c
+ / \
+ b d
+ / \
+ a e
+
+ and libraries assignment
+
+ a: ['lib1.so']
+ e: ['lib2.so', 'lib1.so']
+
+ will make the allotment decision
+
+ c: ['lib1.so']
+ e: ['lib2.so']
+
+ The above example is invoked via:
+
+ ./allot_native_libraries \
+ --libraries 'a,["1.so"]' \
+ --libraries 'e,["2.so", "1.so"]' \
+ --dep c:b \
+ --dep b:a \
+ --dep c:d \
+ --dep d:e \
+ --output <output JSON>
+"""
+
+import argparse
+import collections
+import json
+import sys
+
+from util import build_utils
+
+
+def _ModuleLibrariesPair(arg):
+ pos = arg.find(',')
+ assert pos > 0
+ return (arg[:pos], arg[pos + 1:])
+
+
+def _DepPair(arg):
+ parent, child = arg.split(':')
+ return (parent, child)
+
+
+def _PathFromRoot(module_tree, module):
+ """Computes path from root to a module.
+
+ Parameters:
+ module_tree: Dictionary mapping each module to its parent.
+ module: Module to which to compute the path.
+
+ Returns:
+ Path from root the the module.
+ """
+ path = [module]
+ while module_tree.get(module):
+ module = module_tree[module]
+ path = [module] + path
+ return path
+
+
+def _ClosestCommonAncestor(module_tree, modules):
+ """Computes the common ancestor of a set of modules.
+
+ Parameters:
+ module_tree: Dictionary mapping each module to its parent.
+ modules: Set of modules for which to find the closest common ancestor.
+
+ Returns:
+ The closest common ancestor.
+ """
+ paths = [_PathFromRoot(module_tree, m) for m in modules]
+ assert len(paths) > 0
+ ancestor = None
+ for level in zip(*paths):
+ if len(set(level)) != 1:
+ return ancestor
+ ancestor = level[0]
+ return ancestor
+
+
+def _AllotLibraries(module_tree, libraries_map):
+ """Allot all libraries to a module.
+
+ Parameters:
+ module_tree: Dictionary mapping each module to its parent. Modules can map
+ to None, which is considered the root of the tree.
+ libraries_map: Dictionary mapping each library to a set of modules, which
+ depend on the library.
+
+ Returns:
+ A dictionary mapping mapping each module name to a set of libraries allotted
+ to the module such that libraries with multiple dependees are allotted to
+ the closest ancestor.
+
+ Raises:
+ Exception if some libraries can only be allotted to the None root.
+ """
+ allotment_map = collections.defaultdict(set)
+ for library, modules in libraries_map.items():
+ ancestor = _ClosestCommonAncestor(module_tree, modules)
+ if not ancestor:
+ raise Exception('Cannot allot libraries for given dependency tree')
+ allotment_map[ancestor].add(library)
+ return allotment_map
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--libraries',
+ action='append',
+ type=_ModuleLibrariesPair,
+ required=True,
+ help='A pair of module name and GN list of libraries a module depends '
+ 'on. Can be specified multiple times.')
+ parser.add_argument(
+ '--output',
+ required=True,
+ help='A JSON file with a key for each module mapping to a list of '
+ 'libraries, which should be packaged into this module.')
+ parser.add_argument(
+ '--dep',
+ action='append',
+ type=_DepPair,
+ dest='deps',
+ default=[],
+ help='A pair of parent module name and child module name '
+ '(format: "<parent>:<child>"). Can be specified multiple times.')
+ options = parser.parse_args(build_utils.ExpandFileArgs(args))
+ options.libraries = [(m, build_utils.ParseGnList(l))
+ for m, l in options.libraries]
+
+ # Parse input creating libraries and dependency tree.
+ libraries_map = collections.defaultdict(set) # Maps each library to its
+ # dependee modules.
+ module_tree = {} # Maps each module name to its parent.
+ for module, libraries in options.libraries:
+ module_tree[module] = None
+ for library in libraries:
+ libraries_map[library].add(module)
+ for parent, child in options.deps:
+ if module_tree.get(child):
+ raise Exception('%s cannot have multiple parents' % child)
+ module_tree[child] = parent
+ module_tree[parent] = module_tree.get(parent)
+
+ # Allot all libraries to a module such that libraries with multiple dependees
+ # are allotted to the closest ancestor.
+ allotment_map = _AllotLibraries(module_tree, libraries_map)
+
+ # The build system expects there to be a set of libraries even for the modules
+ # that don't have any libraries allotted.
+ for module in module_tree:
+ # Creates missing sets because of defaultdict.
+ allotment_map[module] = allotment_map[module]
+
+ with open(options.output, 'w') as f:
+ # Write native libraries config and ensure the output is deterministic.
+ json.dump({m: sorted(l)
+ for m, l in allotment_map.items()},
+ f,
+ sort_keys=True,
+ indent=2)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/allot_native_libraries.pydeps b/third_party/libwebrtc/build/android/gyp/allot_native_libraries.pydeps
new file mode 100644
index 0000000000..d8b10cd3da
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/allot_native_libraries.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py
+../../gn_helpers.py
+allot_native_libraries.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/apkbuilder.py b/third_party/libwebrtc/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000000..c355fdf88f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/apkbuilder.py
@@ -0,0 +1,561 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+import zlib
+
+import finalize_apk
+
+from util import build_utils
+from util import diff_utils
+from util import zipalign
+
+# Input dex.jar files are zipaligned.
+zipalign.ApplyZipFileZipAlignFix()
+
+
+# Taken from aapt's Package.cpp:
+_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
+ '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
+ '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
+ '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
+ '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
def _ParseArgs(args):
  """Parses command-line arguments and normalizes GN-list values.

  Args:
    args: Command-line argument list (with @FileArgs already expanded).

  Returns:
    An argparse.Namespace whose GN-list options have been parsed into python
    lists and whose --uncompress-shared-libraries is converted to a bool.

  Raises:
    Exception: If an APK-only argument is missing in 'apk' format, or if the
      ABI options are inconsistent with the native-library options.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--assets',
      help='GYP-list of files to add as assets in the form '
      '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument(
      '--java-resources', help='GYP-list of java_resources JARs to include.')
  parser.add_argument('--write-asset-list',
                      action='store_true',
                      help='Whether to create an assets/assets_list file.')
  parser.add_argument(
      '--uncompressed-assets',
      help='Same as --assets, except disables compression.')
  parser.add_argument('--resource-apk',
                      help='An .ap_ file built using aapt',
                      required=True)
  parser.add_argument('--output-apk',
                      help='Path to the output file',
                      required=True)
  parser.add_argument('--format', choices=['apk', 'bundle-module'],
                      default='apk', help='Specify output format.')
  parser.add_argument('--dex-file',
                      help='Path to the classes.dex to use')
  parser.add_argument(
      '--jdk-libs-dex-file',
      help='Path to classes.dex created by dex_jdk_libs.py')
  parser.add_argument('--uncompress-dex', action='store_true',
                      help='Store .dex files uncompressed in the APK')
  parser.add_argument('--native-libs',
                      action='append',
                      help='GYP-list of native libraries to include. '
                      'Can be specified multiple times.',
                      default=[])
  parser.add_argument('--secondary-native-libs',
                      action='append',
                      help='GYP-list of native libraries for secondary '
                      'android-abi. Can be specified multiple times.',
                      default=[])
  parser.add_argument('--android-abi',
                      help='Android architecture to use for native libraries')
  parser.add_argument('--secondary-android-abi',
                      help='The secondary Android architecture to use for'
                      'secondary native libraries')
  parser.add_argument(
      '--is-multi-abi',
      action='store_true',
      help='Will add a placeholder for the missing ABI if no native libs or '
      'placeholders are set for either the primary or secondary ABI. Can only '
      'be set if both --android-abi and --secondary-android-abi are set.')
  parser.add_argument(
      '--native-lib-placeholders',
      help='GYP-list of native library placeholders to add.')
  parser.add_argument(
      '--secondary-native-lib-placeholders',
      help='GYP-list of native library placeholders to add '
      'for the secondary ABI')
  parser.add_argument('--uncompress-shared-libraries', default='False',
                      choices=['true', 'True', 'false', 'False'],
                      help='Whether to uncompress native shared libraries. Argument must be '
                      'a boolean value.')
  parser.add_argument(
      '--apksigner-jar', help='Path to the apksigner executable.')
  parser.add_argument('--zipalign-path',
                      help='Path to the zipalign executable.')
  parser.add_argument('--key-path',
                      help='Path to keystore for signing.')
  parser.add_argument('--key-passwd',
                      help='Keystore password')
  parser.add_argument('--key-name',
                      help='Keystore name')
  parser.add_argument(
      '--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
  parser.add_argument(
      '--best-compression',
      action='store_true',
      help='Use zip -9 rather than zip -1')
  parser.add_argument(
      '--library-always-compress',
      action='append',
      help='The list of library files that we always compress.')
  parser.add_argument(
      '--library-renames',
      action='append',
      help='The list of library files that we prepend crazy. to their names.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  diff_utils.AddCommandLineFlags(parser)
  options = parser.parse_args(args)
  # GN lists arrive as serialized strings; convert each to a python list.
  options.assets = build_utils.ParseGnList(options.assets)
  options.uncompressed_assets = build_utils.ParseGnList(
      options.uncompressed_assets)
  options.native_lib_placeholders = build_utils.ParseGnList(
      options.native_lib_placeholders)
  options.secondary_native_lib_placeholders = build_utils.ParseGnList(
      options.secondary_native_lib_placeholders)
  options.java_resources = build_utils.ParseGnList(options.java_resources)
  options.native_libs = build_utils.ParseGnList(options.native_libs)
  options.secondary_native_libs = build_utils.ParseGnList(
      options.secondary_native_libs)
  options.library_always_compress = build_utils.ParseGnList(
      options.library_always_compress)
  options.library_renames = build_utils.ParseGnList(options.library_renames)

  # --apksigner-jar, --zipalign-path, --key-xxx arguments are
  # required when building an APK, but not a bundle module.
  if options.format == 'apk':
    required_args = [
        'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
    ]
    for required in required_args:
      if not vars(options)[required]:
        raise Exception('Argument --%s is required for APKs.' % (
            required.replace('_', '-')))

  # Flag value is one of four strings (see choices above); map to a bool.
  options.uncompress_shared_libraries = \
      options.uncompress_shared_libraries in [ 'true', 'True' ]

  if not options.android_abi and (options.native_libs or
                                  options.native_lib_placeholders):
    raise Exception('Must specify --android-abi with --native-libs')
  if not options.secondary_android_abi and (options.secondary_native_libs or
      options.secondary_native_lib_placeholders):
    raise Exception('Must specify --secondary-android-abi with'
                    ' --secondary-native-libs')
  if options.is_multi_abi and not (options.android_abi
                                   and options.secondary_android_abi):
    raise Exception('Must specify --is-multi-abi with both --android-abi '
                    'and --secondary-android-abi.')
  return options
+
+
+def _SplitAssetPath(path):
+ """Returns (src, dest) given an asset path in the form src[:dest]."""
+ path_parts = path.split(':')
+ src_path = path_parts[0]
+ if len(path_parts) > 1:
+ dest_path = path_parts[1]
+ else:
+ dest_path = os.path.basename(src_path)
+ return src_path, dest_path
+
+
def _ExpandPaths(paths):
  """Turns "src[:dest]" specs into (src, dest) tuples, walking directories.

  Args:
    paths: Asset specs in the form "src_path[:dest_path]".

  Returns:
    (src_path, dest_path) tuples, one per file (directory sources are
    expanded to their contained files), sorted by dest_path so the output
    .apk ordering is stable.
  """
  expanded = []
  for spec in paths:
    src, dest = _SplitAssetPath(spec)
    if not os.path.isdir(src):
      expanded.append((src, dest))
    else:
      for found in build_utils.FindInDirectory(src, '*'):
        expanded.append((found, os.path.join(dest, found[len(src) + 1:])))
  expanded.sort(key=lambda pair: pair[1])
  return expanded
+
+
def _GetAssetsToAdd(path_tuples,
                    fast_align,
                    disable_compression=False,
                    allow_reads=True):
  """Builds the file-detail tuples for assets to be placed in the apk.

  Args:
    path_tuples: (src_path, dest_path) pairs to add under assets/.
    fast_align: True when alignment happens inside the python zip writer
      (otherwise the zipalign tool handles alignment out of band).
    disable_compression: Forces every asset to be stored uncompressed.
    allow_reads: When False, file sizes are not read from disk, so the
      small-file compression opt-out below is skipped.

  Returns:
    A list of (apk_path, src_path, compress, alignment) tuples.
  """
  details = []
  # Emit all uncompressed assets first, then all compressed ones, to improve
  # locality of mmap'ed files.
  for want_compressed in (False, True):
    for src_path, dest_path in path_tuples:
      extension = os.path.splitext(src_path)[1]
      compress = (not disable_compression
                  and extension not in _NO_COMPRESS_EXTENSIONS)
      if compress != want_compressed:
        continue
      # Mirror AddToZipHermetic(): tiny files are stored rather than
      # deflated, and the alignment value must agree with that decision.
      if allow_reads and compress and os.path.getsize(src_path) < 16:
        compress = False
      alignment = 0 if compress and not fast_align else 4
      details.append(('assets/' + dest_path, src_path, compress, alignment))
  return details
+
+
def _AddFiles(apk, details):
  """Writes each (apk_path, src_path, compress, alignment) entry into |apk|.

  Args:
    apk: Open ZipFile to append entries to.
    details: File-detail tuples describing what to add and how.

  Raises:
    Exception: If an archive path occurs more than once. Duplicate merging
      is the job of write_build_config.py, so hitting this indicates a
      configuration bug. (The duplicate check only matters for assets, but
      applying it to every entry is harmless.)
  """
  for apk_path, src_path, compress, alignment in details:
    try:
      apk.getinfo(apk_path)
    except KeyError:
      zipalign.AddToZipHermetic(
          apk,
          apk_path,
          src_path=src_path,
          compress=compress,
          alignment=alignment)
    else:
      # getinfo() succeeded: the path already exists in the archive.
      raise Exception(
          'Multiple targets specified the asset path: %s' % apk_path)
+
+
+def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
+ lib_always_compress, lib_renames):
+ """Returns the list of file_detail tuples for native libraries in the apk.
+
+ Returns: A list of (src_path, apk_path, compress, alignment) tuple
+ representing what and how native libraries are added.
+ """
+ libraries_to_add = []
+
+
+ for path in native_libs:
+ basename = os.path.basename(path)
+ compress = not uncompress or any(lib_name in basename
+ for lib_name in lib_always_compress)
+ rename = any(lib_name in basename for lib_name in lib_renames)
+ if rename:
+ basename = 'crazy.' + basename
+
+ lib_android_abi = android_abi
+ if path.startswith('android_clang_arm64_hwasan/'):
+ lib_android_abi = 'arm64-v8a-hwasan'
+
+ apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
+ alignment = 0 if compress and not fast_align else 0x1000
+ libraries_to_add.append((apk_path, path, compress, alignment))
+
+ return libraries_to_add
+
+
+def _CreateExpectationsData(native_libs, assets):
+ """Creates list of native libraries and assets."""
+ native_libs = sorted(native_libs)
+ assets = sorted(assets)
+
+ ret = []
+ for apk_path, _, compress, alignment in native_libs + assets:
+ ret.append('apk_path=%s, compress=%s, alignment=%s\n' %
+ (apk_path, compress, alignment))
+ return ''.join(ret)
+
+
def main(args):
  """Assembles the output APK or bundle module from its constituent parts.

  Merges the aapt resource .ap_, assets, dex files, native libraries and
  java resources into options.output_apk. For 'apk' format the result is
  additionally aligned and signed via finalize_apk. Optionally verifies
  expectation files and writes a depfile.
  """
  build_utils.InitLogging('APKBUILDER_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # Until Python 3.7, there's no better way to set compression level.
  # The default is 6.
  if options.best_compression:
    # Compresses about twice as slow as the default.
    zlib.Z_DEFAULT_COMPRESSION = 9
  else:
    # Compresses about twice as fast as the default.
    zlib.Z_DEFAULT_COMPRESSION = 1

  # Manually align only when alignment is necessary.
  # Python's zip implementation duplicates file comments in the central
  # directory, whereas zipalign does not, so use zipalign for official builds.
  fast_align = options.format == 'apk' and not options.best_compression

  native_libs = sorted(options.native_libs)

  # Include native libs in the depfile_deps since GN doesn't know about the
  # dependencies when is_component_build=true.
  depfile_deps = list(native_libs)

  # For targets that depend on static library APKs, dex paths are created by
  # the static library's dexsplitter target and GN doesn't know about these
  # paths.
  if options.dex_file:
    depfile_deps.append(options.dex_file)

  secondary_native_libs = []
  if options.secondary_native_libs:
    secondary_native_libs = sorted(options.secondary_native_libs)
    depfile_deps += secondary_native_libs

  if options.java_resources:
    # Included via .build_config.json, so need to write it to depfile.
    depfile_deps.extend(options.java_resources)

  assets = _ExpandPaths(options.assets)
  uncompressed_assets = _ExpandPaths(options.uncompressed_assets)

  # Included via .build_config.json, so need to write it to depfile.
  depfile_deps.extend(x[0] for x in assets)
  depfile_deps.extend(x[0] for x in uncompressed_assets)
  depfile_deps.append(options.resource_apk)

  # Bundle modules have a structure similar to APKs, except that resources
  # are compiled in protobuf format (instead of binary xml), and that some
  # files are located into different top-level directories, e.g.:
  #   AndroidManifest.xml -> manifest/AndroidManifest.xml
  #   classes.dex -> dex/classes.dex
  #   res/ -> res/ (unchanged)
  #   assets/ -> assets/ (unchanged)
  #   <other-file> -> root/<other-file>
  #
  # Hence, the following variables are used to control the location of files in
  # the final archive.
  if options.format == 'bundle-module':
    apk_manifest_dir = 'manifest/'
    apk_root_dir = 'root/'
    apk_dex_dir = 'dex/'
  else:
    apk_manifest_dir = ''
    apk_root_dir = ''
    apk_dex_dir = ''

  def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
    # Merges compressed and uncompressed asset details into a single list.
    ret = _GetAssetsToAdd(assets,
                          fast_align,
                          disable_compression=False,
                          allow_reads=allow_reads)
    ret.extend(
        _GetAssetsToAdd(uncompressed_assets,
                        fast_align,
                        disable_compression=True,
                        allow_reads=allow_reads))
    return ret

  libs_to_add = _GetNativeLibrariesToAdd(
      native_libs, options.android_abi, options.uncompress_shared_libraries,
      fast_align, options.library_always_compress, options.library_renames)
  if options.secondary_android_abi:
    libs_to_add.extend(
        _GetNativeLibrariesToAdd(
            secondary_native_libs, options.secondary_android_abi,
            options.uncompress_shared_libraries, fast_align,
            options.library_always_compress, options.library_renames))

  if options.expected_file:
    # We compute expectations without reading the files. This allows us to check
    # expectations for different targets by just generating their build_configs
    # and not have to first generate all the actual files and all their
    # dependencies (for example by just passing --only-verify-expectations).
    asset_details = _GetAssetDetails(assets,
                                     uncompressed_assets,
                                     fast_align,
                                     allow_reads=False)

    actual_data = _CreateExpectationsData(libs_to_add, asset_details)
    diff_utils.CheckExpectations(actual_data, options)

    if options.only_verify_expectations:
      if options.depfile:
        build_utils.WriteDepfile(options.depfile,
                                 options.actual_file,
                                 inputs=depfile_deps)
      return

  # If we are past this point, we are going to actually create the final apk so
  # we should recompute asset details again but maybe perform some optimizations
  # based on the size of the files on disk.
  assets_to_add = _GetAssetDetails(
      assets, uncompressed_assets, fast_align, allow_reads=True)

  # Targets generally do not depend on apks, so no need for only_if_changed.
  with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
    with zipfile.ZipFile(options.resource_apk) as resource_apk, \
         zipfile.ZipFile(f, 'w') as out_apk:

      def add_to_zip(zip_path, data, compress=True, alignment=4):
        # Thin wrapper that resolves the alignment the same way as the
        # asset/library helpers above.
        zipalign.AddToZipHermetic(
            out_apk,
            zip_path,
            data=data,
            compress=compress,
            alignment=0 if compress and not fast_align else alignment)

      def copy_resource(zipinfo, out_dir=''):
        # Copies one entry from the resource .ap_, preserving whether it was
        # stored or deflated.
        add_to_zip(
            out_dir + zipinfo.filename,
            resource_apk.read(zipinfo.filename),
            compress=zipinfo.compress_type != zipfile.ZIP_STORED)

      # Make assets come before resources in order to maintain the same file
      # ordering as GYP / aapt. http://crbug.com/561862
      resource_infos = resource_apk.infolist()

      # 1. AndroidManifest.xml
      logging.debug('Adding AndroidManifest.xml')
      copy_resource(
          resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)

      # 2. Assets
      logging.debug('Adding assets/')
      _AddFiles(out_apk, assets_to_add)

      # 3. Dex files
      logging.debug('Adding classes.dex')
      if options.dex_file:
        with open(options.dex_file, 'rb') as dex_file_obj:
          if options.dex_file.endswith('.dex'):
            max_dex_number = 1
            # This is the case for incremental_install=true.
            add_to_zip(
                apk_dex_dir + 'classes.dex',
                dex_file_obj.read(),
                compress=not options.uncompress_dex)
          else:
            max_dex_number = 0
            with zipfile.ZipFile(dex_file_obj) as dex_zip:
              for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
                max_dex_number += 1
                add_to_zip(
                    apk_dex_dir + dex,
                    dex_zip.read(dex),
                    compress=not options.uncompress_dex)

      if options.jdk_libs_dex_file:
        with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
          add_to_zip(
              apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
              dex_file_obj.read(),
              compress=not options.uncompress_dex)

      # 4. Native libraries.
      logging.debug('Adding lib/')
      _AddFiles(out_apk, libs_to_add)

      # Add a placeholder lib if the APK should be multi ABI but is missing libs
      # for one of the ABIs.
      native_lib_placeholders = options.native_lib_placeholders
      secondary_native_lib_placeholders = (
          options.secondary_native_lib_placeholders)
      if options.is_multi_abi:
        if ((secondary_native_libs or secondary_native_lib_placeholders)
            and not native_libs and not native_lib_placeholders):
          native_lib_placeholders += ['libplaceholder.so']
        if ((native_libs or native_lib_placeholders)
            and not secondary_native_libs
            and not secondary_native_lib_placeholders):
          secondary_native_lib_placeholders += ['libplaceholder.so']

      # Add placeholder libs.
      for name in sorted(native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.android_abi, name)
        add_to_zip(apk_path, '', alignment=0x1000)

      for name in sorted(secondary_native_lib_placeholders):
        # Note: Empty libs files are ignored by md5check (can cause issues
        # with stale builds when the only change is adding/removing
        # placeholders).
        apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
        add_to_zip(apk_path, '', alignment=0x1000)

      # 5. Resources
      logging.debug('Adding res/')
      for info in sorted(resource_infos, key=lambda i: i.filename):
        if info.filename != 'AndroidManifest.xml':
          copy_resource(info)

      # 6. Java resources that should be accessible via
      # Class.getResourceAsStream(), in particular parts of Emma jar.
      # Prebuilt jars may contain class files which we shouldn't include.
      logging.debug('Adding Java resources')
      for java_resource in options.java_resources:
        with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
          for apk_path in sorted(java_resource_jar.namelist()):
            apk_path_lower = apk_path.lower()

            if apk_path_lower.startswith('meta-inf/'):
              continue
            if apk_path_lower.endswith('/'):
              continue
            if apk_path_lower.endswith('.class'):
              continue

            add_to_zip(apk_root_dir + apk_path,
                       java_resource_jar.read(apk_path))

    if options.format == 'apk':
      zipalign_path = None if fast_align else options.zipalign_path
      finalize_apk.FinalizeApk(options.apksigner_jar,
                               zipalign_path,
                               f.name,
                               f.name,
                               options.key_path,
                               options.key_passwd,
                               options.key_name,
                               int(options.min_sdk_version),
                               warnings_as_errors=options.warnings_as_errors)
    logging.debug('Moving file into place')

    if options.depfile:
      build_utils.WriteDepfile(options.depfile,
                               options.output_apk,
                               inputs=depfile_deps)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/apkbuilder.pydeps b/third_party/libwebrtc/build/android/gyp/apkbuilder.pydeps
new file mode 100644
index 0000000000..e6122edd2f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/apkbuilder.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py
+../../gn_helpers.py
+apkbuilder.py
+finalize_apk.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/zipalign.py
diff --git a/third_party/libwebrtc/build/android/gyp/assert_static_initializers.py b/third_party/libwebrtc/build/android/gyp/assert_static_initializers.py
new file mode 100755
index 0000000000..9af5e2b825
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/assert_static_initializers.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Checks the number of static initializers in an APK's library."""
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT,
+ 'tools', 'linux',
+ 'dump-static-initializers.py')
+
+
def _RunReadelf(so_path, options, tool_prefix=''):
  """Runs (tool_prefix + )readelf with |options| on |so_path|; returns stdout."""
  cmd = [tool_prefix + 'readelf']
  cmd.extend(options)
  cmd.append(so_path)
  return subprocess.check_output(cmd).decode('utf8')
+
+
def _ParseLibBuildId(so_path, tool_prefix):
  """Returns the Build ID of |so_path|, or None when readelf reports none."""
  notes = _RunReadelf(so_path, ['-n'], tool_prefix)
  found = re.search(r'Build ID: (\w+)', notes)
  if found is None:
    return None
  return found.group(1)
+
+
def _VerifyLibBuildIdsMatch(tool_prefix, *so_files):
  """Raises when the given libraries do not all share a single Build ID."""
  build_ids = {_ParseLibBuildId(path, tool_prefix) for path in so_files}
  if len(build_ids) > 1:
    raise Exception('Found differing build ids in output directory and apk. '
                    'Your output directory is likely stale.')
+
+
def _GetStaticInitializers(so_path, tool_prefix):
  """Runs dump-static-initializers.py; returns (SI lines, file count)."""
  dump_output = subprocess.check_output(
      [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t', tool_prefix],
      encoding='utf-8')
  # The final line is a summary: "Found N static initializers in M files."
  tally = re.search(r'Found \d+ static initializers in (\d+) files.',
                    dump_output)
  return dump_output.splitlines()[:-1], int(tally.group(1))
+
+
+def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix):
+ lib_name = os.path.basename(apk_so_name).replace('crazy.', '')
+ so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name)
+ if not os.path.exists(so_with_symbols_path):
+ raise Exception('Unstripped .so not found. Looked here: %s',
+ so_with_symbols_path)
+ _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path)
+ sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix)
+ for si in sis:
+ print(si)
+
+
# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py.
def _ReadInitArray(so_path, tool_prefix, expect_no_initializers):
  """Returns the size in bytes of the .init_array section (0 when absent and
  expected absent)."""
  section_dump = _RunReadelf(so_path, ['-SW'], tool_prefix)
  # Example row:
  # .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8
  row = re.search(r'\.init_array.*$', section_dump, re.MULTILINE)
  if expect_no_initializers:
    if row:
      raise Exception(
          'Expected no initializers for %s, yet some were found' % so_path)
    return 0
  if not row:
    raise Exception('Did not find section: .init_array in {}:\n{}'.format(
        so_path, section_dump))
  # Field 5 of the matched row is the section size, in hex.
  return int(re.split(r'\W+', row.group(0))[5], 16)
+
+
def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers):
  """Returns the number of static initializer entries in |so_path|.

  The count is the size of the .init_array section divided by the ELF word
  size, which relies on how compiler and linker typically implement global
  static initializers.

  Args:
    so_path: Path to the shared library to inspect.
    tool_prefix: Toolchain prefix for readelf.
    expect_no_initializers: Forwarded to _ReadInitArray; raises when any
      initializers are present.
  """
  # First determine if we're 32 or 64 bit to get the word size.
  stdout = _RunReadelf(so_path, ['-h'], tool_prefix)
  elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0)
  elf_class = re.split(r'\W+', elf_class_line)[1]
  if elf_class == 'ELF32':
    word_size = 4
  else:
    word_size = 8

  # NOTE: this is very implementation-specific and makes assumptions
  # about how compiler and linker implement global static initializers.
  init_array_size = _ReadInitArray(so_path, tool_prefix, expect_no_initializers)
  # Fix: use integer (floor) division. Under Python 3, '/' produced a float
  # count (e.g. "found 3.0" in the mismatch message).
  return init_array_size // word_size
+
+
def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir,
                               ignored_libs, no_initializers_libs):
  """Counts static initializers across the native libraries in an archive.

  Args:
    apk_or_aab: Path to the .apk or .aab to inspect.
    tool_prefix: Toolchain prefix for readelf.
    dump_sis: When True, also print per-initializer details using the
      unstripped libraries found under |out_dir|.
    out_dir: Build output directory (expected to hold lib.unstripped/).
    ignored_libs: Library basenames to skip entirely.
    no_initializers_libs: Library basenames expected to have no initializers.

  Returns:
    The total static initializer count of the checked libraries.
  """
  # Static initializer counting mostly copies logic in
  # infra/scripts/legacy/scripts/slave/chromium/sizes.py.
  with zipfile.ZipFile(apk_or_aab) as z:
    # Non-empty .so entries only; empty ones are placeholders.
    so_files = [
        f for f in z.infolist() if f.filename.endswith('.so')
        and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs
    ]
    # Skip checking static initializers for secondary abi libs. They will be
    # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so
    # files in the output directory in 64 bit builds.
    has_64 = any('64' in f.filename for f in so_files)
    files_to_check = [f for f in so_files if not has_64 or '64' in f.filename]

    # Do not check partitioned libs. They have no ".init_array" section since
    # all SIs are considered "roots" by the linker, and so end up in the base
    # module.
    files_to_check = [
        f for f in files_to_check if not f.filename.endswith('_partition.so')
    ]

    si_count = 0
    for f in files_to_check:
      lib_basename = os.path.basename(f.filename)
      expect_no_initializers = lib_basename in no_initializers_libs
      # readelf needs an on-disk file, so extract each lib to a temp file.
      with tempfile.NamedTemporaryFile(prefix=lib_basename) as temp:
        temp.write(z.read(f))
        temp.flush()
        si_count += _CountStaticInitializers(temp.name, tool_prefix,
                                             expect_no_initializers)
        if dump_sis:
          # Print count and list of SIs reported by dump-static-initializers.py.
          # Doesn't work well on all archs (particularly arm), which is why
          # the readelf method is used for tracking SI counts.
          _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix)
  return si_count
+
+
def main():
  """Checks the APK/AAB's static initializer count against --expected-count.

  Exits with status 1 (after printing diagnostics) on a mismatch; otherwise
  optionally touches the --touch stamp file.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--touch', help='File to touch upon success')
  parser.add_argument('--tool-prefix', required=True,
                      help='Prefix for nm and friends')
  parser.add_argument('--expected-count', required=True, type=int,
                      help='Fail if number of static initializers is not '
                      'equal to this value.')
  parser.add_argument('apk_or_aab', help='Path to .apk or .aab file.')
  args = parser.parse_args()

  # TODO(crbug.com/838414): add support for files included via loadable_modules.
  ignored_libs = {
      'libarcore_sdk_c.so', 'libcrashpad_handler_trampoline.so',
      'libsketchology_native.so'
  }
  # The chromium linker doesn't have static initializers, which makes the
  # regular check throw. It should not have any.
  no_initializers_libs = ['libchromium_android_linker.so']

  si_count = _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix,
                                        False, '.', ignored_libs,
                                        no_initializers_libs)
  if si_count != args.expected_count:
    print('Expected {} static initializers, but found {}.'.format(
        args.expected_count, si_count))
    if args.expected_count > si_count:
      print('You have removed one or more static initializers. Thanks!')
      print('To fix the build, update the expectation in:')
      print(' //chrome/android/static_initializers.gni')
    else:
      print('Dumping static initializers via dump-static-initializers.py:')
      sys.stdout.flush()
      _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.',
                                 ignored_libs, no_initializers_libs)
      print()
      print('If the above list is not useful, consider listing them with:')
      print(' //tools/binary_size/diagnose_bloat.py')
      print()
      print('For more information:')
      print(' https://chromium.googlesource.com/chromium/src/+/main/docs/'
            'static_initializers.md')
    sys.exit(1)

  if args.touch:
    # Fix: close the success-stamp file rather than leaking the handle.
    with open(args.touch, 'w'):
      pass
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/android/gyp/assert_static_initializers.pydeps b/third_party/libwebrtc/build/android/gyp/assert_static_initializers.pydeps
new file mode 100644
index 0000000000..b574d817a1
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/assert_static_initializers.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py
+../../gn_helpers.py
+assert_static_initializers.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/bundletool.py b/third_party/libwebrtc/build/android/gyp/bundletool.py
new file mode 100755
index 0000000000..372e55226d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/bundletool.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple wrapper around the bundletool tool.
+
+Bundletool is distributed as a versioned jar file. This script abstracts the
+location and version of this jar file, as well as the JVM invokation."""
+
+import logging
+import os
+import sys
+
+from util import build_utils
+
+# Assume this is stored under build/android/gyp/
+BUNDLETOOL_DIR = os.path.abspath(os.path.join(
+ __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools',
+ 'bundletool'))
+
+BUNDLETOOL_VERSION = '1.8.0'
+
+BUNDLETOOL_JAR_PATH = os.path.join(
+ BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION)
+
+
def RunBundleTool(args, warnings_as_errors=(), print_stdout=False):
  """Runs the bundletool jar with the given arguments.

  Args:
    args: Command-line arguments forwarded to bundletool.
    warnings_as_errors: Defaults to the () sentinel so an unset command-line
      flag (None) is distinguishable from "no preference".
    print_stdout: Whether to echo bundletool's stdout.

  Returns:
    The output of build_utils.CheckOutput for the bundletool invocation.
  """
  # Use () instead of None because command-line flags are None by default.
  verify = warnings_as_errors == () or warnings_as_errors
  # ASAN builds failed with the default of 1GB (crbug.com/1120202).
  # Bug for bundletool: https://issuetracker.google.com/issues/165911616
  cmd = build_utils.JavaCmd(verify, xmx='4G')
  cmd += ['-jar', BUNDLETOOL_JAR_PATH]
  cmd += args
  logging.debug(' '.join(cmd))
  return build_utils.CheckOutput(
      cmd,
      print_stdout=print_stdout,
      print_stderr=True,
      fail_on_output=False,
      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings)
+
+
+if __name__ == '__main__':
+ RunBundleTool(sys.argv[1:], print_stdout=True)
diff --git a/third_party/libwebrtc/build/android/gyp/bytecode_processor.py b/third_party/libwebrtc/build/android/gyp/bytecode_processor.py
new file mode 100755
index 0000000000..d77f159d82
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/bytecode_processor.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps bin/helper/bytecode_processor and expands @FileArgs."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import server_utils
+
+
+def _AddSwitch(parser, val):
+ parser.add_argument(
+ val, action='store_const', default='--disabled', const=val)
+
+
def main(argv):
  """Runs the bytecode_processor java helper, expanding @FileArgs first.

  May instead hand the command to the build server via
  server_utils.MaybeRunCommand; touches --stamp on success.
  """
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  parser.add_argument('--target-name', help='Fully qualified GN target name.')
  parser.add_argument('--script', required=True,
                      help='Path to the java binary wrapper script.')
  parser.add_argument('--gn-target', required=True)
  parser.add_argument('--input-jar', required=True)
  parser.add_argument('--direct-classpath-jars')
  parser.add_argument('--sdk-classpath-jars')
  parser.add_argument('--full-classpath-jars')
  parser.add_argument('--full-classpath-gn-targets')
  parser.add_argument('--stamp')
  parser.add_argument('-v', '--verbose', action='store_true')
  parser.add_argument('--missing-classes-allowlist')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  _AddSwitch(parser, '--is-prebuilt')
  args = parser.parse_args(argv)

  # If the build server accepts the command, it runs it asynchronously and
  # this process is done.
  if server_utils.MaybeRunCommand(name=args.target_name,
                                  argv=sys.argv,
                                  stamp_file=args.stamp):
    return

  # GN lists arrive as serialized strings; convert each to a python list.
  args.sdk_classpath_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
  args.direct_classpath_jars = build_utils.ParseGnList(
      args.direct_classpath_jars)
  args.full_classpath_jars = build_utils.ParseGnList(args.full_classpath_jars)
  args.full_classpath_gn_targets = build_utils.ParseGnList(
      args.full_classpath_gn_targets)
  args.missing_classes_allowlist = build_utils.ParseGnList(
      args.missing_classes_allowlist)

  verbose = '--verbose' if args.verbose else '--not-verbose'

  # Each list is passed to the helper as its length followed by its elements.
  cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt]
  cmd += [str(len(args.missing_classes_allowlist))]
  cmd += args.missing_classes_allowlist
  cmd += [str(len(args.sdk_classpath_jars))]
  cmd += args.sdk_classpath_jars
  cmd += [str(len(args.direct_classpath_jars))]
  cmd += args.direct_classpath_jars
  cmd += [str(len(args.full_classpath_jars))]
  cmd += args.full_classpath_jars
  cmd += [str(len(args.full_classpath_gn_targets))]
  cmd += args.full_classpath_gn_targets
  build_utils.CheckOutput(cmd,
                          print_stdout=True,
                          fail_func=None,
                          fail_on_output=args.warnings_as_errors)

  if args.stamp:
    build_utils.Touch(args.stamp)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/third_party/libwebrtc/build/android/gyp/bytecode_processor.pydeps b/third_party/libwebrtc/build/android/gyp/bytecode_processor.pydeps
new file mode 100644
index 0000000000..6105d934da
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/bytecode_processor.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py
+../../gn_helpers.py
+bytecode_processor.py
+util/__init__.py
+util/build_utils.py
+util/server_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.py b/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.py
new file mode 100755
index 0000000000..ad232df038
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wrapper script around ByteCodeRewriter subclass scripts."""
+
+import argparse
+import sys
+
+from util import build_utils
+
+
def main(argv):
  """Invoke a ByteCodeRewriter wrapper script on a single input jar."""
  expanded_args = build_utils.ExpandFileArgs(argv[1:])

  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--script',
                      required=True,
                      help='Path to the java binary wrapper script.')
  parser.add_argument('--classpath', action='append', nargs='+')
  parser.add_argument('--input-jar', required=True)
  parser.add_argument('--output-jar', required=True)
  parsed = parser.parse_args(expanded_args)

  # Flatten the GN-style list into a plain list of jar paths.
  classpath = build_utils.ParseGnList(parsed.classpath)
  # The rewritten output jar depends on every jar on the classpath.
  build_utils.WriteDepfile(parsed.depfile, parsed.output_jar, inputs=classpath)

  classpath.append(parsed.input_jar)
  cmd = [
      parsed.script,
      '--classpath',
      ':'.join(classpath),
      parsed.input_jar,
      parsed.output_jar,
  ]
  build_utils.CheckOutput(cmd, print_stdout=True)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.pydeps b/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.pydeps
new file mode 100644
index 0000000000..b8f304a783
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/bytecode_rewriter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py
+../../gn_helpers.py
+bytecode_rewriter.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/check_flag_expectations.py b/third_party/libwebrtc/build/android/gyp/check_flag_expectations.py
new file mode 100755
index 0000000000..22da211f36
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/check_flag_expectations.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+
+from util import build_utils
+from util import diff_utils
+
# Flag prefixes that are expected to vary between builds (debug info, include
# paths, profiling, caches, linker bookkeeping, ...) and are therefore
# stripped before comparing against the checked-in .expected file.
IGNORE_FLAG_PREFIXES = [
    # For cflags.
    '-DANDROID_NDK_VERSION_ROLL',
    '-DCR_LIBCXX_REVISION',
    '-I',
    '-g',
    '-fcrash-diagnostics-dir=',
    '-fprofile',
    '--no-system-header-prefix',
    '--system-header-prefix',
    '-isystem',
    '-iquote',
    '-fmodule-map',
    '-frandom-seed',
    '-c ',
    '-o ',
    '-fmodule-name=',
    '--sysroot=',
    '-fcolor-diagnostics',
    '-MF ',
    '-MD',

    # For ldflags.
    '-Wl,--thinlto-cache-dir',
    '-Wl,--thinlto-cache-policy',
    '-Wl,--thinlto-jobs',
    '-Wl,--start-lib',
    '-Wl,--end-lib',
    '-Wl,-whole-archive',
    '-Wl,-no-whole-archive',
    '-l',
    '-L',
    '-Wl,-soname',
    '-Wl,-version-script',
    '-Wl,--version-script',
    '-fdiagnostics-color',
    '-Wl,--color-diagnostics',
    '-B',
    '-Wl,--dynamic-linker',
    '-DCR_CLANG_REVISION=',
]

# Flags that always consume the following token as a parameter;
# _MergeFlagsHelper joins such pairs into a single "flag param" entry.
FLAGS_WITH_PARAMS = (
    '-Xclang',
    '-mllvm',
    '-Xclang -fdebug-compilation-dir',
    '-Xclang -add-plugin',
)
+
+
def KeepFlag(flag):
  """Return True unless |flag| starts with one of the ignored prefixes."""
  for prefix in IGNORE_FLAG_PREFIXES:
    if flag.startswith(prefix):
      return False
  return True
+
+
def MergeFlags(flags):
  """Joins flags with their parameters into single space-separated entries.

  Runs the merge helper twice so that double params
  (eg: -Xclang -fdebug-compilation-dir) are fully merged.
  """
  once_merged = _MergeFlagsHelper(flags)
  return _MergeFlagsHelper(once_merged)
+
+
def _MergeFlagsHelper(flags):
  """Single merge pass: join each flag that takes a parameter with the token
  that follows it.

  Fixes over the previous pop()-based version:
  - No longer crashes (IndexError) and no longer emits a bogus 'flag None'
    entry when the final flag is in FLAGS_WITH_PARAMS but has no following
    token.
  - Does not destructively consume the caller's list.

  Args:
    flags: List of flag strings.

  Returns:
    A new list where "flag param" pairs are single space-joined entries.
  """
  merged_flags = []
  i = 0
  while i < len(flags):
    current_flag = flags[i]
    next_flag = flags[i + 1] if i + 1 < len(flags) else None

    merge_flags = False
    # Special case some flags that always come with params.
    if current_flag in FLAGS_WITH_PARAMS:
      merge_flags = True
    # Assume flags without '-' are a param.
    if next_flag and not next_flag.startswith('-'):
      merge_flags = True
    # Special case -plugin-arg prefix because it has the plugin name.
    if current_flag.startswith('-Xclang -plugin-arg'):
      merge_flags = True

    if merge_flags and next_flag is not None:
      merged_flags.append('{} {}'.format(current_flag, next_flag))
      i += 2
    else:
      merged_flags.append(current_flag)
      i += 1
  return merged_flags
+
+
def ParseFlags(flag_file_path):
  """Reads |flag_file_path| and returns the flags that are not ignored."""
  with open(flag_file_path) as flag_file:
    raw_lines = flag_file.read().splitlines()
  return [flag for flag in raw_lines if KeepFlag(flag)]
+
+
def main():
  """Compare the flags with the checked in list."""
  parser = argparse.ArgumentParser()
  diff_utils.AddCommandLineFlags(parser)
  parser.add_argument('--current-flags',
                      help='Path to flags to check against expectations.')
  options = parser.parse_args()

  # Normalize the flag list before diffing against expectations.
  current_flags = MergeFlags(ParseFlags(options.current_flags))

  msg = """
This expectation file is meant to inform the build team about changes to
flags used when building native libraries in chrome (most importantly any
that relate to security). This is to ensure the flags are replicated when
building native libraries outside of the repo. Please update the .expected
files and a WATCHLIST entry will alert the build team to your change."""
  diff_utils.CheckExpectations('\n'.join(sorted(current_flags)),
                               options,
                               custom_msg=msg)


if __name__ == '__main__':
  main()
diff --git a/third_party/libwebrtc/build/android/gyp/check_flag_expectations.pydeps b/third_party/libwebrtc/build/android/gyp/check_flag_expectations.pydeps
new file mode 100644
index 0000000000..d8c394a04c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/check_flag_expectations.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py
+../../gn_helpers.py
+check_flag_expectations.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/compile_java.py b/third_party/libwebrtc/build/android/gyp/compile_java.py
new file mode 100755
index 0000000000..b11665e2a7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/compile_java.py
@@ -0,0 +1,787 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import multiprocessing
+import optparse
+import os
+import re
+import shutil
+import sys
+import time
+import zipfile
+
+import javac_output_processor
+from util import build_utils
+from util import md5_check
+from util import jar_info_utils
+from util import server_utils
+
# Path to the prebuilt Kythe javac extractor jar, used only when
# --enable-kythe-annotations is passed (codesearch kzip generation).
_JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party',
                                'android_prebuilts', 'build_tools', 'common',
                                'framework', 'javac_extractor.jar')
+
# Add a check here to cause the suggested fix to be applied while compiling.
# Use this when trying to enable more checks.
ERRORPRONE_CHECKS_TO_APPLY = []

# Full list of checks: https://errorprone.info/bugpatterns
# Note: 'FunctionalInterfaceClash' was previously listed twice; the duplicate
# entry has been removed.
ERRORPRONE_WARNINGS_TO_DISABLE = [
    # The following are super useful, but existing issues need to be fixed first
    # before they can start failing the build on new errors.
    'InvalidParam',
    'InvalidLink',
    'InvalidInlineTag',
    'EmptyBlockTag',
    'PublicConstructorForAbstractClass',
    'InvalidBlockTag',
    'StaticAssignmentInConstructor',
    'MutablePublicArray',
    'UnescapedEntity',
    'NonCanonicalType',
    'AlmostJavadoc',
    # The following are added for errorprone update: https://crbug.com/1216032
    'InlineMeSuggester',
    'DoNotClaimAnnotations',
    'JavaUtilDate',
    'IdentityHashMapUsage',
    'UnnecessaryMethodReference',
    'LongFloatConversion',
    'CharacterGetNumericValue',
    'ErroneousThreadPoolConstructorChecker',
    'StaticMockMember',
    'MissingSuperCall',
    'ToStringReturnsNull',
    # TODO(crbug.com/834807): Follow steps in bug
    'DoubleBraceInitialization',
    # TODO(crbug.com/834790): Follow steps in bug.
    'CatchAndPrintStackTrace',
    # TODO(crbug.com/801210): Follow steps in bug.
    'SynchronizeOnNonFinalField',
    # TODO(crbug.com/802073): Follow steps in bug.
    'TypeParameterUnusedInFormals',
    # TODO(crbug.com/803484): Follow steps in bug.
    'CatchFail',
    # TODO(crbug.com/803485): Follow steps in bug.
    'JUnitAmbiguousTestClass',
    # Android platform default is always UTF-8.
    # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset()
    'DefaultCharset',
    # Low priority since there are lots of tags that don't fit this check.
    'UnrecognisedJavadocTag',
    # Low priority since the alternatives still work.
    'JdkObsolete',
    # We don't use that many lambdas.
    'FunctionalInterfaceClash',
    # There are lots of times when we just want to post a task.
    'FutureReturnValueIgnored',
    # Nice to be explicit about operators, but not necessary.
    'OperatorPrecedence',
    # Just false positives in our code.
    'ThreadJoinLoop',
    # Low priority corner cases with String.split.
    # Linking Guava and using Splitter was rejected
    # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630.
    'StringSplitter',
    # Preferred to use another method since it propagates exceptions better.
    'ClassNewInstance',
    # Nice to have static inner classes but not necessary.
    'ClassCanBeStatic',
    # Explicit is better than implicit.
    'FloatCast',
    # Results in false positives.
    'ThreadLocalUsage',
    # Also just false positives.
    'Finally',
    # False positives for Chromium.
    'FragmentNotInstantiable',
    # Low priority to fix.
    'HidingField',
    # Low priority.
    'IntLongMath',
    # Low priority.
    'BadComparable',
    # Low priority.
    'EqualsHashCode',
    # Nice to fix but low priority.
    'TypeParameterShadowing',
    # Good to have immutable enums, also low priority.
    'ImmutableEnumChecker',
    # False positives for testing.
    'InputStreamSlowMultibyteRead',
    # Nice to have better primitives.
    'BoxedPrimitiveConstructor',
    # Not necessary for tests.
    'OverrideThrowableToString',
    # Nice to have better type safety.
    'CollectionToArraySafeParameter',
    # Makes logcat debugging more difficult, and does not provide obvious
    # benefits in the Chromium codebase.
    'ObjectToString',
    # Triggers on private methods that are @CalledByNative.
    'UnusedMethod',
    # Triggers on generated R.java files.
    'UnusedVariable',
    # Not that useful.
    'UnsafeReflectiveConstructionCast',
    # Not that useful.
    'MixedMutabilityReturnType',
    # Nice to have.
    'EqualsGetClass',
    # A lot of false-positives from CharSequence.equals().
    'UndefinedEquals',
    # Nice to have.
    'ExtendingJUnitAssert',
    # Nice to have.
    'SystemExitOutsideMain',
    # Nice to have.
    'TypeParameterNaming',
    # Nice to have.
    'UnusedException',
    # Nice to have.
    'UngroupedOverloads',
    # Nice to have.
    'InconsistentOverloads',
    # Dagger generated code triggers this.
    'SameNameButDifferent',
    # Nice to have.
    'UnnecessaryLambda',
    # Nice to have.
    'UnnecessaryAnonymousClass',
    # Nice to have.
    'LiteProtoToString',
    # Nice to have.
    'MissingSummary',
    # Nice to have.
    'ReturnFromVoid',
    # Nice to have.
    'EmptyCatch',
    # Nice to have.
    'BadImport',
    # Nice to have.
    'UseCorrectAssertInTests',
    # Nice to have.
    'InlineFormatString',
    # Nice to have.
    'DefaultPackage',
    # Must be off since we are now passing in annotation processor generated
    # code as a source jar (deduplicating work with turbine).
    'RefersToDaggerCodegen',
    # We already have presubmit checks for this. Not necessary to warn on
    # every build.
    'RemoveUnusedImports',
    # We do not care about unnecessary parenthesis enough to check for them.
    'UnnecessaryParentheses',
]

# Full list of checks: https://errorprone.info/bugpatterns
# Only those marked as "experimental" need to be listed here in order to be
# enabled.
ERRORPRONE_WARNINGS_TO_ENABLE = [
    'BinderIdentityRestoredDangerously',
    'EmptyIf',
    'EqualsBrokenForNull',
    'InvalidThrows',
    'LongLiteralLowerCaseSuffix',
    'MultiVariableDeclaration',
    'RedundantOverride',
    'StaticQualifiedUsingExpression',
    'StringEquality',
    'TimeUnitMismatch',
    'UnnecessaryStaticImport',
    'UseBinds',
    'WildcardImport',
]
+
+
def ProcessJavacOutput(output, target_name):
  """Filters and annotates raw javac output for |target_name|.

  Drops unsuppressable javac notes (deprecation / unchecked / recompile
  hints), appends a FragmentActivity hint to "location: class Activity"
  lines, and pipes the result through JavacOutputProcessor.
  """
  # These warnings cannot be suppressed even for third party code. Deprecation
  # warnings especially do not help since we must support older android
  # versions.
  drop_patterns = (
      re.compile(r'(Note: .* uses? or overrides? a deprecated API.)$'),
      re.compile(r'(Note: .* uses? unchecked or unsafe operations.)$'),
      re.compile(r'(Note: Recompile with -Xlint:.* for details.)$'),
  )
  activity_re = re.compile(r'^(?P<prefix>\s*location: )class Activity$')

  def _keep(line):
    return all(pattern.match(line) is None for pattern in drop_patterns)

  def _elaborate(line):
    match = activity_re.match(line)
    if not match:
      return line
    prefix = ' ' * match.end('prefix')
    return '{}\n{}Expecting a FragmentActivity? See {}'.format(
        line, prefix, 'docs/ui/android/bytecode_rewriting.md')

  output = build_utils.FilterReflectiveAccessJavaWarnings(output)

  annotated = (_elaborate(l) for l in output.split('\n') if _keep(l))
  output_processor = javac_output_processor.JavacOutputProcessor(target_name)
  return '\n'.join(output_processor.Process(annotated))
+
+
+def _ParsePackageAndClassNames(java_file):
+ package_name = ''
+ class_names = []
+ with open(java_file) as f:
+ for l in f:
+ # Strip unindented comments.
+ # Considers a leading * as a continuation of a multi-line comment (our
+ # linter doesn't enforce a space before it like there should be).
+ l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l)
+
+ m = re.match(r'package\s+(.*?);', l)
+ if m and not package_name:
+ package_name = m.group(1)
+
+ # Not exactly a proper parser, but works for sources that Chrome uses.
+ # In order to not match nested classes, it just checks for lack of indent.
+ m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l)
+ if m:
+ class_names.append(m.group(1))
+ return package_name, class_names
+
+
def _ProcessJavaFileForInfo(java_file):
  """Pool worker: returns (path, package name, class names) for one file."""
  return (java_file, ) + _ParsePackageAndClassNames(java_file)
+
+
class _InfoFileContext(object):
  """Manages the creation of the class->source file .info file."""

  def __init__(self, chromium_code, excluded_globs):
    # Whether sources are first-party; enables the stricter
    # one-class-per-file and path-matches-package checks in _ProcessInfo().
    self._chromium_code = chromium_code
    # Globs of .class paths to omit from the .info file.
    self._excluded_globs = excluded_globs
    # Map of .java path -> .srcjar/nested/path.java.
    self._srcjar_files = {}
    # List of generators from pool.imap_unordered().
    self._results = []
    # Lazily created multiprocessing.Pool.
    self._pool = None

  def AddSrcJarSources(self, srcjar_path, extracted_paths, parent_dir):
    """Records that each of |extracted_paths| was extracted from |srcjar_path|."""
    for path in extracted_paths:
      # We want the path inside the srcjar so the viewer can have a tree
      # structure.
      self._srcjar_files[path] = '{}/{}'.format(
          srcjar_path, os.path.relpath(path, parent_dir))

  def SubmitFiles(self, java_files):
    """Queues |java_files| for background package/class-name parsing."""
    if self._pool is None:
      # Restrict to just one process to not slow down compiling. Compiling
      # is always slower.
      self._pool = multiprocessing.Pool(1)
    logging.info('Submitting %d files for info', len(java_files))
    self._results.append(
        self._pool.imap_unordered(
            _ProcessJavaFileForInfo, java_files, chunksize=1000))

  def _CheckPathMatchesClassName(self, java_file, package_name, class_name):
    # E.g. package a.b with class C must live at a path ending .../a/b/C.java.
    parts = package_name.split('.') + [class_name + '.java']
    expected_path_suffix = os.path.sep.join(parts)
    if not java_file.endswith(expected_path_suffix):
      raise Exception(('Java package+class name do not match its path.\n'
                       'Actual path: %s\nExpected path: %s') %
                      (java_file, expected_path_suffix))

  def _ProcessInfo(self, java_file, package_name, class_names, source):
    """Yields fully-qualified class names, validating first-party sources."""
    for class_name in class_names:
      yield '{}.{}'.format(package_name, class_name)
      # Skip aidl srcjars since they don't indent code correctly.
      if '_aidl.srcjar' in source:
        continue
      assert not self._chromium_code or len(class_names) == 1, (
          'Chromium java files must only have one class: {}'.format(source))
      if self._chromium_code:
        # This check is not necessary but nice to check this somewhere.
        self._CheckPathMatchesClassName(java_file, package_name, class_names[0])

  def _ShouldIncludeInJarInfo(self, fully_qualified_name):
    # Exclusion globs are written against the .class path form.
    name_as_class_glob = fully_qualified_name.replace('.', '/') + '.class'
    return not build_utils.MatchesGlob(name_as_class_glob, self._excluded_globs)

  def _Collect(self):
    """Drains worker results into a {fully qualified name: java path} dict."""
    if self._pool is None:
      return {}
    ret = {}
    for result in self._results:
      for java_file, package_name, class_names in result:
        source = self._srcjar_files.get(java_file, java_file)
        for fully_qualified_name in self._ProcessInfo(java_file, package_name,
                                                      class_names, source):
          if self._ShouldIncludeInJarInfo(fully_qualified_name):
            ret[fully_qualified_name] = java_file
    self._pool.terminate()
    return ret

  def __del__(self):
    # Work around for Python 2.x bug with multiprocessing and daemon threads:
    # https://bugs.python.org/issue4106
    if self._pool is not None:
      logging.info('Joining multiprocessing.Pool')
      self._pool.terminate()
      self._pool.join()
      logging.info('Done.')

  def Commit(self, output_path):
    """Writes a .jar.info file.

    Maps fully qualified names for classes to either the java file that they
    are defined in or the path of the srcjar that they came from.
    """
    logging.info('Collecting info file entries')
    entries = self._Collect()

    logging.info('Writing info file: %s', output_path)
    with build_utils.AtomicOutput(output_path, mode='wb') as f:
      jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files)
    logging.info('Completed info file: %s', output_path)
+
+
def _CreateJarFile(jar_path, service_provider_configuration_dir,
                   additional_jar_files, classes_dir):
  """Zips compiled classes (plus optional extras) into |jar_path|."""
  logging.info('Start creating jar file: %s', jar_path)
  with build_utils.AtomicOutput(jar_path) as f:
    with zipfile.ZipFile(f.name, 'w') as z:
      build_utils.ZipDir(z, classes_dir)
      if service_provider_configuration_dir:
        # Copy extracted META-INF/services/* files into the jar at the same
        # relative paths they have under the configuration directory.
        for config_file in build_utils.FindInDirectory(
            service_provider_configuration_dir):
          zip_path = os.path.relpath(config_file,
                                     service_provider_configuration_dir)
          build_utils.AddToZipHermetic(z, zip_path, src_path=config_file)

      for src_path, zip_path in (additional_jar_files or []):
        build_utils.AddToZipHermetic(z, zip_path, src_path=src_path)
  logging.info('Completed jar file: %s', jar_path)
+
+
def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files):
  """Rebuild callback invoked by md5_check when inputs have changed.

  Optionally runs the Kythe extractor first (for codesearch), then performs
  the real compile via _RunCompiler().

  Args:
    changes: md5_check.Changes object describing what changed.
    options: Parsed command-line options.
    javac_cmd: Base compiler command (possibly goma-prefixed).
    javac_args: Arguments to pass to the compiler.
    java_files: List of .java source paths.
  """
  logging.info('Starting _OnStaleMd5')
  if options.enable_kythe_annotations:
    # Kythe requires those env variables to be set and compile_java.py does the
    # same
    if not os.environ.get('KYTHE_ROOT_DIRECTORY') or \
        not os.environ.get('KYTHE_OUTPUT_DIRECTORY'):
      raise Exception('--enable-kythe-annotations requires '
                      'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY '
                      'environment variables to be set.')
    javac_extractor_cmd = build_utils.JavaCmd() + [
        '-jar',
        _JAVAC_EXTRACTOR,
    ]
    try:
      # _RunCompiler()'s partial javac implementation does not support
      # generating outputs in $KYTHE_OUTPUT_DIRECTORY.
      _RunCompiler(changes,
                   options,
                   javac_extractor_cmd + javac_args,
                   java_files,
                   options.jar_path + '.javac_extractor',
                   enable_partial_javac=False)
    except build_utils.CalledProcessError as e:
      # Having no index for particular target is better than failing entire
      # codesearch. Log an error and move on.
      logging.error('Could not generate kzip: %s', e)

  intermediates_out_dir = None
  jar_info_path = None
  if not options.enable_errorprone:
    # Delete any stale files in the generated directory. The purpose of
    # options.generated_dir is for codesearch.
    shutil.rmtree(options.generated_dir, True)
    intermediates_out_dir = options.generated_dir

    jar_info_path = options.jar_path + '.info'

  # Compiles with Error Prone take twice as long to run as pure javac. Thus GN
  # rules run both in parallel, with Error Prone only used for checks.
  _RunCompiler(changes,
               options,
               javac_cmd + javac_args,
               java_files,
               options.jar_path,
               jar_info_path=jar_info_path,
               intermediates_out_dir=intermediates_out_dir,
               enable_partial_javac=True)
  logging.info('Completed all steps in _OnStaleMd5')
+
+
def _RunCompiler(changes,
                 options,
                 javac_cmd,
                 java_files,
                 jar_path,
                 jar_info_path=None,
                 intermediates_out_dir=None,
                 enable_partial_javac=False):
  """Runs java compiler.

  Args:
    changes: md5_check.Changes object.
    options: Object with command line flags.
    javac_cmd: Command to execute.
    java_files: List of java files passed from command line.
    jar_path: Path of output jar file.
    jar_info_path: Path of the .info file to generate.
      If None, .info file will not be generated.
    intermediates_out_dir: Directory for saving intermediate outputs.
      If None a temporary directory is used.
    enable_partial_javac: Enables compiling only Java files which have changed
      in the special case that no method signatures have changed. This is
      useful for large GN targets.
      Not supported if compiling generates outputs other than |jar_path| and
      |jar_info_path|.
  """
  logging.info('Starting _RunCompiler')

  # Copy so the partial-javac path below can replace the list without
  # mutating the caller's argument.
  java_files = java_files.copy()
  java_srcjars = options.java_srcjars
  save_info_file = jar_info_path is not None

  # Use jar_path's directory to ensure paths are relative (needed for goma).
  temp_dir = jar_path + '.staging'
  shutil.rmtree(temp_dir, True)
  os.makedirs(temp_dir)
  # The finally below guarantees the staging dir is removed even on failure.
  try:
    classes_dir = os.path.join(temp_dir, 'classes')
    service_provider_configuration = os.path.join(
        temp_dir, 'service_provider_configuration')

    if java_files:
      os.makedirs(classes_dir)

      if enable_partial_javac:
        all_changed_paths_are_java = all(
            [p.endswith(".java") for p in changes.IterChangedPaths()])
        if (all_changed_paths_are_java and not changes.HasStringChanges()
            and os.path.exists(jar_path)
            and (jar_info_path is None or os.path.exists(jar_info_path))):
          # Log message is used by tests to determine whether partial javac
          # optimization was used.
          logging.info('Using partial javac optimization for %s compile' %
                       (jar_path))

          # Header jar corresponding to |java_files| did not change.
          # As a build speed optimization (crbug.com/1170778), re-compile only
          # java files which have changed. Re-use old jar .info file.
          java_files = list(changes.IterChangedPaths())
          java_srcjars = None

          # Reuse old .info file.
          save_info_file = False

          # Seed the classes dir with the previous output so unchanged
          # classes are carried over into the new jar.
          build_utils.ExtractAll(jar_path, classes_dir)

    if save_info_file:
      # Created lazily; only needed when a .info file will be written.
      info_file_context = _InfoFileContext(options.chromium_code,
                                           options.jar_info_exclude_globs)

    if intermediates_out_dir is None:
      input_srcjars_dir = os.path.join(temp_dir, 'input_srcjars')
    else:
      input_srcjars_dir = os.path.join(intermediates_out_dir, 'input_srcjars')

    if java_srcjars:
      logging.info('Extracting srcjars to %s', input_srcjars_dir)
      build_utils.MakeDirectory(input_srcjars_dir)
      for srcjar in options.java_srcjars:
        extracted_files = build_utils.ExtractAll(
            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
        java_files.extend(extracted_files)
        if save_info_file:
          info_file_context.AddSrcJarSources(srcjar, extracted_files,
                                             input_srcjars_dir)
      logging.info('Done extracting srcjars')

    if options.header_jar:
      logging.info('Extracting service provider configs')
      # Extract META-INF/services/* so that it can be copied into the output
      # .jar
      build_utils.ExtractAll(options.header_jar,
                             no_clobber=True,
                             path=service_provider_configuration,
                             pattern='META-INF/services/*')
      logging.info('Done extracting service provider configs')

    if save_info_file and java_files:
      info_file_context.SubmitFiles(java_files)

    if java_files:
      # Don't include the output directory in the initial set of args since it
      # being in a temp dir makes it unstable (breaks md5 stamping).
      cmd = list(javac_cmd)
      cmd += ['-d', classes_dir]

      if options.classpath:
        cmd += ['-classpath', ':'.join(options.classpath)]

      # Pass source paths as response files to avoid extremely long command
      # lines that are tedious to debug.
      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
      with open(java_files_rsp_path, 'w') as f:
        f.write(' '.join(java_files))
      cmd += ['@' + java_files_rsp_path]

      process_javac_output_partial = functools.partial(
          ProcessJavacOutput, target_name=options.target_name)

      logging.debug('Build command %s', cmd)
      start = time.time()
      build_utils.CheckOutput(cmd,
                              print_stdout=options.chromium_code,
                              stdout_filter=process_javac_output_partial,
                              stderr_filter=process_javac_output_partial,
                              fail_on_output=options.warnings_as_errors)
      end = time.time() - start
      logging.info('Java compilation took %ss', end)

    _CreateJarFile(jar_path, service_provider_configuration,
                   options.additional_jar_files, classes_dir)

    if save_info_file:
      info_file_context.Commit(jar_info_path)

    logging.info('Completed all steps in _RunCompiler')
  finally:
    shutil.rmtree(temp_dir)
+
+
def _ParseOptions(argv):
  """Parses command-line arguments.

  Returns:
    A (options, java_files) tuple, where |java_files| is the list of .java
    source paths gathered from positional arguments (entries prefixed with
    '@' are read as response files containing source lists).
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--target-name', help='Fully qualified GN target name.')
  parser.add_option('--skip-build-server',
                    action='store_true',
                    help='Avoid using the build server.')
  parser.add_option(
      '--java-srcjars',
      action='append',
      default=[],
      help='List of srcjars to include in compilation.')
  parser.add_option(
      '--generated-dir',
      help='Subdirectory within target_gen_dir to place extracted srcjars and '
      'annotation processor output for codesearch to find.')
  parser.add_option(
      '--bootclasspath',
      action='append',
      default=[],
      help='Boot classpath for javac. If this is specified multiple times, '
      'they will all be appended to construct the classpath.')
  parser.add_option(
      '--java-version',
      help='Java language version to use in -source and -target args to javac.')
  parser.add_option('--classpath', action='append', help='Classpath to use.')
  parser.add_option(
      '--processorpath',
      action='append',
      help='GN list of jars that comprise the classpath used for Annotation '
      'Processors.')
  parser.add_option(
      '--processor-arg',
      dest='processor_args',
      action='append',
      help='key=value arguments for the annotation processors.')
  parser.add_option(
      '--additional-jar-file',
      dest='additional_jar_files',
      action='append',
      help='Additional files to package into jar. By default, only Java .class '
      'files are packaged into the jar. Files should be specified in '
      'format <filename>:<path to be placed in jar>.')
  parser.add_option(
      '--jar-info-exclude-globs',
      help='GN list of exclude globs to filter from generated .info files.')
  parser.add_option(
      '--chromium-code',
      type='int',
      help='Whether code being compiled should be built with stricter '
      'warnings for chromium code.')
  parser.add_option(
      '--gomacc-path', help='When set, prefix javac command with gomacc')
  parser.add_option(
      '--errorprone-path', help='Use the Errorprone compiler at this path.')
  parser.add_option(
      '--enable-errorprone',
      action='store_true',
      help='Enable errorprone checks')
  parser.add_option(
      '--warnings-as-errors',
      action='store_true',
      help='Treat all warnings as errors.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option(
      '--javac-arg',
      action='append',
      default=[],
      help='Additional arguments to pass to javac.')
  parser.add_option(
      '--enable-kythe-annotations',
      action='store_true',
      help='Enable generation of Kythe kzip, used for codesearch. Ensure '
      'proper environment variables are set before using this flag.')
  parser.add_option(
      '--header-jar',
      help='This is the header jar for the current target that contains '
      'META-INF/services/* files to be included in the output jar.')

  options, args = parser.parse_args(argv)
  build_utils.CheckOptions(options, parser, required=('jar_path', ))

  # GN lists arrive as stringified GN expressions; flatten them to lists here.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)
  options.processorpath = build_utils.ParseGnList(options.processorpath)
  options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
  options.jar_info_exclude_globs = build_utils.ParseGnList(
      options.jar_info_exclude_globs)

  # Split '<src path>:<path in jar>' pairs into tuples.
  additional_jar_files = []
  for arg in options.additional_jar_files or []:
    filepath, jar_filepath = arg.split(':')
    additional_jar_files.append((filepath, jar_filepath))
  options.additional_jar_files = additional_jar_files

  java_files = []
  for arg in args:
    # Interpret a path prefixed with @ as a file containing a list of sources.
    if arg.startswith('@'):
      java_files.extend(build_utils.ReadSourcesList(arg[1:]))
    else:
      java_files.append(arg)

  return options, java_files
+
+
def main(argv):
  """Assembles the javac command line and compiles via md5 staleness checks."""
  build_utils.InitLogging('JAVAC_DEBUG')
  argv = build_utils.ExpandFileArgs(argv)
  options, java_files = _ParseOptions(argv)

  # Only use the build server for errorprone runs.
  if (options.enable_errorprone and not options.skip_build_server
      and server_utils.MaybeRunCommand(name=options.target_name,
                                       argv=sys.argv,
                                       stamp_file=options.jar_path)):
    return

  javac_cmd = []
  if options.gomacc_path:
    javac_cmd.append(options.gomacc_path)
  javac_cmd.append(build_utils.JAVAC_PATH)

  javac_args = [
      '-g',
      # Chromium only allows UTF8 source files. Being explicit avoids
      # javac pulling a default encoding from the user's environment.
      '-encoding',
      'UTF-8',
      # Prevent compiler from compiling .java files not listed as inputs.
      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
      '-sourcepath',
      ':',
  ]

  if options.enable_errorprone:
    # All errorprone args are passed space-separated in a single arg.
    errorprone_flags = ['-Xplugin:ErrorProne']
    # Make everything a warning so that when treat_warnings_as_errors is false,
    # they do not fail the build.
    errorprone_flags += ['-XepAllErrorsAsWarnings']
    # Don't check generated files.
    errorprone_flags += ['-XepDisableWarningsInGeneratedCode']
    errorprone_flags.extend('-Xep:{}:OFF'.format(x)
                            for x in ERRORPRONE_WARNINGS_TO_DISABLE)
    errorprone_flags.extend('-Xep:{}:WARN'.format(x)
                            for x in ERRORPRONE_WARNINGS_TO_ENABLE)

    if ERRORPRONE_CHECKS_TO_APPLY:
      errorprone_flags += [
          '-XepPatchLocation:IN_PLACE',
          '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
      ]

    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]

    # This flag quits errorprone after checks and before code generation, since
    # we do not need errorprone outputs, this speeds up errorprone by 4 seconds
    # for chrome_java.
    if not ERRORPRONE_CHECKS_TO_APPLY:
      javac_args += ['-XDshould-stop.ifNoError=FLOW']

  if options.java_version:
    javac_args.extend([
        '-source',
        options.java_version,
        '-target',
        options.java_version,
    ])
    if options.java_version == '1.8':
      # Android's boot jar doesn't contain all java 8 classes.
      options.bootclasspath.append(build_utils.RT_JAR_PATH)

  # This effectively disables all annotation processors, even including
  # annotation processors in service provider configuration files named
  # META-INF/. See the following link for reference:
  # https://docs.oracle.com/en/java/javase/11/tools/javac.html
  javac_args.extend(['-proc:none'])

  if options.bootclasspath:
    javac_args.extend(['-bootclasspath', ':'.join(options.bootclasspath)])

  if options.processorpath:
    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
  if options.processor_args:
    for arg in options.processor_args:
      javac_args.extend(['-A%s' % arg])

  javac_args.extend(options.javac_arg)

  classpath_inputs = (
      options.bootclasspath + options.classpath + options.processorpath)

  depfile_deps = classpath_inputs
  # Files that are already inputs in GN should go in input_paths.
  input_paths = depfile_deps + options.java_srcjars + java_files
  if options.header_jar:
    input_paths.append(options.header_jar)
  input_paths += [x[0] for x in options.additional_jar_files]

  output_paths = [options.jar_path]
  if not options.enable_errorprone:
    output_paths += [options.jar_path + '.info']

  input_strings = javac_cmd + javac_args + options.classpath + java_files + [
      options.warnings_as_errors, options.jar_info_exclude_globs
  ]

  # Use md5_check for |pass_changes| feature.
  md5_check.CallAndWriteDepfileIfStale(lambda changes: _OnStaleMd5(
      changes, options, javac_cmd, javac_args, java_files),
                                       options,
                                       depfile_deps=depfile_deps,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       pass_changes=True)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/compile_java.pydeps b/third_party/libwebrtc/build/android/gyp/compile_java.pydeps
new file mode 100644
index 0000000000..c1c7d5fd56
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/compile_java.pydeps
@@ -0,0 +1,30 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_java.pydeps build/android/gyp/compile_java.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/colorama/src/colorama/__init__.py
+../../../third_party/colorama/src/colorama/ansi.py
+../../../third_party/colorama/src/colorama/ansitowin32.py
+../../../third_party/colorama/src/colorama/initialise.py
+../../../third_party/colorama/src/colorama/win32.py
+../../../third_party/colorama/src/colorama/winterm.py
+../../../tools/android/modularization/convenience/lookup_dep.py
+../../gn_helpers.py
+../../print_python_deps.py
+../list_java_targets.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+compile_java.py
+javac_output_processor.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/server_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/compile_resources.py b/third_party/libwebrtc/build/android/gyp/compile_resources.py
new file mode 100755
index 0000000000..9add95aed8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/compile_resources.py
@@ -0,0 +1,1032 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compile Android resources into an intermediate APK.
+
+This can also generate an R.txt, and an .srcjar file containing the proper
+final R.java class for all resource packages the APK depends on.
+
+This will crunch images with aapt2.
+"""
+
+import argparse
+import collections
+import contextlib
+import filecmp
+import hashlib
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+import zipfile
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import diff_utils
+from util import manifest_utils
+from util import parallel
+from util import protoresources
+from util import resource_utils
+
+
# Pngs that we shouldn't convert to webp. Please add rationale when updating.
# Matched with .match() against the full path of each candidate .png (see
# _ConvertToWebP).
_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([
    # Crashes on Galaxy S5 running L (https://crbug.com/807059).
    r'.*star_gray\.png',
    # Android requires pngs for 9-patch images.
    r'.*\.9\.png',
    # Daydream requires pngs for icon files.
    r'.*daydream_icon_.*\.png'
]))
+
+
def _ParseArgs(args):
  """Parses command line options.

  Args:
    args: Command-line argument list (excluding the program name).
  Returns:
    An options object as from argparse.ArgumentParser.parse_args()
  """
  # |input_opts| / |output_opts| are argparse argument groups separating
  # input-affecting flags from output-path flags.
  parser, input_opts, output_opts = resource_utils.ResourceArgsParser()

  input_opts.add_argument(
      '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
  input_opts.add_argument(
      '--android-manifest', required=True, help='AndroidManifest.xml path.')
  input_opts.add_argument(
      '--r-java-root-package-name',
      default='base',
      help='Short package name for this target\'s root R java file (ex. '
      'input of "base" would become gen.base_module). Defaults to "base".')
  group = input_opts.add_mutually_exclusive_group()
  group.add_argument(
      '--shared-resources',
      action='store_true',
      help='Make all resources in R.java non-final and allow the resource IDs '
      'to be reset to a different package index when the apk is loaded by '
      'another application at runtime.')
  group.add_argument(
      '--app-as-shared-lib',
      action='store_true',
      help='Same as --shared-resources, but also ensures all resource IDs are '
      'directly usable from the APK loaded as an application.')

  input_opts.add_argument(
      '--package-id',
      type=int,
      help='Decimal integer representing custom package ID for resources '
      '(instead of 127==0x7f). Cannot be used with --shared-resources.')

  input_opts.add_argument(
      '--package-name',
      help='Package name that will be used to create R class.')

  input_opts.add_argument(
      '--rename-manifest-package', help='Package name to force AAPT to use.')

  input_opts.add_argument(
      '--arsc-package-name',
      help='Package name to set in manifest of resources.arsc file. This is '
      'only used for apks under test.')

  input_opts.add_argument(
      '--shared-resources-allowlist',
      help='An R.txt file acting as a allowlist for resources that should be '
      'non-final and have their package ID changed at runtime in R.java. '
      'Implies and overrides --shared-resources.')

  input_opts.add_argument(
      '--shared-resources-allowlist-locales',
      default='[]',
      help='Optional GN-list of locales. If provided, all strings corresponding'
      ' to this locale list will be kept in the final output for the '
      'resources identified through --shared-resources-allowlist, even '
      'if --locale-allowlist is being used.')

  input_opts.add_argument(
      '--use-resource-ids-path',
      help='Use resource IDs generated by aapt --emit-ids.')

  input_opts.add_argument(
      '--extra-main-r-text-files',
      help='Additional R.txt files that will be added to the root R.java file, '
      'but not packaged in the generated resources.arsc. If these resources '
      'entries contain duplicate resources with the generated R.txt file, they '
      'must be identical.')

  input_opts.add_argument(
      '--debuggable',
      action='store_true',
      help='Whether to add android:debuggable="true".')

  input_opts.add_argument('--version-code', help='Version code for apk.')
  input_opts.add_argument('--version-name', help='Version name for apk.')
  input_opts.add_argument(
      '--min-sdk-version', required=True, help='android:minSdkVersion for APK.')
  input_opts.add_argument(
      '--target-sdk-version',
      required=True,
      help="android:targetSdkVersion for APK.")
  input_opts.add_argument(
      '--max-sdk-version',
      help="android:maxSdkVersion expected in AndroidManifest.xml.")
  input_opts.add_argument(
      '--manifest-package', help='Package name of the AndroidManifest.xml.')

  input_opts.add_argument(
      '--locale-allowlist',
      default='[]',
      help='GN list of languages to include. All other language configs will '
      'be stripped out. List may include a combination of Android locales '
      'or Chrome locales.')
  input_opts.add_argument(
      '--resource-exclusion-regex',
      default='',
      help='File-based filter for resources (applied before compiling)')
  input_opts.add_argument(
      '--resource-exclusion-exceptions',
      default='[]',
      help='GN list of globs that say which files to include even '
      'when --resource-exclusion-regex is set.')

  input_opts.add_argument(
      '--dependencies-res-zip-overlays',
      help='GN list with subset of --dependencies-res-zips to use overlay '
      'semantics for.')

  input_opts.add_argument(
      '--values-filter-rules',
      help='GN list of source_glob:regex for filtering resources after they '
      'are compiled. Use this to filter out entries within values/ files.')

  input_opts.add_argument('--png-to-webp', action='store_true',
                          help='Convert png files to webp format.')

  input_opts.add_argument('--webp-binary', default='',
                          help='Path to the cwebp binary.')
  input_opts.add_argument(
      '--webp-cache-dir', help='The directory to store webp image cache.')

  input_opts.add_argument(
      '--no-xml-namespaces',
      action='store_true',
      help='Whether to strip xml namespaces from processed xml resources.')

  output_opts.add_argument('--arsc-path', help='Apk output for arsc format.')
  output_opts.add_argument('--proto-path', help='Apk output for proto format.')
  # NOTE(review): this second mutually-exclusive group is never used and
  # clobbers the |group| variable bound above; it looks like a leftover —
  # confirm before removing.
  group = input_opts.add_mutually_exclusive_group()

  output_opts.add_argument(
      '--info-path', help='Path to output info file for the partial apk.')

  output_opts.add_argument(
      '--srcjar-out',
      required=True,
      help='Path to srcjar to contain generated R.java.')

  output_opts.add_argument('--r-text-out',
                           help='Path to store the generated R.txt file.')

  output_opts.add_argument(
      '--proguard-file', help='Path to proguard.txt generated file.')

  output_opts.add_argument(
      '--proguard-file-main-dex',
      help='Path to proguard.txt generated file for main dex.')

  output_opts.add_argument(
      '--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.')

  input_opts.add_argument(
      '--is-bundle-module',
      action='store_true',
      help='Whether resources are being generated for a bundle module.')

  input_opts.add_argument(
      '--uses-split',
      help='Value to set uses-split to in the AndroidManifest.xml.')

  input_opts.add_argument(
      '--extra-verification-manifest',
      help='Path to AndroidManifest.xml which should be merged into base '
      'manifest when performing verification.')

  diff_utils.AddCommandLineFlags(parser)
  options = parser.parse_args(args)

  resource_utils.HandleCommonOptions(options)

  # GN lists arrive as single strings; expand them into Python lists.
  options.locale_allowlist = build_utils.ParseGnList(options.locale_allowlist)
  options.shared_resources_allowlist_locales = build_utils.ParseGnList(
      options.shared_resources_allowlist_locales)
  options.resource_exclusion_exceptions = build_utils.ParseGnList(
      options.resource_exclusion_exceptions)
  options.dependencies_res_zip_overlays = build_utils.ParseGnList(
      options.dependencies_res_zip_overlays)
  options.values_filter_rules = build_utils.ParseGnList(
      options.values_filter_rules)
  options.extra_main_r_text_files = build_utils.ParseGnList(
      options.extra_main_r_text_files)

  # Cross-flag validation that argparse groups cannot express.
  if not options.arsc_path and not options.proto_path:
    parser.error('One of --arsc-path or --proto-path is required.')

  if options.package_id and options.shared_resources:
    parser.error('--package-id and --shared-resources are mutually exclusive')

  return options
+
+
+def _IterFiles(root_dir):
+ for root, _, files in os.walk(root_dir):
+ for f in files:
+ yield os.path.join(root, f)
+
+
def _RenameLocaleResourceDirs(resource_dirs, path_info):
  """Rename locale resource directories into standard names when necessary.

  This is necessary to deal with the fact that older Android releases only
  support ISO 639-1 two-letter codes, and sometimes even obsolete versions
  of them.

  In practice it means:
    * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
      2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
      to a new corresponding values-tl/ sub-directory.

    * Modern ISO 639-1 codes will be renamed to their obsolete variant
      for Indonesian, Hebrew and Yiddish (e.g. 'values-in/ -> values-id/).

    * Norwegian macrolanguage strings will be renamed to Bokmal (main
      Norway language). See http://crbug.com/920960. In practice this
      means that 'values-no/ -> values-nb/' unless 'values-nb/' already
      exists.

    * BCP 47 language tags will be renamed to an equivalent ISO 639-1
      locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS').

  Args:
    resource_dirs: list of top-level resource directories.
    path_info: resource_utils.ResourceInfoFile used to record renames.
  """
  for resource_dir in resource_dirs:
    # Maps config dir -> True when the destination config already existed
    # (in which case conflicting files are skipped rather than renamed).
    ignore_dirs = {}
    for path in _IterFiles(resource_dir):
      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
      if not locale:
        continue
      cr_locale = resource_utils.ToChromiumLocaleName(locale)
      if not cr_locale:
        continue  # Unsupported Android locale qualifier!?
      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
      if locale != locale2:
        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
        if path == path2:
          raise Exception('Could not substitute locale %s for %s in %s' %
                          (locale, locale2, path))

        # Ignore rather than rename when the destination resources config
        # already exists.
        # e.g. some libraries provide both values-nb/ and values-no/.
        # e.g. material design provides:
        # * res/values-rUS/values-rUS.xml
        # * res/values-b+es+419/values-b+es+419.xml
        config_dir = os.path.dirname(path2)
        already_has_renamed_config = ignore_dirs.get(config_dir)
        if already_has_renamed_config is None:
          # Cache the result of the first time the directory is encountered
          # since subsequent encounters will find the directory already exists
          # (due to the rename).
          already_has_renamed_config = os.path.exists(config_dir)
          ignore_dirs[config_dir] = already_has_renamed_config
        if already_has_renamed_config:
          continue

        build_utils.MakeDirectory(os.path.dirname(path2))
        shutil.move(path, path2)
        path_info.RegisterRename(
            os.path.relpath(path, resource_dir),
            os.path.relpath(path2, resource_dir))
+
+
def _ToAndroidLocales(locale_allowlist):
  """Converts the list of Chrome locales to Android config locale qualifiers.

  Args:
    locale_allowlist: A list of Chromium locale names.
  Returns:
    A set of matching Android config locale qualifier names.
  Raises:
    Exception: when a locale has no supported Android equivalent.
  """
  android_locales = set()
  for chromium_locale in locale_allowlist:
    android_locale = resource_utils.ToAndroidLocaleName(chromium_locale)
    unsupported = android_locale is None or ('-' in android_locale
                                             and '-r' not in android_locale)
    if unsupported:
      raise Exception('Unsupported Chromium locale name: %s' % android_locale)
    android_locales.add(android_locale)
    # Always keep non-regional fall-backs.
    android_locales.add(android_locale.split('-')[0])

  return android_locales
+
+
def _MoveImagesToNonMdpiFolders(res_root, path_info):
  """Move images from drawable-*-mdpi-* folders to drawable-* folders.

  Why? http://crbug.com/289843

  Args:
    res_root: Resource root directory whose drawable-* subdirs are scanned.
    path_info: resource_utils.ResourceInfoFile used to record renames.
  """
  for src_dir_name in os.listdir(res_root):
    src_components = src_dir_name.split('-')
    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
      continue
    src_dir = os.path.join(res_root, src_dir_name)
    if not os.path.isdir(src_dir):
      continue
    # Destination config is the same qualifier list with 'mdpi' removed.
    dst_components = [c for c in src_components if c != 'mdpi']
    assert dst_components != src_components
    dst_dir_name = '-'.join(dst_components)
    dst_dir = os.path.join(res_root, dst_dir_name)
    build_utils.MakeDirectory(dst_dir)
    for src_file_name in os.listdir(src_dir):
      # Only move images: .png, .webp, or files with no extension.
      if not os.path.splitext(src_file_name)[1] in ('.png', '.webp', ''):
        continue
      src_file = os.path.join(src_dir, src_file_name)
      dst_file = os.path.join(dst_dir, src_file_name)
      # The move below would silently clobber an existing destination file.
      assert not os.path.lexists(dst_file)
      shutil.move(src_file, dst_file)
      path_info.RegisterRename(
          os.path.relpath(src_file, res_root),
          os.path.relpath(dst_file, res_root))
+
+
def _FixManifest(options, temp_dir, extra_manifest=None):
  """Fix the APK's AndroidManifest.xml.

  This adds any missing namespaces for 'android' and 'tools', and
  sets certain elements like 'platformBuildVersionCode' or
  'android:debuggable' depending on the content of |options|.

  Args:
    options: The command-line arguments tuple.
    temp_dir: A temporary directory where the fixed manifest will be written to.
    extra_manifest: Path to an AndroidManifest.xml file which will get merged
      into the application node of the base manifest.
  Returns:
    Tuple of:
     * Manifest path within |temp_dir|.
     * Original package_name.
     * Manifest package name.
  """
  def maybe_extract_version(j):
    # Returns manifest values extracted from |j|, or None when aapt2 fails
    # on it (i.e. |j| is not a usable SDK jar).
    try:
      return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
    except build_utils.CalledProcessError:
      return None

  android_sdk_jars = [j for j in options.include_resources
                      if os.path.basename(j) in ('android.jar',
                                                 'android_system.jar')]
  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
  successful_extractions = [x for x in extract_all if x]
  # Exactly one candidate must yield manifest values.
  if len(successful_extractions) == 0:
    raise Exception(
        'Unable to find android SDK jar among candidates: %s'
        % ', '.join(android_sdk_jars))
  elif len(successful_extractions) > 1:
    raise Exception(
        'Found multiple android SDK jars among candidates: %s'
        % ', '.join(android_sdk_jars))
  version_code, version_name = successful_extractions.pop()[:2]

  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
  doc, manifest_node, app_node = manifest_utils.ParseManifest(
      options.android_manifest)

  if extra_manifest:
    _, extra_manifest_node, extra_app_node = manifest_utils.ParseManifest(
        extra_manifest)
    # Merge the extra manifest's <application> children into the base one.
    for node in extra_app_node:
      app_node.append(node)
    for node in extra_manifest_node:
      # DFM manifests have a bunch of tags we don't care about inside
      # <manifest>, so only take <queries>.
      if node.tag == 'queries':
        manifest_node.append(node)

  manifest_utils.AssertUsesSdk(manifest_node, options.min_sdk_version,
                               options.target_sdk_version)
  # We explicitly check that maxSdkVersion is set in the manifest since we don't
  # add it later like minSdkVersion and targetSdkVersion.
  manifest_utils.AssertUsesSdk(
      manifest_node,
      max_sdk_version=options.max_sdk_version,
      fail_if_not_exist=True)
  manifest_utils.AssertPackage(manifest_node, options.manifest_package)

  manifest_node.set('platformBuildVersionCode', version_code)
  manifest_node.set('platformBuildVersionName', version_name)

  orig_package = manifest_node.get('package')
  fixed_package = orig_package
  if options.arsc_package_name:
    manifest_node.set('package', options.arsc_package_name)
    fixed_package = options.arsc_package_name

  if options.debuggable:
    app_node.set('{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, 'debuggable'),
                 'true')

  if options.uses_split:
    uses_split = ElementTree.SubElement(manifest_node, 'uses-split')
    uses_split.set('{%s}name' % manifest_utils.ANDROID_NAMESPACE,
                   options.uses_split)

  # Make sure the min-sdk condition is not less than the min-sdk of the bundle.
  for min_sdk_node in manifest_node.iter('{%s}min-sdk' %
                                         manifest_utils.DIST_NAMESPACE):
    dist_value = '{%s}value' % manifest_utils.DIST_NAMESPACE
    if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version):
      min_sdk_node.set(dist_value, options.min_sdk_version)

  manifest_utils.SaveManifest(doc, debug_manifest_path)
  return debug_manifest_path, orig_package, fixed_package
+
+
+def _CreateKeepPredicate(resource_exclusion_regex,
+ resource_exclusion_exceptions):
+ """Return a predicate lambda to determine which resource files to keep.
+
+ Args:
+ resource_exclusion_regex: A regular expression describing all resources
+ to exclude, except if they are mip-maps, or if they are listed
+ in |resource_exclusion_exceptions|.
+ resource_exclusion_exceptions: A list of glob patterns corresponding
+ to exceptions to the |resource_exclusion_regex|.
+ Returns:
+ A lambda that takes a path, and returns true if the corresponding file
+ must be kept.
+ """
+ predicate = lambda path: os.path.basename(path)[0] != '.'
+ if resource_exclusion_regex == '':
+ # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways.
+ return predicate
+
+ # A simple predicate that only removes (returns False for) paths covered by
+ # the exclusion regex or listed as exceptions.
+ return lambda path: (
+ not re.search(resource_exclusion_regex, path) or
+ build_utils.MatchesGlob(path, resource_exclusion_exceptions))
+
+
+def _ComputeSha1(path):
+ with open(path, 'rb') as f:
+ data = f.read()
+ return hashlib.sha1(data).hexdigest()
+
+
def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
  """Convert one png to an extension-less webp file, using a disk cache.

  Args:
    png_path: Path of the source png; removed after conversion.
    cwebp_binary: Path to the cwebp executable.
    cwebp_version: cwebp version identifier; part of the cache key.
    webp_cache_dir: Directory of previously converted images, keyed by
      source hash, cwebp version and quality arguments.
  Returns:
    ((old_relpath, new_relpath), cache_hit), with paths relative to the
    directory two levels above |png_path|.
  """
  sha1_hash = _ComputeSha1(png_path)

  # The set of arguments that will appear in the cache key.
  quality_args = ['-m', '6', '-q', '100', '-lossless']

  webp_cache_path = os.path.join(
      webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
                                        ''.join(quality_args)))
  # No need to add .webp. Android can load images fine without them.
  webp_path = os.path.splitext(png_path)[0]

  cache_hit = os.path.exists(webp_cache_path)
  if cache_hit:
    # Hard-link from the cache instead of re-encoding.
    os.link(webp_cache_path, webp_path)
  else:
    # We place the generated webp image to webp_path, instead of in the
    # webp_cache_dir to avoid concurrency issues.
    args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
    subprocess.check_call(args)

    try:
      os.link(webp_path, webp_cache_path)
    except OSError:
      # Because of concurrent runs, a webp image may already exist in
      # webp_cache_path.
      pass

  os.remove(png_path)
  original_dir = os.path.dirname(os.path.dirname(png_path))
  rename_tuple = (os.path.relpath(png_path, original_dir),
                  os.path.relpath(webp_path, original_dir))
  return rename_tuple, cache_hit
+
+
def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
  """Convert eligible pngs to webp in parallel, recording the renames.

  Pngs matching _PNG_WEBP_EXCLUSION_PATTERN are left untouched.

  Args:
    cwebp_binary: Path to the cwebp executable.
    png_paths: List of png file paths to consider.
    path_info: resource_utils.ResourceInfoFile used to record renames.
    webp_cache_dir: Directory for the conversion cache (created if needed).
  """
  # NOTE(review): on Python 3, check_output returns bytes, so the version
  # embedded in the cache-key filename is a b'...' repr. Harmless (the key
  # is still unique per version) but worth confirming.
  cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
  shard_args = [(f, ) for f in png_paths
                if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]

  build_utils.MakeDirectory(webp_cache_dir)
  results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
                                     shard_args,
                                     cwebp_binary=cwebp_binary,
                                     cwebp_version=cwebp_version,
                                     webp_cache_dir=webp_cache_dir)
  total_cache_hits = 0
  for rename_tuple, cache_hit in results:
    path_info.RegisterRename(*rename_tuple)
    total_cache_hits += int(cache_hit)

  logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
+
+
+def _RemoveImageExtensions(directory, path_info):
+ """Remove extensions from image files in the passed directory.
+
+ This reduces binary size but does not affect android's ability to load the
+ images.
+ """
+ for f in _IterFiles(directory):
+ if (f.endswith('.png') or f.endswith('.webp')) and not f.endswith('.9.png'):
+ path_with_extension = f
+ path_no_extension = os.path.splitext(path_with_extension)[0]
+ if path_no_extension != path_with_extension:
+ shutil.move(path_with_extension, path_no_extension)
+ path_info.RegisterRename(
+ os.path.relpath(path_with_extension, directory),
+ os.path.relpath(path_no_extension, directory))
+
+
def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
                      partials_dir):
  """Run 'aapt2 compile' on one resource directory, producing a partial zip.

  Args:
    index: Numeric prefix keeping partial file names unique.
    dep_subdir: Resource directory to compile.
    keep_predicate: Optional predicate for post-compile resource filtering
      (see _CreateValuesKeepPredicate); None to skip filtering.
    aapt2_path: Path to the aapt2 binary.
    partials_dir: Directory to write the partial zip into.
  Returns:
    Path of the compiled partial zip.
  """
  unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
  partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))

  compile_command = [
      aapt2_path,
      'compile',
      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
      # '--no-crunch',
      '--dir',
      dep_subdir,
      '-o',
      partial_path
  ]

  # There are resources targeting API-versions lower than our minapi. For
  # various reasons it's easier to let aapt2 ignore these than for us to
  # remove them from our build (e.g. it's from a 3rd party library).
  build_utils.CheckOutput(
      compile_command,
      stderr_filter=lambda output: build_utils.FilterLines(
          output, r'ignoring configuration .* for (styleable|attribute)'))

  # Filtering these files is expensive, so only apply filters to the partials
  # that have been explicitly targeted.
  if keep_predicate:
    logging.debug('Applying .arsc filtering to %s', dep_subdir)
    protoresources.StripUnwantedResources(partial_path, keep_predicate)
  return partial_path
+
+
+def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir):
+ patterns = [
+ x[1] for x in exclusion_rules
+ if build_utils.MatchesGlob(dep_subdir, [x[0]])
+ ]
+ if not patterns:
+ return None
+
+ regexes = [re.compile(p) for p in patterns]
+ return lambda x: not any(r.search(x) for r in regexes)
+
+
def _CompileDeps(aapt2_path, dep_subdirs, dep_subdir_overlay_set, temp_dir,
                 exclusion_rules):
  """Compile every dependency resource dir and build aapt2 link arguments.

  Args:
    aapt2_path: Path to the aapt2 binary.
    dep_subdirs: Resource directories to compile.
    dep_subdir_overlay_set: Subset of |dep_subdirs| to link with overlay
      ('-R') semantics.
    temp_dir: Scratch directory to place partial zips under.
    exclusion_rules: (source_glob, regex) pairs for values/ filtering.
  Returns:
    Flattened argument list of partial zip paths, with '-R' preceding each
    overlay partial.
  """
  partials_dir = os.path.join(temp_dir, 'partials')
  build_utils.MakeDirectory(partials_dir)

  job_params = [(i, dep_subdir,
                 _CreateValuesKeepPredicate(exclusion_rules, dep_subdir))
                for i, dep_subdir in enumerate(dep_subdirs)]

  # Filtering is slow, so ensure jobs with keep_predicate are started first.
  job_params.sort(key=lambda x: not x[2])
  partials = list(
      parallel.BulkForkAndCall(_CompileSingleDep,
                               job_params,
                               aapt2_path=aapt2_path,
                               partials_dir=partials_dir))

  partials_cmd = list()
  for i, partial in enumerate(partials):
    dep_subdir = job_params[i][1]
    if dep_subdir in dep_subdir_overlay_set:
      partials_cmd += ['-R']
    partials_cmd += [partial]
  return partials_cmd
+
+
+def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips):
+ for zip_file in dependencies_res_zips:
+ zip_info_file_path = zip_file + '.info'
+ if os.path.exists(zip_info_file_path):
+ path_info.MergeInfoFile(zip_info_file_path)
+ path_info.Write(info_path)
+
+
def _RemoveUnwantedLocalizedStrings(dep_subdirs, options):
  """Remove localized strings that should not go into the final output.

  Files under |dep_subdirs| are deleted or rewritten in place.

  Args:
    dep_subdirs: List of resource dependency directories.
    options: Command-line options namespace.
  """
  # Collect locale and file paths from the existing subdirs.
  # The following variable maps Android locale names to
  # sets of corresponding xml file paths.
  locale_to_files_map = collections.defaultdict(set)
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      locale = resource_utils.FindLocaleInStringResourceFilePath(f)
      if locale:
        locale_to_files_map[locale].add(f)

  all_locales = set(locale_to_files_map)

  # Set A: wanted locales, either all of them or the
  # list provided by --locale-allowlist.
  wanted_locales = all_locales
  if options.locale_allowlist:
    wanted_locales = _ToAndroidLocales(options.locale_allowlist)

  # Set B: shared resources locales, which is either set A
  # or the list provided by --shared-resources-allowlist-locales
  shared_resources_locales = wanted_locales
  shared_names_allowlist = set()
  if options.shared_resources_allowlist_locales:
    shared_names_allowlist = set(
        resource_utils.GetRTxtStringResourceNames(
            options.shared_resources_allowlist))

    shared_resources_locales = _ToAndroidLocales(
        options.shared_resources_allowlist_locales)

  # Remove any file that belongs to a locale not covered by
  # either A or B.
  removable_locales = (all_locales - wanted_locales - shared_resources_locales)
  for locale in removable_locales:
    for path in locale_to_files_map[locale]:
      os.remove(path)

  # For any locale in B but not in A, only keep the shared
  # resource strings in each file.
  for locale in shared_resources_locales - wanted_locales:
    for path in locale_to_files_map[locale]:
      resource_utils.FilterAndroidResourceStringsXml(
          path, lambda x: x in shared_names_allowlist)

  # For any locale in A but not in B, only keep the strings
  # that are _not_ from shared resources in the file.
  for locale in wanted_locales - shared_resources_locales:
    for path in locale_to_files_map[locale]:
      resource_utils.FilterAndroidResourceStringsXml(
          path, lambda x: x not in shared_names_allowlist)
+
+
+def _FilterResourceFiles(dep_subdirs, keep_predicate):
+ # Create a function that selects which resource files should be packaged
+ # into the final output. Any file that does not pass the predicate will
+ # be removed below.
+ png_paths = []
+ for directory in dep_subdirs:
+ for f in _IterFiles(directory):
+ if not keep_predicate(f):
+ os.remove(f)
+ elif f.endswith('.png'):
+ png_paths.append(f)
+
+ return png_paths
+
+
def _PackageApk(options, build):
  """Compile and link resources with aapt2.

  Args:
    options: The command-line options.
    build: BuildContext object.
  Returns:
    The manifest package name for the APK.
  """
  logging.debug('Extracting resource .zips')
  dep_subdirs = []
  dep_subdir_overlay_set = set()
  for dependency_res_zip in options.dependencies_res_zips:
    extracted_dep_subdirs = resource_utils.ExtractDeps([dependency_res_zip],
                                                       build.deps_dir)
    dep_subdirs += extracted_dep_subdirs
    # Dirs from overlay zips are linked with '-R' later (see _CompileDeps).
    if dependency_res_zip in options.dependencies_res_zip_overlays:
      dep_subdir_overlay_set.update(extracted_dep_subdirs)

  logging.debug('Applying locale transformations')
  path_info = resource_utils.ResourceInfoFile()
  _RenameLocaleResourceDirs(dep_subdirs, path_info)

  logging.debug('Applying file-based exclusions')
  keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
                                        options.resource_exclusion_exceptions)
  png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)

  if options.locale_allowlist or options.shared_resources_allowlist_locales:
    logging.debug('Applying locale-based string exclusions')
    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  if png_paths and options.png_to_webp:
    logging.debug('Converting png->webp')
    _ConvertToWebP(options.webp_binary, png_paths, path_info,
                   options.webp_cache_dir)
  logging.debug('Applying drawable transformations')
  for directory in dep_subdirs:
    _MoveImagesToNonMdpiFolders(directory, path_info)
    _RemoveImageExtensions(directory, path_info)

  logging.debug('Running aapt2 compile')
  exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
  partials = _CompileDeps(options.aapt2_path, dep_subdirs,
                          dep_subdir_overlay_set, build.temp_dir,
                          exclusion_rules)

  link_command = [
      options.aapt2_path,
      'link',
      '--auto-add-overlay',
      '--no-version-vectors',
      # Set SDK versions in case they are not set in the Android manifest.
      '--min-sdk-version',
      options.min_sdk_version,
      '--target-sdk-version',
      options.target_sdk_version,
      '--output-text-symbols',
      build.r_txt_path,
  ]

  for j in options.include_resources:
    link_command += ['-I', j]
  if options.version_code:
    link_command += ['--version-code', options.version_code]
  if options.version_name:
    link_command += ['--version-name', options.version_name]
  if options.proguard_file:
    link_command += ['--proguard', build.proguard_path]
    link_command += ['--proguard-minimal-keep-rules']
  if options.proguard_file_main_dex:
    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
  if options.emit_ids_out:
    link_command += ['--emit-ids', build.emit_ids_path]

  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
  # can be used with recent versions of aapt2.
  if options.shared_resources:
    link_command.append('--shared-lib')

  if options.no_xml_namespaces:
    link_command.append('--no-xml-namespaces')

  if options.package_id:
    link_command += [
        '--package-id',
        hex(options.package_id),
        '--allow-reserved-package-id',
    ]

  fixed_manifest, desired_manifest_package_name, fixed_manifest_package = (
      _FixManifest(options, build.temp_dir))
  if options.rename_manifest_package:
    desired_manifest_package_name = options.rename_manifest_package

  link_command += [
      '--manifest', fixed_manifest, '--rename-manifest-package',
      desired_manifest_package_name
  ]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  if options.use_resource_ids_path:
    _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
                         fixed_manifest_package)
    link_command += ['--stable-ids', build.stable_ids_path]

  link_command += partials

  # We always create a binary arsc file first, then convert to proto, so flags
  # such as --shared-lib can be supported.
  link_command += ['-o', build.arsc_path]

  logging.debug('Starting: aapt2 link')
  link_proc = subprocess.Popen(link_command)

  # Create .res.info file in parallel (while aapt2 link runs).
  _CreateResourceInfoFile(path_info, build.info_path,
                          options.dependencies_res_zips)
  logging.debug('Created .res.info file')

  exit_code = link_proc.wait()
  logging.debug('Finished: aapt2 link')
  if exit_code:
    raise subprocess.CalledProcessError(exit_code, link_command)

  if options.proguard_file and (options.shared_resources
                                or options.app_as_shared_lib):
    # Make sure the R class associated with the manifest package does not have
    # its onResourcesLoaded method obfuscated or removed, so that the framework
    # can call it in the case where the APK is being loaded as a library.
    with open(build.proguard_path, 'a') as proguard_file:
      keep_rule = '''
      -keep,allowoptimization class {package}.R {{
        public static void onResourcesLoaded(int);
      }}
      '''.format(package=desired_manifest_package_name)
      proguard_file.write(textwrap.dedent(keep_rule))

  logging.debug('Running aapt2 convert')
  build_utils.CheckOutput([
      options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
      build.proto_path, build.arsc_path
  ])

  # Workaround for b/147674078. This is only needed for WebLayer and does not
  # affect WebView usage, since WebView does not use dynamic attributes.
  if options.shared_resources:
    logging.debug('Hardcoding dynamic attributes')
    protoresources.HardcodeSharedLibraryDynamicAttributes(
        build.proto_path, options.is_bundle_module,
        options.shared_resources_allowlist)

  build_utils.CheckOutput([
      options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
      build.arsc_path, build.proto_path
  ])

  return desired_manifest_package_name
+
+
+@contextlib.contextmanager
+def _CreateStableIdsFile(in_path, out_path, package_name):
+ """Transforms a file generated by --emit-ids from another package.
+
+ --stable-ids is generally meant to be used by different versions of the same
+ package. To make it work for other packages, we need to transform the package
+ name references to match the package that resources are being generated for.
+
+ Note: This will fail if the package ID of the resources in
+ |options.use_resource_ids_path| does not match the package ID of the
+ resources being linked.
+ """
+ with open(in_path) as stable_ids_file:
+ with open(out_path, 'w') as output_ids_file:
+ output_stable_ids = re.sub(
+ r'^.*?:',
+ package_name + ':',
+ stable_ids_file.read(),
+ flags=re.MULTILINE)
+ output_ids_file.write(output_stable_ids)
+
+
+def _WriteOutputs(options, build):
+ possible_outputs = [
+ (options.srcjar_out, build.srcjar_path),
+ (options.r_text_out, build.r_txt_path),
+ (options.arsc_path, build.arsc_path),
+ (options.proto_path, build.proto_path),
+ (options.proguard_file, build.proguard_path),
+ (options.proguard_file_main_dex, build.proguard_main_dex_path),
+ (options.emit_ids_out, build.emit_ids_path),
+ (options.info_path, build.info_path),
+ ]
+
+ for final, temp in possible_outputs:
+ # Write file only if it's changed.
+ if final and not (os.path.exists(final) and filecmp.cmp(final, temp)):
+ shutil.move(temp, final)
+
+
def _CreateNormalizedManifestForVerification(options):
  """Returns the normalized text of the fixed-up AndroidManifest.xml.

  Used to compare the manifest that would actually be linked against the
  checked-in expectation file.
  """
  with build_utils.TempDir() as temp_dir:
    manifest_path, _, _ = _FixManifest(
        options, temp_dir, extra_manifest=options.extra_verification_manifest)
    with open(manifest_path) as manifest_file:
      manifest_text = manifest_file.read()
  return manifest_utils.NormalizeManifest(manifest_text)
+
+
def main(args):
  """Entry point: links resources with aapt2 and writes all requested outputs.

  Orchestration order matters here: expectation verification may short-circuit,
  then the APK resources are packaged, R.java sources generated, the package
  ID sanity-checked, and finally temp outputs moved into place.
  """
  build_utils.InitLogging('RESOURCE_DEBUG')
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  if options.expected_file:
    # Compare the normalized manifest against checked-in expectations; in
    # verify-only mode nothing else is built.
    actual_data = _CreateNormalizedManifestForVerification(options)
    diff_utils.CheckExpectations(actual_data, options)
    if options.only_verify_expectations:
      return

  path = options.arsc_path or options.proto_path
  debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR')
  if debug_temp_resources_dir:
    path = os.path.join(debug_temp_resources_dir, os.path.basename(path))
  else:
    # Use a deterministic temp directory since .pb files embed the absolute
    # path of resources: crbug.com/939984
    path = path + '.tmpdir'
  build_utils.DeleteDirectory(path)

  with resource_utils.BuildContext(
      temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build:

    manifest_package_name = _PackageApk(options, build)

    # If --shared-resources-allowlist is used, all the resources listed in the
    # corresponding R.txt file will be non-final, and an onResourcesLoaded()
    # will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, the all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_allowlist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_allowlist)
      rjava_build_options.GenerateOnResourcesLoaded()
      if options.shared_resources:
        # The final resources will only be used in WebLayer, so hardcode the
        # package ID to be what WebLayer expects.
        rjava_build_options.SetFinalPackageId(
            protoresources.SHARED_LIBRARY_HARDCODED_ID)
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    custom_root_package_name = options.r_java_root_package_name
    grandparent_custom_package_name = None

    # Always generate an R.java file for the package listed in
    # AndroidManifest.xml because this is where Android framework looks to find
    # onResourcesLoaded() for shared library apks. While not actually necessary
    # for application apks, it also doesn't hurt.
    apk_package_name = manifest_package_name

    if options.package_name and not options.arsc_package_name:
      # Feature modules have their own custom root package name and should
      # inherit from the appropriate base module package. This behaviour should
      # not be present for test apks with an apk under test. Thus,
      # arsc_package_name is used as it is only defined for test apks with an
      # apk under test.
      custom_root_package_name = options.package_name
      grandparent_custom_package_name = options.r_java_root_package_name
      # Feature modules have the same manifest package as the base module but
      # they should not create an R.java for said manifest package because it
      # will be created in the base module.
      apk_package_name = None

    logging.debug('Creating R.srcjar')
    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, apk_package_name, build.r_txt_path,
        options.extra_res_packages, rjava_build_options, options.srcjar_out,
        custom_root_package_name, grandparent_custom_package_name,
        options.extra_main_r_text_files)
    build_utils.ZipDir(build.srcjar_path, build.srcjar_dir)

    # Sanity check that the created resources have the expected package ID.
    logging.debug('Performing sanity check')
    if options.package_id:
      expected_id = options.package_id
    elif options.shared_resources:
      expected_id = 0
    else:
      expected_id = 127  # == '0x7f'.
    _, package_id = resource_utils.ExtractArscPackage(
        options.aapt2_path,
        build.arsc_path if options.arsc_path else build.proto_path)
    # When there are no resources, ExtractArscPackage returns (None, None), in
    # this case there is no need to check for matching package ID.
    if package_id is not None and package_id != expected_id:
      raise Exception(
          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))

    logging.debug('Copying outputs')
    _WriteOutputs(options, build)

  if options.depfile:
    # The depfile records every input that should trigger a rebuild.
    depfile_deps = (options.dependencies_res_zips +
                    options.dependencies_res_zip_overlays +
                    options.extra_main_r_text_files + options.include_resources)
    build_utils.WriteDepfile(options.depfile, options.srcjar_out, depfile_deps)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/compile_resources.pydeps b/third_party/libwebrtc/build/android/gyp/compile_resources.pydeps
new file mode 100644
index 0000000000..907601422d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/compile_resources.pydeps
@@ -0,0 +1,39 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../../third_party/six/src/six.py
+../../gn_helpers.py
+compile_resources.py
+proto/Configuration_pb2.py
+proto/Resources_pb2.py
+proto/__init__.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/manifest_utils.py
+util/parallel.py
+util/protoresources.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/copy_ex.py b/third_party/libwebrtc/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000000..41604c4627
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/copy_ex.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+from __future__ import print_function
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+ """Returns a list of all the files in |base|. Each entry is relative to the
+ last path entry of |base|."""
+ result = []
+ dirname = os.path.dirname(base)
+ for root, _, files in os.walk(base):
+ result.extend([os.path.join(root[len(dirname):], f) for f in files])
+ return result
+
def CopyFile(f, dest, deps):
  """Copy file or directory and update deps."""
  if os.path.isdir(f):
    # Directories are copied recursively under dest, keeping their basename.
    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
    deps.extend(_get_all_files(f))
    return

  candidate = os.path.join(dest, os.path.basename(f))
  if os.path.isfile(candidate):
    # The target already exists as a file: overwrite that file in place.
    dest = candidate

  deps.append(f)

  if os.path.isfile(dest):
    if filecmp.cmp(dest, f, shallow=False):
      # Identical content: leave the existing file (and timestamp) alone.
      return
    # The shutil.copy() below would fail if the file does not have write
    # permissions. Deleting the file has similar costs to modifying the
    # permissions.
    os.unlink(dest)

  shutil.copy(f, dest)
+
def DoCopy(options, deps):
  """Copy files or directories given in options.files and update deps."""
  expanded = itertools.chain.from_iterable(
      build_utils.ParseGnList(entry) for entry in options.files)
  for path in expanded:
    if os.path.isdir(path) and not options.clear:
      # Copying a directory without --clear could leave stale files behind.
      print('To avoid stale files you must use --clear when copying '
            'directories')
      sys.exit(-1)
    CopyFile(path, options.dest, deps)
+
def DoRenaming(options, deps):
  """Copy and rename files given in options.renaming_sources and update deps.

  Each source from options.renaming_sources is copied into options.dest under
  the corresponding name from options.renaming_destinations. The two lists
  must have the same length; directories cannot be renamed.
  """
  src_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_sources))

  dest_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_destinations))

  if len(src_files) != len(dest_files):
    # Fixed grammar of the original error message ("files not match").
    print('Renaming source and destination file lists do not match.')
    sys.exit(-1)

  for src, dest in zip(src_files, dest_files):
    if os.path.isdir(src):
      # Fixed typo ("diretory") in the original error message.
      print('Renaming directories is not supported.')
      sys.exit(-1)
    CopyFile(src, os.path.join(options.dest, dest), deps)
+
def main(args):
  """Entry point: parses options, then copies and/or renames the given files.

  Collected input paths are written to the depfile (if requested) and the
  stamp file is touched last, only after all copies succeeded.
  """
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--dest', help='Directory to copy files to.')
  parser.add_option('--files', action='append',
                    help='List of files to copy.')
  parser.add_option('--clear', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--renaming-sources',
                    action='append',
                    help='List of files need to be renamed while being '
                    'copied to dest directory')
  parser.add_option('--renaming-destinations',
                    action='append',
                    help='List of destination file name without path, the '
                    'number of elements must match rename-sources.')

  options, _ = parser.parse_args(args)

  if options.clear:
    # Recreate the destination from scratch so no stale files survive.
    build_utils.DeleteDirectory(options.dest)
    build_utils.MakeDirectory(options.dest)

  # Accumulates every input path consumed, for the depfile below.
  deps = []

  if options.files:
    DoCopy(options, deps)

  if options.renaming_sources:
    DoRenaming(options, deps)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile, options.stamp, deps)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/copy_ex.pydeps b/third_party/libwebrtc/build/android/gyp/copy_ex.pydeps
new file mode 100644
index 0000000000..37352512be
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/copy_ex.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.py b/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.py
new file mode 100755
index 0000000000..a5a5b6658a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
# Template for the generated wrapper script. The ${...} placeholders are
# filled in by string.Template.substitute() in main() below; path-valued
# placeholders receive repr()'d strings which the generated script resolves
# relative to its own location at runtime.
SCRIPT_TEMPLATE = string.Template("""\
#!/usr/bin/env python3
#
# This file was generated by build/android/gyp/create_apk_operations_script.py

import os
import sys

def main():
  script_directory = os.path.dirname(__file__)
  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
      script_directory, p))
  sys.path.append(resolve(${APK_OPERATIONS_DIR}))
  import apk_operations
  output_dir = resolve(${OUTPUT_DIR})
  apk_operations.Run(
      output_dir,
      resolve(${APK_PATH}),
      [resolve(p) for p in ${ADDITIONAL_APK_PATHS}],
      resolve(${INC_JSON_PATH}),
      ${FLAGS_FILE},
      ${TARGET_CPU},
      resolve(${MAPPING_PATH}))


if __name__ == '__main__':
  sys.exit(main())
""")
+
+
def main(args):
  """Writes an executable wrapper script that invokes apk_operations.Run()."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path',
                      help='Output path for executable script.')
  parser.add_argument('--apk-path')
  parser.add_argument('--incremental-install-json-path')
  parser.add_argument('--command-line-flags-file')
  parser.add_argument('--target-cpu')
  parser.add_argument(
      '--additional-apk-path',
      action='append',
      dest='additional_apk_paths',
      default=[],
      help='Paths to APKs to be installed prior to --apk-path.')
  parser.add_argument('--proguard-mapping-path')
  args = parser.parse_args(args)

  script_dir = os.path.dirname(args.script_output_path)

  def relativize(path):
    """Returns the path relative to the output script directory."""
    return None if path is None else os.path.relpath(path, script_dir)

  apk_operations_dir = relativize(
      os.path.join(os.path.dirname(__file__), os.path.pardir))

  # All placeholder values are repr()'d so they embed as Python literals in
  # the generated script.
  template_values = {
      'APK_OPERATIONS_DIR': repr(apk_operations_dir),
      'OUTPUT_DIR': repr(relativize('.')),
      'APK_PATH': repr(relativize(args.apk_path)),
      'ADDITIONAL_APK_PATHS':
          [relativize(p) for p in args.additional_apk_paths],
      'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
      'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
      'FLAGS_FILE': repr(args.command_line_flags_file),
      'TARGET_CPU': repr(args.target_cpu),
  }
  with open(args.script_output_path, 'w') as script_file:
    script_file.write(SCRIPT_TEMPLATE.substitute(template_values))
  # Mark the generated script executable for owner/group.
  os.chmod(args.script_output_path, 0o750)
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.pydeps b/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.pydeps
new file mode 100644
index 0000000000..e09bb7244c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_apk_operations_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py
+../../gn_helpers.py
+create_apk_operations_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_app_bundle.py b/third_party/libwebrtc/build/android/gyp/create_app_bundle.py
new file mode 100755
index 0000000000..8d03f08c34
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_app_bundle.py
@@ -0,0 +1,543 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import zipfile
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import dexdump
+
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+from xml.etree import ElementTree
+
+import bundletool
+
+# Location of language-based assets in bundle modules.
+_LOCALES_SUBDIR = 'assets/locales/'
+
+# The fallback locale should always have its .pak file included in
+# the base apk, i.e. not use language-based asset targetting. This ensures
+# that Chrome won't crash on startup if its bundle is installed on a device
+# with an unsupported system locale (e.g. fur-rIT).
+_FALLBACK_LOCALE = 'en-US'
+
+# List of split dimensions recognized by this tool.
+_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]
+
+# Due to historical reasons, certain languages identified by Chromium with a
+# 3-letters ISO 639-2 code, are mapped to a nearly equivalent 2-letters
+# ISO 639-1 code instead (due to the fact that older Android releases only
+# supported the latter when matching resources).
+#
+# This mapping applies the same conversion as is used for Java resources.
+_SHORTEN_LANGUAGE_CODE_MAP = {
+ 'fil': 'tl', # Filipino to Tagalog.
+}
+
+# A list of extensions corresponding to files that should never be compressed
+# in the bundle. This used to be handled by bundletool automatically until
+# release 0.8.0, which required that this be passed to the BundleConfig
+# file instead.
+#
+# This is the original list, which was taken from aapt2, with 'webp' added to
+# it (which curiously was missing from the list).
+_UNCOMPRESSED_FILE_EXTS = [
+ '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'git', 'imy', 'jet',
+ 'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
+ 'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
+ 'xmf'
+]
+
+
def _ParseArgs(args):
  """Parses command-line arguments and normalizes GN-list options.

  Returns:
    An argparse.Namespace with module_zips/rtxt_in_paths/pathmap_in_paths
    expanded into real lists, uncompressed_assets converted to a set of
    in-bundle paths, and split_dimensions validated.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--out-bundle', required=True,
                      help='Output bundle zip archive.')
  parser.add_argument('--module-zips', required=True,
                      help='GN-list of module zip archives.')
  parser.add_argument(
      '--pathmap-in-paths',
      action='append',
      help='List of module pathmap files.')
  parser.add_argument(
      '--module-name',
      action='append',
      dest='module_names',
      help='List of module names.')
  parser.add_argument(
      '--pathmap-out-path', help='Path to combined pathmap file for bundle.')
  parser.add_argument(
      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
  parser.add_argument(
      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
  parser.add_argument('--uncompressed-assets', action='append',
                      help='GN-list of uncompressed assets.')
  parser.add_argument(
      '--compress-shared-libraries',
      action='store_true',
      help='Whether to store native libraries compressed.')
  parser.add_argument('--compress-dex',
                      action='store_true',
                      help='Compress .dex files')
  parser.add_argument('--split-dimensions',
                      help="GN-list of split dimensions to support.")
  parser.add_argument(
      '--base-module-rtxt-path',
      help='Optional path to the base module\'s R.txt file, only used with '
      'language split dimension.')
  parser.add_argument(
      '--base-allowlist-rtxt-path',
      help='Optional path to an R.txt file, string resources '
      'listed there _and_ in --base-module-rtxt-path will '
      'be kept in the base bundle module, even if language'
      ' splitting is enabled.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')

  parser.add_argument(
      '--validate-services',
      action='store_true',
      help='Check if services are in base module if isolatedSplits is enabled.')

  options = parser.parse_args(args)
  options.module_zips = build_utils.ParseGnList(options.module_zips)
  options.rtxt_in_paths = build_utils.ParseGnList(options.rtxt_in_paths)
  options.pathmap_in_paths = build_utils.ParseGnList(options.pathmap_in_paths)

  if len(options.module_zips) == 0:
    raise Exception('The module zip list cannot be empty.')

  # Merge all uncompressed assets into a set.
  uncompressed_list = []
  if options.uncompressed_assets:
    for l in options.uncompressed_assets:
      for entry in build_utils.ParseGnList(l):
        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
        pos = entry.find(':')
        if pos >= 0:
          uncompressed_list.append(entry[pos + 1:])
        else:
          uncompressed_list.append(entry)

  options.uncompressed_assets = set(uncompressed_list)

  # Check that all split dimensions are valid
  if options.split_dimensions:
    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
    for dim in options.split_dimensions:
      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))

  # As a special case, --base-allowlist-rtxt-path can be empty to indicate
  # that the module doesn't need such a allowlist. That's because it is easier
  # to check this condition here than through GN rules :-(
  if options.base_allowlist_rtxt_path == '':
    options.base_module_rtxt_path = None

  # Check --base-module-rtxt-path and --base-allowlist-rtxt-path usage.
  if options.base_module_rtxt_path:
    if not options.base_allowlist_rtxt_path:
      parser.error(
          '--base-module-rtxt-path requires --base-allowlist-rtxt-path')
    # NOTE(review): if --split-dimensions was not passed, split_dimensions is
    # still None here and this membership test raises TypeError rather than
    # printing a parser error — confirm callers always pass both together.
    if 'language' not in options.split_dimensions:
      parser.error('--base-module-rtxt-path is only valid with '
                   'language-based splits.')

  return options
+
+
+def _MakeSplitDimension(value, enabled):
+ """Return dict modelling a BundleConfig splitDimension entry."""
+ return {'value': value, 'negate': not enabled}
+
+
def _GenerateBundleConfigJson(uncompressed_assets, compress_dex,
                              compress_shared_libraries, split_dimensions,
                              base_master_resource_ids):
  """Generate the serialized JSON BundleConfig for bundletool.

  Args:
    uncompressed_assets: A list or set of file paths under assets/ that always
      be stored uncompressed.
    compress_dex: Boolean, whether to compress .dex files.
    compress_shared_libraries: Boolean, whether to compress native libs.
    split_dimensions: list of split dimensions.
    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
      inside the base module, even when split dimensions are enabled.
  Returns:
    A JSON string that can be written to a BundleConfig file. (The previous
    docstring claimed a dictionary; json.dumps() returns a string.)
  """
  # Compute splitsConfig list. Each item is a dictionary that can have
  # the following keys:
  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
  #    'negate': Boolean, True to indicate that the bundle should *not* be
  #       split (unused at the moment by this script).

  split_dimensions = [
      _MakeSplitDimension(dim, dim in split_dimensions)
      for dim in _ALL_SPLIT_DIMENSIONS
  ]

  # Native libraries loaded by the crazy linker.
  # Whether other .so files are compressed is controlled by
  # "uncompressNativeLibraries".
  uncompressed_globs = ['lib/*/crazy.*']
  # Locale-specific pak files stored in bundle splits need not be compressed.
  uncompressed_globs.extend(
      ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
  uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
  # NOTE: Use '**' instead of '*' to work through directories!
  uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
  if not compress_dex:
    # Explicit glob required only when using bundletool. Play Store looks for
    # "uncompressDexFiles" set below.
    # Bug fix: the original called extend() with a bare string, which added
    # every single character ('c', 'l', 'a', ...) as its own glob entry
    # instead of the intended 'classes*.dex' pattern.
    uncompressed_globs.append('classes*.dex')

  data = {
      'optimizations': {
          'splitsConfig': {
              'splitDimension': split_dimensions,
          },
          'uncompressNativeLibraries': {
              'enabled': not compress_shared_libraries,
          },
          'uncompressDexFiles': {
              'enabled': True,  # Applies only for P+.
          }
      },
      'compression': {
          'uncompressedGlob': sorted(uncompressed_globs),
      },
  }

  if base_master_resource_ids:
    data['master_resources'] = {
        'resource_ids': list(base_master_resource_ids),
    }

  return json.dumps(data, indent=2)
+
+
def _RewriteLanguageAssetPath(src_path):
  """Rewrite the destination path of a locale asset for language-based splits.

  Should only be used when generating bundles with language-based splits.
  This will rewrite paths that look like locales/<locale>.pak into
  locales#<language>/<locale>.pak, where <language> is the language code
  from the locale.

  Returns new path as a string (|src_path| unchanged when it is not a locale
  .pak file).
  """
  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
    # Bug fix: the original returned [src_path] (a list) here while every
    # other path returns a string; the caller uses the result directly as a
    # zip entry name, so the list form would produce a bogus entry.
    return src_path

  locale = src_path[len(_LOCALES_SUBDIR):-4]
  android_locale = resource_utils.ToAndroidLocaleName(locale)

  # The locale format is <lang>-<region> or <lang> or BCP-47 (e.g b+sr+Latn).
  # Extract the language.
  pos = android_locale.find('-')
  if android_locale.startswith('b+'):
    # If locale is in BCP-47 the language is the second tag (e.g. b+sr+Latn)
    android_language = android_locale.split('+')[1]
  elif pos >= 0:
    android_language = android_locale[:pos]
  else:
    android_language = android_locale

  if locale == _FALLBACK_LOCALE:
    # Fallback locale .pak files must be placed in a different directory
    # to ensure they are always stored in the base module.
    result_path = 'assets/fallback-locales/%s.pak' % locale
  else:
    # Other language .pak files go into a language-specific asset directory
    # that bundletool will store in separate split APKs.
    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)

  return result_path
+
+
def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
  """Splits assets in a module if needed.

  Args:
    src_module_zip: input zip module path.
    tmp_dir: Path to temporary directory, where the new output module might
      be written to.
    split_dimensions: list of split dimensions.

  Returns:
    If the module doesn't need asset targeting, doesn't do anything and
    returns src_module_zip. Otherwise, create a new module zip archive under
    tmp_dir with the same file name, but which contains assets paths targeting
    the proper dimensions.
  """
  if 'LANGUAGE' not in split_dimensions:
    # Nothing to target, so return original module path.
    return src_module_zip

  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
    locale_entries = set(
        entry for entry in src_zip.namelist()
        if entry.startswith(_LOCALES_SUBDIR))

    if not locale_entries:
      # No language-based assets to split in this module.
      return src_module_zip

    dst_zip_path = os.path.join(tmp_dir, os.path.basename(src_module_zip))
    with zipfile.ZipFile(dst_zip_path, 'w') as dst_zip:
      for info in src_zip.infolist():
        entry_path = info.filename
        if entry_path in locale_entries:
          # Retarget locale assets into per-language directories.
          target_path = _RewriteLanguageAssetPath(entry_path)
        else:
          target_path = entry_path

        build_utils.AddToZipHermetic(
            dst_zip,
            target_path,
            data=src_zip.read(entry_path),
            compress=info.compress_type != zipfile.ZIP_STORED)

  return dst_zip_path
+
+
def _GenerateBaseResourcesAllowList(base_module_rtxt_path,
                                    base_allowlist_rtxt_path):
  """Generate a allowlist of base master resource ids.

  Args:
    base_module_rtxt_path: Path to base module R.txt file.
    base_allowlist_rtxt_path: Path to base allowlist R.txt file.
  Returns:
    list of resource ids.
  """
  # Only the resource ids (the map keys) are needed here.
  return resource_utils.GenerateStringResourcesAllowList(
      base_module_rtxt_path, base_allowlist_rtxt_path).keys()
+
+
+def _ConcatTextFiles(in_paths, out_path):
+ """Concatenate the contents of multiple text files into one.
+
+ The each file contents is preceded by a line containing the original filename.
+
+ Args:
+ in_paths: List of input file paths.
+ out_path: Path to output file.
+ """
+ with open(out_path, 'w') as out_file:
+ for in_path in in_paths:
+ if not os.path.exists(in_path):
+ continue
+ with open(in_path, 'r') as in_file:
+ out_file.write('-- Contents of {}\n'.format(os.path.basename(in_path)))
+ out_file.write(in_file.read())
+
+
+def _LoadPathmap(pathmap_path):
+ """Load the pathmap of obfuscated resource paths.
+
+ Returns: A dict mapping from obfuscated paths to original paths or an
+ empty dict if passed a None |pathmap_path|.
+ """
+ if pathmap_path is None:
+ return {}
+
+ pathmap = {}
+ with open(pathmap_path, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('--') or line == '':
+ continue
+ original, renamed = line.split(' -> ')
+ pathmap[renamed] = original
+ return pathmap
+
+
def _WriteBundlePathmap(module_pathmap_paths, module_names,
                        bundle_pathmap_path):
  """Combine the contents of module pathmaps into a bundle pathmap.

  This rebases the resource paths inside the module pathmap before adding them
  to the bundle pathmap. So res/a.xml inside the base module pathmap would be
  base/res/a.xml in the bundle pathmap.
  """
  with open(bundle_pathmap_path, 'w') as out_file:
    for pathmap_path, module_name in zip(module_pathmap_paths, module_names):
      if not os.path.exists(pathmap_path):
        # Modules without obfuscated resources have no pathmap file.
        continue
      for short_path, long_path in _LoadPathmap(pathmap_path).items():
        # Prefix both sides with the module directory name.
        out_file.write('{}/{} -> {}/{}\n'.format(module_name, long_path,
                                                 module_name, short_path))
+
+
def _GetManifestForModule(bundle_path, module_name):
  """Returns the parsed AndroidManifest.xml of one module in the bundle."""
  manifest_xml = bundletool.RunBundleTool(
      ['dump', 'manifest', '--bundle', bundle_path, '--module', module_name])
  return ElementTree.fromstring(manifest_xml)
+
+
def _GetComponentNames(manifest, tag_name):
  """Returns android:name attribute values of all |tag_name| elements."""
  name_attr = '{%s}name' % manifest_utils.ANDROID_NAMESPACE
  names = []
  for element in manifest.iter(tag_name):
    # Elements without the attribute contribute None, matching .get().
    names.append(element.attrib.get(name_attr))
  return names
+
+
def _MaybeCheckServicesAndProvidersPresentInBase(bundle_path, module_zips):
  """Checks bundles with isolated splits define all services in the base module.

  Due to b/169196314, service classes are not found if they are not present in
  the base module. Providers are also checked because they are loaded early in
  startup, and keeping them in the base module gives more time for the chrome
  split to load.

  Args:
    bundle_path: Path to the built bundle.
    module_zips: List of module zip paths; each module's name is derived from
      the zip file's basename.

  Raises:
    Exception: if a feature module declares a provider, or if a declared
      service/provider class is missing from the base module's dex.
  """
  base_manifest = _GetManifestForModule(bundle_path, 'base')
  isolated_splits = base_manifest.get('{%s}isolatedSplits' %
                                      manifest_utils.ANDROID_NAMESPACE)
  if isolated_splits != 'true':
    # Nothing to validate when isolated splits are disabled.
    return

  # Collect service names from all split manifests.
  base_zip = None
  service_names = _GetComponentNames(base_manifest, 'service')
  provider_names = _GetComponentNames(base_manifest, 'provider')
  for module_zip in module_zips:
    name = os.path.basename(module_zip)[:-len('.zip')]
    if name == 'base':
      base_zip = module_zip
    else:
      service_names.extend(
          _GetComponentNames(_GetManifestForModule(bundle_path, name),
                             'service'))
      module_providers = _GetComponentNames(
          _GetManifestForModule(bundle_path, name), 'provider')
      if module_providers:
        raise Exception("Providers should all be declared in the base manifest."
                        " '%s' module declared: %s" % (name, module_providers))

  # Extract classes from the base module's dex.
  # NOTE(review): base_zip stays None if no module zip is named 'base.zip';
  # dexdump.Dump(None) would then fail — confirm callers always include a
  # base module.
  classes = set()
  base_package_name = manifest_utils.GetPackage(base_manifest)
  for package in dexdump.Dump(base_zip):
    for name, package_dict in package.items():
      if not name:
        # Classes in the default (empty) package belong to the app package.
        name = base_package_name
      classes.update('%s.%s' % (name, c)
                     for c in package_dict['classes'].keys())

  ignored_service_names = {
      # Defined in the chime DFM manifest, but unused.
      # org.chromium.chrome.browser.chime.ScheduledTaskService is used instead.
      ("com.google.android.libraries.notifications.entrypoints.scheduled."
       "ScheduledTaskService"),

      # Defined in the chime DFM manifest, only used pre-O (where isolated
      # splits are not supported).
      ("com.google.android.libraries.notifications.executor.impl.basic."
       "ChimeExecutorApiService"),
  }

  # Ensure all services are present in base module.
  for service_name in service_names:
    if service_name not in classes:
      if service_name in ignored_service_names:
        continue
      raise Exception("Service %s should be present in the base module's dex."
                      " See b/169196314 for more details." % service_name)

  # Ensure all providers are present in base module.
  for provider_name in provider_names:
    if provider_name not in classes:
      raise Exception(
          "Provider %s should be present in the base module's dex." %
          provider_name)
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ options = _ParseArgs(args)
+
+ split_dimensions = []
+ if options.split_dimensions:
+ split_dimensions = [x.upper() for x in options.split_dimensions]
+
+
+ with build_utils.TempDir() as tmp_dir:
+ module_zips = [
+ _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
+ for module in options.module_zips]
+
+ base_master_resource_ids = None
+ if options.base_module_rtxt_path:
+ base_master_resource_ids = _GenerateBaseResourcesAllowList(
+ options.base_module_rtxt_path, options.base_allowlist_rtxt_path)
+
+ bundle_config = _GenerateBundleConfigJson(options.uncompressed_assets,
+ options.compress_dex,
+ options.compress_shared_libraries,
+ split_dimensions,
+ base_master_resource_ids)
+
+ tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')
+
+ # Important: bundletool requires that the bundle config file is
+ # named with a .pb.json extension.
+ tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'
+
+ with open(tmp_bundle_config, 'w') as f:
+ f.write(bundle_config)
+
+ cmd_args = build_utils.JavaCmd(options.warnings_as_errors) + [
+ '-jar',
+ bundletool.BUNDLETOOL_JAR_PATH,
+ 'build-bundle',
+ '--modules=' + ','.join(module_zips),
+ '--output=' + tmp_bundle,
+ '--config=' + tmp_bundle_config,
+ ]
+
+ build_utils.CheckOutput(
+ cmd_args,
+ print_stdout=True,
+ print_stderr=True,
+ stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
+ fail_on_output=options.warnings_as_errors)
+
+ if options.validate_services:
+ # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with
+ # isolated splits disabled and 2s for bundles with isolated splits
+ # enabled. Consider making this run in parallel or move into a separate
+ # step before enabling isolated splits by default.
+ _MaybeCheckServicesAndProvidersPresentInBase(tmp_bundle, module_zips)
+
+ shutil.move(tmp_bundle, options.out_bundle)
+
+ if options.rtxt_out_path:
+ _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path)
+
+ if options.pathmap_out_path:
+ _WriteBundlePathmap(options.pathmap_in_paths, options.module_names,
+ options.pathmap_out_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/create_app_bundle.pydeps b/third_party/libwebrtc/build/android/gyp/create_app_bundle.pydeps
new file mode 100644
index 0000000000..503dfb0dc5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_app_bundle.pydeps
@@ -0,0 +1,49 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/base_error.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/catapult/devil/devil/utils/__init__.py
+../../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/dexdump.py
+bundletool.py
+create_app_bundle.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.py b/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.py
new file mode 100755
index 0000000000..059b4dd8af
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an .apks from an .aab."""
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import app_bundle_utils
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument(
+ '--bundle', required=True, help='Path to input .aab file.')
+ parser.add_argument(
+ '--output', required=True, help='Path to output .apks file.')
+ parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.')
+ parser.add_argument(
+ '--keystore-path', required=True, help='Path to keystore.')
+ parser.add_argument(
+ '--keystore-password', required=True, help='Keystore password.')
+ parser.add_argument(
+ '--keystore-name', required=True, help='Key name within keystore')
+ parser.add_argument(
+ '--minimal',
+ action='store_true',
+ help='Create APKs archive with minimal language support.')
+ parser.add_argument('--local-testing',
+ action='store_true',
+ help='Create APKs archive with local testing support.')
+
+ args = parser.parse_args()
+
+ app_bundle_utils.GenerateBundleApks(args.bundle,
+ args.output,
+ args.aapt2_path,
+ args.keystore_path,
+ args.keystore_password,
+ args.keystore_name,
+ local_testing=args.local_testing,
+ minimal=args.minimal,
+ check_for_noop=False)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.pydeps b/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.pydeps
new file mode 100644
index 0000000000..5e04dae1d9
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_app_bundle_apks.pydeps
@@ -0,0 +1,37 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+../pylib/__init__.py
+../pylib/utils/__init__.py
+../pylib/utils/app_bundle_utils.py
+bundletool.py
+create_app_bundle_apks.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.py b/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.py
new file mode 100755
index 0000000000..1bdb7670d3
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a wrapper script to manage an Android App Bundle."""
+
+import argparse
+import os
+import string
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = string.Template("""\
+#!/usr/bin/env python3
+#
+# This file was generated by build/android/gyp/create_bundle_wrapper_script.py
+
+import os
+import sys
+
+def main():
+ script_directory = os.path.dirname(__file__)
+ resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
+ script_directory, p))
+ sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
+ import apk_operations
+
+ additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APK_PATHS}]
+ apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
+ bundle_path=resolve(${BUNDLE_PATH}),
+ bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
+ additional_apk_paths=additional_apk_paths,
+ aapt2_path=resolve(${AAPT2_PATH}),
+ keystore_path=resolve(${KEYSTORE_PATH}),
+ keystore_password=${KEYSTORE_PASSWORD},
+ keystore_alias=${KEY_NAME},
+ package_name=${PACKAGE_NAME},
+ command_line_flags_file=${FLAGS_FILE},
+ proguard_mapping_path=resolve(${MAPPING_PATH}),
+ target_cpu=${TARGET_CPU},
+ system_image_locales=${SYSTEM_IMAGE_LOCALES},
+ default_modules=${DEFAULT_MODULES})
+
+if __name__ == '__main__':
+ sys.exit(main())
+""")
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--script-output-path', required=True,
+ help='Output path for executable script.')
+ parser.add_argument('--bundle-path', required=True)
+ parser.add_argument('--bundle-apks-path', required=True)
+ parser.add_argument(
+ '--additional-apk-path',
+ action='append',
+ dest='additional_apk_paths',
+ default=[],
+ help='Paths to APKs to be installed prior to --apk-path.')
+ parser.add_argument('--package-name', required=True)
+ parser.add_argument('--aapt2-path', required=True)
+ parser.add_argument('--keystore-path', required=True)
+ parser.add_argument('--keystore-password', required=True)
+ parser.add_argument('--key-name', required=True)
+ parser.add_argument('--command-line-flags-file')
+ parser.add_argument('--proguard-mapping-path')
+ parser.add_argument('--target-cpu')
+ parser.add_argument('--system-image-locales')
+ parser.add_argument('--default-modules', nargs='*', default=[])
+ args = parser.parse_args(args)
+
+ def relativize(path):
+ """Returns the path relative to the output script directory."""
+ if path is None:
+ return path
+ return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+ wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
+ wrapped_script_dir = relativize(wrapped_script_dir)
+ with open(args.script_output_path, 'w') as script:
+ script_dict = {
+ 'WRAPPED_SCRIPT_DIR':
+ repr(wrapped_script_dir),
+ 'OUTPUT_DIR':
+ repr(relativize('.')),
+ 'BUNDLE_PATH':
+ repr(relativize(args.bundle_path)),
+ 'BUNDLE_APKS_PATH':
+ repr(relativize(args.bundle_apks_path)),
+ 'ADDITIONAL_APK_PATHS':
+ [relativize(p) for p in args.additional_apk_paths],
+ 'PACKAGE_NAME':
+ repr(args.package_name),
+ 'AAPT2_PATH':
+ repr(relativize(args.aapt2_path)),
+ 'KEYSTORE_PATH':
+ repr(relativize(args.keystore_path)),
+ 'KEYSTORE_PASSWORD':
+ repr(args.keystore_password),
+ 'KEY_NAME':
+ repr(args.key_name),
+ 'MAPPING_PATH':
+ repr(relativize(args.proguard_mapping_path)),
+ 'FLAGS_FILE':
+ repr(args.command_line_flags_file),
+ 'TARGET_CPU':
+ repr(args.target_cpu),
+ 'SYSTEM_IMAGE_LOCALES':
+ repr(build_utils.ParseGnList(args.system_image_locales)),
+ 'DEFAULT_MODULES':
+ repr(args.default_modules),
+ }
+ script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+ os.chmod(args.script_output_path, 0o750)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.pydeps b/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 0000000000..7758ed6272
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../gn_helpers.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_java_binary_script.py b/third_party/libwebrtc/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000000..91fe600ea8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python3
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+extra_program_args = {extra_program_args}
+java_path = {java_path}
+if os.getcwd() != self_dir:
+ offset = os.path.relpath(self_dir, os.getcwd())
+ fix_path = lambda p: os.path.normpath(os.path.join(offset, p))
+ classpath = [fix_path(p) for p in classpath]
+ java_path = fix_path(java_path)
+java_cmd = [java_path]
+# This is a simple argparser for jvm, jar, and classpath arguments.
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+parser.add_argument('--classpath')
+# Test_runner parses the classpath for sharding junit tests.
+parser.add_argument('--print-classpath', action='store_true',
+ help='Prints the classpass. Used by test_runner.')
+known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
+
+if known_args.print_classpath:
+ sys.stdout.write(':'.join(classpath))
+ sys.exit(0)
+
+if known_args.jvm_args:
+ jvm_arguments = known_args.jvm_args.strip('"').split()
+ java_cmd.extend(jvm_arguments)
+if known_args.jar_args:
+ jar_arguments = known_args.jar_args.strip('"').split()
+ if unknown_args:
+ raise Exception('There are unknown arguments')
+else:
+ jar_arguments = unknown_args
+
+if known_args.classpath:
+ classpath += [known_args.classpath]
+
+{extra_flags}
+java_cmd.extend(
+ ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(jar_arguments)
+os.execvp(java_cmd[0], java_cmd)
+"""
+
+def main(argv):
+ argv = build_utils.ExpandFileArgs(argv)
+ parser = optparse.OptionParser()
+ parser.add_option('--output', help='Output path for executable script.')
+ parser.add_option('--main-class',
+ help='Name of the java class with the "main" entry point.')
+ parser.add_option('--classpath', action='append', default=[],
+ help='Classpath for running the jar.')
+ parser.add_option('--noverify', action='store_true',
+ help='JVM flag: noverify.')
+ parser.add_option('--tiered-stop-at-level-one',
+ action='store_true',
+ help='JVM flag: -XX:TieredStopAtLevel=1.')
+
+ options, extra_program_args = parser.parse_args(argv)
+
+ extra_flags = []
+ if options.noverify:
+ extra_flags.append('java_cmd.append("-noverify")')
+ if options.tiered_stop_at_level_one:
+ extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")')
+
+ classpath = []
+ for cp_arg in options.classpath:
+ classpath += build_utils.ParseGnList(cp_arg)
+
+ run_dir = os.path.dirname(options.output)
+ classpath = [os.path.relpath(p, run_dir) for p in classpath]
+ java_path = os.path.relpath(
+ os.path.join(build_utils.JAVA_HOME, 'bin', 'java'), run_dir)
+
+ with build_utils.AtomicOutput(options.output, mode='w') as script:
+ script.write(
+ script_template.format(classpath=('"%s"' % '", "'.join(classpath)),
+ java_path=repr(java_path),
+ main_class=options.main_class,
+ extra_program_args=repr(extra_program_args),
+ extra_flags='\n'.join(extra_flags)))
+
+ os.chmod(options.output, 0o750)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/create_java_binary_script.pydeps b/third_party/libwebrtc/build/android/gyp/create_java_binary_script.pydeps
new file mode 100644
index 0000000000..6bc21fa7e2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_java_binary_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py
+../../gn_helpers.py
+create_java_binary_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_r_java.py b/third_party/libwebrtc/build/android/gyp/create_r_java.py
new file mode 100755
index 0000000000..97e512d2f8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_r_java.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.java file from a list of R.txt files."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+
+def _ConcatRTxts(rtxt_in_paths, combined_out_path):
+ all_lines = set()
+ for rtxt_in_path in rtxt_in_paths:
+ with open(rtxt_in_path) as rtxt_in:
+ all_lines.update(rtxt_in.read().splitlines())
+ with open(combined_out_path, 'w') as combined_out:
+ combined_out.write('\n'.join(sorted(all_lines)))
+
+
+def _CreateRJava(rtxts, package_name, srcjar_out):
+ with resource_utils.BuildContext() as build:
+ _ConcatRTxts(rtxts, build.r_txt_path)
+ rjava_build_options = resource_utils.RJavaBuildOptions()
+ rjava_build_options.ExportAllResources()
+ rjava_build_options.ExportAllStyleables()
+ rjava_build_options.GenerateOnResourcesLoaded(fake=True)
+ resource_utils.CreateRJavaFiles(build.srcjar_dir,
+ package_name,
+ build.r_txt_path,
+ extra_res_packages=[],
+ rjava_build_options=rjava_build_options,
+ srcjar_out=srcjar_out,
+ ignore_mismatched_values=True)
+ build_utils.ZipDir(srcjar_out, build.srcjar_dir)
+
+
+def main(args):
+ parser = argparse.ArgumentParser(description='Create an R.java srcjar.')
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--srcjar-out',
+ required=True,
+ help='Path to output srcjar.')
+ parser.add_argument('--deps-rtxts',
+ required=True,
+ help='List of rtxts of resource dependencies.')
+ parser.add_argument('--r-package',
+ required=True,
+ help='R.java package to use.')
+ options = parser.parse_args(build_utils.ExpandFileArgs(args))
+ options.deps_rtxts = build_utils.ParseGnList(options.deps_rtxts)
+
+ _CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out)
+ build_utils.WriteDepfile(options.depfile,
+ options.srcjar_out,
+ inputs=options.deps_rtxts)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/create_r_java.pydeps b/third_party/libwebrtc/build/android/gyp/create_r_java.pydeps
new file mode 100644
index 0000000000..b259751ced
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_r_java.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_java.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_r_txt.py b/third_party/libwebrtc/build/android/gyp/create_r_txt.py
new file mode 100755
index 0000000000..2adde5dfb9
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_r_txt.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a dummy R.txt file from a resource zip."""
+
+import argparse
+import sys
+
+from util import build_utils
+from util import resource_utils
+from util import resources_parser
+
+
+def main(args):
+ parser = argparse.ArgumentParser(
+ description='Create an R.txt from resources.')
+ parser.add_argument('--resources-zip-path',
+ required=True,
+ help='Path to input resources zip.')
+ parser.add_argument('--rtxt-path',
+ required=True,
+ help='Path to output R.txt file.')
+ options = parser.parse_args(build_utils.ExpandFileArgs(args))
+ with build_utils.TempDir() as temp:
+ dep_subdirs = resource_utils.ExtractDeps([options.resources_zip_path], temp)
+ resources_parser.RTxtGenerator(dep_subdirs).WriteRTxtFile(options.rtxt_path)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/create_r_txt.pydeps b/third_party/libwebrtc/build/android/gyp/create_r_txt.pydeps
new file mode 100644
index 0000000000..54e5670eb0
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_r_txt.pydeps
@@ -0,0 +1,32 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_r_txt.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_size_info_files.py b/third_party/libwebrtc/build/android/gyp/create_size_info_files.py
new file mode 100755
index 0000000000..c60b02d7c8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_size_info_files.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python3
+
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates size-info/*.info files used by SuperSize."""
+
+import argparse
+import collections
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+
+
+_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)')
+
+
+def _RemoveDuplicatesFromList(source_list):
+ return collections.OrderedDict.fromkeys(source_list).keys()
+
+
+def _TransformAarPaths(path):
+ # .aar files within //third_party/android_deps have a version suffix.
+ # The suffix changes each time .aar files are updated, which makes size diffs
+ # hard to compare (since the before/after have different source paths).
+ # Rather than changing how android_deps works, we employ this work-around
+ # to normalize the paths.
+ # From: .../androidx_appcompat_appcompat/appcompat-1.1.0.aar/res/...
+ # To: .../androidx_appcompat_appcompat.aar/res/...
+ # https://crbug.com/1056455
+ if 'android_deps' not in path:
+ return path
+ return _AAR_VERSION_PATTERN.sub(r'\1', path)
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+ # Concatenate them all.
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(res_info_path, only_if_changed=False,
+ mode='w+') as dst:
+ for p in info_paths:
+ with open(p) as src:
+ dst.writelines(_TransformAarPaths(l) for l in src)
+
+
+def _PakInfoPathsForAssets(assets):
+ return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+ info_lines = set()
+ for pak_info_path in pak_infos:
+ with open(pak_info_path, 'r') as src_info_file:
+ info_lines.update(_TransformAarPaths(x) for x in src_info_file)
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(merged_path, only_if_changed=False,
+ mode='w+') as f:
+ f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+ # Input: base/android/java/src/org/chromium/Foo.class
+ # Output: base.android.java.src.org.chromium.Foo
+ if not path.endswith('.class'):
+ return ''
+ path = os.path.splitext(path)[0]
+ parts = []
+ while path:
+ # Use split to be platform independent.
+ head, tail = os.path.split(path)
+ path = head
+ parts.append(tail)
+ parts.reverse() # Package comes first
+ return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+ """Merge several .jar.info files to generate an .apk.jar.info.
+
+ Args:
+ output: output file path.
+ inputs: List of .jar.info or .jar files.
+ """
+ info_data = dict()
+ for path in inputs:
+ # For non-prebuilts: .jar.info files are written by compile_java.py and map
+ # .class files to .java source paths.
+ #
+ # For prebuilts: No .jar.info file exists, we scan the .jar files here and
+ # map .class files to the .jar.
+ #
+ # For .aar files: We look for a "source.info" file in the containing
+ # directory in order to map classes back to the .aar (rather than mapping
+ # them to the extracted .jar file).
+ if path.endswith('.info'):
+ info_data.update(jar_info_utils.ParseJarInfoFile(path))
+ else:
+ attributed_path = path
+ if not path.startswith('..'):
+ parent_path = os.path.dirname(path)
+      # See if it's a sub-jar within the .aar.
+ if os.path.basename(parent_path) == 'libs':
+ parent_path = os.path.dirname(parent_path)
+ aar_source_info_path = os.path.join(parent_path, 'source.info')
+ # source.info files exist only for jars from android_aar_prebuilt().
+ # E.g. Could have an java_prebuilt() pointing to a generated .jar.
+ if os.path.exists(aar_source_info_path):
+ attributed_path = jar_info_utils.ReadAarSourceInfo(
+ aar_source_info_path)
+
+ with zipfile.ZipFile(path) as zip_info:
+ for name in zip_info.namelist():
+ fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+ if fully_qualified_name:
+ info_data[fully_qualified_name] = _TransformAarPaths('{}/{}'.format(
+ attributed_path, name))
+
+ # only_if_changed=False since no build rules depend on this as an input.
+ with build_utils.AtomicOutput(output, only_if_changed=False) as f:
+ jar_info_utils.WriteJarInfoFile(f, info_data)
+
+
+def _FindJarInputs(jar_paths):
+ ret = []
+ for jar_path in jar_paths:
+ jar_info_path = jar_path + '.info'
+ if os.path.exists(jar_info_path):
+ ret.append(jar_info_path)
+ else:
+ ret.append(jar_path)
+ return ret
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+ parser = argparse.ArgumentParser(description=__doc__)
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument(
+ '--jar-info-path', required=True, help='Output .jar.info file')
+ parser.add_argument(
+ '--pak-info-path', required=True, help='Output .pak.info file')
+ parser.add_argument(
+ '--res-info-path', required=True, help='Output .res.info file')
+ parser.add_argument(
+ '--jar-files',
+ required=True,
+ action='append',
+ help='GN-list of .jar file paths')
+ parser.add_argument(
+ '--assets',
+ required=True,
+ action='append',
+ help='GN-list of files to add as assets in the form '
+ '"srcPath:zipPath", where ":zipPath" is optional.')
+ parser.add_argument(
+ '--uncompressed-assets',
+ required=True,
+ action='append',
+ help='Same as --assets, except disables compression.')
+ parser.add_argument(
+ '--in-res-info-path',
+ required=True,
+ action='append',
+ help='Paths to .ap_.info files')
+
+ options = parser.parse_args(args)
+
+ options.jar_files = build_utils.ParseGnList(options.jar_files)
+ options.assets = build_utils.ParseGnList(options.assets)
+ options.uncompressed_assets = build_utils.ParseGnList(
+ options.uncompressed_assets)
+
+ jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files))
+ pak_inputs = _PakInfoPathsForAssets(options.assets +
+ options.uncompressed_assets)
+ res_inputs = options.in_res_info_path
+
+ # Just create the info files every time. See https://crbug.com/1045024
+ _MergeJarInfoFiles(options.jar_info_path, jar_inputs)
+ _MergePakInfoFiles(options.pak_info_path, pak_inputs)
+ _MergeResInfoFiles(options.res_info_path, res_inputs)
+
+ all_inputs = jar_inputs + pak_inputs + res_inputs
+ build_utils.WriteDepfile(options.depfile,
+ options.jar_info_path,
+ inputs=all_inputs)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/create_size_info_files.pydeps b/third_party/libwebrtc/build/android/gyp/create_size_info_files.pydeps
new file mode 100644
index 0000000000..1a69c553d7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_size_info_files.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py
+../../gn_helpers.py
+create_size_info_files.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.py b/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 0000000000..772dab7709
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings!
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_locale', whose value will be the matching Chromium locale name.
+E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with other string defined
+# by other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <string name="current_detected_ui_locale_name">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+ return _TEMPLATE.format(resource_text=locale)
+
+
def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
  """Writes one strings.xml entry naming |locale| into |out_zip|.

  The entry is placed under values-<android_locale>/ when |android_locale| is
  non-empty, otherwise under the default values/ directory.
  """
  if android_locale:
    zip_path = 'values-%s/strings.xml' % android_locale
  else:
    zip_path = 'values/strings.xml'
  xml_data = _GenerateLocaleStringsXml(locale)
  # Hermetic add keeps archive metadata deterministic across builds.
  build_utils.AddToZipHermetic(out_zip, zip_path, data=xml_data,
                               compress=False)
+
+
def main():
  """Parses flags and writes the locale-name resource zip."""
  parser = argparse.ArgumentParser(
      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
  parser.add_argument(
      '--locale-list',
      required=True,
      help='GN-list of Chrome-specific locale names.')
  parser.add_argument(
      '--output-zip', required=True, help='Output zip archive path.')
  args = parser.parse_args()

  locales = build_utils.ParseGnList(args.locale_list)
  if not locales:
    raise Exception('Locale list cannot be empty!')

  with build_utils.AtomicOutput(args.output_zip) as tmp_file:
    with zipfile.ZipFile(tmp_file, 'w') as out_zip:
      # First, write the default value, since aapt requires one.
      _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)
      for chrome_locale in locales:
        _AddLocaleResourceFileToZip(
            out_zip, resource_utils.ToAndroidLocaleName(chrome_locale),
            chrome_locale)
+
+
# Script entry point.
if __name__ == '__main__':
  main()
diff --git a/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.pydeps b/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.pydeps
new file mode 100644
index 0000000000..a147237677
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/create_ui_locale_resources.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+create_ui_locale_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/desugar.py b/third_party/libwebrtc/build/android/gyp/desugar.py
new file mode 100755
index 0000000000..87eb1590a5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/desugar.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+
def main():
  """Runs Bazel's Desugar.jar over --input-jar and writes --output-jar.

  Builds the java command line (flags support @file expansion), invokes the
  tool, and optionally writes a depfile listing bootclasspath + classpath
  entries as inputs.
  """
  args = build_utils.ExpandFileArgs(sys.argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--desugar-jar', required=True,
                      help='Path to Desugar.jar.')
  parser.add_argument('--input-jar', required=True,
                      help='Jar input path to include .class files from.')
  parser.add_argument('--output-jar', required=True,
                      help='Jar output path.')
  parser.add_argument('--classpath',
                      action='append',
                      required=True,
                      help='Classpath.')
  parser.add_argument('--bootclasspath', required=True,
                      help='Path to javac bootclasspath interface jar.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  options = parser.parse_args(args)

  # GN lists arrive encoded as strings; flatten them into real lists.
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)

  cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
      '-jar',
      options.desugar_jar,
      '--input',
      options.input_jar,
      '--output',
      options.output_jar,
      '--generate_base_classes_for_default_methods',
      # Don't include try-with-resources files in every .jar. Instead, they
      # are included via //third_party/bazel/desugar:desugar_runtime_java.
      '--desugar_try_with_resources_omit_runtime_classes',
  ]
  for path in options.bootclasspath:
    cmd += ['--bootclasspath_entry', path]
  for path in options.classpath:
    cmd += ['--classpath_entry', path]
  # Reflective-access warnings from newer JDKs are expected noise; filter.
  build_utils.CheckOutput(
      cmd,
      print_stdout=False,
      stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings,
      fail_on_output=options.warnings_as_errors)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.output_jar,
                             inputs=options.bootclasspath + options.classpath)
+
+
if __name__ == '__main__':
  # main() returns None, so the process exit status is 0 on success.
  sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/desugar.pydeps b/third_party/libwebrtc/build/android/gyp/desugar.pydeps
new file mode 100644
index 0000000000..3e5c9ea231
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/desugar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/desugar.pydeps build/android/gyp/desugar.py
+../../gn_helpers.py
+desugar.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/dex.py b/third_party/libwebrtc/build/android/gyp/dex.py
new file mode 100755
index 0000000000..79304a6392
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dex.py
@@ -0,0 +1,650 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+from util import md5_check
+from util import zipalign
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
+
+import convert_dex_profile
+
+
_DEX_XMX = '2G'  # Increase this when __final_dex OOMs.

# Known-benign D8/R8 stderr lines. CreateStderrFilter() below applies
# re.escape() to each entry, so these match as literal substrings.
_IGNORE_WARNINGS = (
    # Caused by Play Services:
    r'Type `libcore.io.Memory` was not found',
    # Caused by flogger supporting these as fallbacks. Not needed at runtime.
    r'Type `dalvik.system.VMStack` was not found',
    r'Type `sun.misc.JavaLangAccess` was not found',
    r'Type `sun.misc.SharedSecrets` was not found',
    # Caused by jacoco code coverage:
    r'Type `java.lang.management.ManagementFactory` was not found',
    # TODO(wnwen): Remove this after R8 version 3.0.26-dev:
    r'Missing class sun.misc.Unsafe',
    # Caused when the test apk and the apk under test do not having native libs.
    r'Missing class org.chromium.build.NativeLibraries',
    # Caused by internal annotation: https://crbug.com/1180222
    r'Missing class com.google.errorprone.annotations.RestrictedInheritance',
    # Caused by internal protobuf package: https://crbug.com/1183971
    r'referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension',  # pylint: disable=line-too-long
    # Caused by using Bazel desugar instead of D8 for desugar, since Bazel
    # desugar doesn't preserve interfaces in the same way. This should be
    # removed when D8 is used for desugaring.
    r'Warning: Cannot emulate interface ',
    # Only relevant for R8 when optimizing an app that doesn't use proto.
    r'Ignoring -shrinkunusedprotofields since the protobuf-lite runtime is',
)
+
+
def _ParseArgs(args):
  """Parses command-line arguments (after @file expansion).

  Returns:
    argparse.Namespace with all GN-list flags flattened into real lists.
  """
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()

  build_utils.AddDepfileOption(parser)
  parser.add_argument('--output', required=True, help='Dex output path.')
  parser.add_argument(
      '--class-inputs',
      action='append',
      help='GN-list of .jars with .class files.')
  parser.add_argument(
      '--class-inputs-filearg',
      action='append',
      help='GN-list of .jars with .class files (added to depfile).')
  parser.add_argument(
      '--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
  parser.add_argument(
      '--dex-inputs-filearg',
      action='append',
      help='GN-list of .jars with .dex files (added to depfile).')
  parser.add_argument(
      '--incremental-dir',
      help='Path of directory to put intermediate dex files.')
  parser.add_argument('--main-dex-rules-path',
                      action='append',
                      help='Path to main dex rules for multidex.')
  parser.add_argument(
      '--multi-dex',
      action='store_true',
      help='Allow multiple dex files within output.')
  parser.add_argument('--library',
                      action='store_true',
                      help='Allow numerous dex files within output.')
  parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
  parser.add_argument('--skip-custom-d8',
                      action='store_true',
                      help='When rebuilding the CustomD8 jar, this may be '
                      'necessary to avoid incompatibility with the new r8 '
                      'jar.')
  parser.add_argument('--custom-d8-jar-path',
                      required=True,
                      help='Path to our customized d8 jar.')
  parser.add_argument('--desugar-dependencies',
                      help='Path to store desugar dependencies.')
  parser.add_argument('--desugar', action='store_true')
  parser.add_argument(
      '--bootclasspath',
      action='append',
      help='GN-list of bootclasspath. Needed for --desugar')
  parser.add_argument(
      '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
  parser.add_argument('--show-desugar-default-interface-warnings',
                      action='store_true',
                      help='Enable desugaring warnings.')
  parser.add_argument(
      '--classpath',
      action='append',
      help='GN-list of full classpath. Needed for --desugar')
  parser.add_argument(
      '--release',
      action='store_true',
      help='Run D8 in release mode. Release mode maximises main dex and '
      'deletes non-essential line number information (vs debug which minimizes '
      'main dex and keeps all line number information, and then some.')
  parser.add_argument(
      '--min-api', help='Minimum Android API level compatibility.')
  parser.add_argument('--force-enable-assertions',
                      action='store_true',
                      help='Forcefully enable javac generated assertion code.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  parser.add_argument('--dump-inputs',
                      action='store_true',
                      help='Use when filing D8 bugs to capture inputs.'
                      ' Stores inputs to d8inputs.zip')

  group = parser.add_argument_group('Dexlayout')
  group.add_argument(
      '--dexlayout-profile',
      help=('Text profile for dexlayout. If present, a dexlayout '
            'pass will happen'))
  group.add_argument(
      '--profman-path',
      help=('Path to ART profman binary. There should be a lib/ directory at '
            'the same path with shared libraries (shared with dexlayout).'))
  group.add_argument(
      '--dexlayout-path',
      help=('Path to ART dexlayout binary. There should be a lib/ directory at '
            'the same path with shared libraries (shared with dexlayout).'))
  group.add_argument('--dexdump-path', help='Path to dexdump binary.')
  group.add_argument(
      '--proguard-mapping-path',
      help=('Path to proguard map from obfuscated symbols in the jar to '
            'unobfuscated symbols present in the code. If not present, the jar '
            'is assumed not to be obfuscated.'))

  options = parser.parse_args(args)

  # Flag-combination sanity checks: dexlayout needs its helper binaries, and
  # a proguard mapping is only meaningful alongside a dexlayout profile.
  if options.dexlayout_profile:
    build_utils.CheckOptions(
        options,
        parser,
        required=('profman_path', 'dexlayout_path', 'dexdump_path'))
  elif options.proguard_mapping_path is not None:
    parser.error('Unexpected proguard mapping without dexlayout')

  if options.main_dex_rules_path and not options.multi_dex:
    parser.error('--main-dex-rules-path is unused if multidex is not enabled')

  # GN lists arrive encoded as strings; flatten them into real Python lists.
  options.class_inputs = build_utils.ParseGnList(options.class_inputs)
  options.class_inputs_filearg = build_utils.ParseGnList(
      options.class_inputs_filearg)
  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)
  options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
  options.dex_inputs_filearg = build_utils.ParseGnList(
      options.dex_inputs_filearg)

  return options
+
+
def CreateStderrFilter(show_desugar_default_interface_warnings):
  """Returns a callable that strips known-benign D8 warnings from stderr."""

  def filter_stderr(output):
    patterns = list(_IGNORE_WARNINGS)

    # When using Bazel's Desugar tool to desugar lambdas and interface methods,
    # we do not provide D8 with a classpath, which causes a lot of warnings
    # from D8's default interface desugaring pass. Not having a classpath makes
    # incremental dexing much more effective. D8 still does backported method
    # desugaring.
    # These warnings are also turned off when bytecode checks are turned off.
    if not show_desugar_default_interface_warnings:
      patterns.append('default or static interface methods')

    combined = '|'.join(re.escape(p) for p in patterns)
    output = build_utils.FilterLines(output, combined)

    # Each warning has a prefix line of the file it's from. If we've filtered
    # out the warning, then also filter out the file header.
    # E.g.:
    #   Warning in path/to/Foo.class:
    #     Error message #1 indented here.
    #     Error message #2 indented here.
    output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE)
    return output

  return filter_stderr
+
+
def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors,
           show_desugar_default_interface_warnings):
  """Runs D8, spilling overlong command lines into a temporary @flag file."""
  full_cmd = dex_cmd + ['--output', output_path] + input_paths

  stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings)

  with tempfile.NamedTemporaryFile(mode='w') as flag_file:
    # Chosen arbitrarily. Needed to avoid command-line length limits.
    MAX_ARGS = 50
    if len(full_cmd) > MAX_ARGS:
      flag_file.write('\n'.join(full_cmd[MAX_ARGS:]))
      flag_file.flush()
      full_cmd = full_cmd[:MAX_ARGS] + ['@' + flag_file.name]

    # stdout sometimes spams with things like:
    # Stripped invalid locals information from 1 method.
    build_utils.CheckOutput(full_cmd,
                            stderr_filter=stderr_filter,
                            fail_on_output=warnings_as_errors)
+
+
+def _EnvWithArtLibPath(binary_path):
+ """Return an environment dictionary for ART host shared libraries.
+
+ Args:
+ binary_path: the path to an ART host binary.
+
+ Returns:
+ An environment dictionary where LD_LIBRARY_PATH has been augmented with the
+ shared library path for the binary. This assumes that there is a lib/
+ directory in the same location as the binary.
+ """
+ lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
+ env = os.environ.copy()
+ libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
+ libraries.append(lib_path)
+ env['LD_LIBRARY_PATH'] = ':'.join(libraries)
+ return env
+
+
def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
  """Converts an ART text profile into a binary profile via profman.

  Args:
    text_profile: The ART text profile to convert.
    input_dex: The input dex file to layout.
    profman_path: Path to the profman binary.
    temp_dir: Directory to work in.

  Returns:
    The name of the binary profile, which will live in temp_dir.
  """
  out_profile = os.path.join(
      temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
  open(out_profile, 'w').close()  # profman needs the output file to exist.
  cmd = [
      profman_path,
      '--apk=' + input_dex,
      '--dex-location=' + input_dex,
      '--create-profile-from=' + text_profile,
      '--reference-profile-file=' + out_profile,
  ]
  # Expected noise about symbols missing from the dex is filtered out.
  ignored = '|'.join([
      r'Could not find (method_id|proto_id|name):',
      r'Could not create type list',
  ])
  build_utils.CheckOutput(
      cmd,
      env=_EnvWithArtLibPath(profman_path),
      stderr_filter=lambda output: build_utils.FilterLines(output, ignored))
  return out_profile
+
+
def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
  """Lays out a dexfile using a binary profile.

  Args:
    binary_profile: An ART binary profile, eg output from
      _CreateBinaryProfile.
    input_dex: The dex file used to create the binary profile.
    dexlayout_path: Path to the dexlayout binary.
    temp_dir: Directory to work in.

  Returns:
    Sorted list of output files produced by dexlayout, located in temp_dir:
    one file for a single-dex input, several for a multidex zip.
  """
  out_dir = os.path.join(temp_dir, 'dexlayout_output')
  os.mkdir(out_dir)
  cmd = [
      dexlayout_path,
      '-u',  # Update checksum
      '-p', binary_profile,
      '-w', out_dir,
      input_dex,
  ]
  build_utils.CheckOutput(
      cmd,
      env=_EnvWithArtLibPath(dexlayout_path),
      stderr_filter=lambda output: build_utils.FilterLines(
          output, r'Can.t mmap dex file.*please zipalign'))
  produced = os.listdir(out_dir)
  if not produced:
    raise Exception('dexlayout unexpectedly produced no output')
  return sorted(os.path.join(out_dir, f) for f in produced)
+
+
+def _ZipMultidex(file_dir, dex_files):
+ """Zip dex files into a multidex.
+
+ Args:
+ file_dir: The directory into which to write the output.
+ dex_files: The dexfiles forming the multizip. Their names must end with
+ classes.dex, classes2.dex, ...
+
+ Returns:
+ The name of the multidex file, which will live in file_dir.
+ """
+ ordered_files = [] # List of (archive name, file name)
+ for f in dex_files:
+ if f.endswith('dex.jar'):
+ ordered_files.append(('classes.dex', f))
+ break
+ if not ordered_files:
+ raise Exception('Could not find classes.dex multidex file in %s',
+ dex_files)
+ for dex_idx in range(2, len(dex_files) + 1):
+ archive_name = 'classes%d.dex' % dex_idx
+ for f in dex_files:
+ if f.endswith(archive_name):
+ ordered_files.append((archive_name, f))
+ break
+ else:
+ raise Exception('Could not find classes%d.dex multidex file in %s',
+ dex_files)
+ if len(set(f[1] for f in ordered_files)) != len(ordered_files):
+ raise Exception('Unexpected clashing filenames for multidex in %s',
+ dex_files)
+
+ zip_name = os.path.join(file_dir, 'multidex_classes.zip')
+ build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
+ for archive_name, file_name in ordered_files),
+ zip_name)
+ return zip_name
+
+
def _ZipAligned(dex_files, output_path):
  """Creates a .dex.jar at |output_path| with 4-byte aligned entries.

  Args:
    dex_files: List of dex files.
    output_path: The output file in which to write the zip.
  """
  with zipfile.ZipFile(output_path, 'w') as z:
    for idx, dex_file in enumerate(dex_files, 1):
      # First entry is classes.dex, then classes2.dex, classes3.dex, ...
      entry_name = 'classes%s.dex' % (idx if idx > 1 else '')
      zipalign.AddToZipHermetic(z, entry_name, src_path=dex_file, alignment=4)
+
+
def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
  """Reorders |tmp_dex_output| using the dexlayout profile; returns new path."""
  if options.proguard_mapping_path is not None:
    matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
    convert_dex_profile.ObfuscateProfile(
        options.dexlayout_profile, tmp_dex_output,
        options.proguard_mapping_path, options.dexdump_path, matching_profile)
  else:
    logging.warning('No obfuscation for %s', options.dexlayout_profile)
    matching_profile = options.dexlayout_profile

  binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
                                        options.profman_path, tmp_dir)
  layout_files = _LayoutDex(binary_profile, tmp_dex_output,
                            options.dexlayout_path, tmp_dir)
  if len(layout_files) > 1:
    return _ZipMultidex(tmp_dir, layout_files)
  if zipfile.is_zipfile(layout_files[0]):
    return layout_files[0]
  final_output = os.path.join(tmp_dir, 'dex_classes.zip')
  _ZipAligned(layout_files, final_output)
  return final_output
+
+
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
  """Merges |d8_inputs| into the final dex file/zip at |output|.

  NOTE: when |options| is None (the MergeDexForIncrementalInstall path), the
  parenthesized expressions below make warnings-as-errors default to True and
  desugar-warnings default to falsy.
  """
  tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
  # D8 must run unless every input is already a .dex; merging is needed for
  # single-file output or non-library builds.
  needs_dexing = not all(f.endswith('.dex') for f in d8_inputs)
  needs_dexmerge = output.endswith('.dex') or not (options and options.library)
  if needs_dexing or needs_dexmerge:
    if options and options.main_dex_rules_path:
      for main_dex_rule in options.main_dex_rules_path:
        dex_cmd = dex_cmd + ['--main-dex-rules', main_dex_rule]

    tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
    os.mkdir(tmp_dex_dir)

    _RunD8(dex_cmd, d8_inputs, tmp_dex_dir,
           (not options or options.warnings_as_errors),
           (options and options.show_desugar_default_interface_warnings))
    logging.debug('Performed dex merging')

    dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]

    if output.endswith('.dex'):
      # A raw .dex output can hold exactly one file.
      if len(dex_files) > 1:
        raise Exception('%d files created, expected 1' % len(dex_files))
      tmp_dex_output = dex_files[0]
    else:
      _ZipAligned(sorted(dex_files), tmp_dex_output)
  else:
    # Skip dexmerger. Just put all incrementals into the .jar individually.
    _ZipAligned(sorted(d8_inputs), tmp_dex_output)
    logging.debug('Quick-zipped %d files', len(d8_inputs))

  if options and options.dexlayout_profile:
    tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)

  # The dex file is complete and can be moved out of tmp_dir.
  shutil.move(tmp_dex_output, output)
+
+
+def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
+ """Returns a list of all intermediate dex file paths."""
+ dex_files = []
+ for jar in class_inputs:
+ with zipfile.ZipFile(jar, 'r') as z:
+ for subpath in z.namelist():
+ if subpath.endswith('.class'):
+ subpath = subpath[:-5] + 'dex'
+ dex_files.append(os.path.join(incremental_dir, subpath))
+ return dex_files
+
+
def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
  """Deletes intermediate .dex files that are no longer needed."""
  wanted = set(dex_files)
  for existing in build_utils.FindInDirectory(dex_dir):
    if existing not in wanted:
      os.unlink(existing)
+
+
+def _ParseDesugarDeps(desugar_dependencies_file):
+ dependents_from_dependency = collections.defaultdict(set)
+ if desugar_dependencies_file and os.path.exists(desugar_dependencies_file):
+ with open(desugar_dependencies_file, 'r') as f:
+ for line in f:
+ dependent, dependency = line.rstrip().split(' -> ')
+ dependents_from_dependency[dependency].add(dependent)
+ return dependents_from_dependency
+
+
def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file,
                                   class_inputs, classpath):
  """Returns the set of classes that must be re-desugared given |changes|."""
  dependents_from_dependency = _ParseDesugarDeps(desugar_dependencies_file)
  required_classes = set()
  # Classpath changes are recorded under '<jar>:<subpath>' keys.
  for jar in classpath:
    for subpath in changes.IterChangedSubpaths(jar):
      required_classes |= dependents_from_dependency['{}:{}'.format(
          jar, subpath)]
  # Direct-input changes are recorded under the subpath alone.
  for jar in class_inputs:
    for subpath in changes.IterChangedSubpaths(jar):
      required_classes |= dependents_from_dependency[subpath]
  return required_classes
+
+
def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set):
  """Extracts .class files from |class_inputs| into |tmp_dir|.

  When |changes| is set, only the changed entries plus |required_classes_set|
  are extracted; otherwise every .class file is.
  """
  extracted = []
  for jar in class_inputs:
    if changes:
      wanted = set(changes.IterChangedSubpaths(jar)) | required_classes_set
      # Bind |wanted| via a default arg so the closure is loop-safe.
      predicate = (
          lambda p, wanted=wanted: p in wanted and p.endswith('.class'))
    else:
      predicate = lambda p: p.endswith('.class')
    extracted += build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate)
  return extracted
+
+
def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
  """Incrementally dexes changed .class files into options.incremental_dir.

  Falls back to a full re-dex (by discarding |changes|) when the change set
  cannot be expressed incrementally: command-line/string changes, or changes
  to files that are not direct inputs.
  """
  # Create temporary directory for classes to be extracted to.
  tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
  os.mkdir(tmp_extract_dir)

  # Do a full rebuild when changes occur in non-input files.
  allowed_changed = set(options.class_inputs)
  allowed_changed.update(options.dex_inputs)
  allowed_changed.update(options.classpath)
  strings_changed = changes.HasStringChanges()
  non_direct_input_changed = next(
      (p for p in changes.IterChangedPaths() if p not in allowed_changed), None)

  if strings_changed or non_direct_input_changed:
    logging.debug('Full dex required: strings_changed=%s path_changed=%s',
                  strings_changed, non_direct_input_changed)
    # A None |changes| makes the extraction below take every .class file.
    changes = None

  if changes:
    required_desugar_classes_set = _ComputeRequiredDesugarClasses(
        changes, options.desugar_dependencies, options.class_inputs,
        options.classpath)
    logging.debug('Class files needing re-desugar: %d',
                  len(required_desugar_classes_set))
  else:
    required_desugar_classes_set = set()
  class_files = _ExtractClassFiles(changes, tmp_extract_dir,
                                   options.class_inputs,
                                   required_desugar_classes_set)
  logging.debug('Extracted class files: %d', len(class_files))

  # If the only change is deleting a file, class_files will be empty.
  if class_files:
    # Dex necessary classes into intermediate dex files.
    dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file']
    if options.desugar_dependencies and not options.skip_custom_d8:
      dex_cmd += ['--file-tmp-prefix', tmp_extract_dir]
    _RunD8(dex_cmd, class_files, options.incremental_dir,
           options.warnings_as_errors,
           options.show_desugar_default_interface_warnings)
    logging.debug('Dexed class files.')
+
+
def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
  """Re-dexes whatever |changes| reports stale, then builds the final dex."""
  logging.debug('_OnStaleMd5')
  with build_utils.TempDir() as tmp_dir:
    if options.incremental_dir:
      # Create directory for all intermediate dex files.
      os.makedirs(options.incremental_dir, exist_ok=True)

      _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
      logging.debug('Stale files deleted')
      _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)

    _CreateFinalDex(
        final_dex_inputs, options.output, tmp_dir, dex_cmd, options=options)
+
+
def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar,
                                  min_api):
  """Merges |src_paths| into |dest_dex_jar| by invoking D8 directly."""
  d8_cmd = build_utils.JavaCmd(verify=False, xmx=_DEX_XMX)
  d8_cmd += [
      '-cp',
      r8_jar_path,
      'com.android.tools.r8.D8',
      '--min-api',
      min_api,
  ]
  with build_utils.TempDir() as tmp_dir:
    _CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, d8_cmd)
+
+
def main(args):
  """Entry point: assembles the D8 command line and runs it only when stale.

  md5_check skips dexing when nothing changed and, for incremental builds,
  reports which jar subpaths changed.
  """
  build_utils.InitLogging('DEX_DEBUG')
  options = _ParseArgs(args)

  # Filearg variants are the same inputs, just routed to the depfile too.
  options.class_inputs += options.class_inputs_filearg
  options.dex_inputs += options.dex_inputs_filearg

  input_paths = options.class_inputs + options.dex_inputs
  input_paths.append(options.r8_jar_path)
  input_paths.append(options.custom_d8_jar_path)
  if options.main_dex_rules_path:
    input_paths.extend(options.main_dex_rules_path)

  depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg

  output_paths = [options.output]

  track_subpaths_allowlist = []
  if options.incremental_dir:
    # Incremental mode: one intermediate .dex per input .class, tracked at
    # subpath granularity.
    final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
        options.class_inputs, options.incremental_dir)
    output_paths += final_dex_inputs
    track_subpaths_allowlist += options.class_inputs
  else:
    final_dex_inputs = list(options.class_inputs)
  final_dex_inputs += options.dex_inputs

  dex_cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx=_DEX_XMX)

  if options.dump_inputs:
    dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip']

  # Choose the wrapped CustomD8 entry point unless explicitly skipped.
  if not options.skip_custom_d8:
    dex_cmd += [
        '-cp',
        '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path),
        'org.chromium.build.CustomD8',
    ]
  else:
    dex_cmd += [
        '-cp',
        options.r8_jar_path,
        'com.android.tools.r8.D8',
    ]

  if options.release:
    dex_cmd += ['--release']
  if options.min_api:
    dex_cmd += ['--min-api', options.min_api]

  if not options.desugar:
    dex_cmd += ['--no-desugaring']
  elif options.classpath:
    # The classpath is used by D8 to for interface desugaring.
    if options.desugar_dependencies and not options.skip_custom_d8:
      dex_cmd += ['--desugar-dependencies', options.desugar_dependencies]
      if track_subpaths_allowlist:
        track_subpaths_allowlist += options.classpath
    depfile_deps += options.classpath
    input_paths += options.classpath
    # Still pass the entire classpath in case a new dependency is needed by
    # desugar, so that desugar_dependencies will be updated for the next build.
    for path in options.classpath:
      dex_cmd += ['--classpath', path]

  if options.classpath or options.main_dex_rules_path:
    # --main-dex-rules requires bootclasspath.
    dex_cmd += ['--lib', build_utils.JAVA_HOME]
    for path in options.bootclasspath:
      dex_cmd += ['--lib', path]
    depfile_deps += options.bootclasspath
    input_paths += options.bootclasspath

  if options.desugar_jdk_libs_json:
    dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
  if options.force_enable_assertions:
    dex_cmd += ['--force-enable-assertions']

  # The changes feature from md5_check allows us to only re-dex the class files
  # that have changed and the class files that need to be re-desugared by D8.
  md5_check.CallAndWriteDepfileIfStale(
      lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
      options,
      input_paths=input_paths,
      input_strings=dex_cmd + [bool(options.incremental_dir)],
      output_paths=output_paths,
      pass_changes=True,
      track_subpaths_allowlist=track_subpaths_allowlist,
      depfile_deps=depfile_deps)
+
+
if __name__ == '__main__':
  # main() returns None; exit status is therefore 0 on success.
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/dex.pydeps b/third_party/libwebrtc/build/android/gyp/dex.pydeps
new file mode 100644
index 0000000000..23856f3c84
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dex.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+dex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.py b/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.py
new file mode 100755
index 0000000000..6304779104
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+from util import build_utils
+
+
def _ParseArgs(args):
  """Parses command-line flags after expanding any @FileArg references."""
  expanded = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  parser.add_argument('--output', required=True, help='Dex output path.')
  parser.add_argument('--r8-path', required=True, help='Path to R8 jar.')
  parser.add_argument('--desugar-jdk-libs-json',
                      help='Path to desugar_jdk_libs.json.')
  parser.add_argument('--desugar-jdk-libs-jar',
                      help='Path to desugar_jdk_libs.jar.')
  parser.add_argument('--desugar-jdk-libs-configuration-jar',
                      help='Path to desugar_jdk_libs_configuration.jar.')
  parser.add_argument('--min-api', required=True, help='minSdkVersion')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  return parser.parse_args(expanded)
+
+
def DexJdkLibJar(r8_path,
                 min_api,
                 desugar_jdk_libs_json,
                 desugar_jdk_libs_jar,
                 desugar_jdk_libs_configuration_jar,
                 output,
                 warnings_as_errors,
                 config_paths=None):
  """Runs R8's L8 tool to dex the desugared JDK library jars.

  Returns:
    True when a classes.dex was produced and moved to |output|; False when
    L8 emitted no dex file (nothing needed desugaring).
  """
  # TODO(agrieve): Spews a lot of stderr about missing classes.
  with build_utils.TempDir() as tmp_dir:
    cmd = build_utils.JavaCmd(warnings_as_errors) + [
        '-cp',
        r8_path,
        'com.android.tools.r8.L8',
        '--min-api',
        min_api,
        '--lib',
        build_utils.JAVA_HOME,
        '--desugared-lib',
        desugar_jdk_libs_json,
    ]

    # If no desugaring is required, no keep rules are generated, and the keep
    # file will not be created.
    for config_path in config_paths or []:
      cmd += ['--pg-conf', config_path]

    cmd += [
        '--output', tmp_dir, desugar_jdk_libs_jar,
        desugar_jdk_libs_configuration_jar
    ]

    build_utils.CheckOutput(cmd,
                            print_stdout=True,
                            fail_on_output=warnings_as_errors)
    if os.path.exists(os.path.join(tmp_dir, 'classes2.dex')):
      raise Exception('Achievement unlocked: desugar_jdk_libs is multidex!')

    # classes.dex might not exist if the "desugar_jdk_libs_jar" is not used
    # at all.
    dex_path = os.path.join(tmp_dir, 'classes.dex')
    if not os.path.exists(dex_path):
      return False
    shutil.move(dex_path, output)
    return True
+
+
def main(args):
  """Command-line entry point: parse flags, then run DexJdkLibJar."""
  opts = _ParseArgs(args)
  DexJdkLibJar(opts.r8_path, opts.min_api, opts.desugar_jdk_libs_json,
               opts.desugar_jdk_libs_jar,
               opts.desugar_jdk_libs_configuration_jar, opts.output,
               opts.warnings_as_errors)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.pydeps b/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.pydeps
new file mode 100644
index 0000000000..28d181f528
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dex_jdk_libs.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex_jdk_libs.pydeps build/android/gyp/dex_jdk_libs.py
+../../gn_helpers.py
+dex_jdk_libs.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/dexsplitter.py b/third_party/libwebrtc/build/android/gyp/dexsplitter.py
new file mode 100755
index 0000000000..80b49c7f8e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dexsplitter.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
def _ParseOptions(args):
  """Parses dexsplitter command-line options.

  Returns:
    An argparse Namespace augmented with a |features| dict mapping each
    feature name to its (GN-list-parsed) list of jar paths.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--depfile', help='Path to the depfile to write to.')
  parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
  parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
  parser.add_argument(
      '--input-dex-zip', help='Path to dex files in zip being split.')
  parser.add_argument(
      '--proguard-mapping-file', help='Path to proguard mapping file.')
  parser.add_argument(
      '--feature-name',
      action='append',
      dest='feature_names',
      help='The name of the feature module.')
  parser.add_argument(
      '--feature-jars',
      action='append',
      # Fixed typo: "compirse" -> "comprise".
      help='GN list of paths to jars which comprise the corresponding '
      'feature.')
  parser.add_argument(
      '--dex-dest',
      action='append',
      dest='dex_dests',
      help='Destination for dex file of the corresponding feature.')
  options = parser.parse_args(args)

  # The three repeated flags must line up 1:1:1 per feature.
  assert len(options.feature_names) == len(options.feature_jars) and len(
      options.feature_names) == len(options.dex_dests), (
          'Mismatched --feature-name/--feature-jars/--dex-dest counts.')
  options.features = {}
  for i, name in enumerate(options.feature_names):
    options.features[name] = build_utils.ParseGnList(options.feature_jars[i])

  return options
+
+
def _RunDexsplitter(options, output_dir):
  """Invokes R8's DexSplitter, writing per-feature output under |output_dir|."""
  cmd = build_utils.JavaCmd() + [
      '-cp',
      options.r8_path,
      'com.android.tools.r8.dexsplitter.DexSplitter',
      '--output',
      output_dir,
      '--proguard-map',
      options.proguard_mapping_file,
  ]

  for base_jar in options.features['base']:
    cmd += ['--base-jar', base_jar]

  # Jars already in the base module must not be listed again for a feature.
  base_jars = set(options.features['base'])
  for feature_name, jars in options.features.items():
    if feature_name == 'base':
      continue
    for jar in jars:
      if jar not in base_jars:
        cmd += ['--feature-jar', jar + ':' + feature_name]

  with build_utils.TempDir() as temp_dir:
    for extracted_path in build_utils.ExtractAll(options.input_dex_zip,
                                                 temp_dir):
      cmd += ['--input', extracted_path]
    build_utils.CheckOutput(cmd)
+
+
def main(args):
  """Splits an input dex zip into per-feature dex outputs.

  With only the 'base' feature present, the input zip is passed through
  unchanged; otherwise R8's DexSplitter produces per-feature output that is
  copied, re-zipped, or moved into each feature's --dex-dest.
  """
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  input_paths = [options.input_dex_zip]
  for feature_jars in options.features.values():
    for feature_jar in feature_jars:
      input_paths.append(feature_jar)

  with build_utils.TempDir() as dexsplitter_output_dir:
    curr_location_to_dest = []
    if len(options.features) == 1:
      # Don't run dexsplitter since it needs at least 1 feature module.
      curr_location_to_dest.append((options.input_dex_zip,
                                    options.dex_dests[0]))
    else:
      _RunDexsplitter(options, dexsplitter_output_dir)

      for i, dest in enumerate(options.dex_dests):
        module_dex_file = os.path.join(dexsplitter_output_dir,
                                       options.feature_names[i], 'classes.dex')
        if os.path.exists(module_dex_file):
          curr_location_to_dest.append((module_dex_file, dest))
        else:
          # DexSplitter emits a .jar instead of a bare classes.dex for some
          # modules; look for that next to the expected path.
          module_dex_file += '.jar'
          assert os.path.exists(
              module_dex_file), 'Dexsplitter tool output not found.'
          # Bug fix: the previous code appended '.jar' a second time here,
          # queueing a nonexistent 'classes.dex.jar.jar' path.
          curr_location_to_dest.append((module_dex_file, dest))

    for curr_location, dest in curr_location_to_dest:
      with build_utils.AtomicOutput(dest) as f:
        if curr_location.endswith('.jar'):
          if dest.endswith('.jar'):
            shutil.copy(curr_location, f.name)
          else:
            # Destination wants a bare dex file: the jar must hold exactly one.
            with zipfile.ZipFile(curr_location, 'r') as z:
              namelist = z.namelist()
              assert len(namelist) == 1, (
                  'Unzipping to single dex file, but not single dex file in ' +
                  options.input_dex_zip)
              z.extract(namelist[0], f.name)
        else:
          if dest.endswith('.jar'):
            build_utils.ZipDir(
                f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
          else:
            shutil.move(curr_location, f.name)

  build_utils.Touch(options.stamp)
  build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/dexsplitter.pydeps b/third_party/libwebrtc/build/android/gyp/dexsplitter.pydeps
new file mode 100644
index 0000000000..cefc5722d5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dexsplitter.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dexsplitter.pydeps build/android/gyp/dexsplitter.py
+../../gn_helpers.py
+dexsplitter.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/dist_aar.py b/third_party/libwebrtc/build/android/gyp/dist_aar.py
new file mode 100755
index 0000000000..6bf0573f51
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dist_aar.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates an Android .aar file."""
+
+import argparse
+import os
+import posixpath
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import filter_zip
+from util import build_utils
+
+
+_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__))
+
+
+def _MergeRTxt(r_paths, include_globs):
+ """Merging the given R.txt files and returns them as a string."""
+ all_lines = set()
+ for r_path in r_paths:
+ if include_globs and not build_utils.MatchesGlob(r_path, include_globs):
+ continue
+ with open(r_path) as f:
+ all_lines.update(f.readlines())
+ return ''.join(sorted(all_lines))
+
+
+def _MergeProguardConfigs(proguard_configs):
+ """Merging the given proguard config files and returns them as a string."""
+ ret = []
+ for config in proguard_configs:
+ ret.append('# FROM: {}'.format(config))
+ with open(config) as f:
+ ret.append(f.read())
+ return '\n'.join(ret)
+
+
+def _AddResources(aar_zip, resource_zips, include_globs):
+ """Adds all resource zips to the given aar_zip.
+
+ Ensures all res/values/* files have unique names by prefixing them.
+ """
+ for i, path in enumerate(resource_zips):
+ if include_globs and not build_utils.MatchesGlob(path, include_globs):
+ continue
+ with zipfile.ZipFile(path) as res_zip:
+ for info in res_zip.infolist():
+ data = res_zip.read(info)
+ dirname, basename = posixpath.split(info.filename)
+ if 'values' in dirname:
+ root, ext = os.path.splitext(basename)
+ basename = '{}_{}{}'.format(root, i, ext)
+ info.filename = posixpath.join(dirname, basename)
+ info.filename = posixpath.join('res', info.filename)
+ aar_zip.writestr(info, data)
+
+
def main(args):
  """Assembles an Android .aar from GN-provided jars, resources and configs."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--output', required=True, help='Path to output aar.')
  parser.add_argument('--jars', required=True, help='GN list of jar inputs.')
  parser.add_argument('--dependencies-res-zips', required=True,
                      help='GN list of resource zips')
  parser.add_argument('--r-text-files', required=True,
                      help='GN list of R.txt files to merge')
  parser.add_argument('--proguard-configs', required=True,
                      help='GN list of ProGuard flag files to merge.')
  parser.add_argument(
      '--android-manifest',
      help='Path to AndroidManifest.xml to include.',
      default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml'))
  parser.add_argument('--native-libraries', default='',
                      help='GN list of native libraries. If non-empty then '
                      'ABI must be specified.')
  parser.add_argument('--abi',
                      help='ABI (e.g. armeabi-v7a) for native libraries.')
  parser.add_argument(
      '--jar-excluded-globs',
      help='GN-list of globs for paths to exclude in jar.')
  parser.add_argument(
      '--jar-included-globs',
      help='GN-list of globs for paths to include in jar.')
  parser.add_argument(
      '--resource-included-globs',
      help='GN-list of globs for paths to include in R.txt and resources zips.')

  options = parser.parse_args(args)

  if options.native_libraries and not options.abi:
    parser.error('You must provide --abi if you have native libs')

  # All list-valued flags arrive as GN-list strings; expand them to Python
  # lists before use.
  options.jars = build_utils.ParseGnList(options.jars)
  options.dependencies_res_zips = build_utils.ParseGnList(
      options.dependencies_res_zips)
  options.r_text_files = build_utils.ParseGnList(options.r_text_files)
  options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
  options.native_libraries = build_utils.ParseGnList(options.native_libraries)
  options.jar_excluded_globs = build_utils.ParseGnList(
      options.jar_excluded_globs)
  options.jar_included_globs = build_utils.ParseGnList(
      options.jar_included_globs)
  options.resource_included_globs = build_utils.ParseGnList(
      options.resource_included_globs)

  # Build into a temp file and move into place afterwards so an interrupted
  # build cannot leave a truncated .aar at --output.
  with tempfile.NamedTemporaryFile(delete=False) as staging_file:
    try:
      with zipfile.ZipFile(staging_file.name, 'w') as z:
        build_utils.AddToZipHermetic(
            z, 'AndroidManifest.xml', src_path=options.android_manifest)

        # Merge all input jars into a single classes.jar, filtered by the
        # exclude/include globs.
        path_transform = filter_zip.CreatePathTransform(
            options.jar_excluded_globs, options.jar_included_globs)
        with tempfile.NamedTemporaryFile() as jar_file:
          build_utils.MergeZips(
              jar_file.name, options.jars, path_transform=path_transform)
          build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name)

        build_utils.AddToZipHermetic(
            z,
            'R.txt',
            data=_MergeRTxt(options.r_text_files,
                            options.resource_included_globs))
        # public.txt is required by the .aar format; emit it empty.
        build_utils.AddToZipHermetic(z, 'public.txt', data='')

        if options.proguard_configs:
          build_utils.AddToZipHermetic(
              z, 'proguard.txt',
              data=_MergeProguardConfigs(options.proguard_configs))

        _AddResources(z, options.dependencies_res_zips,
                      options.resource_included_globs)

        # Native libraries go under jni/<abi>/ per the .aar layout.
        for native_library in options.native_libraries:
          libname = os.path.basename(native_library)
          build_utils.AddToZipHermetic(
              z, os.path.join('jni', options.abi, libname),
              src_path=native_library)
    except:
      # Don't leave the partially-written staging file behind on failure.
      os.unlink(staging_file.name)
      raise
    shutil.move(staging_file.name, options.output)

  if options.depfile:
    all_inputs = (options.jars + options.dependencies_res_zips +
                  options.r_text_files + options.proguard_configs)
    build_utils.WriteDepfile(options.depfile, options.output, all_inputs)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/dist_aar.pydeps b/third_party/libwebrtc/build/android/gyp/dist_aar.pydeps
new file mode 100644
index 0000000000..3182580af7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/dist_aar.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py
+../../gn_helpers.py
+dist_aar.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/extract_unwind_tables.py b/third_party/libwebrtc/build/android/gyp/extract_unwind_tables.py
new file mode 100755
index 0000000000..65c2db441d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/extract_unwind_tables.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables from breakpad symbol files
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered based on function
+address. The output file only contains rows that match the most popular rule
+type in CFI table, to reduce the output size and specify data in compact format.
+See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <val> +".
+2. The RA rules should be of postfix form "CFA <val> + ^".
+Note: breakpad represents dereferencing address with '^' operator.
+
+The output file has 2 tables UNW_INDEX and UNW_DATA, inspired from ARM EHABI
+format. The first table contains function addresses and an index into the
+UNW_DATA table. The second table contains one or more rows for the function
+unwind information.
+
+The output file starts with 4 bytes counting the number of entries in UNW_INDEX.
+Then UNW_INDEX table and UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+ 1. First column 4 byte rows of all the function start address as offset from
+ start of the binary, in sorted order.
+ 2. For each function addr, the second column contains 2 byte indices in order.
+ The indices are offsets (in count of 2 bytes) of the CFI data from start of
+ UNW_DATA.
+The last entry in the table always contains CANT_UNWIND index to specify the
+end address of the last function.
+
+UNW_DATA contains data of all the functions. Each function data contains N rows.
+The data found at the address pointed from UNW_INDEX will be:
+ 2 bytes: N - number of rows that belong to current function.
+ N * 4 bytes: N rows of data. 16 bits : Address offset from function start.
+ 14 bits : CFA offset / 4.
+ 2 bits : RA offset / 4.
+
+The function is not added to the unwind table in following conditions:
+C1. If length of the function code (number of instructions) is greater than
+ 0xFFFF (2 byte address span). This is because we use 16 bits to refer to
+ offset of instruction from start of the address.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+ use 14 bits to denote CFA offset (last 2 bits are 0).
+C3. If the Return Address is stored at an offset >= 16 from the CFA. Some
+ functions which have variable arguments can have offset upto 16.
+ TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since
+ we never have 0.
+C4: Some functions do not have unwind information defined in dwarf info. These
+ functions have index value CANT_UNWIND(0xFFFF) in UNW_INDEX table.
+
+
+Usage:
+ extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+ --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+ """Writes a 32 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<L', val));
+
+
+def _Write2Bytes(output_file, val):
+ """Writes a 16 bit unsigned integer to the given output file."""
+ output_file.write(struct.pack('<H', val));
+
+
+def _FindRuleForRegister(cfi_row, reg):
+ """Returns the postfix expression as string for a given register.
+
+ Breakpad CFI row format specifies rules for unwinding each register in postfix
+ expression form separated by space. Each rule starts with register name and a
+ colon. Eg: "CFI R1: <rule> R2: <rule>".
+ """
+ out = []
+ found_register = False
+ for part in cfi_row:
+ if found_register:
+ if part[-1] == ':':
+ break
+ out.append(part)
+ elif part == reg + ':':
+ found_register = True
+ return ' '.join(out)
+
+
def _GetCfaAndRaOffset(cfi_row):
  """Returns a tuple with 2 numbers (cfa_offset, ra_offset).

  Returns right values if rule matches the predefined criteria. Returns (0, 0)
  otherwise. The criteria for CFA rule is postfix form "SP <val> +" and RA rule
  is postfix form "CFA -<val> + ^".
  """
  cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG)
  ra_rule = _FindRuleForRegister(cfi_row, _RA_REG)

  cfa_offset = 0
  if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule):
    cfa_offset = int(cfa_rule.split()[1], 10)

  ra_offset = 0
  if ra_rule:
    # An RA rule in any other shape invalidates the whole row.
    if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule):
      return (0, 0)
    ra_offset = -1 * int(ra_rule.split()[1], 10)
  return (cfa_offset, ra_offset)
+
+
def _GetAllCfiRows(symbol_file):
  """Returns parsed CFI data from given symbol_file.

  Each entry in the cfi data dictionary returned is a map from function start
  address to array of function rows, starting with FUNCTION type, followed by
  one or more CFI rows.
  """
  cfi_data = {}
  # Rows of the function currently being accumulated; emptied whenever the
  # function is skipped by one of conditions C1-C3 below.
  current_func = []
  for line in symbol_file:
    # dump_syms output arrives as bytes; decode each line before parsing.
    line = line.decode('utf8')
    if 'STACK CFI' not in line:
      continue

    parts = line.split()
    data = {}
    if parts[2] == 'INIT':
      # Add the previous function to the output
      if len(current_func) > 1:
        cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
      current_func = []

      # The function line is of format "STACK CFI INIT <addr> <length> ..."
      data[_ADDR_ENTRY] = int(parts[3], 16)
      data[_LENGTH_ENTRY] = int(parts[4], 16)

      # Condition C1: Skip if length is large.
      if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff:
        continue  # Skip the current function.
    else:
      # The current function is skipped.
      if len(current_func) == 0:
        continue

      # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..."
      data[_ADDR_ENTRY] = int(parts[2], 16)
      (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts)

      # Condition C2 and C3: Skip based on limits on offsets.
      if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff:
        current_func = []
        continue
      assert data[_CFA_REG] % 4 == 0
      # Since we skipped functions with code size larger than 0xffff, we should
      # have no function offset larger than the same value.
      assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff

    if data[_ADDR_ENTRY] == 0:
      # Skip current function, delete all previous entries.
      current_func = []
      continue
    assert data[_ADDR_ENTRY] % 2 == 0
    current_func.append(data)

  # Condition C4: Skip function without CFI rows.
  if len(current_func) > 1:
    cfi_data[current_func[0][_ADDR_ENTRY]] = current_func
  return cfi_data
+
+
def _WriteCfiData(cfi_data, out_file):
  """Writes the CFI data in defined format to out_file."""
  # Stores the final data that will be written to UNW_DATA table, in order
  # with 2 byte items.
  unw_data = []

  # Represent all the CFI data of functions as set of numbers and map them to an
  # index in the |unw_data|. This index is later written to the UNW_INDEX table
  # for each function. This map is used to find index of the data for functions.
  data_to_index = {}
  # Store mapping between the functions to the index.
  func_addr_to_index = {}
  previous_func_end = 0
  for addr, function in sorted(cfi_data.items()):
    # Add an empty function entry when functions CFIs are missing between 2
    # functions.
    if previous_func_end != 0 and addr - previous_func_end > 4:
      func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
    previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]

    assert len(function) > 1
    func_data_arr = []
    func_data = 0
    # The first row contains the function address and length. The rest of the
    # rows have CFI data. Create function data array as given in the format.
    for row in function[1:]:
      addr_offset = row[_ADDR_ENTRY] - addr
      # Pack RA offset / 4 into the low 2 bits of the (4-byte-aligned) CFA
      # offset; parsing guarantees CFA % 4 == 0 and RA offset < 16.
      cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] // 4)

      func_data_arr.append(addr_offset)
      func_data_arr.append(cfa_offset)

    # Consider all the rows in the data as one large integer and add it as a key
    # to the |data_to_index|.
    for data in func_data_arr:
      func_data = (func_data << 16) | data

    row_count = len(func_data_arr) // 2
    if func_data not in data_to_index:
      # When data is not found, create a new index = len(unw_data), and write
      # the data to |unw_data|.
      index = len(unw_data)
      data_to_index[func_data] = index
      unw_data.append(row_count)
      for row in func_data_arr:
        unw_data.append(row)
    else:
      # If the data was found, then use the same index for the function.
      index = data_to_index[func_data]
      assert row_count == unw_data[index]
    func_addr_to_index[addr] = data_to_index[func_data]

  # Mark the end of the last function entry.
  func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND

  # Write the number of entries in the UNW_INDEX table.
  _Write4Bytes(out_file, len(func_addr_to_index))

  # Write the UNW_INDEX table. First list of addresses and then indices.
  sorted_unw_index = sorted(func_addr_to_index.items())
  for addr, index in sorted_unw_index:
    _Write4Bytes(out_file, addr)
  for addr, index in sorted_unw_index:
    _Write2Bytes(out_file, index)

  # Write the UNW_DATA table.
  for data in unw_data:
    _Write2Bytes(out_file, data)
+
+
def main():
  """Runs dump_syms on the input binary and writes the extracted CFI tables."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--input_path', required=True,
      help='The input path of the unstripped binary')
  parser.add_argument(
      '--output_path', required=True,
      help='The path of the output file')
  parser.add_argument(
      '--dump_syms_path', required=True,
      help='The path of the dump_syms binary')
  args = parser.parse_args()

  dump_syms_cmd = ['./' + args.dump_syms_path, args.input_path, '-v']
  # Stream dump_syms output so the (potentially huge) symbol file is never
  # held in memory all at once.
  proc = subprocess.Popen(dump_syms_cmd, stdout=subprocess.PIPE)
  cfi_data = _GetAllCfiRows(proc.stdout)
  if proc.wait():
    sys.stderr.write('dump_syms exited with code {} after {} symbols\n'.format(
        proc.returncode, len(cfi_data)))
    sys.exit(proc.returncode)
  with open(args.output_path, 'wb') as out_file:
    _WriteCfiData(cfi_data, out_file)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/android/gyp/extract_unwind_tables_tests.py b/third_party/libwebrtc/build/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 0000000000..59436ff2cd
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+"""
+
+import optparse
+import os
+import struct
+import sys
+import tempfile
+import unittest
+
+import extract_unwind_tables
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
class TestExtractUnwindTables(unittest.TestCase):
  """Verifies CFI extraction and the binary UNW_INDEX/UNW_DATA layout."""

  def testExtractCfi(self):
    with tempfile.NamedTemporaryFile() as output_file:
      # Synthetic breakpad symbol-file content covering functions that are
      # kept, skipped (CANT_UNWIND), and deduplicated.
      test_data_lines = """
MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so
INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642
FILE 0 ../../base/allocator/allocator_check.cc
FILE 1 ../../base/allocator/allocator_extension.cc
FILE 2 ../../base/allocator/allocator_shim.cc
FUNC 1adcb60 54 0 i2d_name_canon
1adcb60 1a 509 17054
3b94c70 2 69 40
PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize()
PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File)
STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr
STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
STACK CFI 2 .cfa: sp 4 +
STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
STACK CFI 6 .cfa: sp 16 +
STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr
STACK CFI e1a970 .cfa: sp 4 +
STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^
STACK CFI e1a974 .cfa: sp 16 +
STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr
STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 +
STACK CFI e1a1e8 .cfa: sp 80 +
STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr
STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr
STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^
STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr
STACK CFI e17004 2 .cfa: sp 0 + .ra: lr
STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr
STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^
STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4
STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr
STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^
STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr
STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^
STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr
STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^
""".splitlines()
      # NOTE(review): _ParseCfiData is not defined in the extract_unwind_tables
      # module as shown (it defines _GetAllCfiRows/_WriteCfiData); confirm the
      # module still exposes this entry point.
      extract_unwind_tables._ParseCfiData(
          [l.encode('utf8') for l in test_data_lines], output_file.name)

      # Expected per-function rows; [] means the entry must be CANT_UNWIND.
      expected_cfi_data = {
          0xe1a1e4 : [0x2, 0x11, 0x4, 0x50],
          0xe1a296 : [],
          0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10],
          0xe1a990 : [],
          0x3b92e24: [0x28, 0x13],
          0x3b92e62: [],
      }
      expected_function_count = len(expected_cfi_data)

      # Read the whole output back as a flat list of little-endian uint16s.
      actual_output = []
      with open(output_file.name, 'rb') as f:
        while True:
          read = f.read(2)
          if not read:
            break
          actual_output.append(struct.unpack('H', read)[0])

      # First value is size of unw_index table.
      unw_index_size = actual_output[1] << 16 | actual_output[0]
      # |unw_index_size| should match entry count.
      self.assertEqual(expected_function_count, unw_index_size)
      # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing
      # size.
      unw_index_start = 2
      unw_index_addr_end = unw_index_start + expected_function_count * 2
      unw_index_end = unw_index_addr_end + expected_function_count
      unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end]
      unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end]

      unw_data_start = unw_index_end
      unw_data = actual_output[unw_data_start:]

      for func_iter in range(0, expected_function_count):
        # Function addresses are stored as two uint16 halves, low half first.
        func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 |
                     unw_index_addr_col[func_iter * 2])
        index = unw_index_index_col[func_iter]
        # If index is CANT_UNWIND then invalid function.
        if index == 0xFFFF:
          self.assertEqual(expected_cfi_data[func_addr], [])
          continue

        # unw_data[index] is the row count; rows follow as (offset, cfa) pairs.
        func_start = index + 1
        func_end = func_start + unw_data[index] * 2
        self.assertEqual(len(expected_cfi_data[func_addr]),
                         func_end - func_start)
        func_cfi = unw_data[func_start : func_end]
        self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/filter_zip.py b/third_party/libwebrtc/build/android/gyp/filter_zip.py
new file mode 100755
index 0000000000..caa26eb690
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/filter_zip.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import shutil
+import sys
+
+from util import build_utils
+
+
def CreatePathTransform(exclude_globs, include_globs):
  """Returns a function to strip paths for the given patterns.

  Args:
    exclude_globs: List of globs that if matched should be excluded.
    include_globs: List of globs that if not matched should be excluded.

  Returns:
    * None if no filters are needed.
    * A function "(path) -> path" that returns None when |path| should be
      stripped, or |path| otherwise.
  """
  if not exclude_globs and not include_globs:
    return None
  exclude_globs = list(exclude_globs or [])

  def path_transform(path):
    # Exclude filters take precedence over include filters.
    if build_utils.MatchesGlob(path, exclude_globs):
      return None
    if include_globs and not build_utils.MatchesGlob(path, include_globs):
      return None
    return path

  return path_transform
+
+
def main():
  """Copies --input to --output, filtering entries by the given glob lists."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--input', required=True,
                      help='Input zip file.')
  parser.add_argument('--output', required=True,
                      help='Output zip file')
  parser.add_argument('--exclude-globs',
                      help='GN list of exclude globs')
  parser.add_argument('--include-globs',
                      help='GN list of include globs')
  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  args.exclude_globs = build_utils.ParseGnList(args.exclude_globs)
  args.include_globs = build_utils.ParseGnList(args.include_globs)

  transform = CreatePathTransform(args.exclude_globs, args.include_globs)
  with build_utils.AtomicOutput(args.output) as out:
    if transform:
      build_utils.MergeZips(out.name, [args.input], path_transform=transform)
    else:
      # No filtering requested: a plain copy is cheaper than re-zipping.
      shutil.copy(args.input, out.name)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/android/gyp/filter_zip.pydeps b/third_party/libwebrtc/build/android/gyp/filter_zip.pydeps
new file mode 100644
index 0000000000..f561e05c45
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/filter_zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py
+../../gn_helpers.py
+filter_zip.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/finalize_apk.py b/third_party/libwebrtc/build/android/gyp/finalize_apk.py
new file mode 100644
index 0000000000..b465f713db
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/finalize_apk.py
@@ -0,0 +1,78 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and aligns an APK."""
+
+import argparse
+import logging
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from util import build_utils
+
+
def FinalizeApk(apksigner_path,
                zipalign_path,
                unsigned_apk_path,
                final_apk_path,
                key_path,
                key_passwd,
                key_name,
                min_sdk_version,
                warnings_as_errors=False):
  """Zipaligns (optionally) and signs an APK, writing it to |final_apk_path|.

  Args:
    apksigner_path: Path to the apksigner .jar.
    zipalign_path: Path to the zipalign binary; falsy to skip alignment.
    unsigned_apk_path: Input (unsigned) APK path.
    final_apk_path: Destination path for the signed APK.
    key_path: Path to the keystore file.
    key_passwd: Keystore password.
    key_name: Key alias within the keystore.
    min_sdk_version: Minimum Android SDK level; selects signature schemes.
    warnings_as_errors: If True, any tool output fails the build.
  """
  # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime
  # and a corrupted state.
  with tempfile.NamedTemporaryFile() as staging_file:
    if zipalign_path:
      # v2 signing requires that zipalign happen first.
      logging.debug('Running zipalign')
      zipalign_cmd = [
          zipalign_path, '-p', '-f', '4', unsigned_apk_path, staging_file.name
      ]
      build_utils.CheckOutput(zipalign_cmd,
                              print_stdout=True,
                              fail_on_output=warnings_as_errors)
      signer_input_path = staging_file.name
    else:
      signer_input_path = unsigned_apk_path

    # When zipalign ran, --in and --out are both the staging file, i.e.
    # apksigner signs in place.
    sign_cmd = build_utils.JavaCmd(warnings_as_errors) + [
        '-jar',
        apksigner_path,
        'sign',
        '--in',
        signer_input_path,
        '--out',
        staging_file.name,
        '--ks',
        key_path,
        '--ks-key-alias',
        key_name,
        '--ks-pass',
        'pass:' + key_passwd,
    ]
    # V3 signing adds security niceties, which are irrelevant for local builds.
    sign_cmd += ['--v3-signing-enabled', 'false']

    if min_sdk_version >= 24:
      # Disable v1 signatures when v2 signing can be used (it's much faster).
      # By default, both v1 and v2 signing happen.
      sign_cmd += ['--v1-signing-enabled', 'false']
    else:
      # Force SHA-1 (makes signing faster; insecure is fine for local builds).
      # Leave v2 signing enabled since it verifies faster on device when
      # supported.
      sign_cmd += ['--min-sdk-version', '1']

    logging.debug('Signing apk')
    build_utils.CheckOutput(sign_cmd,
                            print_stdout=True,
                            fail_on_output=warnings_as_errors)
    # The staging file has been moved away, so the context manager must not
    # try to delete it on exit.
    shutil.move(staging_file.name, final_apk_path)
    # TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
    # NOTE(review): the Python 3 branch pokes a private attribute
    # (_closer.delete); this is fragile across CPython versions — confirm
    # against the tempfile implementation in use.
    if sys.version_info.major == 2:
      staging_file.delete = False
    else:
      staging_file._closer.delete = False
diff --git a/third_party/libwebrtc/build/android/gyp/find.py b/third_party/libwebrtc/build/android/gyp/find.py
new file mode 100755
index 0000000000..b05874bfb7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/find.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+from __future__ import print_function
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
def main(argv):
  """Walks the given directories, printing paths that match --pattern.

  Returns 1 (after a message on stderr) when a directory does not exist;
  returns None on success.
  """
  parser = optparse.OptionParser()
  parser.add_option('--pattern', default='*', help='File pattern to match.')
  options, directories = parser.parse_args(argv)

  for directory in directories:
    if not os.path.exists(directory):
      print('%s does not exist' % directory, file=sys.stderr)
      return 1
    for dirpath, _, files in os.walk(directory):
      for name in fnmatch.filter(files, options.pattern):
        print(os.path.join(dirpath, name))


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/gcc_preprocess.py b/third_party/libwebrtc/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000000..70ae10fc13
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import posixpath
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
+def _ParsePackageName(data):
+ m = re.match(r'^\s*package\s+(.*?)\s*;', data, re.MULTILINE)
+ return m.group(1) if m else ''
+
+
def main(args):
  """Runs gcc -E over Java templates and packs the results into a .srcjar."""
  args = build_utils.ExpandFileArgs(args)

  parser = argparse.ArgumentParser()
  parser.add_argument('--include-dirs', help='GN list of include directories.')
  parser.add_argument('--output', help='Path for .srcjar.')
  parser.add_argument('--define',
                      action='append',
                      dest='defines',
                      help='List of -D args')
  parser.add_argument('templates', nargs='+', help='Template files.')
  options = parser.parse_args(args)

  options.defines = build_utils.ParseGnList(options.defines)
  options.include_dirs = build_utils.ParseGnList(options.include_dirs)

  # Preprocess only: treat the templates as C headers (-x c-header) so that
  # #if/#define are expanded, with line markers disabled (-P).
  gcc_cmd = ['gcc', '-E', '-DANDROID', '-x', 'c-header', '-P']
  for define in options.defines:
    gcc_cmd.append('-D' + define)
  for include_dir in options.include_dirs:
    gcc_cmd.append('-I' + include_dir)

  with build_utils.AtomicOutput(options.output) as out:
    with zipfile.ZipFile(out, 'w') as srcjar:
      for template in options.templates:
        java_source = build_utils.CheckOutput(gcc_cmd + [template])
        package = _ParsePackageName(java_source)
        if not package:
          raise Exception('Could not find java package of ' + template)
        # Place the .java under its package directory inside the srcjar.
        zip_path = posixpath.join(
            package.replace('.', '/'),
            os.path.splitext(os.path.basename(template))[0]) + '.java'
        build_utils.AddToZipHermetic(srcjar, zip_path, data=java_source)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/gcc_preprocess.pydeps b/third_party/libwebrtc/build/android/gyp/gcc_preprocess.pydeps
new file mode 100644
index 0000000000..39e56f7008
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/gcc_preprocess.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py
+../../gn_helpers.py
+gcc_preprocess.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/generate_android_wrapper.py b/third_party/libwebrtc/build/android/gyp/generate_android_wrapper.py
new file mode 100755
index 0000000000..c8b762c754
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/generate_android_wrapper.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'util')))
+
+import generate_wrapper
+
+_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
+
+
def ExpandWrappedPathLists(args):
  """Expands @WrappedPathList(flag, gn_list) args into flag/@WrappedPath pairs."""
  wrapped_list_re = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)')
  result = []
  for arg in args:
    match = wrapped_list_re.match(arg)
    if not match:
      result.append(arg)
      continue
    flag, gn_list = match.group(1), match.group(2)
    for path in build_utils.ParseGnList(gn_list):
      result += [flag, '@WrappedPath(%s)' % path]
  return result
+
+
def main(raw_args):
  """Expands file args and wrapped path lists, then delegates to Wrap()."""
  expanded = build_utils.ExpandFileArgs(raw_args)
  expanded = ExpandWrappedPathLists(expanded)
  parsed = generate_wrapper.CreateArgumentParser().parse_args(expanded)
  return generate_wrapper.Wrap(parsed)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.py b/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.py
new file mode 100755
index 0000000000..995fcd7b88
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Generate linker version scripts for Chrome on Android shared libraries."""
+
+import argparse
+import os
+
+from util import build_utils
+
+_SCRIPT_HEADER = """\
+# AUTO-GENERATED FILE. DO NOT MODIFY.
+#
+# See: %s
+
+{
+ global:
+""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+_SCRIPT_FOOTER = """\
+ local:
+ *;
+};
+"""
+
+
def main():
  """Writes a linker version script exporting the selected symbols."""
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--output',
      required=True,
      help='Path to output linker version script file.')
  parser.add_argument(
      '--export-java-symbols',
      action='store_true',
      help='Export Java_* JNI methods')
  parser.add_argument(
      '--export-symbol-allowlist-file',
      action='append',
      default=[],
      dest='allowlists',
      help='Path to an input file containing an allowlist of extra symbols to '
      'export, one symbol per line. Multiple files may be specified.')
  parser.add_argument(
      '--export-feature-registrations',
      action='store_true',
      help='Export JNI_OnLoad_* methods')
  options = parser.parse_args()

  # JNI_OnLoad is always exported. CrashpadHandlerMain() is the entry point
  # to the Crashpad handler, required for libcrashpad_handler_trampoline.so.
  exported_symbols = ['CrashpadHandlerMain', 'JNI_OnLoad']

  if options.export_java_symbols:
    exported_symbols.append('Java_*')

  if options.export_feature_registrations:
    exported_symbols.append('JNI_OnLoad_*')

  for allowlist_path in options.allowlists:
    with open(allowlist_path, 'rt') as allowlist_file:
      for raw_line in allowlist_file:
        symbol = raw_line.strip()
        # Skip blank lines and '#' comments.
        if not symbol or symbol.startswith('#'):
          continue
        exported_symbols.append(symbol)

  body = ''.join('  %s;\n' % symbol for symbol in exported_symbols)
  script = _SCRIPT_HEADER + body + _SCRIPT_FOOTER

  with build_utils.AtomicOutput(options.output, mode='w') as f:
    f.write(script)


if __name__ == '__main__':
  main()
diff --git a/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.pydeps b/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.pydeps
new file mode 100644
index 0000000000..de9fa56a95
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/generate_linker_version_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py
+../../gn_helpers.py
+generate_linker_version_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/ijar.py b/third_party/libwebrtc/build/android/gyp/ijar.py
new file mode 100755
index 0000000000..45413f62fd
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/ijar.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+# python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')"
+# du -b test.jar
+_EMPTY_JAR_SIZE = 22
+
+
def main():
  """Runs ijar via AtomicOutput so unchanged outputs keep their timestamps."""
  ijar_bin, in_jar, out_jar = sys.argv[1:]
  with build_utils.AtomicOutput(out_jar) as out:
    # ijar fails on empty jars (https://github.com/bazelbuild/bazel/issues/10162),
    # so pass those through as a plain copy.
    if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE:
      with open(in_jar, 'rb') as src:
        out.write(src.read())
    else:
      build_utils.CheckOutput([ijar_bin, in_jar, out.name])


if __name__ == '__main__':
  main()
diff --git a/third_party/libwebrtc/build/android/gyp/ijar.pydeps b/third_party/libwebrtc/build/android/gyp/ijar.pydeps
new file mode 100644
index 0000000000..e9ecb6636d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/ijar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py
+../../gn_helpers.py
+ijar.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/jacoco_instr.py b/third_party/libwebrtc/build/android/gyp/jacoco_instr.py
new file mode 100755
index 0000000000..8e5f29c9cd
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jacoco_instr.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'jacoco_instr' action in the Java build process.
+Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will
+call the instrument command which accepts a jar and instruments it using
+jacococli.jar.
+
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+ """Adds arguments related to instrumentation to parser.
+
+ Args:
+ parser: ArgumentParser object.
+ """
+ parser.add_argument(
+ '--input-path',
+ required=True,
+ help='Path to input file(s). Either the classes '
+ 'directory, or the path to a jar.')
+ parser.add_argument(
+ '--output-path',
+ required=True,
+ help='Path to output final file(s) to. Either the '
+ 'final classes directory, or the directory in '
+ 'which to place the instrumented/copied jar.')
+ parser.add_argument(
+ '--sources-json-file',
+ required=True,
+ help='File to create with the list of source directories '
+ 'and input path.')
+ parser.add_argument(
+ '--java-sources-file',
+ required=True,
+ help='File containing newline-separated .java paths')
+ parser.add_argument(
+ '--jacococli-jar', required=True, help='Path to jacococli.jar.')
+ parser.add_argument(
+ '--files-to-instrument',
+ help='Path to a file containing which source files are affected.')
+
+
+def _GetSourceDirsFromSourceFiles(source_files):
+ """Returns list of directories for the files in |source_files|.
+
+ Args:
+ source_files: List of source files.
+
+ Returns:
+ List of source directories.
+ """
+ return list(set(os.path.dirname(source_file) for source_file in source_files))
+
+
+def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file,
+ src_root):
+ """Adds all normalized source directories and input path to
+ |sources_json_file|.
+
+ Args:
+ source_dirs: List of source directories.
+ input_path: The input path to non-instrumented class files.
+ sources_json_file: File into which to write the list of source directories
+ and input path.
+ src_root: Root which sources added to the file should be relative to.
+
+ Returns:
+ An exit code.
+ """
+ src_root = os.path.abspath(src_root)
+ relative_sources = []
+ for s in source_dirs:
+ abs_source = os.path.abspath(s)
+ if abs_source[:len(src_root)] != src_root:
+ print('Error: found source directory not under repository root: %s %s' %
+ (abs_source, src_root))
+ return 1
+ rel_source = os.path.relpath(abs_source, src_root)
+
+ relative_sources.append(rel_source)
+
+ data = {}
+ data['source_dirs'] = relative_sources
+ data['input_path'] = []
+ if input_path:
+ data['input_path'].append(os.path.abspath(input_path))
+ with open(sources_json_file, 'w') as f:
+ json.dump(data, f)
+
+
+def _GetAffectedClasses(jar_file, source_files):
+ """Gets affected classes by affected source files to a jar.
+
+ Args:
+ jar_file: The jar file to get all members.
+ source_files: The list of affected source files.
+
+ Returns:
+ A tuple of affected classes and unaffected members.
+ """
+ with zipfile.ZipFile(jar_file) as f:
+ members = f.namelist()
+
+ affected_classes = []
+ unaffected_members = []
+
+ for member in members:
+ if not member.endswith('.class'):
+ unaffected_members.append(member)
+ continue
+
+ is_affected = False
+ index = member.find('$')
+ if index == -1:
+ index = member.find('.class')
+ for source_file in source_files:
+ if source_file.endswith(member[:index] + '.java'):
+ affected_classes.append(member)
+ is_affected = True
+ break
+ if not is_affected:
+ unaffected_members.append(member)
+
+ return affected_classes, unaffected_members
+
+
def _InstrumentClassFiles(instrument_cmd,
                          input_path,
                          output_path,
                          temp_dir,
                          affected_source_files=None):
  """Instruments class files from input jar.

  Args:
    instrument_cmd: JaCoCo instrument command. NOTE: this list is mutated
      (extended) by this function.
    input_path: The input path to non-instrumented jar.
    output_path: The output path to instrumented jar.
    temp_dir: The temporary directory.
    affected_source_files: The affected source file paths to input jar.
      Default is None, which means instrumenting all class files in jar.
  """
  affected_classes = None
  unaffected_members = None
  if affected_source_files:
    affected_classes, unaffected_members = _GetAffectedClasses(
        input_path, affected_source_files)

  # Extract affected class files. When |affected_classes| is None,
  # ZipFile.extractall extracts every member.
  with zipfile.ZipFile(input_path) as f:
    f.extractall(temp_dir, affected_classes)

  instrumented_dir = os.path.join(temp_dir, 'instrumented')

  # Instrument extracted class files.
  instrument_cmd.extend([temp_dir, '--dest', instrumented_dir])
  build_utils.CheckOutput(instrument_cmd)

  if affected_source_files and unaffected_members:
    # Extract unaffected members to instrumented_dir so that the output jar
    # contains both instrumented and untouched entries.
    with zipfile.ZipFile(input_path) as f:
      f.extractall(instrumented_dir, unaffected_members)

  # Zip all files to output_path
  build_utils.ZipDir(output_path, instrumented_dir)
+
+
def _RunInstrumentCommand(parser):
  """Instruments class or Jar files using JaCoCo.

  Args:
    parser: ArgumentParser object.

  Returns:
    An exit code.
  """
  args = parser.parse_args()

  source_files = []
  if args.java_sources_file:
    source_files.extend(build_utils.ReadSourcesList(args.java_sources_file))

  with build_utils.TempDir() as temp_dir:
    instrument_cmd = build_utils.JavaCmd() + [
        '-jar', args.jacococli_jar, 'instrument'
    ]

    if not args.files_to_instrument:
      # No incremental filter: instrument every class in the jar.
      _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
                            temp_dir)
    else:
      affected_files = build_utils.ReadSourcesList(args.files_to_instrument)
      source_set = set(source_files)
      # Only instrument the subset of this target's sources that changed.
      affected_source_files = [f for f in affected_files if f in source_set]

      # Copy input_path to output_path and return if no source file affected.
      if not affected_source_files:
        shutil.copyfile(args.input_path, args.output_path)
        # Create a dummy sources_json_file.
        _CreateSourcesJsonFile([], None, args.sources_json_file,
                               build_utils.DIR_SOURCE_ROOT)
        return 0
      else:
        _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path,
                              temp_dir, affected_source_files)

    source_dirs = _GetSourceDirsFromSourceFiles(source_files)
    # TODO(GYP): In GN, we are passed the list of sources, detecting source
    # directories, then walking them to re-establish the list of sources.
    # This can obviously be simplified!
    _CreateSourcesJsonFile(source_dirs, args.input_path, args.sources_json_file,
                           build_utils.DIR_SOURCE_ROOT)

  return 0
+
+
def main():
  """Builds the argument parser and runs JaCoCo instrumentation.

  Returns:
    The exit code from _RunInstrumentCommand, so that sys.exit(main())
    reflects failures. Previously the result was discarded and the process
    always exited 0.
  """
  parser = argparse.ArgumentParser()
  _AddArguments(parser)
  return _RunInstrumentCommand(parser)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/jacoco_instr.pydeps b/third_party/libwebrtc/build/android/gyp/jacoco_instr.pydeps
new file mode 100644
index 0000000000..d7fec19fde
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jacoco_instr.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py
+../../gn_helpers.py
+jacoco_instr.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_enum.py b/third_party/libwebrtc/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000000..08a381a968
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,437 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import textwrap
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_ALLOWLIST = [
+ 'char', 'unsigned char', 'short', 'unsigned short', 'int', 'int8_t',
+ 'int16_t', 'int32_t', 'uint8_t', 'uint16_t'
+]
+
+
class EnumDefinition(object):
  """In-memory model of one C++ enum destined for Java code generation.

  Entries are kept in declaration order. Entry values may be None, ints, or
  strings (integer literals or references to other entry names) until
  Finalize() resolves them.
  """

  def __init__(self, original_enum_name=None, class_name_override=None,
               enum_package=None, entries=None, comments=None, fixed_type=None):
    self.original_enum_name = original_enum_name
    self.class_name_override = class_name_override
    self.enum_package = enum_package
    self.entries = collections.OrderedDict(entries or [])
    self.comments = collections.OrderedDict(comments or [])
    # Set later from generator directives (see DirectiveSet.UpdateDefinition).
    self.prefix_to_strip = None
    self.fixed_type = fixed_type

  def AppendEntry(self, key, value):
    # A duplicate key indicates a malformed enum or a parser bug.
    if key in self.entries:
      raise Exception('Multiple definitions of key %s found.' % key)
    self.entries[key] = value

  def AppendEntryComment(self, key, value):
    if key in self.comments:
      raise Exception('Multiple definitions of key %s found.' % key)
    self.comments[key] = value

  @property
  def class_name(self):
    # The directive-supplied override wins over the C++ enum's own name.
    return self.class_name_override or self.original_enum_name

  def Finalize(self):
    """Validates and normalizes the definition once parsing is complete."""
    self._Validate()
    self._AssignEntryIndices()
    self._StripPrefix()
    self._NormalizeNames()

  def _Validate(self):
    assert self.class_name
    assert self.enum_package
    assert self.entries
    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_ALLOWLIST:
      raise Exception('Fixed type %s for enum %s not in allowlist.' %
                      (self.fixed_type, self.class_name))

  def _AssignEntryIndices(self):
    # Enums, if given no value, are given the value of the previous enum + 1.
    if not all(self.entries.values()):
      prev_enum_value = -1
      for key, value in self.entries.items():
        if not value:
          self.entries[key] = prev_enum_value + 1
        elif value in self.entries:
          # The value references an earlier entry's name; copy its number.
          self.entries[key] = self.entries[value]
        else:
          try:
            self.entries[key] = int(value)
          except ValueError:
            raise Exception('Could not interpret integer from enum value "%s" '
                            'for key %s.' % (value, key))
        prev_enum_value = self.entries[key]


  def _StripPrefix(self):
    prefix_to_strip = self.prefix_to_strip
    if not prefix_to_strip:
      # Try SHOUTY_CASE_, the enum name itself, and its kCamelCase form;
      # strip whichever prefixes every entry key.
      shout_case = self.original_enum_name
      shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper()
      shout_case += '_'

      prefixes = [shout_case, self.original_enum_name,
                  'k' + self.original_enum_name]

      for prefix in prefixes:
        if all([w.startswith(prefix) for w in self.entries.keys()]):
          prefix_to_strip = prefix
          break
      else:
        # for-else: no candidate matched every entry, so strip nothing.
        prefix_to_strip = ''

    def StripEntries(entries):
      ret = collections.OrderedDict()
      for k, v in entries.items():
        stripped_key = k.replace(prefix_to_strip, '', 1)
        if isinstance(v, str):
          # String values may reference other (prefixed) entry names.
          stripped_value = v.replace(prefix_to_strip, '')
        else:
          stripped_value = v
        ret[stripped_key] = stripped_value

      return ret

    self.entries = StripEntries(self.entries)
    self.comments = StripEntries(self.comments)

  def _NormalizeNames(self):
    self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty)
    self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty)
+
+
+def _TransformKeys(d, func):
+ """Normalize keys in |d| and update references to old keys in |d| values."""
+ keys_map = {k: func(k) for k in d}
+ ret = collections.OrderedDict()
+ for k, v in d.items():
+ # Need to transform values as well when the entry value was explicitly set
+ # (since it could contain references to other enum entry values).
+ if isinstance(v, str):
+ # First check if a full replacement is available. This avoids issues when
+ # one key is a substring of another.
+ if v in d:
+ v = keys_map[v]
+ else:
+ for old_key, new_key in keys_map.items():
+ v = v.replace(old_key, new_key)
+ ret[keys_map[k]] = v
+ return ret
+
+
class DirectiveSet(object):
  """Accumulates GENERATED_JAVA_* directives parsed from a C++ header."""

  class_name_override_key = 'CLASS_NAME_OVERRIDE'
  enum_package_key = 'ENUM_PACKAGE'
  prefix_to_strip_key = 'PREFIX_TO_STRIP'

  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]

  def __init__(self):
    self._directives = {}

  def Update(self, key, value):
    """Records |value| for |key|; raises on unrecognized directive names."""
    if key not in DirectiveSet.known_keys:
      raise Exception("Unknown directive: " + key)
    self._directives[key] = value

  @property
  def empty(self):
    return not self._directives

  def UpdateDefinition(self, definition):
    """Copies the collected directives onto an EnumDefinition."""
    directives = self._directives
    definition.class_name_override = directives.get(
        DirectiveSet.class_name_override_key, '')
    definition.enum_package = directives.get(DirectiveSet.enum_package_key)
    definition.prefix_to_strip = directives.get(
        DirectiveSet.prefix_to_strip_key)
+
+
class HeaderParser(object):
  """Line-oriented state machine extracting directive-annotated enums.

  Enums are only captured when preceded by GENERATED_JAVA_* directive
  comments; all other enums are ignored.
  """

  single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)')
  multi_line_comment_start_re = re.compile(r'\s*/\*')
  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
  generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$')
  generator_directive_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
  multi_line_generator_directive_start_re = re.compile(
      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
  multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$')
  multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$')

  optional_class_or_struct_re = r'(class|struct)?'
  enum_name_re = r'(\w+)'
  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
      optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
      optional_fixed_type_re + '\s*{\s*')
  enum_single_line_re = re.compile(
      r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$')

  def __init__(self, lines, path=''):
    self._lines = lines
    self._path = path  # Used only in error messages.
    self._enum_definitions = []
    self._in_enum = False  # True while between an enum's '{' and '};'.
    self._current_definition = None
    self._current_comments = []  # Comment lines preceding the next entry.
    self._generator_directives = DirectiveSet()
    self._multi_line_generator_directive = None  # (name, [value fragments])
    self._current_enum_entry = ''  # Accumulates an entry split over lines.

  def _ApplyGeneratorDirectives(self):
    self._generator_directives.UpdateDefinition(self._current_definition)
    # Directives apply to exactly one enum; reset for the next one.
    self._generator_directives = DirectiveSet()

  def ParseDefinitions(self):
    """Parses all lines and returns the list of EnumDefinitions found."""
    for line in self._lines:
      self._ParseLine(line)
    return self._enum_definitions

  def _ParseLine(self, line):
    # Dispatch on current parser state.
    if self._multi_line_generator_directive:
      self._ParseMultiLineDirectiveLine(line)
    elif not self._in_enum:
      self._ParseRegularLine(line)
    else:
      self._ParseEnumLine(line)

  def _ParseEnumLine(self, line):
    if HeaderParser.multi_line_comment_start_re.match(line):
      raise Exception('Multi-line comments in enums are not supported in ' +
                      self._path)

    enum_comment = HeaderParser.single_line_comment_re.match(line)
    if enum_comment:
      comment = enum_comment.groups()[0]
      if comment:
        self._current_comments.append(comment)
    elif HeaderParser.enum_end_re.match(line):
      self._FinalizeCurrentEnumDefinition()
    else:
      self._AddToCurrentEnumEntry(line)
      # A comma terminates the entry; without one, the entry continues on
      # the next line.
      if ',' in line:
        self._ParseCurrentEnumEntry()

  def _ParseSingleLineEnum(self, line):
    for entry in line.split(','):
      self._AddToCurrentEnumEntry(entry)
      self._ParseCurrentEnumEntry()

    self._FinalizeCurrentEnumDefinition()

  def _ParseCurrentEnumEntry(self):
    if not self._current_enum_entry:
      return

    enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry)
    if not enum_entry:
      raise Exception('Unexpected error while attempting to parse %s as enum '
                      'entry.' % self._current_enum_entry)

    enum_key = enum_entry.groups()[0]
    enum_value = enum_entry.groups()[2]  # May be None when no '=' is present.
    self._current_definition.AppendEntry(enum_key, enum_value)
    if self._current_comments:
      self._current_definition.AppendEntryComment(
          enum_key, ' '.join(self._current_comments))
    self._current_comments = []
    self._current_enum_entry = ''

  def _AddToCurrentEnumEntry(self, line):
    self._current_enum_entry += ' ' + line.strip()

  def _FinalizeCurrentEnumDefinition(self):
    # Flush any entry still being accumulated (e.g. last entry without a
    # trailing comma).
    if self._current_enum_entry:
      self._ParseCurrentEnumEntry()
    self._ApplyGeneratorDirectives()
    self._current_definition.Finalize()
    self._enum_definitions.append(self._current_definition)
    self._current_definition = None
    self._in_enum = False

  def _ParseMultiLineDirectiveLine(self, line):
    multi_line_directive_continuation = (
        HeaderParser.multi_line_directive_continuation_re.match(line))
    multi_line_directive_end = (
        HeaderParser.multi_line_directive_end_re.match(line))

    if multi_line_directive_continuation:
      value_cont = multi_line_directive_continuation.groups()[0]
      self._multi_line_generator_directive[1].append(value_cont)
    elif multi_line_directive_end:
      directive_name = self._multi_line_generator_directive[0]
      directive_value = "".join(self._multi_line_generator_directive[1])
      directive_value += multi_line_directive_end.groups()[0]
      self._multi_line_generator_directive = None
      self._generator_directives.Update(directive_name, directive_value)
    else:
      raise Exception('Malformed multi-line directive declaration in ' +
                      self._path)

  def _ParseRegularLine(self, line):
    enum_start = HeaderParser.enum_start_re.match(line)
    generator_directive_error = HeaderParser.generator_error_re.match(line)
    generator_directive = HeaderParser.generator_directive_re.match(line)
    multi_line_generator_directive_start = (
        HeaderParser.multi_line_generator_directive_start_re.match(line))
    single_line_enum = HeaderParser.enum_single_line_re.match(line)

    if generator_directive_error:
      raise Exception('Malformed directive declaration in ' + self._path +
                      '. Use () for multi-line directives. E.g.\n' +
                      '// GENERATED_JAVA_ENUM_PACKAGE: (\n' +
                      '//   foo.package)')
    elif generator_directive:
      directive_name = generator_directive.groups()[0]
      directive_value = generator_directive.groups()[1]
      self._generator_directives.Update(directive_name, directive_value)
    elif multi_line_generator_directive_start:
      directive_name = multi_line_generator_directive_start.groups()[0]
      directive_value = multi_line_generator_directive_start.groups()[1]
      self._multi_line_generator_directive = (directive_name, [directive_value])
    elif enum_start or single_line_enum:
      # Only enums preceded by directives are captured.
      if self._generator_directives.empty:
        return
      # NOTE(review): if only enum_single_line_re matches (e.g. an anonymous
      # "enum { ... };"), enum_start is None and .groups() below would raise.
      # Presumably all directive-annotated enums are named — confirm.
      self._current_definition = EnumDefinition(
          original_enum_name=enum_start.groups()[1],
          fixed_type=enum_start.groups()[3])
      self._in_enum = True
      if single_line_enum:
        self._ParseSingleLineEnum(single_line_enum.group('enum_entries'))
+
+
def DoGenerate(source_paths):
  """Yields (output_path, java_source) for every enum in |source_paths|."""
  for source_path in source_paths:
    definitions = DoParseHeaderFile(source_path)
    if not definitions:
      raise Exception('No enums found in %s\n'
                      'Did you forget prefixing enums with '
                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
                      source_path)
    for definition in definitions:
      output_path = java_cpp_utils.GetJavaFilePath(definition.enum_package,
                                                   definition.class_name)
      yield output_path, GenerateOutput(source_path, definition)
+
+
def DoParseHeaderFile(path):
  """Parses the header at |path| and returns its EnumDefinitions."""
  with open(path) as header_file:
    lines = header_file.readlines()
  return HeaderParser(lines, path).ParseDefinitions()
+
+
def GenerateOutput(source_path, enum_definition):
  """Renders one finalized EnumDefinition as a Java @IntDef source string.

  Args:
    source_path: Path of the originating C++ header (embedded in the
      generated file's comment header).
    enum_definition: A finalized EnumDefinition.

  Returns:
    The complete Java source text for the @IntDef interface.
  """
  template = Template("""
// Copyright ${YEAR} The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     ${SCRIPT_NAME}
// From
//     ${SOURCE_PATH}

package ${PACKAGE};

import androidx.annotation.IntDef;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

@IntDef({
${INT_DEF}
})
@Retention(RetentionPolicy.SOURCE)
public @interface ${CLASS_NAME} {
${ENUM_ENTRIES}
}
""")

  enum_template = Template('  int ${NAME} = ${VALUE};')
  enum_entries_string = []
  enum_names = []
  for enum_name, enum_value in enum_definition.entries.items():
    values = {
        'NAME': enum_name,
        'VALUE': enum_value,
    }
    enum_comments = enum_definition.comments.get(enum_name)
    if enum_comments:
      # Emit the entry's comment as a wrapped Javadoc block.
      enum_comments_indent = '   * '
      comments_line_wrapper = textwrap.TextWrapper(
          initial_indent=enum_comments_indent,
          subsequent_indent=enum_comments_indent,
          width=100)
      enum_entries_string.append('  /**')
      enum_entries_string.append('\n'.join(
          comments_line_wrapper.wrap(enum_comments)))
      enum_entries_string.append('   */')
    enum_entries_string.append(enum_template.substitute(values))
    # NUM_ENTRIES is a count, not a real enum value, so it is excluded from
    # the @IntDef value list (but still emitted as a constant).
    if enum_name != "NUM_ENTRIES":
      enum_names.append(enum_definition.class_name + '.' + enum_name)
  enum_entries_string = '\n'.join(enum_entries_string)

  enum_names_indent = ' ' * 4
  wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent,
                                 subsequent_indent = enum_names_indent,
                                 width = 100)
  enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names)))

  values = {
      'CLASS_NAME': enum_definition.class_name,
      'ENUM_ENTRIES': enum_entries_string,
      'PACKAGE': enum_definition.enum_package,
      'INT_DEF': enum_names_string,
      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
      'SOURCE_PATH': source_path,
      'YEAR': str(date.today().year)
  }
  return template.substitute(values)
+
+
def DoMain(argv):
  """Command-line entry: writes a .srcjar holding the generated Java enums."""
  usage = 'usage: %prog [options] [output_dir] input_file(s)...'
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--srcjar',
                    help='When specified, a .srcjar at the given path is '
                    'created instead of individual .java files.')

  options, input_paths = parser.parse_args(argv)
  if not input_paths:
    parser.error('Need to specify at least one input file')

  with build_utils.AtomicOutput(options.srcjar) as f:
    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
      for output_path, data in DoGenerate(input_paths):
        build_utils.AddToZipHermetic(srcjar, output_path, data=data)


if __name__ == '__main__':
  DoMain(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_enum.pydeps b/third_party/libwebrtc/build/android/gyp/java_cpp_enum.pydeps
new file mode 100644
index 0000000000..e6aaeb7b1f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_enum.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py
+../../gn_helpers.py
+java_cpp_enum.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_enum_tests.py b/third_party/libwebrtc/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000000..6d5f150fa0
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,783 @@
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput
+from java_cpp_enum import HeaderParser
+from util import java_cpp_utils
+
+
+class TestPreprocess(unittest.TestCase):
+ def testOutput(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='some.package',
+ entries=[('E1', 1), ('E2', '2 << 2')],
+ comments=[('E2', 'This is a comment.'),
+ ('E1', 'This is a multiple line '
+ 'comment that is really long. '
+ 'This is a multiple line '
+ 'comment that is really '
+ 'really long.')])
+ output = GenerateOutput('path/to/file', definition)
+ expected = """
+// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// %s
+// From
+// path/to/file
+
+package some.package;
+
+import androidx.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@IntDef({
+ ClassName.E1, ClassName.E2
+})
+@Retention(RetentionPolicy.SOURCE)
+public @interface ClassName {
+ /**
+ * %s
+ * really really long.
+ */
+ int E1 = 1;
+ /**
+ * This is a comment.
+ */
+ int E2 = 2 << 2;
+}
+"""
+ long_comment = ('This is a multiple line comment that is really long. '
+ 'This is a multiple line comment that is')
+ self.assertEqual(
+ expected % (date.today().year, java_cpp_utils.GetScriptName(),
+ long_comment), output)
+
+ def testParseSimpleEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO,
+ VALUE_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+ ('VALUE_ONE', 1)]),
+ definition.entries)
+
+ def testParseBitShifts(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE = 1 << 1,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ ENUM_NAME_ZERO = 1 << 0,
+ ENUM_NAME_ONE = 1 << 1,
+ ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumName', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', '1 << 1')]),
+ definition.entries)
+
+ definition = definitions[1]
+ expected_entries = collections.OrderedDict([
+ ('ZERO', '1 << 0'),
+ ('ONE', '1 << 1'),
+ ('TWO', 'ZERO | ONE')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseMultilineEnumEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1 << 0,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey,
+ VALUE_TWO = 1 << 18,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1 << 0'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey'),
+ ('VALUE_TWO', '1 << 18')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseEnumEntryWithTrailingMultilineEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+ VALUE_ONE =
+ SymbolKey | FnKey | AltGrKey | MetaKey |
+ AltKey | ControlKey | ShiftKey,
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | '
+ 'ControlKey | ShiftKey')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+
+ def testParseNoCommaAfterLastEntry(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace
+ enum Foo {
+ VALUE_ZERO = 1,
+
+ // This is a multiline
+ //
+ // comment with an empty line.
+ VALUE_ONE = 2
+ };
+ """.split('\n')
+ expected_entries = collections.OrderedDict([
+ ('VALUE_ZERO', '1'),
+ ('VALUE_ONE', '2')])
+ expected_comments = collections.OrderedDict([
+ ('VALUE_ONE', 'This is a multiline comment with an empty line.')])
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('bar.namespace', definition.enum_package)
+ self.assertEqual(expected_entries, definition.entries)
+ self.assertEqual(expected_comments, definition.comments)
+
+ def testParseClassNameOverride(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ FOO
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+ enum PrefixTest {
+ PREFIX_TEST_A,
+ PREFIX_TEST_B,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('OverrideName', definition.class_name)
+
+ definition = definitions[1]
+ self.assertEqual('OtherOverride', definition.class_name)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParsePreservesCommentsWhenPrefixStripping(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A,
+ // This comment spans
+ // two lines.
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('EnumOne', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict(
+ [('B', 'This comment spans two lines.')]), definition.comments)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseTwoEnums(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum AnEnum {
+ ENUM_ONE_A = 1,
+ ENUM_ONE_B = A,
+ };
+
+ enum EnumIgnore {
+ C, D, E
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo {
+ P_A,
+ P_B
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(2, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('AnEnum', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'),
+ ('ENUM_ONE_B', 'A')]),
+ definition.entries)
+ definition = definitions[1]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('P_A', 0),
+ ('P_B', 1)]),
+ definition.entries)
+
+ def testParseSingleLineEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo { P_A, P_B };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1)]),
+ definition.entries)
+
+ def testParseWithStrippingAndRelativeReferences(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+ enum EnumTwo {
+ P_A = 1,
+ // P_A is old-don't use P_A.
+ P_B = P_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual('EnumTwo', definition.class_name)
+ self.assertEqual('other.package', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', 'A')]),
+ definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]),
+ definition.comments)
+
+ def testParseSingleLineAndRegularEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ ENUM_ONE_A = 1,
+ // Comment there
+ ENUM_ONE_B = A,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: other.package
+ enum EnumTwo { P_A, P_B };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ ENUM_NAME_FOO
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries)
+ self.assertEqual(collections.OrderedDict([('B', 'Comment there')]),
+ definition.comments)
+
+ self.assertEqual(3, len(definitions))
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries)
+
+ def testParseWithCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumTest {
+ EnumTestA = 1,
+ // comment for EnumTestB.
+ EnumTestB = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_PREFIX_TO_STRIP: Test
+ enum AnEnum {
+ TestHTTPOption,
+ TestHTTPSOption,
+ };
+
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('A', '1'), ('B', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('B', 'comment for B.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]),
+ definition.entries)
+
+ def testParseWithKCamelCaseNames(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumOne {
+ kEnumOne = 1,
+ // comment for kEnumTwo.
+ kEnumTwo = 2,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumNameBar
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum EnumName {
+ kEnumNameFoo,
+ kEnumBar,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Keys {
+ kSymbolKey = 1 << 0,
+ kAltKey = 1 << 1,
+ kUpKey = 1 << 2,
+ kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers,
+ };
+
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Mixed {
+ kTestVal,
+ kCodecMPEG2
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ definition = definitions[0]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]),
+ definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]),
+ definition.comments)
+
+ definition = definitions[1]
+ self.assertEqual(
+ collections.OrderedDict([('FOO', 0), ('BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[2]
+ self.assertEqual(
+ collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]),
+ definition.entries)
+
+ definition = definitions[3]
+ expected_entries = collections.OrderedDict([
+ ('SYMBOL_KEY', '1 << 0'),
+ ('ALT_KEY', '1 << 1'),
+ ('UP_KEY', '1 << 2'),
+ ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')])
+ self.assertEqual(expected_entries, definition.entries)
+
+ definition = definitions[4]
+ self.assertEqual(
+ collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]),
+ definition.entries)
+
+ def testParseThrowsOnUnknownDirective(self):
+ test_data = """
+ // GENERATED_JAVA_UNKNOWN: Value
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseReturnsEmptyListWithoutDirectives(self):
+ test_data = """
+ enum EnumName {
+ VALUE_ONE,
+ };
+ """.split('\n')
+ self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+ def testParseEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseEnumClassOneValueSubstringOfAnother(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class SafeBrowsingStatus {
+ kChecking = 0,
+ kEnabled = 1,
+ kDisabled = 2,
+ kDisabledByAdmin = 3,
+ kDisabledByExtension = 4,
+ kEnabledStandard = 5,
+ kEnabledEnhanced = 6,
+ // New enum values must go above here.
+ kMaxValue = kEnabledEnhanced,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('SafeBrowsingStatus', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('CHECKING', '0'),
+ ('ENABLED', '1'),
+ ('DISABLED', '2'),
+ ('DISABLED_BY_ADMIN', '3'),
+ ('DISABLED_BY_EXTENSION', '4'),
+ ('ENABLED_STANDARD', '5'),
+ ('ENABLED_ENHANCED', '6'),
+ ('MAX_VALUE', 'ENABLED_ENHANCED'),
+ ]), definition.entries)
+ self.assertEqual(
+ collections.OrderedDict([
+ ('MAX_VALUE', 'New enum values must go above here.')
+ ]), definition.comments)
+
+ def testParseEnumStruct(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum struct Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnum(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum Foo : int {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('int', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseFixedTypeEnumClass(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: unsigned short {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual(1, len(definitions))
+ definition = definitions[0]
+ self.assertEqual('Foo', definition.class_name)
+ self.assertEqual('test.namespace', definition.enum_package)
+ self.assertEqual('unsigned short', definition.fixed_type)
+ self.assertEqual(collections.OrderedDict([('A', 0)]),
+ definition.entries)
+
+ def testParseUnknownFixedTypeRaises(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+ enum class Foo: foo_type {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseSimpleMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (te
+ // st.name
+ // space)
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+
+ def testParseMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.namespace)
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+ // Ba
+ // r
+ // )
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ definitions = HeaderParser(test_data).ParseDefinitions()
+ self.assertEqual('test.namespace', definitions[0].enum_package)
+ self.assertEqual('Bar', definitions[0].class_name)
+
+ def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirective(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ // test.name
+ // space
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveShort(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE: (
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testParseMalformedMultiLineDirectiveMissingBrackets(self):
+ test_data = """
+ // GENERATED_JAVA_ENUM_PACKAGE:
+ // test.namespace
+ enum Foo {
+ FOO_A,
+ };
+ """.split('\n')
+ with self.assertRaises(Exception):
+ HeaderParser(test_data).ParseDefinitions()
+
+ def testEnumValueAssignmentNoneDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2)]),
+ definition.entries)
+
+ def testEnumValueAssignmentAllDefined(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', '1')
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', '3')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', '1'),
+ ('B', '2'),
+ ('C', '3')]),
+ definition.entries)
+
+ def testEnumValueAssignmentReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', None)
+ definition.AppendEntry('D', 'C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 1),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSet(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', '2')
+ definition.AppendEntry('C', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 2),
+ ('C', 3)]),
+ definition.entries)
+
+ def testEnumValueAssignmentSetReferences(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'A')
+ definition.AppendEntry('C', 'B')
+ definition.AppendEntry('D', None)
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 0),
+ ('C', 0),
+ ('D', 1)]),
+ definition.entries)
+
+ def testEnumValueAssignmentRaises(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', 'foo')
+ definition.AppendEntry('C', None)
+ with self.assertRaises(Exception):
+ definition.Finalize()
+
+ def testExplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='c', enum_package='p')
+ definition.AppendEntry('P_A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('P_C', None)
+ definition.AppendEntry('P_LAST', 'P_C')
+ definition.prefix_to_strip = 'P_'
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStripping(self):
+ definition = EnumDefinition(original_enum_name='ClassName',
+ enum_package='p')
+ definition.AppendEntry('CLASS_NAME_A', None)
+ definition.AppendEntry('CLASS_NAME_B', None)
+ definition.AppendEntry('CLASS_NAME_C', None)
+ definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+ definition.Finalize()
+ self.assertEqual(collections.OrderedDict([('A', 0),
+ ('B', 1),
+ ('C', 2),
+ ('LAST', 2)]),
+ definition.entries)
+
+ def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+ definition = EnumDefinition(original_enum_name='Name',
+ enum_package='p')
+ definition.AppendEntry('A', None)
+ definition.AppendEntry('B', None)
+ definition.AppendEntry('NAME_LAST', None)
+ definition.Finalize()
+ self.assertEqual(['A', 'B', 'NAME_LAST'], list(definition.entries.keys()))
+
  def testGenerateThrowsOnEmptyInput(self):
    """DoGenerate must raise when a header produces no enum definitions."""
    with self.assertRaises(Exception):
      # Monkeypatch the header parser to simulate an input file that
      # contains no usable enums; the finally block restores the real
      # implementation even if the expected exception fires.
      original_do_parse = java_cpp_enum.DoParseHeaderFile
      try:
        java_cpp_enum.DoParseHeaderFile = lambda _: []
        # DoGenerate is a generator, so it only executes (and raises)
        # once it is actually iterated.
        for _ in java_cpp_enum.DoGenerate(['file']):
          pass
      finally:
        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_features.py b/third_party/libwebrtc/build/android/gyp/java_cpp_features.py
new file mode 100755
index 0000000000..8e7c2440d7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_features.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
  """Teaches CppConstantParser to recognize base::Feature declarations.

  Each matched feature is emitted as a Java String constant holding the
  feature's string name.
  """
  # Ex. 'const base::Feature kConstantName{"StringNameOfTheFeature", ...};'
  # would parse as:
  #   ExtractConstantName() -> 'ConstantName'
  #   ExtractValue() -> '"StringNameOfTheFeature"'
  FEATURE_RE = re.compile(r'\s*const (?:base::)?Feature\s+k(\w+)\s*(?:=\s*)?{')
  # NOTE(review): inside this raw string, \" is simply '"', so the
  # alternation '(?:\"|[^"])' matches any character and the group extends
  # greedily to the last quote before the comma. Presumably written to span
  # escaped quotes in the literal (see the escaped-name tests) -- confirm
  # before tightening.
  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*,')

  def ExtractConstantName(self, line):
    """Returns the feature constant's name without the 'k' prefix, or None."""
    match = FeatureParserDelegate.FEATURE_RE.match(line)
    return match.group(1) if match else None

  def ExtractValue(self, line):
    """Returns the quoted feature-name string literal, or None."""
    match = FeatureParserDelegate.VALUE_RE.search(line)
    return match.group(1) if match else None

  def CreateJavaConstant(self, name, value, comments):
    """Formats one parsed feature as a Java String constant."""
    return java_cpp_utils.JavaString(name, value, comments)
+
+
def _GenerateOutput(template, source_paths, template_path, features):
  """Renders the Java template with the generated feature constants.

  Args:
    template: Contents of the template file; must contain '{NATIVE_FEATURES}'.
    source_paths: C++ source files the features were parsed from.
    template_path: Path of the template, recorded in the provenance banner.
    features: Parsed constants, each exposing a Format() method.

  Returns:
    The final Java source text.
  """
  # Provenance banner emitted ahead of the constants in the generated file.
  # Fix: the banner previously read "This following string constants";
  # corrected to grammatical English.
  description_template = """
    // The following string constants were inserted by
    //     {SCRIPT_NAME}
    // From
    //     {SOURCE_PATHS}
    // Into
    //     {TEMPLATE_PATH}

"""
  values = {
      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
      'TEMPLATE_PATH': template_path,
  }
  description = description_template.format(**values)
  native_features = '\n\n'.join(x.Format() for x in features)

  values = {
      'NATIVE_FEATURES': description + native_features,
  }
  return template.format(**values)
+
+
def _ParseFeatureFile(path):
  """Extracts base::Feature constants from the C++ source at |path|."""
  with open(path) as f:
    lines = f.readlines()
  parser = java_cpp_utils.CppConstantParser(FeatureParserDelegate(), lines)
  return parser.Parse()
+
+
def _Generate(source_paths, template_path):
  """Generates the Java class described by the template.

  Args:
    source_paths: C++ files to harvest base::Feature declarations from.
    template_path: Java template file naming the package and class.

  Returns:
    A (java_source_text, java_file_path) pair.
  """
  with open(template_path) as f:
    lines = f.readlines()
  template = ''.join(lines)

  # The template itself declares where the generated file should live.
  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)

  features = [
      feature for source_path in source_paths
      for feature in _ParseFeatureFile(source_path)
  ]

  java_source = _GenerateOutput(template, source_paths, template_path,
                                features)
  return java_source, output_path
+
+
def _Main(argv):
  """Command-line entry point: packs the generated Java class into a srcjar.

  Args:
    argv: Command-line arguments, excluding the program name.
  """
  parser = argparse.ArgumentParser()

  parser.add_argument('--srcjar',
                      required=True,
                      help='The path at which to generate the .srcjar file')

  parser.add_argument('--template',
                      required=True,
                      help='The template file with which to generate the Java '
                      'class. Must have "{NATIVE_FEATURES}" somewhere in '
                      'the template.')

  parser.add_argument('inputs',
                      nargs='+',
                      help='Input file(s)',
                      metavar='INPUTFILE')
  args = parser.parse_args(argv)

  # Atomic write: the srcjar only appears on disk once fully written.
  with build_utils.AtomicOutput(args.srcjar) as f:
    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
      data, path = _Generate(args.inputs, args.template)
      build_utils.AddToZipHermetic(srcjar, path, data=data)
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_features.pydeps b/third_party/libwebrtc/build/android/gyp/java_cpp_features.pydeps
new file mode 100644
index 0000000000..acffae2bb9
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_features.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py
+../../gn_helpers.py
+java_cpp_features.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_features_tests.py b/third_party/libwebrtc/build/android/gyp/java_cpp_features_tests.py
new file mode 100755
index 0000000000..5dcdcd8b8c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_features_tests.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests for java_cpp_features.py.
+
+This test suite contains various tests for the C++ -> Java base::Feature
+generator.
+"""
+
+import unittest
+
+import java_cpp_features
+from util import java_cpp_utils
+
+
+class _TestFeaturesParser(unittest.TestCase):
+ def testParseComments(self):
+ test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const base::Feature kSomeFeature{"SomeFeature",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Real comment that spans
+// multiple lines.
+const base::Feature kSomeOtherFeature{"SomeOtherFeature",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
+// Comment followed by nothing.
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual(2, len(features))
+ self.assertEqual('SOME_FEATURE', features[0].name)
+ self.assertEqual('"SomeFeature"', features[0].value)
+ self.assertEqual(1, len(features[0].comments.split('\n')))
+ self.assertEqual('SOME_OTHER_FEATURE', features[1].name)
+ self.assertEqual('"SomeOtherFeature"', features[1].value)
+ self.assertEqual(2, len(features[1].comments.split('\n')))
+
+ def testWhitespace(self):
+ test_data = """
+// 1 line
+const base::Feature kShort{"Short", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 2 lines
+const base::Feature kTwoLineFeatureA{"TwoLineFeatureA",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kTwoLineFeatureB{
+ "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// 3 lines
+const base::Feature kFeatureWithAVeryLongNameThatWillHaveToWrap{
+ "FeatureWithAVeryLongNameThatWillHaveToWrap",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual(4, len(features))
+ self.assertEqual('SHORT', features[0].name)
+ self.assertEqual('"Short"', features[0].value)
+ self.assertEqual('TWO_LINE_FEATURE_A', features[1].name)
+ self.assertEqual('"TwoLineFeatureA"', features[1].value)
+ self.assertEqual('TWO_LINE_FEATURE_B', features[2].name)
+ self.assertEqual('"TwoLineFeatureB"', features[2].value)
+ self.assertEqual('FEATURE_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+ features[3].name)
+ self.assertEqual('"FeatureWithAVeryLongNameThatWillHaveToWrap"',
+ features[3].value)
+
+ def testCppSyntax(self):
+ test_data = """
+// Mismatched name
+const base::Feature kMismatchedFeature{"MismatchedName",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+namespace myfeature {
+// In a namespace
+const base::Feature kSomeFeature{"SomeFeature",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+}
+
+// Defined with equals sign
+const base::Feature kFoo = {"Foo", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Build config-specific base::Feature
+#if defined(OS_ANDROID)
+const base::Feature kAndroidOnlyFeature{"AndroidOnlyFeature",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+#endif
+
+// Value depends on build config
+const base::Feature kMaybeEnabled{"MaybeEnabled",
+#if defined(OS_ANDROID)
+ base::FEATURE_DISABLED_BY_DEFAULT
+#else
+ base::FEATURE_ENABLED_BY_DEFAULT
+#endif
+};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual(5, len(features))
+ self.assertEqual('MISMATCHED_FEATURE', features[0].name)
+ self.assertEqual('"MismatchedName"', features[0].value)
+ self.assertEqual('SOME_FEATURE', features[1].name)
+ self.assertEqual('"SomeFeature"', features[1].value)
+ self.assertEqual('FOO', features[2].name)
+ self.assertEqual('"Foo"', features[2].value)
+ self.assertEqual('ANDROID_ONLY_FEATURE', features[3].name)
+ self.assertEqual('"AndroidOnlyFeature"', features[3].value)
+ self.assertEqual('MAYBE_ENABLED', features[4].name)
+ self.assertEqual('"MaybeEnabled"', features[4].value)
+
+ def testNotYetSupported(self):
+ # Negative test for cases we don't yet support, to ensure we don't misparse
+ # these until we intentionally add proper support.
+ test_data = """
+// Not currently supported: name depends on C++ directive
+const base::Feature kNameDependsOnOs{
+#if defined(OS_ANDROID)
+ "MaybeName1",
+#else
+ "MaybeName2",
+#endif
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Not currently supported: feature named with a constant instead of literal
+const base::Feature kNamedAfterConstant{kNamedStringConstant,
+ base::FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual(0, len(features))
+
+ def testTreatWebViewLikeOneWord(self):
+ test_data = """
+const base::Feature kSomeWebViewFeature{"SomeWebViewFeature",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kWebViewOtherFeature{"WebViewOtherFeature",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+const base::Feature kFeatureWithPluralWebViews{
+ "FeatureWithPluralWebViews",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual('SOME_WEBVIEW_FEATURE', features[0].name)
+ self.assertEqual('"SomeWebViewFeature"', features[0].value)
+ self.assertEqual('WEBVIEW_OTHER_FEATURE', features[1].name)
+ self.assertEqual('"WebViewOtherFeature"', features[1].value)
+ self.assertEqual('FEATURE_WITH_PLURAL_WEBVIEWS', features[2].name)
+ self.assertEqual('"FeatureWithPluralWebViews"', features[2].value)
+
+ def testSpecialCharacters(self):
+ test_data = r"""
+const base::Feature kFeatureWithEscapes{"Weird\tfeature\"name\n",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kFeatureWithEscapes2{
+ "Weird\tfeature\"name\n",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual('FEATURE_WITH_ESCAPES', features[0].name)
+ self.assertEqual(r'"Weird\tfeature\"name\n"', features[0].value)
+ self.assertEqual('FEATURE_WITH_ESCAPES2', features[1].name)
+ self.assertEqual(r'"Weird\tfeature\"name\n"', features[1].value)
+
+ def testNoBaseNamespacePrefix(self):
+ test_data = """
+const Feature kSomeFeature{"SomeFeature", FEATURE_DISABLED_BY_DEFAULT};
+""".split('\n')
+ feature_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_features.FeatureParserDelegate(), test_data)
+ features = feature_file_parser.Parse()
+ self.assertEqual('SOME_FEATURE', features[0].name)
+ self.assertEqual('"SomeFeature"', features[0].value)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_strings.py b/third_party/libwebrtc/build/android/gyp/java_cpp_strings.py
new file mode 100755
index 0000000000..d713599793
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_strings.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import java_cpp_utils
+
+
+class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
+ STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=')
+ VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*;')
+
+ def ExtractConstantName(self, line):
+ match = StringParserDelegate.STRING_RE.match(line)
+ return match.group(1) if match else None
+
+ def ExtractValue(self, line):
+ match = StringParserDelegate.VALUE_RE.search(line)
+ return match.group(1) if match else None
+
+ def CreateJavaConstant(self, name, value, comments):
+ return java_cpp_utils.JavaString(name, value, comments)
+
+
+def _GenerateOutput(template, source_paths, template_path, strings):
+ description_template = """
+ // This following string constants were inserted by
+ // {SCRIPT_NAME}
+ // From
+ // {SOURCE_PATHS}
+ // Into
+ // {TEMPLATE_PATH}
+
+"""
+ values = {
+ 'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+ 'SOURCE_PATHS': ',\n // '.join(source_paths),
+ 'TEMPLATE_PATH': template_path,
+ }
+ description = description_template.format(**values)
+ native_strings = '\n\n'.join(x.Format() for x in strings)
+
+ values = {
+ 'NATIVE_STRINGS': description + native_strings,
+ }
+ return template.format(**values)
+
+
+def _ParseStringFile(path):
+ with open(path) as f:
+ string_file_parser = java_cpp_utils.CppConstantParser(
+ StringParserDelegate(), f.readlines())
+ return string_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+ with open(template_path) as f:
+ lines = f.readlines()
+
+ template = ''.join(lines)
+ package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+ output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+ strings = []
+ for source_path in source_paths:
+ strings.extend(_ParseStringFile(source_path))
+
+ output = _GenerateOutput(template, source_paths, template_path, strings)
+ return output, output_path
+
+
+def _Main(argv):
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('--srcjar',
+ required=True,
+ help='The path at which to generate the .srcjar file')
+
+ parser.add_argument('--template',
+ required=True,
+ help='The template file with which to generate the Java '
+ 'class. Must have "{NATIVE_STRINGS}" somewhere in '
+ 'the template.')
+
+ parser.add_argument(
+ 'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+ args = parser.parse_args(argv)
+
+ with build_utils.AtomicOutput(args.srcjar) as f:
+ with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+ data, path = _Generate(args.inputs, args.template)
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+ _Main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_strings.pydeps b/third_party/libwebrtc/build/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 0000000000..0a821f4469
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../gn_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/java_cpp_strings_tests.py b/third_party/libwebrtc/build/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 0000000000..4cb1eeeae7
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_cpp_strings.py.
+
+This test suite contains various tests for the C++ -> Java string generator.
+"""
+
+import unittest
+
+import java_cpp_strings
+from util import java_cpp_utils
+
+
+class _TestStringsParser(unittest.TestCase):
+
+ def testParseComments(self):
+ test_data = """
+/**
+ * This should be ignored as well.
+ */
+
+// Comment followed by a blank line.
+
+// Comment followed by unrelated code.
+int foo() { return 3; }
+
+// Real comment.
+const char kASwitch[] = "a-value";
+
+// Real comment that spans
+// multiple lines.
+const char kAnotherSwitch[] = "another-value";
+
+// Comment followed by nothing.
+""".split('\n')
+ string_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_strings.StringParserDelegate(), test_data)
+ strings = string_file_parser.Parse()
+ self.assertEqual(2, len(strings))
+ self.assertEqual('A_SWITCH', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual(1, len(strings[0].comments.split('\n')))
+ self.assertEqual('ANOTHER_SWITCH', strings[1].name)
+ self.assertEqual('"another-value"', strings[1].value)
+ self.assertEqual(2, len(strings[1].comments.split('\n')))
+
+ def testStringValues(self):
+ test_data = r"""
+// Single line string constants.
+const char kAString[] = "a-value";
+const char kNoComment[] = "no-comment";
+
+namespace myfeature {
+const char kMyFeatureNoComment[] = "myfeature.no-comment";
+}
+
+// Single line switch with a big space.
+const char kAStringWithSpace[] = "a-value";
+
+// Wrapped constant definition.
+const char kAStringWithAVeryLongNameThatWillHaveToWrap[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap";
+
+// This one has no comment before it.
+
+const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] =
+ "a-string-with-a-very-long-name-that-will-have-to-wrap2";
+
+const char kStringWithEscapes[] = "tab\tquote\"newline\n";
+const char kStringWithEscapes2[] =
+ "tab\tquote\"newline\n";
+
+const char kEmptyString[] = "";
+
+// These are valid C++ but not currently supported by the script.
+const char kInvalidLineBreak[] =
+
+ "invalid-line-break";
+
+const char kConcatenateMultipleStringLiterals[] =
+ "first line"
+ "second line";
+""".split('\n')
+ string_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_strings.StringParserDelegate(), test_data)
+ strings = string_file_parser.Parse()
+ self.assertEqual(9, len(strings))
+ self.assertEqual('A_STRING', strings[0].name)
+ self.assertEqual('"a-value"', strings[0].value)
+ self.assertEqual('NO_COMMENT', strings[1].name)
+ self.assertEqual('"no-comment"', strings[1].value)
+ self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name)
+ self.assertEqual('"myfeature.no-comment"', strings[2].value)
+ self.assertEqual('A_STRING_WITH_SPACE', strings[3].name)
+ self.assertEqual('"a-value"', strings[3].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP',
+ strings[4].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"',
+ strings[4].value)
+ self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2',
+ strings[5].name)
+ self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"',
+ strings[5].value)
+ self.assertEqual('STRING_WITH_ESCAPES', strings[6].name)
+ self.assertEqual(r'"tab\tquote\"newline\n"', strings[6].value)
+ self.assertEqual('STRING_WITH_ESCAPES2', strings[7].name)
+ self.assertEqual(r'"tab\tquote\"newline\n"', strings[7].value)
+ self.assertEqual('EMPTY_STRING', strings[8].name)
+ self.assertEqual('""', strings[8].value)
+
+ def testTreatWebViewLikeOneWord(self):
+ test_data = """
+const char kSomeWebViewSwitch[] = "some-webview-switch";
+const char kWebViewOtherSwitch[] = "webview-other-switch";
+const char kSwitchWithPluralWebViews[] = "switch-with-plural-webviews";
+""".split('\n')
+ string_file_parser = java_cpp_utils.CppConstantParser(
+ java_cpp_strings.StringParserDelegate(), test_data)
+ strings = string_file_parser.Parse()
+ self.assertEqual('SOME_WEBVIEW_SWITCH', strings[0].name)
+ self.assertEqual('"some-webview-switch"', strings[0].value)
+ self.assertEqual('WEBVIEW_OTHER_SWITCH', strings[1].name)
+ self.assertEqual('"webview-other-switch"', strings[1].value)
+ self.assertEqual('SWITCH_WITH_PLURAL_WEBVIEWS', strings[2].name)
+ self.assertEqual('"switch-with-plural-webviews"', strings[2].value)
+
+ def testTemplateParsing(self):
+ test_data = """
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package my.java.package;
+
+public any sort of class MyClass {{
+
+{NATIVE_STRINGS}
+
+}}
+""".split('\n')
+ package, class_name = java_cpp_utils.ParseTemplateFile(test_data)
+ self.assertEqual('my.java.package', package)
+ self.assertEqual('MyClass', class_name)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/java_google_api_keys.py b/third_party/libwebrtc/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000000..a58628a78f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.append(
+ os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+
+PACKAGE = 'org.chromium.chrome'
+CLASSNAME = 'GoogleAPIKeys'
+
+
+def GetScriptName():
+ return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT)
+
+
+def GenerateOutput(constant_definitions):
+ template = string.Template("""
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// ${SCRIPT_NAME}
+// From
+// ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${CONSTANT_ENTRIES}
+}
+""")
+
+ constant_template = string.Template(
+ ' public static final String ${NAME} = "${VALUE}";')
+ constant_entries_list = []
+ for constant_name, constant_value in constant_definitions.items():
+ values = {
+ 'NAME': constant_name,
+ 'VALUE': constant_value,
+ }
+ constant_entries_list.append(constant_template.substitute(values))
+ constant_entries_string = '\n'.join(constant_entries_list)
+
+ values = {
+ 'CLASS_NAME': CLASSNAME,
+ 'CONSTANT_ENTRIES': constant_entries_string,
+ 'PACKAGE': PACKAGE,
+ 'SCRIPT_NAME': GetScriptName(),
+ 'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
+ }
+ return template.substitute(values)
+
+
+def _DoWriteJavaOutput(output_path, constant_definition):
+ folder = os.path.dirname(output_path)
+ if folder and not os.path.exists(folder):
+ os.makedirs(folder)
+ with open(output_path, 'w') as out_file:
+ out_file.write(GenerateOutput(constant_definition))
+
+
+def _DoWriteJarOutput(output_path, constant_definition):
+ folder = os.path.dirname(output_path)
+ if folder and not os.path.exists(folder):
+ os.makedirs(folder)
+ with zipfile.ZipFile(output_path, 'w') as srcjar:
+ path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
+ data = GenerateOutput(constant_definition)
+ build_utils.AddToZipHermetic(srcjar, path, data=data)
+
+
+def _DoMain(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--out", help="Path for java output.")
+ parser.add_argument("--srcjar", help="Path for srcjar output.")
+ options = parser.parse_args(argv)
+ if not options.out and not options.srcjar:
+ parser.print_help()
+ sys.exit(-1)
+
+ values = {}
+ values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey()
+ values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys.
+ GetAPIKeyPhysicalWebTest())
+ values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN')
+ values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN')
+ values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID(
+ 'CLOUD_PRINT')
+ values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret(
+ 'CLOUD_PRINT')
+ values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING')
+ values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret(
+ 'REMOTING')
+ values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID(
+ 'REMOTING_HOST')
+ values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys.
+ GetClientSecret('REMOTING_HOST'))
+ values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys.
+ GetClientID('REMOTING_IDENTITY_API'))
+
+ if options.out:
+ _DoWriteJavaOutput(options.out, values)
+ if options.srcjar:
+ _DoWriteJarOutput(options.srcjar, values)
+
+
+if __name__ == '__main__':
+ _DoMain(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/java_google_api_keys.pydeps b/third_party/libwebrtc/build/android/gyp/java_google_api_keys.pydeps
new file mode 100644
index 0000000000..ebb717273f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_google_api_keys.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py
+../../../google_apis/google_api_keys.py
+../../gn_helpers.py
+java_google_api_keys.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/java_google_api_keys_tests.py b/third_party/libwebrtc/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000000..e00e86cb74
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import unittest
+
+import java_google_api_keys
+
+
+class TestJavaGoogleAPIKeys(unittest.TestCase):
+ def testOutput(self):
+ definition = {'E1': 'abc', 'E2': 'defgh'}
+ output = java_google_api_keys.GenerateOutput(definition)
+ expected = """
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+// %s
+// From
+// google_api_keys/google_api_keys.h
+
+package org.chromium.chrome;
+
+public class GoogleAPIKeys {
+ public static final String E1 = "abc";
+ public static final String E2 = "defgh";
+}
+"""
+ self.assertEqual(expected % java_google_api_keys.GetScriptName(), output)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/javac_output_processor.py b/third_party/libwebrtc/build/android/gyp/javac_output_processor.py
new file mode 100755
index 0000000000..298c12573b
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/javac_output_processor.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+#
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Contains helper class for processing javac output."""
+
+import os
+import pathlib
+import re
+import sys
+
+from util import build_utils
+
+sys.path.insert(
+ 0,
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src'))
+import colorama
+sys.path.insert(
+ 0,
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'tools', 'android',
+ 'modularization', 'convenience'))
+import lookup_dep
+
+
+class JavacOutputProcessor:
+ def __init__(self, target_name):
+ self._target_name = target_name
+
+ # Example: ../../ui/android/java/src/org/chromium/ui/base/Clipboard.java:45:
+ fileline_prefix = (
+ r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)')
+
+ self._warning_re = re.compile(
+ fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+ self._error_re = re.compile(fileline_prefix +
+ r'(?P<full_message> (?P<message>.*))$')
+ self._marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+ # Matches output modification performed by _ElaborateLineForUnknownSymbol()
+ # so that it can be colorized.
+ # Example: org.chromium.base.Log found in dep //base:base_java.
+ self._please_add_dep_re = re.compile(
+ r'(?P<full_message>Please add //[\w/:]+ dep to //[\w/:]+.*)$')
+
+ # First element in pair is bool which indicates whether the missing
+ # class/package is part of the error message.
+ self._symbol_not_found_re_list = [
+ # Example:
+ # error: package org.chromium.components.url_formatter does not exist
+ (True,
+ re.compile(fileline_prefix +
+ r'( error: package [\w.]+ does not exist)$')),
+ # Example: error: cannot find symbol
+ (False, re.compile(fileline_prefix + r'( error: cannot find symbol)$')),
+ # Example: error: symbol not found org.chromium.url.GURL
+ (True,
+ re.compile(fileline_prefix + r'( error: symbol not found [\w.]+)$')),
+ ]
+
+ # Example: import org.chromium.url.GURL;
+ self._import_re = re.compile(r'\s*import (?P<imported_class>[\w\.]+);$')
+
+ self._warning_color = [
+ 'full_message', colorama.Fore.YELLOW + colorama.Style.DIM
+ ]
+ self._error_color = [
+ 'full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT
+ ]
+ self._marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+ self._class_lookup_index = None
+
+ colorama.init()
+
+ def Process(self, lines):
+ """ Processes javac output.
+
+ - Applies colors to output.
+ - Suggests GN dep to add for 'unresolved symbol in Java import' errors.
+ """
+ lines = self._ElaborateLinesForUnknownSymbol(iter(lines))
+ return (self._ApplyColors(l) for l in lines)
+
+ def _ElaborateLinesForUnknownSymbol(self, lines):
+ """ Elaborates passed-in javac output for unresolved symbols.
+
+ Looks for unresolved symbols in imports.
+ Adds:
+ - Line with GN target which cannot compile.
+ - Mention of unresolved class if not present in error message.
+ - Line with suggestion of GN dep to add.
+
+ Args:
+ lines: Generator with javac input.
+ Returns:
+ Generator with processed output.
+ """
+ previous_line = next(lines, None)
+ line = next(lines, None)
+ while previous_line is not None:
+ elaborated_lines = self._ElaborateLineForUnknownSymbol(
+ previous_line, line)
+ for elaborated_line in elaborated_lines:
+ yield elaborated_line
+
+ previous_line = line
+ line = next(lines, None)
+
+ def _ApplyColors(self, line):
+ """Adds colors to passed-in line and returns processed line."""
+ if self._warning_re.match(line):
+ line = self._Colorize(line, self._warning_re, self._warning_color)
+ elif self._error_re.match(line):
+ line = self._Colorize(line, self._error_re, self._error_color)
+ elif self._please_add_dep_re.match(line):
+ line = self._Colorize(line, self._please_add_dep_re, self._error_color)
+ elif self._marker_re.match(line):
+ line = self._Colorize(line, self._marker_re, self._marker_color)
+ return line
+
+ def _ElaborateLineForUnknownSymbol(self, line, next_line):
+ if not next_line:
+ return [line]
+
+ import_re_match = self._import_re.match(next_line)
+ if not import_re_match:
+ return [line]
+
+ symbol_missing = False
+ has_missing_symbol_in_error_msg = False
+ for symbol_in_error_msg, regex in self._symbol_not_found_re_list:
+ if regex.match(line):
+ symbol_missing = True
+ has_missing_symbol_in_error_msg = symbol_in_error_msg
+ break
+
+ if not symbol_missing:
+ return [line]
+
+ class_to_lookup = import_re_match.group('imported_class')
+ if self._class_lookup_index is None:
+ self._class_lookup_index = lookup_dep.ClassLookupIndex(pathlib.Path(
+ os.getcwd()),
+ should_build=False)
+ suggested_deps = self._class_lookup_index.match(class_to_lookup)
+
+ if len(suggested_deps) != 1:
+ suggested_deps = self._FindFactoryDep(suggested_deps)
+ if len(suggested_deps) != 1:
+ return [line]
+
+ suggested_target = suggested_deps[0].target
+
+ target_name = self._RemoveSuffixesIfPresent(
+ ["__compile_java", "__errorprone", "__header"], self._target_name)
+ if not has_missing_symbol_in_error_msg:
+ line = "{} {}".format(line, class_to_lookup)
+
+ return [
+ line,
+ "Please add {} dep to {}. ".format(suggested_target, target_name) +
+ "File a crbug if this suggestion is incorrect.",
+ ]
+
+ @staticmethod
+ def _FindFactoryDep(class_entries):
+ """Find the android_library_factory() GN target."""
+ if len(class_entries) != 2:
+ return []
+
+ # android_library_factory() targets set low_classpath_priority=true.
+ # This logic is correct if GN targets other than android_library_factory()
+ # set low_classpath_priority=true. low_classpath_priority=true indicates
+ # that the target is depended on (and overridden) by other targets which
+ # contain the same class. We want to recommend the leaf target.
+ if class_entries[0].low_classpath_priority == class_entries[
+ 1].low_classpath_priority:
+ return []
+
+ if class_entries[0].low_classpath_priority:
+ return [class_entries[0]]
+ return [class_entries[1]]
+
+ @staticmethod
+ def _RemoveSuffixesIfPresent(suffixes, text):
+ for suffix in suffixes:
+ if text.endswith(suffix):
+ return text[:-len(suffix)]
+ return text
+
+ @staticmethod
+ def _Colorize(line, regex, color):
+ match = regex.match(line)
+ start = match.start(color[0])
+ end = match.end(color[0])
+ return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET +
+ colorama.Style.RESET_ALL + line[end:])
diff --git a/third_party/libwebrtc/build/android/gyp/jetify_jar.py b/third_party/libwebrtc/build/android/gyp/jetify_jar.py
new file mode 100755
index 0000000000..e97ad97d99
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jetify_jar.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import argparse
+import os
+import subprocess
+import sys
+
+from util import build_utils
+
+
+def _AddArguments(parser):
+ """Adds arguments related to jetifying to parser.
+
+ Args:
+ parser: ArgumentParser object.
+ """
+ parser.add_argument(
+ '--input-path',
+ required=True,
+ help='Path to input file(s). Either the classes '
+ 'directory, or the path to a jar.')
+ parser.add_argument(
+ '--output-path',
+ required=True,
+ help='Path to output final file(s) to. Either the '
+ 'final classes directory, or the directory in '
+ 'which to place the instrumented/copied jar.')
+ parser.add_argument(
+ '--jetify-path', required=True, help='Path to jetify bin.')
+ parser.add_argument(
+ '--jetify-config-path', required=True, help='Path to jetify config file.')
+
+
+def _RunJetifyCommand(parser):
+ args = parser.parse_args()
+ cmd = [
+ args.jetify_path,
+ '-i',
+ args.input_path,
+ '-o',
+ args.output_path,
+ # Need to suppress a lot of warning output when jar doesn't have
+ # any references rewritten.
+ '-l',
+ 'error'
+ ]
+ if args.jetify_config_path:
+ cmd.extend(['-c', args.jetify_config_path])
+ # Must wait for jetify command to complete to prevent race condition.
+ env = os.environ.copy()
+ env['JAVA_HOME'] = build_utils.JAVA_HOME
+ subprocess.check_call(cmd, env=env)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ _AddArguments(parser)
+ _RunJetifyCommand(parser)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/jetify_jar.pydeps b/third_party/libwebrtc/build/android/gyp/jetify_jar.pydeps
new file mode 100644
index 0000000000..6a1a589a7d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jetify_jar.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jetify_jar.pydeps build/android/gyp/jetify_jar.py
+../../gn_helpers.py
+jetify_jar.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/jinja_template.py b/third_party/libwebrtc/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000000..d42189ba38
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jinja_template.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import argparse
+import os
+import sys
+
+from util import build_utils
+from util import resource_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2 # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+ def __init__(self, searchpath):
+ jinja2.FileSystemLoader.__init__(self, searchpath)
+ self.loaded_templates = set()
+
+ def get_source(self, environment, template):
+ contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+ self, environment, template)
+ self.loaded_templates.add(os.path.relpath(filename))
+ return contents, filename, uptodate
+
+
+class JinjaProcessor(object):
+ """Allows easy rendering of jinja templates with input file tracking."""
+ def __init__(self, loader_base_dir, variables=None):
+ self.loader_base_dir = loader_base_dir
+ self.variables = variables or {}
+ self.loader = _RecordingFileSystemLoader(loader_base_dir)
+ self.env = jinja2.Environment(loader=self.loader)
+ self.env.undefined = jinja2.StrictUndefined
+ self.env.line_comment_prefix = '##'
+ self.env.trim_blocks = True
+ self.env.lstrip_blocks = True
+ self._template_cache = {} # Map of path -> Template
+
+ def Render(self, input_filename, variables=None):
+ input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+ template = self._template_cache.get(input_rel_path)
+ if not template:
+ template = self.env.get_template(input_rel_path)
+ self._template_cache[input_rel_path] = template
+ return template.render(variables or self.variables)
+
+ def GetLoadedTemplates(self):
+ return list(self.loader.loaded_templates)
+
+
+def _ProcessFile(processor, input_filename, output_filename):
+ output = processor.Render(input_filename)
+
+ # If |output| is same with the file content, we skip update and
+ # ninja's restat will avoid rebuilding things that depend on it.
+ if os.path.isfile(output_filename):
+ with codecs.open(output_filename, 'r', 'utf-8') as f:
+ if f.read() == output:
+ return
+
+ with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+ output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+ with build_utils.TempDir() as temp_dir:
+ path_info = resource_utils.ResourceInfoFile()
+ for input_filename in input_filenames:
+ relpath = os.path.relpath(os.path.abspath(input_filename),
+ os.path.abspath(inputs_base_dir))
+ if relpath.startswith(os.pardir):
+ raise Exception('input file %s is not contained in inputs base dir %s'
+ % (input_filename, inputs_base_dir))
+
+ output_filename = os.path.join(temp_dir, relpath)
+ parent_dir = os.path.dirname(output_filename)
+ build_utils.MakeDirectory(parent_dir)
+ _ProcessFile(processor, input_filename, output_filename)
+ path_info.AddMapping(relpath, input_filename)
+
+ path_info.Write(outputs_zip + '.info')
+ build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def _ParseVariables(variables_arg, error_func):
+ variables = {}
+ for v in build_utils.ParseGnList(variables_arg):
+ if '=' not in v:
+ error_func('--variables argument must contain "=": ' + v)
+ name, _, value = v.partition('=')
+ variables[name] = value
+ return variables
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--inputs', required=True,
+ help='GN-list of template files to process.')
+ parser.add_argument('--includes', default='',
+ help="GN-list of files that get {% include %}'ed.")
+ parser.add_argument('--output', help='The output file to generate. Valid '
+ 'only if there is a single input.')
+ parser.add_argument('--outputs-zip', help='A zip file for the processed '
+ 'templates. Required if there are multiple inputs.')
+ parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+ 'of the inputs. Each output\'s path in the output zip '
+ 'will match the relative path from INPUTS_BASE_DIR to '
+ 'the input. Required if --output-zip is given.')
+ parser.add_argument('--loader-base-dir', help='Base path used by the '
+ 'template loader. Must be a common ancestor directory of '
+ 'the inputs. Defaults to DIR_SOURCE_ROOT.',
+ default=host_paths.DIR_SOURCE_ROOT)
+ parser.add_argument('--variables', help='Variables to be made available in '
+ 'the template processing environment, as a GYP list '
+ '(e.g. --variables "channel=beta mstone=39")', default='')
+ parser.add_argument('--check-includes', action='store_true',
+ help='Enable inputs and includes checks.')
+ options = parser.parse_args()
+
+ inputs = build_utils.ParseGnList(options.inputs)
+ includes = build_utils.ParseGnList(options.includes)
+
+ if (options.output is None) == (options.outputs_zip is None):
+ parser.error('Exactly one of --output and --output-zip must be given')
+ if options.output and len(inputs) != 1:
+ parser.error('--output cannot be used with multiple inputs')
+ if options.outputs_zip and not options.inputs_base_dir:
+ parser.error('--inputs-base-dir must be given when --output-zip is used')
+
+ variables = _ParseVariables(options.variables, parser.error)
+ processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+ if options.output:
+ _ProcessFile(processor, inputs[0], options.output)
+ else:
+ _ProcessFiles(processor, inputs, options.inputs_base_dir,
+ options.outputs_zip)
+
+ if options.check_includes:
+ all_inputs = set(processor.GetLoadedTemplates())
+ all_inputs.difference_update(inputs)
+ all_inputs.difference_update(includes)
+ if all_inputs:
+ raise Exception('Found files not listed via --includes:\n' +
+ '\n'.join(sorted(all_inputs)))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/android/gyp/jinja_template.pydeps b/third_party/libwebrtc/build/android/gyp/jinja_template.pydeps
new file mode 100644
index 0000000000..98de9329b3
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/jinja_template.pydeps
@@ -0,0 +1,43 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/lint.py b/third_party/libwebrtc/build/android/gyp/lint.py
new file mode 100755
index 0000000000..61763c1624
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/lint.py
@@ -0,0 +1,494 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Runs Android's lint tool."""
+
+from __future__ import print_function
+
+import argparse
+import functools
+import logging
+import os
+import re
+import shutil
+import sys
+import time
+import traceback
+from xml.dom import minidom
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import manifest_utils
+from util import server_utils
+
_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/main/build/android/docs/lint.md' # pylint: disable=line-too-long

# These checks are not useful for chromium.
_DISABLED_ALWAYS = [
    "AppCompatResource",  # Lint does not correctly detect our appcompat lib.
    "Assert",  # R8 --force-enable-assertions is used to enable java asserts.
    "InflateParams",  # Null is ok when inflating views for dialogs.
    "InlinedApi",  # Constants are copied so they are always available.
    "LintBaseline",  # Don't warn about using baseline.xml files.
    "MissingApplicationIcon",  # False positive for non-production targets.
    "SwitchIntDef",  # Many C++ enums are not used at all in java.
    "UniqueConstants",  # Chromium enums allow aliases.
    "UnusedAttribute",  # Chromium apks have various minSdkVersion values.
    "ObsoleteLintCustomCheck",  # We have no control over custom lint checks.
]

# These checks are not useful for test targets and adds an unnecessary burden
# to suppress them.
_DISABLED_FOR_TESTS = [
    # We should not require test strings.xml files to explicitly add
    # translatable=false since they are not translated and not used in
    # production.
    "MissingTranslation",
    # Test strings.xml files often have simple names and are not translatable,
    # so it may conflict with a production string and cause this error.
    "Untranslatable",
    # Test targets often use the same strings target and resources target as
    # the production targets but may not use all of them.
    "UnusedResources",
    # TODO(wnwen): Turn this back on: triggering a real crash would require
    # running on devices with each of the various minSdkVersions.
    # Real NewApi violations crash the app, so the only ones that lint catches
    # but tests still succeed are false positives.
    "NewApi",
    # Tests should be allowed to access these methods/classes.
    "VisibleForTests",
]

# Names of the intermediate directories (under the lint gen dir) into which
# resource zips, srcjars and aars are extracted before running lint.
_RES_ZIP_DIR = 'RESZIPS'
_SRCJAR_DIR = 'SRCJARS'
_AAR_DIR = 'AARS'
+
+
def _SrcRelative(path):
  """Rewrites |path| to be relative to the top-level src directory."""
  src_root = build_utils.DIR_SOURCE_ROOT
  return os.path.relpath(path, src_root)
+
+
+def _GenerateProjectFile(android_manifest,
+ android_sdk_root,
+ cache_dir,
+ sources=None,
+ classpath=None,
+ srcjar_sources=None,
+ resource_sources=None,
+ custom_lint_jars=None,
+ custom_annotation_zips=None,
+ android_sdk_version=None):
+ project = ElementTree.Element('project')
+ root = ElementTree.SubElement(project, 'root')
+ # Run lint from output directory: crbug.com/1115594
+ root.set('dir', os.getcwd())
+ sdk = ElementTree.SubElement(project, 'sdk')
+ # Lint requires that the sdk path be an absolute path.
+ sdk.set('dir', os.path.abspath(android_sdk_root))
+ cache = ElementTree.SubElement(project, 'cache')
+ cache.set('dir', cache_dir)
+ main_module = ElementTree.SubElement(project, 'module')
+ main_module.set('name', 'main')
+ main_module.set('android', 'true')
+ main_module.set('library', 'false')
+ if android_sdk_version:
+ main_module.set('compile_sdk_version', android_sdk_version)
+ manifest = ElementTree.SubElement(main_module, 'manifest')
+ manifest.set('file', android_manifest)
+ if srcjar_sources:
+ for srcjar_file in srcjar_sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', srcjar_file)
+ if sources:
+ for source in sources:
+ src = ElementTree.SubElement(main_module, 'src')
+ src.set('file', source)
+ if classpath:
+ for file_path in classpath:
+ classpath_element = ElementTree.SubElement(main_module, 'classpath')
+ classpath_element.set('file', file_path)
+ if resource_sources:
+ for resource_file in resource_sources:
+ resource = ElementTree.SubElement(main_module, 'resource')
+ resource.set('file', resource_file)
+ if custom_lint_jars:
+ for lint_jar in custom_lint_jars:
+ lint = ElementTree.SubElement(main_module, 'lint-checks')
+ lint.set('file', lint_jar)
+ if custom_annotation_zips:
+ for annotation_zip in custom_annotation_zips:
+ annotation = ElementTree.SubElement(main_module, 'annotations')
+ annotation.set('file', annotation_zip)
+ return project
+
+
+def _RetrieveBackportedMethods(backported_methods_path):
+ with open(backported_methods_path) as f:
+ methods = f.read().splitlines()
+ # Methods look like:
+ # java/util/Set#of(Ljava/lang/Object;)Ljava/util/Set;
+ # But error message looks like:
+ # Call requires API level R (current min is 21): java.util.Set#of [NewApi]
+ methods = (m.replace('/', '\\.') for m in methods)
+ methods = (m[:m.index('(')] for m in methods)
+ return sorted(set(methods))
+
+
+def _GenerateConfigXmlTree(orig_config_path, backported_methods):
+ if orig_config_path:
+ root_node = ElementTree.parse(orig_config_path).getroot()
+ else:
+ root_node = ElementTree.fromstring('<lint/>')
+
+ issue_node = ElementTree.SubElement(root_node, 'issue')
+ issue_node.attrib['id'] = 'NewApi'
+ ignore_node = ElementTree.SubElement(issue_node, 'ignore')
+ ignore_node.attrib['regexp'] = '|'.join(backported_methods)
+ return root_node
+
+
def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths,
                             min_sdk_version, android_sdk_version):
  """Builds a patched manifest document for lint to analyze.

  Args:
    original_manifest_path: Path to the target's AndroidManifest.xml.
    extra_manifest_paths: Manifests whose <application> children are copied
      into the main manifest.
    min_sdk_version: Forced into android:minSdkVersion of <uses-sdk>.
    android_sdk_version: Forced into android:targetSdkVersion of <uses-sdk>.

  Returns:
    The patched document as returned by manifest_utils.ParseManifest
    (presumably an ElementTree - confirm against manifest_utils).
  """
  # Set minSdkVersion in the manifest to the correct value.
  doc, manifest, app_node = manifest_utils.ParseManifest(original_manifest_path)

  # TODO(crbug.com/1126301): Should this be done using manifest merging?
  # Add anything in the application node of the extra manifests to the main
  # manifest to prevent unused resource errors.
  for path in extra_manifest_paths:
    _, _, extra_app_node = manifest_utils.ParseManifest(path)
    for node in extra_app_node:
      app_node.append(node)

  if app_node.find(
      '{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE) is None:
    # Assume no backup is intended, appeases AllowBackup lint check and keeping
    # it working for manifests that do define android:allowBackup.
    app_node.set('{%s}allowBackup' % manifest_utils.ANDROID_NAMESPACE, 'false')

  # Create <uses-sdk> (as the first child) if the manifest lacks one, then
  # overwrite its min/target SDK versions unconditionally.
  uses_sdk = manifest.find('./uses-sdk')
  if uses_sdk is None:
    uses_sdk = ElementTree.Element('uses-sdk')
    manifest.insert(0, uses_sdk)
  uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
               min_sdk_version)
  uses_sdk.set('{%s}targetSdkVersion' % manifest_utils.ANDROID_NAMESPACE,
               android_sdk_version)
  return doc
+
+
def _WriteXmlFile(root, path):
  """Pretty-prints the ElementTree element |root| as XML into |path|.

  Parent directories are created as needed and the file is written via
  build_utils.AtomicOutput, so readers never observe a partial file.
  """
  logging.info('Writing xml file %s', path)
  build_utils.MakeDirectory(os.path.dirname(path))
  with build_utils.AtomicOutput(path) as f:
    # Although we can write it just with ElementTree.tostring, using minidom
    # makes it a lot easier to read as a human (also on code search).
    f.write(
        minidom.parseString(ElementTree.tostring(
            root, encoding='utf-8')).toprettyxml(indent='  ').encode('utf-8'))
+
+
def _RunLint(lint_binary_path,
             backported_methods_path,
             config_path,
             manifest_path,
             extra_manifest_paths,
             sources,
             classpath,
             cache_dir,
             android_sdk_version,
             aars,
             srcjars,
             min_sdk_version,
             resource_sources,
             resource_zips,
             android_sdk_root,
             lint_gen_dir,
             baseline,
             testonly_target=False,
             warnings_as_errors=False):
  """Generates lint's inputs (config, manifest, project.xml) and runs lint.

  Resource zips, aars and srcjars are extracted into subdirectories of
  |lint_gen_dir| first, since lint wants plain files. The generated
  intermediates are deleted afterwards unless the LINT_DEBUG environment
  variable is set (to aid debugging lint configuration problems).

  Raises whatever build_utils.CheckOutput raises when lint fails or (with
  |warnings_as_errors|) produces any output.
  """
  logging.info('Lint starting')

  cmd = [
      lint_binary_path,
      '--quiet',  # Silences lint's "." progress updates.
      '--disable',
      ','.join(_DISABLED_ALWAYS),
  ]

  if baseline:
    cmd.extend(['--baseline', baseline])
  if testonly_target:
    cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)])

  # Fall back to a stub manifest when the target does not provide one.
  if not manifest_path:
    manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build',
                                 'android', 'AndroidManifest.xml')

  logging.info('Generating config.xml')
  backported_methods = _RetrieveBackportedMethods(backported_methods_path)
  config_xml_node = _GenerateConfigXmlTree(config_path, backported_methods)
  generated_config_path = os.path.join(lint_gen_dir, 'config.xml')
  _WriteXmlFile(config_xml_node, generated_config_path)
  cmd.extend(['--config', generated_config_path])

  logging.info('Generating Android manifest file')
  android_manifest_tree = _GenerateAndroidManifest(manifest_path,
                                                   extra_manifest_paths,
                                                   min_sdk_version,
                                                   android_sdk_version)
  # Include the rebased manifest_path in the lint generated path so that it is
  # clear in error messages where the original AndroidManifest.xml came from.
  lint_android_manifest_path = os.path.join(lint_gen_dir, manifest_path)
  _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path)

  resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR)
  # These are zip files with generated resources (e. g. strings from GRD).
  logging.info('Extracting resource zips')
  for resource_zip in resource_zips:
    # Use a consistent root and name rather than a temporary file so that
    # suppressions can be local to the lint target and the resource target.
    resource_dir = os.path.join(resource_root_dir, resource_zip)
    shutil.rmtree(resource_dir, True)
    os.makedirs(resource_dir)
    resource_sources.extend(
        build_utils.ExtractAll(resource_zip, path=resource_dir))

  logging.info('Extracting aars')
  aar_root_dir = os.path.join(lint_gen_dir, _AAR_DIR)
  custom_lint_jars = []
  custom_annotation_zips = []
  if aars:
    for aar in aars:
      # androidx custom lint checks require a newer version of lint. Disable
      # until we update see https://crbug.com/1225326
      if 'androidx' in aar:
        continue
      # Use relative source for aar files since they are not generated.
      aar_dir = os.path.join(aar_root_dir,
                             os.path.splitext(_SrcRelative(aar))[0])
      shutil.rmtree(aar_dir, True)
      os.makedirs(aar_dir)
      aar_files = build_utils.ExtractAll(aar, path=aar_dir)
      for f in aar_files:
        if f.endswith('lint.jar'):
          custom_lint_jars.append(f)
        elif f.endswith('annotations.zip'):
          custom_annotation_zips.append(f)

  logging.info('Extracting srcjars')
  srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR)
  srcjar_sources = []
  if srcjars:
    for srcjar in srcjars:
      # Use path without extensions since otherwise the file name includes
      # .srcjar and lint treats it as a srcjar.
      srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0])
      shutil.rmtree(srcjar_dir, True)
      os.makedirs(srcjar_dir)
      # Sadly lint's srcjar support is broken since it only considers the first
      # srcjar. Until we roll a lint version with that fixed, we need to extract
      # it ourselves.
      srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir))

  logging.info('Generating project file')
  project_file_root = _GenerateProjectFile(lint_android_manifest_path,
                                           android_sdk_root, cache_dir, sources,
                                           classpath, srcjar_sources,
                                           resource_sources, custom_lint_jars,
                                           custom_annotation_zips,
                                           android_sdk_version)

  project_xml_path = os.path.join(lint_gen_dir, 'project.xml')
  _WriteXmlFile(project_file_root, project_xml_path)
  cmd += ['--project', project_xml_path]

  logging.info('Preparing environment variables')
  env = os.environ.copy()
  # It is important that lint uses the checked-in JDK11 as it is almost 50%
  # faster than JDK8.
  env['JAVA_HOME'] = build_utils.JAVA_HOME
  # This is necessary so that lint errors print stack traces in stdout.
  env['LINT_PRINT_STACKTRACE'] = 'true'
  if baseline and not os.path.exists(baseline):
    # Generating new baselines is only done locally, and requires more memory to
    # avoid OOMs.
    env['LINT_OPTS'] = '-Xmx4g'
  else:
    # The default set in the wrapper script is 1g, but it seems not enough :(
    env['LINT_OPTS'] = '-Xmx2g'

  # This filter is necessary for JDK11.
  stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings
  stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found')

  start = time.time()
  logging.debug('Lint command %s', ' '.join(cmd))
  # Start pessimistic so that an exception from CheckOutput still triggers
  # the failure-footer in the finally block below.
  failed = True
  try:
    failed = bool(
        build_utils.CheckOutput(cmd,
                                env=env,
                                print_stdout=True,
                                stdout_filter=stdout_filter,
                                stderr_filter=stderr_filter,
                                fail_on_output=warnings_as_errors))
  finally:
    # When not treating warnings as errors, display the extra footer.
    is_debug = os.environ.get('LINT_DEBUG', '0') != '0'

    if failed:
      print('- For more help with lint in Chrome:', _LINT_MD_URL)
      if is_debug:
        print('- DEBUG MODE: Here is the project.xml: {}'.format(
            _SrcRelative(project_xml_path)))
      else:
        print('- Run with LINT_DEBUG=1 to enable lint configuration debugging')

    end = time.time() - start
    logging.info('Lint command took %ss', end)
    if not is_debug:
      shutil.rmtree(aar_root_dir, ignore_errors=True)
      shutil.rmtree(resource_root_dir, ignore_errors=True)
      shutil.rmtree(srcjar_root_dir, ignore_errors=True)
      os.unlink(project_xml_path)

  logging.info('Lint completed')
+
+
def _ParseArgs(argv):
  """Parses lint.py's command line.

  GN-list arguments (--aars, --srcjars, --classpath, ...) are expanded into
  Python lists on the returned argparse.Namespace.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--target-name', help='Fully qualified GN target name.')
  parser.add_argument('--skip-build-server',
                      action='store_true',
                      help='Avoid using the build server.')
  parser.add_argument('--lint-binary-path',
                      required=True,
                      help='Path to lint executable.')
  parser.add_argument('--backported-methods',
                      help='Path to backported methods file created by R8.')
  parser.add_argument('--cache-dir',
                      required=True,
                      help='Path to the directory in which the android cache '
                      'directory tree should be stored.')
  parser.add_argument('--config-path', help='Path to lint suppressions file.')
  parser.add_argument('--lint-gen-dir',
                      required=True,
                      help='Path to store generated xml files.')
  parser.add_argument('--stamp', help='Path to stamp upon success.')
  parser.add_argument('--android-sdk-version',
                      help='Version (API level) of the Android SDK used for '
                      'building.')
  parser.add_argument('--min-sdk-version',
                      required=True,
                      help='Minimal SDK version to lint against.')
  parser.add_argument('--android-sdk-root',
                      required=True,
                      help='Lint needs an explicit path to the android sdk.')
  parser.add_argument('--testonly',
                      action='store_true',
                      help='If set, some checks like UnusedResources will be '
                      'disabled since they are not helpful for test '
                      'targets.')
  parser.add_argument('--create-cache',
                      action='store_true',
                      help='Whether this invocation is just warming the cache.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  parser.add_argument('--java-sources',
                      help='File containing a list of java sources files.')
  parser.add_argument('--aars', help='GN list of included aars.')
  parser.add_argument('--srcjars', help='GN list of included srcjars.')
  parser.add_argument('--manifest-path',
                      help='Path to original AndroidManifest.xml')
  parser.add_argument('--extra-manifest-paths',
                      action='append',
                      help='GYP-list of manifest paths to merge into the '
                      'original AndroidManifest.xml')
  parser.add_argument('--resource-sources',
                      default=[],
                      action='append',
                      help='GYP-list of resource sources files, similar to '
                      'java sources files, but for resource files.')
  parser.add_argument('--resource-zips',
                      default=[],
                      action='append',
                      help='GYP-list of resource zips, zip files of generated '
                      'resource files.')
  parser.add_argument('--classpath',
                      help='List of jars to add to the classpath.')
  parser.add_argument('--baseline',
                      help='Baseline file to ignore existing errors and fail '
                      'on new errors.')

  # Expand @FileArg references before parsing, then turn all GN lists into
  # real Python lists.
  args = parser.parse_args(build_utils.ExpandFileArgs(argv))
  args.java_sources = build_utils.ParseGnList(args.java_sources)
  args.aars = build_utils.ParseGnList(args.aars)
  args.srcjars = build_utils.ParseGnList(args.srcjars)
  args.resource_sources = build_utils.ParseGnList(args.resource_sources)
  args.extra_manifest_paths = build_utils.ParseGnList(args.extra_manifest_paths)
  args.resource_zips = build_utils.ParseGnList(args.resource_zips)
  args.classpath = build_utils.ParseGnList(args.classpath)
  return args
+
+
def main():
  """Entry point: reads source lists, runs lint, writes stamp and depfile."""
  build_utils.InitLogging('LINT_DEBUG')
  args = _ParseArgs(sys.argv[1:])

  # TODO(wnwen): Consider removing lint cache now that there are only two lint
  #     invocations.
  # Avoid parallelizing cache creation since lint runs without the cache defeat
  # the purpose of creating the cache in the first place.
  if (not args.create_cache and not args.skip_build_server
      and server_utils.MaybeRunCommand(
          name=args.target_name, argv=sys.argv, stamp_file=args.stamp)):
    # The build server took over this invocation; nothing more to do here.
    return

  # The --java-sources / --resource-sources flags name files that themselves
  # contain lists of sources; expand them here.
  sources = []
  for java_sources_file in args.java_sources:
    sources.extend(build_utils.ReadSourcesList(java_sources_file))
  resource_sources = []
  for resource_sources_file in args.resource_sources:
    resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file))

  possible_depfile_deps = (args.srcjars + args.resource_zips + sources +
                           resource_sources + [
                               args.baseline,
                               args.manifest_path,
                           ])
  # Filter out unset (None/empty) optional paths.
  depfile_deps = [p for p in possible_depfile_deps if p]

  _RunLint(args.lint_binary_path,
           args.backported_methods,
           args.config_path,
           args.manifest_path,
           args.extra_manifest_paths,
           sources,
           args.classpath,
           args.cache_dir,
           args.android_sdk_version,
           args.aars,
           args.srcjars,
           args.min_sdk_version,
           resource_sources,
           args.resource_zips,
           args.android_sdk_root,
           args.lint_gen_dir,
           args.baseline,
           testonly_target=args.testonly,
           warnings_as_errors=args.warnings_as_errors)
  logging.info('Creating stamp file')
  build_utils.Touch(args.stamp)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.stamp, depfile_deps)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/lint.pydeps b/third_party/libwebrtc/build/android/gyp/lint.pydeps
new file mode 100644
index 0000000000..0994e19a4a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/lint.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py
+../../gn_helpers.py
+lint.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
+util/server_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/merge_manifest.py b/third_party/libwebrtc/build/android/gyp/merge_manifest.py
new file mode 100755
index 0000000000..d0a93a8c78
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/merge_manifest.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges dependency Android manifests into a root manifest."""
+
+import argparse
+import contextlib
+import os
+import sys
+import tempfile
+import xml.etree.ElementTree as ElementTree
+
+from util import build_utils
+from util import manifest_utils
+
_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger'
# Jars needed on the classpath to run the manifest merger, relative to the
# SDK cmdline-tools 'lib' directory (see _BuildManifestMergerClasspath).
_MANIFEST_MERGER_JARS = [
    os.path.join('build-system', 'manifest-merger.jar'),
    os.path.join('common', 'common.jar'),
    os.path.join('sdk-common', 'sdk-common.jar'),
    os.path.join('sdklib', 'sdklib.jar'),
    os.path.join('external', 'com', 'google', 'guava', 'guava', '28.1-jre',
                 'guava-28.1-jre.jar'),
    os.path.join('external', 'kotlin-plugin-ij', 'Kotlin', 'kotlinc', 'lib',
                 'kotlin-stdlib.jar'),
    os.path.join('external', 'com', 'google', 'code', 'gson', 'gson', '2.8.6',
                 'gson-2.8.6.jar'),
]
+
+
@contextlib.contextmanager
def _ProcessManifest(manifest_path, min_sdk_version, target_sdk_version,
                     max_sdk_version, manifest_package):
  """Patches an Android manifest's package and performs assertions to ensure
  correctness for the manifest.

  Yields:
    (patched_manifest_path, package_name). The patched manifest is a
    NamedTemporaryFile, so it is deleted when the context exits.
  """
  doc, manifest, _ = manifest_utils.ParseManifest(manifest_path)
  manifest_utils.AssertUsesSdk(manifest, min_sdk_version, target_sdk_version,
                               max_sdk_version)
  assert manifest_utils.GetPackage(manifest) or manifest_package, \
      'Must set manifest package in GN or in AndroidManifest.xml'
  manifest_utils.AssertPackage(manifest, manifest_package)
  # A GN-provided package overrides whatever the manifest declared.
  if manifest_package:
    manifest.set('package', manifest_package)
  tmp_prefix = os.path.basename(manifest_path)
  with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest:
    manifest_utils.SaveManifest(doc, patched_manifest.name)
    yield patched_manifest.name, manifest_utils.GetPackage(manifest)
+
+
def _BuildManifestMergerClasspath(android_sdk_cmdline_tools):
  """Returns a ':'-joined Java classpath of the manifest merger's jars."""
  jar_paths = []
  for jar in _MANIFEST_MERGER_JARS:
    jar_paths.append(os.path.join(android_sdk_cmdline_tools, 'lib', jar))
  return ':'.join(jar_paths)
+
+
def main(argv):
  """Entry point: invokes the Android manifest merger and validates output."""
  argv = build_utils.ExpandFileArgs(argv)
  parser = argparse.ArgumentParser(description=__doc__)
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--android-sdk-cmdline-tools',
      help='Path to SDK\'s cmdline-tools folder.',
      required=True)
  parser.add_argument('--root-manifest',
                      help='Root manifest which to merge into',
                      required=True)
  parser.add_argument('--output', help='Output manifest path', required=True)
  parser.add_argument('--extras',
                      help='GN list of additional manifest to merge')
  parser.add_argument(
      '--min-sdk-version',
      required=True,
      help='android:minSdkVersion for merging.')
  parser.add_argument(
      '--target-sdk-version',
      required=True,
      help='android:targetSdkVersion for merging.')
  parser.add_argument(
      '--max-sdk-version', help='android:maxSdkVersion for merging.')
  parser.add_argument(
      '--manifest-package',
      help='Package name of the merged AndroidManifest.xml.')
  parser.add_argument('--warnings-as-errors',
                      action='store_true',
                      help='Treat all warnings as errors.')
  args = parser.parse_args(argv)

  classpath = _BuildManifestMergerClasspath(args.android_sdk_cmdline_tools)

  with build_utils.AtomicOutput(args.output) as output:
    cmd = build_utils.JavaCmd(args.warnings_as_errors) + [
        '-cp',
        classpath,
        _MANIFEST_MERGER_MAIN_CLASS,
        '--out',
        output.name,
        '--property',
        'MIN_SDK_VERSION=' + args.min_sdk_version,
        '--property',
        'TARGET_SDK_VERSION=' + args.target_sdk_version,
    ]

    if args.max_sdk_version:
      cmd += [
          '--property',
          'MAX_SDK_VERSION=' + args.max_sdk_version,
      ]

    extras = build_utils.ParseGnList(args.extras)
    if extras:
      cmd += ['--libs', ':'.join(extras)]

    # Patch the root manifest (package override + assertions) before handing
    # it to the merger; the patched file only lives for this block.
    with _ProcessManifest(args.root_manifest, args.min_sdk_version,
                          args.target_sdk_version, args.max_sdk_version,
                          args.manifest_package) as tup:
      root_manifest, package = tup
      cmd += [
          '--main',
          root_manifest,
          '--property',
          'PACKAGE=' + package,
          '--remove-tools-declarations',
      ]
      build_utils.CheckOutput(
          cmd,
          # https://issuetracker.google.com/issues/63514300:
          # The merger doesn't set a nonzero exit code for failures.
          fail_func=lambda returncode, stderr: returncode != 0 or build_utils.
          IsTimeStale(output.name, [root_manifest] + extras),
          fail_on_output=args.warnings_as_errors)

    # Check for correct output.
    _, manifest, _ = manifest_utils.ParseManifest(output.name)
    manifest_utils.AssertUsesSdk(manifest, args.min_sdk_version,
                                 args.target_sdk_version)
    manifest_utils.AssertPackage(manifest, package)

  if args.depfile:
    inputs = extras + classpath.split(':')
    build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs)


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/merge_manifest.pydeps b/third_party/libwebrtc/build/android/gyp/merge_manifest.pydeps
new file mode 100644
index 0000000000..ef9bb34047
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/merge_manifest.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py
+../../gn_helpers.py
+merge_manifest.py
+util/__init__.py
+util/build_utils.py
+util/manifest_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/native_libraries_template.py b/third_party/libwebrtc/build/android/gyp/native_libraries_template.py
new file mode 100644
index 0000000000..cf336ecf49
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/native_libraries_template.py
@@ -0,0 +1,39 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
# str.format() template for the generated NativeLibraries.java. Doubled braces
# ({{ / }}) are literal braces in the Java output; the single-brace fields
# (MAYBE_FINAL, USE_LINKER, USE_LIBRARY_IN_ZIP_FILE, USE_MODERN_LINKER,
# LIBRARIES, VERSION_NUMBER, CPU_FAMILY) are filled in by the caller
# (write_native_libraries_java.py per the header comment below).
NATIVE_LIBRARIES_TEMPLATE = """\
// This file is autogenerated by
//     build/android/gyp/write_native_libraries_java.py
// Please do not change its content.

package org.chromium.build;

public class NativeLibraries {{
    public static final int CPU_FAMILY_UNKNOWN = 0;
    public static final int CPU_FAMILY_ARM = 1;
    public static final int CPU_FAMILY_MIPS = 2;
    public static final int CPU_FAMILY_X86 = 3;

    // Set to true to enable the use of the Chromium Linker.
    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};

    // This is the list of native libraries to be loaded (in the correct order)
    // by LibraryLoader.java.
    // TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java
    // directly. The two ways of library loading should be refactored into one.
    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};

    // This is the expected version of the 'main' native library, which is the one that
    // implements the initial set of base JNI functions including
    // base::android::nativeGetVersionName()
    // TODO(torne): This is public to work around classloader issues in Trichrome
    // where NativeLibraries is not in the same dex as LibraryLoader.
    // We should instead split up Java code along package boundaries.
    public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER};

    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
}}
"""
diff --git a/third_party/libwebrtc/build/android/gyp/nocompile_test.py b/third_party/libwebrtc/build/android/gyp/nocompile_test.py
new file mode 100755
index 0000000000..69fb395067
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/nocompile_test.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Checks that compiling targets in BUILD.gn file fails."""
+
+import argparse
+import json
+import os
+import subprocess
+import re
+import sys
+from util import build_utils
+
# Absolute path of the Chromium src checkout (four levels above this file).
_CHROMIUM_SRC = os.path.normpath(os.path.join(__file__, '..', '..', '..', '..'))
_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'depot_tools', 'ninja')

# Relative to _CHROMIUM_SRC
_GN_SRC_REL_PATH = os.path.join('third_party', 'depot_tools', 'gn')

# Regex for determining whether compile failed because 'gn gen' needs to be run.
_GN_GEN_REGEX = re.compile(r'ninja: (error|fatal):')
+
+
+def _raise_command_exception(args, returncode, output):
+ """Raises an exception whose message describes a command failure.
+
+ Args:
+ args: shell command-line (as passed to subprocess.Popen())
+ returncode: status code.
+ output: command output.
+ Raises:
+ a new Exception.
+ """
+ message = 'Command failed with status {}: {}\n' \
+ 'Output:-----------------------------------------\n{}\n' \
+ '------------------------------------------------\n'.format(
+ returncode, args, output)
+ raise Exception(message)
+
+
+def _run_command(args, cwd=None):
+ """Runs shell command. Raises exception if command fails."""
+ p = subprocess.Popen(args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ cwd=cwd)
+ pout, _ = p.communicate()
+ if p.returncode != 0:
+ _raise_command_exception(args, p.returncode, pout)
+
+
+def _run_command_get_failure_output(args):
+ """Runs shell command.
+
+ Returns:
+ Command output if command fails, None if command succeeds.
+ """
+ p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ pout, _ = p.communicate()
+
+ if p.returncode == 0:
+ return None
+
+ # For Python3 only:
+ if isinstance(pout, bytes) and sys.version_info >= (3, ):
+ pout = pout.decode('utf-8')
+ return '' if pout is None else pout
+
+
+def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args):
+ """Copies args.gn.
+
+ Args:
+ src_args_path: args.gn file to copy.
+ dest_args_path: Copy file destination.
+ extra_args: Text to append to args.gn after copy.
+ """
+ with open(src_args_path) as f_in, open(dest_args_path, 'w') as f_out:
+ f_out.write(f_in.read())
+ f_out.write('\n')
+ f_out.write('\n'.join(extra_args))
+
+
+def _find_regex_in_test_failure_output(test_output, regex):
+ """Searches for regex in test output.
+
+ Args:
+ test_output: test output.
+ regex: regular expression to search for.
+ Returns:
+ Whether the regular expression was found in the part of the test output
+ after the 'FAILED' message.
+
+ If the regex does not contain '\n':
+ the first 5 lines after the 'FAILED' message (including the text on the
+ line after the 'FAILED' message) is searched.
+ Otherwise:
+ the entire test output after the 'FAILED' message is searched.
+ """
+ if test_output is None:
+ return False
+
+ failed_index = test_output.find('FAILED')
+ if failed_index < 0:
+ return False
+
+ failure_message = test_output[failed_index:]
+ if regex.find('\n') >= 0:
+ return re.search(regex, failure_message)
+
+ return _search_regex_in_list(failure_message.split('\n')[:5], regex)
+
+
+def _search_regex_in_list(value, regex):
+ for line in value:
+ if re.search(regex, line):
+ return True
+ return False
+
+
def _do_build_get_failure_output(gn_path, gn_cmd, options):
  """Optionally runs 'gn <gn_cmd>', then ninja-builds |gn_path|.

  Args:
    gn_path: GN target label without the leading '//'.
    gn_cmd: gn subcommand to run first ('gen', 'clean', ...), or None to
      skip gn entirely.
    options: parsed command line options (needs .out_dir).

  Returns:
    Ninja output if the build fails, None if it succeeds.
  """
  # Extract directory from test target. As all of the test targets are declared
  # in the same BUILD.gn file, it does not matter which test target is used.
  target_dir = gn_path.rsplit(':', 1)[0]

  if gn_cmd is not None:
    gn_args = [
        _GN_SRC_REL_PATH, '--root-target=' + target_dir, gn_cmd,
        os.path.relpath(options.out_dir, _CHROMIUM_SRC)
    ]
    _run_command(gn_args, cwd=_CHROMIUM_SRC)

  ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path]
  return _run_command_get_failure_output(ninja_args)
+
+
def main():
  """Entry point: builds each test target and verifies it fails as expected."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--gn-args-path',
                      required=True,
                      help='Path to args.gn file.')
  parser.add_argument('--test-configs-path',
                      required=True,
                      help='Path to file with test configurations')
  parser.add_argument('--out-dir',
                      required=True,
                      help='Path to output directory to use for compilation.')
  parser.add_argument('--stamp', help='Path to touch.')
  options = parser.parse_args()

  with open(options.test_configs_path) as f:
    # Escape '\' in '\.' now. This avoids having to do the escaping in the test
    # specification.
    config_text = f.read().replace(r'\.', r'\\.')
  test_configs = json.loads(config_text)

  if not os.path.exists(options.out_dir):
    os.makedirs(options.out_dir)

  out_gn_args_path = os.path.join(options.out_dir, 'args.gn')
  extra_gn_args = [
      'enable_android_nocompile_tests = true',
      'treat_warnings_as_errors = true',
      # GOMA does not work with non-standard output directories.
      'use_goma = false',
  ]
  _copy_and_append_gn_args(options.gn_args_path, out_gn_args_path,
                           extra_gn_args)

  ran_gn_gen = False
  did_clean_build = False
  error_messages = []
  for config in test_configs:
    # Strip leading '//'
    gn_path = config['target'][2:]
    expect_regex = config['expect_regex']

    test_output = _do_build_get_failure_output(gn_path, None, options)

    # 'gn gen' takes > 1s to run. Only run 'gn gen' if it is needed for compile.
    if (test_output
        and _search_regex_in_list(test_output.split('\n'), _GN_GEN_REGEX)):
      # 'gn gen' should be needed at most once per invocation.
      assert not ran_gn_gen
      ran_gn_gen = True
      test_output = _do_build_get_failure_output(gn_path, 'gen', options)

    if (not _find_regex_in_test_failure_output(test_output, expect_regex)
        and not did_clean_build):
      # Ensure the failure is not due to incremental build.
      did_clean_build = True
      test_output = _do_build_get_failure_output(gn_path, 'clean', options)

    if not _find_regex_in_test_failure_output(test_output, expect_regex):
      if test_output is None:
        # Purpose of quotes at beginning of message is to make it clear that
        # "Compile successful." is not a compiler log message.
        test_output = '""\nCompile successful.'
      error_message = '//{} failed.\nExpected compile output pattern:\n'\
          '{}\nActual compile output:\n{}'.format(
              gn_path, expect_regex, test_output)
      error_messages.append(error_message)

  if error_messages:
    raise Exception('\n'.join(error_messages))

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  main()
diff --git a/third_party/libwebrtc/build/android/gyp/optimize_resources.py b/third_party/libwebrtc/build/android/gyp/optimize_resources.py
new file mode 100755
index 0000000000..d3b11636f5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/optimize_resources.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+#
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import sys
+
+from util import build_utils
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Args:
+    args: Command-line arguments, not including the program name.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args()
+  """
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--aapt2-path',
+                      required=True,
+                      help='Path to the Android aapt2 tool.')
+  parser.add_argument(
+      '--short-resource-paths',
+      action='store_true',
+      help='Whether to shorten resource paths inside the apk or module.')
+  parser.add_argument(
+      '--strip-resource-names',
+      action='store_true',
+      help='Whether to strip resource names from the resource table of the apk '
+      'or module.')
+  parser.add_argument('--proto-path',
+                      required=True,
+                      help='Input proto format resources APK.')
+  parser.add_argument('--resources-config-paths',
+                      default='[]',
+                      help='GN list of paths to aapt2 resources config files.')
+  parser.add_argument('--r-text-in',
+                      required=True,
+                      help='Path to R.txt. Used to exclude id/ resources.')
+  parser.add_argument(
+      '--resources-path-map-out-path',
+      help='Path to file produced by aapt2 that maps original resource paths '
+      'to shortened resource paths inside the apk or module.')
+  parser.add_argument('--optimized-proto-path',
+                      required=True,
+                      help='Output for `aapt2 optimize`.')
+  options = parser.parse_args(args)
+
+  # --resources-config-paths arrives as a GN list literal; expand it into a
+  # real Python list of paths.
+  options.resources_config_paths = build_utils.ParseGnList(
+      options.resources_config_paths)
+
+  # The path map is a by-product of path shortening, so requesting it without
+  # --short-resource-paths is a configuration error.
+  if options.resources_path_map_out_path and not options.short_resource_paths:
+    parser.error(
+        '--resources-path-map-out-path requires --short-resource-paths')
+  return options
+
+
+def _CombineResourceConfigs(resources_config_paths, out_config_path):
+  """Concatenates several aapt2 resource config files into one.
+
+  Args:
+    resources_config_paths: Paths of the input config files.
+    out_config_path: Path the combined config is written to.
+  """
+  with open(out_config_path, 'w') as out_config:
+    for config_path in resources_config_paths:
+      with open(config_path) as config:
+        out_config.write(config.read())
+        # Ensure entries from consecutive files do not run together when an
+        # input file lacks a trailing newline.
+        out_config.write('\n')
+
+
+def _ExtractNonCollapsableResources(rtxt_path):
+  """Extract resources that should not be collapsed from the R.txt file
+
+  Resources of type ID are references to UI elements/views. They are used by
+  UI automation testing frameworks. They are kept in so that they don't break
+  tests, even though they may not actually be used during runtime. See
+  https://crbug.com/900993
+  App icons (aka mipmaps) are sometimes referenced by other apps by name so
+  must be kept as well. See https://b/161564466
+
+  Args:
+    rtxt_path: Path to R.txt file with all the resources
+  Returns:
+    List of resources in the form of <resource_type>/<resource_name>
+  """
+  resources = []
+  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
+  with open(rtxt_path) as rtxt:
+    for line in rtxt:
+      # R.txt lines contain a type column (e.g. "int id foo 0x7f..."); match
+      # on the space-delimited type token.
+      for resource_type in _NO_COLLAPSE_TYPES:
+        if ' {} '.format(resource_type) in line:
+          resource_name = line.split()[2]
+          resources.append('{}/{}'.format(resource_type, resource_name))
+  return resources
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
+  """Optimize intermediate .ap_ file with aapt2.
+
+  Args:
+    output: Path to write to.
+    options: The command-line options.
+    temp_dir: A temporary directory.
+    unoptimized_path: path of the apk to optimize.
+    r_txt_path: path to the R.txt file of the unoptimized apk.
+  """
+  optimize_command = [
+      options.aapt2_path,
+      'optimize',
+      unoptimized_path,
+      '-o',
+      output,
+  ]
+
+  # Optimize the resources.pb file by obfuscating resource names and only
+  # allow usage via R.java constant.
+  if options.strip_resource_names:
+    no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path)
+    gen_config_path = os.path.join(temp_dir, 'aapt2.config')
+    if options.resources_config_paths:
+      _CombineResourceConfigs(options.resources_config_paths, gen_config_path)
+    # Append the exemptions so aapt2 keeps the names of id/ and mipmap/
+    # resources even while collapsing everything else.
+    with open(gen_config_path, 'a') as config:
+      for resource in no_collapse_resources:
+        config.write('{}#no_collapse\n'.format(resource))
+
+    optimize_command += [
+        '--collapse-resource-names',
+        '--resources-config-path',
+        gen_config_path,
+    ]
+
+  if options.short_resource_paths:
+    optimize_command += ['--shorten-resource-paths']
+  if options.resources_path_map_out_path:
+    optimize_command += [
+        '--resource-path-shortening-map', options.resources_path_map_out_path
+    ]
+
+  logging.debug('Running aapt2 optimize')
+  build_utils.CheckOutput(optimize_command,
+                          print_stdout=False,
+                          print_stderr=False)
+
+
+def main(args):
+  """Runs `aapt2 optimize` on the input proto apk (see _OptimizeApk)."""
+  options = _ParseArgs(args)
+  with build_utils.TempDir() as temp_dir:
+    _OptimizeApk(options.optimized_proto_path, options, temp_dir,
+                 options.proto_path, options.r_text_in)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/optimize_resources.pydeps b/third_party/libwebrtc/build/android/gyp/optimize_resources.pydeps
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/optimize_resources.pydeps
diff --git a/third_party/libwebrtc/build/android/gyp/prepare_resources.py b/third_party/libwebrtc/build/android/gyp/prepare_resources.py
new file mode 100755
index 0000000000..ba75afaee3
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/prepare_resources.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resource directories to generate .resources.zip and R.txt
+files."""
+
+import argparse
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+from util import jar_info_utils
+from util import md5_check
+from util import resources_parser
+from util import resource_utils
+
+
+def _ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from argparse.ArgumentParser.parse_args(), with
+    options.sources (file list) and options.resource_dirs (derived) attached.
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_argument('--res-sources-path',
+                      required=True,
+                      help='Path to a list of input resources for this target.')
+
+  parser.add_argument(
+      '--r-text-in',
+      help='Path to pre-existing R.txt. Its resource IDs override those found '
+      'in the generated R.txt when generating R.java.')
+
+  parser.add_argument(
+      '--allow-missing-resources',
+      action='store_true',
+      help='Do not fail if some resources exist in the res/ dir but are not '
+      'listed in the sources.')
+
+  parser.add_argument(
+      '--resource-zip-out',
+      help='Path to a zip archive containing all resources from '
+      '--resource-dirs, merged into a single directory tree.')
+
+  parser.add_argument('--r-text-out',
+                      help='Path to store the generated R.txt file.')
+
+  parser.add_argument('--strip-drawables',
+                      action="store_true",
+                      help='Remove drawables from the resources.')
+
+  options = parser.parse_args(args)
+
+  # The sources file explicitly lists every resource file; the set of res/
+  # directories is deduced from it rather than passed separately.
+  with open(options.res_sources_path) as f:
+    options.sources = f.read().splitlines()
+  options.resource_dirs = resource_utils.DeduceResourceDirsFromFileList(
+      options.sources)
+
+  return options
+
+
+def _CheckAllFilesListed(resource_files, resource_dirs):
+  """Exits with an error if res/ dirs contain files absent from the sources.
+
+  Args:
+    resource_files: Resource file paths listed in the BUILD.gn target.
+    resource_dirs: Resource directories to scan for files actually on disk.
+  """
+  resource_files = set(resource_files)
+  missing_files = []
+  for path, _ in resource_utils.IterResourceFilesInDirectories(resource_dirs):
+    if path not in resource_files:
+      missing_files.append(path)
+
+  if missing_files:
+    sys.stderr.write('Error: Found files not listed in the sources list of '
+                     'the BUILD.gn target:\n')
+    for path in missing_files:
+      sys.stderr.write('{}\n'.format(path))
+    sys.exit(1)
+
+
+def _ZipResources(resource_dirs, zip_path, ignore_pattern):
+  """Zips the given res/ directories into zip_path (plus a .info sidecar).
+
+  Args:
+    resource_dirs: Resource directories to merge into the archive.
+    zip_path: Output .zip path. A zip_path + '.info' file is also written,
+      mapping archive paths to attributed source paths.
+    ignore_pattern: A string of ':' delimited list of globs used to ignore
+      files that should not be part of the final resource zip.
+  """
+  files_to_zip = []
+  path_info = resource_utils.ResourceInfoFile()
+  for index, resource_dir in enumerate(resource_dirs):
+    attributed_aar = None
+    if not resource_dir.startswith('..'):
+      aar_source_info_path = os.path.join(
+          os.path.dirname(resource_dir), 'source.info')
+      if os.path.exists(aar_source_info_path):
+        attributed_aar = jar_info_utils.ReadAarSourceInfo(aar_source_info_path)
+
+    for path, archive_path in resource_utils.IterResourceFilesInDirectories(
+        [resource_dir], ignore_pattern):
+      attributed_path = path
+      if attributed_aar:
+        attributed_path = os.path.join(attributed_aar, 'res',
+                                       path[len(resource_dir) + 1:])
+      # Use the non-prefixed archive_path in the .info file.
+      path_info.AddMapping(archive_path, attributed_path)
+
+      # Prefix each entry with its dir index so same-named files from
+      # different res/ dirs do not collide inside the zip.
+      resource_dir_name = os.path.basename(resource_dir)
+      archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path)
+      files_to_zip.append((archive_path, path))
+
+  path_info.Write(zip_path + '.info')
+
+  with zipfile.ZipFile(zip_path, 'w') as z:
+    # This magic comment signals to resource_utils.ExtractDeps that this zip is
+    # not just the contents of a single res dir, without the encapsulating res/
+    # (like the outputs of android_generated_resources targets), but instead has
+    # the contents of possibly multiple res/ dirs each within an encapsulating
+    # directory within the zip.
+    z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING
+    build_utils.DoZip(files_to_zip, z)
+
+
+def _GenerateRTxt(options, r_txt_path):
+  """Generate R.txt file.
+
+  Args:
+    options: The command-line options tuple.
+    r_txt_path: Locates where the R.txt file goes.
+  """
+  ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+  if options.strip_drawables:
+    ignore_pattern += ':*drawable*'
+
+  resources_parser.RTxtGenerator(options.resource_dirs,
+                                 ignore_pattern).WriteRTxtFile(r_txt_path)
+
+
+def _OnStaleMd5(options):
+  """Produces the R.txt and resources zip; invoked only when inputs changed.
+
+  Args:
+    options: The command-line options.
+  """
+  with resource_utils.BuildContext() as build:
+    if options.sources and not options.allow_missing_resources:
+      _CheckAllFilesListed(options.sources, options.resource_dirs)
+    # A pre-existing R.txt (--r-text-in) takes precedence over generating one.
+    if options.r_text_in:
+      r_txt_path = options.r_text_in
+    else:
+      _GenerateRTxt(options, build.r_txt_path)
+      r_txt_path = build.r_txt_path
+
+    if options.r_text_out:
+      shutil.copyfile(r_txt_path, options.r_text_out)
+
+    if options.resource_zip_out:
+      ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN
+      if options.strip_drawables:
+        ignore_pattern += ':*drawable*'
+      _ZipResources(options.resource_dirs, options.resource_zip_out,
+                    ignore_pattern)
+
+
+def main(args):
+  """Entry point: computes inputs/outputs, then runs _OnStaleMd5 if stale."""
+  args = build_utils.ExpandFileArgs(args)
+  options = _ParseArgs(args)
+
+  # Order of these must match order specified in GN so that the correct one
+  # appears first in the depfile.
+  output_paths = [
+      options.resource_zip_out,
+      options.resource_zip_out + '.info',
+      options.r_text_out,
+  ]
+
+  input_paths = [options.res_sources_path]
+  if options.r_text_in:
+    input_paths += [options.r_text_in]
+
+  # Resource files aren't explicitly listed in GN. Listing them in the depfile
+  # ensures the target will be marked stale when resource files are removed.
+  depfile_deps = []
+  resource_names = []
+  for resource_dir in options.resource_dirs:
+    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
+      # Don't list the empty .keep file in depfile. Since it doesn't end up
+      # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors
+      # if ever moved.
+      if not resource_file.endswith(os.path.join('empty', '.keep')):
+        input_paths.append(resource_file)
+        depfile_deps.append(resource_file)
+      resource_names.append(os.path.relpath(resource_file, resource_dir))
+
+  # Resource filenames matter to the output, so add them to strings as well.
+  # This matters if a file is renamed but not changed (http://crbug.com/597126).
+  input_strings = sorted(resource_names) + [
+      options.strip_drawables,
+  ]
+
+  # Since android_resources targets like *__all_dfm_resources depend on java
+  # targets that they do not need (in reality it only needs the transitive
+  # resource targets that those java targets depend on), md5_check is used to
+  # prevent outputs from being re-written when real inputs have not changed.
+  md5_check.CallAndWriteDepfileIfStale(lambda: _OnStaleMd5(options),
+                                       options,
+                                       input_paths=input_paths,
+                                       input_strings=input_strings,
+                                       output_paths=output_paths,
+                                       depfile_deps=depfile_deps)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/prepare_resources.pydeps b/third_party/libwebrtc/build/android/gyp/prepare_resources.pydeps
new file mode 100644
index 0000000000..8136e733ef
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/prepare_resources.pydeps
@@ -0,0 +1,35 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+../../print_python_deps.py
+prepare_resources.py
+util/__init__.py
+util/build_utils.py
+util/jar_info_utils.py
+util/md5_check.py
+util/resource_utils.py
+util/resources_parser.py
diff --git a/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.py b/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.py
new file mode 100755
index 0000000000..52645d9b16
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def main(args):
+  """Strips a native prebuilt library and preserves the unstripped original.
+
+  Runs the given strip tool (eu-strip, per the comment below) on --input-path,
+  writing the stripped library to --stripped-output-path, then copies the
+  original to --unstripped-output-path.
+  """
+  parser = argparse.ArgumentParser(args)
+  parser.add_argument('--strip-path', required=True, help='')
+  parser.add_argument('--input-path', required=True, help='')
+  parser.add_argument('--stripped-output-path', required=True, help='')
+  parser.add_argument('--unstripped-output-path', required=True, help='')
+  options = parser.parse_args(args)
+
+  # eu-strip's output keeps mode from source file which might not be writable
+  # thus it fails to override its output on the next run. AtomicOutput fixes
+  # the issue.
+  with build_utils.AtomicOutput(options.stripped_output_path) as out:
+    cmd = [
+        options.strip_path,
+        options.input_path,
+        '-o',
+        out.name,
+    ]
+    build_utils.CheckOutput(cmd)
+  shutil.copyfile(options.input_path, options.unstripped_output_path)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.pydeps b/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.pydeps
new file mode 100644
index 0000000000..8e2012aceb
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/process_native_prebuilt.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py
+../../gn_helpers.py
+process_native_prebuilt.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/proguard.py b/third_party/libwebrtc/build/android/gyp/proguard.py
new file mode 100755
index 0000000000..9da100e42d
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proguard.py
@@ -0,0 +1,710 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+from collections import defaultdict
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+import dex
+import dex_jdk_libs
+from pylib.dex import dex_parser
+from util import build_utils
+from util import diff_utils
+
+_API_LEVEL_VERSION_CODE = [
+ (21, 'L'),
+ (22, 'LollipopMR1'),
+ (23, 'M'),
+ (24, 'N'),
+ (25, 'NMR1'),
+ (26, 'O'),
+ (27, 'OMR1'),
+ (28, 'P'),
+ (29, 'Q'),
+ (30, 'R'),
+ (31, 'S'),
+]
+
+
+def _ParseOptions():
+ args = build_utils.ExpandFileArgs(sys.argv[1:])
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--r8-path',
+ required=True,
+ help='Path to the R8.jar to use.')
+ parser.add_argument(
+ '--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
+ parser.add_argument('--input-paths',
+ action='append',
+ required=True,
+ help='GN-list of .jar files to optimize.')
+ parser.add_argument('--desugar-jdk-libs-jar',
+ help='Path to desugar_jdk_libs.jar.')
+ parser.add_argument('--desugar-jdk-libs-configuration-jar',
+ help='Path to desugar_jdk_libs_configuration.jar.')
+ parser.add_argument('--output-path', help='Path to the generated .jar file.')
+ parser.add_argument(
+ '--proguard-configs',
+ action='append',
+ required=True,
+ help='GN-list of configuration files.')
+ parser.add_argument(
+ '--apply-mapping', help='Path to ProGuard mapping to apply.')
+ parser.add_argument(
+ '--mapping-output',
+ required=True,
+ help='Path for ProGuard to output mapping file to.')
+ parser.add_argument(
+ '--extra-mapping-output-paths',
+ help='GN-list of additional paths to copy output mapping file to.')
+ parser.add_argument(
+ '--classpath',
+ action='append',
+ help='GN-list of .jar files to include as libraries.')
+ parser.add_argument('--main-dex-rules-path',
+ action='append',
+ help='Path to main dex rules for multidex.')
+ parser.add_argument(
+ '--min-api', help='Minimum Android API level compatibility.')
+ parser.add_argument('--enable-obfuscation',
+ action='store_true',
+ help='Minify symbol names')
+ parser.add_argument(
+ '--verbose', '-v', action='store_true', help='Print all ProGuard output')
+ parser.add_argument(
+ '--repackage-classes', help='Package all optimized classes are put in.')
+ parser.add_argument(
+ '--disable-outlining',
+ action='store_true',
+ help='Disable the outlining optimization provided by R8.')
+ parser.add_argument(
+ '--disable-checks',
+ action='store_true',
+ help='Disable -checkdiscard directives and missing symbols check')
+ parser.add_argument('--sourcefile', help='Value for source file attribute')
+ parser.add_argument(
+ '--force-enable-assertions',
+ action='store_true',
+ help='Forcefully enable javac generated assertion code.')
+ parser.add_argument(
+ '--feature-jars',
+ action='append',
+ help='GN list of path to jars which comprise the corresponding feature.')
+ parser.add_argument(
+ '--dex-dest',
+ action='append',
+ dest='dex_dests',
+ help='Destination for dex file of the corresponding feature.')
+ parser.add_argument(
+ '--feature-name',
+ action='append',
+ dest='feature_names',
+ help='The name of the feature module.')
+ parser.add_argument(
+ '--uses-split',
+ action='append',
+ help='List of name pairs separated by : mapping a feature module to a '
+ 'dependent feature module.')
+ parser.add_argument(
+ '--keep-rules-targets-regex',
+ metavar='KEEP_RULES_REGEX',
+ help='If passed outputs keep rules for references from all other inputs '
+ 'to the subset of inputs that satisfy the KEEP_RULES_REGEX.')
+ parser.add_argument(
+ '--keep-rules-output-path',
+ help='Output path to the keep rules for references to the '
+ '--keep-rules-targets-regex inputs from the rest of the inputs.')
+ parser.add_argument('--warnings-as-errors',
+ action='store_true',
+ help='Treat all warnings as errors.')
+ parser.add_argument('--show-desugar-default-interface-warnings',
+ action='store_true',
+ help='Enable desugaring warnings.')
+ parser.add_argument('--dump-inputs',
+ action='store_true',
+ help='Use when filing R8 bugs to capture inputs.'
+ ' Stores inputs to r8inputs.zip')
+ parser.add_argument(
+ '--stamp',
+ help='File to touch upon success. Mutually exclusive with --output-path')
+ parser.add_argument('--desugared-library-keep-rule-output',
+ help='Path to desugared library keep rule output file.')
+
+ diff_utils.AddCommandLineFlags(parser)
+ options = parser.parse_args(args)
+
+ if options.feature_names:
+ if options.output_path:
+ parser.error('Feature splits cannot specify an output in GN.')
+ if not options.actual_file and not options.stamp:
+ parser.error('Feature splits require a stamp file as output.')
+ elif not options.output_path:
+ parser.error('Output path required when feature splits aren\'t used')
+
+ if bool(options.keep_rules_targets_regex) != bool(
+ options.keep_rules_output_path):
+ raise Exception('You must path both --keep-rules-targets-regex and '
+ '--keep-rules-output-path')
+
+ options.classpath = build_utils.ParseGnList(options.classpath)
+ options.proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+ options.input_paths = build_utils.ParseGnList(options.input_paths)
+ options.extra_mapping_output_paths = build_utils.ParseGnList(
+ options.extra_mapping_output_paths)
+
+ if options.feature_names:
+ if 'base' not in options.feature_names:
+ parser.error('"base" feature required when feature arguments are used.')
+ if len(options.feature_names) != len(options.feature_jars) or len(
+ options.feature_names) != len(options.dex_dests):
+ parser.error('Invalid feature argument lengths.')
+
+ options.feature_jars = [
+ build_utils.ParseGnList(x) for x in options.feature_jars
+ ]
+
+ split_map = {}
+ if options.uses_split:
+ for split_pair in options.uses_split:
+ child, parent = split_pair.split(':')
+ for name in (child, parent):
+ if name not in options.feature_names:
+ parser.error('"%s" referenced in --uses-split not present.' % name)
+ split_map[child] = parent
+ options.uses_split = split_map
+
+ return options
+
+
+class _SplitContext(object):
+ def __init__(self, name, output_path, input_jars, work_dir, parent_name=None):
+ self.name = name
+ self.parent_name = parent_name
+ self.input_jars = set(input_jars)
+ self.final_output_path = output_path
+ self.staging_dir = os.path.join(work_dir, name)
+ os.mkdir(self.staging_dir)
+
+ def CreateOutput(self, has_imported_lib=False, keep_rule_output=None):
+ found_files = build_utils.FindInDirectory(self.staging_dir)
+ if not found_files:
+ raise Exception('Missing dex outputs in {}'.format(self.staging_dir))
+
+ if self.final_output_path.endswith('.dex'):
+ if has_imported_lib:
+ raise Exception(
+ 'Trying to create a single .dex file, but a dependency requires '
+ 'JDK Library Desugaring (which necessitates a second file).'
+ 'Refer to %s to see what desugaring was required' %
+ keep_rule_output)
+ if len(found_files) != 1:
+ raise Exception('Expected exactly 1 dex file output, found: {}'.format(
+ '\t'.join(found_files)))
+ shutil.move(found_files[0], self.final_output_path)
+ return
+
+ # Add to .jar using Python rather than having R8 output to a .zip directly
+ # in order to disable compression of the .jar, saving ~500ms.
+ tmp_jar_output = self.staging_dir + '.jar'
+ build_utils.DoZip(found_files, tmp_jar_output, base_dir=self.staging_dir)
+ shutil.move(tmp_jar_output, self.final_output_path)
+
+
+def _DeDupeInputJars(split_contexts_by_name):
+  """Moves jars used by multiple splits into common ancestors.
+
+  Updates |input_jars| for each _SplitContext.
+
+  Args:
+    split_contexts_by_name: Dict mapping split name -> _SplitContext. Must
+      contain a 'base' entry.
+  """
+
+  def count_ancestors(split_context):
+    # Depth of a split in the parent tree (base is depth 0).
+    ret = 0
+    if split_context.parent_name:
+      ret += 1
+      ret += count_ancestors(split_contexts_by_name[split_context.parent_name])
+    return ret
+
+  base_context = split_contexts_by_name['base']
+  # Sort by tree depth to ensure children are visited before their parents.
+  sorted_contexts = list(split_contexts_by_name.values())
+  sorted_contexts.remove(base_context)
+  sorted_contexts.sort(key=count_ancestors, reverse=True)
+
+  # If a jar is present in multiple siblings, promote it to their parent.
+  seen_jars_by_parent = defaultdict(set)
+  for split_context in sorted_contexts:
+    seen_jars = seen_jars_by_parent[split_context.parent_name]
+    new_dupes = seen_jars.intersection(split_context.input_jars)
+    parent_context = split_contexts_by_name[split_context.parent_name]
+    parent_context.input_jars.update(new_dupes)
+    seen_jars.update(split_context.input_jars)
+
+  def ancestor_jars(parent_name, dest=None):
+    # Union of input_jars across all transitive ancestors of parent_name.
+    dest = dest or set()
+    if not parent_name:
+      return dest
+    parent_context = split_contexts_by_name[parent_name]
+    dest.update(parent_context.input_jars)
+    return ancestor_jars(parent_context.parent_name, dest)
+
+  # Now that jars have been moved up the tree, remove those that appear in
+  # ancestors.
+  for split_context in sorted_contexts:
+    split_context.input_jars -= ancestor_jars(split_context.parent_name)
+
+
+def _OptimizeWithR8(options,
+ config_paths,
+ libraries,
+ dynamic_config_data,
+ print_stdout=False):
+ with build_utils.TempDir() as tmp_dir:
+ if dynamic_config_data:
+ dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags')
+ with open(dynamic_config_path, 'w') as f:
+ f.write(dynamic_config_data)
+ config_paths = config_paths + [dynamic_config_path]
+
+ tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt')
+ # If there is no output (no classes are kept), this prevents this script
+ # from failing.
+ build_utils.Touch(tmp_mapping_path)
+
+ tmp_output = os.path.join(tmp_dir, 'r8out')
+ os.mkdir(tmp_output)
+
+ split_contexts_by_name = {}
+ if options.feature_names:
+ for name, dest_dex, input_jars in zip(options.feature_names,
+ options.dex_dests,
+ options.feature_jars):
+ parent_name = options.uses_split.get(name)
+ if parent_name is None and name != 'base':
+ parent_name = 'base'
+ split_context = _SplitContext(name,
+ dest_dex,
+ input_jars,
+ tmp_output,
+ parent_name=parent_name)
+ split_contexts_by_name[name] = split_context
+ else:
+ # Base context will get populated via "extra_jars" below.
+ split_contexts_by_name['base'] = _SplitContext('base',
+ options.output_path, [],
+ tmp_output)
+ base_context = split_contexts_by_name['base']
+
+ # R8 OOMs with the default xmx=1G.
+ cmd = build_utils.JavaCmd(options.warnings_as_errors, xmx='2G') + [
+ '-Dcom.android.tools.r8.allowTestProguardOptions=1',
+ '-Dcom.android.tools.r8.disableHorizontalClassMerging=1',
+ ]
+ if options.disable_outlining:
+ cmd += ['-Dcom.android.tools.r8.disableOutlining=1']
+ if options.dump_inputs:
+ cmd += ['-Dcom.android.tools.r8.dumpinputtofile=r8inputs.zip']
+ cmd += [
+ '-cp',
+ options.r8_path,
+ 'com.android.tools.r8.R8',
+ '--no-data-resources',
+ '--output',
+ base_context.staging_dir,
+ '--pg-map-output',
+ tmp_mapping_path,
+ ]
+
+ if options.disable_checks:
+ # Info level priority logs are not printed by default.
+ cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'info']
+
+ if options.desugar_jdk_libs_json:
+ cmd += [
+ '--desugared-lib',
+ options.desugar_jdk_libs_json,
+ '--desugared-lib-pg-conf-output',
+ options.desugared_library_keep_rule_output,
+ ]
+
+ if options.min_api:
+ cmd += ['--min-api', options.min_api]
+
+ if options.force_enable_assertions:
+ cmd += ['--force-enable-assertions']
+
+ for lib in libraries:
+ cmd += ['--lib', lib]
+
+ for config_file in config_paths:
+ cmd += ['--pg-conf', config_file]
+
+ if options.main_dex_rules_path:
+ for main_dex_rule in options.main_dex_rules_path:
+ cmd += ['--main-dex-rules', main_dex_rule]
+
+ _DeDupeInputJars(split_contexts_by_name)
+
+ # Add any extra inputs to the base context (e.g. desugar runtime).
+ extra_jars = set(options.input_paths)
+ for split_context in split_contexts_by_name.values():
+ extra_jars -= split_context.input_jars
+ base_context.input_jars.update(extra_jars)
+
+ for split_context in split_contexts_by_name.values():
+ if split_context is base_context:
+ continue
+ for in_jar in sorted(split_context.input_jars):
+ cmd += ['--feature', in_jar, split_context.staging_dir]
+
+ cmd += sorted(base_context.input_jars)
+
+ try:
+ stderr_filter = dex.CreateStderrFilter(
+ options.show_desugar_default_interface_warnings)
+ logging.debug('Running R8')
+ build_utils.CheckOutput(cmd,
+ print_stdout=print_stdout,
+ stderr_filter=stderr_filter,
+ fail_on_output=options.warnings_as_errors)
+ except build_utils.CalledProcessError:
+ # Python will print the original exception as well.
+ raise Exception(
+ 'R8 failed. Please see '
+ 'https://chromium.googlesource.com/chromium/src/+/HEAD/build/'
+ 'android/docs/java_optimization.md#Debugging-common-failures')
+
+ base_has_imported_lib = False
+ if options.desugar_jdk_libs_json:
+ logging.debug('Running L8')
+ existing_files = build_utils.FindInDirectory(base_context.staging_dir)
+ jdk_dex_output = os.path.join(base_context.staging_dir,
+ 'classes%d.dex' % (len(existing_files) + 1))
+ # Use -applymapping to avoid name collisions.
+ l8_dynamic_config_path = os.path.join(tmp_dir, 'l8_dynamic_config.flags')
+ with open(l8_dynamic_config_path, 'w') as f:
+ f.write("-applymapping '{}'\n".format(tmp_mapping_path))
+ # Pass the dynamic config so that obfuscation options are picked up.
+ l8_config_paths = [dynamic_config_path, l8_dynamic_config_path]
+ if os.path.exists(options.desugared_library_keep_rule_output):
+ l8_config_paths.append(options.desugared_library_keep_rule_output)
+
+ base_has_imported_lib = dex_jdk_libs.DexJdkLibJar(
+ options.r8_path, options.min_api, options.desugar_jdk_libs_json,
+ options.desugar_jdk_libs_jar,
+ options.desugar_jdk_libs_configuration_jar, jdk_dex_output,
+ options.warnings_as_errors, l8_config_paths)
+ if int(options.min_api) >= 24 and base_has_imported_lib:
+ with open(jdk_dex_output, 'rb') as f:
+ dexfile = dex_parser.DexFile(bytearray(f.read()))
+ for m in dexfile.IterMethodSignatureParts():
+ print('{}#{}'.format(m[0], m[2]))
+ assert False, (
+ 'Desugared JDK libs are disabled on Monochrome and newer - see '
+ 'crbug.com/1159984 for details, and see above list for desugared '
+ 'classes and methods.')
+
+ logging.debug('Collecting ouputs')
+ base_context.CreateOutput(base_has_imported_lib,
+ options.desugared_library_keep_rule_output)
+ for split_context in split_contexts_by_name.values():
+ if split_context is not base_context:
+ split_context.CreateOutput()
+
+ with open(options.mapping_output, 'w') as out_file, \
+ open(tmp_mapping_path) as in_file:
+ # Mapping files generated by R8 include comments that may break
+ # some of our tooling so remove those (specifically: apkanalyzer).
+ out_file.writelines(l for l in in_file if not l.startswith('#'))
+ return base_context
+
+
+def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
+                     keep_rules_output):
+  """Runs R8 TraceReferences to generate keep rules for a subset of inputs.
+
+  Inputs matching targets_re_string are passed as --target (the code to keep);
+  all remaining inputs are passed as --source (the code referencing it).
+
+  Args:
+    r8_path: Path to R8.jar.
+    input_paths: All input .jar paths.
+    classpath: Library .jar paths, passed via --lib.
+    targets_re_string: Regex selecting which inputs are targets.
+    keep_rules_output: Path the generated keep rules are written to.
+  """
+  cmd = build_utils.JavaCmd(False) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--keep-rules', '--output', keep_rules_output
+  ]
+  targets_re = re.compile(targets_re_string)
+  for path in input_paths:
+    if targets_re.search(path):
+      cmd += ['--target', path]
+    else:
+      cmd += ['--source', path]
+  for path in classpath:
+    cmd += ['--lib', path]
+
+  build_utils.CheckOutput(cmd, print_stderr=False, fail_on_output=False)
+
+
+def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
+                            error_title):
+  """Runs R8 TraceReferences in --check mode to find dangling references.
+
+  |dex_files| are traced as --source against |classpath| as --lib; any
+  reference to a symbol missing from both is reported on stderr. Known
+  benign references are filtered out; if anything remains, |error_title|
+  plus debugging tips are prepended and the output fails the build when
+  |warnings_as_errors| is set.
+  """
+  cmd = build_utils.JavaCmd(warnings_as_errors) + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--check'
+  ]
+
+  for path in classpath:
+    cmd += ['--lib', path]
+  for path in dex_files:
+    cmd += ['--source', path]
+
+  def stderr_filter(stderr):
+    # Lines containing any of these substrings are expected and dropped.
+    ignored_lines = [
+        # Summary contains warning count, which our filtering makes wrong.
+        'Warning: Tracereferences found',
+
+        # TODO(agrieve): Create interface jars for these missing classes rather
+        # than allowlisting here.
+        'dalvik.system',
+        'libcore.io',
+        'sun.misc.Unsafe',
+
+        # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
+        'android.text.StaticLayout.<init>',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
+        # PlatformProvider.
+        'com.google.common.flogger.backend.google.GooglePlatform',
+        'com.google.common.flogger.backend.system.DefaultPlatform',
+
+        # trichrome_webview_google_bundle contains this missing reference.
+        # TODO(crbug.com/1142530): Fix this missing reference properly.
+        'org.chromium.build.NativeLibraries',
+
+        # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
+        'java.lang.instrument.ClassFileTransformer',
+        'java.lang.instrument.IllegalClassFormatException',
+        'java.lang.instrument.Instrumentation',
+        'java.lang.management.ManagementFactory',
+        'javax.management.MBeanServer',
+        'javax.management.ObjectInstance',
+        'javax.management.ObjectName',
+        'javax.management.StandardMBean',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Firebase's
+        # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
+        'kotlin.KotlinVersion',
+    ]
+
+    # Indented lines are the individual reported items; remember whether any
+    # existed before filtering so empty headings can be discarded below.
+    had_unfiltered_items = '  ' in stderr
+    stderr = build_utils.FilterLines(
+        stderr, '|'.join(re.escape(x) for x in ignored_lines))
+    if stderr:
+      if '  ' in stderr:
+        stderr = error_title + """
+Tip: Build with:
+  is_java_debug=false
+  treat_warnings_as_errors=false
+  enable_proguard_obfuscation=false
+ and then use dexdump to see which class(s) reference them.
+
+ E.g.:
+ third_party/android_sdk/public/build-tools/*/dexdump -d \
+out/Release/apks/YourApk.apk > dex.txt
+""" + stderr
+
+        if 'FragmentActivity' in stderr:
+          stderr += """
+You may need to update build configs to run FragmentActivityReplacer for
+additional targets. See
+https://chromium.googlesource.com/chromium/src.git/+/main/docs/ui/android/bytecode_rewriting.md.
+"""
+      elif had_unfiltered_items:
+        # Left only with empty headings. All indented items filtered out.
+        stderr = ''
+    return stderr
+
+  logging.debug('cmd: %s', ' '.join(cmd))
+  build_utils.CheckOutput(cmd,
+                          print_stdout=True,
+                          stderr_filter=stderr_filter,
+                          fail_on_output=warnings_as_errors)
+
+
+def _CombineConfigs(configs, dynamic_config_data, exclude_generated=False):
+  """Concatenates ProGuard config files into one annotated string.
+
+  Each non-empty config is emitted under a '# File:' heading, with Windows
+  line endings and '#generated:<N>' counters stripped so the combined file
+  diffs cleanly. |dynamic_config_data| (flag-derived rules), if any, is
+  appended last. When |exclude_generated| is set, *.resources.proguard.txt
+  files are omitted.
+  """
+  ret = []
+
+  # Sort in this way so //clank versions of the same libraries will sort
+  # to the same spot in the file.
+  def sort_key(path):
+    return tuple(reversed(path.split(os.path.sep)))
+
+  for config in sorted(configs, key=sort_key):
+    if exclude_generated and config.endswith('.resources.proguard.txt'):
+      continue
+
+    with open(config) as config_file:
+      contents = config_file.read().rstrip()
+
+    if not contents.strip():
+      # Ignore empty files.
+      continue
+
+    # Fix up line endings (third_party configs can have windows endings).
+    contents = contents.replace('\r', '')
+    # Remove numbers from generated rule comments to make file more
+    # diff'able.
+    contents = re.sub(r' #generated:\d+', '', contents)
+    ret.append('# File: ' + config)
+    ret.append(contents)
+    ret.append('')
+
+  if dynamic_config_data:
+    ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
+    ret.append(dynamic_config_data)
+    ret.append('')
+  return '\n'.join(ret)
+
+
+def _CreateDynamicConfig(options):
+  """Builds ProGuard rules derived from command-line flags.
+
+  Emits -ignorewarnings plus rules controlled by options.sourcefile,
+  options.enable_obfuscation and options.apply_mapping, and conditional
+  keep rules for each VerifiesOn<VersionCode> annotation whose API level
+  is above options.min_api. Returns the rules as a single string.
+  """
+  # Our scripts already fail on output. Adding -ignorewarnings makes R8 output
+  # warnings rather than throw exceptions so we can selectively ignore them via
+  # dex.py's ignore list. Context: https://crbug.com/1180222
+  ret = ["-ignorewarnings"]
+
+  if options.sourcefile:
+    ret.append("-renamesourcefileattribute '%s' # OMIT FROM EXPECTATIONS" %
+               options.sourcefile)
+
+  if options.enable_obfuscation:
+    ret.append("-repackageclasses ''")
+  else:
+    ret.append("-dontobfuscate")
+
+  if options.apply_mapping:
+    ret.append("-applymapping '%s'" % options.apply_mapping)
+
+  # min_api may be unset; treat it as 0 so every annotation gets keep rules.
+  _min_api = int(options.min_api) if options.min_api else 0
+  for api_level, version_code in _API_LEVEL_VERSION_CODE:
+    annotation_name = 'org.chromium.base.annotations.VerifiesOn' + version_code
+    if api_level > _min_api:
+      ret.append('-keep @interface %s' % annotation_name)
+      ret.append("""\
+-if @%s class * {
+    *** *(...);
+}
+-keep,allowobfuscation class <1> {
+    *** <2>(...);
+}""" % annotation_name)
+      ret.append("""\
+-keepclassmembers,allowobfuscation class ** {
+  @%s <methods>;
+}""" % annotation_name)
+  return '\n'.join(ret)
+
+
+def _VerifyNoEmbeddedConfigs(jar_paths):
+  """Exits with an error if any jar embeds a META-INF/proguard/ config.
+
+  Scans every jar in |jar_paths|; reports all offenders on stderr before
+  calling sys.exit(1) so a single run surfaces every violation.
+  """
+  failed = False
+  for jar_path in jar_paths:
+    with zipfile.ZipFile(jar_path) as z:
+      for name in z.namelist():
+        if name.startswith('META-INF/proguard/'):
+          failed = True
+          sys.stderr.write("""\
+Found embedded proguard config within {}.
+Embedded configs are not permitted (https://crbug.com/989505)
+""".format(jar_path))
+          break
+  if failed:
+    sys.exit(1)
+
+
+def _ContainsDebuggingConfig(config_str):
+  """Returns True if |config_str| uses a ProGuard debugging directive."""
+  debugging_configs = ('-whyareyoukeeping', '-whyareyounotinlining')
+  return any(config in config_str for config in debugging_configs)
+
+
+def _MaybeWriteStampAndDepFile(options, inputs):
+  """Touches the stamp file and writes the depfile when requested.
+
+  The depfile's target is the stamp when one exists, otherwise
+  options.output_path; |inputs| become its dependencies.
+  """
+  output = options.output_path
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+    output = options.stamp
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, output, inputs=inputs)
+
+
+def main():
+  """Entry point: runs R8/ProGuard and post-optimization checks.
+
+  Flow: build configs -> optionally verify expectations or emit keep rules
+  and return early -> optimize with R8 -> trace-reference checks -> copy
+  mapping outputs -> write stamp/depfile.
+  """
+  build_utils.InitLogging('PROGUARD_DEBUG')
+  options = _ParseOptions()
+
+  logging.debug('Preparing configs')
+  proguard_configs = options.proguard_configs
+
+  # ProGuard configs that are derived from flags.
+  dynamic_config_data = _CreateDynamicConfig(options)
+
+  # Merge all given configs with the flag-derived rules into one file.
+  merged_configs = _CombineConfigs(
+      proguard_configs, dynamic_config_data, exclude_generated=True)
+  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose
+
+  if options.expected_file:
+    diff_utils.CheckExpectations(merged_configs, options)
+    if options.only_verify_expectations:
+      build_utils.WriteDepfile(options.depfile,
+                               options.actual_file,
+                               inputs=options.proguard_configs)
+      return
+
+  logging.debug('Looking for embedded configs')
+  libraries = []
+  for p in options.classpath:
+    # TODO(bjoyce): Remove filter once old android support libraries are gone.
+    # Fix for having Library class extend program class dependency problem.
+    if 'com_android_support' in p or 'android_support_test' in p:
+      continue
+    # If a jar is part of input no need to include it as library jar.
+    if p not in libraries and p not in options.input_paths:
+      libraries.append(p)
+  _VerifyNoEmbeddedConfigs(options.input_paths + libraries)
+  if options.keep_rules_output_path:
+    _OutputKeepRules(options.r8_path, options.input_paths, options.classpath,
+                     options.keep_rules_targets_regex,
+                     options.keep_rules_output_path)
+    return
+
+  base_context = _OptimizeWithR8(options, proguard_configs, libraries,
+                                 dynamic_config_data, print_stdout)
+
+  if not options.disable_checks:
+    logging.debug('Running tracereferences')
+    all_dex_files = []
+    if options.output_path:
+      all_dex_files.append(options.output_path)
+    if options.dex_dests:
+      all_dex_files.extend(options.dex_dests)
+    error_title = 'DEX contains references to non-existent symbols after R8.'
+    _CheckForMissingSymbols(options.r8_path, all_dex_files, options.classpath,
+                            options.warnings_as_errors, error_title)
+    # Also ensure that base module doesn't have any references to child dex
+    # symbols.
+    # TODO(agrieve): Remove this check once r8 desugaring is fixed to not put
+    # synthesized classes in the base module.
+    error_title = 'Base module DEX contains references symbols within DFMs.'
+    _CheckForMissingSymbols(options.r8_path, [base_context.final_output_path],
+                            options.classpath, options.warnings_as_errors,
+                            error_title)
+
+  for output in options.extra_mapping_output_paths:
+    shutil.copy(options.mapping_output, output)
+
+  inputs = options.proguard_configs + options.input_paths + libraries
+  if options.apply_mapping:
+    inputs.append(options.apply_mapping)
+
+  _MaybeWriteStampAndDepFile(options, inputs)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/third_party/libwebrtc/build/android/gyp/proguard.pydeps b/third_party/libwebrtc/build/android/gyp/proguard.pydeps
new file mode 100644
index 0000000000..c1de73b57e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proguard.pydeps
@@ -0,0 +1,16 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../gn_helpers.py
+../../print_python_deps.py
+../convert_dex_profile.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+dex.py
+dex_jdk_libs.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
+util/zipalign.py
diff --git a/third_party/libwebrtc/build/android/gyp/proto/Configuration_pb2.py b/third_party/libwebrtc/build/android/gyp/proto/Configuration_pb2.py
new file mode 100644
index 0000000000..859183089a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proto/Configuration_pb2.py
@@ -0,0 +1,697 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Configuration.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='frameworks/base/tools/aapt2/Configuration.proto',
+ package='aapt.pb',
+ syntax='proto3',
+ serialized_options=_b('\n\020com.android.aapt'),
+ serialized_pb=_b('\n/frameworks/base/tools/aapt2/Configuration.proto\x12\x07\x61\x61pt.pb\"\xd9\x14\n\rConfiguration\x12\x0b\n\x03mcc\x18\x01 \x01(\r\x12\x0b\n\x03mnc\x18\x02 \x01(\r\x12\x0e\n\x06locale\x18\x03 \x01(\t\x12@\n\x10layout_direction\x18\x04 \x01(\x0e\x32&.aapt.pb.Configuration.LayoutDirection\x12\x14\n\x0cscreen_width\x18\x05 \x01(\r\x12\x15\n\rscreen_height\x18\x06 \x01(\r\x12\x17\n\x0fscreen_width_dp\x18\x07 \x01(\r\x12\x18\n\x10screen_height_dp\x18\x08 \x01(\r\x12 \n\x18smallest_screen_width_dp\x18\t \x01(\r\x12\x43\n\x12screen_layout_size\x18\n \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutSize\x12\x43\n\x12screen_layout_long\x18\x0b \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutLong\x12\x38\n\x0cscreen_round\x18\x0c \x01(\x0e\x32\".aapt.pb.Configuration.ScreenRound\x12?\n\x10wide_color_gamut\x18\r \x01(\x0e\x32%.aapt.pb.Configuration.WideColorGamut\x12\'\n\x03hdr\x18\x0e \x01(\x0e\x32\x1a.aapt.pb.Configuration.Hdr\x12\x37\n\x0borientation\x18\x0f \x01(\x0e\x32\".aapt.pb.Configuration.Orientation\x12\x37\n\x0cui_mode_type\x18\x10 \x01(\x0e\x32!.aapt.pb.Configuration.UiModeType\x12\x39\n\rui_mode_night\x18\x11 \x01(\x0e\x32\".aapt.pb.Configuration.UiModeNight\x12\x0f\n\x07\x64\x65nsity\x18\x12 \x01(\r\x12\x37\n\x0btouchscreen\x18\x13 \x01(\x0e\x32\".aapt.pb.Configuration.Touchscreen\x12\x36\n\x0bkeys_hidden\x18\x14 \x01(\x0e\x32!.aapt.pb.Configuration.KeysHidden\x12\x31\n\x08keyboard\x18\x15 \x01(\x0e\x32\x1f.aapt.pb.Configuration.Keyboard\x12\x34\n\nnav_hidden\x18\x16 \x01(\x0e\x32 .aapt.pb.Configuration.NavHidden\x12\x35\n\nnavigation\x18\x17 \x01(\x0e\x32!.aapt.pb.Configuration.Navigation\x12\x13\n\x0bsdk_version\x18\x18 \x01(\r\x12\x0f\n\x07product\x18\x19 
\x01(\t\"a\n\x0fLayoutDirection\x12\x1a\n\x16LAYOUT_DIRECTION_UNSET\x10\x00\x12\x18\n\x14LAYOUT_DIRECTION_LTR\x10\x01\x12\x18\n\x14LAYOUT_DIRECTION_RTL\x10\x02\"\xaa\x01\n\x10ScreenLayoutSize\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_UNSET\x10\x00\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_SMALL\x10\x01\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_NORMAL\x10\x02\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_LARGE\x10\x03\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_XLARGE\x10\x04\"m\n\x10ScreenLayoutLong\x12\x1c\n\x18SCREEN_LAYOUT_LONG_UNSET\x10\x00\x12\x1b\n\x17SCREEN_LAYOUT_LONG_LONG\x10\x01\x12\x1e\n\x1aSCREEN_LAYOUT_LONG_NOTLONG\x10\x02\"X\n\x0bScreenRound\x12\x16\n\x12SCREEN_ROUND_UNSET\x10\x00\x12\x16\n\x12SCREEN_ROUND_ROUND\x10\x01\x12\x19\n\x15SCREEN_ROUND_NOTROUND\x10\x02\"h\n\x0eWideColorGamut\x12\x1a\n\x16WIDE_COLOR_GAMUT_UNSET\x10\x00\x12\x1b\n\x17WIDE_COLOR_GAMUT_WIDECG\x10\x01\x12\x1d\n\x19WIDE_COLOR_GAMUT_NOWIDECG\x10\x02\"3\n\x03Hdr\x12\r\n\tHDR_UNSET\x10\x00\x12\x0e\n\nHDR_HIGHDR\x10\x01\x12\r\n\tHDR_LOWDR\x10\x02\"h\n\x0bOrientation\x12\x15\n\x11ORIENTATION_UNSET\x10\x00\x12\x14\n\x10ORIENTATION_PORT\x10\x01\x12\x14\n\x10ORIENTATION_LAND\x10\x02\x12\x16\n\x12ORIENTATION_SQUARE\x10\x03\"\xd7\x01\n\nUiModeType\x12\x16\n\x12UI_MODE_TYPE_UNSET\x10\x00\x12\x17\n\x13UI_MODE_TYPE_NORMAL\x10\x01\x12\x15\n\x11UI_MODE_TYPE_DESK\x10\x02\x12\x14\n\x10UI_MODE_TYPE_CAR\x10\x03\x12\x1b\n\x17UI_MODE_TYPE_TELEVISION\x10\x04\x12\x1a\n\x16UI_MODE_TYPE_APPLIANCE\x10\x05\x12\x16\n\x12UI_MODE_TYPE_WATCH\x10\x06\x12\x1a\n\x16UI_MODE_TYPE_VRHEADSET\x10\x07\"[\n\x0bUiModeNight\x12\x17\n\x13UI_MODE_NIGHT_UNSET\x10\x00\x12\x17\n\x13UI_MODE_NIGHT_NIGHT\x10\x01\x12\x1a\n\x16UI_MODE_NIGHT_NOTNIGHT\x10\x02\"m\n\x0bTouchscreen\x12\x15\n\x11TOUCHSCREEN_UNSET\x10\x00\x12\x17\n\x13TOUCHSCREEN_NOTOUCH\x10\x01\x12\x16\n\x12TOUCHSCREEN_STYLUS\x10\x02\x12\x16\n\x12TOUCHSCREEN_FINGER\x10\x03\"v\n\nKeysHidden\x12\x15\n\x11KEYS_HIDDEN_UNSET\x10\x00\x12\x1b\n\x17KEYS_HIDDEN_KEYSEXPOSED\x10\x01\x12\x1a\n\x16KEYS_HIDDEN_KEYSHIDDEN\x10\x02\x12\x18\n\x14
KEYS_HIDDEN_KEYSSOFT\x10\x03\"`\n\x08Keyboard\x12\x12\n\x0eKEYBOARD_UNSET\x10\x00\x12\x13\n\x0fKEYBOARD_NOKEYS\x10\x01\x12\x13\n\x0fKEYBOARD_QWERTY\x10\x02\x12\x16\n\x12KEYBOARD_TWELVEKEY\x10\x03\"V\n\tNavHidden\x12\x14\n\x10NAV_HIDDEN_UNSET\x10\x00\x12\x19\n\x15NAV_HIDDEN_NAVEXPOSED\x10\x01\x12\x18\n\x14NAV_HIDDEN_NAVHIDDEN\x10\x02\"}\n\nNavigation\x12\x14\n\x10NAVIGATION_UNSET\x10\x00\x12\x14\n\x10NAVIGATION_NONAV\x10\x01\x12\x13\n\x0fNAVIGATION_DPAD\x10\x02\x12\x18\n\x14NAVIGATION_TRACKBALL\x10\x03\x12\x14\n\x10NAVIGATION_WHEEL\x10\x04\x42\x12\n\x10\x63om.android.aaptb\x06proto3')
+)
+
+
+
+# NOTE(review): Everything below is protoc-generated (see "DO NOT EDIT"
+# header above). Each EnumDescriptor's serialized_start/serialized_end
+# indexes into DESCRIPTOR's serialized_pb, so these definitions must never
+# be modified by hand — regenerate from Configuration.proto instead.
+_CONFIGURATION_LAYOUTDIRECTION = _descriptor.EnumDescriptor(
+  name='LayoutDirection',
+  full_name='aapt.pb.Configuration.LayoutDirection',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_LTR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LAYOUT_DIRECTION_RTL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1119,
+  serialized_end=1216,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_LAYOUTDIRECTION)
+
+_CONFIGURATION_SCREENLAYOUTSIZE = _descriptor.EnumDescriptor(
+  name='ScreenLayoutSize',
+  full_name='aapt.pb.Configuration.ScreenLayoutSize',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_SMALL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_NORMAL', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_LARGE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_SIZE_XLARGE', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1219,
+  serialized_end=1389,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTSIZE)
+
+_CONFIGURATION_SCREENLAYOUTLONG = _descriptor.EnumDescriptor(
+  name='ScreenLayoutLong',
+  full_name='aapt.pb.Configuration.ScreenLayoutLong',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_LONG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_LAYOUT_LONG_NOTLONG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1391,
+  serialized_end=1500,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTLONG)
+
+_CONFIGURATION_SCREENROUND = _descriptor.EnumDescriptor(
+  name='ScreenRound',
+  full_name='aapt.pb.Configuration.ScreenRound',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_ROUND', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SCREEN_ROUND_NOTROUND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1502,
+  serialized_end=1590,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENROUND)
+
+_CONFIGURATION_WIDECOLORGAMUT = _descriptor.EnumDescriptor(
+  name='WideColorGamut',
+  full_name='aapt.pb.Configuration.WideColorGamut',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_WIDECG', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WIDE_COLOR_GAMUT_NOWIDECG', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1592,
+  serialized_end=1696,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_WIDECOLORGAMUT)
+
+_CONFIGURATION_HDR = _descriptor.EnumDescriptor(
+  name='Hdr',
+  full_name='aapt.pb.Configuration.Hdr',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='HDR_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_HIGHDR', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='HDR_LOWDR', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1698,
+  serialized_end=1749,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_HDR)
+
+_CONFIGURATION_ORIENTATION = _descriptor.EnumDescriptor(
+  name='Orientation',
+  full_name='aapt.pb.Configuration.Orientation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_PORT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_LAND', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ORIENTATION_SQUARE', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1751,
+  serialized_end=1855,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_ORIENTATION)
+
+_CONFIGURATION_UIMODETYPE = _descriptor.EnumDescriptor(
+  name='UiModeType',
+  full_name='aapt.pb.Configuration.UiModeType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_NORMAL', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_DESK', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_CAR', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_TELEVISION', index=4, number=4,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_APPLIANCE', index=5, number=5,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_WATCH', index=6, number=6,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_TYPE_VRHEADSET', index=7, number=7,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1858,
+  serialized_end=2073,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODETYPE)
+
+_CONFIGURATION_UIMODENIGHT = _descriptor.EnumDescriptor(
+  name='UiModeNight',
+  full_name='aapt.pb.Configuration.UiModeNight',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NIGHT', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='UI_MODE_NIGHT_NOTNIGHT', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2075,
+  serialized_end=2166,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODENIGHT)
+
+_CONFIGURATION_TOUCHSCREEN = _descriptor.EnumDescriptor(
+  name='Touchscreen',
+  full_name='aapt.pb.Configuration.Touchscreen',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_NOTOUCH', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_STYLUS', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TOUCHSCREEN_FINGER', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2168,
+  serialized_end=2277,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_TOUCHSCREEN)
+
+_CONFIGURATION_KEYSHIDDEN = _descriptor.EnumDescriptor(
+  name='KeysHidden',
+  full_name='aapt.pb.Configuration.KeysHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYS_HIDDEN_KEYSSOFT', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2279,
+  serialized_end=2397,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYSHIDDEN)
+
+_CONFIGURATION_KEYBOARD = _descriptor.EnumDescriptor(
+  name='Keyboard',
+  full_name='aapt.pb.Configuration.Keyboard',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_NOKEYS', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_QWERTY', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='KEYBOARD_TWELVEKEY', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2399,
+  serialized_end=2495,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYBOARD)
+
+_CONFIGURATION_NAVHIDDEN = _descriptor.EnumDescriptor(
+  name='NavHidden',
+  full_name='aapt.pb.Configuration.NavHidden',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVEXPOSED', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAV_HIDDEN_NAVHIDDEN', index=2, number=2,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2497,
+  serialized_end=2583,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVHIDDEN)
+
+_CONFIGURATION_NAVIGATION = _descriptor.EnumDescriptor(
+  name='Navigation',
+  full_name='aapt.pb.Configuration.Navigation',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_UNSET', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_NONAV', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_DPAD', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_TRACKBALL', index=3, number=3,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NAVIGATION_WHEEL', index=4, number=4,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=2585,
+  serialized_end=2710,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVIGATION)
+
+
+_CONFIGURATION = _descriptor.Descriptor(
+ name='Configuration',
+ full_name='aapt.pb.Configuration',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='mcc', full_name='aapt.pb.Configuration.mcc', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='mnc', full_name='aapt.pb.Configuration.mnc', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='locale', full_name='aapt.pb.Configuration.locale', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='layout_direction', full_name='aapt.pb.Configuration.layout_direction', index=3,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_width', full_name='aapt.pb.Configuration.screen_width', index=4,
+ number=5, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_height', full_name='aapt.pb.Configuration.screen_height', index=5,
+ number=6, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_width_dp', full_name='aapt.pb.Configuration.screen_width_dp', index=6,
+ number=7, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_height_dp', full_name='aapt.pb.Configuration.screen_height_dp', index=7,
+ number=8, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='smallest_screen_width_dp', full_name='aapt.pb.Configuration.smallest_screen_width_dp', index=8,
+ number=9, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_layout_size', full_name='aapt.pb.Configuration.screen_layout_size', index=9,
+ number=10, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_layout_long', full_name='aapt.pb.Configuration.screen_layout_long', index=10,
+ number=11, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='screen_round', full_name='aapt.pb.Configuration.screen_round', index=11,
+ number=12, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='wide_color_gamut', full_name='aapt.pb.Configuration.wide_color_gamut', index=12,
+ number=13, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='hdr', full_name='aapt.pb.Configuration.hdr', index=13,
+ number=14, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='orientation', full_name='aapt.pb.Configuration.orientation', index=14,
+ number=15, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='ui_mode_type', full_name='aapt.pb.Configuration.ui_mode_type', index=15,
+ number=16, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='ui_mode_night', full_name='aapt.pb.Configuration.ui_mode_night', index=16,
+ number=17, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='density', full_name='aapt.pb.Configuration.density', index=17,
+ number=18, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='touchscreen', full_name='aapt.pb.Configuration.touchscreen', index=18,
+ number=19, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='keys_hidden', full_name='aapt.pb.Configuration.keys_hidden', index=19,
+ number=20, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='keyboard', full_name='aapt.pb.Configuration.keyboard', index=20,
+ number=21, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='nav_hidden', full_name='aapt.pb.Configuration.nav_hidden', index=21,
+ number=22, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='navigation', full_name='aapt.pb.Configuration.navigation', index=22,
+ number=23, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='sdk_version', full_name='aapt.pb.Configuration.sdk_version', index=23,
+ number=24, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='product', full_name='aapt.pb.Configuration.product', index=24,
+ number=25, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _CONFIGURATION_LAYOUTDIRECTION,
+ _CONFIGURATION_SCREENLAYOUTSIZE,
+ _CONFIGURATION_SCREENLAYOUTLONG,
+ _CONFIGURATION_SCREENROUND,
+ _CONFIGURATION_WIDECOLORGAMUT,
+ _CONFIGURATION_HDR,
+ _CONFIGURATION_ORIENTATION,
+ _CONFIGURATION_UIMODETYPE,
+ _CONFIGURATION_UIMODENIGHT,
+ _CONFIGURATION_TOUCHSCREEN,
+ _CONFIGURATION_KEYSHIDDEN,
+ _CONFIGURATION_KEYBOARD,
+ _CONFIGURATION_NAVHIDDEN,
+ _CONFIGURATION_NAVIGATION,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=61,
+ serialized_end=2710,
+)
+
+_CONFIGURATION.fields_by_name['layout_direction'].enum_type = _CONFIGURATION_LAYOUTDIRECTION
+_CONFIGURATION.fields_by_name['screen_layout_size'].enum_type = _CONFIGURATION_SCREENLAYOUTSIZE
+_CONFIGURATION.fields_by_name['screen_layout_long'].enum_type = _CONFIGURATION_SCREENLAYOUTLONG
+_CONFIGURATION.fields_by_name['screen_round'].enum_type = _CONFIGURATION_SCREENROUND
+_CONFIGURATION.fields_by_name['wide_color_gamut'].enum_type = _CONFIGURATION_WIDECOLORGAMUT
+_CONFIGURATION.fields_by_name['hdr'].enum_type = _CONFIGURATION_HDR
+_CONFIGURATION.fields_by_name['orientation'].enum_type = _CONFIGURATION_ORIENTATION
+_CONFIGURATION.fields_by_name['ui_mode_type'].enum_type = _CONFIGURATION_UIMODETYPE
+_CONFIGURATION.fields_by_name['ui_mode_night'].enum_type = _CONFIGURATION_UIMODENIGHT
+_CONFIGURATION.fields_by_name['touchscreen'].enum_type = _CONFIGURATION_TOUCHSCREEN
+_CONFIGURATION.fields_by_name['keys_hidden'].enum_type = _CONFIGURATION_KEYSHIDDEN
+_CONFIGURATION.fields_by_name['keyboard'].enum_type = _CONFIGURATION_KEYBOARD
+_CONFIGURATION.fields_by_name['nav_hidden'].enum_type = _CONFIGURATION_NAVHIDDEN
+_CONFIGURATION.fields_by_name['navigation'].enum_type = _CONFIGURATION_NAVIGATION
+_CONFIGURATION_LAYOUTDIRECTION.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTSIZE.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENLAYOUTLONG.containing_type = _CONFIGURATION
+_CONFIGURATION_SCREENROUND.containing_type = _CONFIGURATION
+_CONFIGURATION_WIDECOLORGAMUT.containing_type = _CONFIGURATION
+_CONFIGURATION_HDR.containing_type = _CONFIGURATION
+_CONFIGURATION_ORIENTATION.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODETYPE.containing_type = _CONFIGURATION
+_CONFIGURATION_UIMODENIGHT.containing_type = _CONFIGURATION
+_CONFIGURATION_TOUCHSCREEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYSHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_KEYBOARD.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVHIDDEN.containing_type = _CONFIGURATION
+_CONFIGURATION_NAVIGATION.containing_type = _CONFIGURATION
+DESCRIPTOR.message_types_by_name['Configuration'] = _CONFIGURATION
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Configuration = _reflection.GeneratedProtocolMessageType('Configuration', (_message.Message,), {
+ 'DESCRIPTOR' : _CONFIGURATION,
+ '__module__' : 'frameworks.base.tools.aapt2.Configuration_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Configuration)
+ })
+_sym_db.RegisterMessage(Configuration)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/third_party/libwebrtc/build/android/gyp/proto/README.md b/third_party/libwebrtc/build/android/gyp/proto/README.md
new file mode 100644
index 0000000000..685041087a
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proto/README.md
@@ -0,0 +1,13 @@
+# Protos
+These protos are generated from Resources.proto and Configuration.proto from the
+Android repo. They are found in the frameworks/base/tools/aapt2/ directory. To
+regenerate these if there are changes, run this command from the root of an
+Android checkout:
+
+ protoc --python_out=some_dir frameworks/base/tools/aapt2/Resources.proto \
+ frameworks/base/tools/aapt2/Configuration.proto
+
+Then copy the resulting \*pb2.py files from some_dir here. To make sure
+Resources_pb2.py is able to import Configuration_pb2.py, replace the
+"from frameworks.base.tools.aapt2" portion of the import statement with
+"from ." so it will instead be imported from the current directory.
diff --git a/third_party/libwebrtc/build/android/gyp/proto/Resources_pb2.py b/third_party/libwebrtc/build/android/gyp/proto/Resources_pb2.py
new file mode 100644
index 0000000000..3bbd7028b5
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proto/Resources_pb2.py
@@ -0,0 +1,2779 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Resources.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='frameworks/base/tools/aapt2/Resources.proto',
+ package='aapt.pb',
+ syntax='proto3',
+ serialized_options=_b('\n\020com.android.aapt'),
+ serialized_pb=_b('\n+frameworks/base/tools/aapt2/Resources.proto\x12\x07\x61\x61pt.pb\x1a/frameworks/base/tools/aapt2/Configuration.proto\"\x1a\n\nStringPool\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"<\n\x0eSourcePosition\x12\x13\n\x0bline_number\x18\x01 \x01(\r\x12\x15\n\rcolumn_number\x18\x02 \x01(\r\"E\n\x06Source\x12\x10\n\x08path_idx\x18\x01 \x01(\r\x12)\n\x08position\x18\x02 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"0\n\x0fToolFingerprint\x12\x0c\n\x04tool\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"\xbb\x01\n\rResourceTable\x12(\n\x0bsource_pool\x18\x01 \x01(\x0b\x32\x13.aapt.pb.StringPool\x12!\n\x07package\x18\x02 \x03(\x0b\x32\x10.aapt.pb.Package\x12)\n\x0boverlayable\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Overlayable\x12\x32\n\x10tool_fingerprint\x18\x04 \x03(\x0b\x32\x18.aapt.pb.ToolFingerprint\"\x17\n\tPackageId\x12\n\n\x02id\x18\x01 \x01(\r\"d\n\x07Package\x12&\n\npackage_id\x18\x01 \x01(\x0b\x32\x12.aapt.pb.PackageId\x12\x14\n\x0cpackage_name\x18\x02 \x01(\t\x12\x1b\n\x04type\x18\x03 \x03(\x0b\x32\r.aapt.pb.Type\"\x14\n\x06TypeId\x12\n\n\x02id\x18\x01 \x01(\r\"U\n\x04Type\x12 \n\x07type_id\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.TypeId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1d\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x0e.aapt.pb.Entry\"\x97\x01\n\nVisibility\x12(\n\x05level\x18\x01 \x01(\x0e\x32\x19.aapt.pb.Visibility.Level\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x03 \x01(\t\"-\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PRIVATE\x10\x01\x12\n\n\x06PUBLIC\x10\x02\"<\n\x08\x41llowNew\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\"K\n\x0bOverlayable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\r\n\x05\x61\x63tor\x18\x03 \x01(\t\"\xf3\x01\n\x0fOverlayableItem\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12/\n\x06policy\x18\x03 
\x03(\x0e\x32\x1f.aapt.pb.OverlayableItem.Policy\x12\x17\n\x0foverlayable_idx\x18\x04 \x01(\r\"d\n\x06Policy\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06PUBLIC\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\n\n\x06VENDOR\x10\x03\x12\x0b\n\x07PRODUCT\x10\x04\x12\r\n\tSIGNATURE\x10\x05\x12\x07\n\x03ODM\x10\x06\x12\x07\n\x03OEM\x10\x07\"\x15\n\x07\x45ntryId\x12\n\n\x02id\x18\x01 \x01(\r\"\xe8\x01\n\x05\x45ntry\x12\"\n\x08\x65ntry_id\x18\x01 \x01(\x0b\x32\x10.aapt.pb.EntryId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\nvisibility\x18\x03 \x01(\x0b\x32\x13.aapt.pb.Visibility\x12$\n\tallow_new\x18\x04 \x01(\x0b\x32\x11.aapt.pb.AllowNew\x12\x32\n\x10overlayable_item\x18\x05 \x01(\x0b\x32\x18.aapt.pb.OverlayableItem\x12*\n\x0c\x63onfig_value\x18\x06 \x03(\x0b\x32\x14.aapt.pb.ConfigValue\"T\n\x0b\x43onfigValue\x12&\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x16.aapt.pb.Configuration\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.Value\"\xa1\x01\n\x05Value\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x0c\n\x04weak\x18\x03 \x01(\x08\x12\x1d\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.ItemH\x00\x12\x30\n\x0e\x63ompound_value\x18\x05 \x01(\x0b\x32\x16.aapt.pb.CompoundValueH\x00\x42\x07\n\x05value\"\x8d\x02\n\x04Item\x12!\n\x03ref\x18\x01 \x01(\x0b\x32\x12.aapt.pb.ReferenceH\x00\x12\x1e\n\x03str\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.StringH\x00\x12%\n\x07raw_str\x18\x03 \x01(\x0b\x32\x12.aapt.pb.RawStringH\x00\x12+\n\nstyled_str\x18\x04 \x01(\x0b\x32\x15.aapt.pb.StyledStringH\x00\x12&\n\x04\x66ile\x18\x05 \x01(\x0b\x32\x16.aapt.pb.FileReferenceH\x00\x12\x19\n\x02id\x18\x06 \x01(\x0b\x32\x0b.aapt.pb.IdH\x00\x12\"\n\x04prim\x18\x07 \x01(\x0b\x32\x12.aapt.pb.PrimitiveH\x00\x42\x07\n\x05value\"\xca\x01\n\rCompoundValue\x12\"\n\x04\x61ttr\x18\x01 \x01(\x0b\x32\x12.aapt.pb.AttributeH\x00\x12\x1f\n\x05style\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.StyleH\x00\x12\'\n\tstyleable\x18\x03 \x01(\x0b\x32\x12.aapt.pb.StyleableH\x00\x12\x1f\n\x05\x61rray\x18\x04 
\x01(\x0b\x32\x0e.aapt.pb.ArrayH\x00\x12!\n\x06plural\x18\x05 \x01(\x0b\x32\x0f.aapt.pb.PluralH\x00\x42\x07\n\x05value\"\x18\n\x07\x42oolean\x12\r\n\x05value\x18\x01 \x01(\x08\"\xa9\x01\n\tReference\x12%\n\x04type\x18\x01 \x01(\x0e\x32\x17.aapt.pb.Reference.Type\x12\n\n\x02id\x18\x02 \x01(\r\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07private\x18\x04 \x01(\x08\x12$\n\nis_dynamic\x18\x05 \x01(\x0b\x32\x10.aapt.pb.Boolean\"$\n\x04Type\x12\r\n\tREFERENCE\x10\x00\x12\r\n\tATTRIBUTE\x10\x01\"\x04\n\x02Id\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x1a\n\tRawString\x12\r\n\x05value\x18\x01 \x01(\t\"\x83\x01\n\x0cStyledString\x12\r\n\x05value\x18\x01 \x01(\t\x12(\n\x04span\x18\x02 \x03(\x0b\x32\x1a.aapt.pb.StyledString.Span\x1a:\n\x04Span\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x12\n\nfirst_char\x18\x02 \x01(\r\x12\x11\n\tlast_char\x18\x03 \x01(\r\"\x85\x01\n\rFileReference\x12\x0c\n\x04path\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.aapt.pb.FileReference.Type\";\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03PNG\x10\x01\x12\x0e\n\nBINARY_XML\x10\x02\x12\r\n\tPROTO_XML\x10\x03\"\x83\x04\n\tPrimitive\x12\x31\n\nnull_value\x18\x01 \x01(\x0b\x32\x1b.aapt.pb.Primitive.NullTypeH\x00\x12\x33\n\x0b\x65mpty_value\x18\x02 \x01(\x0b\x32\x1c.aapt.pb.Primitive.EmptyTypeH\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x19\n\x0f\x64imension_value\x18\r \x01(\rH\x00\x12\x18\n\x0e\x66raction_value\x18\x0e \x01(\rH\x00\x12\x1b\n\x11int_decimal_value\x18\x06 \x01(\x05H\x00\x12\x1f\n\x15int_hexadecimal_value\x18\x07 \x01(\rH\x00\x12\x17\n\rboolean_value\x18\x08 \x01(\x08H\x00\x12\x1b\n\x11\x63olor_argb8_value\x18\t \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb8_value\x18\n \x01(\rH\x00\x12\x1b\n\x11\x63olor_argb4_value\x18\x0b \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb4_value\x18\x0c \x01(\rH\x00\x12(\n\x1a\x64imension_value_deprecated\x18\x04 \x01(\x02\x42\x02\x18\x01H\x00\x12\'\n\x19\x66raction_value_deprecated\x18\x05 
\x01(\x02\x42\x02\x18\x01H\x00\x1a\n\n\x08NullType\x1a\x0b\n\tEmptyTypeB\r\n\x0boneof_value\"\x90\x03\n\tAttribute\x12\x14\n\x0c\x66ormat_flags\x18\x01 \x01(\r\x12\x0f\n\x07min_int\x18\x02 \x01(\x05\x12\x0f\n\x07max_int\x18\x03 \x01(\x05\x12)\n\x06symbol\x18\x04 \x03(\x0b\x32\x19.aapt.pb.Attribute.Symbol\x1ay\n\x06Symbol\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04name\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\r\n\x05value\x18\x04 \x01(\r\x12\x0c\n\x04type\x18\x05 \x01(\r\"\xa4\x01\n\x0b\x46ormatFlags\x12\x08\n\x04NONE\x10\x00\x12\t\n\x03\x41NY\x10\xff\xff\x03\x12\r\n\tREFERENCE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x0b\n\x07INTEGER\x10\x04\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\t\n\x05\x43OLOR\x10\x10\x12\t\n\x05\x46LOAT\x10 \x12\r\n\tDIMENSION\x10@\x12\r\n\x08\x46RACTION\x10\x80\x01\x12\n\n\x04\x45NUM\x10\x80\x80\x04\x12\x0b\n\x05\x46LAGS\x10\x80\x80\x08\"\xf1\x01\n\x05Style\x12\"\n\x06parent\x18\x01 \x01(\x0b\x32\x12.aapt.pb.Reference\x12&\n\rparent_source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12#\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Style.Entry\x1aw\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1f\n\x03key\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"\x91\x01\n\tStyleable\x12\'\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x18.aapt.pb.Styleable.Entry\x1a[\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04\x61ttr\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\"\x8a\x01\n\x05\x41rray\x12\'\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x16.aapt.pb.Array.Element\x1aX\n\x07\x45lement\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1b\n\x04item\x18\x03 \x01(\x0b\x32\r.aapt.pb.Item\"\xef\x01\n\x06Plural\x12$\n\x05\x65ntry\x18\x01 
\x03(\x0b\x32\x15.aapt.pb.Plural.Entry\x1a|\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12$\n\x05\x61rity\x18\x03 \x01(\x0e\x32\x15.aapt.pb.Plural.Arity\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"A\n\x05\x41rity\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\x12\x07\n\x03TWO\x10\x02\x12\x07\n\x03\x46\x45W\x10\x03\x12\x08\n\x04MANY\x10\x04\x12\t\n\x05OTHER\x10\x05\"r\n\x07XmlNode\x12&\n\x07\x65lement\x18\x01 \x01(\x0b\x32\x13.aapt.pb.XmlElementH\x00\x12\x0e\n\x04text\x18\x02 \x01(\tH\x00\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePositionB\x06\n\x04node\"\xb2\x01\n\nXmlElement\x12\x34\n\x15namespace_declaration\x18\x01 \x03(\x0b\x32\x15.aapt.pb.XmlNamespace\x12\x15\n\rnamespace_uri\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12(\n\tattribute\x18\x04 \x03(\x0b\x32\x15.aapt.pb.XmlAttribute\x12\x1f\n\x05\x63hild\x18\x05 \x03(\x0b\x32\x10.aapt.pb.XmlNode\"T\n\x0cXmlNamespace\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"\xa6\x01\n\x0cXmlAttribute\x12\x15\n\rnamespace_uri\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\'\n\x06source\x18\x04 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\x12\x13\n\x0bresource_id\x18\x05 \x01(\r\x12$\n\rcompiled_item\x18\x06 \x01(\x0b\x32\r.aapt.pb.ItemB\x12\n\x10\x63om.android.aaptb\x06proto3')
+ ,
+ dependencies=[frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2.DESCRIPTOR,])
+
+
+
+_VISIBILITY_LEVEL = _descriptor.EnumDescriptor(
+ name='Level',
+ full_name='aapt.pb.Visibility.Level',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PRIVATE', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PUBLIC', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=849,
+ serialized_end=894,
+)
+_sym_db.RegisterEnumDescriptor(_VISIBILITY_LEVEL)
+
+_OVERLAYABLEITEM_POLICY = _descriptor.EnumDescriptor(
+ name='Policy',
+ full_name='aapt.pb.OverlayableItem.Policy',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NONE', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PUBLIC', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SYSTEM', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='VENDOR', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PRODUCT', index=4, number=4,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SIGNATURE', index=5, number=5,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ODM', index=6, number=6,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='OEM', index=7, number=7,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1179,
+ serialized_end=1279,
+)
+_sym_db.RegisterEnumDescriptor(_OVERLAYABLEITEM_POLICY)
+
+_REFERENCE_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='aapt.pb.Reference.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='REFERENCE', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ATTRIBUTE', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=2426,
+ serialized_end=2462,
+)
+_sym_db.RegisterEnumDescriptor(_REFERENCE_TYPE)
+
+_FILEREFERENCE_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='aapt.pb.FileReference.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PNG', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BINARY_XML', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PROTO_XML', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=2732,
+ serialized_end=2791,
+)
+_sym_db.RegisterEnumDescriptor(_FILEREFERENCE_TYPE)
+
+_ATTRIBUTE_FORMATFLAGS = _descriptor.EnumDescriptor(
+ name='FormatFlags',
+ full_name='aapt.pb.Attribute.FormatFlags',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NONE', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ANY', index=1, number=65535,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='REFERENCE', index=2, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='STRING', index=3, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='INTEGER', index=4, number=4,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BOOLEAN', index=5, number=8,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='COLOR', index=6, number=16,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FLOAT', index=7, number=32,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DIMENSION', index=8, number=64,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FRACTION', index=9, number=128,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ENUM', index=10, number=65536,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FLAGS', index=11, number=131072,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=3548,
+ serialized_end=3712,
+)
+_sym_db.RegisterEnumDescriptor(_ATTRIBUTE_FORMATFLAGS)
+
+_PLURAL_ARITY = _descriptor.EnumDescriptor(
+ name='Arity',
+ full_name='aapt.pb.Plural.Arity',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ZERO', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ONE', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TWO', index=2, number=2,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FEW', index=3, number=3,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MANY', index=4, number=4,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='OTHER', index=5, number=5,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=4422,
+ serialized_end=4487,
+)
+_sym_db.RegisterEnumDescriptor(_PLURAL_ARITY)
+
+
+_STRINGPOOL = _descriptor.Descriptor(
+ name='StringPool',
+ full_name='aapt.pb.StringPool',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='data', full_name='aapt.pb.StringPool.data', index=0,
+ number=1, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=105,
+ serialized_end=131,
+)
+
+
+_SOURCEPOSITION = _descriptor.Descriptor(
+ name='SourcePosition',
+ full_name='aapt.pb.SourcePosition',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='line_number', full_name='aapt.pb.SourcePosition.line_number', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='column_number', full_name='aapt.pb.SourcePosition.column_number', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=133,
+ serialized_end=193,
+)
+
+
+_SOURCE = _descriptor.Descriptor(
+ name='Source',
+ full_name='aapt.pb.Source',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path_idx', full_name='aapt.pb.Source.path_idx', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='position', full_name='aapt.pb.Source.position', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=195,
+ serialized_end=264,
+)
+
+
+_TOOLFINGERPRINT = _descriptor.Descriptor(
+ name='ToolFingerprint',
+ full_name='aapt.pb.ToolFingerprint',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='tool', full_name='aapt.pb.ToolFingerprint.tool', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='version', full_name='aapt.pb.ToolFingerprint.version', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=266,
+ serialized_end=314,
+)
+
+
+_RESOURCETABLE = _descriptor.Descriptor(
+ name='ResourceTable',
+ full_name='aapt.pb.ResourceTable',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source_pool', full_name='aapt.pb.ResourceTable.source_pool', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='package', full_name='aapt.pb.ResourceTable.package', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='overlayable', full_name='aapt.pb.ResourceTable.overlayable', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='tool_fingerprint', full_name='aapt.pb.ResourceTable.tool_fingerprint', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=317,
+ serialized_end=504,
+)
+
+
+_PACKAGEID = _descriptor.Descriptor(
+ name='PackageId',
+ full_name='aapt.pb.PackageId',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='id', full_name='aapt.pb.PackageId.id', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=506,
+ serialized_end=529,
+)
+
+
+_PACKAGE = _descriptor.Descriptor(
+ name='Package',
+ full_name='aapt.pb.Package',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='package_id', full_name='aapt.pb.Package.package_id', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='package_name', full_name='aapt.pb.Package.package_name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='aapt.pb.Package.type', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=531,
+ serialized_end=631,
+)
+
+
+_TYPEID = _descriptor.Descriptor(
+ name='TypeId',
+ full_name='aapt.pb.TypeId',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='id', full_name='aapt.pb.TypeId.id', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=633,
+ serialized_end=653,
+)
+
+
+_TYPE = _descriptor.Descriptor(
+ name='Type',
+ full_name='aapt.pb.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type_id', full_name='aapt.pb.Type.type_id', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.Type.name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='entry', full_name='aapt.pb.Type.entry', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=655,
+ serialized_end=740,
+)
+
+
+_VISIBILITY = _descriptor.Descriptor(
+ name='Visibility',
+ full_name='aapt.pb.Visibility',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='level', full_name='aapt.pb.Visibility.level', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Visibility.source', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Visibility.comment', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _VISIBILITY_LEVEL,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=743,
+ serialized_end=894,
+)
+
+
+_ALLOWNEW = _descriptor.Descriptor(
+ name='AllowNew',
+ full_name='aapt.pb.AllowNew',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.AllowNew.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.AllowNew.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=896,
+ serialized_end=956,
+)
+
+
+_OVERLAYABLE = _descriptor.Descriptor(
+ name='Overlayable',
+ full_name='aapt.pb.Overlayable',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.Overlayable.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Overlayable.source', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='actor', full_name='aapt.pb.Overlayable.actor', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=958,
+ serialized_end=1033,
+)
+
+
+_OVERLAYABLEITEM = _descriptor.Descriptor(
+ name='OverlayableItem',
+ full_name='aapt.pb.OverlayableItem',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.OverlayableItem.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.OverlayableItem.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='policy', full_name='aapt.pb.OverlayableItem.policy', index=2,
+ number=3, type=14, cpp_type=8, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='overlayable_idx', full_name='aapt.pb.OverlayableItem.overlayable_idx', index=3,
+ number=4, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _OVERLAYABLEITEM_POLICY,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1036,
+ serialized_end=1279,
+)
+
+
+_ENTRYID = _descriptor.Descriptor(
+ name='EntryId',
+ full_name='aapt.pb.EntryId',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='id', full_name='aapt.pb.EntryId.id', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1281,
+ serialized_end=1302,
+)
+
+
+_ENTRY = _descriptor.Descriptor(
+ name='Entry',
+ full_name='aapt.pb.Entry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='entry_id', full_name='aapt.pb.Entry.entry_id', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.Entry.name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='visibility', full_name='aapt.pb.Entry.visibility', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='allow_new', full_name='aapt.pb.Entry.allow_new', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='overlayable_item', full_name='aapt.pb.Entry.overlayable_item', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='config_value', full_name='aapt.pb.Entry.config_value', index=5,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1305,
+ serialized_end=1537,
+)
+
+
+_CONFIGVALUE = _descriptor.Descriptor(
+ name='ConfigValue',
+ full_name='aapt.pb.ConfigValue',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='config', full_name='aapt.pb.ConfigValue.config', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.ConfigValue.value', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1539,
+ serialized_end=1623,
+)
+
+
+_VALUE = _descriptor.Descriptor(
+ name='Value',
+ full_name='aapt.pb.Value',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Value.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Value.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='weak', full_name='aapt.pb.Value.weak', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='item', full_name='aapt.pb.Value.item', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='compound_value', full_name='aapt.pb.Value.compound_value', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='value', full_name='aapt.pb.Value.value',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1626,
+ serialized_end=1787,
+)
+
+
+_ITEM = _descriptor.Descriptor(
+ name='Item',
+ full_name='aapt.pb.Item',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='ref', full_name='aapt.pb.Item.ref', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='str', full_name='aapt.pb.Item.str', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='raw_str', full_name='aapt.pb.Item.raw_str', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='styled_str', full_name='aapt.pb.Item.styled_str', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='file', full_name='aapt.pb.Item.file', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='id', full_name='aapt.pb.Item.id', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='prim', full_name='aapt.pb.Item.prim', index=6,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='value', full_name='aapt.pb.Item.value',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1790,
+ serialized_end=2059,
+)
+
+
+_COMPOUNDVALUE = _descriptor.Descriptor(
+ name='CompoundValue',
+ full_name='aapt.pb.CompoundValue',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='attr', full_name='aapt.pb.CompoundValue.attr', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='style', full_name='aapt.pb.CompoundValue.style', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='styleable', full_name='aapt.pb.CompoundValue.styleable', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='array', full_name='aapt.pb.CompoundValue.array', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='plural', full_name='aapt.pb.CompoundValue.plural', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='value', full_name='aapt.pb.CompoundValue.value',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=2062,
+ serialized_end=2264,
+)
+
+
+_BOOLEAN = _descriptor.Descriptor(
+ name='Boolean',
+ full_name='aapt.pb.Boolean',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.Boolean.value', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2266,
+ serialized_end=2290,
+)
+
+
+_REFERENCE = _descriptor.Descriptor(
+ name='Reference',
+ full_name='aapt.pb.Reference',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='aapt.pb.Reference.type', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='id', full_name='aapt.pb.Reference.id', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.Reference.name', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='private', full_name='aapt.pb.Reference.private', index=3,
+ number=4, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='is_dynamic', full_name='aapt.pb.Reference.is_dynamic', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _REFERENCE_TYPE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2293,
+ serialized_end=2462,
+)
+
+
+_ID = _descriptor.Descriptor(
+ name='Id',
+ full_name='aapt.pb.Id',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2464,
+ serialized_end=2468,
+)
+
+
+_STRING = _descriptor.Descriptor(
+ name='String',
+ full_name='aapt.pb.String',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.String.value', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2470,
+ serialized_end=2493,
+)
+
+
+_RAWSTRING = _descriptor.Descriptor(
+ name='RawString',
+ full_name='aapt.pb.RawString',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.RawString.value', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2495,
+ serialized_end=2521,
+)
+
+
+_STYLEDSTRING_SPAN = _descriptor.Descriptor(
+ name='Span',
+ full_name='aapt.pb.StyledString.Span',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='tag', full_name='aapt.pb.StyledString.Span.tag', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='first_char', full_name='aapt.pb.StyledString.Span.first_char', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='last_char', full_name='aapt.pb.StyledString.Span.last_char', index=2,
+ number=3, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2597,
+ serialized_end=2655,
+)
+
+_STYLEDSTRING = _descriptor.Descriptor(
+ name='StyledString',
+ full_name='aapt.pb.StyledString',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.StyledString.value', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='span', full_name='aapt.pb.StyledString.span', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_STYLEDSTRING_SPAN, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2524,
+ serialized_end=2655,
+)
+
+
+_FILEREFERENCE = _descriptor.Descriptor(
+ name='FileReference',
+ full_name='aapt.pb.FileReference',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='aapt.pb.FileReference.path', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='aapt.pb.FileReference.type', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FILEREFERENCE_TYPE,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2658,
+ serialized_end=2791,
+)
+
+
+_PRIMITIVE_NULLTYPE = _descriptor.Descriptor(
+ name='NullType',
+ full_name='aapt.pb.Primitive.NullType',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3271,
+ serialized_end=3281,
+)
+
+_PRIMITIVE_EMPTYTYPE = _descriptor.Descriptor(
+ name='EmptyType',
+ full_name='aapt.pb.Primitive.EmptyType',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3283,
+ serialized_end=3294,
+)
+
+_PRIMITIVE = _descriptor.Descriptor(
+ name='Primitive',
+ full_name='aapt.pb.Primitive',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='null_value', full_name='aapt.pb.Primitive.null_value', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='empty_value', full_name='aapt.pb.Primitive.empty_value', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='float_value', full_name='aapt.pb.Primitive.float_value', index=2,
+ number=3, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='dimension_value', full_name='aapt.pb.Primitive.dimension_value', index=3,
+ number=13, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='fraction_value', full_name='aapt.pb.Primitive.fraction_value', index=4,
+ number=14, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='int_decimal_value', full_name='aapt.pb.Primitive.int_decimal_value', index=5,
+ number=6, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='int_hexadecimal_value', full_name='aapt.pb.Primitive.int_hexadecimal_value', index=6,
+ number=7, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='boolean_value', full_name='aapt.pb.Primitive.boolean_value', index=7,
+ number=8, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='color_argb8_value', full_name='aapt.pb.Primitive.color_argb8_value', index=8,
+ number=9, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='color_rgb8_value', full_name='aapt.pb.Primitive.color_rgb8_value', index=9,
+ number=10, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='color_argb4_value', full_name='aapt.pb.Primitive.color_argb4_value', index=10,
+ number=11, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='color_rgb4_value', full_name='aapt.pb.Primitive.color_rgb4_value', index=11,
+ number=12, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='dimension_value_deprecated', full_name='aapt.pb.Primitive.dimension_value_deprecated', index=12,
+ number=4, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=_b('\030\001'), file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='fraction_value_deprecated', full_name='aapt.pb.Primitive.fraction_value_deprecated', index=13,
+ number=5, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=_b('\030\001'), file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_PRIMITIVE_NULLTYPE, _PRIMITIVE_EMPTYTYPE, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='oneof_value', full_name='aapt.pb.Primitive.oneof_value',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=2794,
+ serialized_end=3309,
+)
+
+
+_ATTRIBUTE_SYMBOL = _descriptor.Descriptor(
+ name='Symbol',
+ full_name='aapt.pb.Attribute.Symbol',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Attribute.Symbol.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Attribute.Symbol.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.Attribute.Symbol.name', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.Attribute.Symbol.value', index=3,
+ number=4, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='aapt.pb.Attribute.Symbol.type', index=4,
+ number=5, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3424,
+ serialized_end=3545,
+)
+
+_ATTRIBUTE = _descriptor.Descriptor(
+ name='Attribute',
+ full_name='aapt.pb.Attribute',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='format_flags', full_name='aapt.pb.Attribute.format_flags', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='min_int', full_name='aapt.pb.Attribute.min_int', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='max_int', full_name='aapt.pb.Attribute.max_int', index=2,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='symbol', full_name='aapt.pb.Attribute.symbol', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_ATTRIBUTE_SYMBOL, ],
+ enum_types=[
+ _ATTRIBUTE_FORMATFLAGS,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3312,
+ serialized_end=3712,
+)
+
+
+_STYLE_ENTRY = _descriptor.Descriptor(
+ name='Entry',
+ full_name='aapt.pb.Style.Entry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Style.Entry.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Style.Entry.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='key', full_name='aapt.pb.Style.Entry.key', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='item', full_name='aapt.pb.Style.Entry.item', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3837,
+ serialized_end=3956,
+)
+
+_STYLE = _descriptor.Descriptor(
+ name='Style',
+ full_name='aapt.pb.Style',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='parent', full_name='aapt.pb.Style.parent', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='parent_source', full_name='aapt.pb.Style.parent_source', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='entry', full_name='aapt.pb.Style.entry', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_STYLE_ENTRY, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3715,
+ serialized_end=3956,
+)
+
+
+_STYLEABLE_ENTRY = _descriptor.Descriptor(
+ name='Entry',
+ full_name='aapt.pb.Styleable.Entry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Styleable.Entry.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Styleable.Entry.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='attr', full_name='aapt.pb.Styleable.Entry.attr', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4013,
+ serialized_end=4104,
+)
+
+_STYLEABLE = _descriptor.Descriptor(
+ name='Styleable',
+ full_name='aapt.pb.Styleable',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='entry', full_name='aapt.pb.Styleable.entry', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_STYLEABLE_ENTRY, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=3959,
+ serialized_end=4104,
+)
+
+
+_ARRAY_ELEMENT = _descriptor.Descriptor(
+ name='Element',
+ full_name='aapt.pb.Array.Element',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Array.Element.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Array.Element.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='item', full_name='aapt.pb.Array.Element.item', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4157,
+ serialized_end=4245,
+)
+
+_ARRAY = _descriptor.Descriptor(
+ name='Array',
+ full_name='aapt.pb.Array',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='element', full_name='aapt.pb.Array.element', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_ARRAY_ELEMENT, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4107,
+ serialized_end=4245,
+)
+
+
+_PLURAL_ENTRY = _descriptor.Descriptor(
+ name='Entry',
+ full_name='aapt.pb.Plural.Entry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.Plural.Entry.source', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='comment', full_name='aapt.pb.Plural.Entry.comment', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='arity', full_name='aapt.pb.Plural.Entry.arity', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='item', full_name='aapt.pb.Plural.Entry.item', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4296,
+ serialized_end=4420,
+)
+
+_PLURAL = _descriptor.Descriptor(
+ name='Plural',
+ full_name='aapt.pb.Plural',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='entry', full_name='aapt.pb.Plural.entry', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[_PLURAL_ENTRY, ],
+ enum_types=[
+ _PLURAL_ARITY,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4248,
+ serialized_end=4487,
+)
+
+
+_XMLNODE = _descriptor.Descriptor(
+ name='XmlNode',
+ full_name='aapt.pb.XmlNode',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='element', full_name='aapt.pb.XmlNode.element', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='text', full_name='aapt.pb.XmlNode.text', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.XmlNode.source', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='node', full_name='aapt.pb.XmlNode.node',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=4489,
+ serialized_end=4603,
+)
+
+
+_XMLELEMENT = _descriptor.Descriptor(
+ name='XmlElement',
+ full_name='aapt.pb.XmlElement',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='namespace_declaration', full_name='aapt.pb.XmlElement.namespace_declaration', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='namespace_uri', full_name='aapt.pb.XmlElement.namespace_uri', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.XmlElement.name', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='attribute', full_name='aapt.pb.XmlElement.attribute', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='child', full_name='aapt.pb.XmlElement.child', index=4,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4606,
+ serialized_end=4784,
+)
+
+
+_XMLNAMESPACE = _descriptor.Descriptor(
+ name='XmlNamespace',
+ full_name='aapt.pb.XmlNamespace',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='prefix', full_name='aapt.pb.XmlNamespace.prefix', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='uri', full_name='aapt.pb.XmlNamespace.uri', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.XmlNamespace.source', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4786,
+ serialized_end=4870,
+)
+
+
+_XMLATTRIBUTE = _descriptor.Descriptor(
+ name='XmlAttribute',
+ full_name='aapt.pb.XmlAttribute',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='namespace_uri', full_name='aapt.pb.XmlAttribute.namespace_uri', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='aapt.pb.XmlAttribute.name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='aapt.pb.XmlAttribute.value', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='source', full_name='aapt.pb.XmlAttribute.source', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='resource_id', full_name='aapt.pb.XmlAttribute.resource_id', index=4,
+ number=5, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='compiled_item', full_name='aapt.pb.XmlAttribute.compiled_item', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=4873,
+ serialized_end=5039,
+)
+
+_SOURCE.fields_by_name['position'].message_type = _SOURCEPOSITION
+_RESOURCETABLE.fields_by_name['source_pool'].message_type = _STRINGPOOL
+_RESOURCETABLE.fields_by_name['package'].message_type = _PACKAGE
+_RESOURCETABLE.fields_by_name['overlayable'].message_type = _OVERLAYABLE
+_RESOURCETABLE.fields_by_name['tool_fingerprint'].message_type = _TOOLFINGERPRINT
+_PACKAGE.fields_by_name['package_id'].message_type = _PACKAGEID
+_PACKAGE.fields_by_name['type'].message_type = _TYPE
+_TYPE.fields_by_name['type_id'].message_type = _TYPEID
+_TYPE.fields_by_name['entry'].message_type = _ENTRY
+_VISIBILITY.fields_by_name['level'].enum_type = _VISIBILITY_LEVEL
+_VISIBILITY.fields_by_name['source'].message_type = _SOURCE
+_VISIBILITY_LEVEL.containing_type = _VISIBILITY
+_ALLOWNEW.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLE.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['source'].message_type = _SOURCE
+_OVERLAYABLEITEM.fields_by_name['policy'].enum_type = _OVERLAYABLEITEM_POLICY
+_OVERLAYABLEITEM_POLICY.containing_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['entry_id'].message_type = _ENTRYID
+_ENTRY.fields_by_name['visibility'].message_type = _VISIBILITY
+_ENTRY.fields_by_name['allow_new'].message_type = _ALLOWNEW
+_ENTRY.fields_by_name['overlayable_item'].message_type = _OVERLAYABLEITEM
+_ENTRY.fields_by_name['config_value'].message_type = _CONFIGVALUE
+_CONFIGVALUE.fields_by_name['config'].message_type = frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2._CONFIGURATION
+_CONFIGVALUE.fields_by_name['value'].message_type = _VALUE
+_VALUE.fields_by_name['source'].message_type = _SOURCE
+_VALUE.fields_by_name['item'].message_type = _ITEM
+_VALUE.fields_by_name['compound_value'].message_type = _COMPOUNDVALUE
+_VALUE.oneofs_by_name['value'].fields.append(
+ _VALUE.fields_by_name['item'])
+_VALUE.fields_by_name['item'].containing_oneof = _VALUE.oneofs_by_name['value']
+_VALUE.oneofs_by_name['value'].fields.append(
+ _VALUE.fields_by_name['compound_value'])
+_VALUE.fields_by_name['compound_value'].containing_oneof = _VALUE.oneofs_by_name['value']
+_ITEM.fields_by_name['ref'].message_type = _REFERENCE
+_ITEM.fields_by_name['str'].message_type = _STRING
+_ITEM.fields_by_name['raw_str'].message_type = _RAWSTRING
+_ITEM.fields_by_name['styled_str'].message_type = _STYLEDSTRING
+_ITEM.fields_by_name['file'].message_type = _FILEREFERENCE
+_ITEM.fields_by_name['id'].message_type = _ID
+_ITEM.fields_by_name['prim'].message_type = _PRIMITIVE
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['ref'])
+_ITEM.fields_by_name['ref'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['str'])
+_ITEM.fields_by_name['str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['raw_str'])
+_ITEM.fields_by_name['raw_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['styled_str'])
+_ITEM.fields_by_name['styled_str'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['file'])
+_ITEM.fields_by_name['file'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['id'])
+_ITEM.fields_by_name['id'].containing_oneof = _ITEM.oneofs_by_name['value']
+_ITEM.oneofs_by_name['value'].fields.append(
+ _ITEM.fields_by_name['prim'])
+_ITEM.fields_by_name['prim'].containing_oneof = _ITEM.oneofs_by_name['value']
+_COMPOUNDVALUE.fields_by_name['attr'].message_type = _ATTRIBUTE
+_COMPOUNDVALUE.fields_by_name['style'].message_type = _STYLE
+_COMPOUNDVALUE.fields_by_name['styleable'].message_type = _STYLEABLE
+_COMPOUNDVALUE.fields_by_name['array'].message_type = _ARRAY
+_COMPOUNDVALUE.fields_by_name['plural'].message_type = _PLURAL
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+ _COMPOUNDVALUE.fields_by_name['attr'])
+_COMPOUNDVALUE.fields_by_name['attr'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+ _COMPOUNDVALUE.fields_by_name['style'])
+_COMPOUNDVALUE.fields_by_name['style'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+ _COMPOUNDVALUE.fields_by_name['styleable'])
+_COMPOUNDVALUE.fields_by_name['styleable'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+ _COMPOUNDVALUE.fields_by_name['array'])
+_COMPOUNDVALUE.fields_by_name['array'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_COMPOUNDVALUE.oneofs_by_name['value'].fields.append(
+ _COMPOUNDVALUE.fields_by_name['plural'])
+_COMPOUNDVALUE.fields_by_name['plural'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value']
+_REFERENCE.fields_by_name['type'].enum_type = _REFERENCE_TYPE
+_REFERENCE.fields_by_name['is_dynamic'].message_type = _BOOLEAN
+_REFERENCE_TYPE.containing_type = _REFERENCE
+_STYLEDSTRING_SPAN.containing_type = _STYLEDSTRING
+_STYLEDSTRING.fields_by_name['span'].message_type = _STYLEDSTRING_SPAN
+_FILEREFERENCE.fields_by_name['type'].enum_type = _FILEREFERENCE_TYPE
+_FILEREFERENCE_TYPE.containing_type = _FILEREFERENCE
+_PRIMITIVE_NULLTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE_EMPTYTYPE.containing_type = _PRIMITIVE
+_PRIMITIVE.fields_by_name['null_value'].message_type = _PRIMITIVE_NULLTYPE
+_PRIMITIVE.fields_by_name['empty_value'].message_type = _PRIMITIVE_EMPTYTYPE
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['null_value'])
+_PRIMITIVE.fields_by_name['null_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['empty_value'])
+_PRIMITIVE.fields_by_name['empty_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['float_value'])
+_PRIMITIVE.fields_by_name['float_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['dimension_value'])
+_PRIMITIVE.fields_by_name['dimension_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['fraction_value'])
+_PRIMITIVE.fields_by_name['fraction_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['int_decimal_value'])
+_PRIMITIVE.fields_by_name['int_decimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['int_hexadecimal_value'])
+_PRIMITIVE.fields_by_name['int_hexadecimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['boolean_value'])
+_PRIMITIVE.fields_by_name['boolean_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['color_argb8_value'])
+_PRIMITIVE.fields_by_name['color_argb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['color_rgb8_value'])
+_PRIMITIVE.fields_by_name['color_rgb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['color_argb4_value'])
+_PRIMITIVE.fields_by_name['color_argb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['color_rgb4_value'])
+_PRIMITIVE.fields_by_name['color_rgb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['dimension_value_deprecated'])
+_PRIMITIVE.fields_by_name['dimension_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append(
+ _PRIMITIVE.fields_by_name['fraction_value_deprecated'])
+_PRIMITIVE.fields_by_name['fraction_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value']
+_ATTRIBUTE_SYMBOL.fields_by_name['source'].message_type = _SOURCE
+_ATTRIBUTE_SYMBOL.fields_by_name['name'].message_type = _REFERENCE
+_ATTRIBUTE_SYMBOL.containing_type = _ATTRIBUTE
+_ATTRIBUTE.fields_by_name['symbol'].message_type = _ATTRIBUTE_SYMBOL
+_ATTRIBUTE_FORMATFLAGS.containing_type = _ATTRIBUTE
+_STYLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLE_ENTRY.fields_by_name['key'].message_type = _REFERENCE
+_STYLE_ENTRY.fields_by_name['item'].message_type = _ITEM
+_STYLE_ENTRY.containing_type = _STYLE
+_STYLE.fields_by_name['parent'].message_type = _REFERENCE
+_STYLE.fields_by_name['parent_source'].message_type = _SOURCE
+_STYLE.fields_by_name['entry'].message_type = _STYLE_ENTRY
+_STYLEABLE_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_STYLEABLE_ENTRY.fields_by_name['attr'].message_type = _REFERENCE
+_STYLEABLE_ENTRY.containing_type = _STYLEABLE
+_STYLEABLE.fields_by_name['entry'].message_type = _STYLEABLE_ENTRY
+_ARRAY_ELEMENT.fields_by_name['source'].message_type = _SOURCE
+_ARRAY_ELEMENT.fields_by_name['item'].message_type = _ITEM
+_ARRAY_ELEMENT.containing_type = _ARRAY
+_ARRAY.fields_by_name['element'].message_type = _ARRAY_ELEMENT
+_PLURAL_ENTRY.fields_by_name['source'].message_type = _SOURCE
+_PLURAL_ENTRY.fields_by_name['arity'].enum_type = _PLURAL_ARITY
+_PLURAL_ENTRY.fields_by_name['item'].message_type = _ITEM
+_PLURAL_ENTRY.containing_type = _PLURAL
+_PLURAL.fields_by_name['entry'].message_type = _PLURAL_ENTRY
+_PLURAL_ARITY.containing_type = _PLURAL
+_XMLNODE.fields_by_name['element'].message_type = _XMLELEMENT
+_XMLNODE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLNODE.oneofs_by_name['node'].fields.append(
+ _XMLNODE.fields_by_name['element'])
+_XMLNODE.fields_by_name['element'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLNODE.oneofs_by_name['node'].fields.append(
+ _XMLNODE.fields_by_name['text'])
+_XMLNODE.fields_by_name['text'].containing_oneof = _XMLNODE.oneofs_by_name['node']
+_XMLELEMENT.fields_by_name['namespace_declaration'].message_type = _XMLNAMESPACE
+_XMLELEMENT.fields_by_name['attribute'].message_type = _XMLATTRIBUTE
+_XMLELEMENT.fields_by_name['child'].message_type = _XMLNODE
+_XMLNAMESPACE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['source'].message_type = _SOURCEPOSITION
+_XMLATTRIBUTE.fields_by_name['compiled_item'].message_type = _ITEM
+DESCRIPTOR.message_types_by_name['StringPool'] = _STRINGPOOL
+DESCRIPTOR.message_types_by_name['SourcePosition'] = _SOURCEPOSITION
+DESCRIPTOR.message_types_by_name['Source'] = _SOURCE
+DESCRIPTOR.message_types_by_name['ToolFingerprint'] = _TOOLFINGERPRINT
+DESCRIPTOR.message_types_by_name['ResourceTable'] = _RESOURCETABLE
+DESCRIPTOR.message_types_by_name['PackageId'] = _PACKAGEID
+DESCRIPTOR.message_types_by_name['Package'] = _PACKAGE
+DESCRIPTOR.message_types_by_name['TypeId'] = _TYPEID
+DESCRIPTOR.message_types_by_name['Type'] = _TYPE
+DESCRIPTOR.message_types_by_name['Visibility'] = _VISIBILITY
+DESCRIPTOR.message_types_by_name['AllowNew'] = _ALLOWNEW
+DESCRIPTOR.message_types_by_name['Overlayable'] = _OVERLAYABLE
+DESCRIPTOR.message_types_by_name['OverlayableItem'] = _OVERLAYABLEITEM
+DESCRIPTOR.message_types_by_name['EntryId'] = _ENTRYID
+DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY
+DESCRIPTOR.message_types_by_name['ConfigValue'] = _CONFIGVALUE
+DESCRIPTOR.message_types_by_name['Value'] = _VALUE
+DESCRIPTOR.message_types_by_name['Item'] = _ITEM
+DESCRIPTOR.message_types_by_name['CompoundValue'] = _COMPOUNDVALUE
+DESCRIPTOR.message_types_by_name['Boolean'] = _BOOLEAN
+DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE
+DESCRIPTOR.message_types_by_name['Id'] = _ID
+DESCRIPTOR.message_types_by_name['String'] = _STRING
+DESCRIPTOR.message_types_by_name['RawString'] = _RAWSTRING
+DESCRIPTOR.message_types_by_name['StyledString'] = _STYLEDSTRING
+DESCRIPTOR.message_types_by_name['FileReference'] = _FILEREFERENCE
+DESCRIPTOR.message_types_by_name['Primitive'] = _PRIMITIVE
+DESCRIPTOR.message_types_by_name['Attribute'] = _ATTRIBUTE
+DESCRIPTOR.message_types_by_name['Style'] = _STYLE
+DESCRIPTOR.message_types_by_name['Styleable'] = _STYLEABLE
+DESCRIPTOR.message_types_by_name['Array'] = _ARRAY
+DESCRIPTOR.message_types_by_name['Plural'] = _PLURAL
+DESCRIPTOR.message_types_by_name['XmlNode'] = _XMLNODE
+DESCRIPTOR.message_types_by_name['XmlElement'] = _XMLELEMENT
+DESCRIPTOR.message_types_by_name['XmlNamespace'] = _XMLNAMESPACE
+DESCRIPTOR.message_types_by_name['XmlAttribute'] = _XMLATTRIBUTE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+StringPool = _reflection.GeneratedProtocolMessageType('StringPool', (_message.Message,), {
+ 'DESCRIPTOR' : _STRINGPOOL,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.StringPool)
+ })
+_sym_db.RegisterMessage(StringPool)
+
+SourcePosition = _reflection.GeneratedProtocolMessageType('SourcePosition', (_message.Message,), {
+ 'DESCRIPTOR' : _SOURCEPOSITION,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.SourcePosition)
+ })
+_sym_db.RegisterMessage(SourcePosition)
+
+Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), {
+ 'DESCRIPTOR' : _SOURCE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Source)
+ })
+_sym_db.RegisterMessage(Source)
+
+ToolFingerprint = _reflection.GeneratedProtocolMessageType('ToolFingerprint', (_message.Message,), {
+ 'DESCRIPTOR' : _TOOLFINGERPRINT,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.ToolFingerprint)
+ })
+_sym_db.RegisterMessage(ToolFingerprint)
+
+ResourceTable = _reflection.GeneratedProtocolMessageType('ResourceTable', (_message.Message,), {
+ 'DESCRIPTOR' : _RESOURCETABLE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.ResourceTable)
+ })
+_sym_db.RegisterMessage(ResourceTable)
+
+PackageId = _reflection.GeneratedProtocolMessageType('PackageId', (_message.Message,), {
+ 'DESCRIPTOR' : _PACKAGEID,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.PackageId)
+ })
+_sym_db.RegisterMessage(PackageId)
+
+Package = _reflection.GeneratedProtocolMessageType('Package', (_message.Message,), {
+ 'DESCRIPTOR' : _PACKAGE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Package)
+ })
+_sym_db.RegisterMessage(Package)
+
+TypeId = _reflection.GeneratedProtocolMessageType('TypeId', (_message.Message,), {
+ 'DESCRIPTOR' : _TYPEID,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.TypeId)
+ })
+_sym_db.RegisterMessage(TypeId)
+
+Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), {
+ 'DESCRIPTOR' : _TYPE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Type)
+ })
+_sym_db.RegisterMessage(Type)
+
+Visibility = _reflection.GeneratedProtocolMessageType('Visibility', (_message.Message,), {
+ 'DESCRIPTOR' : _VISIBILITY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Visibility)
+ })
+_sym_db.RegisterMessage(Visibility)
+
+AllowNew = _reflection.GeneratedProtocolMessageType('AllowNew', (_message.Message,), {
+ 'DESCRIPTOR' : _ALLOWNEW,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.AllowNew)
+ })
+_sym_db.RegisterMessage(AllowNew)
+
+Overlayable = _reflection.GeneratedProtocolMessageType('Overlayable', (_message.Message,), {
+ 'DESCRIPTOR' : _OVERLAYABLE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Overlayable)
+ })
+_sym_db.RegisterMessage(Overlayable)
+
+OverlayableItem = _reflection.GeneratedProtocolMessageType('OverlayableItem', (_message.Message,), {
+ 'DESCRIPTOR' : _OVERLAYABLEITEM,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.OverlayableItem)
+ })
+_sym_db.RegisterMessage(OverlayableItem)
+
+EntryId = _reflection.GeneratedProtocolMessageType('EntryId', (_message.Message,), {
+ 'DESCRIPTOR' : _ENTRYID,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.EntryId)
+ })
+_sym_db.RegisterMessage(EntryId)
+
+Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+ 'DESCRIPTOR' : _ENTRY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Entry)
+ })
+_sym_db.RegisterMessage(Entry)
+
+ConfigValue = _reflection.GeneratedProtocolMessageType('ConfigValue', (_message.Message,), {
+ 'DESCRIPTOR' : _CONFIGVALUE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.ConfigValue)
+ })
+_sym_db.RegisterMessage(ConfigValue)
+
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
+ 'DESCRIPTOR' : _VALUE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Value)
+ })
+_sym_db.RegisterMessage(Value)
+
+Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), {
+ 'DESCRIPTOR' : _ITEM,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Item)
+ })
+_sym_db.RegisterMessage(Item)
+
+CompoundValue = _reflection.GeneratedProtocolMessageType('CompoundValue', (_message.Message,), {
+ 'DESCRIPTOR' : _COMPOUNDVALUE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.CompoundValue)
+ })
+_sym_db.RegisterMessage(CompoundValue)
+
+Boolean = _reflection.GeneratedProtocolMessageType('Boolean', (_message.Message,), {
+ 'DESCRIPTOR' : _BOOLEAN,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Boolean)
+ })
+_sym_db.RegisterMessage(Boolean)
+
+Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), {
+ 'DESCRIPTOR' : _REFERENCE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Reference)
+ })
+_sym_db.RegisterMessage(Reference)
+
+Id = _reflection.GeneratedProtocolMessageType('Id', (_message.Message,), {
+ 'DESCRIPTOR' : _ID,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Id)
+ })
+_sym_db.RegisterMessage(Id)
+
+String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), {
+ 'DESCRIPTOR' : _STRING,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.String)
+ })
+_sym_db.RegisterMessage(String)
+
+RawString = _reflection.GeneratedProtocolMessageType('RawString', (_message.Message,), {
+ 'DESCRIPTOR' : _RAWSTRING,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.RawString)
+ })
+_sym_db.RegisterMessage(RawString)
+
+StyledString = _reflection.GeneratedProtocolMessageType('StyledString', (_message.Message,), {
+
+ 'Span' : _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), {
+ 'DESCRIPTOR' : _STYLEDSTRING_SPAN,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.StyledString.Span)
+ })
+ ,
+ 'DESCRIPTOR' : _STYLEDSTRING,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.StyledString)
+ })
+_sym_db.RegisterMessage(StyledString)
+_sym_db.RegisterMessage(StyledString.Span)
+
+FileReference = _reflection.GeneratedProtocolMessageType('FileReference', (_message.Message,), {
+ 'DESCRIPTOR' : _FILEREFERENCE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.FileReference)
+ })
+_sym_db.RegisterMessage(FileReference)
+
+Primitive = _reflection.GeneratedProtocolMessageType('Primitive', (_message.Message,), {
+
+ 'NullType' : _reflection.GeneratedProtocolMessageType('NullType', (_message.Message,), {
+ 'DESCRIPTOR' : _PRIMITIVE_NULLTYPE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.NullType)
+ })
+ ,
+
+ 'EmptyType' : _reflection.GeneratedProtocolMessageType('EmptyType', (_message.Message,), {
+ 'DESCRIPTOR' : _PRIMITIVE_EMPTYTYPE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.EmptyType)
+ })
+ ,
+ 'DESCRIPTOR' : _PRIMITIVE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Primitive)
+ })
+_sym_db.RegisterMessage(Primitive)
+_sym_db.RegisterMessage(Primitive.NullType)
+_sym_db.RegisterMessage(Primitive.EmptyType)
+
+Attribute = _reflection.GeneratedProtocolMessageType('Attribute', (_message.Message,), {
+
+ 'Symbol' : _reflection.GeneratedProtocolMessageType('Symbol', (_message.Message,), {
+ 'DESCRIPTOR' : _ATTRIBUTE_SYMBOL,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Attribute.Symbol)
+ })
+ ,
+ 'DESCRIPTOR' : _ATTRIBUTE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Attribute)
+ })
+_sym_db.RegisterMessage(Attribute)
+_sym_db.RegisterMessage(Attribute.Symbol)
+
+Style = _reflection.GeneratedProtocolMessageType('Style', (_message.Message,), {
+
+ 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+ 'DESCRIPTOR' : _STYLE_ENTRY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Style.Entry)
+ })
+ ,
+ 'DESCRIPTOR' : _STYLE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Style)
+ })
+_sym_db.RegisterMessage(Style)
+_sym_db.RegisterMessage(Style.Entry)
+
+Styleable = _reflection.GeneratedProtocolMessageType('Styleable', (_message.Message,), {
+
+ 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+ 'DESCRIPTOR' : _STYLEABLE_ENTRY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Styleable.Entry)
+ })
+ ,
+ 'DESCRIPTOR' : _STYLEABLE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Styleable)
+ })
+_sym_db.RegisterMessage(Styleable)
+_sym_db.RegisterMessage(Styleable.Entry)
+
+Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), {
+
+ 'Element' : _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), {
+ 'DESCRIPTOR' : _ARRAY_ELEMENT,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Array.Element)
+ })
+ ,
+ 'DESCRIPTOR' : _ARRAY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Array)
+ })
+_sym_db.RegisterMessage(Array)
+_sym_db.RegisterMessage(Array.Element)
+
+Plural = _reflection.GeneratedProtocolMessageType('Plural', (_message.Message,), {
+
+ 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), {
+ 'DESCRIPTOR' : _PLURAL_ENTRY,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Plural.Entry)
+ })
+ ,
+ 'DESCRIPTOR' : _PLURAL,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.Plural)
+ })
+_sym_db.RegisterMessage(Plural)
+_sym_db.RegisterMessage(Plural.Entry)
+
+XmlNode = _reflection.GeneratedProtocolMessageType('XmlNode', (_message.Message,), {
+ 'DESCRIPTOR' : _XMLNODE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.XmlNode)
+ })
+_sym_db.RegisterMessage(XmlNode)
+
+XmlElement = _reflection.GeneratedProtocolMessageType('XmlElement', (_message.Message,), {
+ 'DESCRIPTOR' : _XMLELEMENT,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.XmlElement)
+ })
+_sym_db.RegisterMessage(XmlElement)
+
+XmlNamespace = _reflection.GeneratedProtocolMessageType('XmlNamespace', (_message.Message,), {
+ 'DESCRIPTOR' : _XMLNAMESPACE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.XmlNamespace)
+ })
+_sym_db.RegisterMessage(XmlNamespace)
+
+XmlAttribute = _reflection.GeneratedProtocolMessageType('XmlAttribute', (_message.Message,), {
+ 'DESCRIPTOR' : _XMLATTRIBUTE,
+ '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2'
+ # @@protoc_insertion_point(class_scope:aapt.pb.XmlAttribute)
+ })
+_sym_db.RegisterMessage(XmlAttribute)
+
+
+DESCRIPTOR._options = None
+_PRIMITIVE.fields_by_name['dimension_value_deprecated']._options = None
+_PRIMITIVE.fields_by_name['fraction_value_deprecated']._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/third_party/libwebrtc/build/android/gyp/proto/__init__.py b/third_party/libwebrtc/build/android/gyp/proto/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/proto/__init__.py
diff --git a/third_party/libwebrtc/build/android/gyp/test/BUILD.gn b/third_party/libwebrtc/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000000..301a220d03
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,11 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+ sources = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+ deps = [ ":hello_world_java" ]
+ sources = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+ main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000000..10860d8332
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+ public static void main(String[] args) {
+ if (args.length > 0) {
+ System.exit(Integer.parseInt(args[0]));
+ }
+ HelloWorldPrinter.print();
+ }
+}
+
diff --git a/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000000..b09673e21f
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+ public static void print() {
+ System.out.println("Hello, world!");
+ }
+}
+
diff --git a/third_party/libwebrtc/build/android/gyp/turbine.py b/third_party/libwebrtc/build/android/gyp/turbine.py
new file mode 100755
index 0000000000..074550e047
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/turbine.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Wraps the turbine jar and expands @FileArgs."""
+
+import argparse
+import functools
+import logging
+import os
+import shutil
+import sys
+import time
+
+import javac_output_processor
+from util import build_utils
+
+
+def ProcessJavacOutput(output, target_name):
+ output_processor = javac_output_processor.JavacOutputProcessor(target_name)
+ lines = output_processor.Process(output.split('\n'))
+ return '\n'.join(lines)
+
+
+def main(argv):
+ build_utils.InitLogging('TURBINE_DEBUG')
+ argv = build_utils.ExpandFileArgs(argv[1:])
+ parser = argparse.ArgumentParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--target-name', help='Fully qualified GN target name.')
+ parser.add_argument(
+ '--turbine-jar-path', required=True, help='Path to the turbine jar file.')
+ parser.add_argument(
+ '--java-srcjars',
+ action='append',
+ default=[],
+ help='List of srcjars to include in compilation.')
+ parser.add_argument(
+ '--bootclasspath',
+ action='append',
+ default=[],
+ help='Boot classpath for javac. If this is specified multiple times, '
+ 'they will all be appended to construct the classpath.')
+ parser.add_argument(
+ '--java-version',
+ help='Java language version to use in -source and -target args to javac.')
+ parser.add_argument('--classpath', action='append', help='Classpath to use.')
+ parser.add_argument(
+ '--processors',
+ action='append',
+ help='GN list of annotation processor main classes.')
+ parser.add_argument(
+ '--processorpath',
+ action='append',
+ help='GN list of jars that comprise the classpath used for Annotation '
+ 'Processors.')
+ parser.add_argument(
+ '--processor-args',
+ action='append',
+ help='key=value arguments for the annotation processors.')
+ parser.add_argument('--jar-path', help='Jar output path.', required=True)
+ parser.add_argument(
+ '--generated-jar-path',
+ required=True,
+ help='Output path for generated source files.')
+ parser.add_argument('--warnings-as-errors',
+ action='store_true',
+ help='Treat all warnings as errors.')
+ options, unknown_args = parser.parse_known_args(argv)
+
+ options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
+ options.classpath = build_utils.ParseGnList(options.classpath)
+ options.processorpath = build_utils.ParseGnList(options.processorpath)
+ options.processors = build_utils.ParseGnList(options.processors)
+ options.java_srcjars = build_utils.ParseGnList(options.java_srcjars)
+
+ files = []
+ for arg in unknown_args:
+ # Interpret a path prefixed with @ as a file containing a list of sources.
+ if arg.startswith('@'):
+ files.extend(build_utils.ReadSourcesList(arg[1:]))
+
+ cmd = build_utils.JavaCmd(options.warnings_as_errors) + [
+ '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main'
+ ]
+ javac_cmd = []
+
+ # Turbine reads lists from command line args by consuming args until one
+ # starts with double dash (--). Thus command line args should be grouped
+ # together and passed in together.
+ if options.processors:
+ cmd += ['--processors']
+ cmd += options.processors
+
+ if options.java_version:
+ javac_cmd.extend([
+ '-source',
+ options.java_version,
+ '-target',
+ options.java_version,
+ ])
+ if options.java_version == '1.8':
+ # Android's boot jar doesn't contain all java 8 classes.
+ options.bootclasspath.append(build_utils.RT_JAR_PATH)
+
+ if options.bootclasspath:
+ cmd += ['--bootclasspath']
+ for bootclasspath in options.bootclasspath:
+ cmd += bootclasspath.split(':')
+
+ if options.processorpath:
+ cmd += ['--processorpath']
+ cmd += options.processorpath
+
+ if options.processor_args:
+ for arg in options.processor_args:
+ javac_cmd.extend(['-A%s' % arg])
+
+ if options.classpath:
+ cmd += ['--classpath']
+ cmd += options.classpath
+
+ if options.java_srcjars:
+ cmd += ['--source_jars']
+ cmd += options.java_srcjars
+
+ if files:
+ # Use jar_path to ensure paths are relative (needed for goma).
+ files_rsp_path = options.jar_path + '.files_list.txt'
+ with open(files_rsp_path, 'w') as f:
+ f.write(' '.join(files))
+ # Pass source paths as response files to avoid extremely long command lines
+  # that are tedious to debug.
+ cmd += ['--sources']
+ cmd += ['@' + files_rsp_path]
+
+ if javac_cmd:
+ cmd.append('--javacopts')
+ cmd += javac_cmd
+ cmd.append('--') # Terminate javacopts
+
+ # Use AtomicOutput so that output timestamps are not updated when outputs
+ # are not changed.
+ with build_utils.AtomicOutput(options.jar_path) as output_jar, \
+ build_utils.AtomicOutput(options.generated_jar_path) as generated_jar:
+ cmd += ['--output', output_jar.name, '--gensrc_output', generated_jar.name]
+
+ process_javac_output_partial = functools.partial(
+ ProcessJavacOutput, target_name=options.target_name)
+
+ logging.debug('Command: %s', cmd)
+ start = time.time()
+ build_utils.CheckOutput(cmd,
+ print_stdout=True,
+ stdout_filter=process_javac_output_partial,
+ stderr_filter=process_javac_output_partial,
+ fail_on_output=options.warnings_as_errors)
+ end = time.time() - start
+ logging.info('Header compilation took %ss', end)
+
+ if options.depfile:
+ # GN already knows of the java files, so avoid listing individual java files
+ # in the depfile.
+ depfile_deps = (options.bootclasspath + options.classpath +
+ options.processorpath + options.java_srcjars)
+ build_utils.WriteDepfile(options.depfile, options.jar_path, depfile_deps)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/third_party/libwebrtc/build/android/gyp/turbine.pydeps b/third_party/libwebrtc/build/android/gyp/turbine.pydeps
new file mode 100644
index 0000000000..f0b2411e58
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/turbine.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py
+../../gn_helpers.py
+turbine.py
+util/__init__.py
+util/build_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/unused_resources.py b/third_party/libwebrtc/build/android/gyp/unused_resources.py
new file mode 100755
index 0000000000..cdaf4cf5b1
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/unused_resources.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+# Copyright (c) 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import sys
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+from util import resource_utils
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+
+ build_utils.AddDepfileOption(parser)
+ parser.add_argument('--script',
+ required=True,
+ help='Path to the unused resources detector script.')
+ parser.add_argument(
+ '--dependencies-res-zips',
+ required=True,
+ action='append',
+ help='Resources zip archives to investigate for unused resources.')
+ parser.add_argument('--dexes',
+ action='append',
+ required=True,
+ help='Path to dex file, or zip with dex files.')
+ parser.add_argument(
+ '--proguard-mapping',
+ help='Path to proguard mapping file for the optimized dex.')
+ parser.add_argument('--r-text', required=True, help='Path to R.txt')
+ parser.add_argument('--android-manifests',
+ action='append',
+ required=True,
+ help='Path to AndroidManifest')
+ parser.add_argument('--output-config',
+ required=True,
+ help='Path to output the aapt2 config to.')
+ args = build_utils.ExpandFileArgs(args)
+ options = parser.parse_args(args)
+ options.dependencies_res_zips = (build_utils.ParseGnList(
+ options.dependencies_res_zips))
+
+  # If there are no resources, short-circuit early.
+ if not options.dependencies_res_zips:
+ build_utils.Touch(options.output_config)
+ return
+
+ with build_utils.TempDir() as temp_dir:
+ dep_subdirs = []
+ for dependency_res_zip in options.dependencies_res_zips:
+ dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir)
+
+ cmd = [
+ options.script,
+ '--rtxts',
+ options.r_text,
+ '--manifests',
+ ':'.join(options.android_manifests),
+ '--resourceDirs',
+ ':'.join(dep_subdirs),
+ '--dexes',
+ ':'.join(options.dexes),
+ '--outputConfig',
+ options.output_config,
+ ]
+ if options.proguard_mapping:
+ cmd += [
+ '--mapping',
+ options.proguard_mapping,
+ ]
+ build_utils.CheckOutput(cmd)
+
+ if options.depfile:
+ depfile_deps = (options.dependencies_res_zips + options.android_manifests +
+ options.dexes) + [options.r_text]
+ if options.proguard_mapping:
+ depfile_deps.append(options.proguard_mapping)
+ build_utils.WriteDepfile(options.depfile, options.output_config,
+ depfile_deps)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/unused_resources.pydeps b/third_party/libwebrtc/build/android/gyp/unused_resources.pydeps
new file mode 100644
index 0000000000..b821d70614
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/unused_resources.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+unused_resources.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/third_party/libwebrtc/build/android/gyp/util/__init__.py b/third_party/libwebrtc/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000000..96196cffb2
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/third_party/libwebrtc/build/android/gyp/util/build_utils.py b/third_party/libwebrtc/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000000..6469f762cc
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/build_utils.py
@@ -0,0 +1,725 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for GN action()s."""
+
import atexit
import collections
import contextlib
import filecmp
import fnmatch
import json
import logging
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import time
import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, os.pardir))
+import gn_helpers
+
+# Use relative paths to improved hermetic property of build scripts.
+DIR_SOURCE_ROOT = os.path.relpath(
+ os.environ.get(
+ 'CHECKOUT_SOURCE_ROOT',
+ os.path.join(
+ os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+ os.pardir)))
+JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current')
+JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac')
+JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap')
+RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras',
+ 'java_8', 'jre', 'lib', 'rt.jar')
+
+try:
+ string_types = basestring
+except NameError:
+ string_types = (str, bytes)
+
+
+def JavaCmd(verify=True, xmx='1G'):
+ ret = [os.path.join(JAVA_HOME, 'bin', 'java')]
+ # Limit heap to avoid Java not GC'ing when it should, and causing
+ # bots to OOM when many java commands are runnig at the same time
+ # https://crbug.com/1098333
+ ret += ['-Xmx' + xmx]
+
+ # Disable bytecode verification for local builds gives a ~2% speed-up.
+ if not verify:
+ ret += ['-noverify']
+
+ return ret
+
+
+@contextlib.contextmanager
+def TempDir(**kwargs):
+ dirname = tempfile.mkdtemp(**kwargs)
+ try:
+ yield dirname
+ finally:
+ shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+ try:
+ os.makedirs(dir_path)
+ except OSError:
+ pass
+
+
+def DeleteDirectory(dir_path):
+ if os.path.exists(dir_path):
+ shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+ if fail_if_missing and not os.path.exists(path):
+ raise Exception(path + ' doesn\'t exist.')
+
+ MakeDirectory(os.path.dirname(path))
+ with open(path, 'a'):
+ os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter='*'):
+ files = []
+ for root, _dirnames, filenames in os.walk(directory):
+ matched_files = fnmatch.filter(filenames, filename_filter)
+ files.extend((os.path.join(root, f) for f in matched_files))
+ return files
+
+
+def ParseGnList(value):
+ """Converts a "GN-list" command-line parameter into a list.
+
+ Conversions handled:
+ * None -> []
+ * '' -> []
+ * 'asdf' -> ['asdf']
+ * '["a", "b"]' -> ['a', 'b']
+ * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (flattened list)
+
+ The common use for this behavior is in the Android build where things can
+ take lists of @FileArg references that are expanded via ExpandFileArgs.
+ """
+ # Convert None to [].
+ if not value:
+ return []
+ # Convert a list of GN lists to a flattened list.
+ if isinstance(value, list):
+ ret = []
+ for arg in value:
+ ret.extend(ParseGnList(arg))
+ return ret
+ # Convert normal GN list.
+ if value.startswith('['):
+ return gn_helpers.GNValueParser(value).ParseList()
+ # Convert a single string value to a list.
+ return [value]
+
+
+def CheckOptions(options, parser, required=None):
+ if not required:
+ return
+ for option_name in required:
+ if getattr(options, option_name) is None:
+ parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+ old_dump = None
+ if os.path.exists(path):
+ with open(path, 'r') as oldfile:
+ old_dump = oldfile.read()
+
+ new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+ if not only_if_changed or old_dump != new_dump:
+ with open(path, 'w') as outfile:
+ outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def AtomicOutput(path, only_if_changed=True, mode='w+b'):
+ """Helper to prevent half-written outputs.
+
+ Args:
+ path: Path to the final output file, which will be written atomically.
+ only_if_changed: If True (the default), do not touch the filesystem
+ if the content has not changed.
+ mode: The mode to open the file in (str).
+ Returns:
+ A python context manager that yelds a NamedTemporaryFile instance
+ that must be used by clients to write the data to. On exit, the
+ manager will try to replace the final output file with the
+ temporary one if necessary. The temporary file is always destroyed
+ on exit.
+ Example:
+ with build_utils.AtomicOutput(output_path) as tmp_file:
+ subprocess.check_call(['prog', '--output', tmp_file.name])
+ """
+ # Create in same directory to ensure same filesystem when moving.
+ dirname = os.path.dirname(path)
+ if not os.path.exists(dirname):
+ MakeDirectory(dirname)
+ with tempfile.NamedTemporaryFile(
+ mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f:
+ try:
+ yield f
+
+ # file should be closed before comparison/move.
+ f.close()
+ if not (only_if_changed and os.path.exists(path) and
+ filecmp.cmp(f.name, path)):
+ shutil.move(f.name, path)
+ finally:
+ if os.path.exists(f.name):
+ os.unlink(f.name)
+
+
+class CalledProcessError(Exception):
+ """This exception is raised when the process run by CheckOutput
+ exits with a non-zero exit code."""
+
+ def __init__(self, cwd, args, output):
+ super(CalledProcessError, self).__init__()
+ self.cwd = cwd
+ self.args = args
+ self.output = output
+
+ def __str__(self):
+ # A user should be able to simply copy and paste the command that failed
+ # into their shell.
+ copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+ ' '.join(map(pipes.quote, self.args)))
+ return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def FilterLines(output, filter_string):
+ """Output filter from build_utils.CheckOutput.
+
+ Args:
+ output: Executable output as from build_utils.CheckOutput.
+ filter_string: An RE string that will filter (remove) matching
+ lines from |output|.
+
+ Returns:
+ The filtered output, as a single string.
+ """
+ re_filter = re.compile(filter_string)
+ return '\n'.join(
+ line for line in output.split('\n') if not re_filter.search(line))
+
+
+def FilterReflectiveAccessJavaWarnings(output):
+ """Filters out warnings about illegal reflective access operation.
+
+ These warnings were introduced in Java 9, and generally mean that dependencies
+ need to be updated.
+ """
+ # WARNING: An illegal reflective access operation has occurred
+ # WARNING: Illegal reflective access by ...
+ # WARNING: Please consider reporting this to the maintainers of ...
+ # WARNING: Use --illegal-access=warn to enable warnings of further ...
+ # WARNING: All illegal access operations will be denied in a future release
+ return FilterLines(
+ output, r'WARNING: ('
+ 'An illegal reflective|'
+ 'Illegal reflective access|'
+ 'Please consider reporting this to|'
+ 'Use --illegal-access=warn|'
+ 'All illegal access operations)')
+
+
+# This can be used in most cases like subprocess.check_output(). The output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
+def CheckOutput(args,
+ cwd=None,
+ env=None,
+ print_stdout=False,
+ print_stderr=True,
+ stdout_filter=None,
+ stderr_filter=None,
+ fail_on_output=True,
+ fail_func=lambda returncode, stderr: returncode != 0):
+ if not cwd:
+ cwd = os.getcwd()
+
+ logging.info('CheckOutput: %s', ' '.join(args))
+ child = subprocess.Popen(args,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
+ stdout, stderr = child.communicate()
+
+ # For Python3 only:
+ if isinstance(stdout, bytes) and sys.version_info >= (3, ):
+ stdout = stdout.decode('utf-8')
+ stderr = stderr.decode('utf-8')
+
+ if stdout_filter is not None:
+ stdout = stdout_filter(stdout)
+
+ if stderr_filter is not None:
+ stderr = stderr_filter(stderr)
+
+ if fail_func and fail_func(child.returncode, stderr):
+ raise CalledProcessError(cwd, args, stdout + stderr)
+
+ if print_stdout:
+ sys.stdout.write(stdout)
+ if print_stderr:
+ sys.stderr.write(stderr)
+
+ has_stdout = print_stdout and stdout
+ has_stderr = print_stderr and stderr
+ if fail_on_output and (has_stdout or has_stderr):
+ MSG = """\
+Command failed because it wrote to {}.
+You can often set treat_warnings_as_errors=false to not treat output as \
+failure (useful when developing locally)."""
+ if has_stdout and has_stderr:
+ stream_string = 'stdout and stderr'
+ elif has_stdout:
+ stream_string = 'stdout'
+ else:
+ stream_string = 'stderr'
+ raise CalledProcessError(cwd, args, MSG.format(stream_string))
+
+ return stdout
+
+
+def GetModifiedTime(path):
+ # For a symlink, the modified time should be the greater of the link's
+ # modified time and the modified time of the target.
+ return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+ if not os.path.exists(output):
+ return True
+
+ output_time = GetModifiedTime(output)
+ for i in inputs:
+ if GetModifiedTime(i) > output_time:
+ return True
+ return False
+
+
+def _CheckZipPath(name):
+ if os.path.normpath(name) != name:
+ raise Exception('Non-canonical zip path: %s' % name)
+ if os.path.isabs(name):
+ raise Exception('Absolute zip path: %s' % name)
+
+
+def _IsSymlink(zip_file, name):
+ zi = zip_file.getinfo(name)
+
+ # The two high-order bytes of ZipInfo.external_attr represent
+ # UNIX permissions and file type bits.
+ return stat.S_ISLNK(zi.external_attr >> 16)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+ predicate=None):
+ if path is None:
+ path = os.getcwd()
+ elif not os.path.exists(path):
+ MakeDirectory(path)
+
+ if not zipfile.is_zipfile(zip_path):
+ raise Exception('Invalid zip file: %s' % zip_path)
+
+ extracted = []
+ with zipfile.ZipFile(zip_path) as z:
+ for name in z.namelist():
+ if name.endswith('/'):
+ MakeDirectory(os.path.join(path, name))
+ continue
+ if pattern is not None:
+ if not fnmatch.fnmatch(name, pattern):
+ continue
+ if predicate and not predicate(name):
+ continue
+ _CheckZipPath(name)
+ if no_clobber:
+ output_path = os.path.join(path, name)
+ if os.path.exists(output_path):
+ raise Exception(
+ 'Path already exists from zip: %s %s %s'
+ % (zip_path, name, output_path))
+ if _IsSymlink(z, name):
+ dest = os.path.join(path, name)
+ MakeDirectory(os.path.dirname(dest))
+ os.symlink(z.read(name), dest)
+ extracted.append(dest)
+ else:
+ z.extract(name, path)
+ extracted.append(os.path.join(path, name))
+
+ return extracted
+
+
+def HermeticDateTime(timestamp=None):
+ """Returns a constant ZipInfo.date_time tuple.
+
+ Args:
+ timestamp: Unix timestamp to use for files in the archive.
+
+ Returns:
+ A ZipInfo.date_time tuple for Jan 1, 2001, or the given timestamp.
+ """
+ if not timestamp:
+ return (2001, 1, 1, 0, 0, 0)
+ utc_time = time.gmtime(timestamp)
+ return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour,
+ utc_time.tm_min, utc_time.tm_sec)
+
+
+def HermeticZipInfo(*args, **kwargs):
+ """Creates a zipfile.ZipInfo with a constant timestamp and external_attr.
+
+ If a date_time value is not provided in the positional or keyword arguments,
+ the default value from HermeticDateTime is used.
+
+ Args:
+ See zipfile.ZipInfo.
+
+ Returns:
+ A zipfile.ZipInfo.
+ """
+ # The caller may have provided a date_time either as a positional parameter
+ # (args[1]) or as a keyword parameter. Use the default hermetic date_time if
+ # none was provided.
+ date_time = None
+ if len(args) >= 2:
+ date_time = args[1]
+ elif 'date_time' in kwargs:
+ date_time = kwargs['date_time']
+ if not date_time:
+ kwargs['date_time'] = HermeticDateTime()
+ ret = zipfile.ZipInfo(*args, **kwargs)
+ ret.external_attr = (0o644 << 16)
+ return ret
+
+
+def AddToZipHermetic(zip_file,
+ zip_path,
+ src_path=None,
+ data=None,
+ compress=None,
+ date_time=None):
+ """Adds a file to the given ZipFile with a hard-coded modified time.
+
+ Args:
+ zip_file: ZipFile instance to add the file to.
+ zip_path: Destination path within the zip file (or ZipInfo instance).
+ src_path: Path of the source file. Mutually exclusive with |data|.
+ data: File data as a string.
+ compress: Whether to enable compression. Default is taken from ZipFile
+ constructor.
+ date_time: The last modification date and time for the archive member.
+ """
+ assert (src_path is None) != (data is None), (
+ '|src_path| and |data| are mutually exclusive.')
+ if isinstance(zip_path, zipfile.ZipInfo):
+ zipinfo = zip_path
+ zip_path = zipinfo.filename
+ else:
+ zipinfo = HermeticZipInfo(filename=zip_path, date_time=date_time)
+
+ _CheckZipPath(zip_path)
+
+ if src_path and os.path.islink(src_path):
+ zipinfo.filename = zip_path
+ zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink
+ zip_file.writestr(zipinfo, os.readlink(src_path))
+ return
+
+ # zipfile.write() does
+ # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16
+ # but we want to use _HERMETIC_FILE_ATTR, so manually set
+ # the few attr bits we care about.
+ if src_path:
+ st = os.stat(src_path)
+ for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+ if st.st_mode & mode:
+ zipinfo.external_attr |= mode << 16
+
+ if src_path:
+ with open(src_path, 'rb') as f:
+ data = f.read()
+
+ # zipfile will deflate even when it makes the file bigger. To avoid
+ # growing files, disable compression at an arbitrary cut off point.
+ if len(data) < 16:
+ compress = False
+
+ # None converts to ZIP_STORED, when passed explicitly rather than the
+ # default passed to the ZipFile constructor.
+ compress_type = zip_file.compression
+ if compress is not None:
+ compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs,
+ output,
+ base_dir=None,
+ compress_fn=None,
+ zip_prefix_path=None,
+ timestamp=None):
+ """Creates a zip file from a list of files.
+
+ Args:
+ inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+ output: Path, fileobj, or ZipFile instance to add files to.
+ base_dir: Prefix to strip from inputs.
+ compress_fn: Applied to each input to determine whether or not to compress.
+ By default, items will be |zipfile.ZIP_STORED|.
+ zip_prefix_path: Path prepended to file path in zip file.
+ timestamp: Unix timestamp to use for files in the archive.
+ """
+ if base_dir is None:
+ base_dir = '.'
+ input_tuples = []
+ for tup in inputs:
+ if isinstance(tup, string_types):
+ tup = (os.path.relpath(tup, base_dir), tup)
+ if tup[0].startswith('..'):
+ raise Exception('Invalid zip_path: ' + tup[0])
+ input_tuples.append(tup)
+
+ # Sort by zip path to ensure stable zip ordering.
+ input_tuples.sort(key=lambda tup: tup[0])
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ date_time = HermeticDateTime(timestamp)
+ try:
+ for zip_path, fs_path in input_tuples:
+ if zip_prefix_path:
+ zip_path = os.path.join(zip_prefix_path, zip_path)
+ compress = compress_fn(zip_path) if compress_fn else None
+ AddToZipHermetic(out_zip,
+ zip_path,
+ src_path=fs_path,
+ compress=compress,
+ date_time=date_time)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
+
+
+def ZipDir(output, base_dir, compress_fn=None, zip_prefix_path=None):
+ """Creates a zip file from a directory."""
+ inputs = []
+ for root, _, files in os.walk(base_dir):
+ for f in files:
+ inputs.append(os.path.join(root, f))
+
+ if isinstance(output, zipfile.ZipFile):
+ DoZip(
+ inputs,
+ output,
+ base_dir,
+ compress_fn=compress_fn,
+ zip_prefix_path=zip_prefix_path)
+ else:
+ with AtomicOutput(output) as f:
+ DoZip(
+ inputs,
+ f,
+ base_dir,
+ compress_fn=compress_fn,
+ zip_prefix_path=zip_prefix_path)
+
+
+def MatchesGlob(path, filters):
+ """Returns whether the given path matches any of the given glob patterns."""
+ return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, input_zips, path_transform=None, compress=None):
+ """Combines all files from |input_zips| into |output|.
+
+ Args:
+ output: Path, fileobj, or ZipFile instance to add files to.
+ input_zips: Iterable of paths to zip files to merge.
+ path_transform: Called for each entry path. Returns a new path, or None to
+ skip the file.
+ compress: Overrides compression setting from origin zip entries.
+ """
+ path_transform = path_transform or (lambda p: p)
+ added_names = set()
+
+ out_zip = output
+ if not isinstance(output, zipfile.ZipFile):
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
+ for in_file in input_zips:
+ with zipfile.ZipFile(in_file, 'r') as in_zip:
+ for info in in_zip.infolist():
+ # Ignore directories.
+ if info.filename[-1] == '/':
+ continue
+ dst_name = path_transform(info.filename)
+ if not dst_name:
+ continue
+ already_added = dst_name in added_names
+ if not already_added:
+ if compress is not None:
+ compress_entry = compress
+ else:
+ compress_entry = info.compress_type != zipfile.ZIP_STORED
+ AddToZipHermetic(
+ out_zip,
+ dst_name,
+ data=in_zip.read(info),
+ compress=compress_entry)
+ added_names.add(dst_name)
+ finally:
+ if output is not out_zip:
+ out_zip.close()
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+ """Gets the list of all transitive dependencies in sorted order.
+
+ There should be no cycles in the dependency graph (crashes if cycles exist).
+
+ Args:
+ top: A list of the top level nodes
+ deps_func: A function that takes a node and returns a list of its direct
+ dependencies.
+ Returns:
+ A list of all transitive dependencies of nodes in top, in order (a node will
+ appear in the list at a higher index than all of its dependencies).
+ """
+ # Find all deps depth-first, maintaining original order in the case of ties.
+ deps_map = collections.OrderedDict()
+ def discover(nodes):
+ for node in nodes:
+ if node in deps_map:
+ continue
+ deps = deps_func(node)
+ discover(deps)
+ deps_map[node] = deps
+
+ discover(top)
+ return list(deps_map)
+
+
+def InitLogging(enabling_env):
+ logging.basicConfig(
+ level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
+ format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
+ script_name = os.path.basename(sys.argv[0])
+ logging.info('Started (%s)', script_name)
+
+ my_pid = os.getpid()
+
+ def log_exit():
+ # Do not log for fork'ed processes.
+ if os.getpid() == my_pid:
+ logging.info("Job's done (%s)", script_name)
+
+ atexit.register(log_exit)
+
+
+def AddDepfileOption(parser):
+ # TODO(agrieve): Get rid of this once we've moved to argparse.
+ if hasattr(parser, 'add_option'):
+ func = parser.add_option
+ else:
+ func = parser.add_argument
+ func('--depfile',
+ help='Path to depfile (refer to `gn help depfile`)')
+
+
+def WriteDepfile(depfile_path, first_gn_output, inputs=None):
+ assert depfile_path != first_gn_output # http://crbug.com/646165
+ assert not isinstance(inputs, string_types) # Easy mistake to make
+ inputs = inputs or []
+ MakeDirectory(os.path.dirname(depfile_path))
+ # Ninja does not support multiple outputs in depfiles.
+ with open(depfile_path, 'w') as depfile:
+ depfile.write(first_gn_output.replace(' ', '\\ '))
+ depfile.write(': \\\n ')
+ depfile.write(' \\\n '.join(i.replace(' ', '\\ ') for i in inputs))
+ depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+ """Replaces file-arg placeholders in args.
+
+ These placeholders have the form:
+ @FileArg(filename:key1:key2:...:keyn)
+
+ The value of such a placeholder is calculated by reading 'filename' as json.
+ And then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
+ suffix the (intermediate) value will be interpreted as a single item list and
+ the single item will be returned or used for further traversal.
+
+ Note: This intentionally does not return the list of files that appear in such
+ placeholders. An action that uses file-args *must* know the paths of those
+ files prior to the parsing of the arguments (typically by explicitly listing
+ them in the action's inputs in build files).
+ """
+ new_args = list(args)
+ file_jsons = dict()
+ r = re.compile('@FileArg\((.*?)\)')
+ for i, arg in enumerate(args):
+ match = r.search(arg)
+ if not match:
+ continue
+
+ def get_key(key):
+ if key.endswith('[]'):
+ return key[:-2], True
+ return key, False
+
+ lookup_path = match.group(1).split(':')
+ file_path, _ = get_key(lookup_path[0])
+ if not file_path in file_jsons:
+ with open(file_path) as f:
+ file_jsons[file_path] = json.load(f)
+
+ expansion = file_jsons
+ for k in lookup_path:
+ k, flatten = get_key(k)
+ expansion = expansion[k]
+ if flatten:
+ if not isinstance(expansion, list) or not len(expansion) == 1:
+ raise Exception('Expected single item list but got %s' % expansion)
+ expansion = expansion[0]
+
+ # This should match ParseGnList. The output is either a GN-formatted list
+ # or a literal (with no quotes).
+ if isinstance(expansion, list):
+ new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
+ arg[match.end():])
+ else:
+ new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]
+
+ return new_args
+
+
+def ReadSourcesList(sources_list_file_name):
+ """Reads a GN-written file containing list of file names and returns a list.
+
+ Note that this function should not be used to parse response files.
+ """
+ with open(sources_list_file_name) as f:
+ return [file_name.strip() for file_name in f]
diff --git a/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py
new file mode 100755
index 0000000000..008ea11748
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+_DEPS = collections.OrderedDict()
+_DEPS['a'] = []
+_DEPS['b'] = []
+_DEPS['c'] = ['a']
+_DEPS['d'] = ['a']
+_DEPS['e'] = ['f']
+_DEPS['f'] = ['a', 'd']
+_DEPS['g'] = []
+_DEPS['h'] = ['d', 'b', 'f']
+_DEPS['i'] = ['f']
+
+
+class BuildUtilsTest(unittest.TestCase):
+ def testGetSortedTransitiveDependencies_all(self):
+ TOP = _DEPS.keys()
+ EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leaves(self):
+ TOP = ['c', 'e', 'g', 'h', 'i']
+ EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+ def testGetSortedTransitiveDependencies_leavesReverse(self):
+ TOP = ['i', 'h', 'g', 'e', 'c']
+ EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c']
+ actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get)
+ self.assertEqual(EXPECTED, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/diff_utils.py b/third_party/libwebrtc/build/android/gyp/util/diff_utils.py
new file mode 100644
index 0000000000..530a688191
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/diff_utils.py
@@ -0,0 +1,127 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import difflib
+from util import build_utils
+
+
+def _SkipOmitted(line):
+ """
+ Skip lines that are to be intentionally omitted from the expectations file.
+
+ This is required when the file to be compared against expectations contains
+ a line that changes from build to build because - for instance - it contains
+ version information.
+ """
+ if line.rstrip().endswith('# OMIT FROM EXPECTATIONS'):
+ return '# THIS LINE WAS OMITTED\n'
+ return line
+
+
+def _GenerateDiffWithOnlyAdditons(expected_path, actual_data):
+ """Generate a diff that only contains additions"""
+ # Ignore blank lines when creating the diff to cut down on whitespace-only
+ # lines in the diff. Also remove trailing whitespaces and add the new lines
+ # manually (ndiff expects new lines but we don't care about trailing
+ # whitespace).
+ with open(expected_path) as expected:
+ expected_lines = [l for l in expected.readlines() if l.strip()]
+ actual_lines = [
+ '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip()
+ ]
+
+ diff = difflib.ndiff(expected_lines, actual_lines)
+ filtered_diff = (l for l in diff if l.startswith('+'))
+ return ''.join(filtered_diff)
+
+
+def _DiffFileContents(expected_path, actual_data):
+ """Check file contents for equality and return the diff or None."""
+ # Remove all trailing whitespace and add it explicitly in the end.
+ with open(expected_path) as f_expected:
+ expected_lines = [l.rstrip() for l in f_expected.readlines()]
+ actual_lines = [
+ _SkipOmitted(line).rstrip() for line in actual_data.splitlines()
+ ]
+
+ if expected_lines == actual_lines:
+ return None
+
+ expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
+
+ diff = difflib.unified_diff(
+ expected_lines,
+ actual_lines,
+ fromfile=os.path.join('before', expected_path),
+ tofile=os.path.join('after', expected_path),
+ n=0,
+ lineterm='',
+ )
+
+ return '\n'.join(diff)
+
+
+def AddCommandLineFlags(parser):
+ group = parser.add_argument_group('Expectations')
+ group.add_argument(
+ '--expected-file',
+ help='Expected contents for the check. If --expected-file-base is set, '
+ 'this is a diff of --actual-file and --expected-file-base.')
+ group.add_argument(
+ '--expected-file-base',
+ help='File to diff against before comparing to --expected-file.')
+ group.add_argument('--actual-file',
+ help='Path to write actual file (for reference).')
+ group.add_argument('--failure-file',
+ help='Write to this file if expectations fail.')
+ group.add_argument('--fail-on-expectations',
+ action="store_true",
+ help='Fail on expectation mismatches.')
+ group.add_argument('--only-verify-expectations',
+ action='store_true',
+ help='Verify the expectation and exit.')
+
+
+def CheckExpectations(actual_data, options, custom_msg=''):
+ if options.actual_file:
+ with build_utils.AtomicOutput(options.actual_file) as f:
+ f.write(actual_data.encode('utf8'))
+ if options.expected_file_base:
+ actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base,
+ actual_data)
+ diff_text = _DiffFileContents(options.expected_file, actual_data)
+
+ if not diff_text:
+ fail_msg = ''
+ else:
+ fail_msg = """
+Expectations need updating:
+https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expectations/README.md
+
+LogDog tip: Use "Raw log" or "Switch to lite mode" before copying:
+https://bugs.chromium.org/p/chromium/issues/detail?id=984616
+
+{}
+
+To update expectations, run:
+########### START ###########
+ patch -p1 <<'END_DIFF'
+{}
+END_DIFF
+############ END ############
+""".format(custom_msg, diff_text)
+
+ sys.stderr.write(fail_msg)
+
+ if fail_msg and options.fail_on_expectations:
+ # Don't write failure file when failing on expectations or else the target
+ # will not be re-run on subsequent ninja invocations.
+ sys.exit(1)
+
+ if options.failure_file:
+ with open(options.failure_file, 'w') as f:
+ f.write(fail_msg)
diff --git a/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py b/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py
new file mode 100644
index 0000000000..975945510e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/jar_info_utils.py
@@ -0,0 +1,59 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
# Utilities to read and write .jar.info files.
#
# A .jar.info file contains a simple mapping from fully-qualified Java class
# names to the source file that actually defines it.
#
# For APKs, the .jar.info maps the class names to the .jar file which
# contains its .class definition instead.
+
+
+def ReadAarSourceInfo(info_path):
+ """Returns the source= path from an .aar's source.info file."""
+ # The .info looks like: "source=path/to/.aar\n".
+ with open(info_path) as f:
+ return f.read().rstrip().split('=', 1)[1]
+
+
+def ParseJarInfoFile(info_path):
+ """Parse a given .jar.info file as a dictionary.
+
+ Args:
+ info_path: input .jar.info file path.
+ Returns:
+ A new dictionary mapping fully-qualified Java class names to file paths.
+ """
+ info_data = dict()
+ if os.path.exists(info_path):
+ with open(info_path, 'r') as info_file:
+ for line in info_file:
+ line = line.strip()
+ if line:
+ fully_qualified_name, path = line.split(',', 1)
+ info_data[fully_qualified_name] = path
+ return info_data
+
+
+def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
+ """Generate a .jar.info file from a given dictionary.
+
+ Args:
+ output_obj: output file object.
+ info_data: a mapping of fully qualified Java class names to filepaths.
+ source_file_map: an optional mapping from java source file paths to the
+ corresponding source .srcjar. This is because info_data may contain the
+ path of Java source files that where extracted from an .srcjar into a
+ temporary location.
+ """
+ for fully_qualified_name, path in sorted(info_data.items()):
+ if source_file_map and path in source_file_map:
+ path = source_file_map[path]
+ assert not path.startswith('/tmp'), (
+ 'Java file path should not be in temp dir: {}'.format(path))
+ output_obj.write(('{},{}\n'.format(fully_qualified_name,
+ path)).encode('utf8'))
diff --git a/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py b/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py
new file mode 100644
index 0000000000..5180400d61
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/java_cpp_utils.py
@@ -0,0 +1,194 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import sys
+
+
def GetScriptName():
  """Returns the file name (no directory) of the currently running script."""
  script_path = os.path.abspath(sys.argv[0])
  return os.path.basename(script_path)
+
+
def GetJavaFilePath(java_package, class_name):
  """Returns the relative path of the .java file for |class_name|.

  The package's dots become path separators and '.java' is appended.
  """
  package_dir = java_package.replace('.', os.path.sep)
  return os.path.join(package_dir, class_name + '.java')
+
+
def KCamelToShouty(s):
  """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE.

  kFooBar -> FOO_BAR
  FooBar -> FOO_BAR
  FooBAR9 -> FOO_BAR9
  FooBARBaz -> FOO_BAR_BAZ

  Strings that are not (k)CamelCase are returned unchanged.
  """
  if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s):
    return s
  # Applied in order: strip the leading k, treat "WebView" like one word,
  # add _ between title words and anything else, then add _ between
  # lower -> upper transitions.
  transforms = (
      (r'^k', ''),
      (r'WebView', r'Webview'),
      (r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2'),
      (r'([^A-Z_0-9])([A-Z])', r'\1_\2'),
  )
  for pattern, replacement in transforms:
    s = re.sub(pattern, replacement, s)
  return s.upper()
+
+
class JavaString(object):
  """A Java String constant converted from a C++ string constant.

  NOTE(review): the (name, value, comments) constructor matches the
  CppConstantParser.Delegate.CreateJavaConstant() signature -- presumably
  instances are created there; confirm against callers.
  """

  def __init__(self, name, value, comments):
    # Names arrive in C++ style (e.g. kFooBar); convert to SHOUTY_CASE.
    self.name = KCamelToShouty(name)
    self.value = value
    # Prefix each comment line with a space for the generated Java layout.
    self.comments = '\n'.join(' ' + x for x in comments)

  def Format(self):
    """Returns the Java "public static final String" declaration text."""
    return '%s\n public static final String %s = %s;' % (
        self.comments, self.name, self.value)
+
+
def ParseTemplateFile(lines):
  """Scans a Java template for its package and class name.

  Args:
    lines: iterable of template source lines.
  Returns:
    (package, class_name) tuple; either may be '' when not found. Scanning
    stops at the first class declaration.
  """
  package_pattern = re.compile(r'^package (.*);')
  class_pattern = re.compile(r'.*class (.*) {')
  package = ''
  class_name = ''
  for line in lines:
    package_match = package_pattern.match(line)
    if package_match:
      package = package_match.group(1)
    class_match = class_pattern.match(line)
    if class_match:
      class_name = class_match.group(1)
      break
  return package, class_name
+
+
+# TODO(crbug.com/937282): Work will be needed if we want to annotate specific
+# constants in the file to be parsed.
class CppConstantParser(object):
  """Parses C++ constants, retaining their comments.

  The Delegate subclass is responsible for matching and extracting the
  constant's variable name and value, as well as generating an object to
  represent the Java representation of this value.
  """
  # Captures a "// ..." comment (group 1); allows leading whitespace and
  # requires a space after the slashes.
  SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')

  class Delegate(object):
    """Customization hooks consumed by CppConstantParser."""

    def ExtractConstantName(self, line):
      """Extracts a constant's name from line or None if not a match."""
      raise NotImplementedError()

    def ExtractValue(self, line):
      """Extracts a constant's value from line or None if not a match."""
      raise NotImplementedError()

    def CreateJavaConstant(self, name, value, comments):
      """Creates an object representing the Java analog of a C++ constant.

      CppConstantParser will not interact with the object created by this
      method. Instead, it will store this value in a list and return a list of
      all objects from the Parse() method. In this way, the caller may define
      whatever class suits their need.

      Args:
        name: the constant's variable name, as extracted by
          ExtractConstantName()
        value: the constant's value, as extracted by ExtractValue()
        comments: the code comments describing this constant
      """
      raise NotImplementedError()

  def __init__(self, delegate, lines):
    # State machine: comments accumulate until a constant name is seen; the
    # value may complete on the same line or a following one.
    self._delegate = delegate
    self._lines = lines
    self._in_variable = False  # Saw a name; still waiting for its value.
    self._in_comment = False  # Currently accumulating comment lines.
    # NOTE(review): _package is never read or written elsewhere in this
    # class; looks like dead state -- confirm and remove.
    self._package = ''
    self._current_comments = []
    self._current_name = ''
    self._current_value = ''
    self._constants = []

  def _ExtractVariable(self, line):
    # NOTE(review): StringFileParser is not defined in this module, so
    # calling this raises NameError. Appears to be dead code (the live path
    # is self._delegate.ExtractConstantName()) -- confirm and remove.
    match = StringFileParser.STRING_RE.match(line)
    return match.group(1) if match else None

  def _ExtractValue(self, line):
    # NOTE(review): same as _ExtractVariable -- StringFileParser does not
    # exist here; the live path is self._delegate.ExtractValue().
    match = StringFileParser.VALUE_RE.search(line)
    return match.group(1) if match else None

  def _Reset(self):
    # Clears per-constant state after a constant is emitted or abandoned.
    self._current_comments = []
    self._current_name = ''
    self._current_value = ''
    self._in_variable = False
    self._in_comment = False

  def _AppendConstant(self):
    # Emits the accumulated (name, value, comments) through the delegate.
    self._constants.append(
        self._delegate.CreateJavaConstant(self._current_name,
                                          self._current_value,
                                          self._current_comments))
    self._Reset()

  def _ParseValue(self, line):
    # Continuation line for a previously-seen name: either completes the
    # constant or abandons it when no value is found on this line.
    current_value = self._delegate.ExtractValue(line)
    if current_value is not None:
      self._current_value = current_value
      self._AppendConstant()
    else:
      self._Reset()

  def _ParseComment(self, line):
    # Returns True (and accumulates the comment) when |line| is a comment.
    comment_line = CppConstantParser.SINGLE_LINE_COMMENT_RE.match(line)
    if comment_line:
      self._current_comments.append(comment_line.groups()[0])
      self._in_comment = True
      self._in_variable = True
      return True
    else:
      self._in_comment = False
      return False

  def _ParseVariable(self, line):
    # Returns True when |line| declares a constant name; if the value is on
    # the same line the constant is emitted immediately.
    current_name = self._delegate.ExtractConstantName(line)
    if current_name is not None:
      self._current_name = current_name
      current_value = self._delegate.ExtractValue(line)
      if current_value is not None:
        self._current_value = current_value
        self._AppendConstant()
      else:
        self._in_variable = True
      return True
    else:
      self._in_variable = False
      return False

  def _ParseLine(self, line):
    # Dispatch on parser state; branch order matters (inside a comment run,
    # comment continuation is checked before a new variable declaration).
    if not self._in_variable:
      if not self._ParseVariable(line):
        self._ParseComment(line)
      return

    if self._in_comment:
      if self._ParseComment(line):
        return
      if not self._ParseVariable(line):
        self._Reset()
      return

    if self._in_variable:
      self._ParseValue(line)

  def Parse(self):
    """Returns a list of objects representing C++ constants.

    Each object in the list was created by Delegate.CreateJavaConstant().
    """
    for line in self._lines:
      self._ParseLine(line)
    return self._constants
diff --git a/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py b/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py
new file mode 100644
index 0000000000..a517708b59
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/manifest_utils.py
@@ -0,0 +1,321 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains common helpers for working with Android manifests."""
+
+import hashlib
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+from xml.etree import ElementTree
+
+ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android'
+TOOLS_NAMESPACE = 'http://schemas.android.com/tools'
+DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution'
+EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml'))
+# When normalizing for expectation matching, wrap these tags when they are long
+# or else they become very hard to read.
+_WRAP_CANDIDATES = (
+ '<manifest',
+ '<application',
+ '<activity',
+ '<provider',
+ '<receiver',
+ '<service',
+)
+# Don't wrap lines shorter than this.
+_WRAP_LINE_LENGTH = 100
+
+_xml_namespace_initialized = False
+
+
def _RegisterElementTreeNamespaces():
  """Registers the android/tools/dist XML prefixes with ElementTree (once)."""
  global _xml_namespace_initialized
  if _xml_namespace_initialized:
    return
  _xml_namespace_initialized = True
  for prefix, uri in (('android', ANDROID_NAMESPACE),
                      ('tools', TOOLS_NAMESPACE),
                      ('dist', DIST_NAMESPACE)):
    ElementTree.register_namespace(prefix, uri)
+
+
def ParseManifest(path):
  """Parses an AndroidManifest.xml using ElementTree.

  Registers required namespaces and creates an <application> node if one is
  missing.

  Args:
    path: Path of the manifest file to parse.
  Returns:
    Tuple of:
      doc: Root xml document.
      manifest_node: the <manifest> node.
      app_node: the <application> node.
  """
  _RegisterElementTreeNamespaces()
  doc = ElementTree.parse(path)
  root = doc.getroot()
  # ElementTree.find does not work if the required tag is the root.
  manifest_node = root if root.tag == 'manifest' else doc.find('manifest')

  app_node = doc.find('application')
  if app_node is None:
    app_node = ElementTree.SubElement(manifest_node, 'application')

  return doc, manifest_node, app_node
+
+
def SaveManifest(doc, path):
  """Atomically writes |doc| (an ElementTree) to |path| as UTF-8 XML."""
  serialized = ElementTree.tostring(doc.getroot(), encoding='UTF-8')
  with build_utils.AtomicOutput(path) as out_file:
    out_file.write(serialized)
+
+
def GetPackage(manifest_node):
  """Returns the <manifest> node's package attribute, or None if unset."""
  return manifest_node.get('package')
+
+
def AssertUsesSdk(manifest_node,
                  min_sdk_version=None,
                  target_sdk_version=None,
                  max_sdk_version=None,
                  fail_if_not_exist=False):
  """Asserts values of attributes of <uses-sdk> element.

  Unless |fail_if_not_exist| is true, will only assert if both the passed value
  is not None and the value of attribute exist. If |fail_if_not_exist| is true
  will fail if passed value is not None but attribute does not exist.
  """
  uses_sdk_node = manifest_node.find('./uses-sdk')
  if uses_sdk_node is None:
    return
  expectations = (('min', min_sdk_version), ('target', target_sdk_version),
                  ('max', max_sdk_version))
  for prefix, expected in expectations:
    attr_name = '{%s}%sSdkVersion' % (ANDROID_NAMESPACE, prefix)
    actual = uses_sdk_node.get(attr_name)
    if fail_if_not_exist and expected and not actual:
      assert False, (
          '%sSdkVersion in Android manifest does not exist but we expect %s' %
          (prefix, expected))
    if actual and expected:
      assert actual == expected, (
          '%sSdkVersion in Android manifest is %s but we expect %s' %
          (prefix, actual, expected))
+
+
def AssertPackage(manifest_node, package):
  """Asserts that manifest package has desired value.

  Will only assert if both |package| is not None and the package is set in the
  manifest.
  """
  actual = GetPackage(manifest_node)
  if actual is None or package is None:
    return
  assert actual == package, (
      'Package in Android manifest is %s but we expect %s' % (actual, package))
+
+
def _SortAndStripElementTree(root):
  """Recursively sorts nodes/attributes and strips whitespace-only text.

  Sort is alphabetical with two exceptions:
    1) Put <application> node last (since it's giant).
    2) Put android:name before other attributes.
  """
  name_attr = '{%s}name' % ANDROID_NAMESPACE

  def node_key(node):
    if node.tag == 'application':
      return 'z'
    serialized = ElementTree.tostring(node)
    # ElementTree.tostring inserts namespace attributes for any that are needed
    # for the node or any of its descendants. Remove them so as to prevent a
    # change to a child that adds/removes a namespace usage from changing sort
    # order.
    return re.sub(r' xmlns:.*?".*?"', '', serialized.decode('utf8'))

  def attr_key(item):
    return ('', '') if item[0] == name_attr else item

  def visit(node):
    for child in node:
      if child.text and child.text.isspace():
        child.text = None
      visit(child)

    # Sorting attributes via dict rebuild requires Python 3.8+.
    node.attrib = dict(sorted(node.attrib.items(), key=attr_key))

    # Sort child nodes.
    node[:] = sorted(node, key=node_key)

  visit(root)
+
+
+def _SplitElement(line):
+ """Parses a one-line xml node into ('<tag', ['a="b"', ...]], '/>')."""
+
+ # Shlex splits nicely, but removes quotes. Need to put them back.
+ def restore_quotes(value):
+ return value.replace('=', '="', 1) + '"'
+
+ # Simplify restore_quotes by separating />.
+ assert line.endswith('>'), line
+ end_tag = '>'
+ if line.endswith('/>'):
+ end_tag = '/>'
+ line = line[:-len(end_tag)]
+
+ # Use shlex to avoid having to re-encode &quot;, etc.
+ parts = shlex.split(line)
+ start_tag = parts[0]
+ attrs = parts[1:]
+
+ return start_tag, [restore_quotes(x) for x in attrs], end_tag
+
+
+def _CreateNodeHash(lines):
+ """Computes a hash (md5) for the first XML node found in |lines|.
+
+ Args:
+ lines: List of strings containing pretty-printed XML.
+
+ Returns:
+ Positive 32-bit integer hash of the node (including children).
+ """
+ target_indent = lines[0].find('<')
+ tag_closed = False
+ for i, l in enumerate(lines[1:]):
+ cur_indent = l.find('<')
+ if cur_indent != -1 and cur_indent <= target_indent:
+ tag_lines = lines[:i + 1]
+ break
+ elif not tag_closed and 'android:name="' in l:
+ # To reduce noise of node tags changing, use android:name as the
+ # basis the hash since they usually unique.
+ tag_lines = [l]
+ break
+ tag_closed = tag_closed or '>' in l
+ else:
+ assert False, 'Did not find end of node:\n' + '\n'.join(lines)
+
+ # Insecure and truncated hash as it only needs to be unique vs. its neighbors.
+ return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
+
+
+def _IsSelfClosing(lines):
+ """Given pretty-printed xml, returns whether first node is self-closing."""
+ for l in lines:
+ idx = l.find('>')
+ if idx != -1:
+ return l[idx - 1] == '/'
+ assert False, 'Did not find end of tag:\n' + '\n'.join(lines)
+
+
def _AddDiffTags(lines):
  """Appends " # DIFF-ANCHOR: <hash>" markers to multi-line nodes in |lines|.

  |lines| is pretty-printed XML and is modified in place. Opening and closing
  tags of the same node receive the same anchor, paired via a stack.
  """
  # When multiple identical tags appear sequentially, XML diffs can look like:
  # + </tag>
  # + <tag>
  # rather than:
  # + <tag>
  # + </tag>
  # To reduce confusion, add hashes to tags.
  # This also ensures changed tags show up with outer <tag> elements rather than
  # showing only changed attributes.
  hash_stack = []
  for i, l in enumerate(lines):
    stripped = l.lstrip()
    # Ignore non-indented tags and lines that are not the start/end of a node.
    if l[0] != ' ' or stripped[0] != '<':
      continue
    # Ignore self-closing nodes that fit on one line.
    if l[-2:] == '/>':
      continue
    # Ignore <application> since diff tag changes with basically any change.
    if stripped.lstrip('</').startswith('application'):
      continue

    # Check for the closing tag (</foo>).
    if stripped[1] != '/':
      # Opening tag: hash the node and (unless self-closing across multiple
      # lines) remember the anchor for the matching closing tag.
      cur_hash = _CreateNodeHash(lines[i:])
      if not _IsSelfClosing(lines[i:]):
        hash_stack.append(cur_hash)
    else:
      # Closing tag: reuse the anchor pushed by the matching opening tag.
      cur_hash = hash_stack.pop()
    lines[i] += ' # DIFF-ANCHOR: {}'.format(cur_hash)
  assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
+
+
def NormalizeManifest(manifest_contents):
  """Returns a normalized, diff-friendly rendering of an AndroidManifest.

  Normalization sorts nodes and attributes, strips android:debuggable, blurs
  non-root package-name occurrences and static-library version numbers, wraps
  long tags one-attribute-per-line, and appends DIFF-ANCHOR markers.
  """
  _RegisterElementTreeNamespaces()
  # This also strips comments and sorts node attributes alphabetically.
  root = ElementTree.fromstring(manifest_contents)
  package = GetPackage(root)

  app_node = root.find('application')
  if app_node is not None:
    # android:debuggable is added when !is_official_build. Strip it out to avoid
    # expectation diffs caused by not adding is_official_build. Play store
    # blocks uploading apps with it set, so there's no risk of it slipping in.
    debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE
    if debuggable_name in app_node.attrib:
      del app_node.attrib[debuggable_name]

    # Trichrome's static library version number is updated daily. To avoid
    # frequent manifest check failures, we remove the exact version number
    # during normalization.
    for node in app_node:
      if (node.tag in ['uses-static-library', 'static-library']
          and '{%s}version' % ANDROID_NAMESPACE in node.keys()
          and '{%s}name' % ANDROID_NAMESPACE in node.keys()):
        node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER')

  # We also remove the exact package name (except the one at the root level)
  # to avoid noise during manifest comparison.
  def blur_package_name(node):
    for key in node.keys():
      node.set(key, node.get(key).replace(package, '$PACKAGE'))

    for child in node:
      blur_package_name(child)

  # We only blur the package names of non-root nodes because they generate a lot
  # of diffs when doing manifest checks for upstream targets. We still want to
  # have 1 piece of package name not blurred just in case the package name is
  # mistakenly changed.
  for child in root:
    blur_package_name(child)

  _SortAndStripElementTree(root)

  # Fix up whitespace/indentation.
  dom = minidom.parseString(ElementTree.tostring(root))
  out_lines = []
  for l in dom.toprettyxml(indent=' ').splitlines():
    if not l or l.isspace():
      continue
    if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES):
      # Wrap long tags as one attribute per line.
      indent = ' ' * l.find('<')
      start_tag, attrs, end_tag = _SplitElement(l)
      out_lines.append('{}{}'.format(indent, start_tag))
      for attribute in attrs:
        out_lines.append('{} {}'.format(indent, attribute))
      out_lines[-1] += '>'
      # Heuristic: Do not allow multi-line tags to be self-closing since these
      # can generally be allowed to have nested elements. When diffing, it adds
      # noise if the base file is self-closing and the non-base file is not
      # self-closing.
      if end_tag == '/>':
        out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
    else:
      out_lines.append(l)

  # Make output more diff-friendly.
  _AddDiffTags(out_lines)

  return '\n'.join(out_lines) + '\n'
diff --git a/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py
new file mode 100755
index 0000000000..52bf458a59
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/manifest_utils_test.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+from util import manifest_utils
+
+_TEST_MANIFEST = """\
+<?xml version="1.0" ?>
+<manifest package="test.pkg"
+ tools:ignore="MissingVersion"
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools">
+ <!-- Should be one line. -->
+ <uses-sdk android:minSdkVersion="24"
+ android:targetSdkVersion="30"/>
+ <!-- Should have attrs sorted-->
+ <uses-feature android:required="false" android:version="1"
+ android:name="android.hardware.vr.headtracking" />
+ <!-- Should not be wrapped since < 100 chars. -->
+ <application
+ android:name="testname">
+ <activity
+ {extra_activity_attr}
+ android:icon="@drawable/ic_devices_48dp"
+ android:label="label with spaces"
+ android:name="to be hashed"
+ android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+ <intent-filter>
+ {extra_intent_filter_elem}
+ <action android:name="android.intent.action.SEND"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <data android:mimeType="text/plain"/>
+ </intent-filter>
+ </activity>
+ <!-- Should be made non-self-closing. -->
+ <receiver android:exported="false" android:name="\
+org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"/>
+ </application>
+</manifest>
+"""
+
+_TEST_MANIFEST_NORMALIZED = """\
+<?xml version="1.0" ?>
+<manifest
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="test.pkg"
+ tools:ignore="MissingVersion">
+ <uses-feature android:name="android.hardware.vr.headtracking" \
+android:required="false" android:version="1"/>
+ <uses-sdk android:minSdkVersion="24" android:targetSdkVersion="30"/>
+ <application android:name="testname">
+ <activity # DIFF-ANCHOR: {activity_diff_anchor}
+ android:name="to be hashed"
+ {extra_activity_attr}android:icon="@drawable/ic_devices_48dp"
+ android:label="label with spaces"
+ android:theme="@style/Theme.Chromium.Activity.TranslucentNoAnimations">
+ <intent-filter> # DIFF-ANCHOR: {intent_filter_diff_anchor}
+ {extra_intent_filter_elem}\
+<action android:name="android.intent.action.SEND"/>
+ <category android:name="android.intent.category.DEFAULT"/>
+ <data android:mimeType="text/plain"/>
+ </intent-filter> # DIFF-ANCHOR: {intent_filter_diff_anchor}
+ </activity> # DIFF-ANCHOR: {activity_diff_anchor}
+ <receiver # DIFF-ANCHOR: ddab3320
+ android:name=\
+"org.chromium.chrome.browser.announcement.AnnouncementNotificationManager$Rcvr"
+ android:exported="false">
+ </receiver> # DIFF-ANCHOR: ddab3320
+ </application>
+</manifest>
+"""
+
+_ACTIVITY_DIFF_ANCHOR = '32b3a641'
+_INTENT_FILTER_DIFF_ANCHOR = '4ee601b7'
+
+
def _CreateTestData(intent_filter_diff_anchor=_INTENT_FILTER_DIFF_ANCHOR,
                    extra_activity_attr='',
                    extra_intent_filter_elem=''):
  """Returns (input manifest, expected normalized manifest) strings.

  Args:
    intent_filter_diff_anchor: expected DIFF-ANCHOR hash for <intent-filter>.
    extra_activity_attr: extra attribute text injected into the <activity>.
    extra_intent_filter_elem: extra element injected into the <intent-filter>.
  """
  # Injected snippets get a newline + indent so they line up with the
  # surrounding attributes/elements in the templates.
  if extra_activity_attr:
    extra_activity_attr += '\n '
  if extra_intent_filter_elem:
    extra_intent_filter_elem += '\n '
  test_manifest = _TEST_MANIFEST.format(
      extra_activity_attr=extra_activity_attr,
      extra_intent_filter_elem=extra_intent_filter_elem)
  expected = _TEST_MANIFEST_NORMALIZED.format(
      activity_diff_anchor=_ACTIVITY_DIFF_ANCHOR,
      intent_filter_diff_anchor=intent_filter_diff_anchor,
      extra_activity_attr=extra_activity_attr,
      extra_intent_filter_elem=extra_intent_filter_elem)
  return test_manifest, expected
+
+
class ManifestUtilsTest(unittest.TestCase):
  """Tests for manifest_utils.NormalizeManifest()."""
  # Enable diff output.
  maxDiff = None

  def testNormalizeManifest_golden(self):
    """Normalization of the unmodified template matches the golden output."""
    test_manifest, expected = _CreateTestData()
    actual = manifest_utils.NormalizeManifest(test_manifest)
    self.assertMultiLineEqual(expected, actual)

  def testNormalizeManifest_nameUsedForActivity(self):
    """Hash anchors track android:name, not the full <activity> tag."""
    test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
    actual = manifest_utils.NormalizeManifest(test_manifest)
    # Checks that the DIFF-ANCHOR does not change with the added attribute.
    self.assertMultiLineEqual(expected, actual)

  def testNormalizeManifest_nameNotUsedForIntentFilter(self):
    """Anchors of name-less nodes hash the whole node, children included."""
    test_manifest, expected = _CreateTestData(
        extra_intent_filter_elem='<a/>', intent_filter_diff_anchor='5f5c8a70')
    actual = manifest_utils.NormalizeManifest(test_manifest)
    # Checks that the DIFF-ANCHOR does change with the added element despite
    # having a nested element with an android:name set.
    self.assertMultiLineEqual(expected, actual)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/md5_check.py b/third_party/libwebrtc/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000000..87ee723c85
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/md5_check.py
@@ -0,0 +1,471 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'build'))
+import print_python_deps
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
def CallAndWriteDepfileIfStale(on_stale_md5,
                               options,
                               record_path=None,
                               input_paths=None,
                               input_strings=None,
                               output_paths=None,
                               force=False,
                               pass_changes=False,
                               track_subpaths_allowlist=None,
                               depfile_deps=None):
  """Wraps CallAndRecordIfStale() and writes a depfile if applicable.

  If |options| has a "depfile" attribute set, the depfile is written after
  CallAndRecordIfStale() returns, naming output_paths[0] as its target.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be listed
  in depfile_deps. It's important to write paths to the depfile that are already
  captured by GN deps since GN args can cause GN deps to change, and such
  changes are not immediately reflected in depfiles (http://crbug.com/589311).

  Args:
    on_stale_md5: Callback invoked when outputs are stale (see
      CallAndRecordIfStale for the remaining shared parameters).
    options: Parsed argparse-style options object; only its optional
      "depfile" attribute is used here.
    depfile_deps: Extra non-python paths to record in the depfile.

  Raises:
    Exception: When |output_paths| is empty.
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])

  # Track this script's own python imports so edits to them trigger reruns.
  input_paths += print_python_deps.ComputePythonDependencies()

  CallAndRecordIfStale(
      on_stale_md5,
      record_path=record_path,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=pass_changes,
      track_subpaths_allowlist=track_subpaths_allowlist)

  # Write depfile even when inputs have not changed to ensure build correctness
  # on bots that build with & without patch, and the patch changes the depfile
  # location.
  if hasattr(options, 'depfile') and options.depfile:
    build_utils.WriteDepfile(options.depfile, output_paths[0], depfile_deps)
+
+
def CallAndRecordIfStale(function,
                         record_path=None,
                         input_paths=None,
                         input_strings=None,
                         output_paths=None,
                         force=False,
                         pass_changes=False,
                         track_subpaths_allowlist=None):
  """Calls function if outputs are stale.

  Outputs are considered stale if:
  - any output_paths are missing, or
  - the contents of any file within input_paths has changed, or
  - the contents of input_strings has changed.

  To debug which files are out-of-date, set the environment variable:
  PRINT_BUILD_EXPLANATIONS=1

  Args:
    function: The function to call.
    record_path: Path to record metadata.
      Defaults to output_paths[0] + '.md5.stamp'
    input_paths: List of paths to calculate an md5 sum on.
    input_strings: List of strings to record verbatim.
    output_paths: List of output paths.
    force: Whether to treat outputs as missing regardless of whether they
      actually are.
    pass_changes: Whether to pass a Changes instance to |function|.
    track_subpaths_allowlist: Relevant only when pass_changes=True. List of .zip
      files from |input_paths| to make subpath information available for.
  """
  assert record_path or output_paths
  input_paths = input_paths or []
  input_strings = input_strings or []
  output_paths = output_paths or []
  record_path = record_path or output_paths[0] + '.md5.stamp'

  assert record_path.endswith('.stamp'), (
      'record paths must end in \'.stamp\' so that they are easy to find '
      'and delete')

  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
  new_metadata.AddStrings(input_strings)

  zip_allowlist = set(track_subpaths_allowlist or [])
  for path in input_paths:
    # It's faster to md5 an entire zip file than it is to just locate & hash
    # its central directory (which is what this used to do).
    if path in zip_allowlist:
      entries = _ExtractZipEntries(path)
      new_metadata.AddZipFile(path, entries)
    else:
      new_metadata.AddFile(path, _ComputeTagForPath(path))

  old_metadata = None
  force = force or _FORCE_REBUILD
  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
  too_new = []
  # When outputs are missing, don't bother gathering change information.
  if not missing_outputs and os.path.exists(record_path):
    record_mtime = os.path.getmtime(record_path)
    # Outputs newer than the change information must have been modified outside
    # of the build, and should be considered stale.
    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
    if not too_new:
      with open(record_path, 'r') as jsonfile:
        try:
          old_metadata = _Metadata.FromFile(jsonfile)
        except:  # pylint: disable=bare-except
          pass  # Not yet using new file format.

  changes = Changes(old_metadata, new_metadata, force, missing_outputs, too_new)
  if not changes.HasChanges():
    return

  if PRINT_EXPLANATIONS:
    print('=' * 80)
    print('Target is stale: %s' % record_path)
    print(changes.DescribeDifference())
    print('=' * 80)

  args = (changes,) if pass_changes else ()
  function(*args)

  # Record the new metadata only after |function| succeeds, so a failed run
  # stays stale on the next build.
  with open(record_path, 'w') as f:
    new_metadata.ToFile(f)
+
+
class Changes(object):
  """Provides an API for querying what changed between runs."""

  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
               too_new):
    """
    Args:
      old_metadata: _Metadata recorded by the previous run, or None when no
          readable stamp file existed.
      new_metadata: _Metadata computed for the current run.
      force: Whether outputs were treated as stale regardless of content.
      missing_outputs: List of output paths that do not exist.
      too_new: List of output paths newer than the stamp file.
    """
    self.old_metadata = old_metadata
    self.new_metadata = new_metadata
    self.force = force
    self.missing_outputs = missing_outputs
    self.too_new = too_new

  def _GetOldTag(self, path, subpath=None):
    # Evaluates to None/False when there is no previous metadata.
    return self.old_metadata and self.old_metadata.GetTag(path, subpath)

  def HasChanges(self):
    """Returns whether any changes exist."""
    # HasStringChanges() is True whenever old_metadata is None, so the second
    # clause is only evaluated with valid old metadata.
    return (self.HasStringChanges()
            or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())

  def HasStringChanges(self):
    """Returns whether string metadata changed."""
    return (self.force or not self.old_metadata
            or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())

  def AddedOrModifiedOnly(self):
    """Returns whether the only changes were from added or modified (sub)files.

    No missing outputs, no removed paths/subpaths.
    """
    if self.HasStringChanges():
      return False
    if any(self.IterRemovedPaths()):
      return False
    for path in self.IterModifiedPaths():
      if any(self.IterRemovedSubpaths(path)):
        return False
    return True

  def IterAllPaths(self):
    """Generator for paths."""
    return self.new_metadata.IterPaths()

  def IterAllSubpaths(self, path):
    """Generator for subpaths."""
    return self.new_metadata.IterSubpaths(path)

  def IterAddedPaths(self):
    """Generator for paths that were added."""
    for path in self.new_metadata.IterPaths():
      if self._GetOldTag(path) is None:
        yield path

  def IterAddedSubpaths(self, path):
    """Generator for paths that were added within the given zip file."""
    for subpath in self.new_metadata.IterSubpaths(path):
      if self._GetOldTag(path, subpath) is None:
        yield subpath

  def IterRemovedPaths(self):
    """Generator for paths that were removed."""
    if self.old_metadata:
      for path in self.old_metadata.IterPaths():
        if self.new_metadata.GetTag(path) is None:
          yield path

  def IterRemovedSubpaths(self, path):
    """Generator for paths that were removed within the given zip file."""
    if self.old_metadata:
      for subpath in self.old_metadata.IterSubpaths(path):
        if self.new_metadata.GetTag(path, subpath) is None:
          yield subpath

  def IterModifiedPaths(self):
    """Generator for paths whose contents have changed."""
    for path in self.new_metadata.IterPaths():
      old_tag = self._GetOldTag(path)
      new_tag = self.new_metadata.GetTag(path)
      if old_tag is not None and old_tag != new_tag:
        yield path

  def IterModifiedSubpaths(self, path):
    """Generator for paths within a zip file whose contents have changed."""
    for subpath in self.new_metadata.IterSubpaths(path):
      old_tag = self._GetOldTag(path, subpath)
      new_tag = self.new_metadata.GetTag(path, subpath)
      if old_tag is not None and old_tag != new_tag:
        yield subpath

  def IterChangedPaths(self):
    """Generator for all changed paths (added/removed/modified)."""
    return itertools.chain(self.IterRemovedPaths(),
                           self.IterModifiedPaths(),
                           self.IterAddedPaths())

  def IterChangedSubpaths(self, path):
    """Generator for paths within a zip that were added/removed/modified."""
    return itertools.chain(self.IterRemovedSubpaths(path),
                           self.IterModifiedSubpaths(path),
                           self.IterAddedSubpaths(path))

  def DescribeDifference(self):
    """Returns a human-readable description of what changed."""
    if self.force:
      return 'force=True'
    elif self.missing_outputs:
      return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs)
    elif self.too_new:
      return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new)
    elif self.old_metadata is None:
      return 'Previous stamp file not found.'

    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
                            self.new_metadata.GetStrings())
      changed = [s for s in ndiff if not s.startswith(' ')]
      return 'Input strings changed:\n ' + '\n '.join(changed)

    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
      return "There's no difference."

    lines = []
    lines.extend('Added: ' + p for p in self.IterAddedPaths())
    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
    for path in self.IterModifiedPaths():
      lines.append('Modified: ' + path)
      lines.extend(' -> Subpath added: ' + p
                   for p in self.IterAddedSubpaths(path))
      lines.extend(' -> Subpath removed: ' + p
                   for p in self.IterRemovedSubpaths(path))
      lines.extend(' -> Subpath modified: ' + p
                   for p in self.IterModifiedSubpaths(path))
    if lines:
      return 'Input files changed:\n ' + '\n '.join(lines)
    return 'I have no idea what changed (there is a bug).'
+
+
class _Metadata(object):
  """Data model for tracking change metadata.

  The model is write-then-read: populate via AddStrings()/AddFile()/
  AddZipFile() first, then query via the *Md5()/GetTag()/Iter*() accessors.
  _AssertNotQueried() enforces that no mutation happens after any cached
  digest or lookup map has been computed.

  Args:
    track_entries: Enables per-file change tracking. Slower, but required for
      Changes functionality.
  """
  # Schema:
  # {
  #   "files-md5": "VALUE",
  #   "strings-md5": "VALUE",
  #   "input-files": [
  #     {
  #       "path": "path.jar",
  #       "tag": "{MD5 of entries}",
  #       "entries": [
  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
  #       ]
  #     }, {
  #       "path": "path.txt",
  #       "tag": "{MD5}",
  #     }
  #   ],
  #   "input-strings": ["a", "b", ...],
  # }
  def __init__(self, track_entries=False):
    self._track_entries = track_entries
    # Lazily-computed aggregate digests; see FilesMd5() / StringsMd5().
    self._files_md5 = None
    self._strings_md5 = None
    # List of per-file dicts matching the schema above.
    self._files = []
    # Flat list of input strings (always stored as str; see AddStrings()).
    self._strings = []
    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
    self._file_map = None

  @classmethod
  def FromFile(cls, fileobj):
    """Returns a _Metadata initialized from a file object."""
    ret = cls()
    obj = json.load(fileobj)
    ret._files_md5 = obj['files-md5']
    ret._strings_md5 = obj['strings-md5']
    # Stamps written with track_entries=False omit these keys (see ToFile()).
    ret._files = obj.get('input-files', [])
    ret._strings = obj.get('input-strings', [])
    return ret

  def ToFile(self, fileobj):
    """Serializes metadata to the given file object.

    Note: the emitted bytes become the stamp file contents, so key order and
    formatting here are part of the on-disk format.
    """
    obj = {
        'files-md5': self.FilesMd5(),
        'strings-md5': self.StringsMd5(),
    }
    if self._track_entries:
      # Sort by path for deterministic stamp output.
      obj['input-files'] = sorted(self._files, key=lambda e: e['path'])
      obj['input-strings'] = self._strings

    json.dump(obj, fileobj, indent=2)

  def _AssertNotQueried(self):
    """Guards mutators: metadata must not change once digests are cached."""
    assert self._files_md5 is None
    assert self._strings_md5 is None
    assert self._file_map is None

  def AddStrings(self, values):
    """Records input strings (each value is coerced via str())."""
    self._AssertNotQueried()
    self._strings.extend(str(v) for v in values)

  def AddFile(self, path, tag):
    """Adds metadata for a non-zip file.

    Args:
      path: Path to the file.
      tag: A short string representative of the file contents.
    """
    self._AssertNotQueried()
    self._files.append({
        'path': path,
        'tag': tag,
    })

  def AddZipFile(self, path, entries):
    """Adds metadata for a zip file.

    The file-level tag is derived from all entry subpaths and tags, so any
    change within the zip changes the zip's own tag.

    Args:
      path: Path to the file.
      entries: List of (subpath, tag) tuples for entries within the zip.
    """
    self._AssertNotQueried()
    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
                                            (e[1] for e in entries)))
    self._files.append({
        'path': path,
        'tag': tag,
        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
    })

  def GetStrings(self):
    """Returns the list of input strings."""
    return self._strings

  def FilesMd5(self):
    """Lazily computes and returns the aggregate md5 of input files."""
    if self._files_md5 is None:
      # Omit paths from md5 since temporary files have random names.
      self._files_md5 = _ComputeInlineMd5(
          self.GetTag(p) for p in sorted(self.IterPaths()))
    return self._files_md5

  def StringsMd5(self):
    """Lazily computes and returns the aggregate md5 of input strings."""
    if self._strings_md5 is None:
      self._strings_md5 = _ComputeInlineMd5(self._strings)
    return self._strings_md5

  def _GetEntry(self, path, subpath=None):
    """Returns the JSON entry for the given path / subpath, or None."""
    if self._file_map is None:
      # Build the lookup map once; also freezes the metadata (see
      # _AssertNotQueried()).
      self._file_map = {}
      for entry in self._files:
        self._file_map[(entry['path'], None)] = entry
        for subentry in entry.get('entries', ()):
          self._file_map[(entry['path'], subentry['path'])] = subentry
    return self._file_map.get((path, subpath))

  def GetTag(self, path, subpath=None):
    """Returns the tag for the given path / subpath (None if unknown)."""
    ret = self._GetEntry(path, subpath)
    return ret and ret['tag']

  def IterPaths(self):
    """Returns a generator for all top-level paths."""
    return (e['path'] for e in self._files)

  def IterSubpaths(self, path):
    """Returns a generator for all subpaths in the given zip.

    If the given path is not a zip file or doesn't exist, returns an empty
    iterable.
    """
    outer_entry = self._GetEntry(path)
    if not outer_entry:
      return ()
    subentries = outer_entry.get('entries', [])
    return (entry['path'] for entry in subentries)
+
+
+def _ComputeTagForPath(path):
+ stat = os.stat(path)
+ if stat.st_size > 1 * 1024 * 1024:
+ # Fallback to mtime for large files so that md5_check does not take too long
+ # to run.
+ return stat.st_mtime
+ md5 = hashlib.md5()
+ with open(path, 'rb') as f:
+ md5.update(f.read())
+ return md5.hexdigest()
+
+
+def _ComputeInlineMd5(iterable):
+ """Computes the md5 of the concatenated parameters."""
+ md5 = hashlib.md5()
+ for item in iterable:
+ md5.update(str(item).encode('ascii'))
+ return md5.hexdigest()
+
+
+def _ExtractZipEntries(path):
+ """Returns a list of (path, CRC32) of all files within |path|."""
+ entries = []
+ with zipfile.ZipFile(path) as zip_file:
+ for zip_info in zip_file.infolist():
+ # Skip directories and empty files.
+ if zip_info.CRC:
+ entries.append(
+ (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+ return entries
diff --git a/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py b/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000000..e11bbd50ed
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import os
+import sys
+import tempfile
+import unittest
+import zipfile
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import md5_check
+
+
+def _WriteZipFile(path, entries):
+ with zipfile.ZipFile(path, 'w') as zip_file:
+ for subpath, data in entries:
+ zip_file.writestr(subpath, data)
+
+
class TestMd5Check(unittest.TestCase):
  """Integration test for md5_check.CallAndRecordIfStale().

  The single test method runs a long, order-dependent sequence of calls that
  share one stamp file and mutate the same input files/strings between steps,
  so each step's expectation depends on the stamp left by the previous one.
  """

  def setUp(self):
    # Set by the MarkCalled closures below; reset before each check.
    self.called = False
    self.changes = None

  def testCallAndRecordIfStale(self):
    input_strings = ['string1', 'string2']
    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
    file1_contents = b'input file 1'
    input_file1.write(file1_contents)
    input_file1.flush()
    # Test out empty zip file to start.
    _WriteZipFile(input_file2.name, [])
    input_files = [input_file1.name, input_file2.name]
    zip_paths = [input_file2.name]

    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')

    def CheckCallAndRecord(should_call,
                           message,
                           force=False,
                           outputs_specified=False,
                           outputs_missing=False,
                           expected_changes=None,
                           added_or_modified_only=None,
                           track_subentries=False,
                           output_newer_than_record=False):
      # Runs CallAndRecordIfStale() once and asserts whether the callback
      # fired; |expected_changes| is an fnmatch pattern matched against
      # Changes.DescribeDifference().
      output_paths = None
      if outputs_specified:
        output_file1 = tempfile.NamedTemporaryFile()
        if outputs_missing:
          output_file1.close()  # Gets deleted on close().
        output_paths = [output_file1.name]
      if output_newer_than_record:
        output_mtime = os.path.getmtime(output_file1.name)
        os.utime(record_path.name, (output_mtime - 1, output_mtime - 1))
      else:
        # touch the record file so it doesn't look like it's older that
        # the output we've just created
        os.utime(record_path.name, None)

      self.called = False
      self.changes = None
      # A Changes argument is passed to the callback only when the call
      # requests pass_changes=True below; pick the matching arity.
      if expected_changes or added_or_modified_only is not None:

        def MarkCalled(changes):
          self.called = True
          self.changes = changes
      else:

        def MarkCalled():
          self.called = True

      md5_check.CallAndRecordIfStale(
          MarkCalled,
          record_path=record_path.name,
          input_paths=input_files,
          input_strings=input_strings,
          output_paths=output_paths,
          force=force,
          pass_changes=(expected_changes or added_or_modified_only) is not None,
          track_subpaths_allowlist=zip_paths if track_subentries else None)
      self.assertEqual(should_call, self.called, message)
      if expected_changes:
        description = self.changes.DescribeDifference()
        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
                        'Expected %s to match %s' % (
                        repr(description), repr(expected_changes)))
      if should_call and added_or_modified_only is not None:
        self.assertEqual(added_or_modified_only,
                         self.changes.AddedOrModifiedOnly())

    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
                       expected_changes='Previous stamp file not found.',
                       added_or_modified_only=False)
    CheckCallAndRecord(False, 'should not call when nothing changed')
    input_files = input_files[::-1]
    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')

    CheckCallAndRecord(False, 'should not call when nothing changed #2',
                       outputs_specified=True, outputs_missing=False)
    CheckCallAndRecord(True, 'should call when output missing',
                       outputs_specified=True, outputs_missing=True,
                       expected_changes='Outputs do not exist:*',
                       added_or_modified_only=False)
    CheckCallAndRecord(True,
                       'should call when output is newer than record',
                       expected_changes='Outputs newer than stamp file:*',
                       outputs_specified=True,
                       outputs_missing=False,
                       added_or_modified_only=False,
                       output_newer_than_record=True)
    CheckCallAndRecord(True, force=True, message='should call when forced',
                       expected_changes='force=True',
                       added_or_modified_only=False)

    input_file1.write(b'some more input')
    input_file1.flush()
    CheckCallAndRecord(True, 'changed input file should trigger call',
                       expected_changes='*Modified: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_files = input_files[:1]
    CheckCallAndRecord(True, 'removing file should trigger call',
                       expected_changes='*Removed: %s' % input_file1.name,
                       added_or_modified_only=False)

    input_files.append(input_file1.name)
    CheckCallAndRecord(True, 'added input file should trigger call',
                       expected_changes='*Added: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_strings[0] = input_strings[0] + ' a bit longer'
    CheckCallAndRecord(True, 'changed input string should trigger call',
                       expected_changes='*Input strings changed*',
                       added_or_modified_only=False)

    input_strings = input_strings[::-1]
    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
                       expected_changes='*Input strings changed*')

    input_strings = input_strings[:1]
    CheckCallAndRecord(True, 'removing a string should trigger call')

    input_strings.append('a brand new string')
    CheckCallAndRecord(
        True,
        'added input string should trigger call',
        added_or_modified_only=False)

    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
    CheckCallAndRecord(
        True,
        'added subpath should trigger call',
        expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name,
                                                             'path/1.txt'),
        added_or_modified_only=True,
        track_subentries=True)
    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
    CheckCallAndRecord(
        True,
        'changed subpath should trigger call',
        expected_changes='*Modified: %s*Subpath modified: %s' %
        (input_file2.name, 'path/1.txt'),
        added_or_modified_only=True,
        track_subentries=True)

    # NOTE(review): this step omits track_subentries=True; the subpath match
    # presumably relies on the subpaths recorded by the previous step's stamp
    # — confirm against md5_check's stamp-reading behavior.
    _WriteZipFile(input_file2.name, [])
    CheckCallAndRecord(True, 'removed subpath should trigger call',
                       expected_changes='*Modified: %s*Subpath removed: %s' % (
                           input_file2.name, 'path/1.txt'),
                       added_or_modified_only=False)
+
+
# Allows running this test file directly (it has a shebang and exec bit).
if __name__ == '__main__':
  unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/parallel.py b/third_party/libwebrtc/build/android/gyp/util/parallel.py
new file mode 100644
index 0000000000..c26875a71c
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/parallel.py
@@ -0,0 +1,214 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helpers related to multiprocessing.
+
+Based on: //tools/binary_size/libsupersize/parallel.py
+"""
+
+import atexit
+import logging
+import multiprocessing
+import os
+import sys
+import threading
+import traceback
+
# When set, all work runs inline in this process (useful for debugging).
DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1'
if DISABLE_ASYNC:
  logging.warning('Running in synchronous mode.')

# Lazily-initialized list of live pools; terminated by _TerminatePools(),
# which is registered via atexit in _MakeProcessPool().
_all_pools = None
# Set to True on the forked side (see _FuncWrapper.__init__).
_is_child_process = False
# Once True, child exceptions are logged instead of re-raised.
_silence_exceptions = False

# Used to pass parameters to forked processes without pickling.
_fork_params = None
_fork_kwargs = None
+
+
+class _ImmediateResult(object):
+ def __init__(self, value):
+ self._value = value
+
+ def get(self):
+ return self._value
+
+ def wait(self):
+ pass
+
+ def ready(self):
+ return True
+
+ def successful(self):
+ return True
+
+
+class _ExceptionWrapper(object):
+ """Used to marshal exception messages back to main process."""
+
+ def __init__(self, msg, exception_type=None):
+ self.msg = msg
+ self.exception_type = exception_type
+
+ def MaybeThrow(self):
+ if self.exception_type:
+ raise getattr(__builtins__,
+ self.exception_type)('Originally caused by: ' + self.msg)
+
+
class _FuncWrapper(object):
  """Runs on the fork()'ed side to catch exceptions and spread *args."""

  def __init__(self, func):
    # Mark this module's state as belonging to a child process so that
    # _TerminatePools() becomes a no-op there.
    global _is_child_process
    _is_child_process = True
    self._func = func

  def __call__(self, index, _=None):
    """Invokes the wrapped func with _fork_params[index] / _fork_kwargs."""
    try:
      return self._func(*_fork_params[index], **_fork_kwargs)
    except Exception as e:
      # Fixed: check the |builtins| module rather than |__builtins__|. Within
      # an imported module, __builtins__ is a dict (CPython implementation
      # detail), so hasattr() always returned False and the exception type was
      # silently dropped.
      import builtins
      # Only keep the exception type for builtin exception types or else risk
      # further marshalling exceptions.
      exception_type = None
      if hasattr(builtins, type(e).__name__):
        exception_type = type(e).__name__
      # multiprocessing is supposed to catch and return exceptions automatically
      # but it doesn't seem to work properly :(.
      return _ExceptionWrapper(traceback.format_exc(), exception_type)
    except:  # pylint: disable=bare-except
      return _ExceptionWrapper(traceback.format_exc())
+
+
+class _WrappedResult(object):
+ """Allows for host-side logic to be run after child process has terminated.
+
+ * Unregisters associated pool _all_pools.
+ * Raises exception caught by _FuncWrapper.
+ """
+
+ def __init__(self, result, pool=None):
+ self._result = result
+ self._pool = pool
+
+ def get(self):
+ self.wait()
+ value = self._result.get()
+ _CheckForException(value)
+ return value
+
+ def wait(self):
+ self._result.wait()
+ if self._pool:
+ _all_pools.remove(self._pool)
+ self._pool = None
+
+ def ready(self):
+ return self._result.ready()
+
+ def successful(self):
+ return self._result.successful()
+
+
def _TerminatePools():
  """Calls .terminate() on all active process pools.

  Not supposed to be necessary according to the docs, but seems to be required
  when child process throws an exception or Ctrl-C is hit.
  """
  global _silence_exceptions
  _silence_exceptions = True
  # Child processes cannot have pools, but atexit runs this function because
  # it was registered before fork()ing.
  if _is_child_process:
    return

  def _TerminateOne(pool):
    try:
      pool.terminate()
    except:  # pylint: disable=bare-except
      pass

  # Without calling terminate() on a separate thread, the call can block
  # forever.
  for index, pool in enumerate(_all_pools):
    terminator = threading.Thread(name='Pool-Terminate-{}'.format(index),
                                  target=_TerminateOne,
                                  args=(pool, ))
    terminator.daemon = True
    terminator.start()
+
+
def _CheckForException(value):
  """Exits the process when |value| is a marshalled child exception."""
  if not isinstance(value, _ExceptionWrapper):
    return
  global _silence_exceptions
  if not _silence_exceptions:
    value.MaybeThrow()
    _silence_exceptions = True
    logging.error('Subprocess raised an exception:\n%s', value.msg)
  # Reached when MaybeThrow() had no builtin type to re-raise, or when a
  # previous exception was already reported.
  sys.exit(1)
+
+
def _MakeProcessPool(job_params, **job_kwargs):
  """Creates a Pool whose fork()'ed workers inherit |job_params| as globals.

  The params are stashed in module globals immediately before the fork so
  that children receive them without pickling; the globals are cleared once
  the pool (and its workers) exist.
  """
  global _all_pools, _fork_params, _fork_kwargs
  assert _fork_params is None
  assert _fork_kwargs is None
  worker_count = min(len(job_params), multiprocessing.cpu_count())
  _fork_params, _fork_kwargs = job_params, job_kwargs
  pool = multiprocessing.Pool(worker_count)
  _fork_params, _fork_kwargs = None, None
  if _all_pools is None:
    _all_pools = []
    atexit.register(_TerminatePools)
  _all_pools.append(pool)
  return pool
+
+
def ForkAndCall(func, args):
  """Runs |func| in a fork'ed process.

  Returns:
    A Result object (call .get() to get the return value)
  """
  if DISABLE_ASYNC:
    # Synchronous mode: compute inline and wrap the finished value.
    return _WrappedResult(_ImmediateResult(func(*args)))
  pool = _MakeProcessPool([args])  # Omit |kwargs|.
  async_result = pool.apply_async(_FuncWrapper(func), (0, ))
  pool.close()
  return _WrappedResult(async_result, pool=pool)
+
+
def BulkForkAndCall(func, arg_tuples, **kwargs):
  """Calls |func| in a fork'ed process for each set of args within |arg_tuples|.

  Args:
    kwargs: Common keyword arguments to be passed to |func|.

  Yields the return values in order.
  """
  # Materialize so the length is known (pool sizing + imap index range).
  arg_tuples = list(arg_tuples)
  if not arg_tuples:
    return

  if DISABLE_ASYNC:
    for args in arg_tuples:
      yield func(*args, **kwargs)
    return

  pool = _MakeProcessPool(arg_tuples, **kwargs)
  wrapped_func = _FuncWrapper(func)
  try:
    # imap yields results in submission order; each index selects its args
    # from the fork-shared _fork_params inside _FuncWrapper.
    for result in pool.imap(wrapped_func, range(len(arg_tuples))):
      _CheckForException(result)
      yield result
  finally:
    # Runs even if the caller abandons the generator early.
    pool.close()
    pool.join()
    _all_pools.remove(pool)
diff --git a/third_party/libwebrtc/build/android/gyp/util/protoresources.py b/third_party/libwebrtc/build/android/gyp/util/protoresources.py
new file mode 100644
index 0000000000..272574f117
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/protoresources.py
@@ -0,0 +1,308 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Functions that modify resources in protobuf format.
+
+Format reference:
+https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/Resources.proto
+"""
+
+import logging
+import os
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+from util import resource_utils
+
+sys.path[1:1] = [
+ # `Resources_pb2` module imports `descriptor`, which imports `six`.
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'),
+ # Make sure the pb2 files are able to import google.protobuf
+ os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf',
+ 'python'),
+]
+
+from proto import Resources_pb2
+
# First bytes in an .flat.arsc file.
# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0)
_FLAT_ARSC_HEADER = b'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'

# The package ID hardcoded for shared libraries. See
# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value
# changes make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java.
# (36 == 0x24; it becomes the top byte of a resource ID when shifted by 24,
# see _ProcessProtoItem().)
SHARED_LIBRARY_HARDCODED_ID = 36
+
+
+def _ProcessZip(zip_path, process_func):
+ """Filters a .zip file via: new_bytes = process_func(filename, data)."""
+ has_changes = False
+ zip_entries = []
+ with zipfile.ZipFile(zip_path) as src_zip:
+ for info in src_zip.infolist():
+ data = src_zip.read(info)
+ new_data = process_func(info.filename, data)
+ if new_data is not data:
+ has_changes = True
+ data = new_data
+ zip_entries.append((info, data))
+
+ # Overwrite the original zip file.
+ if has_changes:
+ with zipfile.ZipFile(zip_path, 'w') as f:
+ for info, data in zip_entries:
+ f.writestr(info, data)
+
+
def _ProcessProtoItem(item):
  """Hardcodes the package ID of a dynamic attribute reference in |item|."""
  if not item.HasField('ref'):
    return

  ref = item.ref
  # A dynamic attribute has type ATTRIBUTE and a zero package byte; rewrite
  # the package byte to SHARED_LIBRARY_HARDCODED_ID.
  is_dynamic_attribute = (ref.type == Resources_pb2.Reference.ATTRIBUTE
                          and not ref.id & 0xff000000)
  if is_dynamic_attribute:
    ref.id |= SHARED_LIBRARY_HARDCODED_ID << 24
    ref.ClearField('is_dynamic')
+
+
def _ProcessProtoValue(value):
  """Applies _ProcessProtoItem() to every item contained in |value|."""
  if value.HasField('item'):
    _ProcessProtoItem(value.item)
    return

  compound = value.compound_value
  if compound.HasField('style'):
    items = (entry.item for entry in compound.style.entry)
  elif compound.HasField('array'):
    items = (element.item for element in compound.array.element)
  elif compound.HasField('plural'):
    items = (entry.item for entry in compound.plural.entry)
  else:
    items = ()
  for item in items:
    _ProcessProtoItem(item)
+
+
def _ProcessProtoXmlNode(xml_node):
  """Recursively hardcodes compiled attribute items in an XML proto tree."""
  if not xml_node.HasField('element'):
    return

  element = xml_node.element
  for attribute in element.attribute:
    _ProcessProtoItem(attribute.compiled_item)
  for child in element.child:
    _ProcessProtoXmlNode(child)
+
+
def _SplitLocaleResourceType(_type, allowed_resource_names):
  """Splits locale specific resources out of |_type| and returns them.

  Any locale specific resources will be removed from |_type|, and a new
  Resources_pb2.Type value will be returned which contains those resources.

  Args:
    _type: A Resources_pb2.Type value
    allowed_resource_names: Names of locale resources that should be kept in the
        main type.

  Returns:
    A new Resources_pb2.Type holding the removed locale resources, or None if
    no locale-specific resources were found.
  """
  locale_entries = []
  for entry in _type.entry:
    if entry.name in allowed_resource_names:
      continue

    # First collect all resources values with a locale set.
    config_values_with_locale = []
    for config_value in entry.config_value:
      if config_value.config.locale:
        config_values_with_locale.append(config_value)

    if config_values_with_locale:
      # Remove the locale resources from the original entry
      for value in config_values_with_locale:
        entry.config_value.remove(value)

      # Add locale resources to a new Entry, and save for later.
      locale_entry = Resources_pb2.Entry()
      locale_entry.CopyFrom(entry)
      del locale_entry.config_value[:]
      locale_entry.config_value.extend(config_values_with_locale)
      locale_entries.append(locale_entry)

  if not locale_entries:
    return None

  # Copy the original type and replace the entries with |locale_entries|.
  locale_type = Resources_pb2.Type()
  locale_type.CopyFrom(_type)
  del locale_type.entry[:]
  locale_type.entry.extend(locale_entries)
  return locale_type
+
+
def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist):
  """Hardcodes dynamic attribute package IDs throughout |table| (in place).

  For bundle modules, additionally moves locale-specific resources into a new
  top-level package whose ID is SHARED_LIBRARY_HARDCODED_ID.

  Args:
    table: A Resources_pb2.ResourceTable, modified in place.
    is_bundle_module: True for bundle modules.
    shared_resources_allowlist: Optional R.txt whose string resources are kept
        in the main package (needed by WebView; see below).
  """
  translations_package = None
  if is_bundle_module:
    # A separate top level package will be added to the resources, which
    # contains only locale specific resources. The package ID of the locale
    # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes
    # resources in locale splits to all get assigned
    # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug
    # in shared library bundles where each split APK gets a separate dynamic
    # ID, and cannot be accessed by the main APK.
    translations_package = Resources_pb2.Package()
    translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID
    translations_package.package_name = (table.package[0].package_name +
                                         '_translations')

    # These resources are allowed in the base resources, since they are needed
    # by WebView.
    allowed_resource_names = set()
    if shared_resources_allowlist:
      allowed_resource_names = set(
          resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist))

  for package in table.package:
    for _type in package.type:
      for entry in _type.entry:
        for config_value in entry.config_value:
          _ProcessProtoValue(config_value.value)

      if translations_package is not None:
        locale_type = _SplitLocaleResourceType(_type, allowed_resource_names)
        if locale_type:
          translations_package.type.add().CopyFrom(locale_type)

  if translations_package is not None:
    table.package.add().CopyFrom(translations_package)
+
+
def HardcodeSharedLibraryDynamicAttributes(zip_path,
                                           is_bundle_module,
                                           shared_resources_allowlist=None):
  """Hardcodes the package IDs of dynamic attributes and locale resources.

  Hardcoding dynamic attribute package IDs is a workaround for b/147674078,
  which affects Android versions pre-N. Hardcoding locale resource package IDs
  is a workaround for b/155437035, which affects resources built with
  --shared-lib on all Android versions

  Args:
    zip_path: Path to proto APK file.
    is_bundle_module: True for bundle modules.
    shared_resources_allowlist: Set of resource names to not extract out of the
        main package.
  """

  def _RewriteEntry(filename, data):
    # The resource table and each compiled XML file embed references that
    # need their package byte rewritten.
    if filename == 'resources.pb':
      table = Resources_pb2.ResourceTable()
      table.ParseFromString(data)
      _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist)
      return table.SerializeToString()
    if filename.endswith('.xml') and not filename.startswith('res/raw'):
      xml_node = Resources_pb2.XmlNode()
      xml_node.ParseFromString(data)
      _ProcessProtoXmlNode(xml_node)
      return xml_node.SerializeToString()
    return data

  _ProcessZip(zip_path, _RewriteEntry)
+
+
+class _ResourceStripper(object):
+ def __init__(self, partial_path, keep_predicate):
+ self.partial_path = partial_path
+ self.keep_predicate = keep_predicate
+ self._has_changes = False
+
+ @staticmethod
+ def _IterStyles(entry):
+ for config_value in entry.config_value:
+ value = config_value.value
+ if value.HasField('compound_value'):
+ compound_value = value.compound_value
+ if compound_value.HasField('style'):
+ yield compound_value.style
+
+ def _StripStyles(self, entry, type_and_name):
+ # Strip style entries that refer to attributes that have been stripped.
+ for style in self._IterStyles(entry):
+ entries = style.entry
+ new_entries = []
+ for entry in entries:
+ full_name = '{}/{}'.format(type_and_name, entry.key.name)
+ if not self.keep_predicate(full_name):
+ logging.debug('Stripped %s/%s', self.partial_path, full_name)
+ else:
+ new_entries.append(entry)
+
+ if len(new_entries) != len(entries):
+ self._has_changes = True
+ del entries[:]
+ entries.extend(new_entries)
+
+ def _StripEntries(self, entries, type_name):
+ new_entries = []
+ for entry in entries:
+ type_and_name = '{}/{}'.format(type_name, entry.name)
+ if not self.keep_predicate(type_and_name):
+ logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
+ else:
+ new_entries.append(entry)
+ self._StripStyles(entry, type_and_name)
+
+ if len(new_entries) != len(entries):
+ self._has_changes = True
+ del entries[:]
+ entries.extend(new_entries)
+
+ def StripTable(self, table):
+ self._has_changes = False
+ for package in table.package:
+ for _type in package.type:
+ self._StripEntries(_type.entry, _type.name)
+ return self._has_changes
+
+
def _TableFromFlatBytes(data):
  """Parses the proto ResourceTable embedded in aapt2 container bytes.

  Format reference:
  https://cs.android.com/android/platform/superproject/+/master:frameworks/base/tools/aapt2/format/Container.cpp

  Args:
    data: Raw bytes of a .flat.arsc container file.

  Returns:
    The parsed Resources_pb2.ResourceTable.

  Raises:
    Exception: If |data| does not start with the expected container header.
  """
  size_idx = len(_FLAT_ARSC_HEADER)
  proto_idx = size_idx + 8
  if data[:size_idx] != _FLAT_ARSC_HEADER:
    # Fixed: the previous message referenced |info| and |zip_path|, which do
    # not exist in this scope and caused a NameError instead of the intended
    # error report.
    raise Exception(
        'Error parsing .flat.arsc data: unexpected header {!r}'.format(
            bytes(data[:size_idx])))
  # Size is stored as uint64.
  size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
  table = Resources_pb2.ResourceTable()
  table.ParseFromString(data[proto_idx:proto_idx + size])
  return table
+
+
def _FlatBytesFromTable(table):
  """Serializes |table| into aapt2 container (.flat.arsc) bytes."""
  proto_bytes = table.SerializeToString()
  # The payload length is stored as uint64 right after the header.
  size_field = struct.pack('<Q', len(proto_bytes))
  # The proto payload is padded out to a 4-byte boundary.
  remainder = len(proto_bytes) % 4
  padding = b'\0' * (4 - remainder) if remainder else b''
  return b''.join((_FLAT_ARSC_HEADER, size_field, proto_bytes, padding))
+
+
def StripUnwantedResources(partial_path, keep_predicate):
  """Removes resources from .arsc.flat files inside of a .zip.

  Args:
    partial_path: Path to a .zip containing .arsc.flat entries
    keep_predicate: Given "$partial_path/$res_type/$res_name", returns
      whether to keep the resource.
  """
  stripper = _ResourceStripper(partial_path, keep_predicate)

  def _MaybeStrip(filename, data):
    if not filename.endswith('.arsc.flat'):
      return data
    table = _TableFromFlatBytes(data)
    # Return the original object untouched when nothing was stripped so that
    # _ProcessZip's identity check skips rewriting the zip.
    if not stripper.StripTable(table):
      return data
    return _FlatBytesFromTable(table)

  _ProcessZip(partial_path, _MaybeStrip)
diff --git a/third_party/libwebrtc/build/android/gyp/util/resource_utils.py b/third_party/libwebrtc/build/android/gyp/util/resource_utils.py
new file mode 100644
index 0000000000..4f64174193
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resource_utils.py
@@ -0,0 +1,1078 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import contextlib
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+from xml.etree import ElementTree
+
+import util.build_utils as build_utils
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
# A variation of these maps also exists in:
# //base/android/java/src/org/chromium/base/LocaleUtils.java
# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
#
# Maps Chromium locale names to the legacy Android spellings required by
# releases before Android 5.0/Lollipop (see ToAndroidLocaleName()).
_CHROME_TO_ANDROID_LOCALE_MAP = {
    'es-419': 'es-rUS',
    'sr-Latn': 'b+sr+Latn',
    'fil': 'tl',
    'he': 'iw',
    'id': 'in',
    'yi': 'ji',
}
# Inverse direction: maps legacy Android language codes back to the modern
# Chromium ones (see ToChromiumLocaleName()).
_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
    'tl': 'fil',
    'iw': 'he',
    'in': 'id',
    'ji': 'yi',
    'no': 'nb',  # 'no' is not a real language. http://crbug.com/920960
}

# Every resource type name that may appear in an R.txt file; used both to
# validate parsed entries and to enumerate the inner classes of R.java.
_ALL_RESOURCE_TYPES = {
    'anim', 'animator', 'array', 'attr', 'bool', 'color', 'dimen', 'drawable',
    'font', 'fraction', 'id', 'integer', 'interpolator', 'layout', 'macro',
    'menu', 'mipmap', 'plurals', 'raw', 'string', 'style', 'styleable',
    'transition', 'xml'
}

# Colon-separated glob patterns of files to skip when walking res/ dirs
# (consumed by _GenerateGlobs / IterResourceFilesInDirectories).
AAPT_IGNORE_PATTERN = ':'.join([
    '*OWNERS',  # Allow OWNERS files within res/
    'DIR_METADATA',  # Allow DIR_METADATA files within res/
    '*.py',  # PRESUBMIT.py sometimes exist.
    '*.pyc',
    '*~',  # Some editors create these as temp files.
    '.*',  # Never makes sense to include dot(files/dirs).
    '*.d.stamp',  # Ignore stamp files
    '*.backup',  # Some tools create temporary backup files.
])

# Zip archive comment used as a marker for archives that contain multiple
# res/ directories (compared against in _HasMultipleResDirs()).
MULTIPLE_RES_MAGIC_STRING = b'magic'
+
+
def ToAndroidLocaleName(chromium_locale):
  """Convert a Chromium locale name into a corresponding Android one."""
  # Handle the special-cased aliases first; they are needed for Android
  # releases *before* 5.0/Lollipop. Should be in sync with
  # build/config/locales.gni.
  alias = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
  if alias:
    return alias

  # A Chromium locale name is '<lang>' or '<lang>-<region>' where <lang> is a
  # 2 or 3 letter language code (ISO 639-1 or 639-2) and region is a
  # capitalized locale region name.
  lang, _, region = chromium_locale.partition('-')
  if not region:
    return lang

  # Translate newer language tags into obsolete ones when a region is present
  # (e.g. 'he-IL' -> 'iw-rIL'). The '<lang>-r<region>' form is acceptable on
  # all versions of Android.
  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
  return '%s-r%s' % (lang, region)
+
+
# ISO 639 language code + optional ("-r" + capitalized region code).
# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
# are supported. E.g. matches 'en', 'fil', 'en-rUS'.
_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')

# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
# be prefixed with 'b+', and may include optional tags.
# e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+
def ToChromiumLocaleName(android_locale):
  """Convert an Android locale name into a Chromium one."""
  lang = region = script = None

  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
  if m:
    lang = m.group(1)
    if m.group(2):
      region = m.group(3)
  elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
    # Android BCP-47 locale, e.g. b+sr+Latn+RS: tag 0 is the 'b' prefix and
    # tag 1 is always the language.
    tags = android_locale.split('+')
    lang = tags[1]

    # The optional script tag is 4ALPHA and always comes first; the optional
    # region tag (2ALPHA or 3DIGIT) follows it.
    remaining = iter(tags[2:])
    tag = next(remaining, None)
    if tag and len(tag) == 4:
      script = tag
      tag = next(remaining, None)
    if tag and len(tag) < 4:
      region = tag

  if not lang:
    return None

  if lang == 'es' and region == 'US':
    # Special case: Android's es-rUS is Chromium's Latin-American Spanish.
    return 'es-419'

  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
  if script:
    lang = '%s-%s' % (lang, script)
  if not region:
    return lang
  return '%s-%s' % (lang, region)
+
+
def IsAndroidLocaleQualifier(string):
  """Returns a truthy value if |string| is a valid Android locale qualifier."""
  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
  if m:
    return m
  return _RE_ANDROID_LOCALE_QUALIFIER_2.match(string)
+
+
def FindLocaleInStringResourceFilePath(file_path):
  """Return Android locale name of a string resource file path.

  Args:
    file_path: A file path.
  Returns:
    The <locale> part when |file_path| has the form
    '.../values-<locale>/<name>.xml' and <locale> is a valid Android locale
    qualifier. None otherwise.
  """
  if file_path.endswith('.xml'):
    dir_name = os.path.basename(os.path.dirname(file_path))
    if dir_name.startswith('values-'):
      qualifier = dir_name[len('values-'):]
      if IsAndroidLocaleQualifier(qualifier):
        return qualifier
  return None
+
+
def ToAndroidLocaleList(locale_list):
  """Convert a list of Chromium locales into the corresponding Android list."""
  return sorted(map(ToAndroidLocaleName, locale_list))
+
# Represents a line from a R.txt file, e.g. "int drawable icon 0x7f010001".
# java_type is 'int' or 'int[]' (see the regex in _ParseTextSymbolsFile);
# value is the remainder of the line (a hex ID or a '{ ... }' array literal).
_TextSymbolEntry = collections.namedtuple('RTextEntry',
    ('java_type', 'resource_type', 'name', 'value'))
+
+
def _GenerateGlobs(pattern):
  """Converts an aapt ignore-assets pattern into a list of globs.

  The returned globs are suitable for build_utils.MatchesGlob. The '!'
  character (aapt's "not chatty" marker) is removed since nothing is printed
  here anyway. The <dir> and <file> prefixes understood by aapt are not
  handled and are assumed absent from |pattern|.
  """
  cleaned = pattern.replace('!', '')
  return cleaned.split(':')
+
+
def DeduceResourceDirsFromFileList(resource_files):
  """Return a list of resource directories from a list of resource files."""
  # Order matters and must match first-seen order: same-named resource files
  # in multiple res/ directories ellide one another (the last one passed
  # wins), so a set cannot be used here without risking flaky builds.
  resource_dirs = []
  for resource_path in resource_files:
    # Resources are always exactly 1 directory deep under res/.
    parent = os.path.dirname(os.path.dirname(resource_path))
    if parent not in resource_dirs:
      resource_dirs.append(parent)

  # A resource dir nested inside another one means some file was not exactly
  # 1 directory deep under res/. E.g.:
  #   sources = ["java/res/values/foo.xml", "java/res/README.md"]
  # would cause "java" to be detected as a resource directory.
  for a, b in itertools.permutations(resource_dirs, 2):
    if not os.path.relpath(a, b).startswith('..'):
      bad_sources = (s for s in resource_files
                     if os.path.dirname(os.path.dirname(s)) == b)
      msg = """\
Resource(s) found that are not in a proper directory structure:
  {}
All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE"."""
      raise Exception(msg.format('\n  '.join(bad_sources)))

  return resource_dirs
+
+
def IterResourceFilesInDirectories(directories,
                                   ignore_pattern=AAPT_IGNORE_PATTERN):
  """Yields (file_path, archive_path) pairs for files under |directories|.

  Files whose archive path matches a glob in |ignore_pattern| are skipped.
  """
  globs = _GenerateGlobs(ignore_pattern)
  for directory in directories:
    for root, _, files in os.walk(directory):
      rel_dir = os.path.relpath(root, directory)
      for name in files:
        if rel_dir == '.':
          archive_path = name
        else:
          archive_path = os.path.join(rel_dir, name)
        if not build_utils.MatchesGlob(archive_path, globs):
          yield os.path.join(root, name), archive_path
+
+
class ResourceInfoFile(object):
  """Helper for building up .res.info files."""

  def __init__(self):
    # Maps archive_path -> source_path for the current target.
    self._entries = {}
    # Buffered (old_archive_path, new_archive_path) tuples, applied in Write().
    self._renames = []
    # Using both AddMapping and MergeInfoFile is not currently supported.
    self._add_mapping_was_called = False

  def AddMapping(self, archive_path, source_path):
    """Adds a single |archive_path| -> |source_path| entry."""
    self._add_mapping_was_called = True
    # "values/" files do not end up in the apk except through resources.arsc.
    if archive_path.startswith('values'):
      return
    source_path = os.path.normpath(source_path)
    existing = self._entries.get(archive_path)
    if existing is None:
      self._entries[archive_path] = source_path
    elif existing != source_path:
      raise Exception('Duplicate AddMapping for "{}". old={} new={}'.format(
          archive_path, existing, source_path))

  def RegisterRename(self, old_archive_path, new_archive_path):
    """Records an archive_path rename.

    |old_archive_path| does not need to currently exist in the mappings.
    Renames are buffered and replayed only when Write() is called.
    """
    if old_archive_path.startswith('values'):
      return
    self._renames.append((old_archive_path, new_archive_path))

  def MergeInfoFile(self, info_file_path):
    """Merges the mappings from |info_file_path| into this object.

    Any existing entries are overridden.
    """
    assert not self._add_mapping_was_called
    # Clobbering is deliberate here: it is how resource overrides work.
    with open(info_file_path) as f:
      for line in f:
        archive_path, source_path = line.rstrip().split('\t')
        self._entries[archive_path] = source_path

  def _ApplyRenames(self):
    """Replays buffered renames onto the entry map and returns it."""
    seen = set()
    entries = self._entries
    for rename_tup in self._renames:
      # Duplicate entries happen for resource overrides. The "seen" set still
      # lets us error out when one old_archive_path is renamed to two
      # different new_archive_paths.
      if rename_tup in seen:
        continue
      seen.add(rename_tup)
      old_archive_path, new_archive_path = rename_tup
      entries[new_archive_path] = entries.pop(old_archive_path)

    # Invalidate state so further use of this object fails loudly.
    self._entries = None
    self._renames = None
    return entries

  def Write(self, info_file_path):
    """Applies renames and writes out the file.

    No other methods may be called after this.
    """
    entries = self._ApplyRenames()
    lines = ['{}\t{}\n'.format(a, s) for a, s in entries.items()]
    with open(info_file_path, 'w') as info_file:
      info_file.writelines(sorted(lines))
+
+
def _ParseTextSymbolsFile(path, fix_package_ids=False):
  """Given an R.txt file, returns a list of _TextSymbolEntry.

  Args:
    path: Input file path.
    fix_package_ids: if True, package IDs read from the file will be
      normalised to 0x7f (see _FixPackageIds).
  Returns:
    A list of _TextSymbolEntry instances.
  Raises:
    Exception: An unexpected line was detected in the input.
  """
  line_re = re.compile(r'(int(?:\[\])?) (\w+) (\w+) (.+)$')
  entries = []
  with open(path) as f:
    for line in f:
      m = line_re.match(line)
      if not m:
        raise Exception('Unexpected line in R.txt: %s' % line)
      java_type, resource_type, name, value = m.groups()
      if fix_package_ids:
        value = _FixPackageIds(value)
      entries.append(_TextSymbolEntry(java_type, resource_type, name, value))
  return entries
+
+
def _FixPackageIds(resource_value):
  """Normalises 0x00 package IDs in an R.txt value to 0x7f.

  Resource IDs for resources belonging to regular APKs have 0x7f as their
  first byte (the package id). Webview, however, is built as a shared library
  and aapt is passed --shared-resources, which turns some package ids into
  0x00. The generated R.java code then rewrites them to the real package id
  at runtime, so 0x00 is mapped back to 0x7f here.

  |resource_value| is either a single value like '0x12345678' or an array
  like '{ 0xfedcba98, 0x01234567, 0x56789abc }'.
  """
  return resource_value.replace('0x00', '0x7f')
+
+
def _GetRTxtResourceNames(r_txt_path):
  """Parse an R.txt file and extract the set of resource names from it."""
  return set(entry.name for entry in _ParseTextSymbolsFile(r_txt_path))
+
+
def GetRTxtStringResourceNames(r_txt_path):
  """Parse an R.txt file and return its sorted string resource names."""
  names = {
      entry.name
      for entry in _ParseTextSymbolsFile(r_txt_path)
      if entry.resource_type == 'string'
  }
  return sorted(names)
+
+
def GenerateStringResourcesAllowList(module_r_txt_path, allowlist_r_txt_path):
  """Generate an allowlist of string resource IDs.

  Args:
    module_r_txt_path: Input base module R.txt path.
    allowlist_r_txt_path: Input allowlist R.txt path.
  Returns:
    A dictionary mapping numerical resource IDs to the corresponding
    string resource names. The ID values are taken from string resources in
    |module_r_txt_path| that are also listed by name in
    |allowlist_r_txt_path|.
  """
  allowlisted_names = set()
  for entry in _ParseTextSymbolsFile(allowlist_r_txt_path):
    if entry.resource_type == 'string':
      allowlisted_names.add(entry.name)

  result = {}
  for entry in _ParseTextSymbolsFile(module_r_txt_path):
    if entry.resource_type == 'string' and entry.name in allowlisted_names:
      result[int(entry.value, 0)] = entry.name
  return result
+
+
class RJavaBuildOptions:
  """A class used to model the various ways to build an R.java file.

  This is used to control which resource ID variables will be final or
  non-final, and whether an onResourcesLoaded() method will be generated
  to adjust the non-final ones, when the corresponding library is loaded
  at runtime.

  Note that by default, all resources are final, and there is no
  method generated, which corresponds to calling ExportNoResources().
  """
  def __init__(self):
    # When True, IDs not listed in |resources_allowlist| are declared final.
    self.has_constant_ids = True
    # Optional set of resource names whose IDs should stay non-final.
    self.resources_allowlist = None
    # Whether to generate an onResourcesLoaded() method.
    self.has_on_resources_loaded = False
    # Whether non-int[] styleable constants are also made non-final.
    self.export_const_styleable = False
    # Optional package ID to substitute into final resource IDs.
    self.final_package_id = None
    # When True, onResourcesLoaded() is generated with an empty body.
    self.fake_on_resources_loaded = False

  def ExportNoResources(self):
    """Make all resource IDs final, and don't generate a method."""
    self.has_constant_ids = True
    self.resources_allowlist = None
    self.has_on_resources_loaded = False
    self.export_const_styleable = False

  def ExportAllResources(self):
    """Make all resource IDs non-final in the R.java file."""
    self.has_constant_ids = False
    self.resources_allowlist = None

  def ExportSomeResources(self, r_txt_file_path):
    """Only select specific resource IDs to be non-final.

    Args:
      r_txt_file_path: The path to an R.txt file. All resources named
      in it will be non-final in the generated R.java file, all others
      will be final.
    """
    self.has_constant_ids = True
    self.resources_allowlist = _GetRTxtResourceNames(r_txt_file_path)

  def ExportAllStyleables(self):
    """Make all styleable constants non-final, even non-resources ones.

    Resources that are styleable but not of int[] type are not actually
    resource IDs but constants. By default they are always final. Call this
    method to make them non-final anyway in the final R.java file.
    """
    self.export_const_styleable = True

  def GenerateOnResourcesLoaded(self, fake=False):
    """Generate an onResourcesLoaded() method.

    This Java method will be called at runtime by the framework when
    the corresponding library (which includes the R.java source file)
    will be loaded at runtime. This corresponds to the --shared-resources
    or --app-as-shared-lib flags of 'aapt package'.

    If |fake|, then the method will be empty bodied to compile faster. This
    is useful for dummy R.java files that will eventually be replaced by
    real ones.
    """
    self.has_on_resources_loaded = True
    self.fake_on_resources_loaded = fake

  def SetFinalPackageId(self, package_id):
    """Sets a package ID to be used for resources marked final."""
    self.final_package_id = package_id

  def _MaybeRewriteRTxtPackageIds(self, r_txt_path):
    """Rewrites package IDs in the R.txt file if necessary.

    If SetFinalPackageId() was called, some of the resource IDs may have had
    their package ID changed. This function rewrites the R.txt file to match
    those changes.
    """
    if self.final_package_id is None:
      return

    entries = _ParseTextSymbolsFile(r_txt_path)
    with open(r_txt_path, 'w') as f:
      for entry in entries:
        value = entry.value
        if self._IsResourceFinal(entry):
          # Substitute the configured package byte for the 0x00/0x7f one.
          value = re.sub(r'0x(?:00|7f)',
                         '0x{:02x}'.format(self.final_package_id), value)
        f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type,
                                       entry.name, value))

  def _IsResourceFinal(self, entry):
    """Determines whether a resource should be final or not.

    Args:
      entry: A _TextSymbolEntry instance.
    Returns:
      True iff the corresponding entry should be final.
    """
    if entry.resource_type == 'styleable' and entry.java_type != 'int[]':
      # A styleable constant may be exported as non-final after all.
      return not self.export_const_styleable
    elif not self.has_constant_ids:
      # Every resource is non-final
      return False
    elif not self.resources_allowlist:
      # No allowlist means every resource ID is final.
      return True
    else:
      # Otherwise, only resources named in the allowlist are non-final.
      return entry.name not in self.resources_allowlist
+
+
def CreateRJavaFiles(srcjar_dir,
                     package,
                     main_r_txt_file,
                     extra_res_packages,
                     rjava_build_options,
                     srcjar_out,
                     custom_root_package_name=None,
                     grandparent_custom_package_name=None,
                     extra_main_r_text_files=None,
                     ignore_mismatched_values=False):
  """Create all R.java files for a set of packages and R.txt files.

  Args:
    srcjar_dir: The top-level output directory for the generated files.
    package: Package name for R java source files which will inherit
      from the root R java file.
    main_r_txt_file: The main R.txt file containing the valid values
      of _all_ resource IDs.
    extra_res_packages: A list of extra package names.
    rjava_build_options: An RJavaBuildOptions instance that controls how
      exactly the R.java file is generated.
    srcjar_out: Path of desired output srcjar.
    custom_root_package_name: Custom package name for module root R.java file,
      (eg. vr for gen.vr package).
    grandparent_custom_package_name: Custom root package name for the root
      R.java file to inherit from. DFM root R.java files will have "base"
      as the grandparent_custom_package_name. The format of this package name
      is identical to custom_root_package_name.
      (eg. for vr grandparent_custom_package_name would be "base")
    extra_main_r_text_files: R.txt files to be added to the root R.java file.
    ignore_mismatched_values: If True, ignores if a resource appears multiple
      times with different entry values (useful when all the values are
      dummy anyways).
  Raises:
    Exception if a package name appears several times in |extra_res_packages|
  """
  rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file)

  packages = list(extra_res_packages)

  if package and package not in packages:
    # Sometimes, an apk target and a resources target share the same
    # AndroidManifest.xml and thus |package| will already be in |packages|.
    packages.append(package)

  # Map of (resource_type, name) -> Entry.
  # Contains the correct values for resources.
  all_resources = {}
  all_resources_by_type = collections.defaultdict(list)

  main_r_text_files = [main_r_txt_file]
  if extra_main_r_text_files:
    main_r_text_files.extend(extra_main_r_text_files)
  for r_txt_file in main_r_text_files:
    for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True):
      entry_key = (entry.resource_type, entry.name)
      if entry_key in all_resources:
        if not ignore_mismatched_values:
          assert entry == all_resources[entry_key], (
              'Input R.txt %s provided a duplicate resource with a different '
              'entry value. Got %s, expected %s.' %
              (r_txt_file, entry, all_resources[entry_key]))
      else:
        all_resources[entry_key] = entry
        all_resources_by_type[entry.resource_type].append(entry)
        assert entry.resource_type in _ALL_RESOURCE_TYPES, (
            'Unknown resource type: %s, add to _ALL_RESOURCE_TYPES!' %
            entry.resource_type)

  if custom_root_package_name:
    # Custom package name is available, thus use it for root_r_java_package.
    root_r_java_package = GetCustomPackagePath(custom_root_package_name)
  else:
    # Create a unique name using srcjar_out. Underscores are added to ensure
    # no reserved keywords are used for directory names.
    # Fix: the pattern must be a raw string -- '\w' in a normal string
    # literal is a deprecated escape sequence.
    root_r_java_package = re.sub(r'[^\w\.]', '', srcjar_out.replace('/', '._'))

  root_r_java_dir = os.path.join(srcjar_dir, *root_r_java_package.split('.'))
  build_utils.MakeDirectory(root_r_java_dir)
  root_r_java_path = os.path.join(root_r_java_dir, 'R.java')
  root_java_file_contents = _RenderRootRJavaSource(
      root_r_java_package, all_resources_by_type, rjava_build_options,
      grandparent_custom_package_name)
  with open(root_r_java_path, 'w') as f:
    f.write(root_java_file_contents)

  for package in packages:
    _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
                           rjava_build_options)
+
+
def _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package,
                           rjava_build_options):
  """Generates an R.java source file for |package| under |srcjar_dir|."""
  out_dir = os.path.join(srcjar_dir, *package.split('.'))
  build_utils.MakeDirectory(out_dir)
  contents = _RenderRJavaSource(package, root_r_java_package,
                                rjava_build_options)
  with open(os.path.join(out_dir, 'R.java'), 'w') as f:
    f.write(contents)
+
+
def _GetNonSystemIndex(entry):
  """Returns the index of the first application resource ID in an array.

  Resource IDs inside resource arrays are sorted. Application resource IDs
  start with 0x7f while system resource IDs start with 0x01, so system IDs
  always come first. The returned index is where package ID rewriting should
  begin (system resource IDs must not be rewritten).
  """
  res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value)
  return next((i for i, res_id in enumerate(res_ids)
               if res_id.startswith('0x7f')), len(res_ids))
+
+
def _RenderRJavaSource(package, root_r_java_package, rjava_build_options):
  """Generates the contents of a R.java file."""
  # Per-package R.java files only subclass the root package's R classes and
  # (optionally) forward onResourcesLoaded() to it, so resource IDs live in
  # a single place.
  template = Template(
      """/* AUTO-GENERATED FILE. DO NOT MODIFY. */

package {{ package }};

public final class R {
    {% for resource_type in resource_types %}
    public static final class {{ resource_type }} extends
        {{ root_package }}.R.{{ resource_type }} {}
    {% endfor %}
    {% if has_on_resources_loaded %}
    public static void onResourcesLoaded(int packageId) {
        {{ root_package }}.R.onResourcesLoaded(packageId);
    }
    {% endif %}
}
""",
      trim_blocks=True,
      lstrip_blocks=True)

  return template.render(
      package=package,
      resource_types=sorted(_ALL_RESOURCE_TYPES),
      root_package=root_r_java_package,
      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded)
+
+
def GetCustomPackagePath(package_name):
  """Returns the root R.java package path for a custom module package name."""
  return 'gen.{}_module'.format(package_name)
+
+
def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options,
                           grandparent_custom_package_name):
  """Render the root R.java source file. See _CreateRJavaSourceFile for args info."""
  # Partition entries by whether their IDs should be declared final.
  final_resources_by_type = collections.defaultdict(list)
  non_final_resources_by_type = collections.defaultdict(list)
  for res_type, resources in all_resources_by_type.items():
    for entry in resources:
      # Entries in styleable that are not int[] are not actually resource ids
      # but constants.
      if rjava_build_options._IsResourceFinal(entry):
        final_resources_by_type[res_type].append(entry)
      else:
        non_final_resources_by_type[res_type].append(entry)

  # Here we diverge from what aapt does. Because we have so many
  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
  # Java imposes. For this reason we split onResourcesLoaded into different
  # methods for each resource type.
  extends_string = ''
  dep_path = ''
  if grandparent_custom_package_name:
    extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
    dep_path = GetCustomPackagePath(grandparent_custom_package_name)

  # |extends_string| is concatenated into the template text (rather than
  # rendered as a variable) because it itself contains jinja placeholders.
  template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */

package {{ package }};

public final class R {
    {% for resource_type in resource_types %}
    public static class {{ resource_type }} """ + extends_string + """ {
        {% for e in final_resources[resource_type] %}
        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
        {% endfor %}
        {% for e in non_final_resources[resource_type] %}
            {% if e.value != '0' %}
        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
            {% else %}
        public static {{ e.java_type }} {{ e.name }};
            {% endif %}
        {% endfor %}
    }
    {% endfor %}
    {% if has_on_resources_loaded %}
      {% if fake_on_resources_loaded %}
    public static void onResourcesLoaded(int packageId) {
    }
      {% else %}
    private static boolean sResourcesDidLoad;

    private static void patchArray(
            int[] arr, int startIndex, int packageIdTransform) {
        for (int i = startIndex; i < arr.length; ++i) {
            arr[i] ^= packageIdTransform;
        }
    }

    public static void onResourcesLoaded(int packageId) {
        if (sResourcesDidLoad) {
            return;
        }
        sResourcesDidLoad = true;
        int packageIdTransform = (packageId ^ 0x7f) << 24;
        {# aapt2 makes int[] resources refer to other resources by reference
           rather than by value. Thus, need to transform the int[] resources
           first, before the referenced resources are transformed in order to
           ensure the transform applies exactly once.
           See https://crbug.com/1237059 for context.
        #}
        {% for resource_type in resource_types %}
        {% for e in non_final_resources[resource_type] %}
        {% if e.java_type == 'int[]' %}
        patchArray({{ e.resource_type }}.{{ e.name }}, {{ startIndex(e) }}, \
packageIdTransform);
        {% endif %}
        {% endfor %}
        {% endfor %}
        {% for resource_type in resource_types %}
        onResourcesLoaded{{ resource_type|title }}(packageIdTransform);
        {% endfor %}
    }
    {% for res_type in resource_types %}
    private static void onResourcesLoaded{{ res_type|title }} (
            int packageIdTransform) {
        {% for e in non_final_resources[res_type] %}
        {% if res_type != 'styleable' and e.java_type != 'int[]' %}
        {{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;
        {% endif %}
        {% endfor %}
    }
    {% endfor %}
      {% endif %}
    {% endif %}
}
""",
                      trim_blocks=True,
                      lstrip_blocks=True)
  return template.render(
      package=package,
      resource_types=sorted(_ALL_RESOURCE_TYPES),
      has_on_resources_loaded=rjava_build_options.has_on_resources_loaded,
      fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded,
      final_resources=final_resources_by_type,
      non_final_resources=non_final_resources_by_type,
      startIndex=_GetNonSystemIndex,
      parent_path=dep_path)
+
+
def ExtractBinaryManifestValues(aapt2_path, apk_path):
  """Returns (version_code, version_name, package_name) for the given apk."""
  cmd = [
      aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml'
  ]
  output = subprocess.check_output(cmd).decode('utf-8')

  def first_group(pattern):
    return re.search(pattern, output).group(1)

  version_code = first_group(r'versionCode.*?=(\d*)')
  version_name = first_group(r'versionName.*?="(.*?)"')
  package_name = first_group(r'package.*?="(.*?)"')
  return version_code, version_name, package_name
+
+
def ExtractArscPackage(aapt2_path, apk_path):
  """Returns (package_name, package_id) of resources.arsc from apk_path.

  When the apk does not have any entries in its resources file, in recent aapt2
  versions it will not contain a "Package" line. The package is not even in the
  actual resources.arsc/resources.pb file (which itself is mostly empty). Thus
  return (None, None) when dump succeeds and there are no errors to indicate
  that the package name does not exist in the resources file.
  """
  proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  # Stream stdout so the (potentially large) dump is not fully buffered.
  for line in proc.stdout:
    line = line.decode('utf-8')
    # Package name=org.chromium.webview_shell id=7f
    if line.startswith('Package'):
      # The rest of the dump is not needed; stop aapt2 early.
      proc.kill()
      parts = line.split()
      package_name = parts[1].split('=')[1]
      # Strip the 'id=' prefix; the ID is hexadecimal.
      package_id = parts[2][3:]
      return package_name, int(package_id, 16)

  # aapt2 currently crashes when dumping webview resources, but not until after
  # it prints the "Package" line (b/130553900).
  stderr_output = proc.stderr.read().decode('utf-8')
  if stderr_output:
    sys.stderr.write(stderr_output)
    raise Exception('Failed to find arsc package name')
  return None, None
+
+
def _RenameSubdirsWithPrefix(dir_path, prefix):
  """Renames each immediate subdirectory of |dir_path| to '<prefix>_<name>'.

  Returns:
    The list of renamed subdirectory paths.
  """
  renamed = []
  for entry in os.listdir(dir_path):
    old_path = os.path.join(dir_path, entry)
    if not os.path.isdir(old_path):
      continue
    new_path = os.path.join(dir_path, '{}_{}'.format(prefix, entry))
    os.rename(old_path, new_path)
    renamed.append(new_path)
  return renamed
+
+
def _HasMultipleResDirs(zip_path):
  """Checks for magic comment set by prepare_resources.py

  Returns: True iff the zipfile has the magic comment that means it contains
  multiple res/ dirs inside instead of just contents of a single res/ dir
  (without a wrapping res/).
  """
  with zipfile.ZipFile(zip_path) as z:
    comment = z.comment
  return comment == MULTIPLE_RES_MAGIC_STRING
+
+
def ExtractDeps(dep_zips, deps_dir):
  """Extract a list of resource dependency zip files.

  Args:
    dep_zips: A list of zip file paths, each one will be extracted to
      a subdirectory of |deps_dir|, named after the zip file's path (e.g.
      '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
    deps_dir: Top-level extraction directory.
  Returns:
    The list of all sub-directory paths, relative to |deps_dir|.
  Raises:
    Exception: If a sub-directory already exists with the same name before
      extraction.
  """
  dep_subdirs = []
  for zip_path in dep_zips:
    subdir_name = zip_path.replace(os.path.sep, '_')
    subdir = os.path.join(deps_dir, subdir_name)
    if os.path.exists(subdir):
      raise Exception('Resource zip name conflict: ' + subdir_name)
    build_utils.ExtractAll(zip_path, path=subdir)
    if not _HasMultipleResDirs(zip_path):
      dep_subdirs.append(subdir)
    else:
      # The basename of each extracted directory is later used to name a zip
      # during resource compilation; embedding the source path in the name
      # helps blame errors on the correct android_resources target (e.g.
      # 0_res becomes chrome_android_chrome_app_java_resources_0_res).
      dep_subdirs.extend(_RenameSubdirsWithPrefix(subdir, subdir_name))
  return dep_subdirs
+
+
class _ResourceBuildContext(object):
  """A temporary directory for packaging and compiling Android resources.

  Args:
    temp_dir: Optional root build directory path. If None, a temporary
      directory will be created, and removed in Close().
  """

  def __init__(self, temp_dir=None, keep_files=False):
    """Initialized the context."""
    # The top-level temporary directory.
    if temp_dir:
      self.temp_dir = temp_dir
      os.makedirs(temp_dir)
    else:
      self.temp_dir = tempfile.mkdtemp()
    self.remove_on_exit = not keep_files

    # Resources extracted from dependency zip files.
    self.deps_dir = os.path.join(self.temp_dir, 'deps')
    # aapt-generated files.
    self.gen_dir = os.path.join(self.temp_dir, 'gen')
    # Generated R.java files.
    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
    for subdir in (self.deps_dir, self.gen_dir, self.srcjar_dir):
      os.mkdir(subdir)

    # Temporary file locations.
    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
    self.srcjar_path = os.path.join(self.temp_dir, 'R.srcjar')
    self.info_path = os.path.join(self.temp_dir, 'size.info')
    self.stable_ids_path = os.path.join(self.temp_dir, 'in_ids.txt')
    self.emit_ids_path = os.path.join(self.temp_dir, 'out_ids.txt')
    self.proguard_path = os.path.join(self.temp_dir, 'keeps.flags')
    self.proguard_main_dex_path = os.path.join(self.temp_dir, 'maindex.flags')
    self.arsc_path = os.path.join(self.temp_dir, 'out.ap_')
    self.proto_path = os.path.join(self.temp_dir, 'out.proto.ap_')
    self.optimized_arsc_path = os.path.join(self.temp_dir, 'out.opt.ap_')
    self.optimized_proto_path = os.path.join(self.temp_dir, 'out.opt.proto.ap_')

  def Close(self):
    """Close the context and destroy all temporary files."""
    if self.remove_on_exit:
      shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext(temp_dir=None, keep_files=False):
+  """Generator for a _ResourceBuildContext instance.
+
+  Args:
+    temp_dir: Optional build directory, forwarded to _ResourceBuildContext.
+    keep_files: If True, temporary files are not removed on exit.
+  Yields:
+    A new _ResourceBuildContext instance, Close()d when the block exits.
+  """
+  context = None
+  try:
+    context = _ResourceBuildContext(temp_dir, keep_files)
+    yield context
+  finally:
+    # Only clean up if construction succeeded.
+    if context:
+      context.Close()
+
+
+def ResourceArgsParser():
+  """Create an argparse.ArgumentParser instance with common argument groups.
+
+  Returns:
+    A tuple of (parser, in_group, out_group) corresponding to the parser
+    instance, and the input and output argument groups for it, respectively.
+  """
+  parser = argparse.ArgumentParser(description=__doc__)
+
+  input_opts = parser.add_argument_group('Input options')
+  output_opts = parser.add_argument_group('Output options')
+
+  # --depfile is an output option shared by all resource scripts.
+  build_utils.AddDepfileOption(output_opts)
+
+  input_opts.add_argument('--include-resources', required=True, action="append",
+                          help='Paths to arsc resource files used to link '
+                          'against. Can be specified multiple times.')
+
+  input_opts.add_argument('--dependencies-res-zips', required=True,
+                    help='Resources zip archives from dependents. Required to '
+                         'resolve @type/foo references into dependent '
+                         'libraries.')
+
+  input_opts.add_argument(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for.')
+
+  return (parser, input_opts, output_opts)
+
+
+def HandleCommonOptions(options):
+  """Handle common command-line options after parsing.
+
+  Args:
+    options: the result of parse_args() on the parser returned by
+        ResourceArgsParser(). This function updates a few common fields
+        in place, converting GN-list strings into Python lists.
+  """
+  options.include_resources = [build_utils.ParseGnList(r) for r in
+                               options.include_resources]
+  # Flatten list of include resources list to make it easier to use.
+  options.include_resources = [r for resources in options.include_resources
+                               for r in resources]
+
+  options.dependencies_res_zips = (
+      build_utils.ParseGnList(options.dependencies_res_zips))
+
+  # Don't use [] as default value since some script explicitly pass "".
+  if options.extra_res_packages:
+    options.extra_res_packages = (
+        build_utils.ParseGnList(options.extra_res_packages))
+  else:
+    options.extra_res_packages = []
+
+
+def ParseAndroidResourceStringsFromXml(xml_data):
+  """Parse an Android xml resource file and extract strings from it.
+
+  Args:
+    xml_data: XML file data.
+  Returns:
+    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
+    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
+    corresponding to namespaces declared in the <resources> element.
+  Raises:
+    Exception: If no <resources> start tag is found, or a <string> element
+      has no matching </string> close tag.
+  """
+  # NOTE: This uses regular expression matching because parsing with something
+  # like ElementTree makes it tedious to properly parse some of the structured
+  # text found in string resources, e.g.:
+  #     <string msgid="3300176832234831527" \
+  #         name="abc_shareactionprovider_share_with_application">\
+  #         "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
+  #     </string>
+  result = {}
+
+  # Find <resources> start tag and extract namespaces from it.
+  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
+  if not m:
+    raise Exception('<resources> start tag expected: ' + xml_data)
+  input_data = xml_data[m.end():]
+  resource_attrs = m.group(1)
+  # NOTE(review): not a raw string literal; '\s' and '\w' currently work but
+  # raise DeprecationWarning on modern Python — consider r'...'.
+  re_namespace = re.compile('\s*(xmlns:(\w+)="([^"]+)")')
+  namespaces = {}
+  while resource_attrs:
+    m = re_namespace.match(resource_attrs)
+    if not m:
+      break
+    namespaces[m.group(2)] = m.group(3)
+    resource_attrs = resource_attrs[m.end(1):]
+
+  # Find each string element now.
+  re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
+  re_string_element_end = re.compile('</string>')
+  while input_data:
+    m = re_string_element_start.search(input_data)
+    if not m:
+      break
+    name = m.group(2)
+    input_data = input_data[m.end():]
+    m2 = re_string_element_end.search(input_data)
+    if not m2:
+      raise Exception('Expected closing string tag: ' + input_data)
+    text = input_data[:m2.start()]
+    input_data = input_data[m2.end():]
+    # Strip one pair of surrounding double-quotes, if present (matches the
+    # quoting emitted by GenerateAndroidResourceStringsXml()).
+    if len(text) and text[0] == '"' and text[-1] == '"':
+      text = text[1:-1]
+    result[name] = text
+
+  return result, namespaces
+
+
+def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
+  """Generate an XML text corresponding to an Android resource strings map.
+
+  Args:
+    names_to_utf8_text: A dictionary mapping resource names to localized
+      text (encoded as UTF-8).
+    namespaces: A map of namespace prefix to URL.
+  Returns:
+    New non-Unicode string containing an XML data structure describing the
+    input as an Android resource .xml file.
+  """
+  result = '<?xml version="1.0" encoding="utf-8"?>\n'
+  result += '<resources'
+  if namespaces:
+    for prefix, url in sorted(namespaces.items()):
+      result += ' xmlns:%s="%s"' % (prefix, url)
+  result += '>\n'
+  if not names_to_utf8_text:
+    result += '<!-- this file intentionally empty -->\n'
+  else:
+    for name, utf8_text in sorted(names_to_utf8_text.items()):
+      # Values are always emitted double-quoted, matching the quoting that
+      # ParseAndroidResourceStringsFromXml() strips on input.
+      result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
+  result += '</resources>\n'
+  return result.encode('utf8')
+
+
+def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
+  """Remove unwanted localized strings from an Android resource .xml file.
+
+  This function takes a |string_predicate| callable object that will
+  receive a resource string name, and should return True iff the
+  corresponding <string> element should be kept in the file.
+
+  Args:
+    xml_file_path: Android resource strings xml file path.
+    string_predicate: A predicate function which will receive the string name
+      and shall return True iff the corresponding <string> element should be
+      kept in the file.
+  """
+  with open(xml_file_path) as f:
+    xml_data = f.read()
+  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)
+
+  # Track whether anything was removed so the file is only rewritten when
+  # its content actually changed.
+  string_deletion = False
+  for name in list(strings_map.keys()):
+    if not string_predicate(name):
+      del strings_map[name]
+      string_deletion = True
+
+  if string_deletion:
+    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
+    with open(xml_file_path, 'wb') as f:
+      f.write(new_xml_data)
diff --git a/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py b/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py
new file mode 100755
index 0000000000..62d5b431e9
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# coding: utf-8
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+ 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils # pylint: disable=relative-import
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+<!-- this file intentionally empty -->
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+ 'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+ 'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+ 'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+ 'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_ALLOWLIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test allowlist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_ALLOWLIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string ThisStringDoesNotAppear 0x7f0fffff
+'''
+
+_TEST_R_TEXT_RESOURCES_IDS = {
+ 0x7f0c0105: 'AllowedDomainsForAppsDesc',
+ 0x7f0c0107: 'AlternateErrorPagesEnabledDesc',
+}
+
+# Names of string resources in _TEST_R_TXT, should be sorted!
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([
+ 'AllowedDomainsForAppsDesc',
+ 'AllowedDomainsForAppsTitle',
+ 'AlternateErrorPagesEnabledDesc',
+ 'AlternateErrorPagesEnabledTitle',
+ 'AuthAndroidNegotiateAccountTypeDesc',
+])
+
+
+def _CreateTestFile(tmp_dir, file_name, file_data):
+  """Writes |file_data| to |file_name| under |tmp_dir|; returns its path."""
+  file_path = os.path.join(tmp_dir, file_name)
+  with open(file_path, 'wt') as f:
+    f.write(file_data)
+  return file_path
+
+
+
+class ResourceUtilsTest(unittest.TestCase):
+  """Unit-tests for the helpers in util/resource_utils.py."""
+
+  def test_GetRTxtStringResourceNames(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      self.assertListEqual(
+          resource_utils.GetRTxtStringResourceNames(tmp_file),
+          _TEST_R_TXT_STRING_RESOURCE_NAMES)
+
+  def test_GenerateStringResourcesAllowList(self):
+    with build_utils.TempDir() as tmp_dir:
+      tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT)
+      tmp_allowlist_rtxt_file = _CreateTestFile(tmp_dir, "test_allowlist_R.txt",
+                                                _TEST_ALLOWLIST_R_TXT)
+      self.assertDictEqual(
+          resource_utils.GenerateStringResourcesAllowList(
+              tmp_module_rtxt_file, tmp_allowlist_rtxt_file),
+          _TEST_R_TEXT_RESOURCES_IDS)
+
+  def test_IsAndroidLocaleQualifier(self):
+    good_locales = [
+        'en',
+        'en-rUS',
+        'fil',
+        'fil-rPH',
+        'iw',
+        'iw-rIL',
+        'b+en',
+        'b+en+US',
+        'b+ja+Latn',
+        'b+ja+JP+Latn',
+        'b+cmn+Hant-TW',
+    ]
+    bad_locales = [
+        'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+'
+    ]
+    for locale in good_locales:
+      self.assertTrue(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a good locale!" % locale)
+
+    for locale in bad_locales:
+      self.assertFalse(
+          resource_utils.IsAndroidLocaleQualifier(locale),
+          msg="'%s' should be a bad locale!" % locale)
+
+  def test_ToAndroidLocaleName(self):
+    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
+        'en': 'en',
+        'en-US': 'en-rUS',
+        'en-FOO': 'en-rFOO',
+        'fil': 'tl',
+        'tl': 'tl',
+        'he': 'iw',
+        'he-IL': 'iw-rIL',
+        'id': 'in',
+        'id-BAR': 'in-rBAR',
+        'nb': 'nb',
+        'yi': 'ji'
+    }
+    for chromium_locale, android_locale in \
+        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.items():
+      result = resource_utils.ToAndroidLocaleName(chromium_locale)
+      self.assertEqual(result, android_locale)
+
+  def test_ToChromiumLocaleName(self):
+    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
+        'foo': 'foo',
+        'foo-rBAR': 'foo-BAR',
+        'b+lll': 'lll',
+        'b+ll+Extra': 'll',
+        'b+ll+RR': 'll-RR',
+        'b+lll+RR+Extra': 'lll-RR',
+        'b+ll+RRR+Extra': 'll-RRR',
+        'b+ll+Ssss': 'll-Ssss',
+        'b+ll+Ssss+Extra': 'll-Ssss',
+        'b+ll+Ssss+RR': 'll-Ssss-RR',
+        'b+ll+Ssss+RRR': 'll-Ssss-RRR',
+        'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR',
+        'b+ll+Whatever': 'll',
+        'en': 'en',
+        'en-rUS': 'en-US',
+        'en-US': None,
+        'en-FOO': None,
+        'en-rFOO': 'en-FOO',
+        'es-rES': 'es-ES',
+        'es-rUS': 'es-419',
+        'tl': 'fil',
+        'fil': 'fil',
+        'iw': 'he',
+        'iw-rIL': 'he-IL',
+        'b+iw+IL': 'he-IL',
+        'in': 'id',
+        'in-rBAR': 'id-BAR',
+        'id-rBAR': 'id-BAR',
+        'nb': 'nb',
+        'no': 'nb',  # http://crbug.com/920960
+    }
+    for android_locale, chromium_locale in \
+        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.items():
+      result = resource_utils.ToChromiumLocaleName(android_locale)
+      self.assertEqual(result, chromium_locale)
+
+  def test_FindLocaleInStringResourceFilePath(self):
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values/whatever.xml'))
+    self.assertEqual(
+        'foo',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/whatever.xml'))
+    self.assertEqual(
+        'foo-rBAR',
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo-rBAR/whatever.xml'))
+    self.assertEqual(
+        None,
+        resource_utils.FindLocaleInStringResourceFilePath(
+            'res/values-foo/ignore-subdirs/whatever.xml'))
+
+  def test_ParseAndroidResourceStringsFromXml(self):
+    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
+        _TEST_XML_INPUT_1)
+    self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1)
+    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)
+
+  def test_GenerateAndroidResourceStringsXml(self):
+    # First, an empty strings map, with no namespaces
+    result = resource_utils.GenerateAndroidResourceStringsXml({})
+    self.assertEqual(result.decode('utf8'), _TEST_XML_OUTPUT_EMPTY)
+
+    result = resource_utils.GenerateAndroidResourceStringsXml(
+        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+    self.assertEqual(result.decode('utf8'), _TEST_XML_INPUT_1)
+
+  @staticmethod
+  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
+    """Writes a values-<locale>/strings.xml file; returns its path."""
+    values_dir = os.path.join(output_dir, 'values-' + locale)
+    build_utils.MakeDirectory(values_dir)
+    file_path = os.path.join(values_dir, 'strings.xml')
+    with open(file_path, 'wb') as f:
+      file_data = resource_utils.GenerateAndroidResourceStringsXml(
+          string_map, namespaces)
+      f.write(file_data)
+    return file_path
+
+  def _CheckTestResourceFile(self, file_path, expected_data):
+    """Asserts that |file_path| contains exactly |expected_data|."""
+    with open(file_path) as f:
+      file_data = f.read()
+    self.assertEqual(file_data, expected_data)
+
+  def test_FilterAndroidResourceStringsXml(self):
+    with build_utils.TempDir() as tmp_path:
+      test_file = self._CreateTestResourceFile(
+          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
+      resource_utils.FilterAndroidResourceStringsXml(
+          test_file, lambda x: x in _TEST_RESOURCES_ALLOWLIST_1)
+      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/third_party/libwebrtc/build/android/gyp/util/resources_parser.py b/third_party/libwebrtc/build/android/gyp/util/resources_parser.py
new file mode 100644
index 0000000000..8d8d69cce8
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/resources_parser.py
@@ -0,0 +1,142 @@
+# Copyright 2020 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import re
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+
+_TextSymbolEntry = collections.namedtuple(
+ 'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))
+
+_DUMMY_RTXT_ID = '0x7f010001'
+_DUMMY_RTXT_INDEX = '1'
+
+
+def _ResourceNameToJavaSymbol(resource_name):
+  """Returns |resource_name| with '.' and ':' replaced by '_' for Java use."""
+  return re.sub('[\.:]', '_', resource_name)
+
+
+class RTxtGenerator(object):
+  """Generates an R.txt file by walking res/ directories, without aapt.
+
+  Entries use dummy id values (_DUMMY_RTXT_ID / _DUMMY_RTXT_INDEX): only the
+  java_type, resource_type and name fields are meaningful in the output.
+  """
+
+  def __init__(self,
+               res_dirs,
+               ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN):
+    # res_dirs: list of resource directory paths to scan.
+    # ignore_pattern: aapt-style ignore pattern for files to skip.
+    self.res_dirs = res_dirs
+    self.ignore_pattern = ignore_pattern
+
+  def _ParseDeclareStyleable(self, node):
+    """Returns the set of R.txt entries for one <declare-styleable> node."""
+    ret = set()
+    stylable_name = _ResourceNameToJavaSymbol(node.attrib['name'])
+    ret.add(
+        _TextSymbolEntry('int[]', 'styleable', stylable_name,
+                         '{{{}}}'.format(_DUMMY_RTXT_ID)))
+    for child in node:
+      if child.tag == 'eat-comment':
+        continue
+      if child.tag != 'attr':
+        # This parser expects everything inside <declare-styleable/> to be
+        # either an attr or an eat-comment. If new resource xml files are added
+        # that do not conform to this, this parser needs updating.
+        # NOTE(review): the message below misspells 'declare-styleable'.
+        raise Exception('Unexpected tag {} inside <delcare-stylable/>'.format(
+            child.tag))
+      entry_name = '{}_{}'.format(
+          stylable_name, _ResourceNameToJavaSymbol(child.attrib['name']))
+      ret.add(
+          _TextSymbolEntry('int', 'styleable', entry_name, _DUMMY_RTXT_INDEX))
+      # android:-namespaced attrs belong to the platform, not this package.
+      if not child.attrib['name'].startswith('android:'):
+        resource_name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'attr', resource_name, _DUMMY_RTXT_ID))
+      for entry in child:
+        if entry.tag not in ('enum', 'flag'):
+          # This parser expects everything inside <attr/> to be either an
+          # <enum/> or a <flag/>. If new resource xml files are added that do
+          # not conform to this, this parser needs updating.
+          raise Exception('Unexpected tag {} inside <attr/>'.format(entry.tag))
+        resource_name = _ResourceNameToJavaSymbol(entry.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _ExtractNewIdsFromNode(self, node):
+    """Recursively collects 'id' entries for @+id/ references under |node|."""
+    ret = set()
+    # Sometimes there are @+id/ in random attributes (not just in android:id)
+    # and apparently that is valid. See:
+    # https://developer.android.com/reference/android/widget/RelativeLayout.LayoutParams.html
+    for value in node.attrib.values():
+      if value.startswith('@+id/'):
+        resource_name = value[5:]
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    for child in node:
+      ret.update(self._ExtractNewIdsFromNode(child))
+    return ret
+
+  def _ExtractNewIdsFromXml(self, xml_path):
+    """Parses |xml_path| and returns @+id/ entries found anywhere in it."""
+    root = ElementTree.parse(xml_path).getroot()
+    return self._ExtractNewIdsFromNode(root)
+
+  def _ParseValuesXml(self, xml_path):
+    """Returns R.txt entries for every resource declared in a values xml."""
+    ret = set()
+    root = ElementTree.parse(xml_path).getroot()
+    assert root.tag == 'resources'
+    for child in root:
+      if child.tag == 'eat-comment':
+        # eat-comment is just a dummy documentation element.
+        continue
+      if child.tag == 'skip':
+        # skip is just a dummy element.
+        continue
+      if child.tag == 'declare-styleable':
+        ret.update(self._ParseDeclareStyleable(child))
+      else:
+        if child.tag == 'item':
+          resource_type = child.attrib['type']
+        elif child.tag in ('array', 'integer-array', 'string-array'):
+          resource_type = 'array'
+        else:
+          resource_type = child.tag
+        name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _CollectResourcesListFromDirectory(self, res_dir):
+    """Walks one res/ directory and returns all R.txt entries found in it."""
+    ret = set()
+    # NOTE(review): reaches into a private helper of resource_utils.
+    globs = resource_utils._GenerateGlobs(self.ignore_pattern)
+    for root, _, files in os.walk(res_dir):
+      # The subdirectory name up to the first '-' (i.e. minus qualifiers
+      # such as -v21 or -hdpi) names the resource type.
+      resource_type = os.path.basename(root)
+      if '-' in resource_type:
+        resource_type = resource_type[:resource_type.index('-')]
+      for f in files:
+        if build_utils.MatchesGlob(f, globs):
+          continue
+        if resource_type == 'values':
+          ret.update(self._ParseValuesXml(os.path.join(root, f)))
+        else:
+          # File-based resource: the name is the filename minus extension.
+          if '.' in f:
+            resource_name = f[:f.index('.')]
+          else:
+            resource_name = f
+          ret.add(
+              _TextSymbolEntry('int', resource_type, resource_name,
+                               _DUMMY_RTXT_ID))
+          # Other types not just layouts can contain new ids (eg: Menus and
+          # Drawables). Just in case, look for new ids in all files.
+          if f.endswith('.xml'):
+            ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f)))
+    return ret
+
+  def _CollectResourcesListFromDirectories(self):
+    """Returns the union of entries across all configured res_dirs."""
+    ret = set()
+    for res_dir in self.res_dirs:
+      ret.update(self._CollectResourcesListFromDirectory(res_dir))
+    return ret
+
+  def WriteRTxtFile(self, rtxt_path):
+    """Collects all entries and writes them to |rtxt_path| atomically."""
+    resources = self._CollectResourcesListFromDirectories()
+    with build_utils.AtomicOutput(rtxt_path, mode='w') as f:
+      for resource in resources:
+        line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format(
+            resource)
+        f.write(line)
diff --git a/third_party/libwebrtc/build/android/gyp/util/server_utils.py b/third_party/libwebrtc/build/android/gyp/util/server_utils.py
new file mode 100644
index 0000000000..e050ef6552
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/server_utils.py
@@ -0,0 +1,41 @@
+# Copyright 2021 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import json
+import os
+import socket
+
+# Use a unix abstract domain socket:
+# https://man7.org/linux/man-pages/man7/unix.7.html#:~:text=abstract:
+SOCKET_ADDRESS = '\0chromium_build_server_socket'
+BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER'
+
+
+def MaybeRunCommand(name, argv, stamp_file):
+  """Returns True if the command was successfully sent to the build server.
+
+  Args:
+    name: Friendly name for the command, sent to the server.
+    argv: Full command line to run, sent to the server.
+    stamp_file: Path of the stamp file associated with the command.
+  Returns:
+    True if the request was handed to the build server; False if the server
+    is not running, or if this process was itself started by the server.
+  """
+
+  # When the build server runs a command, it sets this environment variable.
+  # This prevents infinite recursion where the script sends a request to the
+  # build server, then the build server runs the script, and then the script
+  # sends another request to the build server.
+  if BUILD_SERVER_ENV_VARIABLE in os.environ:
+    return False
+  with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock:
+    try:
+      sock.connect(SOCKET_ADDRESS)
+      sock.sendall(
+          json.dumps({
+              'name': name,
+              'cmd': argv,
+              'cwd': os.getcwd(),
+              'stamp_file': stamp_file,
+          }).encode('utf8'))
+    except socket.error as e:
+      # [Errno 111] Connection refused. Either the server has not been started
+      # or the server is not currently accepting new connections.
+      # NOTE(review): 111 is ECONNREFUSED on Linux only; errno.ECONNREFUSED
+      # would be portable — confirm this script is Linux-only.
+      if e.errno == 111:
+        return False
+      raise e
+  return True
diff --git a/third_party/libwebrtc/build/android/gyp/util/zipalign.py b/third_party/libwebrtc/build/android/gyp/util/zipalign.py
new file mode 100644
index 0000000000..c5c4ea88c6
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/util/zipalign.py
@@ -0,0 +1,97 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import struct
+import sys
+import zipfile
+
+from util import build_utils
+
+_FIXED_ZIP_HEADER_LEN = 30
+
+
+def _PatchedDecodeExtra(self):
+  """Replacement for zipfile.ZipInfo._decodeExtra on Python < 3.4.
+
+  Walks the 'extra' field records, decoding only the ZIP64 record (header
+  id 1) to recover 64-bit sizes/offsets, and silently skipping any other
+  record type (such as the padding records zipalign inserts, which crash
+  the stock implementation — see https://bugs.python.org/issue14315).
+  """
+  # Try to decode the extra field.
+  extra = self.extra
+  unpack = struct.unpack
+  while len(extra) >= 4:
+    tp, ln = unpack('<HH', extra[:4])
+    if tp == 1:
+      # ZIP64 record: up to three 8-byte counts, depending on which of the
+      # 32-bit header fields overflowed.
+      if ln >= 24:
+        counts = unpack('<QQQ', extra[4:28])
+      elif ln == 16:
+        counts = unpack('<QQ', extra[4:20])
+      elif ln == 8:
+        counts = unpack('<Q', extra[4:12])
+      elif ln == 0:
+        counts = ()
+      else:
+        raise RuntimeError("Corrupt extra field %s" % (ln, ))
+
+      idx = 0
+
+      # ZIP64 extension (large files and/or large archives)
+      if self.file_size in (0xffffffffffffffff, 0xffffffff):
+        self.file_size = counts[idx]
+        idx += 1
+
+      if self.compress_size == 0xffffffff:
+        self.compress_size = counts[idx]
+        idx += 1
+
+      if self.header_offset == 0xffffffff:
+        self.header_offset = counts[idx]
+        idx += 1
+
+    # Advance past this record (4-byte header plus ln payload bytes);
+    # unknown record types are ignored rather than rejected.
+    extra = extra[ln + 4:]
+
+
+def ApplyZipFileZipAlignFix():
+  """Fix zipfile.ZipFile() to be able to open zipaligned .zip files.
+
+  Android's zip alignment uses not-quite-valid zip headers to perform alignment.
+  Python < 3.4 crashes when trying to load them.
+  https://bugs.python.org/issue14315
+
+  No-op on Python >= 3.4, where the stdlib handles these archives.
+  """
+  if sys.version_info < (3, 4):
+    zipfile.ZipInfo._decodeExtra = (  # pylint: disable=protected-access
+        _PatchedDecodeExtra)
+
+
+def _SetAlignment(zip_obj, zip_info, alignment):
+  """Sets a ZipInfo's extra field such that the file will be aligned.
+
+  Args:
+    zip_obj: The ZipFile object that is being written.
+    zip_info: The ZipInfo object about to be written.
+    alignment: The amount of alignment (e.g. 4, or 4*1024).
+  """
+  # Pad so that the entry's data (which starts after the fixed local header
+  # and the filename) lands on an |alignment| boundary.
+  cur_offset = zip_obj.fp.tell()
+  header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename)
+  padding_needed = (alignment - (
+      (cur_offset + header_size) % alignment)) % alignment
+
+  # Python writes |extra| to both the local file header and the central
+  # directory's file header. Android's zipalign tool writes only to the
+  # local file header, so there is more overhead in using python to align.
+  zip_info.extra = b'\0' * padding_needed
+
+
+def AddToZipHermetic(zip_file,
+                     zip_path,
+                     src_path=None,
+                     data=None,
+                     compress=None,
+                     alignment=None):
+  """Same as build_utils.AddToZipHermetic(), but with alignment.
+
+  Args:
+    zip_file: ZipFile instance to add the entry to.
+    zip_path: Destination path within the zip file.
+    src_path: Path of the source file, if any.
+    data: File data as a string, if any.
+    compress: Whether to compress the entry.
+    alignment: If set, align the data of the entry to this many bytes.
+  """
+  zipinfo = build_utils.HermeticZipInfo(filename=zip_path)
+  if alignment:
+    _SetAlignment(zip_file, zipinfo, alignment)
+  build_utils.AddToZipHermetic(
+      zip_file, zipinfo, src_path=src_path, data=data, compress=compress)
diff --git a/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.py b/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.py
new file mode 100755
index 0000000000..b14ca3c314
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.dex import dex_parser
+from util import build_utils
+
+_FLAGS_PATH = (
+ '//chrome/android/java/static_library_dex_reference_workarounds.flags')
+
+
+def _FindIllegalStaticLibraryReferences(static_lib_dex_files,
+                                        main_apk_dex_files):
+  """Returns type descriptors defined in |main_apk_dex_files| that are also
+  referenced by |static_lib_dex_files|.
+
+  Args:
+    static_lib_dex_files: DexFile objects for the static library APK.
+    main_apk_dex_files: DexFile objects for a dependent APK.
+  Returns:
+    A set of type descriptor strings present in both.
+  """
+  # All types DEFINED by the dependent APK.
+  main_apk_defined_types = set()
+  for dex_file in main_apk_dex_files:
+    for class_def_item in dex_file.class_def_item_list:
+      main_apk_defined_types.add(
+          dex_file.GetTypeString(class_def_item.class_idx))
+
+  # All types REFERENCED by the static library APK.
+  static_lib_referenced_types = set()
+  for dex_file in static_lib_dex_files:
+    for type_item in dex_file.type_item_list:
+      static_lib_referenced_types.add(
+          dex_file.GetString(type_item.descriptor_idx))
+
+  return main_apk_defined_types.intersection(static_lib_referenced_types)
+
+
+def _DexFilesFromPath(path):
+  """Parses dex files from |path|.
+
+  |path| may be a zip archive (classes.zip), in which case every
+  classesN.dex member is parsed, or a bare .dex file.
+  """
+  if zipfile.is_zipfile(path):
+    with zipfile.ZipFile(path) as z:
+      return [
+          dex_parser.DexFile(bytearray(z.read(name))) for name in z.namelist()
+          if re.match(r'.*classes[0-9]*\.dex$', name)
+      ]
+  else:
+    # NOTE(review): this branch opens a binary .dex file in text mode —
+    # it likely needs open(path, 'rb'). It also returns a single DexFile
+    # rather than a list (unlike the zip branch), yet callers iterate the
+    # result. Both look like latent bugs; confirm and fix.
+    with open(path) as f:
+      return dex_parser.DexFile(bytearray(f.read()))
+
+
+def main(args):
+  """Script entry point: validates dex references, writes stamp and depfile.
+
+  For each dependent APK, exits with status 1 (after printing the offending
+  type names to stderr) if its dex references a type defined in the static
+  library APK. On success, touches --stamp and writes --depfile.
+  """
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--depfile', required=True, help='Path to output depfile.')
+  parser.add_argument(
+      '--stamp', required=True, help='Path to file to touch upon success.')
+  parser.add_argument(
+      '--static-library-dex',
+      required=True,
+      help='classes.dex or classes.zip for the static library APK that was '
+      'proguarded with other dependent APKs')
+  parser.add_argument(
+      '--static-library-dependent-dex',
+      required=True,
+      action='append',
+      dest='static_library_dependent_dexes',
+      help='classes.dex or classes.zip for the APKs that use the static '
+      'library APK')
+  args = parser.parse_args(args)
+
+  static_library_dexfiles = _DexFilesFromPath(args.static_library_dex)
+  for path in args.static_library_dependent_dexes:
+    dependent_dexfiles = _DexFilesFromPath(path)
+    illegal_references = _FindIllegalStaticLibraryReferences(
+        static_library_dexfiles, dependent_dexfiles)
+
+    if illegal_references:
+      msg = 'Found illegal references from {} to {}\n'.format(
+          args.static_library_dex, path)
+      msg += 'Add a -keep rule to avoid this. '
+      msg += 'See {} for an example and why this is necessary.\n'.format(
+          _FLAGS_PATH)
+      msg += 'The illegal references are:\n'
+      msg += '\n'.join(illegal_references)
+      sys.stderr.write(msg)
+      sys.exit(1)
+
+  input_paths = [args.static_library_dex] + args.static_library_dependent_dexes
+  build_utils.Touch(args.stamp)
+  build_utils.WriteDepfile(args.depfile, args.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.pydeps b/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.pydeps
new file mode 100644
index 0000000000..e57172dbd6
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/validate_static_library_dex_references.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/validate_static_library_dex_references.pydeps build/android/gyp/validate_static_library_dex_references.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+util/__init__.py
+util/build_utils.py
+validate_static_library_dex_references.py
diff --git a/third_party/libwebrtc/build/android/gyp/write_build_config.py b/third_party/libwebrtc/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000000..4756d8ac47
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/write_build_config.py
@@ -0,0 +1,2091 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+ --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+ 1. inputs/deps of the action ensure that the files are available the first
+ time the action runs.
+ 2. Either (a) or (b)
+ a. inputs/deps ensure that the action runs whenever one of the files changes
+ b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $OUTPUT_CHROMIUM_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of `.build_config` generated during
+the Android build of Chromium. For a higher-level explanation of these files,
+please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies. The latter has the
+following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+ The following types are known by the internal GN build rules and the
+ build scripts altogether:
+
+ * [java_binary](#target_java_binary)
+ * [java_annotation_processor](#target_java_annotation_processor)
+ * [junit_binary](#target_junit_binary)
+ * [java_library](#target_java_library)
+ * [android_assets](#target_android_assets)
+ * [android_resources](#target_android_resources)
+ * [android_apk](#target_android_apk)
+ * [android_app_bundle_module](#target_android_app_bundle_module)
+ * [android_app_bundle](#target_android_app_bundle)
+ * [dist_jar](#target_dist_jar)
+ * [dist_aar](#target_dist_aar)
+ * [group](#target_group)
+
+ See later sections for more details of some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+ NOTE: Because the `.build_config` of a given target is always generated
+ after the `.build_config` of its dependencies, the `write_build_config.py`
+ script can use chains of `deps_configs` to compute transitive dependencies
+ for each target when needed.
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+ NOTE: For `android_resources` targets,
+ this is the package name for the corresponding R class. For `android_apk`
+ targets, this is the corresponding package name. This does *not* appear for
+ other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+* `deps_info['base_module_config']`:
+Only seen for the [`android_app_bundle`](#target_android_app_bundle) type.
+Path to the base module for the bundle.
+
+* `deps_info['is_base_module']`:
+Only seen for the
+[`android_app_bundle_module`](#target_android_app_bundle_module)
+type. Whether or not this module is the base module for some bundle.
+
+* `deps_info['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies for the current target.
+
+* `deps_info['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies for the current target. Computed automatically by
+`write_build_config.py`.
+
+* `deps_info['dependency_r_txt_files']`:
+Exists only on dist_aar. It is the list of deps_info['r_text_path'] from
+transitive dependencies. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## <a name="target_group">Target type `group`</a>:
+
+This type corresponds to a simple target that is only used to group
+dependencies. It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## <a name="target_android_resources">Target type `android_resources`</a>:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+
+* `deps_info['res_sources_path']`:
+Path to file containing a list of resource source files used by the
+android_resources target.
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (and also no `R.txt`, `R.java` or `R.class`).
+
+ If `deps_info['res_sources_path']` is missing, this must point to a prebuilt
+ `.aar` archive containing resources. Otherwise, this will point to a zip
+ archive generated at build time, wrapping the sources listed in
+ `deps_info['res_sources_path']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['resource_overlay']`:
+Optional. Whether the resources in resources_zip should override resources with
+the same name. Does not affect the behaviour of any android_resources()
+dependencies of this target. If a target with resource_overlay=true depends
+on another target with resource_overlay=true the target with the dependency
+overrides the other.
+
+* `deps_info['r_text_path']`:
+Provide the path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text_path']`. This is
+*always* generated from the content of `deps_info['r_text_path']` by the
+`build/android/gyp/process_resources.py` script.
+
+* `deps_info['static_library_dependent_classpath_configs']`:
+Sub dictionary mapping .build_config paths to lists of jar files. For static
+library APKs, this defines which input jars belong to each
+static_library_dependent_target.
+
+* `deps_info['static_library_proguard_mapping_output_paths']`:
+Additional paths to copy the ProGuard mapping file to for static library
+APKs.
+
+## <a name="target_android_assets">Target type `android_assets`</a>:
+
+This type corresponds to targets used to group Android assets, i.e. liberal
+files that will be placed under `//assets/` within the final APK.
+
+These use an `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final //assets/ sub-directory. When this happens, this contains a list of
+all renamed output file paths
+
+ NOTE: When not empty, the first items of `assets['sources']` must match
+ every item in this list. Extra sources correspond to non-renamed sources.
+
+ NOTE: This comes from the `asset_renaming_destinations` parameter for the
+ `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional. Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special `.build_config.java` source file, listing all supported Locales in
+the current build.
+
+
+## <a name="target_java_library">Target type `java_library`</a>:
+
+This type is used to describe target that wrap Java bytecode, either created
+by compiling sources, or providing them with a prebuilt jar.
+
+* `deps_info['public_deps_configs']`: List of paths to the `.build_config` files
+of *direct* dependencies of the current target which are exposed as part of the
+current target's public API. This should be a subset of
+deps_info['deps_configs'].
+
+* `deps_info['ignore_dependency_public_deps']`: If true, 'public_deps' will not
+be collected from the current target's direct deps.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['device_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (use by java_binary).
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running the `ijar` on
+`deps_info['unprocessed_jar_path']` or the `turbine` tool on source files.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will be point to a build-generated file.
+
+* `deps_info['java_sources_file']`:
+Path to a single `.sources` file listing all the Java sources that were used
+to generate the library (simple text format, one `.jar` path per line).
+
+* `deps_info['lint_android_manifest']`:
+Path to an AndroidManifest.xml file to use for this lint target.
+
+* `deps_info['lint_java_sources']`:
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies that are chromium code. Note: this is a list of files, where each
+file contains a list of Java source files. This is used for lint.
+
+* `deps_info['lint_aars']`:
+List of all aars from transitive java dependencies. This allows lint to collect
+their custom annotations.zip and run checks like @IntDef on their annotations.
+
+* `deps_info['lint_srcjars']`:
+List of all bundled srcjars of all transitive java library targets. Excludes
+non-chromium java libraries.
+
+* `deps_info['lint_resource_sources']`:
+List of all resource sources files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['lint_resource_zips']`:
+List of all resource zip files belonging to all transitive resource dependencies
+of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Appears also on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when using the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+## <a name="target_java_binary">Target type `java_binary`</a>:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['device_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
+
+
+## <a name="target_junit_binary">Target type `junit_binary`</a>:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except the name.
+
+
+## <a name="target_java_annotation_processor">Target type \
+`java_annotation_processor`</a>:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name, and that it requires a `deps_info['main_class']` entry.
+
+
+## <a name="target_android_apk">Target type `android_apk`</a>:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK.
+
+* `deps_info['final_dex']['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix, and `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['uncompress_shared_libraries']`
+A boolean indicating whether native libraries are stored uncompressed in the
+APK.
+
+* `native['loadable_modules']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `native['secondary_abi_loadable_modules']`
+Secondary ABI version of loadable_modules
+
+* `native['library_always_compress']`
+A list of library files that we always compress.
+
+* `native['library_renames']`
+A list of library files that we prepend "crazy." to their file names.
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source-path>:<destination-path>`, where `<source-path>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source-path>:<destination-path>` too.
+
+* `locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed as android assets.
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `jni['all_source']`
+The list of all `deps_info['java_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java source files. This is used for JNI registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to apks with proguard enabled that have an apk_under_test. This is
+the path to the apk_under_test's output proguard .mapping file.
+
+## <a name="target_android_app_bundle_module">Target type \
+`android_app_bundle_module`</a>:
+
+Corresponds to an Android app bundle module. Very similar to an APK and
+inherits the same fields, except that this does not generate an installable
+file (see `android_app_bundle`), and for the following omitted fields:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+ `deps_info['incremental_install_json_path']` are omitted.
+
+* top-level `dist_jar` is omitted as well.
+
+In addition to `android_apk` targets though come these new fields:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the bundle
+module.
+
+* `deps_info['module_pathmap_path']`:
+The path of the pathmap file generated when compiling the resources for the
+bundle module, if resource path shortening is enabled.
+
+* `deps_info['base_allowlist_rtxt_path']`:
+Optional path to an R.txt file used as a allowlist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## <a name="target_android_app_bundle">Target type `android_app_bundle`</a>
+
+This target type corresponds to an Android app bundle, and is built from one
+or more `android_app_bundle_module` targets listed as dependencies.
+
+
+## <a name="target_dist_aar">Target type `dist_aar`</a>:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+ * `deps_info['supports_android']` (always True).
+ * `deps_info['requires_android']` (always True).
+ * `deps_info['proguard_configs']` (optional).
+
+
+## <a name="target_dist_jar">Target type `dist_jar`</a>:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and used to create a `.jar` file that can be later
+redistributed.
+
+This always has the following entries:
+
+ * `deps_info['proguard_enabled']` (False by default).
+ * `deps_info['proguard_configs']` (optional).
+ * `deps_info['supports_android']` (True by default).
+ * `deps_info['requires_android']` (False by default).
+
+
+
+## <a name="dict_javac">The `deps_info['javac']` dictionary</a>:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor' when
+invoking `javac`.
+
+## <a name="android_app_bundle">Target type `android_app_bundle`</a>:
+
+This type corresponds to an Android app bundle (`.aab` file).
+
+--------------- END_MARKDOWN ---------------------------------------------------
+"""
+
+from __future__ import print_function
+
+import collections
+import itertools
+import json
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+from util import resource_utils
+
# TODO(crbug.com/1174969): Remove this once Python2 is obsoleted.
# itertools.izip_longest was renamed to zip_longest in Python 3; alias it so
# the rest of this file can use the Python 3 name on either interpreter.
if sys.version_info.major == 2:
  zip_longest = itertools.izip_longest
else:
  zip_longest = itertools.zip_longest
+
+
# Types that should never be used as a dependency of another build config.
# These sit at the top of the dependency graph (installable binaries/bundles),
# so depending on one is almost always a mistake.
_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
               'junit_binary', 'android_app_bundle')
# Types that should not allow code deps to pass through.
_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')
+
+
class OrderedSet(collections.OrderedDict):
  """A set that remembers insertion order, backed by OrderedDict keys.

  Every key maps to True; only the keys (and their order) matter.
  """

  # The |value| parameter is unused but kept so the signature matches the
  # overridden dict.fromkeys() and avoids a presubmit warning about a
  # mismatched override.
  @staticmethod
  def fromkeys(iterable, value=None):
    ret = OrderedSet()
    ret.update(iterable)
    return ret

  def add(self, key):
    self[key] = True

  def update(self, iterable):
    collections.OrderedDict.update(self, ((key, True) for key in iterable))
+
+
+def _ExtractMarkdownDocumentation(input_text):
+ """Extract Markdown documentation from a list of input strings lines.
+
+ This generates a list of strings extracted from |input_text|, by looking
+ for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+ in_markdown = False
+ result = []
+ for line in input_text.splitlines():
+ if in_markdown:
+ if '-- END_MARKDOWN --' in line:
+ in_markdown = False
+ else:
+ result.append(line)
+ else:
+ if '-- BEGIN_MARKDOWN --' in line:
+ in_markdown = True
+
+ return result
+
class AndroidManifest(object):
  """Thin wrapper around a parsed AndroidManifest.xml file."""

  def __init__(self, path):
    self.path = path
    document = xml.dom.minidom.parse(path)
    roots = document.getElementsByTagName('manifest')
    # A valid manifest has exactly one top-level <manifest> element.
    assert len(roots) == 1
    self.manifest = roots[0]

  def GetInstrumentationElements(self):
    """Returns the list of <instrumentation> elements, or None if absent."""
    elements = self.manifest.getElementsByTagName('instrumentation')
    return elements if len(elements) > 0 else None

  def CheckInstrumentationElements(self, expected_package):
    """Raises unless every <instrumentation> targets |expected_package|."""
    elements = self.GetInstrumentationElements()
    if not elements:
      raise Exception('No <instrumentation> elements found in %s' % self.path)
    for element in elements:
      target = element.getAttributeNS(
          'http://schemas.android.com/apk/res/android', 'targetPackage')
      if target != expected_package:
        raise Exception(
            'Wrong instrumented package. Expected %s, got %s'
            % (expected_package, target))

  def GetPackageName(self):
    """Returns the manifest's 'package' attribute."""
    return self.manifest.getAttribute('package')
+
+
# Cache of .build_config path -> parsed 'deps_info' dict, so each file is read
# and parsed at most once per invocation.
dep_config_cache = {}


def GetDepConfig(path):
  """Returns the (cached) 'deps_info' dict parsed from the JSON at |path|."""
  config = dep_config_cache.get(path)
  if config is None:
    with open(path) as config_file:
      config = json.load(config_file)['deps_info']
    dep_config_cache[path] = config
  return config
+
+
def DepsOfType(wanted_type, configs):
  """Returns the configs in |configs| whose 'type' equals |wanted_type|."""
  return list(filter(lambda config: config['type'] == wanted_type, configs))
+
+
def DepPathsOfType(wanted_type, config_paths):
  """Returns the paths in |config_paths| whose configs have |wanted_type|."""
  matches = []
  for config_path in config_paths:
    if GetDepConfig(config_path)['type'] == wanted_type:
      matches.append(config_path)
  return matches
+
+
def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None):
  """Returns all transitive dependency config paths, in sorted order.

  Args:
    deps_config_paths: Direct .build_config paths to start from.
    filter_func: Optional predicate over a parsed config; configs it rejects
        contribute no further dependencies to the traversal.
  """
  def _ChildPaths(path):
    config = GetDepConfig(path)
    if filter_func and not filter_func(config):
      return []
    return config['deps_configs']

  return build_utils.GetSortedTransitiveDependencies(deps_config_paths,
                                                     _ChildPaths)
+
+
def GetObjectByPath(obj, key_path):
  """Given an object, return its nth child based on a key path.

  An empty |key_path| returns |obj| itself.
  """
  current = obj
  for key in key_path:
    current = current[key]
  return current
+
+
def RemoveObjDups(obj, base, *key_path):
  """Remove array items from obj[*key_path] that also appear in
  base[*key_path] (duplicates). Modifies the list in place.
  """
  # Walk both objects down the same key path (empty path means the objects
  # themselves are the lists).
  target = obj
  reference = base
  for key in key_path:
    target = target[key]
    reference = reference[key]
  duplicates = set(reference)
  target[:] = [item for item in target if item not in duplicates]
+
+
class Deps(object):
  """Query helper over a target's direct and transitive dependencies.

  Built from a list of direct-dependency .build_config paths; the transitive
  closure is resolved via GetAllDepsConfigsInOrder()/GetDepConfig().
  """

  def __init__(self, direct_deps_config_paths):
    # All transitive .build_config paths, in sorted dependency order.
    self._all_deps_config_paths = GetAllDepsConfigsInOrder(
        direct_deps_config_paths)
    self._direct_deps_configs = [
        GetDepConfig(p) for p in direct_deps_config_paths
    ]
    self._all_deps_configs = [
        GetDepConfig(p) for p in self._all_deps_config_paths
    ]
    self._direct_deps_config_paths = direct_deps_config_paths

  def All(self, wanted_type=None):
    """Returns all transitive dep configs, optionally filtered by type."""
    if wanted_type is None:
      return self._all_deps_configs
    return DepsOfType(wanted_type, self._all_deps_configs)

  def Direct(self, wanted_type=None):
    """Returns direct dep configs only, optionally filtered by type."""
    if wanted_type is None:
      return self._direct_deps_configs
    return DepsOfType(wanted_type, self._direct_deps_configs)

  def DirectAndChildPublicDeps(self, wanted_type=None):
    """Returns direct dependencies and dependencies exported via public_deps of
    direct dependencies.
    """
    # NOTE: |dep_paths| is a set, so the order of the returned configs is not
    # deterministic across runs.
    dep_paths = set(self._direct_deps_config_paths)
    for direct_dep in self._direct_deps_configs:
      dep_paths.update(direct_dep.get('public_deps_configs', []))
    deps_list = [GetDepConfig(p) for p in dep_paths]
    if wanted_type is None:
      return deps_list
    return DepsOfType(wanted_type, deps_list)

  def AllConfigPaths(self):
    """Returns all transitive .build_config paths (sorted dependency order)."""
    return self._all_deps_config_paths

  def GradlePrebuiltJarPaths(self):
    """Returns unprocessed jar paths of direct java_library deps that are
    prebuilts (or should be treated as such for Gradle project generation)."""
    ret = []

    def helper(cur):
      # Only direct deps are inspected; helper does not recurse.
      for config in cur.Direct('java_library'):
        if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']:
          if config['unprocessed_jar_path'] not in ret:
            ret.append(config['unprocessed_jar_path'])

    helper(self)
    return ret

  def GradleLibraryProjectDeps(self):
    """Returns configs of java_library deps to expose as Gradle projects.

    Actual prebuilts are skipped; deps marked gradle_treat_as_prebuilt are
    recursed into, since their own deps may still be real library projects.
    """
    ret = []

    def helper(cur):
      for config in cur.Direct('java_library'):
        if config['is_prebuilt']:
          pass
        elif config['gradle_treat_as_prebuilt']:
          helper(Deps(config['deps_configs']))
        elif config not in ret:
          ret.append(config)

    helper(self)
    return ret
+
+
+def _MergeAssets(all_assets):
+ """Merges all assets from the given deps.
+
+ Returns:
+ A tuple of: (compressed, uncompressed, locale_paks)
+ |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is
+ the path of the asset to add, and zipPath is the location within the zip
+ (excluding assets/ prefix).
+ |locale_paks| is a set of all zipPaths that have been marked as
+ treat_as_locale_paks=true.
+ """
+ compressed = {}
+ uncompressed = {}
+ locale_paks = set()
+ for asset_dep in all_assets:
+ entry = asset_dep['assets']
+ disable_compression = entry.get('disable_compression')
+ treat_as_locale_paks = entry.get('treat_as_locale_paks')
+ dest_map = uncompressed if disable_compression else compressed
+ other_map = compressed if disable_compression else uncompressed
+ outputs = entry.get('outputs', [])
+ for src, dest in zip_longest(entry['sources'], outputs):
+ if not dest:
+ dest = os.path.basename(src)
+ # Merge so that each path shows up in only one of the lists, and that
+ # deps of the same target override previous ones.
+ other_map.pop(dest, 0)
+ dest_map[dest] = src
+ if treat_as_locale_paks:
+ locale_paks.add(dest)
+
+ def create_list(asset_map):
+ ret = ['%s:%s' % (src, dest) for dest, src in asset_map.items()]
+ # Sort to ensure deterministic ordering.
+ ret.sort()
+ return ret
+
+ return create_list(compressed), create_list(uncompressed), locale_paks
+
+
def _ResolveGroups(config_paths):
  """Returns a list of configs with all groups inlined.

  Each 'group' entry is replaced, in place, by those of its deps_configs not
  already present; repeats until no 'group' entries remain.
  """
  resolved = list(config_paths)
  seen = set(config_paths)
  while True:
    group_paths = DepPathsOfType('group', resolved)
    if not group_paths:
      return resolved
    for group_path in group_paths:
      position = resolved.index(group_path)
      additions = [
          p for p in GetDepConfig(group_path)['deps_configs'] if p not in seen
      ]
      # Splice the group's members in where the group itself was.
      resolved[position:position + 1] = additions
      seen.update(additions)
+
+
def _DepsFromPaths(dep_paths,
                   target_type,
                   filter_root_targets=True,
                   recursive_resource_deps=False):
  """Resolves all groups and trims dependency branches that we never want.

  E.g. When a resource or asset depends on an apk target, the intent is to
  include the .apk as a resource/asset, not to have the apk's classpath added.

  This method is meant to be called to get the top nodes (i.e. closest to
  current target) that we could then use to get a full transitive dependents
  list (e.g. using Deps#all). Filtering single elements out of this list
  therefore prunes whole branches of dependencies. Because groups are resolved
  (i.e. expanded to their constituents), depending on a group is equivalent to
  directly depending on each member of that group.
  """
  # Root targets (apks, binaries, bundles, ...) never make sense as deps.
  blocklist = list(_ROOT_TYPES) if filter_root_targets else []
  # Java libraries must not cross through assets/resources.
  allowlist = list(_RESOURCE_TYPES) if target_type in _RESOURCE_TYPES else []

  if recursive_resource_deps:
    # Pretend that this target directly depends on all of its transitive
    # dependencies. This is used to pull resources into the base module to
    # work around bugs accessing resources in isolated DFMs; assets are
    # excluded so that they stay in their DFMs.
    dep_paths = GetAllDepsConfigsInOrder(dep_paths)
    blocklist.append('android_assets')

  return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist)
+
+
def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
  """Resolves all groups and trims dependency branches that we never want.

  See _DepsFromPaths.

  |blocklist| if passed, are the types of direct dependencies we do not care
  about (i.e. tips of branches that we wish to prune).

  |allowlist| if passed, are the only types of direct dependencies we care
  about (i.e. we wish to prune all other branches that do not start from one
  of these).
  """
  config_paths = dep_paths
  group_paths = DepPathsOfType('group', dep_paths)
  if group_paths:
    # Keep the group configs themselves in addition to their expansions.
    config_paths = _ResolveGroups(dep_paths) + group_paths

  kept_configs = []
  for path in config_paths:
    config = GetDepConfig(path)
    if blocklist and config['type'] in blocklist:
      continue
    if allowlist and config['type'] not in allowlist:
      continue
    kept_configs.append(config)

  return Deps([config['path'] for config in kept_configs])
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+ ret = []
+ with open(runtime_deps_file) as f:
+ for line in f:
+ line = line.rstrip()
+ if not line.endswith('.so'):
+ continue
+ # Only unstripped .so files are listed in runtime deps.
+ # Convert to the stripped .so by going up one directory.
+ ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+ ret.reverse()
+ return ret
+
+
+def _CreateJavaLibrariesList(library_paths):
+ """Returns a java literal array with the "base" library names:
+ e.g. libfoo.so -> foo
+ """
+ names = ['"%s"' % os.path.basename(s)[3:-3] for s in library_paths]
+ return ('{%s}' % ','.join(sorted(set(names))))
+
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+ """Returns a java literal array from a list of locale assets.
+
+ Args:
+ assets: A list of all APK asset paths in the form 'src:dst'
+ locale_paks: A list of asset paths that correponds to the locale pak
+ files of interest. Each |assets| entry will have its 'dst' part matched
+ against it to determine if they are part of the result.
+ Returns:
+ A string that is a Java source literal array listing the locale names
+ of the corresponding asset files, without directory or .pak suffix.
+ E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+ """
+ assets_paths = [a.split(':')[1] for a in assets]
+ locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+ return '{%s}' % ','.join('"%s"' % l for l in sorted(locales))
+
+
+def _AddJarMapping(jar_to_target, configs):
+ for config in configs:
+ jar = config.get('unprocessed_jar_path')
+ if jar:
+ jar_to_target[jar] = config['gn_target']
+ for jar in config.get('extra_classpath_jars', []):
+ jar_to_target[jar] = config['gn_target']
+
+
+def _CompareClasspathPriority(dep):
+ return 1 if dep.get('low_classpath_priority') else 0
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+ parser.add_option('--build-config', help='Path to build_config output.')
+ parser.add_option(
+ '--type',
+ help='Type of this target (e.g. android_library).')
+ parser.add_option('--gn-target', help='GN label for this target')
+ parser.add_option(
+ '--deps-configs',
+ help='GN-list of dependent build_config files.')
+ parser.add_option(
+ '--annotation-processor-configs',
+ help='GN-list of build_config files for annotation processors.')
+
+ # android_resources options
+ parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+ parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+ parser.add_option('--package-name',
+ help='Java package name for these resources.')
+ parser.add_option('--android-manifest',
+ help='Path to the root android manifest.')
+ parser.add_option('--merged-android-manifest',
+ help='Path to the merged android manifest.')
+ parser.add_option('--resource-dirs', action='append', default=[],
+ help='GYP-list of resource dirs')
+ parser.add_option(
+ '--res-sources-path',
+ help='Path to file containing a list of paths to resources.')
+ parser.add_option(
+ '--resource-overlay',
+ action='store_true',
+ help='Whether resources passed in via --resources-zip should override '
+ 'resources with the same name')
+ parser.add_option(
+ '--recursive-resource-deps',
+ action='store_true',
+ help='Whether deps should be walked recursively to find resource deps.')
+
+ # android_assets options
+ parser.add_option('--asset-sources', help='List of asset sources.')
+ parser.add_option('--asset-renaming-sources',
+ help='List of asset sources with custom destinations.')
+ parser.add_option('--asset-renaming-destinations',
+ help='List of asset custom destinations.')
+ parser.add_option('--disable-asset-compression', action='store_true',
+ help='Whether to disable asset compression.')
+ parser.add_option('--treat-as-locale-paks', action='store_true',
+ help='Consider the assets as locale paks in BuildConfig.java')
+
+ # java library options
+
+ parser.add_option('--public-deps-configs',
+ help='GN list of config files of deps which are exposed as '
+ 'part of the target\'s public API.')
+ parser.add_option(
+ '--ignore-dependency-public-deps',
+ action='store_true',
+ help='If true, \'public_deps\' will not be collected from the current '
+ 'target\'s direct deps.')
+ parser.add_option('--aar-path', help='Path to containing .aar file.')
+ parser.add_option('--device-jar-path', help='Path to .jar for dexing.')
+ parser.add_option('--host-jar-path', help='Path to .jar for java_binary.')
+ parser.add_option('--unprocessed-jar-path',
+ help='Path to the .jar to use for javac classpath purposes.')
+ parser.add_option(
+ '--interface-jar-path',
+ help='Path to the interface .jar to use for javac classpath purposes.')
+ parser.add_option('--is-prebuilt', action='store_true',
+ help='Whether the jar was compiled or pre-compiled.')
+ parser.add_option('--java-sources-file', help='Path to .sources file')
+ parser.add_option('--bundled-srcjars',
+ help='GYP-list of .srcjars that have been included in this java_library.')
+ parser.add_option('--supports-android', action='store_true',
+ help='Whether this library supports running on the Android platform.')
+ parser.add_option('--requires-android', action='store_true',
+ help='Whether this library requires running on the Android platform.')
+ parser.add_option('--bypass-platform-checks', action='store_true',
+ help='Bypass checks for support/require Android platform.')
+ parser.add_option('--extra-classpath-jars',
+ help='GYP-list of .jar files to include on the classpath when compiling, '
+ 'but not to include in the final binary.')
+ parser.add_option(
+ '--low-classpath-priority',
+ action='store_true',
+ help='Indicates that the library should be placed at the end of the '
+ 'classpath.')
+ parser.add_option(
+ '--mergeable-android-manifests',
+ help='GN-list of AndroidManifest.xml to include in manifest merging.')
+ parser.add_option('--gradle-treat-as-prebuilt', action='store_true',
+ help='Whether this library should be treated as a prebuilt library by '
+ 'generate_gradle.py.')
+ parser.add_option('--main-class',
+ help='Main class for java_binary or java_annotation_processor targets.')
+ parser.add_option('--java-resources-jar-path',
+ help='Path to JAR that contains java resources. Everything '
+ 'from this JAR except meta-inf/ content and .class files '
+ 'will be added to the final APK.')
+ parser.add_option(
+ '--non-chromium-code',
+ action='store_true',
+ help='True if a java library is not chromium code, used for lint.')
+
+ # android library options
+ parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+ # native library options
+ parser.add_option('--shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for shared '
+ 'libraries.')
+ parser.add_option(
+ '--loadable-modules',
+ action='append',
+ help='GN-list of native libraries for primary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option('--secondary-abi-shared-libraries-runtime-deps',
+ help='Path to file containing runtime deps for secondary '
+ 'abi shared libraries.')
+ parser.add_option(
+ '--secondary-abi-loadable-modules',
+ action='append',
+ help='GN-list of native libraries for secondary '
+ 'android-abi. Can be specified multiple times.',
+ default=[])
+ parser.add_option(
+ '--native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add.',
+ default=[])
+ parser.add_option(
+ '--secondary-native-lib-placeholders',
+ action='append',
+ help='GN-list of native library placeholders to add '
+ 'for the secondary android-abi.',
+ default=[])
+ parser.add_option('--uncompress-shared-libraries', default=False,
+ action='store_true',
+ help='Whether to store native libraries uncompressed')
+ parser.add_option(
+ '--library-always-compress',
+ help='The list of library files that we always compress.')
+ parser.add_option(
+ '--library-renames',
+ default=[],
+ help='The list of library files that we prepend crazy. to their names.')
+
+ # apk options
+ parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+ parser.add_option('--incremental-apk-path',
+ help="Path to the target's incremental apk output.")
+ parser.add_option('--incremental-install-json-path',
+ help="Path to the target's generated incremental install "
+ "json.")
+ parser.add_option(
+ '--tested-apk-config',
+ help='Path to the build config of the tested apk (for an instrumentation '
+ 'test apk).')
+ parser.add_option(
+ '--proguard-enabled',
+ action='store_true',
+ help='Whether proguard is enabled for this apk or bundle module.')
+ parser.add_option(
+ '--proguard-configs',
+ help='GN-list of proguard flag files to use in final apk.')
+ parser.add_option(
+ '--proguard-mapping-path', help='Path to jar created by ProGuard step')
+
+ # apk options that are static library specific
+ parser.add_option(
+ '--static-library-dependent-configs',
+ help='GN list of .build_configs of targets that use this target as a '
+ 'static library.')
+
+ # options shared between android_resources and apk targets
+ parser.add_option('--r-text-path', help='Path to target\'s R.txt file.')
+
+ parser.add_option('--fail',
+ help='GN-list of error message lines to fail with.')
+
+ parser.add_option('--final-dex-path',
+ help='Path to final input classes.dex (or classes.zip) to '
+ 'use in final apk.')
+ parser.add_option('--res-size-info', help='Path to .ap_.info')
+ parser.add_option('--apk-proto-resources',
+ help='Path to resources compiled in protocol buffer format '
+ ' for this apk.')
+ parser.add_option(
+ '--module-pathmap-path',
+ help='Path to pathmap file for resource paths in a bundle module.')
+ parser.add_option(
+ '--base-allowlist-rtxt-path',
+ help='Path to R.txt file for the base resources allowlist.')
+ parser.add_option(
+ '--is-base-module',
+ action='store_true',
+ help='Specifies that this module is a base module for some app bundle.')
+
+ parser.add_option('--generate-markdown-format-doc', action='store_true',
+ help='Dump the Markdown .build_config format documentation '
+ 'then exit immediately.')
+
+ parser.add_option(
+ '--base-module-build-config',
+ help='Path to the base module\'s build config '
+ 'if this is a feature module.')
+
+ parser.add_option(
+ '--module-build-configs',
+ help='For bundles, the paths of all non-async module .build_configs '
+ 'for modules that are part of the bundle.')
+
+ parser.add_option('--version-name', help='Version name for this APK.')
+ parser.add_option('--version-code', help='Version code for this APK.')
+
+ options, args = parser.parse_args(argv)
+
+ if args:
+ parser.error('No positional arguments should be given.')
+
+ if options.generate_markdown_format_doc:
+ doc_lines = _ExtractMarkdownDocumentation(__doc__)
+ for line in doc_lines:
+ print(line)
+ return 0
+
+ if options.fail:
+ parser.error('\n'.join(build_utils.ParseGnList(options.fail)))
+
+ lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+ device_lib_options = ['device_jar_path', 'dex_path']
+ required_options_map = {
+ 'android_apk': ['build_config'] + lib_options + device_lib_options,
+ 'android_app_bundle_module':
+ ['build_config', 'final_dex_path', 'res_size_info'] + lib_options +
+ device_lib_options,
+ 'android_assets': ['build_config'],
+ 'android_resources': ['build_config', 'resources_zip'],
+ 'dist_aar': ['build_config'],
+ 'dist_jar': ['build_config'],
+ 'group': ['build_config'],
+ 'java_annotation_processor': ['build_config', 'main_class'],
+ 'java_binary': ['build_config'],
+ 'java_library': ['build_config', 'host_jar_path'] + lib_options,
+ 'junit_binary': ['build_config'],
+ 'system_java_library': ['build_config', 'unprocessed_jar_path'],
+ 'android_app_bundle': ['build_config', 'module_build_configs'],
+ }
+ required_options = required_options_map.get(options.type)
+ if not required_options:
+ raise Exception('Unknown type: <%s>' % options.type)
+
+ build_utils.CheckOptions(options, parser, required_options)
+
+ if options.type != 'android_app_bundle_module':
+ if options.apk_proto_resources:
+ raise Exception('--apk-proto-resources can only be used with '
+ '--type=android_app_bundle_module')
+ if options.module_pathmap_path:
+ raise Exception('--module-pathmap-path can only be used with '
+ '--type=android_app_bundle_module')
+ if options.base_allowlist_rtxt_path:
+ raise Exception('--base-allowlist-rtxt-path can only be used with '
+ '--type=android_app_bundle_module')
+ if options.is_base_module:
+ raise Exception('--is-base-module can only be used with '
+ '--type=android_app_bundle_module')
+
+ is_apk_or_module_target = options.type in ('android_apk',
+ 'android_app_bundle_module')
+
+ if not is_apk_or_module_target:
+ if options.uncompress_shared_libraries:
+ raise Exception('--uncompressed-shared-libraries can only be used '
+ 'with --type=android_apk or '
+ '--type=android_app_bundle_module')
+ if options.library_always_compress:
+ raise Exception(
+ '--library-always-compress can only be used with --type=android_apk '
+ 'or --type=android_app_bundle_module')
+ if options.library_renames:
+ raise Exception(
+ '--library-renames can only be used with --type=android_apk or '
+ '--type=android_app_bundle_module')
+
+ if options.device_jar_path and not options.dex_path:
+ raise Exception('java_library that supports Android requires a dex path.')
+ if any(getattr(options, x) for x in lib_options):
+ for attr in lib_options:
+ if not getattr(options, attr):
+ raise('Expected %s to be set.' % attr)
+
+ if options.requires_android and not options.supports_android:
+ raise Exception(
+ '--supports-android is required when using --requires-android')
+
+ is_java_target = options.type in (
+ 'java_binary', 'junit_binary', 'java_annotation_processor',
+ 'java_library', 'android_apk', 'dist_aar', 'dist_jar',
+ 'system_java_library', 'android_app_bundle_module')
+
+ is_static_library_dex_provider_target = (
+ options.static_library_dependent_configs and options.proguard_enabled)
+ if is_static_library_dex_provider_target:
+ if options.type != 'android_apk':
+ raise Exception(
+ '--static-library-dependent-configs only supports --type=android_apk')
+ options.static_library_dependent_configs = build_utils.ParseGnList(
+ options.static_library_dependent_configs)
+ static_library_dependent_configs_by_path = {
+ p: GetDepConfig(p)
+ for p in options.static_library_dependent_configs
+ }
+
+ deps_configs_paths = build_utils.ParseGnList(options.deps_configs)
+ deps = _DepsFromPaths(deps_configs_paths,
+ options.type,
+ recursive_resource_deps=options.recursive_resource_deps)
+ processor_deps = _DepsFromPaths(
+ build_utils.ParseGnList(options.annotation_processor_configs or ''),
+ options.type, filter_root_targets=False)
+
+ all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths() +
+ list(static_library_dependent_configs_by_path))
+
+ if options.recursive_resource_deps:
+ # Include java_library targets since changes to these targets can remove
+ # resource deps from the build, which would require rebuilding this target's
+ # build config file: crbug.com/1168655.
+ recursive_java_deps = _DepsFromPathsWithFilters(
+ GetAllDepsConfigsInOrder(deps_configs_paths),
+ allowlist=['java_library'])
+ all_inputs.extend(recursive_java_deps.AllConfigPaths())
+
+ direct_deps = deps.Direct()
+ system_library_deps = deps.Direct('system_java_library')
+ all_deps = deps.All()
+ all_library_deps = deps.All('java_library')
+ all_resources_deps = deps.All('android_resources')
+
+ if options.type == 'java_library':
+ java_library_deps = _DepsFromPathsWithFilters(
+ deps_configs_paths, allowlist=['android_resources'])
+ # for java libraries, we only care about resources that are directly
+ # reachable without going through another java_library.
+ all_resources_deps = java_library_deps.All('android_resources')
+ if options.type == 'android_resources' and options.recursive_resource_deps:
+ # android_resources targets that want recursive resource deps also need to
+ # collect package_names from all library deps. This ensures the R.java files
+ # for these libraries will get pulled in along with the resources.
+ android_resources_library_deps = _DepsFromPathsWithFilters(
+ deps_configs_paths, allowlist=['java_library']).All('java_library')
+ if is_apk_or_module_target:
+ # android_resources deps which had recursive_resource_deps set should not
+ # have the manifests from the recursively collected deps added to this
+ # module. This keeps the manifest declarations in the child DFMs, since they
+ # will have the Java implementations.
+ def ExcludeRecursiveResourcesDeps(config):
+ return not config.get('includes_recursive_resources', False)
+
+ extra_manifest_deps = [
+ GetDepConfig(p) for p in GetAllDepsConfigsInOrder(
+ deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps)
+ ]
+
+ base_module_build_config = None
+ if options.base_module_build_config:
+ with open(options.base_module_build_config, 'r') as f:
+ base_module_build_config = json.load(f)
+
+ # Initialize some common config.
+ # Any value that needs to be queryable by dependents must go within deps_info.
+ config = {
+ 'deps_info': {
+ 'name': os.path.basename(options.build_config),
+ 'path': options.build_config,
+ 'type': options.type,
+ 'gn_target': options.gn_target,
+ 'deps_configs': [d['path'] for d in direct_deps],
+ 'chromium_code': not options.non_chromium_code,
+ },
+ # Info needed only by generate_gradle.py.
+ 'gradle': {}
+ }
+ deps_info = config['deps_info']
+ gradle = config['gradle']
+
+ if options.type == 'android_apk' and options.tested_apk_config:
+ tested_apk_deps = Deps([options.tested_apk_config])
+ tested_apk_config = tested_apk_deps.Direct()[0]
+ gradle['apk_under_test'] = tested_apk_config['name']
+
+ if options.type == 'android_app_bundle_module':
+ deps_info['is_base_module'] = bool(options.is_base_module)
+
+ # Required for generating gradle files.
+ if options.type == 'java_library':
+ deps_info['is_prebuilt'] = bool(options.is_prebuilt)
+ deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt
+
+ if options.android_manifest:
+ deps_info['android_manifest'] = options.android_manifest
+
+ if options.merged_android_manifest:
+ deps_info['merged_android_manifest'] = options.merged_android_manifest
+
+ if options.bundled_srcjars:
+ deps_info['bundled_srcjars'] = build_utils.ParseGnList(
+ options.bundled_srcjars)
+
+ if options.java_sources_file:
+ deps_info['java_sources_file'] = options.java_sources_file
+
+ if is_java_target:
+ if options.bundled_srcjars:
+ gradle['bundled_srcjars'] = deps_info['bundled_srcjars']
+
+ gradle['dependent_android_projects'] = []
+ gradle['dependent_java_projects'] = []
+ gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths()
+
+ if options.main_class:
+ deps_info['main_class'] = options.main_class
+
+ for c in deps.GradleLibraryProjectDeps():
+ if c['requires_android']:
+ gradle['dependent_android_projects'].append(c['path'])
+ else:
+ gradle['dependent_java_projects'].append(c['path'])
+
+ if options.r_text_path:
+ deps_info['r_text_path'] = options.r_text_path
+
+ # TODO(tiborg): Remove creation of JNI info for type group and java_library
+ # once we can generate the JNI registration based on APK / module targets as
+ # opposed to groups and libraries.
+ if is_apk_or_module_target or options.type in (
+ 'group', 'java_library', 'junit_binary'):
+ deps_info['jni'] = {}
+ all_java_sources = [c['java_sources_file'] for c in all_library_deps
+ if 'java_sources_file' in c]
+ if options.java_sources_file:
+ all_java_sources.append(options.java_sources_file)
+
+ if options.apk_proto_resources:
+ deps_info['proto_resources_path'] = options.apk_proto_resources
+
+ deps_info['version_name'] = options.version_name
+ deps_info['version_code'] = options.version_code
+ if options.module_pathmap_path:
+ deps_info['module_pathmap_path'] = options.module_pathmap_path
+ else:
+ # Ensure there is an entry, even if it is empty, for modules
+ # that have not enabled resource path shortening. Otherwise
+ # build_utils.ExpandFileArgs fails.
+ deps_info['module_pathmap_path'] = ''
+
+ if options.base_allowlist_rtxt_path:
+ deps_info['base_allowlist_rtxt_path'] = options.base_allowlist_rtxt_path
+ else:
+ # Ensure there is an entry, even if it is empty, for modules
+ # that don't need such a allowlist.
+ deps_info['base_allowlist_rtxt_path'] = ''
+
+ if is_java_target:
+ deps_info['requires_android'] = bool(options.requires_android)
+ deps_info['supports_android'] = bool(options.supports_android)
+
+ if not options.bypass_platform_checks:
+ deps_require_android = (all_resources_deps +
+ [d['name'] for d in all_library_deps if d['requires_android']])
+ deps_not_support_android = (
+ [d['name'] for d in all_library_deps if not d['supports_android']])
+
+ if deps_require_android and not options.requires_android:
+ raise Exception('Some deps require building for the Android platform: '
+ + str(deps_require_android))
+
+ if deps_not_support_android and options.supports_android:
+ raise Exception('Not all deps support the Android platform: '
+ + str(deps_not_support_android))
+
+ if is_apk_or_module_target or options.type == 'dist_jar':
+ all_dex_files = [c['dex_path'] for c in all_library_deps]
+
+ if is_java_target:
+ # Classpath values filled in below (after applying tested_apk_config).
+ config['javac'] = {}
+ if options.aar_path:
+ deps_info['aar_path'] = options.aar_path
+ if options.unprocessed_jar_path:
+ deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path
+ deps_info['interface_jar_path'] = options.interface_jar_path
+ if options.public_deps_configs:
+ deps_info['public_deps_configs'] = build_utils.ParseGnList(
+ options.public_deps_configs)
+ if options.device_jar_path:
+ deps_info['device_jar_path'] = options.device_jar_path
+ if options.host_jar_path:
+ deps_info['host_jar_path'] = options.host_jar_path
+ if options.dex_path:
+ deps_info['dex_path'] = options.dex_path
+ if is_apk_or_module_target:
+ all_dex_files.append(options.dex_path)
+ if options.low_classpath_priority:
+ deps_info['low_classpath_priority'] = True
+ if options.type == 'android_apk':
+ deps_info['apk_path'] = options.apk_path
+ deps_info['incremental_apk_path'] = options.incremental_apk_path
+ deps_info['incremental_install_json_path'] = (
+ options.incremental_install_json_path)
+
+ if options.type == 'android_assets':
+ all_asset_sources = []
+ if options.asset_renaming_sources:
+ all_asset_sources.extend(
+ build_utils.ParseGnList(options.asset_renaming_sources))
+ if options.asset_sources:
+ all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources))
+
+ deps_info['assets'] = {
+ 'sources': all_asset_sources
+ }
+ if options.asset_renaming_destinations:
+ deps_info['assets']['outputs'] = (
+ build_utils.ParseGnList(options.asset_renaming_destinations))
+ if options.disable_asset_compression:
+ deps_info['assets']['disable_compression'] = True
+ if options.treat_as_locale_paks:
+ deps_info['assets']['treat_as_locale_paks'] = True
+
+ if options.type == 'android_resources':
+ deps_info['resources_zip'] = options.resources_zip
+ if options.resource_overlay:
+ deps_info['resource_overlay'] = True
+ if options.srcjar:
+ deps_info['srcjar'] = options.srcjar
+ if options.android_manifest:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if options.package_name:
+ deps_info['package_name'] = options.package_name
+ deps_info['res_sources_path'] = ''
+ if options.res_sources_path:
+ deps_info['res_sources_path'] = options.res_sources_path
+
+ if options.requires_android and options.type == 'java_library':
+ if options.package_name:
+ deps_info['package_name'] = options.package_name
+
+ if options.type in ('android_resources', 'android_apk', 'junit_binary',
+ 'dist_aar', 'android_app_bundle_module', 'java_library'):
+ dependency_zips = []
+ dependency_zip_overlays = []
+ for c in all_resources_deps:
+ if not c['resources_zip']:
+ continue
+
+ dependency_zips.append(c['resources_zip'])
+ if c.get('resource_overlay'):
+ dependency_zip_overlays.append(c['resources_zip'])
+
+ extra_package_names = []
+
+ if options.type != 'android_resources':
+ extra_package_names = [
+ c['package_name'] for c in all_resources_deps if 'package_name' in c
+ ]
+
+ # android_resources targets which specified recursive_resource_deps may
+ # have extra_package_names.
+ for resources_dep in all_resources_deps:
+ extra_package_names.extend(resources_dep['extra_package_names'])
+
+ # In final types (i.e. apks and modules) that create real R.java files,
+ # they must collect package names from java_libraries as well.
+ # https://crbug.com/1073476
+ if options.type != 'java_library':
+ extra_package_names.extend([
+ c['package_name'] for c in all_library_deps if 'package_name' in c
+ ])
+ elif options.recursive_resource_deps:
+ # Pull extra_package_names from library deps if recursive resource deps
+ # are required.
+ extra_package_names = [
+ c['package_name'] for c in android_resources_library_deps
+ if 'package_name' in c
+ ]
+ config['deps_info']['includes_recursive_resources'] = True
+
+ if options.type in ('dist_aar', 'java_library'):
+ r_text_files = [
+ c['r_text_path'] for c in all_resources_deps if 'r_text_path' in c
+ ]
+ deps_info['dependency_r_txt_files'] = r_text_files
+
+ # For feature modules, remove any resources that already exist in the base
+ # module.
+ if base_module_build_config:
+ dependency_zips = [
+ c for c in dependency_zips
+ if c not in base_module_build_config['deps_info']['dependency_zips']
+ ]
+ dependency_zip_overlays = [
+ c for c in dependency_zip_overlays if c not in
+ base_module_build_config['deps_info']['dependency_zip_overlays']
+ ]
+ extra_package_names = [
+ c for c in extra_package_names if c not in
+ base_module_build_config['deps_info']['extra_package_names']
+ ]
+
+ if options.type == 'android_apk' and options.tested_apk_config:
+ config['deps_info']['arsc_package_name'] = (
+ tested_apk_config['package_name'])
+ # We should not shadow the actual R.java files of the apk_under_test by
+ # creating new R.java files with the same package names in the tested apk.
+ extra_package_names = [
+ package for package in extra_package_names
+ if package not in tested_apk_config['extra_package_names']
+ ]
+ if options.res_size_info:
+ config['deps_info']['res_size_info'] = options.res_size_info
+
+ config['deps_info']['dependency_zips'] = dependency_zips
+ config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays
+ config['deps_info']['extra_package_names'] = extra_package_names
+
+ # These are .jars to add to javac classpath but not to runtime classpath.
+ extra_classpath_jars = build_utils.ParseGnList(options.extra_classpath_jars)
+ if extra_classpath_jars:
+ deps_info['extra_classpath_jars'] = extra_classpath_jars
+
+ mergeable_android_manifests = build_utils.ParseGnList(
+ options.mergeable_android_manifests)
+ if mergeable_android_manifests:
+ deps_info['mergeable_android_manifests'] = mergeable_android_manifests
+
+ extra_proguard_classpath_jars = []
+ proguard_configs = build_utils.ParseGnList(options.proguard_configs)
+ if proguard_configs:
+ # Make a copy of |proguard_configs| since it's mutated below.
+ deps_info['proguard_configs'] = list(proguard_configs)
+
+
+ if is_java_target:
+ if options.ignore_dependency_public_deps:
+ classpath_direct_deps = deps.Direct()
+ classpath_direct_library_deps = deps.Direct('java_library')
+ else:
+ classpath_direct_deps = deps.DirectAndChildPublicDeps()
+ classpath_direct_library_deps = deps.DirectAndChildPublicDeps(
+ 'java_library')
+
+ # The classpath used to compile this target when annotation processors are
+ # present.
+ javac_classpath = set(c['unprocessed_jar_path']
+ for c in classpath_direct_library_deps)
+ # The classpath used to compile this target when annotation processors are
+ # not present. These are also always used to know when a target needs to be
+ # rebuilt.
+ javac_interface_classpath = set(c['interface_jar_path']
+ for c in classpath_direct_library_deps)
+
+ # Preserve order of |all_library_deps|. Move low priority libraries to the
+ # end of the classpath.
+ all_library_deps_sorted_for_classpath = sorted(
+ all_library_deps[::-1], key=_CompareClasspathPriority)
+
+ # The classpath used for bytecode-rewritting.
+ javac_full_classpath = OrderedSet.fromkeys(
+ c['unprocessed_jar_path']
+ for c in all_library_deps_sorted_for_classpath)
+ # The classpath used for error prone.
+ javac_full_interface_classpath = OrderedSet.fromkeys(
+ c['interface_jar_path'] for c in all_library_deps_sorted_for_classpath)
+
+ # Adding base module to classpath to compile against its R.java file
+ if base_module_build_config:
+ javac_full_classpath.add(
+ base_module_build_config['deps_info']['unprocessed_jar_path'])
+ javac_full_interface_classpath.add(
+ base_module_build_config['deps_info']['interface_jar_path'])
+ # Turbine now compiles headers against only the direct classpath, so the
+ # base module's interface jar must be on the direct interface classpath.
+ javac_interface_classpath.add(
+ base_module_build_config['deps_info']['interface_jar_path'])
+
+ for dep in classpath_direct_deps:
+ if 'extra_classpath_jars' in dep:
+ javac_classpath.update(dep['extra_classpath_jars'])
+ javac_interface_classpath.update(dep['extra_classpath_jars'])
+ for dep in all_deps:
+ if 'extra_classpath_jars' in dep:
+ javac_full_classpath.update(dep['extra_classpath_jars'])
+ javac_full_interface_classpath.update(dep['extra_classpath_jars'])
+
+ # TODO(agrieve): Might be less confusing to fold these into bootclasspath.
+ # Deps to add to the compile-time classpath (but not the runtime classpath).
+ # These are jars specified by input_jars_paths that almost never change.
+ # Just add them directly to all the classpaths.
+ if options.extra_classpath_jars:
+ javac_classpath.update(extra_classpath_jars)
+ javac_interface_classpath.update(extra_classpath_jars)
+ javac_full_classpath.update(extra_classpath_jars)
+ javac_full_interface_classpath.update(extra_classpath_jars)
+
+ if is_java_target or options.type == 'android_app_bundle':
+ # The classpath to use to run this target (or as an input to ProGuard).
+ device_classpath = []
+ if is_java_target and options.device_jar_path:
+ device_classpath.append(options.device_jar_path)
+ device_classpath.extend(
+ c.get('device_jar_path') for c in all_library_deps
+ if c.get('device_jar_path'))
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ device_classpath.extend(c for c in d.get('device_classpath', [])
+ if c not in device_classpath)
+
+ if options.type in ('dist_jar', 'java_binary', 'junit_binary'):
+ # The classpath to use to run this target.
+ host_classpath = []
+ if options.host_jar_path:
+ host_classpath.append(options.host_jar_path)
+ host_classpath.extend(c['host_jar_path'] for c in all_library_deps)
+ deps_info['host_classpath'] = host_classpath
+
+ # We allow lint to be run on android_apk targets, so we collect lint
+ # artifacts for them.
+ # We allow lint to be run on android_app_bundle targets, so we need to
+ # collect lint artifacts for the android_app_bundle_module targets that the
+ # bundle includes. Different android_app_bundle targets may include different
+ # android_app_bundle_module targets, so the bundle needs to be able to
+ # de-duplicate these lint artifacts.
+ if options.type in ('android_app_bundle_module', 'android_apk'):
+ # Collect all sources and resources at the apk/bundle_module level.
+ lint_aars = set()
+ lint_srcjars = set()
+ lint_java_sources = set()
+ lint_resource_sources = set()
+ lint_resource_zips = set()
+
+ if options.java_sources_file:
+ lint_java_sources.add(options.java_sources_file)
+ if options.bundled_srcjars:
+ lint_srcjars.update(deps_info['bundled_srcjars'])
+ for c in all_library_deps:
+ if c['chromium_code'] and c['requires_android']:
+ if 'java_sources_file' in c:
+ lint_java_sources.add(c['java_sources_file'])
+ lint_srcjars.update(c['bundled_srcjars'])
+ if 'aar_path' in c:
+ lint_aars.add(c['aar_path'])
+
+ if options.res_sources_path:
+ lint_resource_sources.add(options.res_sources_path)
+ if options.resources_zip:
+ lint_resource_zips.add(options.resources_zip)
+ for c in all_resources_deps:
+ if c['chromium_code']:
+ # Prefer res_sources_path to resources_zips so that lint errors have
+ # real paths and to avoid needing to extract during lint.
+ if c['res_sources_path']:
+ lint_resource_sources.add(c['res_sources_path'])
+ else:
+ lint_resource_zips.add(c['resources_zip'])
+
+ deps_info['lint_aars'] = sorted(lint_aars)
+ deps_info['lint_srcjars'] = sorted(lint_srcjars)
+ deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+ deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+ deps_info['lint_extra_android_manifests'] = []
+
+ if options.type == 'android_apk':
+ assert options.android_manifest, 'Android APKs must define a manifest'
+ deps_info['lint_android_manifest'] = options.android_manifest
+
+ if options.type == 'android_app_bundle':
+ module_configs = [
+ GetDepConfig(c)
+ for c in build_utils.ParseGnList(options.module_build_configs)
+ ]
+ jni_all_source = set()
+ lint_aars = set()
+ lint_srcjars = set()
+ lint_java_sources = set()
+ lint_resource_sources = set()
+ lint_resource_zips = set()
+ lint_extra_android_manifests = set()
+ for c in module_configs:
+ if c['is_base_module']:
+ assert 'base_module_config' not in deps_info, (
+ 'Must have exactly 1 base module!')
+ deps_info['base_module_config'] = c['path']
+ # Use the base module's android manifest for linting.
+ deps_info['lint_android_manifest'] = c['android_manifest']
+ else:
+ lint_extra_android_manifests.add(c['android_manifest'])
+ jni_all_source.update(c['jni']['all_source'])
+ lint_aars.update(c['lint_aars'])
+ lint_srcjars.update(c['lint_srcjars'])
+ lint_java_sources.update(c['lint_java_sources'])
+ lint_resource_sources.update(c['lint_resource_sources'])
+ lint_resource_zips.update(c['lint_resource_zips'])
+ deps_info['jni'] = {'all_source': sorted(jni_all_source)}
+ deps_info['lint_aars'] = sorted(lint_aars)
+ deps_info['lint_srcjars'] = sorted(lint_srcjars)
+ deps_info['lint_java_sources'] = sorted(lint_java_sources)
+ deps_info['lint_resource_sources'] = sorted(lint_resource_sources)
+ deps_info['lint_resource_zips'] = sorted(lint_resource_zips)
+ deps_info['lint_extra_android_manifests'] = sorted(
+ lint_extra_android_manifests)
+
+ # Map configs to classpath entries that should be included in their final dex.
+ classpath_entries_by_owning_config = collections.defaultdict(list)
+ extra_main_r_text_files = []
+ if is_static_library_dex_provider_target:
+ # Map classpath entries to configs that include them in their classpath.
+ configs_by_classpath_entry = collections.defaultdict(list)
+ for config_path, dep_config in (sorted(
+ static_library_dependent_configs_by_path.items())):
+ # For bundles, only the jar path and jni sources of the base module
+ # are relevant for proguard. Should be updated when bundle feature
+ # modules support JNI.
+ base_config = dep_config
+ if dep_config['type'] == 'android_app_bundle':
+ base_config = GetDepConfig(dep_config['base_module_config'])
+ extra_main_r_text_files.append(base_config['r_text_path'])
+ proguard_configs.extend(dep_config['proguard_all_configs'])
+ extra_proguard_classpath_jars.extend(
+ dep_config['proguard_classpath_jars'])
+ all_java_sources.extend(base_config['jni']['all_source'])
+
+ # The srcjars containing the generated R.java files are excluded for APK
+  # targets that use static libraries, so we add them here to ensure the
+ # union of resource IDs are available in the static library APK.
+ for package in base_config['extra_package_names']:
+ if package not in extra_package_names:
+ extra_package_names.append(package)
+ for cp_entry in dep_config['device_classpath']:
+ configs_by_classpath_entry[cp_entry].append(config_path)
+
+ for cp_entry in device_classpath:
+ configs_by_classpath_entry[cp_entry].append(options.build_config)
+
+ for cp_entry, candidate_configs in configs_by_classpath_entry.items():
+ config_path = (candidate_configs[0]
+ if len(candidate_configs) == 1 else options.build_config)
+ classpath_entries_by_owning_config[config_path].append(cp_entry)
+ device_classpath.append(cp_entry)
+
+ device_classpath = sorted(set(device_classpath))
+
+ deps_info['static_library_proguard_mapping_output_paths'] = sorted([
+ d['proguard_mapping_path']
+ for d in static_library_dependent_configs_by_path.values()
+ ])
+ deps_info['static_library_dependent_classpath_configs'] = {
+ path: sorted(set(classpath))
+ for path, classpath in classpath_entries_by_owning_config.items()
+ }
+ deps_info['extra_main_r_text_files'] = sorted(extra_main_r_text_files)
+
+ if is_apk_or_module_target or options.type in ('group', 'java_library',
+ 'junit_binary'):
+ deps_info['jni']['all_source'] = sorted(set(all_java_sources))
+
+ system_jars = [c['unprocessed_jar_path'] for c in system_library_deps]
+ system_interface_jars = [c['interface_jar_path'] for c in system_library_deps]
+ if system_library_deps:
+ config['android'] = {}
+ config['android']['sdk_interface_jars'] = system_interface_jars
+ config['android']['sdk_jars'] = system_jars
+
+ if options.type in ('android_apk', 'dist_aar',
+ 'dist_jar', 'android_app_bundle_module', 'android_app_bundle'):
+ for c in all_deps:
+ proguard_configs.extend(c.get('proguard_configs', []))
+ extra_proguard_classpath_jars.extend(c.get('extra_classpath_jars', []))
+ if options.type == 'android_app_bundle':
+ for c in deps.Direct('android_app_bundle_module'):
+ proguard_configs.extend(p for p in c.get('proguard_configs', []))
+ if options.type == 'android_app_bundle':
+ for d in deps.Direct('android_app_bundle_module'):
+ extra_proguard_classpath_jars.extend(
+ c for c in d.get('proguard_classpath_jars', [])
+ if c not in extra_proguard_classpath_jars)
+
+ if options.type == 'android_app_bundle':
+ deps_proguard_enabled = []
+ deps_proguard_disabled = []
+ for d in deps.Direct('android_app_bundle_module'):
+ if not d['device_classpath']:
+ # We don't care about modules that have no Java code for proguarding.
+ continue
+ if d['proguard_enabled']:
+ deps_proguard_enabled.append(d['name'])
+ else:
+ deps_proguard_disabled.append(d['name'])
+ if deps_proguard_enabled and deps_proguard_disabled:
+ raise Exception('Deps %s have proguard enabled while deps %s have '
+ 'proguard disabled' % (deps_proguard_enabled,
+ deps_proguard_disabled))
+ deps_info['proguard_enabled'] = bool(options.proguard_enabled)
+
+ if options.proguard_mapping_path:
+ deps_info['proguard_mapping_path'] = options.proguard_mapping_path
+
+ # The java code for an instrumentation test apk is assembled differently for
+ # ProGuard vs. non-ProGuard.
+ #
+ # Without ProGuard: Each library's jar is dexed separately and then combined
+ # into a single classes.dex. A test apk will include all dex files not already
+ # present in the apk-under-test. At runtime all test code lives in the test
+ # apk, and the program code lives in the apk-under-test.
+ #
+ # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs
+ # a single .jar, which is then dexed into a classes.dex. A test apk includes
+ # all jar files from the program and the tests because having them separate
+ # doesn't work with ProGuard's whole-program optimizations. Although the
+ # apk-under-test still has all of its code in its classes.dex, none of it is
+  # used at runtime because the copy of it within the test apk takes precedence.
+
+ if options.type == 'android_apk' and options.tested_apk_config:
+ if tested_apk_config['proguard_enabled']:
+ assert options.proguard_enabled, ('proguard must be enabled for '
+ 'instrumentation apks if it\'s enabled for the tested apk.')
+ # Mutating lists, so no need to explicitly re-assign to dict.
+ proguard_configs.extend(
+ p for p in tested_apk_config['proguard_all_configs'])
+ extra_proguard_classpath_jars.extend(
+ p for p in tested_apk_config['proguard_classpath_jars'])
+ tested_apk_config = GetDepConfig(options.tested_apk_config)
+ deps_info['proguard_under_test_mapping'] = (
+ tested_apk_config['proguard_mapping_path'])
+ elif options.proguard_enabled:
+ # Not sure why you'd want to proguard the test apk when the under-test apk
+ # is not proguarded, but it's easy enough to support.
+ deps_info['proguard_under_test_mapping'] = ''
+
+ # Add all tested classes to the test's classpath to ensure that the test's
+ # java code is a superset of the tested apk's java code
+ device_classpath_extended = list(device_classpath)
+ device_classpath_extended.extend(
+ p for p in tested_apk_config['device_classpath']
+ if p not in device_classpath)
+ # Include in the classpath classes that are added directly to the apk under
+ # test (those that are not a part of a java_library).
+ javac_classpath.add(tested_apk_config['unprocessed_jar_path'])
+ javac_interface_classpath.add(tested_apk_config['interface_jar_path'])
+ javac_full_classpath.add(tested_apk_config['unprocessed_jar_path'])
+ javac_full_interface_classpath.add(tested_apk_config['interface_jar_path'])
+ javac_full_classpath.update(tested_apk_config['javac_full_classpath'])
+ javac_full_interface_classpath.update(
+ tested_apk_config['javac_full_interface_classpath'])
+
+ # Exclude .jar files from the test apk that exist within the apk under test.
+ tested_apk_library_deps = tested_apk_deps.All('java_library')
+ tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps}
+ all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files]
+ tested_apk_jar_files = set(tested_apk_config['device_classpath'])
+ device_classpath = [
+ p for p in device_classpath if p not in tested_apk_jar_files
+ ]
+
+ if options.type in ('android_apk', 'dist_aar', 'dist_jar',
+ 'android_app_bundle_module', 'android_app_bundle'):
+ deps_info['proguard_all_configs'] = sorted(set(proguard_configs))
+ deps_info['proguard_classpath_jars'] = sorted(
+ set(extra_proguard_classpath_jars))
+
+ # Dependencies for the final dex file of an apk.
+ if (is_apk_or_module_target or options.final_dex_path
+ or options.type == 'dist_jar'):
+ config['final_dex'] = {}
+ dex_config = config['final_dex']
+ dex_config['path'] = options.final_dex_path
+ if is_apk_or_module_target or options.type == 'dist_jar':
+ dex_config['all_dex_files'] = all_dex_files
+
+ if is_java_target:
+ config['javac']['classpath'] = sorted(javac_classpath)
+ config['javac']['interface_classpath'] = sorted(javac_interface_classpath)
+ # Direct() will be of type 'java_annotation_processor', and so not included
+ # in All('java_library').
+ # Annotation processors run as part of the build, so need host_jar_path.
+ config['javac']['processor_classpath'] = [
+ c['host_jar_path'] for c in processor_deps.Direct()
+ if c.get('host_jar_path')
+ ]
+ config['javac']['processor_classpath'] += [
+ c['host_jar_path'] for c in processor_deps.All('java_library')
+ ]
+ config['javac']['processor_classes'] = [
+ c['main_class'] for c in processor_deps.Direct()]
+ deps_info['javac_full_classpath'] = list(javac_full_classpath)
+ deps_info['javac_full_interface_classpath'] = list(
+ javac_full_interface_classpath)
+ elif options.type == 'android_app_bundle':
+ # bundles require javac_full_classpath to create .aab.jar.info and require
+ # javac_full_interface_classpath for lint.
+ javac_full_classpath = OrderedSet()
+ javac_full_interface_classpath = OrderedSet()
+ for d in deps.Direct('android_app_bundle_module'):
+ javac_full_classpath.update(d['javac_full_classpath'])
+ javac_full_interface_classpath.update(d['javac_full_interface_classpath'])
+ javac_full_classpath.add(d['unprocessed_jar_path'])
+ javac_full_interface_classpath.add(d['interface_jar_path'])
+ deps_info['javac_full_classpath'] = list(javac_full_classpath)
+ deps_info['javac_full_interface_classpath'] = list(
+ javac_full_interface_classpath)
+
+ if options.type in ('android_apk', 'dist_jar', 'android_app_bundle_module',
+ 'android_app_bundle'):
+ deps_info['device_classpath'] = device_classpath
+ if options.tested_apk_config:
+ deps_info['device_classpath_extended'] = device_classpath_extended
+
+ if options.type in ('android_apk', 'dist_jar'):
+ all_interface_jars = []
+ if options.interface_jar_path:
+ all_interface_jars.append(options.interface_jar_path)
+ all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps)
+
+ config['dist_jar'] = {
+ 'all_interface_jars': all_interface_jars,
+ }
+
+ if is_apk_or_module_target:
+ manifest = AndroidManifest(options.android_manifest)
+ deps_info['package_name'] = manifest.GetPackageName()
+ if not options.tested_apk_config and manifest.GetInstrumentationElements():
+ # This must then have instrumentation only for itself.
+ manifest.CheckInstrumentationElements(manifest.GetPackageName())
+
+ library_paths = []
+ java_libraries_list = None
+ if options.shared_libraries_runtime_deps:
+ library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.shared_libraries_runtime_deps)
+ java_libraries_list = _CreateJavaLibrariesList(library_paths)
+ all_inputs.append(options.shared_libraries_runtime_deps)
+
+ secondary_abi_library_paths = []
+ if options.secondary_abi_shared_libraries_runtime_deps:
+ secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps(
+ options.secondary_abi_shared_libraries_runtime_deps)
+ all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps)
+
+ native_library_placeholder_paths = build_utils.ParseGnList(
+ options.native_lib_placeholders)
+
+ secondary_native_library_placeholder_paths = build_utils.ParseGnList(
+ options.secondary_native_lib_placeholders)
+
+ loadable_modules = build_utils.ParseGnList(options.loadable_modules)
+ secondary_abi_loadable_modules = build_utils.ParseGnList(
+ options.secondary_abi_loadable_modules)
+
+ config['native'] = {
+ 'libraries':
+ library_paths,
+ 'native_library_placeholders':
+ native_library_placeholder_paths,
+ 'secondary_abi_libraries':
+ secondary_abi_library_paths,
+ 'secondary_native_library_placeholders':
+ secondary_native_library_placeholder_paths,
+ 'java_libraries_list':
+ java_libraries_list,
+ 'uncompress_shared_libraries':
+ options.uncompress_shared_libraries,
+ 'library_always_compress':
+ options.library_always_compress,
+ 'library_renames':
+ options.library_renames,
+ 'loadable_modules':
+ loadable_modules,
+ 'secondary_abi_loadable_modules':
+ secondary_abi_loadable_modules,
+ }
+ config['assets'], config['uncompressed_assets'], locale_paks = (
+ _MergeAssets(deps.All('android_assets')))
+
+ deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets(
+ config['uncompressed_assets'], locale_paks)
+
+ config['extra_android_manifests'] = []
+ for c in extra_manifest_deps:
+ config['extra_android_manifests'].extend(
+ c.get('mergeable_android_manifests', []))
+
+ # Collect java resources
+ java_resources_jars = [d['java_resources_jar'] for d in all_library_deps
+ if 'java_resources_jar' in d]
+ if options.tested_apk_config:
+ tested_apk_resource_jars = [d['java_resources_jar']
+ for d in tested_apk_library_deps
+ if 'java_resources_jar' in d]
+ java_resources_jars = [jar for jar in java_resources_jars
+ if jar not in tested_apk_resource_jars]
+ config['java_resources_jars'] = java_resources_jars
+
+ if options.java_resources_jar_path:
+ deps_info['java_resources_jar'] = options.java_resources_jar_path
+
+ # DYNAMIC FEATURE MODULES:
+ # Make sure that dependencies that exist on the base module
+ # are not duplicated on the feature module.
+ if base_module_build_config:
+ base = base_module_build_config
+ RemoveObjDups(config, base, 'deps_info', 'device_classpath')
+ RemoveObjDups(config, base, 'deps_info', 'javac_full_classpath')
+ RemoveObjDups(config, base, 'deps_info', 'javac_full_interface_classpath')
+ RemoveObjDups(config, base, 'deps_info', 'jni', 'all_source')
+ RemoveObjDups(config, base, 'final_dex', 'all_dex_files')
+ RemoveObjDups(config, base, 'extra_android_manifests')
+
+ if is_java_target:
+ jar_to_target = {}
+ _AddJarMapping(jar_to_target, [deps_info])
+ _AddJarMapping(jar_to_target, all_deps)
+ if base_module_build_config:
+ _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']])
+ if options.tested_apk_config:
+ _AddJarMapping(jar_to_target, [tested_apk_config])
+ for jar, target in zip(tested_apk_config['javac_full_classpath'],
+ tested_apk_config['javac_full_classpath_targets']):
+ jar_to_target[jar] = target
+
+ # Used by bytecode_processor to give better error message when missing
+ # deps are found.
+ config['deps_info']['javac_full_classpath_targets'] = [
+ jar_to_target[x] for x in deps_info['javac_full_classpath']
+ ]
+
+ build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+ if options.depfile:
+ build_utils.WriteDepfile(options.depfile, options.build_config,
+ sorted(set(all_inputs)))
+
+
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/android/gyp/write_build_config.pydeps b/third_party/libwebrtc/build/android/gyp/write_build_config.pydeps
new file mode 100644
index 0000000000..e9c7d9fcaa
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/write_build_config.pydeps
@@ -0,0 +1,31 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_compat.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/asyncfilters.py
+../../../third_party/jinja2/asyncsupport.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
+write_build_config.py
diff --git a/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.py b/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.py
new file mode 100755
index 0000000000..322b8b2c82
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+#
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes list of native libraries to srcjar file."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
# Java source template for the generated NativeLibraries class. Literal
# braces are doubled because the text is rendered with str.format(); the
# {MAYBE_FINAL}/{USE_*}/{LIBRARIES}/{CPU_FAMILY} placeholders are filled
# in by main() below.
_NATIVE_LIBRARIES_TEMPLATE = """\
// This file is autogenerated by
// build/android/gyp/write_native_libraries_java.py
// Please do not change its content.

package org.chromium.build;

public class NativeLibraries {{
    public static final int CPU_FAMILY_UNKNOWN = 0;
    public static final int CPU_FAMILY_ARM = 1;
    public static final int CPU_FAMILY_MIPS = 2;
    public static final int CPU_FAMILY_X86 = 3;

    // Set to true to enable the use of the Chromium Linker.
    public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
    public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
    public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};

    // This is the list of native libraries to be loaded (in the correct order)
    // by LibraryLoader.java.
    public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};

    public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
}}
"""
+
+
+def _FormatLibraryName(library_name):
+ filename = os.path.split(library_name)[1]
+ assert filename.startswith('lib')
+ assert filename.endswith('.so')
+ # Remove lib prefix and .so suffix.
+ return '"%s"' % filename[3:-3]
+
+
def main():
  """Writes a srcjar containing the generated NativeLibraries.java.

  Parses flags describing the native libraries and linker configuration,
  renders _NATIVE_LIBRARIES_TEMPLATE, and writes the result hermetically
  to the srcjar given by --output. Optionally writes a depfile.
  """
  parser = argparse.ArgumentParser()

  build_utils.AddDepfileOption(parser)
  parser.add_argument('--final', action='store_true', help='Use final fields.')
  parser.add_argument(
      '--enable-chromium-linker',
      action='store_true',
      help='Enable Chromium linker.')
  parser.add_argument(
      '--load-library-from-apk',
      action='store_true',
      # Fixed typo in user-visible help text: "libaries" -> "libraries".
      help='Load libraries from APK without uncompressing.')
  parser.add_argument(
      '--use-modern-linker', action='store_true', help='To use ModernLinker.')
  parser.add_argument(
      '--native-libraries-list', help='File with list of native libraries.')
  parser.add_argument(
      '--cpu-family',
      choices={
          'CPU_FAMILY_ARM', 'CPU_FAMILY_X86', 'CPU_FAMILY_MIPS',
          'CPU_FAMILY_UNKNOWN'
      },
      required=True,
      default='CPU_FAMILY_UNKNOWN',
      help='CPU family.')
  parser.add_argument(
      '--main-component-library',
      help='If used, the list of native libraries will only contain this '
      'library. Dependencies are found in the library\'s "NEEDED" section.')

  parser.add_argument(
      '--output', required=True, help='Path to the generated srcjar file.')

  options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))

  # Loading directly from the APK only works via the Chromium linker.
  assert (options.enable_chromium_linker or not options.load_library_from_apk)

  native_libraries_list = []
  if options.main_component_library:
    native_libraries_list.append(
        _FormatLibraryName(options.main_component_library))
  elif options.native_libraries_list:
    with open(options.native_libraries_list) as f:
      for path in f:
        path = path.strip()
        native_libraries_list.append(_FormatLibraryName(path))

  def bool_str(value):
    # Renders the initializer for a boolean field. A non-final field
    # defaults to false implicitly, so it needs no "= false" initializer.
    if value:
      return ' = true'
    elif options.final:
      return ' = false'
    return ''

  format_dict = {
      'MAYBE_FINAL': 'final ' if options.final else '',
      'USE_LINKER': bool_str(options.enable_chromium_linker),
      'USE_LIBRARY_IN_ZIP_FILE': bool_str(options.load_library_from_apk),
      'USE_MODERN_LINKER': bool_str(options.use_modern_linker),
      'LIBRARIES': ','.join(native_libraries_list),
      'CPU_FAMILY': options.cpu_family,
  }
  # AtomicOutput + AddToZipHermetic keep the srcjar byte-stable so ninja
  # does not rebuild downstream targets when the content is unchanged.
  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as srcjar_file:
      build_utils.AddToZipHermetic(
          zip_file=srcjar_file,
          zip_path='org/chromium/build/NativeLibraries.java',
          data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict))

  if options.depfile:
    # The library list file is the only implicit input; everything else
    # arrives via flags.
    assert options.native_libraries_list
    build_utils.WriteDepfile(options.depfile,
                             options.output,
                             inputs=[options.native_libraries_list])
+
+
if __name__ == '__main__':
  # main() returns None on success, which sys.exit() maps to status 0.
  sys.exit(main())
diff --git a/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.pydeps b/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.pydeps
new file mode 100644
index 0000000000..f5176ef78e
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/write_native_libraries_java.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+write_native_libraries_java.py
diff --git a/third_party/libwebrtc/build/android/gyp/zip.py b/third_party/libwebrtc/build/android/gyp/zip.py
new file mode 100755
index 0000000000..6b405400eb
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/zip.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Archives a set of files."""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
def main(args):
  """Zips --input-files and merges --input-zips into --output.

  Args:
    args: Command-line arguments, possibly containing @FileArg references.

  The archive is written atomically; a depfile (listing the merged input
  zips) is written when --depfile is given.
  """
  args = build_utils.ExpandFileArgs(args)
  # Bug fix: the expanded argument list was previously passed to the
  # ArgumentParser constructor, whose first positional parameter is the
  # program name (prog) -- which mangled usage/help output. The args belong
  # to parse_args() below.
  parser = argparse.ArgumentParser()
  parser.add_argument('--input-files', help='GN-list of files to zip.')
  parser.add_argument(
      '--input-files-base-dir',
      help='Paths in the archive will be relative to this directory')
  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
  parser.add_argument(
      '--input-zips-excluded-globs',
      help='GN-list of globs for paths to exclude.')
  parser.add_argument('--output', required=True, help='Path to output archive.')
  compress_group = parser.add_mutually_exclusive_group()
  compress_group.add_argument(
      '--compress', action='store_true', help='Compress entries')
  compress_group.add_argument(
      '--no-compress',
      action='store_false',
      dest='compress',
      help='Do not compress entries')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args(args)

  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as out_zip:
      depfile_deps = None
      if options.input_files:
        files = build_utils.ParseGnList(options.input_files)
        build_utils.DoZip(
            files,
            out_zip,
            base_dir=options.input_files_base_dir,
            compress_fn=lambda _: options.compress)

      if options.input_zips:
        files = build_utils.ParseGnList(options.input_zips)
        depfile_deps = files
        path_transform = None
        if options.input_zips_excluded_globs:
          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
          # Returning None from the transform drops the entry entirely.
          path_transform = (
              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
        build_utils.MergeZips(
            out_zip,
            files,
            path_transform=path_transform,
            compress=options.compress)

  # Depfile used only by dist_jar().
  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             options.output,
                             inputs=depfile_deps)
+
+
if __name__ == '__main__':
  # Note: main()'s return value is not propagated as an exit status here.
  main(sys.argv[1:])
diff --git a/third_party/libwebrtc/build/android/gyp/zip.pydeps b/third_party/libwebrtc/build/android/gyp/zip.pydeps
new file mode 100644
index 0000000000..36affd1707
--- /dev/null
+++ b/third_party/libwebrtc/build/android/gyp/zip.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py
+../../gn_helpers.py
+util/__init__.py
+util/build_utils.py
+zip.py