author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
commit     6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
tree       a68f146d7fa01f0134297619fbe7e33db084e0aa /third_party/libwebrtc/build/toolchain/win
parent     Initial commit. (diff)
Adding upstream version 1:115.7.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/build/toolchain/win')
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/BUILD.gn | 587
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/midl.gni | 199
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/midl.py | 487
-rwxr-xr-x  third_party/libwebrtc/build/toolchain/win/ml.py | 290
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/rc/.gitignore | 3
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/rc/README.md | 30
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/rc/linux64/rc.sha1 | 1
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/rc/mac/rc.sha1 | 1
-rwxr-xr-x  third_party/libwebrtc/build/toolchain/win/rc/rc.py | 276
-rwxr-xr-x  third_party/libwebrtc/build/toolchain/win/rc/upload_rc_binaries.sh | 46
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/rc/win/rc.exe.sha1 | 1
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/setup_toolchain.py | 314
-rw-r--r--  third_party/libwebrtc/build/toolchain/win/tool_wrapper.py | 190
13 files changed, 2425 insertions, 0 deletions
diff --git a/third_party/libwebrtc/build/toolchain/win/BUILD.gn b/third_party/libwebrtc/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000000..571235286e
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/BUILD.gn
@@ -0,0 +1,587 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Set up the Visual Studio state.
+#
+# setup_toolchain.py takes the VS path and the compiler wrapper tool as
+# arguments. It writes "environment.x86" and "environment.x64" to the build
+# directory and returns a scope of values to us.
+
+# This tool is used as a wrapper for various commands below.
+tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
+
+if (use_rbe) {
+ goma_prefix = ""
+ rbe_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+ clang_prefix = rbe_prefix
+} else if (use_goma) {
+ if (host_os == "win") {
+ goma_prefix = "$goma_dir/gomacc.exe "
+ } else {
+ goma_prefix = "$goma_dir/gomacc "
+ }
+ clang_prefix = goma_prefix
+} else {
+ goma_prefix = ""
+ if (cc_wrapper != "") {
+ clang_prefix = cc_wrapper + " "
+ } else {
+ clang_prefix = ""
+ }
+}
+
+# Copy the VS runtime DLL for the default toolchain to the root build directory
+# so things will run.
+if (current_toolchain == default_toolchain) {
+ if (is_debug) {
+ configuration_name = "Debug"
+ } else {
+ configuration_name = "Release"
+ }
+ exec_script("../../vs_toolchain.py",
+ [
+ "copy_dlls",
+ rebase_path(root_build_dir),
+ configuration_name,
+ target_cpu,
+ ])
+}
+
+if (host_os == "win") {
+ clang_cl = "clang-cl.exe"
+} else {
+ clang_cl = "clang-cl"
+}
+
+# Parameters:
+# environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+# target_cpu: target_cpu to pass as a build arg
+# current_os: current_os to pass as a build arg
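+#
+# A minimal invocation sketch (the toolchain name and cl value below are
+# hypothetical, shown only to illustrate the parameters described above):
+#
+#   msvc_toolchain("example_x64") {
+#     environment = "environment.x64"
+#     cl = "cl.exe"
+#     toolchain_args = {
+#       target_cpu = "x64"
+#       current_os = "win"
+#       is_clang = false
+#     }
+#   }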
+template("msvc_toolchain") {
+ toolchain(target_name) {
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ assert(defined(invoker.toolchain_args))
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+
+ # This value needs to be passed through unchanged.
+ host_toolchain = host_toolchain
+ }
+
+ # Make these apply to all tools below.
+ lib_switch = ""
+ lib_dir_switch = "/LIBPATH:"
+
+ # Object files go in this directory.
+ object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+ env = invoker.environment
+
+ cl = invoker.cl
+
+ if (use_lld) {
+ if (host_os == "win") {
+ lld_link = "lld-link.exe"
+ } else {
+ lld_link = "lld-link"
+ }
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+
+ # lld-link includes a replacement for lib.exe that can produce thin
+ # archives and understands bitcode (for lto builds).
+ link = "$prefix/$lld_link"
+ if (host_os == "win") {
+ # Flip the slashes so that copy/paste of the commands works.
+ link = string_replace(link, "/", "\\")
+ }
+ lib = "$link /lib"
+ if (host_os != "win") {
+ # See comment adding --rsp-quoting to $cl above for more information.
+ link = "$link --rsp-quoting=posix"
+ }
+ } else {
+ lib = "lib.exe"
+ link = "link.exe"
+ }
+
+ # If possible, pass system includes as flags to the compiler. When that's
+ # not possible, load a full environment file (containing %INCLUDE% and
+ # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
+ # passing in a list of include directories isn't enough.
+ if (defined(invoker.sys_include_flags)) {
+ env_wrapper = ""
+ sys_include_flags =
+ "${invoker.sys_include_flags} " # Note trailing space.
+ } else {
+ # clang-cl doesn't need this env hoop, so omit it there.
+ assert((defined(toolchain_args.is_clang) && !toolchain_args.is_clang) ||
+ !is_clang)
+ env_wrapper = "ninja -t msvc -e $env -- " # Note trailing space.
+ sys_include_flags = ""
+ }
+
+    # ninja only supports -t msvc on Windows, and lld doesn't depend on
+    # mt.exe being in PATH on non-Windows, so the wrapper isn't needed there
+    # anyway.
+ if (host_os != "win") {
+ linker_wrapper = ""
+ sys_lib_flags = "${invoker.sys_lib_flags} " # Note trailing space.
+ } else if (defined(invoker.sys_lib_flags)) {
+      # Invoke ninja as the wrapper instead of the Python tool wrapper,
+      # because spawning Python has a higher CPU cost than spawning ninja, and
+      # the Python wrapper is only needed to work around link.exe problems.
+ # TODO(thakis): Remove wrapper once lld-link can merge manifests without
+ # relying on mt.exe being in %PATH% on Windows, https://crbug.com/872740
+ linker_wrapper = "ninja -t msvc -e $env -- " # Note trailing space.
+ sys_lib_flags = "${invoker.sys_lib_flags} " # Note trailing space.
+ } else {
+ # Note trailing space:
+ linker_wrapper =
+ "$python_path $tool_wrapper_path link-wrapper $env False "
+ sys_lib_flags = ""
+ }
+
+ if (defined(toolchain_args.use_clang_coverage)) {
+ toolchain_use_clang_coverage = toolchain_args.use_clang_coverage
+ } else {
+ toolchain_use_clang_coverage = use_clang_coverage
+ }
+
+ if (toolchain_use_clang_coverage) {
+ assert(toolchain_args.is_clang,
+ "use_clang_coverage should only be used with Clang")
+ if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+ toolchain_coverage_instrumentation_input_file =
+ toolchain_args.coverage_instrumentation_input_file
+ } else {
+ toolchain_coverage_instrumentation_input_file =
+ coverage_instrumentation_input_file
+ }
+
+ coverage_wrapper =
+ rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+ root_build_dir)
+ coverage_wrapper = coverage_wrapper + " --target-os=" + target_os
+ if (toolchain_coverage_instrumentation_input_file != "") {
+ coverage_wrapper =
+ coverage_wrapper + " --files-to-instrument=" +
+ rebase_path(toolchain_coverage_instrumentation_input_file,
+ root_build_dir)
+ }
+ coverage_wrapper = "$python_path " + coverage_wrapper + " "
+ } else {
+ coverage_wrapper = ""
+ }
+
+ # Disabled with cc_wrapper because of https://github.com/mozilla/sccache/issues/1013
+ if (toolchain_args.is_clang && cc_wrapper == "") {
+ # This flag omits system includes from /showIncludes output, to reduce the
+ # amount of data to parse and store in .ninja_deps. We do this on non-Windows too,
+ # and already make sure rebuilds after win sdk / libc++ / clang header updates happen via
+ # changing commandline flags.
+ show_includes = "/showIncludes:user"
+ } else {
+ show_includes = "/showIncludes"
+ }
+
+ tool("cc") {
+ precompiled_header_type = "msvc"
+ pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+ # Label names may have spaces in them so the pdbname must be quoted. The
+ # source and output don't need to be quoted because GN knows they're a
+ # full file name and will quote automatically when necessary.
+ depsformat = "msvc"
+ description = "CC {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+ # comes immediately after /c.
+ command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /Fo{{output}} /Fd\"$pdbname\""
+ }
+
+ tool("cxx") {
+ precompiled_header_type = "msvc"
+
+ # The PDB name needs to be different between C and C++ compiled files.
+ pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+ # See comment in CC tool about quoting.
+ depsformat = "msvc"
+ description = "CXX {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+ # comes immediately after /c.
+ command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /Fo{{output}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /Fd\"$pdbname\""
+ }
+
+ tool("rc") {
+ command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+ depsformat = "msvc"
+ outputs = [ "$object_subdir/{{source_name_part}}.res" ]
+ description = "RC {{output}}"
+ }
+
+ tool("asm") {
+ is_msvc_assembler = true
+
+ if (toolchain_args.target_cpu == "arm64") {
+ if (is_clang) {
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ ml = "${clang_prefix}${prefix}/${clang_cl} --target=arm64-windows"
+ if (host_os == "win") {
+ # Flip the slashes so that copy/paste of the command works.
+ ml = string_replace(ml, "/", "\\")
+ }
+ ml += " -c -o{{output}}"
+ is_msvc_assembler = false
+ } else {
+ # Only affects Arm builds with is_clang = false, implemented for building
+ # V8 for Windows on Arm systems with the MSVC toolchain.
+ ml = "armasm64.exe"
+ }
+ } else {
+ # x86/x64 builds always use the MSVC assembler.
+ if (toolchain_args.target_cpu == "x64") {
+ ml = "ml64.exe"
+ } else {
+ ml = "ml.exe"
+ }
+ }
+
+ if (is_msvc_assembler) {
+ ml += " /nologo /Fo{{output}}"
+
+ # Suppress final-stage linking on x64/x86 builds. (Armasm64 does not
+ # require /c because it doesn't support linking.)
+ if (toolchain_args.target_cpu != "arm64") {
+ ml += " /c"
+ }
+ if (use_lld) {
+ # Wrap ml(64).exe with a script that makes its output deterministic.
+ # It's lld only because the script zaps obj Timestamp which
+ # link.exe /incremental looks at.
+ # TODO(https://crbug.com/762167): If we end up writing an llvm-ml64,
+ # make sure it has deterministic output (maybe with /Brepro or
+ # something) and remove this wrapper.
+ ml_py = rebase_path("ml.py", root_build_dir)
+ ml = "$python_path $ml_py $ml"
+ }
+ }
+ if (toolchain_args.target_cpu != "arm64" || is_clang) {
+ command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}"
+ } else {
+ # armasm64.exe does not support definitions passed via the command line.
+ # (Fortunately, they're not needed for compiling the V8 snapshot, which
+ # is the only time this assembler is required.)
+ command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}"
+ }
+
+ description = "ASM {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+ }
+
+ if (toolchain_has_rust) {
+ tool("rust_staticlib") {
+ rust_outfile = "{{target_out_dir}}/{{crate_name}}.lib"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_rlib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.rlib"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ if (rustc_can_link) {
+ tool("rust_bin") {
+ rust_outfile = "{{root_out_dir}}/{{crate_name}}.exe"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_cdylib") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dll"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+
+ tool("rust_macro") {
+ rust_outfile = "{{target_out_dir}}/lib{{crate_name}}.dll"
+ depfile = "{{crate_name}}.d"
+ command = "${rust_prefix}/rustc $rustc_common_args --emit=dep-info={{target_out_dir}}/$depfile,link -o $rust_outfile"
+ description = "RUST $rust_outfile"
+ outputs = [ rust_outfile ]
+ }
+ }
+ }
+
+ tool("alink") {
+ rspfile = "{{output}}.rsp"
+ command = "$linker_wrapper$lib /OUT:{{output}} /nologo ${sys_lib_flags}{{arflags}} @$rspfile"
+ description = "LIB {{output}}"
+ outputs = [
+ # Ignore {{output_extension}} and always use .lib, there's no reason to
+ # allow targets to override this extension on Windows.
+ "{{output_dir}}/{{target_output_name}}.lib",
+ ]
+ default_output_extension = ".lib"
+ default_output_dir = "{{target_out_dir}}"
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{inputs_newline}}"
+ }
+
+ tool("solink") {
+ # E.g. "foo.dll":
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ libname = "${dllname}.lib" # e.g. foo.dll.lib
+ pdbname = "${dllname}.pdb"
+ rspfile = "${dllname}.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK(DLL) {{output}}"
+ outputs = [
+ dllname,
+ libname,
+ pdbname,
+ ]
+ link_output = libname
+ depend_output = libname
+ runtime_outputs = [
+ dllname,
+ pdbname,
+ ]
+
+      # Since the above command only updates the .lib file when it changes, ask
+ # Ninja to check if the timestamp actually changed to know if downstream
+ # dependencies should be recompiled.
+ restat = true
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content =
+ "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
+ }
+
+ tool("solink_module") {
+ # E.g. "foo.dll":
+ dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ pdbname = "${dllname}.pdb"
+ rspfile = "${dllname}.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/DLL /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".dll"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK_MODULE(DLL) {{output}}"
+ outputs = [
+ dllname,
+ pdbname,
+ ]
+ runtime_outputs = outputs
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content =
+ "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
+ }
+
+ tool("link") {
+ exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ pdbname = "$exename.pdb"
+ rspfile = "$exename.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ command = "$linker_wrapper$link /OUT:$exename /nologo ${sys_lib_flags} /PDB:$pdbname @$rspfile"
+
+ default_output_extension = ".exe"
+ default_output_dir = "{{root_out_dir}}"
+ description = "LINK {{output}}"
+ outputs = [
+ exename,
+ pdbname,
+ ]
+ runtime_outputs = outputs
+
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content =
+ "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}} {{rlibs}}"
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ }
+}
+
+template("win_toolchains") {
+ assert(defined(invoker.toolchain_arch))
+ toolchain_arch = invoker.toolchain_arch
+
+ win_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ "win",
+ toolchain_arch,
+ "environment." + toolchain_arch,
+ ],
+ "scope")
+
+ # The toolchain using MSVC only makes sense when not doing cross builds.
+ # Chromium exclusively uses the win_clang_ toolchain below, but V8 and
+ # WebRTC still use this MSVC toolchain in some cases.
+ if (host_os == "win") {
+ msvc_toolchain(target_name) {
+ environment = "environment." + toolchain_arch
+ cl = "${goma_prefix}\"${win_toolchain_data.vc_bin_dir}/cl.exe\""
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = false
+ use_clang_coverage = false
+ current_os = "win"
+ target_cpu = "arm64"
+ }
+ }
+ }
+
+ msvc_toolchain("win_clang_" + target_name) {
+ environment = "environment." + toolchain_arch
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cl = "${clang_prefix}$prefix/${clang_cl}"
+ _clang_lib_dir =
+ rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows",
+ root_build_dir)
+ if (host_os == "win") {
+ # Flip the slashes so that copy/paste of the command works.
+ cl = string_replace(cl, "/", "\\")
+
+ # And to match the other -libpath flags.
+ _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\")
+ }
+
+ sys_include_flags = "${win_toolchain_data.include_flags_imsvc}"
+ sys_lib_flags =
+ "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_flags}"
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = true
+ current_os = "win"
+ target_cpu = "arm64"
+ }
+ }
+}
+
+if (target_cpu == "x86" || target_cpu == "x64") {
+ win_toolchains("x86") {
+ toolchain_arch = "x86"
+ }
+ win_toolchains("x64") {
+ toolchain_arch = "x64"
+ }
+}
+
+if (target_cpu == "arm64") {
+ win_toolchains("arm64") {
+ toolchain_arch = "arm64"
+ }
+ win_toolchains(host_cpu) {
+ toolchain_arch = host_cpu
+ }
+}
+
+# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
+# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
+# The only reason it's a separate toolchain is so that it can force
+# is_component_build to false in the toolchain_args() block, because
+# building nacl64.exe in component style does not work.
+win_toolchains("nacl_win64") {
+ toolchain_arch = "x64"
+ toolchain_args = {
+ is_component_build = false
+ }
+}
+
+# WinUWP toolchains. Only define these when targeting them.
+
+if (target_os == "winuwp") {
+ assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
+ target_cpu == "arm64")
+ store_cpu_toolchain_data = exec_script("setup_toolchain.py",
+ [
+ visual_studio_path,
+ windows_sdk_path,
+ visual_studio_runtime_dirs,
+ target_os,
+ target_cpu,
+ "environment.store_" + target_cpu,
+ ],
+ "scope")
+
+ msvc_toolchain("uwp_" + target_cpu) {
+ environment = "environment.store_" + target_cpu
+ cl = "${goma_prefix}\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
+ toolchain_args = {
+ current_os = "winuwp"
+ target_cpu = target_cpu
+ is_clang = false
+ }
+ }
+}
diff --git a/third_party/libwebrtc/build/toolchain/win/midl.gni b/third_party/libwebrtc/build/toolchain/win/midl.gni
new file mode 100644
index 0000000000..ecf02acd3c
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/midl.gni
@@ -0,0 +1,199 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler. The generated
+# source code will be compiled and linked into targets that depend on this.
+#
+# Parameters
+#
+# sources
+#       List of .idl files to process.
+#
+# header_file (optional)
+# File name of generated header file. Defaults to the basename of the
+# source idl file with a .h extension.
+#
+# out_dir (optional)
+# Directory to write the generated files to. Defaults to target_gen_dir.
+#
+# generated_dir (optional)
+# Directory where generated files were previously persisted.
+# Defaults to third_party\win_build_output\midl\|out_dir|.
+#
+# dynamic_guids (optional)
+# If the GUIDs are not constant across builds, the current GUID
+# substitutions.
+# |dynamic_guids| is of the form:
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791="
+# "3d852661-c795-4d20-9b95-5561e9a1d2d9,"
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B="
+# "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+# See midl.py for more details.
+#
+# writes_tlb (optional)
+# Whether a .tlb file should be added to outputs. Defaults to false.
+#
+#   writes_proxy (optional)
+# Whether a _p.c file should be added to outputs. Defaults to true.
+#
+#   writes_dlldata (optional)
+# Whether a .dlldata.c file should be added to outputs. Defaults to true.
+#
+# deps (optional)
+#
+# defines (optional)
+# Build time defines to be passed to midl.exe as /D parameter.
+#
+# visibility (optional)
+
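+# A minimal usage sketch (the target name, .idl file, and define below are
+# hypothetical, chosen only to illustrate the parameters documented above):
+#
+#   midl("example_idl") {
+#     sources = [ "example.idl" ]
+#     writes_tlb = true
+#     defines = [ "EXAMPLE_DEFINE" ]
+#   }
+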
+template("midl") {
+ action_name = "${target_name}_idl_action"
+ source_set_name = target_name
+
+ assert(defined(invoker.sources), "Source must be defined for $target_name")
+
+ if (defined(invoker.out_dir)) {
+ out_dir = invoker.out_dir
+ } else {
+ out_dir = target_gen_dir
+ }
+
+ if (defined(invoker.generated_dir)) {
+ generated_dir = rebase_path(invoker.generated_dir)
+ } else {
+ # midl.py expects 'gen' to be replaced with 'midl'.
+ generated_dir = rebase_path("//third_party/win_build_output") + "/midl/" +
+ rebase_path(out_dir, root_gen_dir)
+ }
+
+ if (defined(invoker.dynamic_guids)) {
+ dynamic_guids = invoker.dynamic_guids
+ } else {
+ dynamic_guids = "none"
+ }
+
+ if (defined(invoker.header_file)) {
+ header_file = invoker.header_file
+ } else {
+ header_file = "{{source_name_part}}.h"
+ }
+
+ if (defined(invoker.writes_tlb)) {
+ writes_tlb = invoker.writes_tlb
+ } else {
+ writes_tlb = false
+ }
+
+ if (defined(invoker.writes_proxy)) {
+ writes_proxy = invoker.writes_proxy
+ } else {
+ writes_proxy = true
+ }
+
+ if (defined(invoker.writes_dlldata)) {
+ writes_dlldata = invoker.writes_dlldata
+ } else {
+ writes_dlldata = true
+ }
+
+ if (writes_tlb) {
+ type_library_file = "{{source_name_part}}.tlb"
+ } else {
+ type_library_file = "none"
+ }
+
+ if (writes_dlldata) {
+ dlldata_file = "{{source_name_part}}.dlldata.c"
+ } else {
+ dlldata_file = "none"
+ }
+
+ if (writes_proxy) {
+ proxy_file = "{{source_name_part}}_p.c"
+ } else {
+ proxy_file = "none"
+ }
+
+ interface_identifier_file = "{{source_name_part}}_i.c"
+
+ action_foreach(action_name) {
+ visibility = [ ":$source_set_name" ]
+ script = "//build/toolchain/win/midl.py"
+
+ sources = invoker.sources
+
+ outputs = [
+ "$out_dir/$header_file",
+ "$out_dir/$interface_identifier_file",
+ ]
+
+    # These files are only added to outputs if the invoker so desires, as
+    # they are not always generated depending on the content of the input idl
+    # file.
+ if (writes_tlb) {
+ outputs += [ "$out_dir/$type_library_file" ]
+ }
+ if (writes_dlldata) {
+ outputs += [ "$out_dir/$dlldata_file" ]
+ }
+ if (writes_proxy) {
+ outputs += [ "$out_dir/$proxy_file" ]
+ }
+
+ if (target_cpu == "x86") {
+ win_tool_arch = "environment.x86"
+ idl_target_platform = "win32"
+ } else if (target_cpu == "x64") {
+ win_tool_arch = "environment.x64"
+ idl_target_platform = "x64"
+ } else if (target_cpu == "arm64") {
+ win_tool_arch = "environment.arm64"
+ idl_target_platform = "arm64"
+ } else {
+ assert(false, "Need environment for this arch")
+ }
+
+ args = [
+ win_tool_arch,
+ generated_dir,
+ rebase_path(out_dir, root_build_dir),
+ dynamic_guids,
+ type_library_file,
+ header_file,
+ dlldata_file,
+ interface_identifier_file,
+ proxy_file,
+ rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe",
+ root_build_dir),
+ "{{source}}",
+ "/char",
+ "signed",
+ "/env",
+ idl_target_platform,
+ "/Oicf",
+ ]
+
+ if (defined(invoker.defines)) {
+ foreach(define, invoker.defines) {
+ args += [ "/D" + define ]
+ }
+ }
+
+ forward_variables_from(invoker, [ "deps" ])
+ }
+
+ source_set(target_name) {
+ forward_variables_from(invoker, [ "visibility" ])
+
+ # We only compile the IID files from the IDL tool rather than all outputs.
+ sources = process_file_template(invoker.sources,
+ [ "$out_dir/$interface_identifier_file" ])
+
+ public_deps = [ ":$action_name" ]
+ }
+}
diff --git a/third_party/libwebrtc/build/toolchain/win/midl.py b/third_party/libwebrtc/build/toolchain/win/midl.py
new file mode 100644
index 0000000000..cfb4220133
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/midl.py
@@ -0,0 +1,487 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import division
+from __future__ import print_function
+
+import array
+import difflib
+import distutils.dir_util
+import filecmp
+import io
+import operator
+import os
+import posixpath
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+import uuid
+
+from functools import reduce
+
+
+def ZapTimestamp(filename):
+ contents = open(filename, 'rb').read()
+ # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
+ # outputs, but using the local timezone. To make the output timezone-
+ # independent, replace that date with a fixed string of the same length.
+ # Also blank out the minor version number.
+ if filename.endswith('.tlb'):
+ # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
+ # a fairly complete description of the .tlb binary format.
+ # TLB files start with a 54 byte header. Offset 0x20 stores how many types
+ # are defined in the file, and the header is followed by that many uint32s.
+ # After that, 15 section headers appear. Each section header is 16 bytes,
+ # starting with offset and length uint32s.
+ # Section 12 in the file contains custom() data. custom() data has a type
+ # (int, string, etc). Each custom data chunk starts with a uint16_t
+ # describing its type. Type 8 is string data, consisting of a uint32_t
+ # len, followed by that many data bytes, followed by 'W' bytes to pad to a
+ # 4 byte boundary. Type 0x13 is uint32 data, followed by 4 data bytes,
+ # followed by two 'W' to pad to a 4 byte boundary.
+ # The custom block always starts with one string containing "Created by
+ # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
+ # followed by another uint32 containing the MIDL compiler version (e.g.
+ # 0x0801026e for v8.1.622 -- 0x26e == 622). These 3 fields take 0x54 bytes.
+ # There might be more custom data after that, but these 3 blocks are always
+ # there for file-level metadata.
+ # All data is little-endian in the file.
+ assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
+ ntypes, = struct.unpack_from('<I', contents, 0x20)
+ custom_off, custom_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 11*16)
+ assert custom_len >= 0x54
+ # First: Type string (0x8), followed by 0x3e characters.
+ assert contents[custom_off:custom_off + 6] == b'\x08\x00\x3e\x00\x00\x00'
+ assert re.match(
+ br'Created by MIDL version 8\.\d\d\.\d{4} '
+ br'at ... Jan 1. ..:..:.. 2038\n',
+ contents[custom_off + 6:custom_off + 6 + 0x3e])
+ # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
+ assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
+ b'\x13\x00\xff\xff\xff\x7f\x57\x57'
+ # Third: Type uint32 (0x13) storing MIDL compiler version.
+ assert contents[custom_off + 6 + 0x3e + 8:custom_off + 6 + 0x3e + 8 +
+ 2] == b'\x13\x00'
+    # Replace the "Created by" string with a fixed string, and always use a
+    # fixed MIDL version of 8.1.622.
+ contents = (
+ contents[0:custom_off + 6] +
+ b'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' +
+ # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW
+ b'\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' +
+ contents[custom_off + 0x54:])
+ else:
+ contents = re.sub(
+ br'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n'
+ br'/\* at ... Jan 1. ..:..:.. 2038',
+ br'File created by MIDL compiler version 8.xx.xxxx */\r\n'
+ br'/* at a redacted point in time', contents)
+ contents = re.sub(
+ br' Oicf, W1, Zp8, env=(.....) \(32b run\), '
+ br'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}',
+ br' Oicf, W1, Zp8, env=\1 (32b run), target_arch=\2 8.xx.xxxx',
+ contents)
+ # TODO(thakis): If we need more hacks than these, try to verify checked-in
+ # outputs when we're using the hermetic toolchain.
+  # midl.exe older than 8.1.622 omits '//' after #endif, fix that:
+ contents = contents.replace(b'#endif !_MIDL_USE_GUIDDEF_',
+ b'#endif // !_MIDL_USE_GUIDDEF_')
+ # midl.exe puts the midl version into code in one place. To have
+ # predictable output, lie about the midl version if it's not 8.1.622.
+ # This is unfortunate, but remember that there's beauty too in imperfection.
+ contents = contents.replace(b'0x801026c, /* MIDL Version 8.1.620 */',
+ b'0x801026e, /* MIDL Version 8.1.622 */')
+ open(filename, 'wb').write(contents)
+
+
+def get_tlb_contents(tlb_file):
+ # See ZapTimestamp() for a short overview of the .tlb format.
+ contents = open(tlb_file, 'rb').read()
+ assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
+ ntypes, = struct.unpack_from('<I', contents, 0x20)
+ type_off, type_len = struct.unpack_from('<II', contents, 0x54 + 4*ntypes)
+
+ guid_off, guid_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 5*16)
+ assert guid_len % 24 == 0
+
+ contents = array.array('B', contents)
+
+ return contents, ntypes, type_off, guid_off, guid_len
+
+
+def recreate_guid_hashtable(contents, ntypes, guid_off, guid_len):
+ # This function is called after changing guids in section 6 (the "guid"
+ # section). This function recreates the GUID hashtable in section 5. Since the
+ # hash table uses chaining, it's easiest to recompute it from scratch rather
+ # than trying to patch it up.
+ hashtab = [0xffffffff] * (0x80 // 4)
+ for guidind in range(guid_off, guid_off + guid_len, 24):
+ guidbytes, typeoff, nextguid = struct.unpack_from(
+ '<16sII', contents, guidind)
+ words = struct.unpack('<8H', guidbytes)
+ # midl seems to use the following simple hash function for GUIDs:
+ guidhash = reduce(operator.xor, [w for w in words]) % (0x80 // 4)
+ nextguid = hashtab[guidhash]
+ struct.pack_into('<I', contents, guidind + 0x14, nextguid)
+ hashtab[guidhash] = guidind - guid_off
+ hash_off, hash_len = struct.unpack_from(
+ '<II', contents, 0x54 + 4*ntypes + 4*16)
+ for i, hashval in enumerate(hashtab):
+ struct.pack_into('<I', contents, hash_off + 4*i, hashval)
+
+
+def overwrite_guids_h(h_file, dynamic_guids):
+ contents = open(h_file, 'rb').read()
+ for key in dynamic_guids:
+ contents = re.sub(key, dynamic_guids[key], contents, flags=re.I)
+ open(h_file, 'wb').write(contents)
+
+
+def get_uuid_format(guid, prefix):
+ formatted_uuid = b'0x%s,0x%s,0x%s,' % (guid[0:8], guid[9:13], guid[14:18])
+ formatted_uuid += b'%s0x%s,0x%s' % (prefix, guid[19:21], guid[21:23])
+ for i in range(24, len(guid), 2):
+ formatted_uuid += b',0x' + guid[i:i + 2]
+ return formatted_uuid
+
+
+def get_uuid_format_iid_file(guid):
+ # Convert from "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83" to
+ # 0xD0E1CACC,0xC63C,0x4192,0x94,0xAB,0xBF,0x8E,0xAD,0x0E,0x3B,0x83.
+ return get_uuid_format(guid, b'')
+
+
+def overwrite_guids_iid(iid_file, dynamic_guids):
+ contents = open(iid_file, 'rb').read()
+ for key in dynamic_guids:
+ contents = re.sub(get_uuid_format_iid_file(key),
+ get_uuid_format_iid_file(dynamic_guids[key]),
+ contents,
+ flags=re.I)
+ open(iid_file, 'wb').write(contents)
+
+
+def get_uuid_format_proxy_file(guid):
+ # Convert from "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83" to
+ # {0xD0E1CACC,0xC63C,0x4192,{0x94,0xAB,0xBF,0x8E,0xAD,0x0E,0x3B,0x83}}.
+ return get_uuid_format(guid, b'{')
+
+
+def overwrite_guids_proxy(proxy_file, dynamic_guids):
+ contents = open(proxy_file, 'rb').read()
+ for key in dynamic_guids:
+ contents = re.sub(get_uuid_format_proxy_file(key),
+ get_uuid_format_proxy_file(dynamic_guids[key]),
+ contents,
+ flags=re.I)
+ open(proxy_file, 'wb').write(contents)
+
+
+def getguid(contents, offset):
+ # Returns a guid string of the form "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+ g0, g1, g2, g3 = struct.unpack_from('<IHH8s', contents, offset)
+ g3 = b''.join([b'%02X' % g for g in bytearray(g3)])
+ return b'%08X-%04X-%04X-%s-%s' % (g0, g1, g2, g3[0:4], g3[4:])
+
+
+def setguid(contents, offset, guid):
+ guid = uuid.UUID(guid.decode('utf-8'))
+ struct.pack_into('<IHH8s', contents, offset,
+ *(guid.fields[0:3] + (guid.bytes[8:], )))
+
+
+def overwrite_guids_tlb(tlb_file, dynamic_guids):
+ contents, ntypes, type_off, guid_off, guid_len = get_tlb_contents(tlb_file)
+
+ for i in range(0, guid_len, 24):
+ current_guid = getguid(contents, guid_off + i)
+ for key in dynamic_guids:
+ if key.lower() == current_guid.lower():
+ setguid(contents, guid_off + i, dynamic_guids[key])
+
+ recreate_guid_hashtable(contents, ntypes, guid_off, guid_len)
+ open(tlb_file, 'wb').write(contents)
+
+
+# Handle multiple guid substitutions, where |dynamic_guids| is of the form
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791="
+# "3d852661-c795-4d20-9b95-5561e9a1d2d9,"
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B="
+# "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+#
+# Before specifying |dynamic_guids| in the build, the IDL file is first compiled
+# with "158428a4-6014-4978-83ba-9fad0dabe791" and
+# "63B8FFB1-5314-48C9-9C57-93EC8BC6184B". These are the "replaceable" guids,
+# i.e., guids that can be replaced in future builds. The resulting MIDL outputs
+# are copied over to src\third_party\win_build_output\.
+#
+# Then, in the future, any changes to these guids can be accomplished by
+# providing |dynamic_guids| of the format above in the build file. These
+# "dynamic" guid changes by themselves will not require the MIDL compiler and
+# therefore will not require copying output over to
+# src\third_party\win_build_output\.
+#
+# The pre-generated src\third_party\win_build_output\ files are used for
+# cross-compiling on other platforms, since the MIDL compiler is Windows-only.
+def overwrite_guids(h_file, iid_file, proxy_file, tlb_file, dynamic_guids):
+ # Fix up GUIDs in .h, _i.c, _p.c, and .tlb.
+ overwrite_guids_h(h_file, dynamic_guids)
+ overwrite_guids_iid(iid_file, dynamic_guids)
+ overwrite_guids_proxy(proxy_file, dynamic_guids)
+ if tlb_file:
+ overwrite_guids_tlb(tlb_file, dynamic_guids)
+
+
+# This function removes all occurrences of 'PLACEHOLDER-GUID-' from the
+# template, and if |dynamic_guids| is specified, also replaces the guids within
+# the file. Finally, it writes the resultant output to the |idl| file.
+def generate_idl_from_template(idl_template, dynamic_guids, idl):
+ contents = open(idl_template, 'rb').read()
+ contents = re.sub(b'PLACEHOLDER-GUID-', b'', contents, flags=re.I)
+ if dynamic_guids:
+ for key in dynamic_guids:
+ contents = re.sub(key, dynamic_guids[key], contents, flags=re.I)
+ open(idl, 'wb').write(contents)
+
+
+# This function runs the MIDL compiler with the provided arguments. It creates
+# and returns a tuple of |0,midl_output_dir| on success.
+def run_midl(args, env_dict):
+ midl_output_dir = tempfile.mkdtemp()
+ delete_midl_output_dir = True
+
+ try:
+ popen = subprocess.Popen(args + ['/out', midl_output_dir],
+ shell=True,
+ universal_newlines=True,
+ env=env_dict,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ if popen.returncode != 0:
+ return popen.returncode, midl_output_dir
+
+ # Filter junk out of stdout, and write filtered versions. Output we want
+ # to filter is pairs of lines that look like this:
+ # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+ # objidl.idl
+ lines = out.splitlines()
+ prefixes = ('Processing ', '64 bit Processing ')
+ processing = set(
+ os.path.basename(x) for x in lines if x.startswith(prefixes))
+ for line in lines:
+ if not line.startswith(prefixes) and line not in processing:
+ print(line)
+
+ for f in os.listdir(midl_output_dir):
+ ZapTimestamp(os.path.join(midl_output_dir, f))
+
+ delete_midl_output_dir = False
+ finally:
+ if os.path.exists(midl_output_dir) and delete_midl_output_dir:
+ shutil.rmtree(midl_output_dir)
+
+ return 0, midl_output_dir
+
+
+# This function adds support for dynamic generation of guids: when values are
+# specified as 'uuid5:name', this function will substitute the values with
+# generated dynamic guids using the uuid5 function. The uuid5 function generates
+# a guid based on the SHA-1 hash of a namespace identifier (which is the guid
+# that comes after 'PLACEHOLDER-GUID-') and a name (which is a string, such as a
+# version string "87.1.2.3").
+#
+# For instance, when |dynamic_guid| is of the form:
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791=uuid5:88.0.4307.0
+# ,"
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B=uuid5:88.0.4307.0
+# "
+#
+# "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791" would be substituted
+# with uuid5("158428a4-6014-4978-83ba-9fad0dabe791", "88.0.4307.0"), which is
+# "64700170-AD80-5DE3-924E-2F39D862CFD5". And
+# "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B" would be
+# substituted with uuid5("63B8FFB1-5314-48C9-9C57-93EC8BC6184B", "88.0.4307.0"),
+# which is "7B6E7538-3C38-5565-BC92-42BCEE268D76".
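+#
+# A minimal sketch of using the function below, reusing the example values
+# from the comment above (illustrative only, not executed by this script):
+#
+#   guids = {'158428a4-6014-4978-83ba-9fad0dabe791': 'uuid5:88.0.4307.0'}
+#   uuid5_substitutions(guids)
+#   # guids['158428a4-6014-4978-83ba-9fad0dabe791'] is now
+#   # '64700170-AD80-5DE3-924E-2F39D862CFD5'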
+def uuid5_substitutions(dynamic_guids):
+ for key, value in dynamic_guids.items():
+ if value.startswith('uuid5:'):
+ name = value.split('uuid5:', 1)[1]
+ assert name
+ dynamic_guids[key] = str(uuid.uuid5(uuid.UUID(key), name)).upper()
+
+
+def main(arch, gendir, outdir, dynamic_guids, tlb, h, dlldata, iid, proxy,
+ clang, idl, *flags):
+ # Copy checked-in outputs to final location.
+ source = gendir
+ if os.path.isdir(os.path.join(source, os.path.basename(idl))):
+ source = os.path.join(source, os.path.basename(idl))
+ source = os.path.join(source, arch.split('.')[1]) # Append 'x86' or 'x64'.
+ source = os.path.normpath(source)
+
+ source_exists = True
+ if not os.path.isdir(source):
+ source_exists = False
+ if sys.platform != 'win32':
+ print('Directory %s needs to be populated from Windows first' % source)
+ return 1
+
+ # This is a brand new IDL file that does not have outputs under
+ # third_party\win_build_output\midl. We create an empty directory for now.
+ os.makedirs(source)
+
+ common_files = [h, iid]
+ if tlb != 'none':
+ # Not all projects use tlb files.
+ common_files += [tlb]
+ else:
+ tlb = None
+
+ if dlldata != 'none':
+    # Not all projects use dlldata files.
+ common_files += [dlldata]
+ else:
+ dlldata = None
+
+ if proxy != 'none':
+ # Not all projects use proxy files.
+ common_files += [proxy]
+ else:
+ proxy = None
+
+ for source_file in common_files:
+ file_path = os.path.join(source, source_file)
+ if not os.path.isfile(file_path):
+ source_exists = False
+ if sys.platform != 'win32':
+ print('File %s needs to be generated from Windows first' % file_path)
+ return 1
+
+ # Either this is a brand new IDL file that does not have outputs under
+ # third_party\win_build_output\midl or the file is (unexpectedly) missing.
+ # We create an empty file for now. The rest of the machinery below will
+ # then generate the correctly populated file using the MIDL compiler and
+ # instruct the developer to copy that file under
+ # third_party\win_build_output\midl.
+ open(file_path, 'wb').close()
+ shutil.copy(file_path, outdir)
+
+ if dynamic_guids != 'none':
+ assert '=' in dynamic_guids
+ if dynamic_guids.startswith("ignore_proxy_stub,"):
+ # TODO(ganesh): The custom proxy/stub file ("_p.c") is not generated
+ # correctly for dynamic IIDs (but correctly if there are only dynamic
+ # CLSIDs). The proxy/stub lookup functions generated by MIDL.exe within
+ # "_p.c" rely on a sorted set of vtable lists, which we are not currently
+ # regenerating. At the moment, no project in Chromium that uses dynamic
+ # IIDs is relying on the custom proxy/stub file. So for now, if
+ # |dynamic_guids| is prefixed with "ignore_proxy_stub,", we exclude the
+ # custom proxy/stub file from the directory comparisons.
+ common_files.remove(proxy)
+ dynamic_guids = dynamic_guids.split("ignore_proxy_stub,", 1)[1]
+ dynamic_guids = re.sub('PLACEHOLDER-GUID-', '', dynamic_guids, flags=re.I)
+ dynamic_guids = dynamic_guids.split(',')
+ dynamic_guids = dict(s.split('=') for s in dynamic_guids)
+ uuid5_substitutions(dynamic_guids)
+ dynamic_guids_bytes = {
+ k.encode('utf-8'): v.encode('utf-8')
+ for k, v in dynamic_guids.items()
+ }
+ if source_exists:
+ overwrite_guids(*(os.path.join(outdir, file) if file else None
+ for file in [h, iid, proxy, tlb]),
+ dynamic_guids=dynamic_guids_bytes)
+ else:
+ dynamic_guids = None
+
+ # On non-Windows, that's all we can do.
+ if sys.platform != 'win32':
+ return 0
+
+ idl_template = None
+ if dynamic_guids:
+ idl_template = idl
+
+ # posixpath is used here to keep the MIDL-generated files with a uniform
+ # separator of '/' instead of mixed '/' and '\\'.
+ idl = posixpath.join(
+ outdir,
+ os.path.splitext(os.path.basename(idl_template))[0] + '.idl')
+
+ # |idl_template| can contain one or more occurrences of guids that are
+ # substituted with |dynamic_guids|, and then MIDL is run on the substituted
+ # IDL file.
+ generate_idl_from_template(idl_template, dynamic_guids_bytes, idl)
+
+ # On Windows, run midl.exe on the input and check that its outputs are
+ # identical to the checked-in outputs (after replacing guids if
+ # |dynamic_guids| is specified).
+
+ # Read the environment block from the file. This is stored in the format used
+ # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+ # trailing vs. separator.
+ env_pairs = open(arch).read()[:-2].split('\0')
+ env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+ # Extract the /D options and send them to the preprocessor.
+ preprocessor_options = '-E -nologo -Wno-nonportable-include-path'
+ preprocessor_options += ''.join(
+ [' ' + flag for flag in flags if flag.startswith('/D')])
+ args = ['midl', '/nologo'] + list(flags) + (['/tlb', tlb] if tlb else []) + [
+ '/h', h
+ ] + (['/dlldata', dlldata] if dlldata else []) + ['/iid', iid] + (
+ ['/proxy', proxy] if proxy else
+ []) + ['/cpp_cmd', clang, '/cpp_opt', preprocessor_options, idl]
+
+ returncode, midl_output_dir = run_midl(args, env_dict)
+ if returncode != 0:
+ return returncode
+
+ # Now compare the output in midl_output_dir to the copied-over outputs.
+ _, mismatch, errors = filecmp.cmpfiles(midl_output_dir, outdir, common_files)
+ assert not errors
+
+ if mismatch:
+ print('midl.exe output different from files in %s, see %s' %
+ (outdir, midl_output_dir))
+ for f in mismatch:
+ if f.endswith('.tlb'): continue
+ fromfile = os.path.join(outdir, f)
+ tofile = os.path.join(midl_output_dir, f)
+ print(''.join(
+ difflib.unified_diff(
+ io.open(fromfile).readlines(),
+ io.open(tofile).readlines(), fromfile, tofile)))
+
+ if dynamic_guids:
+ # |idl_template| can contain one or more occurrences of guids prefixed
+ # with 'PLACEHOLDER-GUID-'. We first remove the extraneous
+ # 'PLACEHOLDER-GUID-' prefix and then run MIDL on the substituted IDL
+ # file.
+ # No guid substitutions are done at this point, because we want to compile
+ # with the placeholder guids and then instruct the user to copy the output
+ # over to |source| which is typically src\third_party\win_build_output\.
+ # In future runs, the placeholder guids in |source| are replaced with the
+ # guids specified in |dynamic_guids|.
+ generate_idl_from_template(idl_template, None, idl)
+ returncode, midl_output_dir = run_midl(args, env_dict)
+ if returncode != 0:
+ return returncode
+
+ print('To rebaseline:')
+ print(r' copy /y %s\* %s' % (midl_output_dir, source))
+ return 1
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(*sys.argv[1:]))
diff --git a/third_party/libwebrtc/build/toolchain/win/ml.py b/third_party/libwebrtc/build/toolchain/win/ml.py
new file mode 100755
index 0000000000..6a1b6e577e
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/ml.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wraps ml.exe or ml64.exe and postprocesses the output to be deterministic.
+Sets timestamp in .obj file to 0, hence incompatible with link.exe /incremental.
+
+Use by prefixing the ml(64).exe invocation with this script:
+ python ml.py ml.exe [args...]"""
+
+import array
+import collections
+import struct
+import subprocess
+import sys
+
+
+class Struct(object):
+ """A thin wrapper around the struct module that returns a namedtuple"""
+ def __init__(self, name, *args):
+ """Pass the name of the return type, and then an interleaved list of
+ format strings as used by the struct module and of field names."""
+ self.fmt = '<' + ''.join(args[0::2])
+ self.type = collections.namedtuple(name, args[1::2])
+
+ def pack_into(self, buffer, offset, data):
+ return struct.pack_into(self.fmt, buffer, offset, *data)
+
+ def unpack_from(self, buffer, offset=0):
+ return self.type(*struct.unpack_from(self.fmt, buffer, offset))
+
+ def size(self):
+ return struct.calcsize(self.fmt)
+
+
+def Subtract(nt, **kwargs):
+ """Subtract(nt, f=2) returns a new namedtuple with 2 subtracted from nt.f"""
+ return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.items()})
+
+
+def MakeDeterministic(objdata):
+ # Takes data produced by ml(64).exe (without any special flags) and
+ # 1. Sets the timestamp to 0
+ # 2. Strips the .debug$S section (which contains an unwanted absolute path)
+
+ # This makes several assumptions about ml's output:
+ # - Section data is in the same order as the corresponding section headers:
+ # section headers preceding the .debug$S section header have their data
+ # preceding the .debug$S section data; likewise for section headers
+ # following the .debug$S section.
+ # - The .debug$S section contains only the absolute path to the obj file and
+ # nothing else, in particular there's only a single entry in the symbol
+ # table referring to the .debug$S section.
+ # - There are no COFF line number entries.
+ # - There's no IMAGE_SYM_CLASS_CLR_TOKEN symbol.
+ # These seem to hold in practice; if they stop holding this script needs to
+ # become smarter.
+
+ objdata = array.array('b', objdata) # Writable, e.g. via struct.pack_into.
+
+ # Read coff header.
+ COFFHEADER = Struct('COFFHEADER',
+ 'H', 'Machine',
+ 'H', 'NumberOfSections',
+ 'I', 'TimeDateStamp',
+ 'I', 'PointerToSymbolTable',
+ 'I', 'NumberOfSymbols',
+
+ 'H', 'SizeOfOptionalHeader',
+ 'H', 'Characteristics')
+ coff_header = COFFHEADER.unpack_from(objdata)
+ assert coff_header.SizeOfOptionalHeader == 0 # Only set for binaries.
+
+ # Read section headers following coff header.
+ SECTIONHEADER = Struct('SECTIONHEADER',
+ '8s', 'Name',
+ 'I', 'VirtualSize',
+ 'I', 'VirtualAddress',
+
+ 'I', 'SizeOfRawData',
+ 'I', 'PointerToRawData',
+ 'I', 'PointerToRelocations',
+ 'I', 'PointerToLineNumbers',
+
+ 'H', 'NumberOfRelocations',
+ 'H', 'NumberOfLineNumbers',
+ 'I', 'Characteristics')
+ section_headers = []
+ debug_section_index = -1
+ for i in range(0, coff_header.NumberOfSections):
+ section_header = SECTIONHEADER.unpack_from(
+ objdata, offset=COFFHEADER.size() + i * SECTIONHEADER.size())
+ assert not section_header[0].startswith(b'/') # Support short names only.
+ section_headers.append(section_header)
+
+ if section_header.Name == b'.debug$S':
+ assert debug_section_index == -1
+ debug_section_index = i
+ assert debug_section_index != -1
+
+ data_start = COFFHEADER.size() + len(section_headers) * SECTIONHEADER.size()
+
+ # Verify the .debug$S section looks like we expect.
+ assert section_headers[debug_section_index].Name == b'.debug$S'
+ assert section_headers[debug_section_index].VirtualSize == 0
+ assert section_headers[debug_section_index].VirtualAddress == 0
+ debug_size = section_headers[debug_section_index].SizeOfRawData
+ debug_offset = section_headers[debug_section_index].PointerToRawData
+ assert section_headers[debug_section_index].PointerToRelocations == 0
+ assert section_headers[debug_section_index].PointerToLineNumbers == 0
+ assert section_headers[debug_section_index].NumberOfRelocations == 0
+ assert section_headers[debug_section_index].NumberOfLineNumbers == 0
+
+ # Make sure sections in front of .debug$S have their data preceding it.
+ for header in section_headers[:debug_section_index]:
+ assert header.PointerToRawData < debug_offset
+ assert header.PointerToRelocations < debug_offset
+ assert header.PointerToLineNumbers < debug_offset
+
+  # Make sure sections after .debug$S have their data following it.
+ for header in section_headers[debug_section_index + 1:]:
+ # Make sure the .debug$S data is at the very end of section data:
+ assert header.PointerToRawData > debug_offset
+ assert header.PointerToRelocations == 0
+ assert header.PointerToLineNumbers == 0
+
+ # Make sure the first non-empty section's data starts right after the section
+ # headers.
+ for section_header in section_headers:
+ if section_header.PointerToRawData == 0:
+ assert section_header.PointerToRelocations == 0
+ assert section_header.PointerToLineNumbers == 0
+ continue
+ assert section_header.PointerToRawData == data_start
+ break
+
+ # Make sure the symbol table (and hence, string table) appear after the last
+ # section:
+ assert (coff_header.PointerToSymbolTable >=
+ section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData)
+
+ # The symbol table contains a symbol for the no-longer-present .debug$S
+ # section. If we leave it there, lld-link will complain:
+ #
+ # lld-link: error: .debug$S should not refer to non-existent section 5
+ #
+ # so we need to remove that symbol table entry as well. This shifts symbol
+ # entries around and we need to update symbol table indices in:
+ # - relocations
+ # - line number records (never present)
+ # - one aux symbol entry (IMAGE_SYM_CLASS_CLR_TOKEN; not present in ml output)
+ SYM = Struct('SYM',
+ '8s', 'Name',
+ 'I', 'Value',
+ 'h', 'SectionNumber', # Note: Signed!
+ 'H', 'Type',
+
+ 'B', 'StorageClass',
+ 'B', 'NumberOfAuxSymbols')
+ i = 0
+ debug_sym = -1
+ while i < coff_header.NumberOfSymbols:
+ sym_offset = coff_header.PointerToSymbolTable + i * SYM.size()
+ sym = SYM.unpack_from(objdata, sym_offset)
+
+ # 107 is IMAGE_SYM_CLASS_CLR_TOKEN, which has aux entry "CLR Token
+ # Definition", which contains a symbol index. Check it's never present.
+ assert sym.StorageClass != 107
+
+ # Note: sym.SectionNumber is 1-based, debug_section_index is 0-based.
+ if sym.SectionNumber - 1 == debug_section_index:
+ assert debug_sym == -1, 'more than one .debug$S symbol found'
+ debug_sym = i
+ # Make sure the .debug$S symbol looks like we expect.
+ # In particular, it should have exactly one aux symbol.
+ assert sym.Name == b'.debug$S'
+ assert sym.Value == 0
+ assert sym.Type == 0
+ assert sym.StorageClass == 3
+ assert sym.NumberOfAuxSymbols == 1
+ elif sym.SectionNumber > debug_section_index:
+ sym = Subtract(sym, SectionNumber=1)
+ SYM.pack_into(objdata, sym_offset, sym)
+ i += 1 + sym.NumberOfAuxSymbols
+ assert debug_sym != -1, '.debug$S symbol not found'
+
+ # Note: Usually the .debug$S section is the last, but for files saying
+ # `includelib foo.lib`, like safe_terminate_process.asm in 32-bit builds,
+ # this isn't true: .drectve is after .debug$S.
+
+ # Update symbol table indices in relocations.
+ # There are a few processor types that have one or two relocation types
+ # where SymbolTableIndex has a different meaning, but not for x86.
+ REL = Struct('REL',
+ 'I', 'VirtualAddress',
+ 'I', 'SymbolTableIndex',
+ 'H', 'Type')
+ for header in section_headers[0:debug_section_index]:
+ for j in range(0, header.NumberOfRelocations):
+ rel_offset = header.PointerToRelocations + j * REL.size()
+ rel = REL.unpack_from(objdata, rel_offset)
+ assert rel.SymbolTableIndex != debug_sym
+ if rel.SymbolTableIndex > debug_sym:
+ rel = Subtract(rel, SymbolTableIndex=2)
+ REL.pack_into(objdata, rel_offset, rel)
+
+ # Update symbol table indices in line numbers -- just check they don't exist.
+ for header in section_headers:
+ assert header.NumberOfLineNumbers == 0
+
+ # Now that all indices are updated, remove the symbol table entry referring to
+ # .debug$S and its aux entry.
+ del objdata[coff_header.PointerToSymbolTable + debug_sym * SYM.size():
+ coff_header.PointerToSymbolTable + (debug_sym + 2) * SYM.size()]
+
+  # Now we know that it's safe to write out the input data, with just the
+  # timestamp overwritten to 0, the .debug$S section header cut out (and the
+  # offsets of all other section headers decremented by the size of that
+  # one section header), and the .debug$S section's data cut out. The symbol
+  # table offset needs to be reduced by one section header and the size of
+  # the missing section.
+  # (The COFF spec only requires on-disk sections to be aligned in image files;
+  # for obj files it's not required. If that wasn't the case, deleting slices
+  # of data would not generally be safe.)
+
+ # Update section offsets and remove .debug$S section data.
+ for i in range(0, debug_section_index):
+ header = section_headers[i]
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=SECTIONHEADER.size())
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=SECTIONHEADER.size())
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size())
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+ for i in range(debug_section_index + 1, len(section_headers)):
+ header = section_headers[i]
+ shift = SECTIONHEADER.size() + debug_size
+ if header.SizeOfRawData:
+ header = Subtract(header, PointerToRawData=shift)
+ if header.NumberOfRelocations:
+ header = Subtract(header, PointerToRelocations=shift)
+ if header.NumberOfLineNumbers:
+ header = Subtract(header, PointerToLineNumbers=shift)
+ SECTIONHEADER.pack_into(
+ objdata, COFFHEADER.size() + i * SECTIONHEADER.size(), header)
+
+ del objdata[debug_offset:debug_offset + debug_size]
+
+ # Finally, remove .debug$S section header and update coff header.
+ coff_header = coff_header._replace(TimeDateStamp=0)
+ coff_header = Subtract(coff_header,
+ NumberOfSections=1,
+ PointerToSymbolTable=SECTIONHEADER.size() + debug_size,
+ NumberOfSymbols=2)
+ COFFHEADER.pack_into(objdata, 0, coff_header)
+
+ del objdata[
+ COFFHEADER.size() + debug_section_index * SECTIONHEADER.size():
+ COFFHEADER.size() + (debug_section_index + 1) * SECTIONHEADER.size()]
+
+ # All done!
+ if sys.version_info.major == 2:
+ return objdata.tostring()
+ else:
+ return objdata.tobytes()
+
+
+def main():
+ ml_result = subprocess.call(sys.argv[1:])
+ if ml_result != 0:
+ return ml_result
+
+ objfile = None
+ for i in range(1, len(sys.argv)):
+ if sys.argv[i].startswith('/Fo'):
+ objfile = sys.argv[i][len('/Fo'):]
+ assert objfile, 'failed to find ml output'
+
+ with open(objfile, 'rb') as f:
+ objdata = f.read()
+ objdata = MakeDeterministic(objdata)
+ with open(objfile, 'wb') as f:
+ f.write(objdata)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/.gitignore b/third_party/libwebrtc/build/toolchain/win/rc/.gitignore
new file mode 100644
index 0000000000..e8fc4d3e1f
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/.gitignore
@@ -0,0 +1,3 @@
+linux64/rc
+mac/rc
+win/rc.exe
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/README.md b/third_party/libwebrtc/build/toolchain/win/rc/README.md
new file mode 100644
index 0000000000..e6d38f9709
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/README.md
@@ -0,0 +1,30 @@
+# rc
+
+This contains a cross-platform reimplementation of rc.exe.
+
+This exists mainly to compile .rc files on non-Windows hosts for cross builds.
+However, it also runs on Windows for two reasons:
+
+1. To compare the output of Microsoft's rc.exe and the reimplementation and to
+ check that they produce bitwise identical output.
+2. The reimplementation supports printing resource files in /showIncludes
+   output, which helps get build dependencies right.
+
+The resource compiler consists of two parts:
+
+1. A Python script, rc.py, that serves as the driver. It does Unicode
+   conversions, runs the input through the preprocessor, and then calls the
+   actual resource compiler.
+2. The resource compiler, a C++ binary obtained via sha1 files from Google
+   Storage. The binary's code currently lives at
+   https://github.com/nico/hack/tree/master/res, even though work is (slowly)
+   underway to upstream it into LLVM.
+
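+As a rough illustration, rc.py can also be invoked by hand with the flags
+documented at the top of rc.py (the file names below are hypothetical, and the
+prebuilt rc binary for your host platform must already be present):
+
+```python
+# Sketch only: compile example.rc into example.res via rc.py.
+# All paths and file names here are placeholders.
+import subprocess
+import sys
+
+cmd = [sys.executable, 'rc.py',
+       '-I.',             # include path, used for headers and resources
+       '-DNDEBUG',        # preprocessor define
+       '/showIncludes',   # print referenced files for dependency tracking
+       '/foexample.res',  # output .res file
+       'example.rc']      # input .rc file
+sys.exit(subprocess.call(cmd))
+```
+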
+To update the rc binary, run `upload_rc_binaries.sh` in this directory, on a
+Mac.
+
+rc isn't built from source as part of the regular chrome build because
+it's needed in a gn toolchain tool, and these currently cannot have deps.
+Alternatively, gn could be taught about deps on tools, or rc invocations could
+be turned from a tool into a template like e.g. yasm invocations (which can
+have deps); then the prebuilt binaries wouldn't be needed.
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/linux64/rc.sha1 b/third_party/libwebrtc/build/toolchain/win/rc/linux64/rc.sha1
new file mode 100644
index 0000000000..ad14ca46a9
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/linux64/rc.sha1
@@ -0,0 +1 @@
+2d0c766039264dc2514d005a42f074af4838a446 \ No newline at end of file
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/mac/rc.sha1 b/third_party/libwebrtc/build/toolchain/win/rc/mac/rc.sha1
new file mode 100644
index 0000000000..dbd6302a35
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/mac/rc.sha1
@@ -0,0 +1 @@
+4c25c3bcb6608109bb52028d008835895cf72629 \ No newline at end of file
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/rc.py b/third_party/libwebrtc/build/toolchain/win/rc/rc.py
new file mode 100755
index 0000000000..2ab41225fb
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/rc.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""usage: rc.py [options] input.res
+A resource compiler for .rc files.
+
+options:
+-h, --help Print this message.
+-I<dir> Add include path, used for both headers and resources.
+-imsvc<dir> Add system include path, used for preprocessing only.
+/winsysroot<d> Set winsysroot, used for preprocessing only.
+-D<sym> Define a macro for the preprocessor.
+/fo<out> Set path of output .res file.
+/nologo Ignored (rc.py doesn't print a logo by default).
+/showIncludes Print referenced header and resource files."""
+
+from __future__ import print_function
+from collections import namedtuple
+import codecs
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = \
+ os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(THIS_DIR))))
+
+
+def ParseFlags():
+ """Parses flags off sys.argv and returns the parsed flags."""
+ # Can't use optparse / argparse because of /fo flag :-/
+ includes = []
+ imsvcs = []
+ winsysroot = []
+ defines = []
+ output = None
+ input = None
+ show_includes = False
+ # Parse.
+ for flag in sys.argv[1:]:
+ if flag == '-h' or flag == '--help':
+ print(__doc__)
+ sys.exit(0)
+ if flag.startswith('-I'):
+ includes.append(flag)
+ elif flag.startswith('-imsvc'):
+ imsvcs.append(flag)
+ elif flag.startswith('/winsysroot'):
+ winsysroot = [flag]
+ elif flag.startswith('-D'):
+ defines.append(flag)
+ elif flag.startswith('/fo'):
+ if output:
+ print('rc.py: error: multiple /fo flags', '/fo' + output, flag,
+ file=sys.stderr)
+ sys.exit(1)
+ output = flag[3:]
+ elif flag == '/nologo':
+ pass
+ elif flag == '/showIncludes':
+ show_includes = True
+ elif (flag.startswith('-') or
+ (flag.startswith('/') and not os.path.exists(flag))):
+ print('rc.py: error: unknown flag', flag, file=sys.stderr)
+ print(__doc__, file=sys.stderr)
+ sys.exit(1)
+ else:
+ if input:
+ print('rc.py: error: multiple inputs:', input, flag, file=sys.stderr)
+ sys.exit(1)
+ input = flag
+ # Validate and set default values.
+ if not input:
+ print('rc.py: error: no input file', file=sys.stderr)
+ sys.exit(1)
+ if not output:
+ output = os.path.splitext(input)[0] + '.res'
+ Flags = namedtuple('Flags', [
+ 'includes', 'defines', 'output', 'imsvcs', 'winsysroot', 'input',
+ 'show_includes'
+ ])
+ return Flags(includes=includes,
+ defines=defines,
+ output=output,
+ imsvcs=imsvcs,
+ winsysroot=winsysroot,
+ input=input,
+ show_includes=show_includes)
+
+
+def ReadInput(input):
+  """Reads input and returns it. For UTF-16LE BOM input, converts to UTF-8."""
+ # Microsoft's rc.exe only supports unicode in the form of UTF-16LE with a BOM.
+ # Our rc binary sniffs for UTF-16LE. If that's not found, if /utf-8 is
+ # passed, the input is treated as UTF-8. If /utf-8 is not passed and the
+ # input is not UTF-16LE, then our rc errors out on characters outside of
+ # 7-bit ASCII. Since the driver always converts UTF-16LE to UTF-8 here (for
+ # the preprocessor, which doesn't support UTF-16LE), our rc will either see
+ # UTF-8 with the /utf-8 flag (for UTF-16LE input), or ASCII input.
+ # This is compatible with Microsoft rc.exe. If we wanted, we could expose
+ # a /utf-8 flag for the driver for UTF-8 .rc inputs too.
+ # TODO(thakis): Microsoft's rc.exe supports BOM-less UTF-16LE. We currently
+ # don't, but for chrome it currently doesn't matter.
+ is_utf8 = False
+ try:
+ with open(input, 'rb') as rc_file:
+ rc_file_data = rc_file.read()
+ if rc_file_data.startswith(codecs.BOM_UTF16_LE):
+ rc_file_data = rc_file_data[2:].decode('utf-16le').encode('utf-8')
+ is_utf8 = True
+ except IOError:
+ print('rc.py: failed to open', input, file=sys.stderr)
+ sys.exit(1)
+ except UnicodeDecodeError:
+ print('rc.py: failed to decode UTF-16 despite BOM', input, file=sys.stderr)
+ sys.exit(1)
+ return rc_file_data, is_utf8
+
+
+def Preprocess(rc_file_data, flags):
+ """Runs the input file through the preprocessor."""
+ clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
+ 'Release+Asserts', 'bin', 'clang-cl')
+ # Let preprocessor write to a temp file so that it doesn't interfere
+ # with /showIncludes output on stdout.
+ if sys.platform == 'win32':
+ clang += '.exe'
+ temp_handle, temp_file = tempfile.mkstemp(suffix='.i')
+  # Closing temp_handle immediately defeats the purpose of mkstemp(), but I
+  # can't figure out how else to let the preprocessor write to it on Windows.
+ os.close(temp_handle)
+ clang_cmd = [clang, '/P', '/DRC_INVOKED', '/TC', '-', '/Fi' + temp_file]
+ if flags.imsvcs:
+ clang_cmd += ['/X']
+ if os.path.dirname(flags.input):
+ # This must precede flags.includes.
+ clang_cmd.append('-I' + os.path.dirname(flags.input))
+ if flags.show_includes:
+ clang_cmd.append('/showIncludes')
+ clang_cmd += flags.imsvcs + flags.winsysroot + flags.includes + flags.defines
+ p = subprocess.Popen(clang_cmd, stdin=subprocess.PIPE)
+ p.communicate(input=rc_file_data)
+ if p.returncode != 0:
+ sys.exit(p.returncode)
+ preprocessed_output = open(temp_file, 'rb').read()
+ os.remove(temp_file)
+
+ # rc.exe has a wacko preprocessor:
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381033(v=vs.85).aspx
+ # """RC treats files with the .c and .h extensions in a special manner. It
+ # assumes that a file with one of these extensions does not contain
+ # resources. If a file has the .c or .h file name extension, RC ignores all
+ # lines in the file except the preprocessor directives."""
+ # Thankfully, the Microsoft headers are mostly good about putting everything
+ # in the system headers behind `if !defined(RC_INVOKED)`, so regular
+ # preprocessing with RC_INVOKED defined works.
+ return preprocessed_output
+
+
+def RunRc(preprocessed_output, is_utf8, flags):
+ if sys.platform.startswith('linux'):
+ rc = os.path.join(THIS_DIR, 'linux64', 'rc')
+ elif sys.platform == 'darwin':
+ rc = os.path.join(THIS_DIR, 'mac', 'rc')
+ elif sys.platform == 'win32':
+ rc = os.path.join(THIS_DIR, 'win', 'rc.exe')
+ else:
+ print('rc.py: error: unsupported platform', sys.platform, file=sys.stderr)
+ sys.exit(1)
+ rc_cmd = [rc]
+ # Make sure rc-relative resources can be found:
+ if os.path.dirname(flags.input):
+ rc_cmd.append('/cd' + os.path.dirname(flags.input))
+ rc_cmd.append('/fo' + flags.output)
+ if is_utf8:
+ rc_cmd.append('/utf-8')
+ # TODO(thakis): cl currently always prints full paths for /showIncludes,
+ # but clang-cl /P doesn't. Which one is right?
+ if flags.show_includes:
+ rc_cmd.append('/showIncludes')
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+ rc_cmd += flags.includes
+ p = subprocess.Popen(rc_cmd, stdin=subprocess.PIPE)
+ p.communicate(input=preprocessed_output)
+
+ if flags.show_includes and p.returncode == 0:
+ TOOL_DIR = os.path.dirname(os.path.relpath(THIS_DIR)).replace("\\", "/")
+ # Since tool("rc") can't have deps, add deps on this script and on rc.py
+ # and its deps here, so that rc edges become dirty if rc.py changes.
+ print('Note: including file: {}/tool_wrapper.py'.format(TOOL_DIR))
+ print('Note: including file: {}/rc/rc.py'.format(TOOL_DIR))
+ print(
+ 'Note: including file: {}/rc/linux64/rc.sha1'.format(TOOL_DIR))
+ print('Note: including file: {}/rc/mac/rc.sha1'.format(TOOL_DIR))
+ print(
+ 'Note: including file: {}/rc/win/rc.exe.sha1'.format(TOOL_DIR))
+
+ return p.returncode
+
+
+def CompareToMsRcOutput(preprocessed_output, is_utf8, flags):
+ msrc_in = flags.output + '.preprocessed.rc'
+
+ # Strip preprocessor line markers.
+ preprocessed_output = re.sub(br'^#.*$', b'', preprocessed_output, flags=re.M)
+ if is_utf8:
+ preprocessed_output = preprocessed_output.decode('utf-8').encode('utf-16le')
+ with open(msrc_in, 'wb') as f:
+ f.write(preprocessed_output)
+
+ msrc_out = flags.output + '_ms_rc'
+ msrc_cmd = ['rc', '/nologo', '/x', '/fo' + msrc_out]
+
+ # Make sure rc-relative resources can be found. rc.exe looks for external
+ # resource files next to the file, but the preprocessed file isn't where the
+ # input was.
+ # Note that rc searches external resource files in the order of
+ # 1. next to the input file
+ # 2. relative to cwd
+ # 3. next to -I directories
+ # Changing the cwd means we'd have to rewrite all -I flags, so just add
+ # the input file dir as -I flag. That technically gets the order of 1 and 2
+ # wrong, but in Chromium's build the cwd is the gn out dir, and generated
+ # files there are in obj/ and gen/, so this difference doesn't matter in
+ # practice.
+ if os.path.dirname(flags.input):
+ msrc_cmd += [ '-I' + os.path.dirname(flags.input) ]
+
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+ msrc_cmd += flags.includes
+
+ # Input must come last.
+ msrc_cmd += [ msrc_in ]
+
+ rc_exe_exit_code = subprocess.call(msrc_cmd)
+ # Assert Microsoft rc.exe and rc.py produced identical .res files.
+ if rc_exe_exit_code == 0:
+ import filecmp
+ assert filecmp.cmp(msrc_out, flags.output)
+ return rc_exe_exit_code
+
+
+def main():
+ # This driver has to do these things:
+ # 1. Parse flags.
+ # 2. Convert the input from UTF-16LE to UTF-8 if needed.
+ # 3. Pass the input through a preprocessor (and clean up the preprocessor's
+ # output in minor ways).
+ # 4. Call rc for the heavy lifting.
+ flags = ParseFlags()
+ rc_file_data, is_utf8 = ReadInput(flags.input)
+ preprocessed_output = Preprocess(rc_file_data, flags)
+ rc_exe_exit_code = RunRc(preprocessed_output, is_utf8, flags)
+
+ # 5. On Windows, we also call Microsoft's rc.exe and check that we produced
+ # the same output.
+ # Since Microsoft's rc has a preprocessor that only accepts 32 characters
+ # for macro names, feed the clang-preprocessed source into it instead
+ # of using ms rc's preprocessor.
+ if sys.platform == 'win32' and rc_exe_exit_code == 0:
+ rc_exe_exit_code = CompareToMsRcOutput(preprocessed_output, is_utf8, flags)
+
+ return rc_exe_exit_code
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/upload_rc_binaries.sh b/third_party/libwebrtc/build/toolchain/win/rc/upload_rc_binaries.sh
new file mode 100755
index 0000000000..ec4df4cbce
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/upload_rc_binaries.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+set -eu
+
+# Builds new rc binaries at head and uploads them to google storage.
+# The new .sha1 files will be in the tree after this has run.
+
+if [[ "$OSTYPE" != "darwin"* ]]; then
+ echo "this script must run on a mac"
+ exit 1
+fi
+
+DIR="$(cd "$(dirname "${0}" )" && pwd)"
+SRC_DIR="$DIR/../../../.."
+
+# Make sure Linux and Windows sysroots are installed, for distrib.py.
+$SRC_DIR/build/linux/sysroot_scripts/install-sysroot.py --arch amd64
+$SRC_DIR/build/vs_toolchain.py update --force
+
+# Make a temporary directory.
+WORK_DIR=$(mktemp -d)
+if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
+ echo "could not create temp dir"
+ exit 1
+fi
+function cleanup {
+ rm -rf "$WORK_DIR"
+}
+trap cleanup EXIT
+
+# Check out rc and build it in the temporary directory. Copy binaries over.
+pushd "$WORK_DIR" > /dev/null
+git clone -q https://github.com/nico/hack
+cd hack/res
+./distrib.py "$SRC_DIR"
+popd > /dev/null
+cp "$WORK_DIR/hack/res/rc-linux64" "$DIR/linux64/rc"
+cp "$WORK_DIR/hack/res/rc-mac" "$DIR/mac/rc"
+cp "$WORK_DIR/hack/res/rc-win.exe" "$DIR/win/rc.exe"
+
+# Upload binaries to cloud storage.
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/linux64/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/mac/rc"
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/win/rc.exe"
diff --git a/third_party/libwebrtc/build/toolchain/win/rc/win/rc.exe.sha1 b/third_party/libwebrtc/build/toolchain/win/rc/win/rc.exe.sha1
new file mode 100644
index 0000000000..3fdbfc0c20
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/rc/win/rc.exe.sha1
@@ -0,0 +1 @@
+ba51d69039ffb88310b72b6568efa9f0de148f8f \ No newline at end of file
diff --git a/third_party/libwebrtc/build/toolchain/win/setup_toolchain.py b/third_party/libwebrtc/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000000..1ff3608f0c
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,314 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the visual studio install location and the location of the
+# win tool. The script assumes that the root build directory is the current dir
+# and the files will be written to the current directory.
+
+from __future__ import print_function
+
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+import gn_helpers
+
+SCRIPT_DIR = os.path.dirname(__file__)
+
+def _ExtractImportantEnvironment(output_of_set):
+ """Extracts environment variables required for the toolchain to run from
+ a textual dump output by the cmd.exe 'set' command."""
+ envvars_to_save = (
+ 'cipd_cache_dir', # needed by vpython
+ 'homedrive', # needed by vpython
+ 'homepath', # needed by vpython
+ 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+ 'include',
+ 'lib',
+ 'libpath',
+ 'luci_context', # needed by vpython
+ 'path',
+ 'pathext',
+ 'systemroot',
+ 'temp',
+ 'tmp',
+ 'userprofile', # needed by vpython
+ 'vpython_virtualenv_root' # needed by vpython
+ )
+ env = {}
+ # This occasionally happens and leads to misleading SYSTEMROOT error messages
+ # if not caught here.
+ if output_of_set.count('=') == 0:
+ raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
+ for line in output_of_set.splitlines():
+ for envvar in envvars_to_save:
+ if re.match(envvar + '=', line.lower()):
+ var, setting = line.split('=', 1)
+ if envvar == 'path':
+ # Our own rules and actions in Chromium rely on python being in the
+ # path. Add the path to this python here so that if it's not in the
+ # path when ninja is run later, python will still be found.
+ setting = os.path.dirname(sys.executable) + os.pathsep + setting
+ env[var.upper()] = setting
+ break
+ if sys.platform in ('win32', 'cygwin'):
+ for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+ if required not in env:
+ raise Exception('Environment variable "%s" '
+ 'required to be set to valid path' % required)
+ return env
+
+
+def _DetectVisualStudioPath():
+  """Return path to the installed Visual Studio."""
+
+ # Use the code in build/vs_toolchain.py to avoid duplicating code.
+ chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
+ sys.path.append(os.path.join(chromium_dir, 'build'))
+ import vs_toolchain
+ return vs_toolchain.DetectVisualStudioPath()
+
+
+def _LoadEnvFromBat(args):
+ """Given a bat command, runs it and returns env vars set by it."""
+ args = args[:]
+ args.extend(('&&', 'set'))
+ popen = subprocess.Popen(
+ args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ variables, _ = popen.communicate()
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+ return variables.decode(errors='ignore')
+
+
+def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store):
+ """Returns a dictionary with environment variables that must be set while
+ running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe)."""
+ # Check if we are running in the SDK command line environment and use
+ # the setup script from the SDK if so. |cpu| should be either
+ # 'x86' or 'x64' or 'arm' or 'arm64'.
+ assert cpu in ('x86', 'x64', 'arm', 'arm64')
+ if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+ # Load environment from json file.
+ env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu))
+ env = json.load(open(env))['env']
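+    # The json's 'env' value maps each variable name to a list of
+    # path-component lists, e.g. (illustratively)
+    # {'PATH': [['..', '..', 'bin']]}; the components are joined onto a base
+    # directory below.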
+ if env['VSINSTALLDIR'] == [["..", "..\\"]]:
+ # Old-style paths were relative to the win_sdk\bin directory.
+ json_relative_dir = os.path.join(sdk_dir, 'bin')
+ else:
+ # New-style paths are relative to the toolchain directory.
+ json_relative_dir = toolchain_root
+ for k in env:
+ entries = [os.path.join(*([json_relative_dir] + e)) for e in env[k]]
+ # clang-cl wants INCLUDE to be ;-separated even on non-Windows,
+ # lld-link wants LIB to be ;-separated even on non-Windows. Path gets :.
+ # The separator for INCLUDE here must match the one used in main() below.
+ sep = os.pathsep if k == 'PATH' else ';'
+ env[k] = sep.join(entries)
+ # PATH is a bit of a special case, it's in addition to the current PATH.
+ env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH']
+ # Augment with the current env to pick up TEMP and friends.
+ for k in os.environ:
+ if k not in env:
+ env[k] = os.environ[k]
+
+ varlines = []
+ for k in sorted(env.keys()):
+ varlines.append('%s=%s' % (str(k), str(env[k])))
+ variables = '\n'.join(varlines)
+
+ # Check that the json file contained the same environment as the .cmd file.
+ if sys.platform in ('win32', 'cygwin'):
+ script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
+ arg = '/' + cpu
+ json_env = _ExtractImportantEnvironment(variables)
+ cmd_env = _ExtractImportantEnvironment(_LoadEnvFromBat([script, arg]))
+ assert _LowercaseDict(json_env) == _LowercaseDict(cmd_env)
+ else:
+ if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
+ os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
+ # We only support x64-hosted tools.
+ script_path = os.path.normpath(os.path.join(
+ os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC/vcvarsall.bat'))
+ if not os.path.exists(script_path):
+ # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+ # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+ # variable. Since vcvarsall.bat appends to the INCLUDE, LIB, and LIBPATH
+ # environment variables we need to clear those to avoid getting double
+ # entries when vcvarsall.bat has been run before gn gen. vcvarsall.bat
+ # also adds to PATH, but there is no clean way of clearing that and it
+ # doesn't seem to cause problems.
+ if 'VSINSTALLDIR' in os.environ:
+ del os.environ['VSINSTALLDIR']
+ del os.environ['INCLUDE']
+ del os.environ['LIB']
+ del os.environ['LIBPATH']
+ other_path = os.path.normpath(os.path.join(
+ os.environ['GYP_MSVS_OVERRIDE_PATH'],
+ 'VC/Auxiliary/Build/vcvarsall.bat'))
+ if not os.path.exists(other_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ script_path = other_path
+ cpu_arg = "amd64"
+ if (cpu != 'x64'):
+      # x64 is the default target CPU; any other CPU needs a target suffix.
+ cpu_arg += '_' + cpu
+ args = [script_path, cpu_arg, ]
+ # Store target must come before any SDK version declaration
+ if (target_store):
+ args.append('store')
+    # Explicitly specify the SDK version to build with, to avoid accidentally
+    # building with a new and untested SDK. This should stay in sync with the
+    # packaged toolchain in build/vs_toolchain.py.
+ args.append('10.0.19041.0')
+ variables = _LoadEnvFromBat(args)
+ return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+ """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly, this is a list of key=value\0 strings, terminated by an additional
+  \0. See CreateProcess documentation for more details."""
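+  # For example (illustrative values), {'PATH': 'C:\\bin', 'TMP': 'C:\\tmp'}
+  # is formatted as 'PATH=C:\\bin\0TMP=C:\\tmp\0\0'.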
+ block = ''
+ nul = '\0'
+ for key, value in envvar_dict.items():
+ block += key + '=' + value + nul
+ block += nul
+ return block
+
+
+def _LowercaseDict(d):
+ """Returns a copy of `d` with both key and values lowercased.
+
+ Args:
+ d: dict to lowercase (e.g. {'A': 'BcD'}).
+
+ Returns:
+ A dict with both keys and values lowercased (e.g.: {'a': 'bcd'}).
+ """
+ return {k.lower(): d[k].lower() for k in d}
+
+
+def FindFileInEnvList(env, env_name, separator, file_name, optional=False):
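+  """Returns the realpath of the first directory in env[env_name] (split on
+  |separator|) that contains |file_name|; asserts if none does, unless
+  |optional| is true, in which case '' is returned."""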
+ parts = env[env_name].split(separator)
+ for path in parts:
+ if os.path.exists(os.path.join(path, file_name)):
+ return os.path.realpath(path)
+ assert optional, "%s is not found in %s:\n%s\nCheck if it is installed." % (
+ file_name, env_name, '\n'.join(parts))
+ return ''
+
+
+def main():
+ if len(sys.argv) != 7:
+    print('Usage: setup_toolchain.py '
+ '<visual studio path> <win sdk path> '
+ '<runtime dirs> <target_os> <target_cpu> '
+ '<environment block name|none>')
+ sys.exit(2)
+ # toolchain_root and win_sdk_path are only read if the hermetic Windows
+ # toolchain is set, that is if DEPOT_TOOLS_WIN_TOOLCHAIN is not set to 0.
+ # With the hermetic Windows toolchain, the visual studio path in argv[1]
+ # is the root of the Windows toolchain directory.
+ toolchain_root = sys.argv[1]
+ win_sdk_path = sys.argv[2]
+
+ runtime_dirs = sys.argv[3]
+ target_os = sys.argv[4]
+ target_cpu = sys.argv[5]
+ environment_block_name = sys.argv[6]
+ if (environment_block_name == 'none'):
+ environment_block_name = ''
+
+ if (target_os == 'winuwp'):
+ target_store = True
+ else:
+ target_store = False
+
+ cpus = ('x86', 'x64', 'arm', 'arm64')
+ assert target_cpu in cpus
+ vc_bin_dir = 'fake_path/cl.exe'
+ vc_lib_path = 'fake_path/lib'
+ vc_lib_atlmfc_path = 'fake_path/atlmfc'
+ vc_lib_um_path = 'fake_path/lib_um'
+ include = ''
+ lib = ''
+
+ # TODO(scottmg|goma): Do we need an equivalent of
+ # ninja_use_custom_environment_files?
+
+ def relflag(s): # Make s relative to builddir when cwd and sdk on same drive.
+ try:
+ return os.path.relpath(s).replace('\\', '/')
+ except ValueError:
+ return s
+
+ def q(s): # Quote s if it contains spaces or other weird characters.
+ return s if re.match(r'^[a-zA-Z0-9._/\\:-]*$', s) else '"' + s + '"'
+
+# for cpu in cpus:
+# if cpu == target_cpu:
+# # Extract environment variables for subprocesses.
+# env = _LoadToolchainEnv(cpu, toolchain_root, win_sdk_path, target_store)
+# env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
+#
+# vc_bin_dir = FindFileInEnvList(env, 'PATH', os.pathsep, 'cl.exe')
+# vc_lib_path = FindFileInEnvList(env, 'LIB', ';', 'msvcrt.lib')
+# vc_lib_atlmfc_path = FindFileInEnvList(
+# env, 'LIB', ';', 'atls.lib', optional=True)
+# vc_lib_um_path = FindFileInEnvList(env, 'LIB', ';', 'user32.lib')
+#
+# # The separator for INCLUDE here must match the one used in
+# # _LoadToolchainEnv() above.
+# include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p]
+# include = list(map(relflag, include))
+#
+# lib = [p.replace('"', r'\"') for p in env['LIB'].split(';') if p]
+# lib = list(map(relflag, lib))
+#
+# include_I = ' '.join([q('/I' + i) for i in include])
+# include_imsvc = ' '.join([q('-imsvc' + i) for i in include])
+# libpath_flags = ' '.join([q('-libpath:' + i) for i in lib])
+#
+# if (environment_block_name != ''):
+# env_block = _FormatAsEnvironmentBlock(env)
+# with open(environment_block_name, 'w') as f:
+# f.write(env_block)
+
+  # We don't really use any of this information, so it can be skipped
+  # altogether.
+ env = {}
+ env['PATH'] = ''
+ include_I = include
+ include_imsvc = include
+ libpath_flags = ''
+ print('vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir))
+ print('include_flags_I = ' + gn_helpers.ToGNString(include_I))
+ if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and win_sdk_path:
+ print('include_flags_imsvc = ' +
+ gn_helpers.ToGNString(q('/winsysroot' + relflag(toolchain_root))))
+ else:
+ print('include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc))
+ print('vc_lib_path = ' + gn_helpers.ToGNString(vc_lib_path))
+  # A vc_lib_atlmfc_path may be introduced in the future for store builds, so
+  # only output the result if one exists.
+ if (vc_lib_atlmfc_path != ''):
+ print('vc_lib_atlmfc_path = ' + gn_helpers.ToGNString(vc_lib_atlmfc_path))
+ print('vc_lib_um_path = ' + gn_helpers.ToGNString(vc_lib_um_path))
+ print('paths = ' + gn_helpers.ToGNString(env['PATH']))
+ print('libpath_flags = ' + gn_helpers.ToGNString(libpath_flags))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/third_party/libwebrtc/build/toolchain/win/tool_wrapper.py b/third_party/libwebrtc/build/toolchain/win/tool_wrapper.py
new file mode 100644
index 0000000000..9327369181
--- /dev/null
+++ b/third_party/libwebrtc/build/toolchain/win/tool_wrapper.py
@@ -0,0 +1,190 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+This file is copied to the build directory as part of toolchain setup and
+is used to set up calls to tools used by the build that need wrappers.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import sys
+
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+def main(args):
+ exit_code = WinTool().Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class WinTool(object):
+ """This class performs all the Windows tooling steps. The methods can either
+ be executed directly, or dispatched from an argument list."""
+
+ def _UseSeparateMspdbsrv(self, env, args):
+    """Allows using a unique instance of mspdbsrv.exe per linker instead of a
+    shared one."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ if args[0] != 'link.exe':
+ return
+
+ # Use the output filename passed to the linker to generate an endpoint name
+ # for mspdbsrv.exe.
+ endpoint_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ endpoint_name = re.sub(r'\W+', '',
+ '%s_%d' % (m.group('out'), os.getpid()))
+ break
+
+ if endpoint_name is None:
+ return
+
+ # Adds the appropriate environment variable. This will be read by link.exe
+ # to know which instance of mspdbsrv.exe it should connect to (if it's
+ # not set then the default endpoint is used).
+ env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
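+    # For example (illustrative args), ['delete-file', 'out/foo.tmp'] is
+    # dispatched to ExecDeleteFile('out/foo.tmp').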
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+ return name_string.title().replace('-', '')
+
+ def _GetEnv(self, arch):
+ """Gets the saved environment from a file for a given architecture."""
+ # The environment is saved as an "environment block" (see CreateProcess
+ # and msvs_emulation for details). We convert to a dict here.
+ # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
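+    # For example (illustrative), 'PATH=C:\\bin\0TMP=C:\\tmp\0\0' becomes
+    # {'PATH': 'C:\\bin', 'TMP': 'C:\\tmp'}.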
+ pairs = open(arch).read()[:-2].split('\0')
+ kvs = [item.split('=', 1) for item in pairs]
+ return dict(kvs)
+
+ def ExecDeleteFile(self, path):
+ """Simple file delete command."""
+ if os.path.exists(path):
+ os.unlink(path)
+
+ def ExecRecursiveMirror(self, source, dest):
+ """Emulation of rm -rf out && cp -af in out."""
+ if os.path.exists(dest):
+ if os.path.isdir(dest):
+ def _on_error(fn, path, dummy_excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+ shutil.rmtree(dest, onerror=_on_error)
+ else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
+ os.unlink(dest)
+
+ if os.path.isdir(source):
+ shutil.copytree(source, dest)
+ else:
+ shutil.copy2(source, dest)
+ # Try to diagnose crbug.com/741603
+ if not os.path.exists(dest):
+ raise Exception("Copying of %s to %s failed" % (source, dest))
+
+ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+ """Filter diagnostic output from link that looks like:
+ ' Creating library ui.dll.lib and object ui.dll.exp'
+ This happens when there are exports from the dll or exe.
+ """
+ env = self._GetEnv(arch)
+ if use_separate_mspdbsrv == 'True':
+ self._UseSeparateMspdbsrv(env, args)
+ if sys.platform == 'win32':
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace('/', '\\')
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+ # For that reason, since going through the shell doesn't seem necessary on
+ # non-Windows don't do that there.
+ pe_name = None
+ for arg in args:
+ m = _LINK_EXE_OUT_ARG.match(arg)
+ if m:
+ pe_name = m.group('out')
+ link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ # Read output one line at a time as it shows up to avoid OOM failures when
+ # GBs of output is produced.
+ for line in link.stdout:
+ if (not line.startswith(b' Creating library ')
+ and not line.startswith(b'Generating code')
+ and not line.startswith(b'Finished generating code')):
+ print(line)
+ return link.wait()
+
+ def ExecAsmWrapper(self, arch, *args):
+ """Filter logo banner from invocations of asm.exe."""
+ env = self._GetEnv(arch)
+ if sys.platform == 'win32':
+      # Windows ARM64 uses clang-cl as assembler, which has '/' as path
+      # separator; convert it to '\\' when running on Windows.
+ args = list(args) # *args is a tuple by default, which is read-only
+ args[0] = args[0].replace('/', '\\')
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.decode('utf8').splitlines():
+ if not line.startswith(' Assembling: '):
+ print(line)
+ return popen.returncode
+
+ def ExecRcWrapper(self, arch, *args):
+ """Converts .rc files to .res files."""
+ env = self._GetEnv(arch)
+ args = list(args)
+ rcpy_args = args[:]
+ rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')]
+ rcpy_args.append('/showIncludes')
+ return subprocess.call(rcpy_args, env=env)
+
+ def ExecActionWrapper(self, arch, rspfile, *dirname):
+ """Runs an action command line from a response file using the environment
+ for |arch|. If |dirname| is supplied, use that as the working directory."""
+ env = self._GetEnv(arch)
+ # TODO(scottmg): This is a temporary hack to get some specific variables
+ # through to actions that are set after GN-time. http://crbug.com/333738.
+ for k, v in os.environ.items():
+ if k not in env:
+ env[k] = v
+ args = open(rspfile).read()
+ dirname = dirname[0] if dirname else None
+ return subprocess.call(args, shell=True, env=env, cwd=dirname)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))