summaryrefslogtreecommitdiffstats
path: root/python/mozbuild/mozbuild/backend
diff options
context:
space:
mode:
Diffstat (limited to 'python/mozbuild/mozbuild/backend')
-rw-r--r--python/mozbuild/mozbuild/backend/__init__.py27
-rw-r--r--python/mozbuild/mozbuild/backend/base.py389
-rw-r--r--python/mozbuild/mozbuild/backend/cargo_build_defs.py87
-rw-r--r--python/mozbuild/mozbuild/backend/clangd.py126
-rw-r--r--python/mozbuild/mozbuild/backend/common.py603
-rw-r--r--python/mozbuild/mozbuild/backend/configenvironment.py357
-rw-r--r--python/mozbuild/mozbuild/backend/cpp_eclipse.py876
-rw-r--r--python/mozbuild/mozbuild/backend/fastermake.py300
-rw-r--r--python/mozbuild/mozbuild/backend/mach_commands.py420
-rw-r--r--python/mozbuild/mozbuild/backend/make.py139
-rw-r--r--python/mozbuild/mozbuild/backend/recursivemake.py1904
-rw-r--r--python/mozbuild/mozbuild/backend/static_analysis.py52
-rw-r--r--python/mozbuild/mozbuild/backend/test_manifest.py110
-rw-r--r--python/mozbuild/mozbuild/backend/visualstudio.py712
14 files changed, 6102 insertions, 0 deletions
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
new file mode 100644
index 0000000000..e7097eb614
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# Registry of build backend names, as accepted on the command line, mapped to
# the dotted module path that defines the corresponding "<Name>Backend" class
# (see get_backend_class below).  A value of None marks a name that is only
# valid as a hybrid combination and is resolved by splitting on "+" rather
# than by importing a single module.
backends = {
    "Clangd": "mozbuild.backend.clangd",
    "ChromeMap": "mozbuild.codecoverage.chrome_map",
    "CompileDB": "mozbuild.compilation.database",
    "CppEclipse": "mozbuild.backend.cpp_eclipse",
    "FasterMake": "mozbuild.backend.fastermake",
    "FasterMake+RecursiveMake": None,
    "RecursiveMake": "mozbuild.backend.recursivemake",
    "StaticAnalysis": "mozbuild.backend.static_analysis",
    "TestManifest": "mozbuild.backend.test_manifest",
    "VisualStudio": "mozbuild.backend.visualstudio",
}
+
+
def get_backend_class(name):
    """Return the backend class registered under ``name``.

    Hybrid names such as ``"FasterMake+RecursiveMake"`` are resolved by
    combining the class for each ``+``-separated component via
    ``HybridBackend``.  Unknown names raise ``KeyError``.
    """
    if "+" in name:
        # Import lazily to avoid a cycle with mozbuild.backend.base.
        from mozbuild.backend.base import HybridBackend

        components = [get_backend_class(part) for part in name.split("+")]
        return HybridBackend(*components)

    # Each backend module exposes a class named "<Name>Backend".
    class_name = "%sBackend" % name
    module = __import__(backends[name], globals(), locals(), [class_name])
    return getattr(module, class_name)
diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
new file mode 100644
index 0000000000..0f95942f51
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -0,0 +1,389 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import io
+import itertools
+import os
+import time
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+import mozpack.path as mozpath
+import six
+from mach.mixin.logging import LoggingMixin
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import ContextDerived
+from ..frontend.reader import EmptyConfig
+from ..preprocessor import Preprocessor
+from ..pythonutil import iter_modules_in_path
+from ..util import FileAvoidWrite, simple_diff
+from .configenvironment import ConfigEnvironment
+
+
class BuildBackend(LoggingMixin):
    """Abstract base class for build backends.

    A build backend is merely a consumer of the build configuration (the output
    of the frontend processing). It does something with said data. What exactly
    is the discretion of the specific implementation.
    """

    # NOTE(review): `__metaclass__` is the Python 2 metaclass idiom; under
    # Python 3 this assignment has no effect, so @abstractmethod below is not
    # actually enforced at instantiation time.  Confirm before changing to
    # `class BuildBackend(LoggingMixin, metaclass=ABCMeta)` — some subclasses
    # (e.g. PartialBackend) may rely on not being treated as abstract.
    __metaclass__ = ABCMeta

    def __init__(self, environment):
        # The backend consumes either a full ConfigEnvironment or the
        # EmptyConfig used for config-less (e.g. artifact) reads.
        assert isinstance(environment, (ConfigEnvironment, EmptyConfig))
        self.populate_logger()

        self.environment = environment

        # Files whose modification should cause a new read and backend
        # generation.
        self.backend_input_files = set()

        # Files generated by the backend.
        self._backend_output_files = set()

        self._environments = {}
        self._environments[environment.topobjdir] = environment

        # The number of backend files created.
        self._created_count = 0

        # The number of backend files updated.
        self._updated_count = 0

        # The number of unchanged backend files.
        self._unchanged_count = 0

        # The number of deleted backend files.
        self._deleted_count = 0

        # The total wall time spent in the backend. This counts the time the
        # backend writes out files, etc.
        self._execution_time = 0.0

        # Mapping of changed file paths to diffs of the changes.
        self.file_diffs = {}

        # When True, no files are written or deleted; counters and diffs are
        # still collected.
        self.dry_run = False

        self._init()

    def summary(self):
        """Return an ExecutionSummary describing the work this backend did.

        Totals reflect the file counters accumulated during consume().
        """
        return ExecutionSummary(
            self.__class__.__name__.replace("Backend", "")
            + " backend executed in {execution_time:.2f}s\n  "
            "{total:d} total backend files; "
            "{created:d} created; "
            "{updated:d} updated; "
            "{unchanged:d} unchanged; "
            "{deleted:d} deleted",
            execution_time=self._execution_time,
            total=self._created_count + self._updated_count + self._unchanged_count,
            created=self._created_count,
            updated=self._updated_count,
            unchanged=self._unchanged_count,
            deleted=self._deleted_count,
        )

    def _init(self):
        """Hook point for child classes to perform actions during __init__.

        This exists so child classes don't need to implement __init__.
        """

    def consume(self, objs):
        """Consume a stream of TreeMetadata instances.

        This is the main method of the interface. This is what takes the
        frontend output and does something with it.

        Child classes are not expected to implement this method. Instead, the
        base class consumes objects and calls methods (possibly) implemented by
        child classes.
        """

        # Previously generated files.  The backend.<ClassName> file in the
        # objdir records what the previous run emitted so stale outputs can be
        # purged below.
        list_file = mozpath.join(
            self.environment.topobjdir, "backend.%s" % self.__class__.__name__
        )
        backend_output_list = set()
        if os.path.exists(list_file):
            with open(list_file) as fh:
                backend_output_list.update(
                    mozpath.normsep(p) for p in fh.read().splitlines()
                )

        for obj in objs:
            obj_start = time.monotonic()
            # PartialBackends are allowed to leave objects unhandled; any
            # other backend must account for every object it receives.
            if not self.consume_object(obj) and not isinstance(self, PartialBackend):
                raise Exception("Unhandled object of type %s" % type(obj))
            self._execution_time += time.monotonic() - obj_start

            if isinstance(obj, ContextDerived) and not isinstance(self, PartialBackend):
                self.backend_input_files |= obj.context_all_paths

        # Pull in all loaded Python as dependencies so any Python changes that
        # could influence our output result in a rescan.
        self.backend_input_files |= set(
            iter_modules_in_path(self.environment.topsrcdir, self.environment.topobjdir)
        )

        finished_start = time.monotonic()
        self.consume_finished()
        self._execution_time += time.monotonic() - finished_start

        # Purge backend files created in previous run, but not created anymore
        delete_files = backend_output_list - self._backend_output_files
        for path in delete_files:
            full_path = mozpath.join(self.environment.topobjdir, path)
            try:
                # Capture the file's content as a diff before deleting it so
                # file_diffs records the removal.
                with io.open(full_path, mode="r", encoding="utf-8") as existing:
                    old_content = existing.read()
                    if old_content:
                        self.file_diffs[full_path] = simple_diff(
                            full_path, old_content.splitlines(), None
                        )
            except IOError:
                pass
            try:
                if not self.dry_run:
                    os.unlink(full_path)
                # NOTE(review): in dry-run mode the counter is still bumped
                # even though nothing was unlinked — presumably intentional so
                # the summary reflects what *would* happen; confirm.
                self._deleted_count += 1
            except OSError:
                pass
        # Remove now empty directories
        for dir in set(mozpath.dirname(d) for d in delete_files):
            try:
                os.removedirs(dir)
            except OSError:
                # Directory not empty (or already gone) — leave it.
                pass

        # Write out the list of backend files generated, if it changed.
        if backend_output_list != self._backend_output_files:
            with self._write_file(list_file) as fh:
                fh.write("\n".join(sorted(self._backend_output_files)))
        else:
            # Always update its mtime if we're not in dry-run mode.
            if not self.dry_run:
                with open(list_file, "a"):
                    os.utime(list_file, None)

        # Write out the list of input files for the backend
        with self._write_file("%s.in" % list_file) as fh:
            fh.write(
                "\n".join(sorted(mozpath.normsep(f) for f in self.backend_input_files))
            )

    @abstractmethod
    def consume_object(self, obj):
        """Consumes an individual TreeMetadata instance.

        This is the main method used by child classes to react to build
        metadata.
        """

    def consume_finished(self):
        """Called when consume() has completed handling all objects."""

    def build(self, config, output, jobs, verbose, what=None):
        """Called when 'mach build' is executed.

        This should return the status value of a subprocess, where 0 denotes
        success and any other value is an error code. A return value of None
        indicates that the default 'make -f client.mk' should run.
        """
        return None

    def _write_purgecaches(self, config):
        """Write .purgecaches sentinels.

        The purgecaches mechanism exists to allow the platform to
        invalidate the XUL cache (which includes some JS) at application
        startup-time. The application checks for .purgecaches in the
        application directory, which varies according to
        --enable-application/--enable-project. There's a further wrinkle on
        macOS, where the real application directory is part of a Cocoa bundle
        produced from the regular application directory by the build
        system. In this case, we write to both locations, since the
        build system recreates the Cocoa bundle from the contents of the
        regular application directory and might remove a sentinel
        created here.
        """

        app = config.substs["MOZ_BUILD_APP"]
        if app == "mobile/android":
            # In order to take effect, .purgecaches sentinels would need to be
            # written to the Android device file system.
            return

        root = mozpath.join(config.topobjdir, "dist", "bin")

        if app == "browser":
            root = mozpath.join(config.topobjdir, "dist", "bin", "browser")

        purgecaches_dirs = [root]
        if app == "browser" and "cocoa" == config.substs["MOZ_WIDGET_TOOLKIT"]:
            bundledir = mozpath.join(
                config.topobjdir,
                "dist",
                config.substs["MOZ_MACBUNDLE_NAME"],
                "Contents",
                "Resources",
                "browser",
            )
            purgecaches_dirs.append(bundledir)

        for dir in purgecaches_dirs:
            with open(mozpath.join(dir, ".purgecaches"), "wt") as f:
                f.write("\n")

    def post_build(self, config, output, jobs, verbose, status):
        """Called late during 'mach build' execution, after `build(...)` has finished.

        `status` is the status value returned from `build(...)`.

        In the case where `build` returns `None`, this is called after
        the default `make` command has completed, with the status of
        that command.

        This should return the status value from `build(...)`, or the
        status value of a subprocess, where 0 denotes success and any
        other value is an error code.

        If an exception is raised, ``mach build`` will fail with a
        non-zero exit code.
        """
        self._write_purgecaches(config)

        return status

    @contextmanager
    def _write_file(self, path=None, fh=None, readmode="r"):
        """Context manager to write a file.

        This is a glorified wrapper around FileAvoidWrite with integration to
        update the summary data on this instance.

        Example usage:

            with self._write_file('foo.txt') as fh:
                fh.write('hello world')
        """

        if path is not None:
            assert fh is None
            fh = FileAvoidWrite(
                path, capture_diff=True, dry_run=self.dry_run, readmode=readmode
            )
        else:
            assert fh is not None

        # Ensure the destination directory exists before handing the file
        # handle to the caller.
        dirname = mozpath.dirname(fh.name)
        try:
            os.makedirs(dirname)
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise

        yield fh

        # Record the output (relative to the objdir) and update the
        # created/updated/unchanged counters based on what close() reports.
        self._backend_output_files.add(
            mozpath.relpath(fh.name, self.environment.topobjdir)
        )
        existed, updated = fh.close()
        if fh.diff:
            self.file_diffs[fh.name] = fh.diff
        if not existed:
            self._created_count += 1
        elif updated:
            self._updated_count += 1
        else:
            self._unchanged_count += 1

    @contextmanager
    def _get_preprocessor(self, obj):
        """Returns a preprocessor with a few predefined values depending on
        the given BaseConfigSubstitution(-like) object, and all the substs
        in the current environment."""
        pp = Preprocessor()
        srcdir = mozpath.dirname(obj.input_path)
        # List-valued substs are flattened to space-separated strings so they
        # can be substituted as text.
        pp.context.update(
            {
                k: " ".join(v) if isinstance(v, list) else v
                for k, v in six.iteritems(obj.config.substs)
            }
        )
        pp.context.update(
            top_srcdir=obj.topsrcdir,
            topobjdir=obj.topobjdir,
            srcdir=srcdir,
            srcdir_rel=mozpath.relpath(srcdir, mozpath.dirname(obj.output_path)),
            relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or ".",
            DEPTH=mozpath.relpath(obj.topobjdir, mozpath.dirname(obj.output_path))
            or ".",
        )
        pp.do_filter("attemptSubstitution")
        pp.setMarker(None)
        # Preprocessor output goes through _write_file so the summary
        # counters and file_diffs are updated like any other backend output.
        with self._write_file(obj.output_path) as fh:
            pp.out = fh
            yield pp
+
+
class PartialBackend(BuildBackend):
    """A PartialBackend is a BuildBackend declaring that its consume_object
    method may not handle all build configuration objects it's passed, and
    that it's fine.

    BuildBackend.consume() checks for this marker class: objects left
    unhandled by a PartialBackend do not raise, whereas a full backend must
    handle everything.  Used as the partial component(s) of a HybridBackend.
    """
+
+
def HybridBackend(*backends):
    """A HybridBackend is the combination of one or more PartialBackends
    with a non-partial BuildBackend.

    Build configuration objects are passed to each backend, stopping at the
    first of them that declares having handled them.
    """
    assert len(backends) >= 2
    assert all(issubclass(b, PartialBackend) for b in backends[:-1])
    assert not (issubclass(backends[-1], PartialBackend))
    assert all(issubclass(b, BuildBackend) for b in backends)

    class _CombinedBackend(BuildBackend):
        def __init__(self, environment):
            self._backends = [backend(environment) for backend in backends]
            super(_CombinedBackend, self).__init__(environment)

        def consume_object(self, obj):
            # Offer the object to each sub-backend in order; stop at the
            # first one that reports having handled it.
            for backend in self._backends:
                if backend.consume_object(obj):
                    return True
            return False

        def consume_finished(self):
            for backend in self._backends:
                backend.consume_finished()

            # Fold per-backend counters into this instance so summary()
            # reflects the aggregate work of all sub-backends.
            for attr in (
                "_execution_time",
                "_created_count",
                "_updated_count",
                "_unchanged_count",
                "_deleted_count",
            ):
                setattr(
                    self, attr, sum(getattr(backend, attr) for backend in self._backends)
                )

            # Likewise merge the diffs and input/output file sets.
            for backend in self._backends:
                self.file_diffs.update(backend.file_diffs)
                self.backend_input_files |= backend.backend_input_files
                self._backend_output_files |= backend._backend_output_files

    # Name the combined class "<Partial>+...+<FullBackend>" so summary()
    # output identifies the combination.
    partial_names = [b.__name__.replace("Backend", "") for b in backends[:-1]]
    full_names = [b.__name__ for b in backends[-1:]]
    name = "+".join(partial_names + full_names)

    return type(str(name), (_CombinedBackend,), {})
diff --git a/python/mozbuild/mozbuild/backend/cargo_build_defs.py b/python/mozbuild/mozbuild/backend/cargo_build_defs.py
new file mode 100644
index 0000000000..c60fd2abf6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/cargo_build_defs.py
@@ -0,0 +1,87 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# Maps a Rust crate name to the extra files its build produces (e.g. files
# emitted by the crate's build script, or object files/archives from cc-style
# builds), keyed by path relative to the crate's build output directory.
# Presumably consumed elsewhere in the build system to track or clean cargo
# outputs — confirm against the caller before relying on exact semantics.
cargo_extra_outputs = {
    "bindgen": ["tests.rs", "host-target.txt"],
    "cssparser": ["tokenizer.rs"],
    "gleam": ["gl_and_gles_bindings.rs", "gl_bindings.rs", "gles_bindings.rs"],
    "khronos_api": ["webgl_exts.rs"],
    "libloading": ["libglobal_static.a", "src/os/unix/global_static.o"],
    "lmdb-sys": ["liblmdb.a", "midl.o", "mdb.o"],
    "num-integer": ["rust_out.o"],
    "num-traits": ["rust_out.o"],
    "selectors": ["ascii_case_insensitive_html_attributes.rs"],
    "style": [
        "gecko/atom_macro.rs",
        "gecko/bindings.rs",
        "gecko/pseudo_element_definition.rs",
        "gecko/structs.rs",
        "gecko_properties.rs",
        "longhands/background.rs",
        "longhands/border.rs",
        "longhands/box.rs",
        "longhands/color.rs",
        "longhands/column.rs",
        "longhands/counters.rs",
        "longhands/effects.rs",
        "longhands/font.rs",
        "longhands/inherited_box.rs",
        "longhands/inherited_svg.rs",
        "longhands/inherited_table.rs",
        "longhands/inherited_text.rs",
        "longhands/inherited_ui.rs",
        "longhands/list.rs",
        "longhands/margin.rs",
        "longhands/outline.rs",
        "longhands/padding.rs",
        "longhands/position.rs",
        "longhands/svg.rs",
        "longhands/table.rs",
        "longhands/text.rs",
        "longhands/ui.rs",
        "longhands/xul.rs",
        "properties.rs",
        "shorthands/background.rs",
        "shorthands/border.rs",
        "shorthands/box.rs",
        "shorthands/color.rs",
        "shorthands/column.rs",
        "shorthands/counters.rs",
        "shorthands/effects.rs",
        "shorthands/font.rs",
        "shorthands/inherited_box.rs",
        "shorthands/inherited_svg.rs",
        "shorthands/inherited_table.rs",
        "shorthands/inherited_text.rs",
        "shorthands/inherited_ui.rs",
        "shorthands/list.rs",
        "shorthands/margin.rs",
        "shorthands/outline.rs",
        "shorthands/padding.rs",
        "shorthands/position.rs",
        "shorthands/svg.rs",
        "shorthands/table.rs",
        "shorthands/text.rs",
        "shorthands/ui.rs",
        "shorthands/xul.rs",
    ],
    "webrender": ["shaders.rs"],
    "geckodriver": ["build-info.rs"],
    "gecko-profiler": ["gecko/bindings.rs"],
    "crc": ["crc64_constants.rs", "crc32_constants.rs"],
    "bzip2-sys": [
        "bzip2-1.0.6/blocksort.o",
        "bzip2-1.0.6/bzlib.o",
        "bzip2-1.0.6/compress.o",
        "bzip2-1.0.6/crctable.o",
        "bzip2-1.0.6/decompress.o",
        "bzip2-1.0.6/huffman.o",
        "bzip2-1.0.6/randtable.o",
        "libbz2.a",
    ],
    "clang-sys": ["common.rs", "dynamic.rs"],
    "target-lexicon": ["host.rs"],
    "baldrdash": ["bindings.rs"],
    "typenum": ["op.rs", "consts.rs"],
}
diff --git a/python/mozbuild/mozbuild/backend/clangd.py b/python/mozbuild/mozbuild/backend/clangd.py
new file mode 100644
index 0000000000..5db5610ae6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/clangd.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides a backend for `clangd` in order to have support for
+# code completion, compile errors, go-to-definition and more.
+# It is based on `database.py` with the difference that we don't generate
+# an unified `compile_commands.json` but we generate a per file basis `command` in
+# `objdir/clangd/compile_commands.json`
+
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.compilation.database import CompileDBBackend
+
+
def find_vscode_cmd():
    """Return the argv list used to launch Visual Studio Code, or None.

    Prefers the ``code`` binary from $PATH; otherwise probes well-known
    per-platform install locations (including Insiders builds).
    """
    import shutil
    import sys

    # Try to look up the `code` binary on $PATH, and use it if present. This
    # should catch cases like being run from within a vscode-remote shell,
    # even if vscode itself is also installed on the remote host.
    on_path = shutil.which("code")
    if on_path is not None:
        return [on_path]

    # If the binary wasn't on $PATH, try to find it in a variety of other
    # well-known install locations based on the current platform.
    candidates = []
    if sys.platform.startswith("darwin"):
        candidates = [
            {"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]},
            {
                "path": "/Applications/Visual Studio Code.app",
                "cmd": ["open", "/Applications/Visual Studio Code.app", "--args"],
            },
            {
                "path": "/Applications/Visual Studio Code - Insiders.app",
                "cmd": [
                    "open",
                    "/Applications/Visual Studio Code - Insiders.app",
                    "--args",
                ],
            },
        ]
    elif sys.platform.startswith("win"):
        from pathlib import Path

        home = str(Path.home())
        vscode_path = mozpath.join(
            home,
            "AppData",
            "Local",
            "Programs",
            "Microsoft VS Code",
            "Code.exe",
        )
        vscode_insiders_path = mozpath.join(
            home,
            "AppData",
            "Local",
            "Programs",
            "Microsoft VS Code Insiders",
            "Code - Insiders.exe",
        )
        candidates = [
            {"path": vscode_path, "cmd": [vscode_path]},
            {"path": vscode_insiders_path, "cmd": [vscode_insiders_path]},
        ]
    elif sys.platform.startswith("linux"):
        candidates = [
            {"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]},
            {"path": "/snap/bin/code", "cmd": ["/snap/bin/code"]},
            {"path": "/usr/bin/code", "cmd": ["/usr/bin/code"]},
            {"path": "/usr/bin/code-insiders", "cmd": ["/usr/bin/code-insiders"]},
        ]

    # Did we guess the path?
    for candidate in candidates:
        if os.path.exists(candidate["path"]):
            return candidate["cmd"]

    # Path cannot be found
    return None
+
+
class ClangdBackend(CompileDBBackend):
    """
    Configuration that generates the backend for clangd, it is used with `clangd`
    extension for vscode
    """

    def _init(self):
        CompileDBBackend._init(self)

    def _get_compiler_args(self, cenv, canonical_suffix):
        """Return the compiler command for a source kind, or None.

        Delegates to CompileDBBackend and strips a leading ccache wrapper,
        since clangd needs the real compiler as argv[0].
        """
        compiler_args = super(ClangdBackend, self)._get_compiler_args(
            cenv, canonical_suffix
        )
        if compiler_args is None:
            return None

        # Drop a ccache prefix if present (truthiness covers the empty list).
        if compiler_args and compiler_args[0].endswith("ccache"):
            compiler_args.pop(0)
        return compiler_args

    def _build_cmd(self, cmd, filename, unified):
        """Return a copy of cmd with the source filename appended.

        `unified` is part of the CompileDBBackend hook signature; clangd
        entries always reference the original file, so it is unused here.
        """
        cmd = list(cmd)

        cmd.append(filename)

        return cmd

    def _outputfile_path(self):
        """Return the path of the compile database written by this backend."""
        clangd_cc_path = os.path.join(self.environment.topobjdir, "clangd")

        # makedirs(exist_ok=True) avoids the check-then-create race of the
        # os.path.exists() + os.mkdir() pattern and also creates any missing
        # parent directories.
        os.makedirs(clangd_cc_path, exist_ok=True)

        # Output the database (a JSON file) to objdir/clangd/compile_commands.json
        return mozpath.join(clangd_cc_path, "compile_commands.json")

    def _process_unified_sources(self, obj):
        # Record unified sources without mapping them back to their
        # constituent files; clangd works on the per-file entries.
        self._process_unified_sources_without_mapping(obj)
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
new file mode 100644
index 0000000000..f0dc7d4e46
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -0,0 +1,603 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import itertools
+import json
+import os
+from collections import defaultdict
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.chrome.manifest import parse_manifest_line
+
+from mozbuild.backend.base import BuildBackend
+from mozbuild.frontend.context import (
+ VARIABLES,
+ Context,
+ ObjDirPath,
+ Path,
+ RenamedSourcePath,
+)
+from mozbuild.frontend.data import (
+ BaseProgram,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ Exports,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostLibrary,
+ HostSources,
+ IPDLCollection,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ Sources,
+ StaticLibrary,
+ UnifiedSources,
+ WebIDLCollection,
+ XPCOMComponentManifests,
+ XPIDLModule,
+)
+from mozbuild.jar import DeprecatedJarManifest, JarManifestParser
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import mkdir
+
+
class XPIDLManager(object):
    """Helps manage XPCOM IDLs in the context of the build system."""

    class Module(object):
        def __init__(self):
            self.idl_files = set()
            self.directories = set()
            self._stems = set()

        def add_idls(self, idls):
            """Record a batch of IDL files in this module."""
            for idl in idls:
                self.idl_files.add(idl.full_path)
                self.directories.add(mozpath.dirname(idl.full_path))
                # NOTE: basename() is applied to the idl object itself (a
                # str-like Path), not to idl.full_path — presumably
                # equivalent; confirm against the frontend Path type.
                self._stems.add(mozpath.splitext(mozpath.basename(idl))[0])

        def stems(self):
            return iter(self._stems)

    def __init__(self, config):
        self.config = config
        self.topsrcdir = config.topsrcdir
        self.topobjdir = config.topobjdir

        # Basenames of every registered IDL, used to reject duplicates.
        self._idls = set()
        self.modules = defaultdict(self.Module)

    def link_module(self, module):
        """Links an XPIDL module with this instance."""
        for idl in module.idl_files:
            leaf = mozpath.basename(idl.full_path)

            if leaf in self._idls:
                raise Exception("IDL already registered: %s" % leaf)
            self._idls.add(leaf)

        self.modules[module.name].add_idls(module.idl_files)

    def idl_stems(self):
        """Return an iterator of stems of the managed IDL files.

        The stem of an IDL file is the basename of the file with no .idl extension.
        """
        return itertools.chain.from_iterable(
            module.stems() for module in six.itervalues(self.modules)
        )
+
+
class BinariesCollection(object):
    """Tracks state of binaries produced by the build."""

    def __init__(self):
        # Shared libraries and executable programs seen while consuming
        # frontend objects; each list holds independent entries.
        self.shared_libraries = []
        self.programs = []
+
+
+class CommonBackend(BuildBackend):
+ """Holds logic common to all build backends."""
+
    def _init(self):
        # Per-run state accumulated while consuming frontend objects:
        # XPIDL modules, produced binaries, every config seen, and the set of
        # generated source files (relative to the objdir).
        self._idl_manager = XPIDLManager(self.environment)
        self._binaries = BinariesCollection()
        self._configs = set()
        self._generated_sources = set()
+
    def consume_object(self, obj):
        """Dispatch a frontend object to the appropriate common handler.

        Returns True when the object was fully handled here; False when the
        concrete backend is expected to (also) process it.  Branch order
        matters: some classes checked later are subclasses of earlier ones.
        """
        self._configs.add(obj.config)

        if isinstance(obj, XPIDLModule):
            # TODO bug 1240134 tracks not processing XPIDL files during
            # artifact builds.
            self._idl_manager.link_module(obj)

        elif isinstance(obj, ConfigFileSubstitution):
            # Do not handle ConfigFileSubstitution for Makefiles. Leave that
            # to other
            if mozpath.basename(obj.output_path) == "Makefile":
                return False
            # Run the input through the preprocessor straight into the
            # output file, and track the input for rescans.
            with self._get_preprocessor(obj) as pp:
                pp.do_include(obj.input_path)
            self.backend_input_files.add(obj.input_path)

        elif isinstance(obj, WebIDLCollection):
            self._handle_webidl_collection(obj)

        elif isinstance(obj, IPDLCollection):
            # Sorted lists make the generated output deterministic.
            self._handle_ipdl_sources(
                obj.objdir,
                list(sorted(obj.all_sources())),
                list(sorted(obj.all_preprocessed_sources())),
                list(sorted(obj.all_regular_sources())),
            )

        elif isinstance(obj, XPCOMComponentManifests):
            self._handle_xpcom_collection(obj)

        elif isinstance(obj, UnifiedSources):
            if obj.generated_files:
                self._handle_generated_sources(obj.generated_files)

            # Unified sources aren't relevant to artifact builds.
            if self.environment.is_artifact_build:
                return True

            if obj.have_unified_mapping:
                self._write_unified_files(obj.unified_source_mapping, obj.objdir)
            # Optional hook implemented by some concrete backends
            # (e.g. ClangdBackend._process_unified_sources).
            if hasattr(self, "_process_unified_sources"):
                self._process_unified_sources(obj)

        elif isinstance(obj, BaseProgram):
            self._binaries.programs.append(obj)
            return False

        elif isinstance(obj, SharedLibrary):
            self._binaries.shared_libraries.append(obj)
            return False

        elif isinstance(obj, SandboxedWasmLibrary):
            # The generated header for the sandboxed library counts as a
            # generated source.
            self._handle_generated_sources(
                [mozpath.join(obj.relobjdir, f"{obj.basename}.h")]
            )
            return False

        elif isinstance(obj, (Sources, HostSources)):
            if obj.generated_files:
                self._handle_generated_sources(obj.generated_files)
            return False

        elif isinstance(obj, GeneratedFile):
            if obj.required_during_compile or obj.required_before_compile:
                for f in itertools.chain(
                    obj.required_before_compile, obj.required_during_compile
                ):
                    # Resolve each output name to its full objdir path.
                    fullpath = ObjDirPath(obj._context, "!" + f).full_path
                    self._handle_generated_sources([fullpath])
            return False

        elif isinstance(obj, Exports):
            # Only exports that live in the objdir are generated sources.
            objdir_files = [
                f.full_path
                for path, files in obj.files.walk()
                for f in files
                if isinstance(f, ObjDirPath)
            ]
            if objdir_files:
                self._handle_generated_sources(objdir_files)
            return False

        else:
            return False

        return True
+
+ def consume_finished(self):
+ if len(self._idl_manager.modules):
+ self._write_rust_xpidl_summary(self._idl_manager)
+ self._handle_idl_manager(self._idl_manager)
+ self._handle_xpidl_sources()
+
+ for config in self._configs:
+ self.backend_input_files.add(config.source)
+
+ # Write out a machine-readable file describing binaries.
+ topobjdir = self.environment.topobjdir
+ with self._write_file(mozpath.join(topobjdir, "binaries.json")) as fh:
+ d = {
+ "shared_libraries": sorted(
+ (s.to_dict() for s in self._binaries.shared_libraries),
+ key=itemgetter("basename"),
+ ),
+ "programs": sorted(
+ (p.to_dict() for p in self._binaries.programs),
+ key=itemgetter("program"),
+ ),
+ }
+ json.dump(d, fh, sort_keys=True, indent=4)
+
+ # Write out a file listing generated sources.
+ with self._write_file(mozpath.join(topobjdir, "generated-sources.json")) as fh:
+ d = {"sources": sorted(self._generated_sources)}
+ json.dump(d, fh, sort_keys=True, indent=4)
+
    def _expand_libs(self, input_bin):
        """Flatten the link graph of input_bin.

        Returns (objs, shared_libs, os_libs, static_libs): object files to
        link directly, shared libraries, system libraries, and static
        libraries kept whole (no_expand_lib).  Traversal order is preserved
        and duplicates are suppressed via the seen_* sets.
        """
        os_libs = []
        shared_libs = []
        static_libs = []
        objs = []

        seen_objs = set()
        seen_libs = set()

        def add_objs(lib):
            # Collect lib's object files, once each, preserving order.
            for o in lib.objs:
                if o in seen_objs:
                    continue

                seen_objs.add(o)
                objs.append(o)

        def expand(lib, recurse_objs, system_libs):
            # Recursively walk lib's dependencies.  recurse_objs stops object
            # collection below a no_expand_lib; system_libs controls whether
            # linked system libraries are gathered on this path.
            if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                if lib.no_expand_lib:
                    # Keep this archive whole rather than expanding its
                    # objects into the parent link.
                    static_libs.append(lib)
                    recurse_objs = False
                elif recurse_objs:
                    add_objs(lib)

                for l in lib.linked_libraries:
                    expand(l, recurse_objs, system_libs)

                if system_libs:
                    for l in lib.linked_system_libs:
                        if l not in seen_libs:
                            seen_libs.add(l)
                            os_libs.append(l)

            elif isinstance(lib, SharedLibrary):
                # Shared libraries terminate recursion: they are linked
                # against, not expanded.
                if lib not in seen_libs:
                    seen_libs.add(lib)
                    shared_libs.append(lib)

        add_objs(input_bin)

        # Only gather system libs from static children when the root itself
        # is not a static-style library.
        system_libs = not isinstance(
            input_bin, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)
        )
        for lib in input_bin.linked_libraries:
            if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                expand(lib, True, system_libs)
            elif isinstance(lib, SharedLibrary):
                if lib not in seen_libs:
                    seen_libs.add(lib)
                    shared_libs.append(lib)

        for lib in input_bin.linked_system_libs:
            if lib not in seen_libs:
                seen_libs.add(lib)
                os_libs.append(lib)

        return (objs, shared_libs, os_libs, static_libs)
+
+ def _make_list_file(self, kind, objdir, objs, name):
+ if not objs:
+ return None
+ if kind == "target":
+ list_style = self.environment.substs.get("EXPAND_LIBS_LIST_STYLE")
+ else:
+ # The host compiler is not necessarily the same kind as the target
+ # compiler, so we can't be sure EXPAND_LIBS_LIST_STYLE is the right
+ # style to use ; however, all compilers support the `list` type, so
+ # use that. That doesn't cause any practical problem because where
+ # it really matters to use something else than `list` is when
+ # linking tons of objects (because of command line argument limits),
+ # which only really happens for libxul.
+ list_style = "list"
+ list_file_path = mozpath.join(objdir, name)
+ objs = [os.path.relpath(o, objdir) for o in objs]
+ if list_style == "linkerscript":
+ ref = list_file_path
+ content = "\n".join('INPUT("%s")' % o for o in objs)
+ elif list_style == "filelist":
+ ref = "-Wl,-filelist," + list_file_path
+ content = "\n".join(objs)
+ elif list_style == "list":
+ ref = "@" + list_file_path
+ content = "\n".join(objs)
+ else:
+ return None
+
+ mkdir(objdir)
+ with self._write_file(list_file_path) as fh:
+ fh.write(content)
+
+ return ref
+
+ def _handle_generated_sources(self, files):
+ self._generated_sources.update(
+ mozpath.relpath(f, self.environment.topobjdir) for f in files
+ )
+
+ def _handle_xpidl_sources(self):
+ bindings_rt_dir = mozpath.join(
+ self.environment.topobjdir, "dist", "xpcrs", "rt"
+ )
+ bindings_bt_dir = mozpath.join(
+ self.environment.topobjdir, "dist", "xpcrs", "bt"
+ )
+ include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")
+
+ self._handle_generated_sources(
+ itertools.chain.from_iterable(
+ (
+ mozpath.join(include_dir, "%s.h" % stem),
+ mozpath.join(bindings_rt_dir, "%s.rs" % stem),
+ mozpath.join(bindings_bt_dir, "%s.rs" % stem),
+ )
+ for stem in self._idl_manager.idl_stems()
+ )
+ )
+
    def _handle_webidl_collection(self, webidls):
        """Process the WebIDL collection: write the file-lists config, then
        drive codegen setup and unified-file generation.

        Order matters: file-lists.json must exist before the WebIDL manager
        is created, since it reads configuration from that file.
        """
        bindings_dir = mozpath.join(self.environment.topobjdir, "dom", "bindings")

        # All inputs: static sources plus non-static ones resolved into the
        # bindings directory.
        all_inputs = set(webidls.all_static_sources())
        for s in webidls.all_non_static_basenames():
            all_inputs.add(mozpath.join(bindings_dir, s))

        generated_events_stems = webidls.generated_events_stems()
        exported_stems = webidls.all_regular_stems()

        # The WebIDL manager reads configuration from a JSON file. So, we
        # need to write this file early.
        o = dict(
            webidls=sorted(all_inputs),
            generated_events_stems=sorted(generated_events_stems),
            exported_stems=sorted(exported_stems),
            example_interfaces=sorted(webidls.example_interfaces),
        )

        file_lists = mozpath.join(bindings_dir, "file-lists.json")
        with self._write_file(file_lists) as fh:
            json.dump(o, fh, sort_keys=True, indent=2)

        import mozwebidlcodegen

        manager = mozwebidlcodegen.create_build_system_manager(
            self.environment.topsrcdir,
            self.environment.topobjdir,
            mozpath.join(self.environment.topobjdir, "dist"),
        )
        # Everything the codegen will produce counts as generated sources.
        self._handle_generated_sources(manager.expected_build_output_files())
        # poison_windows_h guards against unwrapped windows.h in unified
        # WebIDL builds (see _write_unified_file).
        self._write_unified_files(
            webidls.unified_source_mapping, bindings_dir, poison_windows_h=True
        )
        # Let the concrete backend wire up the actual build rules.
        self._handle_webidl_build(
            bindings_dir,
            webidls.unified_source_mapping,
            webidls,
            manager.expected_build_output_files(),
            manager.GLOBAL_DEFINE_FILES,
        )
+
+ def _handle_xpcom_collection(self, manifests):
+ components_dir = mozpath.join(manifests.topobjdir, "xpcom", "components")
+
+ # The code generators read their configuration from this file, so it
+ # needs to be written early.
+ o = dict(manifests=sorted(manifests.all_sources()))
+
+ conf_file = mozpath.join(components_dir, "manifest-lists.json")
+ with self._write_file(conf_file) as fh:
+ json.dump(o, fh, sort_keys=True, indent=2)
+
    def _write_unified_file(
        self, unified_file, source_filenames, output_directory, poison_windows_h=False
    ):
        """Write one unified C++ source that #includes each of source_filenames.

        The file defines MOZ_UNIFIED_BUILD, and after each #include adds
        preprocessor checks that error out when the included file is not
        compatible with unified compilation (uses of
        PL_ARENA_CONST_ALIGN_MASK or INITGUID), plus an optional check for
        an unwrapped windows.h inclusion.
        """
        with self._write_file(mozpath.join(output_directory, unified_file)) as f:
            f.write("#define MOZ_UNIFIED_BUILD\n")
            includeTemplate = '#include "%(cppfile)s"'
            if poison_windows_h:
                # Catch sources that pulled in the real windows.h instead of
                # the wrapped version.
                includeTemplate += (
                    "\n"
                    "#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n"
                    '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n' # noqa
                    '#error "%(cppfile)s included unwrapped windows.h"\n'
                    "#endif"
                )
            # These macros change the meaning of later code in the same
            # translation unit, so files that set them cannot be unified.
            includeTemplate += (
                "\n"
                "#ifdef PL_ARENA_CONST_ALIGN_MASK\n"
                '#error "%(cppfile)s uses PL_ARENA_CONST_ALIGN_MASK, '
                'so it cannot be built in unified mode."\n'
                "#undef PL_ARENA_CONST_ALIGN_MASK\n"
                "#endif\n"
                "#ifdef INITGUID\n"
                '#error "%(cppfile)s defines INITGUID, '
                'so it cannot be built in unified mode."\n'
                "#undef INITGUID\n"
                "#endif"
            )
            f.write(
                "\n".join(includeTemplate % {"cppfile": s} for s in source_filenames)
            )
+
+ def _write_unified_files(
+ self, unified_source_mapping, output_directory, poison_windows_h=False
+ ):
+ for unified_file, source_filenames in unified_source_mapping:
+ self._write_unified_file(
+ unified_file, source_filenames, output_directory, poison_windows_h
+ )
+
+ def localized_path(self, relativesrcdir, filename):
+ """Return the localized path for a file.
+
+ Given ``relativesrcdir``, a path relative to the topsrcdir, return a path to ``filename``
+ from the current locale as specified by ``MOZ_UI_LOCALE``, using ``L10NBASEDIR`` as the
+ parent directory for non-en-US locales.
+ """
+ ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0]
+ l10nbase = mozpath.join(self.environment.substs["L10NBASEDIR"], ab_cd)
+ # Filenames from LOCALIZED_FILES will start with en-US/.
+ if filename.startswith("en-US/"):
+ e, filename = filename.split("en-US/")
+ assert not e
+ if ab_cd == "en-US":
+ return mozpath.join(
+ self.environment.topsrcdir, relativesrcdir, "en-US", filename
+ )
+ if mozpath.basename(relativesrcdir) == "locales":
+ l10nrelsrcdir = mozpath.dirname(relativesrcdir)
+ else:
+ l10nrelsrcdir = relativesrcdir
+ return mozpath.join(l10nbase, l10nrelsrcdir, filename)
+
    def _consume_jar_manifest(self, obj):
        """Expand a jar.mn manifest into regular backend objects.

        Runs the jar manifest through the preprocessor, then re-dispatches
        every entry back into consume_object() as FinalTargetFiles,
        FinalTargetPreprocessedFiles, LocalizedFiles, LocalizedPreprocessedFiles
        and ChromeManifestEntry objects under a synthetic Context.
        """
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        if obj.defines:
            pp.context.update(obj.defines.defines)
        pp.context.update(self.environment.defines)
        ab_cd = obj.config.substs["MOZ_UI_LOCALE"][0]
        pp.context.update(AB_CD=ab_cd)
        pp.out = JarManifestParser()
        try:
            pp.do_include(obj.path.full_path)
        except DeprecatedJarManifest as e:
            raise DeprecatedJarManifest(
                "Parsing error while processing %s: %s" % (obj.path.full_path, e)
            )
        # Everything the preprocessor touched is an input of this backend.
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            # A fresh Context per jar so FINAL_TARGET/DEFINES don't leak
            # between jar sections.
            jar_context = Context(
                allowed_variables=VARIABLES, config=obj._context.config
            )
            jar_context.push_source(obj._context.main_path)
            jar_context.push_source(obj.path.full_path)

            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base)
                )
            jar_context["FINAL_TARGET"] = install_target
            if obj.defines:
                jar_context["DEFINES"] = obj.defines.defines
            files = jar_context["FINAL_TARGET_FILES"]
            files_pp = jar_context["FINAL_TARGET_PP_FILES"]
            localized_files = jar_context["LOCALIZED_FILES"]
            localized_files_pp = jar_context["LOCALIZED_PP_FILES"]

            for e in jarinfo.entries:
                if e.is_locale:
                    if jarinfo.relativesrcdir:
                        src = "/%s" % jarinfo.relativesrcdir
                    else:
                        src = ""
                    src = mozpath.join(src, "en-US", e.source)
                else:
                    src = e.source

                src = Path(jar_context, src)

                # A non-wildcard source that doesn't exist on disk is
                # treated as a generated ("!"-prefixed) path; locale
                # sources must exist.
                if "*" not in e.source and not os.path.exists(src.full_path):
                    if e.is_locale:
                        raise Exception(
                            "%s: Cannot find %s (tried %s)"
                            % (obj.path, e.source, src.full_path)
                        )
                    if e.source.startswith("/"):
                        src = Path(jar_context, "!" + e.source)
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = Path(obj._context, "!" + e.source)

                output_basename = mozpath.basename(e.output)
                if output_basename != src.target_basename:
                    src = RenamedSourcePath(jar_context, (src, output_basename))
                path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

                if e.preprocess:
                    if "*" in e.source:
                        raise Exception(
                            "%s: Wildcards are not supported with "
                            "preprocessing" % obj.path
                        )
                    if e.is_locale:
                        localized_files_pp[path] += [src]
                    else:
                        files_pp[path] += [src]
                else:
                    if e.is_locale:
                        localized_files[path] += [src]
                    else:
                        files[path] += [src]

            if files:
                self.consume_object(FinalTargetFiles(jar_context, files))
            if files_pp:
                self.consume_object(FinalTargetPreprocessedFiles(jar_context, files_pp))
            if localized_files:
                self.consume_object(LocalizedFiles(jar_context, localized_files))
            if localized_files_pp:
                self.consume_object(
                    LocalizedPreprocessedFiles(jar_context, localized_files_pp)
                )

            # "%" in a chrome manifest line expands to the jar's basename.
            for m in jarinfo.chrome_manifests:
                entry = parse_manifest_line(
                    mozpath.dirname(jarinfo.name),
                    m.replace("%", mozpath.basename(jarinfo.name) + "/"),
                )
                self.consume_object(
                    ChromeManifestEntry(
                        jar_context, "%s.manifest" % jarinfo.name, entry
                    )
                )
+
+ def _write_rust_xpidl_summary(self, manager):
+ """Write out a rust file which includes the generated xpcom rust modules"""
+ topobjdir = self.environment.topobjdir
+
+ include_tmpl = 'include!(mozbuild::objdir_path!("dist/xpcrs/%s/%s.rs"))'
+
+ # Ensure deterministic output files.
+ stems = sorted(manager.idl_stems())
+
+ with self._write_file(
+ mozpath.join(topobjdir, "dist", "xpcrs", "rt", "all.rs")
+ ) as fh:
+ fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
+ for stem in stems:
+ fh.write(include_tmpl % ("rt", stem))
+ fh.write(";\n")
+
+ with self._write_file(
+ mozpath.join(topobjdir, "dist", "xpcrs", "bt", "all.rs")
+ ) as fh:
+ fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
+ fh.write("&[\n")
+ for stem in stems:
+ fh.write(include_tmpl % ("bt", stem))
+ fh.write(",\n")
+ fh.write("]\n")
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
new file mode 100644
index 0000000000..eef1b62ee6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -0,0 +1,357 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import sys
+from collections import OrderedDict
+from collections.abc import Iterable
+from pathlib import Path
+from types import ModuleType
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.util import (
+ FileAvoidWrite,
+ ReadOnlyDict,
+ memoized_property,
+ system_encoding,
+)
+
+
class ConfigStatusFailure(Exception):
    """Raised when loading or executing config.status fails."""
+
+
class BuildConfig(object):
    """Represents the output of configure.

    Attributes mirror what config.status exports via ``__all__``
    (typically topsrcdir, topobjdir, defines, substs, ...).
    """

    # Cache of (mtime, compiled code object) keyed by config.status path so
    # re-loading an unchanged file skips recompilation.
    _CODE_CACHE = {}

    def __init__(self):
        self.topsrcdir = None
        self.topobjdir = None
        self.defines = {}
        self.substs = {}
        self.files = []
        self.mozconfig = None

    @classmethod
    def from_config_status(cls, path):
        """Create an instance from a config.status file.

        Raises ConfigStatusFailure (chained to the original error) when
        executing the file fails.
        """
        code_cache = cls._CODE_CACHE
        mtime = os.path.getmtime(path)

        # cache the compiled code as it can be reused
        # we cache it the first time, or if the file changed
        if path not in code_cache or code_cache[path][0] != mtime:
            # Add config.status manually to sys.modules so it gets picked up by
            # iter_modules_in_path() for automatic dependencies.
            mod = ModuleType("config.status")
            mod.__file__ = path
            sys.modules["config.status"] = mod

            with open(path, "rt") as fh:
                source = fh.read()
            code_cache[path] = (
                mtime,
                compile(source, path, "exec", dont_inherit=True),
            )

        global_ns = {"__builtins__": __builtins__, "__file__": path}
        local_ns = {}
        try:
            exec(code_cache[path][1], global_ns, local_ns)
        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise ConfigStatusFailure() from e

        config = BuildConfig()

        # config.status declares its exports via __all__.
        for name in local_ns["__all__"]:
            setattr(config, name, local_ns[name])

        return config
+
+
class ConfigEnvironment(object):
    """Perform actions associated with a configured but bare objdir.

    The purpose of this class is to preprocess files from the source directory
    and output results in the object directory.

    There are two types of files: config files and config headers,
    each treated through a different member function.

    Creating a ConfigEnvironment requires a few arguments:
      - topsrcdir and topobjdir are, respectively, the top source and
        the top object directory.
      - defines is a dict filled from AC_DEFINE and AC_DEFINE_UNQUOTED in autoconf.
      - substs is a dict filled from AC_SUBST in autoconf.

    ConfigEnvironment automatically defines one additional substs variable
    from all the defines:
      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.

    and two other additional subst variables from all the other substs:
      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
        order, for use in autoconf.mk. It includes ACDEFINES.
        Only substs with a VALUE are included, such that the resulting file
        doesn't change when new empty substs are added.
        This results in less invalidation of build dependencies in the case
        of autoconf.mk..
      - ALLEMPTYSUBSTS contains the substs with an empty value, in the form NAME =.

    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
    source directory, in msys format on windows. It is used to derive a
    "srcdir" subst when treating config files. It can either be an absolute
    path or a path relative to the topobjdir.
    """

    def __init__(
        self,
        topsrcdir,
        topobjdir,
        defines=None,
        substs=None,
        source=None,
        mozconfig=None,
    ):

        if not source:
            source = mozpath.join(topobjdir, "config.status")
        self.source = source
        self.defines = ReadOnlyDict(defines or {})
        self.substs = dict(substs or {})
        self.topsrcdir = mozpath.abspath(topsrcdir)
        self.topobjdir = mozpath.abspath(topobjdir)
        self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None
        # Derive the library/binary naming conventions from substs.
        self.lib_prefix = self.substs.get("LIB_PREFIX", "")
        # NOTE(review): lib_suffix is only set when LIB_SUFFIX is present in
        # substs, so it may be missing as an attribute — confirm callers only
        # use it on fully configured builds.
        if "LIB_SUFFIX" in self.substs:
            self.lib_suffix = ".%s" % self.substs["LIB_SUFFIX"]
        self.dll_prefix = self.substs.get("DLL_PREFIX", "")
        self.dll_suffix = self.substs.get("DLL_SUFFIX", "")
        self.host_dll_prefix = self.substs.get("HOST_DLL_PREFIX", "")
        self.host_dll_suffix = self.substs.get("HOST_DLL_SUFFIX", "")
        # Import libraries fall back to the DLL naming when no dedicated
        # import-library suffix is configured.
        if self.substs.get("IMPORT_LIB_SUFFIX"):
            self.import_prefix = self.lib_prefix
            self.import_suffix = ".%s" % self.substs["IMPORT_LIB_SUFFIX"]
        else:
            self.import_prefix = self.dll_prefix
            self.import_suffix = self.dll_suffix
        if self.substs.get("HOST_IMPORT_LIB_SUFFIX"):
            self.host_import_prefix = self.substs.get("HOST_LIB_PREFIX", "")
            self.host_import_suffix = ".%s" % self.substs["HOST_IMPORT_LIB_SUFFIX"]
        else:
            self.host_import_prefix = self.host_dll_prefix
            self.host_import_suffix = self.host_dll_suffix
        self.bin_suffix = self.substs.get("BIN_SUFFIX", "")

        # ACDEFINES: -DNAME=VALUE pairs, sorted, with "$" doubled so make
        # doesn't expand it.
        global_defines = [name for name in self.defines]
        self.substs["ACDEFINES"] = " ".join(
            [
                "-D%s=%s" % (name, shell_quote(self.defines[name]).replace("$", "$$"))
                for name in sorted(global_defines)
            ]
        )

        def serialize(name, obj):
            """Render a subst value as a string for autoconf.mk."""
            if isinstance(obj, six.string_types):
                return obj
            if isinstance(obj, Iterable):
                return " ".join(obj)
            # Fixed: the message was previously never formatted because the
            # arguments were passed to Exception() instead of the % operator.
            raise Exception("Unhandled type %s for %s" % (type(obj), name))

        self.substs["ALLSUBSTS"] = "\n".join(
            sorted(
                [
                    "%s = %s" % (name, serialize(name, self.substs[name]))
                    for name in self.substs
                    if self.substs[name]
                ]
            )
        )
        self.substs["ALLEMPTYSUBSTS"] = "\n".join(
            sorted(["%s =" % name for name in self.substs if not self.substs[name]])
        )

        # Freeze substs so later code can't accidentally mutate the config.
        self.substs = ReadOnlyDict(self.substs)

    @property
    def is_artifact_build(self):
        """Whether this configuration is an artifact (no local compile) build."""
        return self.substs.get("MOZ_ARTIFACT_BUILDS", False)

    @memoized_property
    def acdefines(self):
        """Read-only mapping of all AC_DEFINE values."""
        acdefines = dict((name, self.defines[name]) for name in self.defines)
        return ReadOnlyDict(acdefines)

    @staticmethod
    def from_config_status(path):
        """Build a ConfigEnvironment from an existing config.status file."""
        config = BuildConfig.from_config_status(path)

        return ConfigEnvironment(
            config.topsrcdir, config.topobjdir, config.defines, config.substs, path
        )
+
+
class PartialConfigDict(object):
    """Facilitates mapping the config.statusd defines & substs with dict-like access.

    This allows a buildconfig client to use buildconfig.defines['FOO'] (and
    similar for substs), where the value of FOO is delay-loaded until it is
    needed.
    """

    def __init__(self, config_statusd, typ, environ_override=False):
        # typ is the config.statusd subdirectory ("substs" or "defines");
        # each variable is stored there as one JSON file named after it.
        self._dict = {}
        self._datadir = mozpath.join(config_statusd, typ)
        self._config_track = mozpath.join(self._datadir, "config.track")
        self._files = set()
        self._environ_override = environ_override

    def _load_config_track(self):
        """Return the set of variable filenames recorded in config.track."""
        existing_files = set()
        try:
            with open(self._config_track) as fh:
                existing_files.update(fh.read().splitlines())
        except IOError:
            # Missing track file simply means nothing was written yet.
            pass
        return existing_files

    def _write_file(self, key, value):
        """Write *value* as JSON to the file for *key*; return its filename."""
        filename = mozpath.join(self._datadir, key)
        with FileAvoidWrite(filename) as fh:
            to_write = json.dumps(value, indent=4)
            fh.write(to_write.encode(system_encoding))
        return filename

    def _fill_group(self, values):
        """Write out one file per entry of *values*, retiring stale files."""
        # Clear out any cached values. This is mostly for tests that will check
        # the environment, write out a new set of variables, and then check the
        # environment again. Normally only configure ends up calling this
        # function, and other consumers create their own
        # PartialConfigEnvironments in new python processes.
        self._dict = {}

        existing_files = self._load_config_track()
        existing_files = {Path(f) for f in existing_files}

        new_files = set()
        for k, v in six.iteritems(values):
            new_files.add(Path(self._write_file(k, v)))

        for filename in existing_files - new_files:
            # We can't actually os.remove() here, since make would not see that the
            # file has been removed and that the target needs to be updated. Instead
            # we just overwrite the file with a value of None, which is equivalent
            # to a non-existing file.
            with FileAvoidWrite(filename) as fh:
                json.dump(None, fh)

        with FileAvoidWrite(self._config_track) as fh:
            for f in sorted(new_files):
                fh.write("%s\n" % f)

    def __getitem__(self, key):
        # Environment variables win over stored values, except for a few
        # configure-critical ones.
        if self._environ_override:
            if (key not in ("CPP", "CXXCPP", "SHELL")) and (key in os.environ):
                return os.environ[key]

        if key not in self._dict:
            data = None
            try:
                filename = mozpath.join(self._datadir, key)
                # Track every file read so get_dependencies() can report it.
                self._files.add(filename)
                with open(filename) as f:
                    data = json.load(f)
            except IOError:
                pass
            self._dict[key] = data

        # A stored None marks a retired variable (see _fill_group).
        if self._dict[key] is None:
            raise KeyError("'%s'" % key)
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def get(self, key, default=None):
        """Dict-style get() honoring the None-means-missing convention."""
        return self[key] if key in self else default

    def __contains__(self, key):
        try:
            return self[key] is not None
        except KeyError:
            return False

    def iteritems(self):
        """Yield (name, value) for every variable recorded in config.track."""
        existing_files = self._load_config_track()
        for f in existing_files:
            # The track file contains filenames, and the basename is the
            # variable name.
            var = mozpath.basename(f)
            yield var, self[var]
+
+
class PartialConfigEnvironment(object):
    """Allows access to individual config.status items via config.statusd/* files.

    This class is similar to the full ConfigEnvironment, which uses
    config.status, except this allows access and tracks dependencies to
    individual configure values. It is intended to be used during the build
    process to handle things like GENERATED_FILES, CONFIGURE_DEFINE_FILES, and
    anything else that may need to access specific substs or defines.

    Creating a PartialConfigEnvironment requires only the topobjdir, which is
    needed to distinguish between the top-level environment and the js/src
    environment.

    The PartialConfigEnvironment automatically defines one additional subst variable
    from all the defines:

      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.

    and one additional define from all the defines as a dictionary:

      - ALLDEFINES contains all of the global defines as a dictionary. This is
        intended to be used instead of the defines structure from config.status so
        that scripts can depend directly on its value.
    """

    def __init__(self, topobjdir):
        statusd = mozpath.join(topobjdir, "config.statusd")
        # substs honor environment-variable overrides; defines do not.
        self.substs = PartialConfigDict(statusd, "substs", environ_override=True)
        self.defines = PartialConfigDict(statusd, "defines")
        self.topobjdir = topobjdir

    def write_vars(self, config):
        """Materialize the substs and defines of *config* into config.statusd."""
        substs = config["substs"].copy()
        defines = config["defines"].copy()

        raw_defines = config["defines"]
        # ACDEFINES: sorted -DNAME=VALUE pairs with "$" doubled for make.
        substs["ACDEFINES"] = " ".join(
            "-D%s=%s" % (name, shell_quote(raw_defines[name]).replace("$", "$$"))
            for name in sorted(raw_defines)
        )

        # ALLDEFINES preserves the original (unsorted) define order.
        all_defines = OrderedDict()
        for name in raw_defines:
            all_defines[name] = raw_defines[name]
        defines["ALLDEFINES"] = all_defines

        self.substs._fill_group(substs)
        self.defines._fill_group(defines)

    def get_dependencies(self):
        """Return make $(wildcard ...) deps for every variable file accessed."""
        return [
            "$(wildcard %s)" % path
            for path in self.substs._files | self.defines._files
        ]
diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
new file mode 100644
index 0000000000..413cca3f75
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
@@ -0,0 +1,876 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import glob
+import os
+import shutil
+import subprocess
+from xml.sax.saxutils import quoteattr
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import ComputedFlags
+from .common import CommonBackend
+
+# TODO Have ./mach eclipse generate the workspace and index it:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
+# Open eclipse:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
+
+
+class CppEclipseBackend(CommonBackend):
+ """Backend that generates Cpp Eclipse project files."""
+
+ def __init__(self, environment):
+ if os.name == "nt":
+ raise Exception(
+ "Eclipse is not supported on Windows. "
+ "Consider using Visual Studio instead."
+ )
+ super(CppEclipseBackend, self).__init__(environment)
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ self._args_for_dirs = {}
+ self._project_name = "Gecko"
+ self._workspace_dir = self._get_workspace_path()
+ self._workspace_lang_dir = os.path.join(
+ self._workspace_dir, ".metadata/.plugins/org.eclipse.cdt.core"
+ )
+ self._project_dir = os.path.join(self._workspace_dir, self._project_name)
+ self._overwriting_workspace = os.path.isdir(self._workspace_dir)
+
+ self._macbundle = self.environment.substs["MOZ_MACBUNDLE_NAME"]
+ self._appname = self.environment.substs["MOZ_APP_NAME"]
+ self._bin_suffix = self.environment.substs["BIN_SUFFIX"]
+ self._cxx = self.environment.substs["CXX"]
+ # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
+ self._cppflags = self.environment.substs.get("CPPFLAGS", "")
+
+ def summary(self):
+ return ExecutionSummary(
+ "CppEclipse backend executed in {execution_time:.2f}s\n"
+ 'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
+ "If missing, import the project using File > Import > General > Existing Project into workspace\n"
+ "\n"
+ "Run with: eclipse -data {workspace:s}\n",
+ execution_time=self._execution_time,
+ workspace=self._workspace_dir,
+ )
+
+ def _get_workspace_path(self):
+ return CppEclipseBackend.get_workspace_path(
+ self.environment.topsrcdir, self.environment.topobjdir
+ )
+
+ @staticmethod
+ def get_workspace_path(topsrcdir, topobjdir):
+ # Eclipse doesn't support having the workspace inside the srcdir.
+ # Since most people have their objdir inside their srcdir it's easier
+ # and more consistent to just put the workspace along side the srcdir
+ srcdir_parent = os.path.dirname(topsrcdir)
+ workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
+ return os.path.join(srcdir_parent, workspace_dirname)
+
+ def consume_object(self, obj):
+ reldir = getattr(obj, "relsrcdir", None)
+
+ # Note that unlike VS, Eclipse' indexer seem to crawl the headers and
+ # isn't picky about the local includes.
+ if isinstance(obj, ComputedFlags):
+ args = self._args_for_dirs.setdefault(
+ "tree/" + reldir, {"includes": [], "defines": []}
+ )
+ # use the same args for any objdirs we include:
+ if reldir == "dom/bindings":
+ self._args_for_dirs.setdefault("generated-webidl", args)
+ if reldir == "ipc/ipdl":
+ self._args_for_dirs.setdefault("generated-ipdl", args)
+
+ includes = args["includes"]
+ if "BASE_INCLUDES" in obj.flags and obj.flags["BASE_INCLUDES"]:
+ includes += obj.flags["BASE_INCLUDES"]
+ if "LOCAL_INCLUDES" in obj.flags and obj.flags["LOCAL_INCLUDES"]:
+ includes += obj.flags["LOCAL_INCLUDES"]
+
+ defs = args["defines"]
+ if "DEFINES" in obj.flags and obj.flags["DEFINES"]:
+ defs += obj.flags["DEFINES"]
+ if "LIBRARY_DEFINES" in obj.flags and obj.flags["LIBRARY_DEFINES"]:
+ defs += obj.flags["LIBRARY_DEFINES"]
+
+ return True
+
+ def consume_finished(self):
+ settings_dir = os.path.join(self._project_dir, ".settings")
+ launch_dir = os.path.join(self._project_dir, "RunConfigurations")
+ workspace_settings_dir = os.path.join(
+ self._workspace_dir, ".metadata/.plugins/org.eclipse.core.runtime/.settings"
+ )
+
+ for dir_name in [
+ self._project_dir,
+ settings_dir,
+ launch_dir,
+ workspace_settings_dir,
+ self._workspace_lang_dir,
+ ]:
+ try:
+ os.makedirs(dir_name)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ project_path = os.path.join(self._project_dir, ".project")
+ with open(project_path, "w") as fh:
+ self._write_project(fh)
+
+ cproject_path = os.path.join(self._project_dir, ".cproject")
+ with open(cproject_path, "w") as fh:
+ self._write_cproject(fh)
+
+ language_path = os.path.join(settings_dir, "language.settings.xml")
+ with open(language_path, "w") as fh:
+ self._write_language_settings(fh)
+
+ workspace_language_path = os.path.join(
+ self._workspace_lang_dir, "language.settings.xml"
+ )
+ with open(workspace_language_path, "w") as fh:
+ workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
+ workspace_lang_settings = workspace_lang_settings.replace(
+ "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags
+ )
+ fh.write(workspace_lang_settings)
+
+ self._write_launch_files(launch_dir)
+
+ core_resources_prefs_path = os.path.join(
+ workspace_settings_dir, "org.eclipse.core.resources.prefs"
+ )
+ with open(core_resources_prefs_path, "w") as fh:
+ fh.write(STATIC_CORE_RESOURCES_PREFS)
+
+ core_runtime_prefs_path = os.path.join(
+ workspace_settings_dir, "org.eclipse.core.runtime.prefs"
+ )
+ with open(core_runtime_prefs_path, "w") as fh:
+ fh.write(STATIC_CORE_RUNTIME_PREFS)
+
+ ui_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.prefs")
+ with open(ui_prefs_path, "w") as fh:
+ fh.write(STATIC_UI_PREFS)
+
+ cdt_ui_prefs_path = os.path.join(
+ workspace_settings_dir, "org.eclipse.cdt.ui.prefs"
+ )
+ cdt_ui_prefs = STATIC_CDT_UI_PREFS
+ # Here we generate the code formatter that will show up in the UI with
+ # the name "Mozilla". The formatter is stored as a single line of XML
+ # in the org.eclipse.cdt.ui.formatterprofiles pref.
+ cdt_ui_prefs += """org.eclipse.cdt.ui.formatterprofiles=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?>\\n<profiles version\="1">\\n<profile kind\="CodeFormatterProfile" name\="Mozilla" version\="1">\\n"""
+ XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
+ for line in FORMATTER_SETTINGS.splitlines():
+ [pref, val] = line.split("=")
+ cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace(
+ "@PREF_VAL@", val
+ )
+ cdt_ui_prefs += "</profile>\\n</profiles>\\n"
+ with open(cdt_ui_prefs_path, "w") as fh:
+ fh.write(cdt_ui_prefs)
+
+ cdt_core_prefs_path = os.path.join(
+ workspace_settings_dir, "org.eclipse.cdt.core.prefs"
+ )
+ with open(cdt_core_prefs_path, "w") as fh:
+ cdt_core_prefs = STATIC_CDT_CORE_PREFS
+ # When we generated the code formatter called "Mozilla" above, we
+ # also set it to be the active formatter. When a formatter is set
+ # as the active formatter all its prefs are set in this prefs file,
+ # so we need add those now:
+ cdt_core_prefs += FORMATTER_SETTINGS
+ fh.write(cdt_core_prefs)
+
+ editor_prefs_path = os.path.join(
+ workspace_settings_dir, "org.eclipse.ui.editors.prefs"
+ )
+ with open(editor_prefs_path, "w") as fh:
+ fh.write(EDITOR_SETTINGS)
+
+ # Now import the project into the workspace
+ self._import_project()
+
+ def _import_project(self):
+ # If the workspace already exists then don't import the project again because
+ # eclipse doesn't handle this properly
+ if self._overwriting_workspace:
+ return
+
+ # We disable the indexer otherwise we're forced to index
+ # the whole codebase when importing the project. Indexing the project can take 20 minutes.
+ self._write_noindex()
+
+ try:
+ subprocess.check_call(
+ [
+ "eclipse",
+ "-application",
+ "-nosplash",
+ "org.eclipse.cdt.managedbuilder.core.headlessbuild",
+ "-data",
+ self._workspace_dir,
+ "-importAll",
+ self._project_dir,
+ ]
+ )
+ except OSError as e:
+ # Remove the workspace directory so we re-generate it and
+ # try to import again when the backend is invoked again.
+ shutil.rmtree(self._workspace_dir)
+
+ if e.errno == errno.ENOENT:
+ raise Exception(
+ "Failed to launch eclipse to import project. "
+ "Ensure 'eclipse' is in your PATH and try again"
+ )
+ else:
+ raise
+ finally:
+ self._remove_noindex()
+
+ def _write_noindex(self):
+ noindex_path = os.path.join(
+ self._project_dir, ".settings/org.eclipse.cdt.core.prefs"
+ )
+ with open(noindex_path, "w") as fh:
+ fh.write(NOINDEX_TEMPLATE)
+
+ def _remove_noindex(self):
+ # Below we remove the config file that temporarily disabled the indexer
+ # while we were importing the project. Unfortunately, CDT doesn't
+ # notice indexer settings changes in config files when it restarts. To
+ # work around that we remove the index database here to force it to:
+ for f in glob.glob(os.path.join(self._workspace_lang_dir, "Gecko.*.pdom")):
+ os.remove(f)
+
+ noindex_path = os.path.join(
+ self._project_dir, ".settings/org.eclipse.cdt.core.prefs"
+ )
+ # This may fail if the entire tree has been removed; that's fine.
+ try:
+ os.remove(noindex_path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ def _write_language_settings(self, fh):
+ def add_abs_include_path(absinclude):
+ assert absinclude[:3] == "-I/"
+ return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace(
+ "@INCLUDE_PATH@", absinclude[2:]
+ )
+
+ def add_objdir_include_path(relpath):
+ p = os.path.join(self.environment.topobjdir, relpath)
+ return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace("@INCLUDE_PATH@", p)
+
+ def add_define(name, value):
+ define = LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE
+ define = define.replace("@NAME@", name)
+ # We use quoteattr here because some defines contain characters
+ # such as "<" and '"' which need proper XML escaping.
+ define = define.replace("@VALUE@", quoteattr(value))
+ return define
+
+ fh.write(LANGUAGE_SETTINGS_TEMPLATE_HEADER)
+
+ # Unfortunately, whenever we set a user defined include path or define
+ # on a directory, Eclipse ignores user defined include paths and defines
+ # on ancestor directories. That means that we need to add all the
+ # common include paths and defines to every single directory entry that
+ # we add settings for. (Fortunately that doesn't appear to have a
+ # noticeable impact on the time it takes to open the generated Eclipse
+ # project.) We do that by generating a template here that we can then
+ # use for each individual directory in the loop below.
+ #
+ dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER
+
+ # Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
+ dirsettings_template = dirsettings_template.replace(
+ "@PREINCLUDE_FILE_PATH@",
+ os.path.join(self.environment.topobjdir, "dist/include/mozilla-config.h"),
+ )
+ dirsettings_template += add_define("MOZILLA_CLIENT", "1")
+
+ # Add EXTRA_INCLUDES args:
+ dirsettings_template += add_objdir_include_path("dist/include")
+
+ # Add OS_INCLUDES args:
+ # XXX media/webrtc/trunk/webrtc's moz.builds reset this.
+ dirsettings_template += add_objdir_include_path("dist/include/nspr")
+ dirsettings_template += add_objdir_include_path("dist/include/nss")
+
+ # Finally, add anything else that makes things work better.
+ #
+ # Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once
+ # we set MOZILLA_INTERNAL_API for all directories to make sure
+ # headers are indexed with MOZILLA_INTERNAL_API set. Unfortunately
+ # this means that MOZILLA_EXTERNAL_API code will suffer.
+ #
+ # TODO: If we're doing this for MOZILLA_EXTERNAL_API then we may want
+ # to do it for other LIBRARY_DEFINES's defines too. Well, at least for
+ # STATIC_EXPORTABLE_JS_API which may be important to JS people.
+ # (The other two LIBRARY_DEFINES defines -- MOZ_HAS_MOZGLUE and
+ # IMPL_LIBXUL -- don't affect much and probably don't matter to anyone).
+ #
+ # TODO: Should we also always set DEBUG so that DEBUG code is always
+ # indexed? Or is there significant amounts of non-DEBUG code that
+ # would be adversely affected?
+ #
+ # TODO: Investigate whether the ordering of directories in the project
+ # file can be used to our advantage so that the first indexing of
+ # important headers has the defines we want.
+ #
+ dirsettings_template += add_objdir_include_path("ipc/ipdl/_ipdlheaders")
+ dirsettings_template += add_define("MOZILLA_INTERNAL_API", "1")
+
+ for path, args in self._args_for_dirs.items():
+ dirsettings = dirsettings_template
+ dirsettings = dirsettings.replace("@RELATIVE_PATH@", path)
+ for i in args["includes"]:
+ dirsettings += add_abs_include_path(i)
+ for d in args["defines"]:
+ assert d[:2] == u"-D" or d[:2] == u"-U"
+ if d[:2] == u"-U":
+ # gfx/harfbuzz/src uses -UDEBUG, at least on Mac
+ # netwerk/sctp/src uses -U__APPLE__ on Mac
+ # XXX We should make this code smart enough to remove existing defines.
+ continue
+ d = d[2:] # get rid of leading "-D"
+ name_value = d.split("=", 1)
+ name = name_value[0]
+ value = ""
+ if len(name_value) == 2:
+ value = name_value[1]
+ dirsettings += add_define(name, str(value))
+ dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
+ fh.write(dirsettings)
+
+ fh.write(
+ LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
+ "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags
+ )
+ )
+
+ def _write_launch_files(self, launch_dir):
+ bin_dir = os.path.join(self.environment.topobjdir, "dist")
+
+ # TODO Improve binary detection
+ if self._macbundle:
+ exe_path = os.path.join(bin_dir, self._macbundle, "Contents/MacOS")
+ else:
+ exe_path = os.path.join(bin_dir, "bin")
+
+ exe_path = os.path.join(exe_path, self._appname + self._bin_suffix)
+
+ main_gecko_launch = os.path.join(launch_dir, "gecko.launch")
+ with open(main_gecko_launch, "w") as fh:
+ launch = GECKO_LAUNCH_CONFIG_TEMPLATE
+ launch = launch.replace("@LAUNCH_PROGRAM@", exe_path)
+ launch = launch.replace("@LAUNCH_ARGS@", "-P -no-remote")
+ fh.write(launch)
+
+ # TODO Add more launch configs (and delegate calls to mach)
+
+ def _write_project(self, fh):
+ project = PROJECT_TEMPLATE
+
+ project = project.replace("@PROJECT_NAME@", self._project_name)
+ project = project.replace("@PROJECT_TOPSRCDIR@", self.environment.topsrcdir)
+ project = project.replace(
+ "@GENERATED_IPDL_FILES@",
+ os.path.join(self.environment.topobjdir, "ipc", "ipdl"),
+ )
+ project = project.replace(
+ "@GENERATED_WEBIDL_FILES@",
+ os.path.join(self.environment.topobjdir, "dom", "bindings"),
+ )
+ fh.write(project)
+
    def _write_cproject(self, fh):
        """Write the Eclipse .cproject file (CDT build configuration)."""
        cproject_header = CPROJECT_TEMPLATE_HEADER
        # NOTE(review): the placeholder is named @PROJECT_TOPSRCDIR@ but is
        # filled with the *object* directory; in the template it becomes the
        # builder's buildPath, which plausibly should be the objdir -- confirm
        # intent and consider renaming the placeholder.
        cproject_header = cproject_header.replace(
            "@PROJECT_TOPSRCDIR@", self.environment.topobjdir
        )
        cproject_header = cproject_header.replace(
            "@MACH_COMMAND@", os.path.join(self.environment.topsrcdir, "mach")
        )
        fh.write(cproject_header)
        # No per-file settings are written between header and footer here.
        fh.write(CPROJECT_TEMPLATE_FOOTER)
+
+
# Template for the Eclipse ".project" file: declares the project name and CDT
# natures, links the source tree and generated IPDL/WebIDL directories into
# the workspace, and filters out objdirs, VCS metadata and editor leftovers.
PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
 <name>@PROJECT_NAME@</name>
 <comment></comment>
 <projects>
 </projects>
 <buildSpec>
 <buildCommand>
 <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
 <triggers>clean,full,incremental,</triggers>
 <arguments>
 </arguments>
 </buildCommand>
 <buildCommand>
 <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
 <triggers></triggers>
 <arguments>
 </arguments>
 </buildCommand>
 </buildSpec>
 <natures>
 <nature>org.eclipse.cdt.core.cnature</nature>
 <nature>org.eclipse.cdt.core.ccnature</nature>
 <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
 <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
 </natures>
 <linkedResources>
 <link>
 <name>tree</name>
 <type>2</type>
 <location>@PROJECT_TOPSRCDIR@</location>
 </link>
 <link>
 <name>generated-ipdl</name>
 <type>2</type>
 <location>@GENERATED_IPDL_FILES@</location>
 </link>
 <link>
 <name>generated-webidl</name>
 <type>2</type>
 <location>@GENERATED_WEBIDL_FILES@</location>
 </link>
 </linkedResources>
 <filteredResources>
 <filter>
 <id>17111971</id>
 <name>tree</name>
 <type>30</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-obj-*</arguments>
 </matcher>
 </filter>
 <filter>
 <id>14081994</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.rej</arguments>
 </matcher>
 </filter>
 <filter>
 <id>25121970</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.orig</arguments>
 </matcher>
 </filter>
 <filter>
 <id>10102004</id>
 <name>tree</name>
 <type>10</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-.hg</arguments>
 </matcher>
 </filter>
 <filter>
 <id>23122002</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.pyc</arguments>
 </matcher>
 </filter>
 </filteredResources>
</projectDescription>
"""
+
# Opening portion of the ".cproject" file: one default configuration with a
# Cross GCC toolchain whose builder invokes "@MACH_COMMAND@ --log-no-times
# build" from "@PROJECT_TOPSRCDIR@" (filled with the objdir by
# _write_cproject).
CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?fileVersion 4.0.0?>

<cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
 <storageModule moduleId="org.eclipse.cdt.core.settings">
 <cconfiguration id="0.1674256904">
 <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1674256904" moduleId="org.eclipse.cdt.core.settings" name="Default">
 <externalSettings/>
 <extensions>
 <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 </extensions>
 </storageModule>
 <storageModule moduleId="cdtBuildSystem" version="4.0.0">
 <configuration artifactName="${ProjName}" buildProperties="" description="" id="0.1674256904" name="Default" parent="org.eclipse.cdt.build.core.prefbase.cfg">
 <folderInfo id="0.1674256904." name="/" resourcePath="">
 <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
 <targetPlatform archList="all" binaryParser="" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
 <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
 </toolChain>
 </folderInfo>
"""
# Example of a per-file CDT settings override (defines for a single source
# file).  NOTE(review): not referenced by the _write_cproject code visible
# here -- confirm it is still used before removing.
CPROJECT_TEMPLATE_FILEINFO = """ <fileInfo id="0.1674256904.474736658" name="Layers.cpp" rcbsApplicability="disable" resourcePath="tree/gfx/layers/Layers.cpp" toolsToInvoke="org.eclipse.cdt.build.core.settings.holder.582514939.463639939">
 <tool id="org.eclipse.cdt.build.core.settings.holder.582514939.463639939" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
 <option id="org.eclipse.cdt.build.core.settings.holder.symbols.232300236" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
 <listOptionValue builtIn="false" value="BENWA=BENWAVAL"/>
 </option>
 <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.1942876228" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
 </tool>
 </fileInfo>
"""
# Closing portion of the ".cproject" file.  The sourceEntries "excluding"
# list keeps the CDT indexer away from vendored/third-party directories.
CPROJECT_TEMPLATE_FOOTER = """
 <sourceEntries>
 <entry excluding="**/lib*|**/third_party/|tree/*.xcodeproj/|tree/.cargo/|tree/.vscode/|tree/build/|tree/extensions/|tree/gfx/angle/|tree/gfx/cairo/|tree/gfx/skia/skia/|tree/intl/icu/|tree/js/|tree/media/|tree/modules/freetype2|tree/modules/pdfium/|tree/netwerk/|tree/netwerk/sctp|tree/netwerk/srtp|tree/nsprpub/lib|tree/nsprpub/pr/src|tree/other-licenses/|tree/parser/|tree/python/|tree/security/nss/|tree/tools/" flags="VALUE_WORKSPACE_PATH" kind="sourcePath" name=""/>
 </sourceEntries>
 </configuration>
 </storageModule>
 <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
 </cconfiguration>
 </storageModule>
 <storageModule moduleId="cdtBuildSystem" version="4.0.0">
 <project id="Empty.null.1281234804" name="Empty"/>
 </storageModule>
 <storageModule moduleId="scannerConfiguration">
 <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
 <scannerConfigBuildInfo instanceId="0.1674256904">
 <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
 </scannerConfigBuildInfo>
 </storageModule>
 <storageModule moduleId="refreshScope" versionNumber="2">
 <configuration configurationName="Default"/>
 </storageModule>
 <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
</cproject>
"""
+
# Workspace-level CDT language-settings provider: runs the compiler
# (@COMPILER_FLAGS@) with -E -P -v -dD to discover built-in defines and
# include paths for C and C++.
WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<plugin>
 <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
 <provider class="org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector" console="true" id="org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot;">
 <language-scope id="org.eclipse.cdt.core.gcc"/>
 <language-scope id="org.eclipse.cdt.core.g++"/>
 </provider>
 </extension>
</plugin>
"""
+
+
+# The settings set via this template can be found in the UI by opening
+# the Properties for a directory in the Project Explorer tab, then going to
+# C/C++ General > Preprocessor Include Paths, Macros, etc., selecting the
+# C++ item from the Languages column, and then expanding the
+# CDT User Settings Entries item to the right.
+
# Project language-settings document: opens the C++ language section; one
# per-directory <resource> block (DIR_HEADER + INCLUDE/DEFINE entries +
# DIR_FOOTER) is appended for each source directory.
LANGUAGE_SETTINGS_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project>
 <configuration id="0.1674256904" name="Default">
 <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
 <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
 <language id="org.eclipse.cdt.core.g++">
"""

# Opens a per-directory <resource> block and records its preinclude file.
LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ <resource project-relative-path="@RELATIVE_PATH@">
 <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
 <flag value="LOCAL"/>
 </entry>
"""

# One include-path entry within a directory's <resource> block.
LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ <entry kind="includePath" name="@INCLUDE_PATH@">
 <flag value="LOCAL"/>
 </entry>
"""

# One preprocessor-macro entry within a directory's <resource> block.
LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ <entry kind="macro" name="@NAME@" value=@VALUE@/>
"""

# Closes a per-directory <resource> block.
LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ </resource>
"""
+
# Closes the language-settings document and adds a Cross GCC built-in specs
# detector run with @COMPILER_FLAGS@ (note the forced -std=c++11 here).
LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ </language>
 </provider>
 <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
 <language-scope id="org.eclipse.cdt.core.gcc"/>
 <language-scope id="org.eclipse.cdt.core.g++"/>
 </provider>
 <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
 </extension>
 </configuration>
</project>
"""
+
+
# Eclipse CDT launch configuration for running/debugging the built
# application (@LAUNCH_PROGRAM@ / @LAUNCH_ARGS@ are filled by
# _write_launch_files).  Note DEBUG_NAME is "lldb" while DEBUGGER_ID is
# "gdb" -- presumably the gdb machinery drives lldb; confirm on non-Mac.
GECKO_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_ARGUMENTS" value="@LAUNCH_ARGS@"/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
<listEntry value="/gecko"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
<listEntry value="4"/>
</listAttribute>
<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
</launchConfiguration>
"""
+
+
# Eclipse text-editor preferences (2-space indent, 80-column margin,
# spaces for tabs) matching Mozilla C++ style.
EDITOR_SETTINGS = """eclipse.preferences.version=1
lineNumberRuler=true
overviewRuler_migration=migrated_3.1
printMargin=true
printMarginColumn=80
showCarriageReturn=false
showEnclosedSpaces=false
showLeadingSpaces=false
showLineFeed=false
showWhitespaceCharacters=true
spacesForTabs=true
tabWidth=2
undoHistorySize=200
"""


# Workspace resources preference: auto-refresh files changed outside Eclipse.
STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
refresh.enabled=true
"""

# Map Mozilla-specific file extensions onto Eclipse content types.
STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
content-types/org.eclipse.core.runtime.xml/file-extensions=xul
content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
"""

# Skip the Eclipse welcome/intro screen.
STATIC_UI_PREFS = """eclipse.preferences.version=1
showIntro=false
"""

# CDT core preference: indexer update policy 0.
STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
indexer.updatePolicy=0
"""
+
# CDT C/C++ formatter profile ("_Mozilla"): 2-space indentation, 80-column
# line split, end-of-line braces for blocks with next-line braces for
# methods/types, and detailed whitespace rules.
# NOTE(review): "keep_imple_if_on_one_line" below looks like a typo for the
# CDT key "keep_simple_if_on_one_line", but it is preference data consumed by
# Eclipse -- verify against the CDT key list before changing it.
FORMATTER_SETTINGS = """org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
org.eclipse.cdt.core.formatter.alignment_for_assignment=16
org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34
org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18
org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48
org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16
org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48
org.eclipse.cdt.core.formatter.alignment_for_expression_list=0
org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16
org.eclipse.cdt.core.formatter.alignment_for_member_access=0
org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16
org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16
org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line
org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line
org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted
org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line
org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line
org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line
org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line
org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1
org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true
org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true
org.eclipse.cdt.core.formatter.compact_else_if=true
org.eclipse.cdt.core.formatter.continuation_indentation=2
org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2
org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false
org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false
org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0
org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true
org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false
org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true
org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true
org.eclipse.cdt.core.formatter.indent_empty_lines=false
org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true
org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true
org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true
org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false
org.eclipse.cdt.core.formatter.indentation.size=2
org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert
org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert
org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert
org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert
org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert
org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert
org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert
org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert
org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert
org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert
org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert
org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert
org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert
org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert
org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert
org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert
org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert
org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert
org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert
org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert
org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert
org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert
org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
org.eclipse.cdt.core.formatter.join_wrapped_lines=false
org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false
org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false
org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false
org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false
org.eclipse.cdt.core.formatter.lineSplit=80
org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
org.eclipse.cdt.core.formatter.tabulation.char=space
org.eclipse.cdt.core.formatter.tabulation.size=2
org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
"""
+
# CDT UI preferences: select the _Mozilla formatter profile, trim trailing
# whitespace, and tune console/occurrence-marking behaviour.
STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
buildConsoleLines=10000
Console.limitConsoleOutput=false
ensureNewlineAtEOF=false
formatter_profile=_Mozilla
formatter_settings_version=1
org.eclipse.cdt.ui.formatterprofiles.version=1
removeTrailingWhitespace=true
removeTrailingWhitespaceEditedLines=true
scalability.numberOfLines=15000
markOccurrences=true
markOverloadedOperatorsOccurrences=true
stickyOccurrences=false
"""

# Per-directory preference that disables the CDT indexer (null indexer).
NOINDEX_TEMPLATE = """eclipse.preferences.version=1
indexer/indexerId=org.eclipse.cdt.core.nullIndexer
"""
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
new file mode 100644
index 0000000000..324db29866
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/fastermake.py
@@ -0,0 +1,300 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.manifests import InstallManifest
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.backend.make import MakeBackend
+from mozbuild.frontend.context import ObjDirPath, Path
+from mozbuild.frontend.data import (
+ ChromeManifestEntry,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ JARManifest,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ XPIDLModule,
+)
+from mozbuild.makeutil import Makefile
+from mozbuild.util import OrderedDefaultDict
+
+
class FasterMakeBackend(MakeBackend, PartialBackend):
    """Partial make backend generating the "faster" build system.

    Consumes frontend objects targeting dist/bin (JAR manifests, final
    target files, chrome manifest entries, generated files) and writes
    per-target install manifests plus a standalone Makefile under
    $objdir/faster.
    """

    def _init(self):
        super(FasterMakeBackend, self)._init()

        # chrome manifest path -> set of manifest entry strings.
        self._manifest_entries = OrderedDefaultDict(set)

        # install target (e.g. "dist/bin") -> InstallManifest.
        self._install_manifests = OrderedDefaultDict(InstallManifest)

        # make target -> list of objdir-relative dependency paths.
        self._dependencies = OrderedDefaultDict(list)
        # make target -> list of (merge_output, ref_file, l10n_file) tuples.
        self._l10n_dependencies = OrderedDefaultDict(list)

        self._has_xpidl = False

        # Maps each non-first output of a GeneratedFile to its first output,
        # so installs depend on a single file per generation command.
        self._generated_files_map = {}
        self._generated_files = []

    def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
        """Register a preprocessed install of srcdir file *path* into *dest*.

        *target* defaults to the basename of *path*; a trailing ".in" is
        stripped and .css files get a "%" marker, matching PP_TARGETS.
        """
        if target is None:
            target = mozpath.basename(path)
        # This matches what PP_TARGETS do in config/rules.
        if target.endswith(".in"):
            target = target[:-3]
        if target.endswith(".css"):
            kwargs["marker"] = "%"
        # Flatten the destination path into a unique depfile name under
        # $objdir/faster/.deps.
        depfile = mozpath.join(
            self.environment.topobjdir,
            "faster",
            ".deps",
            mozpath.join(obj.install_target, dest, target).replace("/", "_"),
        )
        self._install_manifests[obj.install_target].add_preprocess(
            mozpath.join(obj.srcdir, path),
            mozpath.join(dest, target),
            depfile,
            **kwargs
        )

    def consume_object(self, obj):
        """Accumulate state for a frontend object.

        Returns True when the object was handled here; False lets other
        backends process it (GeneratedFile/XPIDLModule are recorded but
        intentionally passed on).
        """
        if isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"):
            self._consume_jar_manifest(obj)

        elif isinstance(
            obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)
        ) and obj.install_target.startswith("dist/bin"):
            # MOZ_UI_LOCALE is a list; use the first entry — presumably the
            # primary locale. TODO confirm.
            ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0]
            localized = isinstance(obj, (LocalizedFiles, LocalizedPreprocessedFiles))
            defines = obj.defines or {}
            if defines:
                defines = defines.defines
            for path, files in obj.files.walk():
                for f in files:
                    # For localized files we need to find the file from the locale directory.
                    if localized and not isinstance(f, ObjDirPath) and ab_cd != "en-US":
                        src = self.localized_path(obj.relsrcdir, f)

                        dep_target = "install-%s" % obj.install_target

                        # Non-wildcard localized files go through l10n-merge:
                        # install the merged output instead of the raw source.
                        if "*" not in src:
                            merge = mozpath.abspath(
                                mozpath.join(
                                    self.environment.topobjdir,
                                    "l10n_merge",
                                    obj.relsrcdir,
                                    f,
                                )
                            )
                            self._l10n_dependencies[dep_target].append(
                                (merge, f.full_path, src)
                            )
                            src = merge
                    else:
                        src = f.full_path

                    if isinstance(obj, FinalTargetPreprocessedFiles):
                        self._add_preprocess(
                            obj, src, path, target=f.target_basename, defines=defines
                        )
                    elif "*" in f:

                        def _prefix(s):
                            # Yield the leading path components up to the
                            # first one containing a wildcard.
                            for p in mozpath.split(s):
                                if "*" not in p:
                                    yield p + "/"

                        prefix = "".join(_prefix(src))

                        if "*" in f.target_basename:
                            target = path
                        else:
                            target = mozpath.join(path, f.target_basename)
                        self._install_manifests[obj.install_target].add_pattern_link(
                            prefix, src[len(prefix) :], target
                        )
                    else:
                        self._install_manifests[obj.install_target].add_link(
                            src, mozpath.join(path, f.target_basename)
                        )
                    if isinstance(f, ObjDirPath):
                        dep_target = "install-%s" % obj.install_target
                        dep = mozpath.relpath(f.full_path, self.environment.topobjdir)
                        if dep in self._generated_files_map:
                            # Only the first output file is specified as a
                            # dependency. If there are multiple output files
                            # from a single GENERATED_FILES invocation that are
                            # installed, we only want to run the command once.
                            dep = self._generated_files_map[dep]
                        self._dependencies[dep_target].append(dep)

        elif isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith(
            "dist/bin"
        ):
            # Non-top-level manifests are referenced from the top-level
            # chrome.manifest via a "manifest" entry.
            top_level = mozpath.join(obj.install_target, "chrome.manifest")
            if obj.path != top_level:
                entry = "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
                self._manifest_entries[top_level].add(entry)
            self._manifest_entries[obj.path].add(str(obj.entry))

        elif isinstance(obj, GeneratedFile):
            if obj.outputs:
                first_output = mozpath.relpath(
                    mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir
                )
                for o in obj.outputs[1:]:
                    fullpath = mozpath.join(obj.objdir, o)
                    self._generated_files_map[
                        mozpath.relpath(fullpath, self.environment.topobjdir)
                    ] = first_output
            self._generated_files.append(obj)
            # Return False so other backends also see this object.
            return False

        elif isinstance(obj, XPIDLModule):
            self._has_xpidl = True
            # We're not actually handling XPIDL files.
            return False

        else:
            return False

        return True

    def consume_finished(self):
        """Write $objdir/faster/Makefile and all install manifests."""
        mk = Makefile()
        # Add the default rule at the very beginning.
        mk.create_rule(["default"])
        mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir)
        mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir)
        mk.add_statement("MDDEPDIR = .deps")
        mk.add_statement("TOUCH ?= touch")
        mk.add_statement("include $(TOPSRCDIR)/config/makefiles/functions.mk")
        mk.add_statement("include $(TOPSRCDIR)/config/AB_rCD.mk")
        mk.add_statement("AB_CD = en-US")
        if not self._has_xpidl:
            mk.add_statement("NO_XPIDL = 1")

        # Add a few necessary variables inherited from configure
        for var in (
            "PYTHON3",
            "ACDEFINES",
            "MOZ_BUILD_APP",
            "MOZ_WIDGET_TOOLKIT",
        ):
            value = self.environment.substs.get(var)
            if value is not None:
                mk.add_statement("%s = %s" % (var, value))

        install_manifests_bases = self._install_manifests.keys()

        # Add information for chrome manifest generation
        # NOTE(review): manifest_targets is populated but never read in this
        # method's visible scope.
        manifest_targets = []

        for target, entries in six.iteritems(self._manifest_entries):
            manifest_targets.append(target)
            install_target = mozpath.basedir(target, install_manifests_bases)
            self._install_manifests[install_target].add_content(
                "".join("%s\n" % e for e in sorted(entries)),
                mozpath.relpath(target, install_target),
            )

        # Add information for install manifests.
        mk.add_statement(
            "INSTALL_MANIFESTS = %s" % " ".join(sorted(self._install_manifests.keys()))
        )

        # Add dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._dependencies)):
            mk.create_rule([target]).add_dependencies(
                "$(TOPOBJDIR)/%s" % d for d in sorted(deps)
            )

        # This is not great, but it's better to have some dependencies on these Python files.
        python_deps = [
            "$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py",
            "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py",
            "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py",
        ]
        # Add l10n dependencies we inferred:
        for target, deps in sorted(six.iteritems(self._l10n_dependencies)):
            mk.create_rule([target]).add_dependencies(
                "%s" % d[0] for d in sorted(deps, key=itemgetter(0))
            )
            for (merge, ref_file, l10n_file) in deps:
                rule = mk.create_rule([merge]).add_dependencies(
                    [ref_file, l10n_file] + python_deps
                )
                rule.add_commands(
                    [
                        "$(PYTHON3) -m mozbuild.action.l10n_merge "
                        "--output {} --ref-file {} --l10n-file {}".format(
                            merge, ref_file, l10n_file
                        )
                    ]
                )
                # Add a dummy rule for the l10n file since it might not exist.
                mk.create_rule([l10n_file])

        mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")

        for base, install_manifest in six.iteritems(self._install_manifests):
            with self._write_file(
                mozpath.join(
                    self.environment.topobjdir,
                    "faster",
                    "install_%s" % base.replace("/", "_"),
                )
            ) as fh:
                install_manifest.write(fileobj=fh)

        # Write a single unified manifest for consumption by |mach watch|.
        # Since this doesn't start 'install_', it's not processed by the build.
        unified_manifest = InstallManifest()
        for base, install_manifest in six.iteritems(self._install_manifests):
            # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
            assert base.startswith("dist/bin")
            base = base[len("dist/bin") :]
            if base and base[0] == "/":
                base = base[1:]
            unified_manifest.add_entries_from(install_manifest, base=base)

        with self._write_file(
            mozpath.join(
                self.environment.topobjdir, "faster", "unified_install_dist_bin"
            )
        ) as fh:
            unified_manifest.write(fileobj=fh)

        for obj in self._generated_files:
            for stmt in self._format_statements_for_generated_file(obj, "default"):
                mk.add_statement(stmt)

        with self._write_file(
            mozpath.join(self.environment.topobjdir, "faster", "Makefile")
        ) as fh:
            mk.dump(fh, removal_guard=False)

    def _pretty_path(self, path, obj):
        """Rewrite an absolute path in terms of $(TOPOBJDIR)/$(TOPSRCDIR)."""
        if path.startswith(self.environment.topobjdir):
            return mozpath.join(
                "$(TOPOBJDIR)", mozpath.relpath(path, self.environment.topobjdir)
            )
        elif path.startswith(self.environment.topsrcdir):
            return mozpath.join(
                "$(TOPSRCDIR)", mozpath.relpath(path, self.environment.topsrcdir)
            )
        else:
            return path

    def _format_generated_file_input_name(self, path, obj):
        """MakeBackend hook: prettify a GeneratedFile input path."""
        return self._pretty_path(path.full_path, obj)

    def _format_generated_file_output_name(self, path, obj):
        """MakeBackend hook: prettify a GeneratedFile output path."""
        if not isinstance(path, Path):
            path = ObjDirPath(obj._context, "!" + path)
        return self._pretty_path(path.full_path, obj)
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
new file mode 100644
index 0000000000..1b83ebc826
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/mach_commands.py
@@ -0,0 +1,420 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument
+from mozfile import which
+
+from mozbuild import build_commands
+
+
@Command(
    "ide",
    category="devenv",
    description="Generate a project and launch an IDE.",
    virtualenv_name="build",
)
@CommandArgument("ide", choices=["eclipse", "visualstudio", "vscode"])
@CommandArgument(
    "--no-interactive",
    default=False,
    action="store_true",
    help="Just generate the configuration",
)
@CommandArgument("args", nargs=argparse.REMAINDER)
def run(command_context, ide, no_interactive, args):
    """Entry point for |mach ide|.

    Builds the prerequisites for the chosen IDE, regenerates the matching
    config.status backend, then launches the IDE (or runs the VS Code
    setup). Returns a non-zero exit code on failure.

    NOTE(review): the ``args`` remainder argument is never used; the local
    name is overwritten below before the config.status invocation.
    """
    interactive = not no_interactive

    if ide == "eclipse":
        backend = "CppEclipse"
    elif ide == "visualstudio":
        backend = "VisualStudio"
    elif ide == "vscode":
        backend = "Clangd"

    if ide == "eclipse" and not which("eclipse"):
        command_context.log(
            logging.ERROR,
            "ide",
            {},
            "Eclipse CDT 8.4 or later must be installed in your PATH.",
        )
        command_context.log(
            logging.ERROR,
            "ide",
            {},
            "Download: http://www.eclipse.org/cdt/downloads.php",
        )
        return 1

    if ide == "vscode":
        rc = build_commands.configure(command_context)

        if rc != 0:
            return rc

        # First install what we can through install manifests.
        rc = command_context._run_make(
            directory=command_context.topobjdir,
            target="pre-export",
            line_handler=None,
        )
        if rc != 0:
            return rc

        # Then build the rest of the build dependencies by running the full
        # export target, because we can't do anything better.
        for target in ("export", "pre-compile"):
            rc = command_context._run_make(
                directory=command_context.topobjdir,
                target=target,
                line_handler=None,
            )
            if rc != 0:
                return rc
    else:
        # Here we refresh the whole build. 'build export' is sufficient here and is
        # probably more correct but it's also nice having a single target to get a fully
        # built and indexed project (gives a easy target to use before go out to lunch).
        res = command_context._mach_context.commands.dispatch(
            "build", command_context._mach_context
        )
        if res != 0:
            return 1

    # Generate or refresh the IDE backend.
    python = command_context.virtualenv_manager.python_path
    config_status = os.path.join(command_context.topobjdir, "config.status")
    args = [python, config_status, "--backend=%s" % backend]
    res = command_context._run_command_in_objdir(
        args=args, pass_thru=True, ensure_exit_code=False
    )
    if res != 0:
        return 1

    if ide == "eclipse":
        eclipse_workspace_dir = get_eclipse_workspace_path(command_context)
        subprocess.check_call(["eclipse", "-data", eclipse_workspace_dir])
    elif ide == "visualstudio":
        visual_studio_workspace_dir = get_visualstudio_workspace_path(command_context)
        subprocess.call(["explorer.exe", visual_studio_workspace_dir])
    elif ide == "vscode":
        return setup_vscode(command_context, interactive)
+
+
def get_eclipse_workspace_path(command_context):
    """Return the Eclipse workspace directory for this srcdir/objdir pair."""
    from mozbuild.backend.cpp_eclipse import CppEclipseBackend

    srcdir = command_context.topsrcdir
    objdir = command_context.topobjdir
    return CppEclipseBackend.get_workspace_path(srcdir, objdir)
+
+
def get_visualstudio_workspace_path(command_context):
    """Return the normalized path of the generated Visual Studio solution."""
    solution = os.path.join(command_context.topobjdir, "msvc", "mozilla.sln")
    return os.path.normpath(solution)
+
+
def setup_vscode(command_context, interactive):
    """Create or update .vscode/settings.json and optionally launch VS Code.

    Merges generated clangd/rust-analyzer settings (skipped for artifact
    builds) and file associations into any existing user settings, showing
    a diff and prompting before writing when *interactive*. Returns 0 on
    success, 1 when the user declines a prompt, or the VS Code launch rc.
    """
    from mozbuild.backend.clangd import find_vscode_cmd

    # Check if platform has VSCode installed
    if interactive:
        vscode_cmd = find_vscode_cmd()
        if vscode_cmd is None:
            choice = prompt_bool(
                "VSCode cannot be found, and may not be installed. Proceed?"
            )
            if not choice:
                return 1

    vscode_settings = mozpath.join(
        command_context.topsrcdir, ".vscode", "settings.json"
    )

    new_settings = {}
    artifact_prefix = ""
    if command_context.config_environment.is_artifact_build:
        artifact_prefix = (
            "\nArtifact build configured: Skipping clang and rust setup. "
            "If you later switch to a full build, please re-run this command."
        )
    else:
        new_settings = setup_clangd_rust_in_vscode(command_context)

    # Add file associations.
    new_settings = {
        **new_settings,
        "files.associations": {
            "*.jsm": "javascript",
            "*.sjs": "javascript",
        },
        # Note, the top-level editor settings are left as default to allow the
        # user's defaults (if any) to take effect.
        "[javascript][javascriptreact][typescript][typescriptreact][json][html]": {
            "editor.defaultFormatter": "esbenp.prettier-vscode",
            "editor.formatOnSave": True,
        },
    }

    import difflib
    import json

    # Load the existing .vscode/settings.json file, to check if it needs to
    # be created or updated.
    try:
        with open(vscode_settings) as fh:
            old_settings_str = fh.read()
    except FileNotFoundError:
        print(
            "Configuration for {} will be created.{}".format(
                vscode_settings, artifact_prefix
            )
        )
        old_settings_str = None

    if old_settings_str is None:
        # No old settings exist
        with open(vscode_settings, "w") as fh:
            json.dump(new_settings, fh, indent=4)
    else:
        # Merge our new settings with the existing settings, and check if we
        # need to make changes. Only prompt & write out the updated config
        # file if settings actually changed.
        try:
            old_settings = json.loads(old_settings_str)
            prompt_prefix = ""
        except ValueError:
            old_settings = {}
            prompt_prefix = (
                "\n**WARNING**: Parsing of existing settings file failed. "
                "Existing settings will be lost!"
            )

        # If we've got an old section with the formatting configuration, remove it
        # so that we effectively "upgrade" the user to include json from the new
        # settings. The user is presented with the diffs so should spot any issues.
        if "[javascript][javascriptreact][typescript][typescriptreact]" in old_settings:
            old_settings.pop(
                "[javascript][javascriptreact][typescript][typescriptreact]"
            )
        if (
            "[javascript][javascriptreact][typescript][typescriptreact][json]"
            in old_settings
        ):
            old_settings.pop(
                "[javascript][javascriptreact][typescript][typescriptreact][json]"
            )

        # New settings win over existing ones on key conflicts.
        settings = {**old_settings, **new_settings}

        if old_settings != settings:
            # Prompt the user with a diff of the changes we're going to make
            new_settings_str = json.dumps(settings, indent=4)
            if interactive:
                print(
                    "\nThe following modifications to {settings} will occur:\n{diff}".format(
                        settings=vscode_settings,
                        diff="".join(
                            difflib.unified_diff(
                                old_settings_str.splitlines(keepends=True),
                                new_settings_str.splitlines(keepends=True),
                                "a/.vscode/settings.json",
                                "b/.vscode/settings.json",
                                n=30,
                            )
                        ),
                    )
                )
                choice = prompt_bool(
                    "{}{}\nProceed with modifications to {}?".format(
                        artifact_prefix, prompt_prefix, vscode_settings
                    )
                )
                if not choice:
                    return 1

            with open(vscode_settings, "w") as fh:
                fh.write(new_settings_str)

    if not interactive:
        return 0

    # Open vscode with new configuration, or ask the user to do so if the
    # binary was not found.
    if vscode_cmd is None:
        print(
            "Please open VS Code manually and load directory: {}".format(
                command_context.topsrcdir
            )
        )
        return 0

    rc = subprocess.call(vscode_cmd + [command_context.topsrcdir])

    if rc != 0:
        command_context.log(
            logging.ERROR,
            "ide",
            {},
            "Unable to open VS Code. Please open VS Code manually and load "
            "directory: {}".format(command_context.topsrcdir),
        )
        return rc

    return 0
+
+
def setup_clangd_rust_in_vscode(command_context):
    """Prepare clangd and rust-analyzer configuration for VS Code.

    Ensures the clang-tidy/clangd toolchain is present (downloading it if
    missing), writes .clang-tidy and .clangd config files, and returns the
    dict of VS Code settings to merge into settings.json.

    NOTE(review): on toolchain-download failure this returns an int rc,
    while the success path returns a dict; the caller (setup_vscode) merges
    the result with ``{**new_settings, ...}``, which would fail on an int —
    confirm the failure path is actually exercised/handled.
    """
    clangd_cc_path = mozpath.join(command_context.topobjdir, "clangd")

    # Verify if the required files are present
    clang_tools_path = mozpath.join(
        command_context._mach_context.state_dir, "clang-tools"
    )
    clang_tidy_bin = mozpath.join(clang_tools_path, "clang-tidy", "bin")

    clangd_path = mozpath.join(
        clang_tidy_bin,
        "clangd" + command_context.config_environment.substs.get("BIN_SUFFIX", ""),
    )

    if not os.path.exists(clangd_path):
        command_context.log(
            logging.ERROR,
            "ide",
            {},
            "Unable to locate clangd in {}.".format(clang_tidy_bin),
        )
        rc = get_clang_tools(command_context, clang_tools_path)

        if rc != 0:
            return rc

    import multiprocessing

    from mozbuild.code_analysis.utils import ClangTidyConfig

    clang_tidy_cfg = ClangTidyConfig(command_context.topsrcdir)

    # On Windows, "./mach" isn't directly executable; invoke via Python.
    if sys.platform == "win32":
        cargo_check_command = [sys.executable, "mach"]
    else:
        cargo_check_command = ["./mach"]

    cargo_check_command += [
        "--log-no-times",
        "cargo",
        "check",
        "-j",
        str(multiprocessing.cpu_count() // 2),
        "--all-crates",
        "--message-format-json",
    ]

    clang_tidy = {}
    clang_tidy["Checks"] = ",".join(clang_tidy_cfg.checks)
    clang_tidy.update(clang_tidy_cfg.checks_config)

    # Write .clang-tidy yml
    # NOTE(review): written relative to the current working directory —
    # presumably topsrcdir; confirm callers guarantee the cwd.
    import yaml

    with open(".clang-tidy", "w") as file:
        yaml.dump(clang_tidy, file)

    # Point clangd at the compilation database generated into the objdir.
    clangd_cfg = {
        "CompileFlags": {
            "CompilationDatabase": clangd_cc_path,
        }
    }

    with open(".clangd", "w") as file:
        yaml.dump(clangd_cfg, file)

    return {
        "clangd.path": clangd_path,
        "clangd.arguments": [
            "-j",
            str(multiprocessing.cpu_count() // 2),
            "--limit-results",
            "0",
            "--completion-style",
            "detailed",
            "--background-index",
            "--all-scopes-completion",
            "--log",
            "info",
            "--pch-storage",
            "disk",
            "--clang-tidy",
        ],
        "rust-analyzer.server.extraEnv": {
            # Point rust-analyzer at the real target directory used by our
            # build, so it can discover the files created when we run `./mach
            # cargo check`.
            "CARGO_TARGET_DIR": command_context.topobjdir,
        },
        "rust-analyzer.cargo.buildScripts.overrideCommand": cargo_check_command,
        "rust-analyzer.check.overrideCommand": cargo_check_command,
    }
+
+
def get_clang_tools(command_context, clang_tools_path):
    """Download the clang-tidy toolchain artifact into *clang_tools_path*.

    Any existing directory at that path is wiped first. Returns 1 when the
    current platform is unsupported, otherwise the rc of the artifact
    download.
    """
    import shutil

    if os.path.isdir(clang_tools_path):
        shutil.rmtree(clang_tools_path)

    # Create base directory where we store clang binary
    os.mkdir(clang_tools_path)

    from mozbuild.artifact_commands import artifact_toolchain

    job, _ = command_context.platform

    if job is None:
        command_context.log(
            logging.ERROR,
            "ide",
            {},
            "The current platform isn't supported. "
            "Currently only the following platforms are "
            "supported: win32/win64, linux64 and macosx64.",
        )
        return 1

    job += "-clang-tidy"

    # We want to unpack data in the clang-tidy mozbuild folder
    currentWorkingDir = os.getcwd()
    os.chdir(clang_tools_path)
    rc = artifact_toolchain(
        command_context, verbose=False, from_build=[job], no_unpack=False, retry=0
    )
    # Change back the cwd
    os.chdir(currentWorkingDir)

    return rc
+
+
def prompt_bool(prompt, limit=5):
    """Prompt the user with *prompt* and parse a yes/no answer.

    Accepts the same spellings as the removed ``distutils.util.strtobool``:
    y/yes/t/true/on/1 are truthy and n/no/f/false/off/0 are falsy, case
    insensitively. Re-prompts up to *limit* times on invalid input, then
    gives up and returns False.
    """
    # distutils was deprecated by PEP 632 and removed in Python 3.12, so
    # inline the tiny strtobool equivalent instead of importing it.
    truthy = {"y", "yes", "t", "true", "on", "1"}
    falsy = {"n", "no", "f", "false", "off", "0"}

    for _ in range(limit):
        value = input(prompt + " [Y/N]\n").strip().lower()
        if value in truthy:
            return True
        if value in falsy:
            return False
        print(
            "ERROR! Please enter a valid option! Please use any of the following:"
            " Y, N, True, False, 1, 0"
        )
    return False
diff --git a/python/mozbuild/mozbuild/backend/make.py b/python/mozbuild/mozbuild/backend/make.py
new file mode 100644
index 0000000000..90b37e6758
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/make.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozpack.path as mozpath
+
+from mozbuild.frontend.data import GeneratedFile
+from mozbuild.shellutil import quote as shell_quote
+
+from .common import CommonBackend
+
+
+class MakeBackend(CommonBackend):
+ """Class encapsulating logic for backends that use Make."""
+
+ def _init(self):
+ CommonBackend._init(self)
+
+ def _format_statements_for_generated_file(self, obj, tier, extra_dependencies=""):
+ """Return the list of statements to write to the Makefile for this
+ GeneratedFile.
+
+ This function will invoke _format_generated_file_input_name and
+ _format_generated_file_output_name to munge the input/output filenames
+ before sending them to the output.
+ """
+ assert isinstance(obj, GeneratedFile)
+
+ # Localized generated files can use {AB_CD} and {AB_rCD} in their
+ # output paths.
+ if obj.localized:
+ substs = {"AB_CD": "$(AB_CD)", "AB_rCD": "$(AB_rCD)"}
+ else:
+ substs = {}
+
+ outputs = []
+ needs_AB_rCD = False
+ for o in obj.outputs:
+ needs_AB_rCD = needs_AB_rCD or ("AB_rCD" in o)
+ try:
+ outputs.append(
+ self._format_generated_file_output_name(o.format(**substs), obj)
+ )
+ except KeyError as e:
+ raise ValueError(
+ "%s not in %s is not a valid substitution in %s"
+ % (e.args[0], ", ".join(sorted(substs.keys())), o)
+ )
+
+ first_output = outputs[0]
+ dep_file = mozpath.join(
+ mozpath.dirname(first_output),
+ "$(MDDEPDIR)",
+ "%s.pp" % mozpath.basename(first_output),
+ )
+ # The stub target file needs to go in MDDEPDIR so that it doesn't
+ # get written into generated Android resource directories, breaking
+ # Gradle tooling and/or polluting the Android packages.
+ stub_file = mozpath.join(
+ mozpath.dirname(first_output),
+ "$(MDDEPDIR)",
+ "%s.stub" % mozpath.basename(first_output),
+ )
+
+ if obj.inputs:
+ inputs = [
+ self._format_generated_file_input_name(f, obj) for f in obj.inputs
+ ]
+ else:
+ inputs = []
+
+ force = ""
+ if obj.force:
+ force = " FORCE"
+ elif obj.localized:
+ force = " $(if $(IS_LANGUAGE_REPACK),FORCE)"
+
+ ret = []
+
+ if obj.script:
+ # If we are doing an artifact build, we don't run compiler, so
+ # we can skip generated files that are needed during compile,
+ # or let the rule run as the result of something depending on
+ # it.
+ if (
+ not (obj.required_before_compile or obj.required_during_compile)
+ or not self.environment.is_artifact_build
+ ):
+ if tier and not needs_AB_rCD:
+ # Android localized resources have special Makefile
+ # handling.
+
+ # Double-colon tiers via a variable that the backend adds as a dependency
+ # later. See https://bugzilla.mozilla.org/show_bug.cgi?id=1645986#c0 as
+ # to why.
+ if tier in ("export", "pre-compile", "libs", "misc"):
+ dep = "%s_TARGETS" % tier.replace("-", "_").upper()
+ ret.append("%s += %s" % (dep, stub_file))
+ else:
+ ret.append("%s: %s" % (tier, stub_file))
+ for output in outputs:
+ ret.append("%s: %s ;" % (output, stub_file))
+ ret.append("EXTRA_MDDEPEND_FILES += %s" % dep_file)
+
+ ret.append(
+ (
+ """{stub}: {script}{inputs}{backend}{force}
+\t$(REPORT_BUILD)
+\t$(call py_action,file_generate,{locale}{script} """ # wrap for E501
+ """{method} {output} {dep_file} {stub}{inputs}{flags})
+\t@$(TOUCH) $@
+"""
+ ).format(
+ stub=stub_file,
+ output=first_output,
+ dep_file=dep_file,
+ inputs=" " + " ".join(inputs) if inputs else "",
+ flags=" " + " ".join(shell_quote(f) for f in obj.flags)
+ if obj.flags
+ else "",
+ backend=" " + extra_dependencies if extra_dependencies else "",
+ # Locale repacks repack multiple locales from a single configured objdir,
+ # so standard mtime dependencies won't work properly when the build is re-run
+ # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
+ # in this situation, so simply force the generation to run in that case.
+ force=force,
+ locale="--locale=$(AB_CD) " if obj.localized else "",
+ script=obj.script,
+ method=obj.method,
+ )
+ )
+
+ return ret
+
+ def _format_generated_file_input_name(self, path, obj):
+ raise NotImplementedError("Subclass must implement")
+
+ def _format_generated_file_output_name(self, path, obj):
+ raise NotImplementedError("Subclass must implement")
diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py
new file mode 100644
index 0000000000..d92864d081
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -0,0 +1,1904 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import io
+import logging
+import os
+import re
+from collections import defaultdict, namedtuple
+from itertools import chain
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.manifests import InstallManifest
+from six import StringIO
+
+from mozbuild import frontend
+from mozbuild.frontend.context import (
+ AbsolutePath,
+ ObjDirPath,
+ Path,
+ RenamedSourcePath,
+ SourcePath,
+)
+from mozbuild.shellutil import quote as shell_quote
+
+from ..frontend.data import (
+ BaseLibrary,
+ BaseProgram,
+ BaseRustLibrary,
+ ChromeManifestEntry,
+ ComputedFlags,
+ ConfigFileSubstitution,
+ ContextDerived,
+ Defines,
+ DirectoryTraversal,
+ ExternalLibrary,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostRustProgram,
+ HostSharedLibrary,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ JARManifest,
+ Linkable,
+ LocalInclude,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ Program,
+ RustProgram,
+ RustTests,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestManifest,
+ VariablePassthru,
+ WasmSources,
+ XPIDLModule,
+)
+from ..makeutil import Makefile
+from ..util import FileAvoidWrite, OrderedDefaultDict, ensureParentDir, pairwise
+from .common import CommonBackend
+from .make import MakeBackend
+
+# To protect against accidentally adding logic to Makefiles that belong in moz.build,
+# we check if moz.build-like variables are defined in Makefiles. If they are, we throw
+# an error to encourage the usage of moz.build instead.
# To protect against accidentally adding logic to Makefiles that belong in moz.build,
# we check if moz.build-like variables are defined in Makefiles. If they are, we throw
# an error to encourage the usage of moz.build instead.
_MOZBUILD_ONLY_VARIABLES = set(frontend.context.VARIABLES.keys()) - {
    # The migration to moz.build from Makefiles still isn't complete, and there's still
    # some straggling Makefile logic that uses variables that only moz.build should
    # use.
    # These remaining variables are excluded from our blacklist. As the variables here
    # are migrated from Makefiles in the future, they should be removed from this
    # "override" list.
    "XPI_NAME",
    "USE_EXTENSION_MANIFEST",
    "CFLAGS",
    "CXXFLAGS",
}

# Variables that must not appear in Makefiles at all; defining one is a
# hard error (see DEPRECATED_VARIABLES_MESSAGE below).
DEPRECATED_VARIABLES = [
    "ALLOW_COMPILER_WARNINGS",
    "EXPORT_LIBRARY",
    "EXTRA_LIBS",
    "FAIL_ON_WARNINGS",
    "HOST_LIBS",
    "LIBXUL_LIBRARY",
    "MOCHITEST_A11Y_FILES",
    "MOCHITEST_BROWSER_FILES",
    "MOCHITEST_BROWSER_FILES_PARTS",
    "MOCHITEST_CHROME_FILES",
    "MOCHITEST_FILES",
    "MOCHITEST_FILES_PARTS",
    "MOCHITEST_METRO_FILES",
    "MOCHITEST_ROBOCOP_FILES",
    "MODULE_OPTIMIZE_FLAGS",
    "MOZ_CHROME_FILE_FORMAT",
    "SHORT_LIBNAME",
    "TESTING_JS_MODULES",
    "TESTING_JS_MODULE_DIR",
]

# Message appended when a _MOZBUILD_ONLY_VARIABLES entry is found in a Makefile.
MOZBUILD_VARIABLES_MESSAGE = "It should only be defined in moz.build files."

# Message appended when a DEPRECATED_VARIABLES entry is found in a Makefile.
DEPRECATED_VARIABLES_MESSAGE = (
    "This variable has been deprecated. It does nothing. It must be removed "
    "in order to build."
)
+
+
def make_quote(s):
    """Quote *s* for literal use in a Makefile.

    ``#`` starts a make comment and ``$`` introduces a variable reference,
    so escape ``#`` with a backslash and ``$`` by doubling it.
    """
    # "\\#" fixes the invalid "\#" escape sequence in the original; it
    # produced the same text only by accident and raises a SyntaxWarning on
    # modern Python (slated to become a SyntaxError).
    return s.replace("#", "\\#").replace("$", "$$")
+
+
class BackendMakeFile(object):
    """Represents a generated backend.mk file.

    This is both a wrapper around a file handle as well as a container that
    holds accumulated state.

    It's worth taking a moment to explain the make dependencies. The
    generated backend.mk as well as the Makefile.in (if it exists) are in the
    GLOBAL_DEPS list. This means that if one of them changes, all targets
    in that Makefile are invalidated. backend.mk also depends on all of its
    input files.

    It's worth considering the effect of file mtimes on build behavior.

    Since we perform an "all or none" traversal of moz.build files (the whole
    tree is scanned as opposed to individual files), if we were to blindly
    write backend.mk files, the net effect of updating a single mozbuild file
    in the tree is all backend.mk files have new mtimes. This would in turn
    invalidate all make targets across the whole tree! This would effectively
    undermine incremental builds as any mozbuild change would cause the entire
    tree to rebuild!

    The solution is to not update the mtimes of backend.mk files unless they
    actually change. We use FileAvoidWrite to accomplish this.
    """

    def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir, dry_run):
        self.topsrcdir = topsrcdir
        self.srcdir = srcdir
        self.objdir = objdir
        self.relobjdir = mozpath.relpath(objdir, topobjdir)
        self.environment = environment
        self.name = mozpath.join(objdir, "backend.mk")

        # When truthy, close() emits the non-recursive xpidl export hooks.
        # Presumably set by the backend when this directory has XPIDL
        # outputs — confirm against the consumer code.
        self.xpt_name = None

        # FileAvoidWrite keeps the mtime unchanged when content is identical
        # (see class docstring).
        self.fh = FileAvoidWrite(self.name, capture_diff=True, dry_run=dry_run)
        self.fh.write("# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n")
        self.fh.write("\n")

    def write(self, buf):
        self.fh.write(buf)

    def write_once(self, buf):
        # Append buf only if a newline-prefixed copy of it isn't already in
        # the buffered output, so repeated statements are written once.
        buf = six.ensure_text(buf)
        if "\n" + buf not in six.ensure_text(self.fh.getvalue()):
            self.write(buf)

    # For compatibility with makeutil.Makefile
    def add_statement(self, stmt):
        self.write("%s\n" % stmt)

    def close(self):
        if self.xpt_name:
            # We just recompile all xpidls because it's easier and less error
            # prone.
            self.fh.write("NONRECURSIVE_TARGETS += export\n")
            self.fh.write("NONRECURSIVE_TARGETS_export += xpidl\n")
            self.fh.write(
                "NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = "
                "$(DEPTH)/xpcom/xpidl\n"
            )
            self.fh.write("NONRECURSIVE_TARGETS_export_xpidl_TARGETS += " "export\n")

        return self.fh.close()

    @property
    def diff(self):
        return self.fh.diff
+
+
+class RecursiveMakeTraversal(object):
+ """
+ Helper class to keep track of how the "traditional" recursive make backend
+ recurses subdirectories. This is useful until all adhoc rules are removed
+ from Makefiles.
+
+ Each directory may have one or more types of subdirectories:
+ - (normal) dirs
+ - tests
+ """
+
+ SubDirectoryCategories = ["dirs", "tests"]
+ SubDirectoriesTuple = namedtuple("SubDirectories", SubDirectoryCategories)
+
    class SubDirectories(SubDirectoriesTuple):
        # Subclass so every instance starts with fresh, mutable lists for
        # each category. Note: __new__'s first argument is the class; it is
        # conventionally named `cls` but appears as `self` here.
        def __new__(self):
            return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], [])
+
+ def __init__(self):
+ self._traversal = {}
+ self._attached = set()
+
+ def add(self, dir, dirs=[], tests=[]):
+ """
+ Adds a directory to traversal, registering its subdirectories,
+ sorted by categories. If the directory was already added to
+ traversal, adds the new subdirectories to the already known lists.
+ """
+ subdirs = self._traversal.setdefault(dir, self.SubDirectories())
+ for key, value in (("dirs", dirs), ("tests", tests)):
+ assert key in self.SubDirectoryCategories
+ # Callers give us generators
+ value = list(value)
+ getattr(subdirs, key).extend(value)
+ self._attached |= set(value)
+
+ @staticmethod
+ def default_filter(current, subdirs):
+ """
+ Default filter for use with compute_dependencies and traverse.
+ """
+ return current, [], subdirs.dirs + subdirs.tests
+
+ def call_filter(self, current, filter):
+ """
+ Helper function to call a filter from compute_dependencies and
+ traverse.
+ """
+ return filter(current, self.get_subdirs(current))
+
+ def compute_dependencies(self, filter=None):
+ """
+ Compute make dependencies corresponding to the registered directory
+ traversal.
+
+ filter is a function with the following signature:
+ def filter(current, subdirs)
+
+ where current is the directory being traversed, and subdirs the
+ SubDirectories instance corresponding to it.
+ The filter function returns a tuple (filtered_current, filtered_parallel,
+ filtered_dirs) where filtered_current is either current or None if
+ the current directory is to be skipped, and filtered_parallel and
+ filtered_dirs are lists of parallel directories and sequential
+ directories, which can be rearranged from whatever is given in the
+ SubDirectories members.
+
+ The default filter corresponds to a default recursive traversal.
+
+ """
+ filter = filter or self.default_filter
+
+ deps = {}
+
+ def recurse(start_node, prev_nodes=None):
+ current, parallel, sequential = self.call_filter(start_node, filter)
+ if current is not None:
+ if start_node != "":
+ deps[start_node] = prev_nodes
+ prev_nodes = (start_node,)
+ if start_node not in self._traversal:
+ return prev_nodes
+ parallel_nodes = []
+ for node in parallel:
+ nodes = recurse(node, prev_nodes)
+ if nodes and nodes != ("",):
+ parallel_nodes.extend(nodes)
+ if parallel_nodes:
+ prev_nodes = tuple(parallel_nodes)
+ for dir in sequential:
+ prev_nodes = recurse(dir, prev_nodes)
+ return prev_nodes
+
+ return recurse(""), deps
+
+ def traverse(self, start, filter=None):
+ """
+ Iterate over the filtered subdirectories, following the traditional
+ make traversal order.
+ """
+ if filter is None:
+ filter = self.default_filter
+
+ current, parallel, sequential = self.call_filter(start, filter)
+ if current is not None:
+ yield start
+ if start not in self._traversal:
+ return
+ for node in parallel:
+ for n in self.traverse(node, filter):
+ yield n
+ for dir in sequential:
+ for d in self.traverse(dir, filter):
+ yield d
+
+ def get_subdirs(self, dir):
+ """
+ Returns all direct subdirectories under the given directory.
+ """
+ result = self._traversal.get(dir, self.SubDirectories())
+ if dir == "":
+ unattached = set(self._traversal) - self._attached - set([""])
+ if unattached:
+ new_result = self.SubDirectories()
+ new_result.dirs.extend(result.dirs)
+ new_result.dirs.extend(sorted(unattached))
+ new_result.tests.extend(result.tests)
+ result = new_result
+ return result
+
+
+class RecursiveMakeBackend(MakeBackend):
+ """Backend that integrates with the existing recursive make build system.
+
+ This backend facilitates the transition from Makefile.in to moz.build
+ files.
+
+ This backend performs Makefile.in -> Makefile conversion. It also writes
+ out .mk files containing content derived from moz.build files. Both are
+ consumed by the recursive make builder.
+
+ This backend may eventually evolve to write out non-recursive make files.
+ However, as long as there are Makefile.in files in the tree, we are tied to
+ recursive make and thus will need this backend.
+ """
+
    def _init(self):
        """Initialize backend state on top of MakeBackend._init."""
        MakeBackend._init(self)

        # objdir -> BackendMakeFile, created lazily by _get_backend_file_for.
        self._backend_files = {}
        # relobjdirs containing XPIDL modules (need export-tier rules).
        self._idl_dirs = set()

        # Counters reported by summary().
        self._makefile_in_count = 0
        self._makefile_out_count = 0

        self._test_manifests = {}

        self.backend_input_files.add(
            mozpath.join(self.environment.topobjdir, "config", "autoconf.mk")
        )

        self._install_manifests = defaultdict(InstallManifest)
        # The build system relies on some install manifests always existing
        # even if they are empty, because the directories are still filled
        # by the build system itself, and the install manifests are only
        # used for a "magic" rm -rf.
        self._install_manifests["dist_public"]
        self._install_manifests["dist_private"]

        self._traversal = RecursiveMakeTraversal()
        # Compile-tier dependency graph: target -> set of prerequisites.
        self._compile_graph = OrderedDefaultDict(set)
        self._rust_targets = set()
        self._gkrust_target = None
        # relobjdirs with generated files needed before compilation.
        self._pre_compile = set()

        # Per-tier sets of directories that must not be skipped during
        # recursion (see _fill_root_mk).
        self._no_skip = {
            "pre-export": set(),
            "export": set(),
            "libs": set(),
            "misc": set(),
            "tools": set(),
            "check": set(),
            "syms": set(),
        }
+
    def summary(self):
        """Extend the base summary with the Makefile.in conversion counts."""
        summary = super(RecursiveMakeBackend, self).summary()
        summary.extend(
            "; {makefile_in:d} -> {makefile_out:d} Makefile",
            makefile_in=self._makefile_in_count,
            makefile_out=self._makefile_out_count,
        )
        return summary
+
    def _get_backend_file_for(self, obj):
        """Return (creating if needed) the BackendMakeFile for ``obj``.

        Most objects get the backend.mk of their own objdir.
        """
        # For generated files that we put in the export or misc tiers, we use the
        # top-level backend file, except for localized files, which we need to keep
        # in each directory for dependencies from jar manifests for l10n repacks.
        if (
            isinstance(obj, GeneratedFile)
            and not obj.required_during_compile
            and not obj.localized
        ):
            objdir = self.environment.topobjdir
        else:
            objdir = obj.objdir

        if objdir not in self._backend_files:
            self._backend_files[objdir] = BackendMakeFile(
                obj.srcdir,
                objdir,
                obj.config,
                obj.topsrcdir,
                self.environment.topobjdir,
                self.dry_run,
            )
        return self._backend_files[objdir]
+
    def consume_object(self, obj):
        """Write out build files necessary to build with recursive make.

        Dispatches on the concrete ContextDerived type of ``obj`` and emits
        the corresponding make variables/rules into the directory's
        backend.mk. Returns True when the object was handled, False
        otherwise (so callers can report unconsumed objects).
        """

        if not isinstance(obj, ContextDerived):
            return False

        backend_file = self._get_backend_file_for(obj)

        consumed = CommonBackend.consume_object(self, obj)

        # CommonBackend handles XPIDLModule, but we want to do
        # some extra things for them.
        if isinstance(obj, XPIDLModule):
            backend_file.xpt_name = "%s.xpt" % obj.name
            self._idl_dirs.add(obj.relobjdir)

        # If CommonBackend acknowledged the object, we're done with it.
        if consumed:
            return True

        # Emit the object's own DEFINES first (Defines objects themselves
        # are dispatched below). NOTE(review): assumes every non-Defines
        # object reaching this point exposes a .defines attribute — confirm
        # against the frontend data classes.
        if not isinstance(obj, Defines):
            self.consume_object(obj.defines)

        if isinstance(obj, Linkable):
            self._process_test_support_file(obj)

        if isinstance(obj, DirectoryTraversal):
            self._process_directory_traversal(obj, backend_file)
        elif isinstance(obj, ConfigFileSubstitution):
            # Other ConfigFileSubstitution should have been acked by
            # CommonBackend.
            assert os.path.basename(obj.output_path) == "Makefile"
            self._create_makefile(obj)
        elif isinstance(obj, Sources):
            suffix_map = {
                ".s": "ASFILES",
                ".c": "CSRCS",
                ".m": "CMSRCS",
                ".mm": "CMMSRCS",
                ".cpp": "CPPSRCS",
                ".S": "SSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            # Touching the key registers the target in the compile graph
            # (OrderedDefaultDict creates an empty dependency set).
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, HostSources):
            suffix_map = {
                ".c": "HOST_CSRCS",
                ".mm": "HOST_CMMSRCS",
                ".cpp": "HOST_CPPSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            self._compile_graph[mozpath.join(backend_file.relobjdir, "host-objects")]
        elif isinstance(obj, WasmSources):
            suffix_map = {".c": "WASM_CSRCS", ".cpp": "WASM_CPPSRCS"}
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, VariablePassthru):
            # Sorted so output is consistent and we don't bump mtimes.
            for k, v in sorted(obj.variables.items()):
                if isinstance(v, list):
                    for item in v:
                        backend_file.write(
                            "%s += %s\n" % (k, make_quote(shell_quote(item)))
                        )
                elif isinstance(v, bool):
                    if v:
                        backend_file.write("%s := 1\n" % k)
                elif isinstance(v, Path):
                    path = self._pretty_path(Path(obj._context, v), backend_file)
                    backend_file.write("%s := %s\n" % (k, path))
                else:
                    backend_file.write("%s := %s\n" % (k, v))
        elif isinstance(obj, HostDefines):
            self._process_defines(obj, backend_file, which="HOST_DEFINES")
        elif isinstance(obj, Defines):
            self._process_defines(obj, backend_file)

        elif isinstance(obj, GeneratedFile):
            # Pick the earliest tier the generated file is needed in.
            if obj.required_before_export:
                tier = "pre-export"
            elif obj.required_before_compile:
                tier = "export"
            elif obj.required_during_compile:
                tier = "pre-compile"
            else:
                tier = "misc"
            relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
            if tier == "pre-compile":
                self._pre_compile.add(relobjdir)
            else:
                self._no_skip[tier].add(relobjdir)
            backend_file.write_once("include $(topsrcdir)/config/AB_rCD.mk\n")
            relobjdir = mozpath.relpath(obj.objdir, backend_file.objdir)
            # For generated files that we handle in the top-level backend file,
            # we want to have a `directory/tier` target depending on the file.
            # For the others, we want a `tier` target.
            if tier != "pre-compile" and relobjdir:
                tier = "%s/%s" % (relobjdir, tier)
            for stmt in self._format_statements_for_generated_file(
                obj, tier, extra_dependencies="backend.mk" if obj.flags else ""
            ):
                backend_file.write(stmt + "\n")

        elif isinstance(obj, JARManifest):
            self._no_skip["misc"].add(backend_file.relobjdir)
            backend_file.write("JAR_MANIFEST := %s\n" % obj.path.full_path)

        elif isinstance(obj, RustProgram):
            self._process_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, HostRustProgram):
            self._process_host_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, RustTests):
            self._process_rust_tests(obj, backend_file)

        elif isinstance(obj, Program):
            self._process_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostProgram):
            self._process_host_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SimpleProgram):
            self._process_simple_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostSimpleProgram):
            self._process_host_simple_program(obj.program, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, LocalInclude):
            self._process_local_include(obj.path, backend_file)

        elif isinstance(obj, PerSourceFlag):
            self._process_per_source_flag(obj, backend_file)

        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)

        elif isinstance(obj, InstallationTarget):
            self._process_installation_target(obj, backend_file)

        elif isinstance(obj, BaseRustLibrary):
            self.backend_input_files.add(obj.cargo_file)
            self._process_rust_library(obj, backend_file)
            # No need to call _process_linked_libraries, because Rust
            # libraries are self-contained objects at this point.

            # Hook the library into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)
            if obj.is_gkrust:
                self._gkrust_target = build_target

        elif isinstance(obj, SharedLibrary):
            self._process_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, StaticLibrary):
            self._process_static_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SandboxedWasmLibrary):
            self._process_sandboxed_wasm_library(obj, backend_file)

        elif isinstance(obj, HostLibrary):
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, HostSharedLibrary):
            self._process_host_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, ObjdirFiles):
            self._process_objdir_files(obj, obj.files, backend_file)

        elif isinstance(obj, ObjdirPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "OBJDIR_PP_FILES"
            )

        elif isinstance(obj, LocalizedFiles):
            self._process_localized_files(obj, obj.files, backend_file)

        elif isinstance(obj, LocalizedPreprocessedFiles):
            self._process_localized_pp_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "DIST_FILES"
            )

        elif isinstance(obj, ChromeManifestEntry):
            self._process_chrome_manifest_entry(obj, backend_file)

        elif isinstance(obj, TestManifest):
            self._process_test_manifest(obj, backend_file)

        else:
            return False

        return True
+
    def _fill_root_mk(self):
        """
        Create two files, root.mk and root-deps.mk, the first containing
        convenience variables, and the other dependency definitions for a
        hopefully proper directory traversal.
        """
        for tier, no_skip in self._no_skip.items():
            self.log(
                logging.DEBUG,
                "fill_root_mk",
                {"number": len(no_skip), "tier": tier},
                "Using {number} directories during {tier}",
            )

        def should_skip(tier, dir):
            if tier in self._no_skip:
                return dir not in self._no_skip[tier]
            return False

        # Traverse directories in parallel, and skip static dirs
        # NOTE: `tier` inside the filters below is resolved by late binding
        # to the loop variable of the `for tier, filter in ...` loops.
        def parallel_filter(current, subdirs):
            all_subdirs = subdirs.dirs + subdirs.tests
            if should_skip(tier, current) or current.startswith("subtiers/"):
                current = None
            return current, all_subdirs, []

        # build everything in parallel, including static dirs
        # Because of bug 925236 and possible other unknown race conditions,
        # don't parallelize the libs tier.
        def libs_filter(current, subdirs):
            if should_skip("libs", current) or current.startswith("subtiers/"):
                current = None
            return current, [], subdirs.dirs + subdirs.tests

        # Because of bug 925236 and possible other unknown race conditions,
        # don't parallelize the tools tier. There aren't many directories for
        # this tier anyways.
        def tools_filter(current, subdirs):
            if should_skip("tools", current) or current.startswith("subtiers/"):
                current = None
            return current, [], subdirs.dirs + subdirs.tests

        filters = [
            ("export", parallel_filter),
            ("libs", libs_filter),
            ("misc", parallel_filter),
            ("tools", tools_filter),
            ("check", parallel_filter),
        ]

        root_deps_mk = Makefile()

        # Fill the dependencies for traversal of each tier.
        for tier, filter in sorted(filters, key=itemgetter(0)):
            main, all_deps = self._traversal.compute_dependencies(filter)
            for dir, deps in sorted(all_deps.items()):
                if deps is not None or (dir in self._idl_dirs and tier == "export"):
                    rule = root_deps_mk.create_rule(["%s/%s" % (dir, tier)])
                    if deps:
                        rule.add_dependencies(
                            "%s/%s" % (d, tier) for d in sorted(deps) if d
                        )
            rule = root_deps_mk.create_rule(["recurse_%s" % tier])
            if main:
                rule.add_dependencies("%s/%s" % (d, tier) for d in sorted(main))

        rule = root_deps_mk.create_rule(["recurse_pre-compile"])
        rule.add_dependencies("%s/pre-compile" % d for d in sorted(self._pre_compile))

        # Compile targets whose directory has pre-compile generated files
        # must wait for those files.
        targets_with_pre_compile = sorted(
            t for t in self._compile_graph if mozpath.dirname(t) in self._pre_compile
        )
        for t in targets_with_pre_compile:
            relobjdir = mozpath.dirname(t)
            rule = root_deps_mk.create_rule([t])
            rule.add_dependencies(["%s/pre-compile" % relobjdir])

        all_compile_deps = (
            six.moves.reduce(lambda x, y: x | y, self._compile_graph.values())
            if self._compile_graph
            else set()
        )
        # Include the following as dependencies of the top recursion target for
        # compilation:
        # - nodes that are not dependended upon by anything. Typically, this
        #   would include programs, that need to be recursed, but that nothing
        #   depends on.
        # - nodes that have no dependencies of their own. Technically, this is
        #   not necessary, because other things have dependencies on them, and
        #   they all end up rooting to nodes from the above category. But the
        #   way make works[1] is such that there can be benefits listing them
        #   as direct dependencies of the top recursion target, to somehow
        #   prioritize them.
        #   1. See bug 1262241 comment 5.
        compile_roots = [
            t
            for t, deps in six.iteritems(self._compile_graph)
            if not deps or t not in all_compile_deps
        ]

        def add_category_rules(category, roots, graph):
            rule = root_deps_mk.create_rule(["recurse_%s" % category])
            # Directories containing rust compilations don't generally depend
            # on other directories in the tree, so putting them first here will
            # start them earlier in the build.
            rust_roots = sorted(r for r in roots if r in self._rust_targets)
            if category == "compile" and rust_roots:
                rust_rule = root_deps_mk.create_rule(["recurse_rust"])
                rust_rule.add_dependencies(rust_roots)
                # Ensure our cargo invocations are serialized, and gecko comes
                # first. Cargo will lock on the build output directory anyway,
                # so trying to run things in parallel is not useful. Dependencies
                # for gecko are especially expensive to build and parallelize
                # poorly, so prioritizing these will save some idle time in full
                # builds.
                for prior_target, target in pairwise(
                    sorted(
                        [t for t in rust_roots], key=lambda t: t != self._gkrust_target
                    )
                ):
                    r = root_deps_mk.create_rule([target])
                    r.add_dependencies([prior_target])

            rule.add_dependencies(chain(rust_roots, sorted(roots)))
            for target, deps in sorted(graph.items()):
                if deps:
                    rule = root_deps_mk.create_rule([target])
                    rule.add_dependencies(sorted(deps))

        non_default_roots = defaultdict(list)
        non_default_graphs = defaultdict(lambda: OrderedDefaultDict(set))

        for root in compile_roots:
            # If this is a non-default target, separate the root from the
            # rest of the compile graph.
            target_name = mozpath.basename(root)

            if target_name not in ("target", "target-objects", "host", "host-objects"):
                non_default_roots[target_name].append(root)
                non_default_graphs[target_name][root] = self._compile_graph[root]
                del self._compile_graph[root]

        for root in chain(*non_default_roots.values()):
            compile_roots.remove(root)
            dirname = mozpath.dirname(root)
            # If a directory only contains non-default compile targets, we don't
            # attempt to dump symbols there.
            if (
                dirname in self._no_skip["syms"]
                and "%s/target" % dirname not in self._compile_graph
            ):
                self._no_skip["syms"].remove(dirname)

        add_category_rules("compile", compile_roots, self._compile_graph)
        for category, graph in sorted(six.iteritems(non_default_graphs)):
            add_category_rules(category, non_default_roots[category], graph)

        root_mk = Makefile()

        # Fill root.mk with the convenience variables.
        for tier, filter in filters:
            all_dirs = self._traversal.traverse("", filter)
            root_mk.add_statement("%s_dirs := %s" % (tier, " ".join(all_dirs)))

        # Need a list of compile targets because we can't use pattern rules:
        # https://savannah.gnu.org/bugs/index.php?42833
        root_mk.add_statement(
            "pre_compile_targets := %s"
            % " ".join(sorted("%s/pre-compile" % p for p in self._pre_compile))
        )
        root_mk.add_statement(
            "compile_targets := %s"
            % " ".join(sorted(set(self._compile_graph.keys()) | all_compile_deps))
        )
        root_mk.add_statement(
            "syms_targets := %s"
            % " ".join(sorted(set("%s/syms" % d for d in self._no_skip["syms"])))
        )
        root_mk.add_statement(
            "rust_targets := %s" % " ".join(sorted(self._rust_targets))
        )

        root_mk.add_statement(
            "non_default_tiers := %s" % " ".join(sorted(non_default_roots.keys()))
        )

        for category, graphs in sorted(six.iteritems(non_default_graphs)):
            category_dirs = [mozpath.dirname(target) for target in graphs.keys()]
            root_mk.add_statement("%s_dirs := %s" % (category, " ".join(category_dirs)))

        root_mk.add_statement("include root-deps.mk")

        with self._write_file(
            mozpath.join(self.environment.topobjdir, "root.mk")
        ) as root:
            root_mk.dump(root, removal_guard=False)

        with self._write_file(
            mozpath.join(self.environment.topobjdir, "root-deps.mk")
        ) as root_deps:
            root_deps_mk.dump(root_deps, removal_guard=False)
+
    def _add_unified_build_rules(
        self,
        makefile,
        unified_source_mapping,
        unified_files_makefile_variable="unified_files",
        include_curdir_build_rules=True,
    ):
        """Emit make statements listing unified (concatenated) source files.

        ``unified_source_mapping`` is an iterable of (unified_file, sources)
        pairs; only the unified file names are written to the variable.
        """

        # In case it's a generator.
        unified_source_mapping = sorted(unified_source_mapping)

        explanation = (
            "\n"
            "# We build files in 'unified' mode by including several files\n"
            "# together into a single source file.  This cuts down on\n"
            "# compilation times and debug information size."
        )
        makefile.add_statement(explanation)

        all_sources = " ".join(source for source, _ in unified_source_mapping)
        makefile.add_statement(
            "%s := %s" % (unified_files_makefile_variable, all_sources)
        )

        if include_curdir_build_rules:
            makefile.add_statement(
                "\n"
                '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
                "# Help it out by explicitly specifiying dependencies."
            )
            makefile.add_statement(
                "all_absolute_unified_files := \\\n"
                "  $(addprefix $(CURDIR)/,$(%s))" % unified_files_makefile_variable
            )
            rule = makefile.create_rule(["$(all_absolute_unified_files)"])
            rule.add_dependencies(["$(CURDIR)/%: %"])
+
+ def _check_blacklisted_variables(self, makefile_in, makefile_content):
+ if "EXTERNALLY_MANAGED_MAKE_FILE" in makefile_content:
+ # Bypass the variable restrictions for externally managed makefiles.
+ return
+
+ for l in makefile_content.splitlines():
+ l = l.strip()
+ # Don't check comments
+ if l.startswith("#"):
+ continue
+ for x in chain(_MOZBUILD_ONLY_VARIABLES, DEPRECATED_VARIABLES):
+ if x not in l:
+ continue
+
+ # Finding the variable name in the Makefile is not enough: it
+ # may just appear as part of something else, like DIRS appears
+ # in GENERATED_DIRS.
+ if re.search(r"\b%s\s*[:?+]?=" % x, l):
+ if x in _MOZBUILD_ONLY_VARIABLES:
+ message = MOZBUILD_VARIABLES_MESSAGE
+ else:
+ message = DEPRECATED_VARIABLES_MESSAGE
+ raise Exception(
+ "Variable %s is defined in %s. %s" % (x, makefile_in, message)
+ )
+
    def consume_finished(self):
        """Finalize the backend: write all backend.mk files, substitute or
        stub every Makefile, generate root.mk/root-deps.mk, master test
        manifests and install manifests.
        """
        CommonBackend.consume_finished(self)

        for objdir, backend_file in sorted(self._backend_files.items()):
            srcdir = backend_file.srcdir
            with self._write_file(fh=backend_file) as bf:
                makefile_in = mozpath.join(srcdir, "Makefile.in")
                makefile = mozpath.join(objdir, "Makefile")

                # If Makefile.in exists, use it as a template. Otherwise,
                # create a stub.
                stub = not os.path.exists(makefile_in)
                if not stub:
                    self.log(
                        logging.DEBUG,
                        "substitute_makefile",
                        {"path": makefile},
                        "Substituting makefile: {path}",
                    )
                    self._makefile_in_count += 1

                    # In the export and libs tiers, we don't skip directories
                    # containing a Makefile.in.
                    # topobjdir is handled separatedly, don't do anything for
                    # it.
                    if bf.relobjdir:
                        for tier in ("export", "libs"):
                            self._no_skip[tier].add(bf.relobjdir)
                else:
                    self.log(
                        logging.DEBUG,
                        "stub_makefile",
                        {"path": makefile},
                        "Creating stub Makefile: {path}",
                    )

                obj = self.Substitution()
                obj.output_path = makefile
                obj.input_path = makefile_in
                obj.topsrcdir = backend_file.topsrcdir
                obj.topobjdir = bf.environment.topobjdir
                obj.config = bf.environment
                self._create_makefile(obj, stub=stub)
                # Re-read the written Makefile to detect targets/variables
                # that force the directory into certain tiers.
                with io.open(obj.output_path, encoding="utf-8") as fh:
                    content = fh.read()
                    # Directories with a Makefile containing a tools target, or
                    # XPI_PKGNAME can't be skipped and must run during the
                    # 'tools' tier.
                    for t in ("XPI_PKGNAME", "tools"):
                        if t not in content:
                            continue
                        if t == "tools" and not re.search(
                            "(?:^|\s)tools.*::", content, re.M
                        ):
                            continue
                        if objdir == self.environment.topobjdir:
                            continue
                        self._no_skip["tools"].add(
                            mozpath.relpath(objdir, self.environment.topobjdir)
                        )

                    # Directories with a Makefile containing a check target
                    # can't be skipped and must run during the 'check' tier.
                    if re.search("(?:^|\s)check.*::", content, re.M):
                        self._no_skip["check"].add(
                            mozpath.relpath(objdir, self.environment.topobjdir)
                        )

                    # Detect any Makefile.ins that contain variables on the
                    # moz.build-only list
                    self._check_blacklisted_variables(makefile_in, content)

        self._fill_root_mk()

        # Make the master test manifest files.
        for flavor, t in self._test_manifests.items():
            install_prefix, manifests = t
            manifest_stem = mozpath.join(install_prefix, "%s.ini" % flavor)
            self._write_master_test_manifest(
                mozpath.join(self.environment.topobjdir, "_tests", manifest_stem),
                manifests,
            )

            # Catch duplicate inserts.
            try:
                self._install_manifests["_tests"].add_optional_exists(manifest_stem)
            except ValueError:
                pass

        self._write_manifests("install", self._install_manifests)

        # Ensure dist/ exists even when nothing installs into it.
        ensureParentDir(mozpath.join(self.environment.topobjdir, "dist", "foo"))
+
    def _pretty_path_parts(self, path, backend_file):
        """Return (make_variable_prefix, remainder) for ``path``.

        Rewrites absolute paths relative to known roots as make-variable
        prefixes ($(srcdir), $(topsrcdir), $(DEPTH)) so the generated
        makefiles are relocation-friendly. Note the objdir case strips the
        leading path separator (the +1) because its prefix is empty.
        """
        assert isinstance(path, Path)
        if isinstance(path, SourcePath):
            if path.full_path.startswith(backend_file.srcdir):
                return "$(srcdir)", path.full_path[len(backend_file.srcdir) :]
            if path.full_path.startswith(backend_file.topsrcdir):
                return "$(topsrcdir)", path.full_path[len(backend_file.topsrcdir) :]
        elif isinstance(path, ObjDirPath):
            if path.full_path.startswith(backend_file.objdir):
                return "", path.full_path[len(backend_file.objdir) + 1 :]
            if path.full_path.startswith(self.environment.topobjdir):
                return "$(DEPTH)", path.full_path[len(self.environment.topobjdir) :]

        # Fall back to the absolute path unchanged.
        return "", path.full_path
+
    def _pretty_path(self, path, backend_file):
        """Return ``path`` as a single make-friendly string."""
        return "".join(self._pretty_path_parts(path, backend_file))
+
    def _process_unified_sources(self, obj):
        """Emit UNIFIED_*SRCS (or plain *SRCS) variables for ``obj``."""
        backend_file = self._get_backend_file_for(obj)

        suffix_map = {
            ".c": "UNIFIED_CSRCS",
            ".m": "UNIFIED_CMSRCS",
            ".mm": "UNIFIED_CMMSRCS",
            ".cpp": "UNIFIED_CPPSRCS",
        }

        var = suffix_map[obj.canonical_suffix]
        # e.g. UNIFIED_CPPSRCS -> CPPSRCS
        non_unified_var = var[len("UNIFIED_") :]

        if obj.have_unified_mapping:
            self._add_unified_build_rules(
                backend_file,
                obj.unified_source_mapping,
                unified_files_makefile_variable=var,
                include_curdir_build_rules=False,
            )
            backend_file.write("%s += $(%s)\n" % (non_unified_var, var))
        else:
            # Sorted so output is consistent and we don't bump mtimes.
            source_files = list(sorted(obj.files))

            backend_file.write("%s += %s\n" % (non_unified_var, " ".join(source_files)))

        # Register the directory's target-objects node in the compile graph.
        self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
+
    def _process_directory_traversal(self, obj, backend_file):
        """Process a data.DirectoryTraversal instance."""
        fh = backend_file.fh

        def relativize(base, dirs):
            return (mozpath.relpath(d.translated, base) for d in dirs)

        if obj.dirs:
            fh.write(
                "DIRS := %s\n" % " ".join(relativize(backend_file.objdir, obj.dirs))
            )
            self._traversal.add(
                backend_file.relobjdir,
                dirs=relativize(self.environment.topobjdir, obj.dirs),
            )

        # The directory needs to be registered whether subdirectories have been
        # registered or not.
        self._traversal.add(backend_file.relobjdir)
+
+ def _process_defines(self, obj, backend_file, which="DEFINES"):
+ """Output the DEFINES rules to the given backend file."""
+ defines = list(obj.get_defines())
+ if defines:
+ defines = " ".join(shell_quote(d) for d in defines)
+ backend_file.write_once("%s += %s\n" % (which, defines))
+
+ def _process_installation_target(self, obj, backend_file):
+ # A few makefiles need to be able to override the following rules via
+ # make XPI_NAME=blah commands, so we default to the lazy evaluation as
+ # much as possible here to avoid breaking things.
+ if obj.xpiname:
+ backend_file.write("XPI_NAME = %s\n" % (obj.xpiname))
+ if obj.subdir:
+ backend_file.write("DIST_SUBDIR = %s\n" % (obj.subdir))
+ if obj.target and not obj.is_custom():
+ backend_file.write("FINAL_TARGET = $(DEPTH)/%s\n" % (obj.target))
+ else:
+ backend_file.write(
+ "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),"
+ "$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n"
+ )
+
+ if not obj.enabled:
+ backend_file.write("NO_DIST_INSTALL := 1\n")
+
    def _handle_idl_manager(self, manager):
        """Emit the xpidl install manifest entries and the xpidl Makefile.

        ``manager`` is the XPIDL manager collected by CommonBackend; its
        modules drive per-module dependency variables in the generated
        config/makefiles/xpidl/Makefile.
        """
        build_files = self._install_manifests["xpidl"]

        for p in ("Makefile", "backend.mk", ".deps/.mkdir.done"):
            build_files.add_optional_exists(p)

        for stem in manager.idl_stems():
            self._install_manifests["dist_include"].add_optional_exists("%s.h" % stem)

        for module in manager.modules:
            build_files.add_optional_exists(mozpath.join(".deps", "%s.pp" % module))

        modules = manager.modules
        xpt_modules = sorted(modules.keys())

        mk = Makefile()
        all_directories = set()

        for module_name in xpt_modules:
            module = manager.modules[module_name]
            all_directories |= module.directories
            deps = sorted(module.idl_files)

            # It may seem strange to have the .idl files listed as
            # prerequisites both here and in the auto-generated .pp files.
            # It is necessary to list them here to handle the case where a
            # new .idl is added to an xpt. If we add a new .idl and nothing
            # else has changed, the new .idl won't be referenced anywhere
            # except in the command invocation. Therefore, the .xpt won't
            # be rebuilt because the dependencies say it is up to date. By
            # listing the .idls here, we ensure the make file has a
            # reference to the new .idl. Since the new .idl presumably has
            # an mtime newer than the .xpt, it will trigger xpt generation.

            mk.add_statement("%s_deps := %s" % (module_name, " ".join(deps)))

            build_files.add_optional_exists("%s.xpt" % module_name)

        mk.add_statement("all_idl_dirs := %s" % " ".join(sorted(all_directories)))

        rules = StringIO()
        mk.dump(rules, removal_guard=False)

        # Create dependency for output header so we force regeneration if the
        # header was deleted. This ideally should not be necessary. However,
        # some processes (such as PGO at the time this was implemented) wipe
        # out dist/include without regard to our install manifests.

        obj = self.Substitution()
        obj.output_path = mozpath.join(
            self.environment.topobjdir, "config", "makefiles", "xpidl", "Makefile"
        )
        obj.input_path = mozpath.join(
            self.environment.topsrcdir, "config", "makefiles", "xpidl", "Makefile.in"
        )
        obj.topsrcdir = self.environment.topsrcdir
        obj.topobjdir = self.environment.topobjdir
        obj.config = self.environment
        self._create_makefile(
            obj,
            extra=dict(
                xpidl_rules=rules.getvalue(), xpidl_modules=" ".join(xpt_modules)
            ),
        )
+
+ def _process_program(self, obj, backend_file):
+ backend_file.write(
+ "PROGRAM = %s\n" % self._pretty_path(obj.output_path, backend_file)
+ )
+ if not obj.cxx_link and not self.environment.bin_suffix:
+ backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
+ def _process_host_program(self, program, backend_file):
+ backend_file.write(
+ "HOST_PROGRAM = %s\n" % self._pretty_path(program.output_path, backend_file)
+ )
+
+ def _process_rust_program_base(
+ self, obj, backend_file, target_variable, target_cargo_variable
+ ):
+ backend_file.write_once("CARGO_FILE := %s\n" % obj.cargo_file)
+ target_dir = mozpath.normpath(backend_file.environment.topobjdir)
+ backend_file.write_once("CARGO_TARGET_DIR := %s\n" % target_dir)
+ backend_file.write("%s += $(DEPTH)/%s\n" % (target_variable, obj.location))
+ backend_file.write("%s += %s\n" % (target_cargo_variable, obj.name))
+
+ def _process_rust_program(self, obj, backend_file):
+ self._process_rust_program_base(
+ obj, backend_file, "RUST_PROGRAMS", "RUST_CARGO_PROGRAMS"
+ )
+
+ def _process_host_rust_program(self, obj, backend_file):
+ self._process_rust_program_base(
+ obj, backend_file, "HOST_RUST_PROGRAMS", "HOST_RUST_CARGO_PROGRAMS"
+ )
+
    def _process_rust_tests(self, obj, backend_file):
        """Emit make variables and targets for Rust test crates."""
        if obj.config.substs.get("MOZ_RUST_TESTS"):
            # If --enable-rust-tests has been set, run these as a part of
            # make check.
            self._no_skip["check"].add(backend_file.relobjdir)
            backend_file.write("check:: force-cargo-test-run\n")
        build_target = self._build_target_for_obj(obj)
        # Bare subscript intentionally creates the compile-graph node (it is a
        # defaultdict-like structure) even when nothing depends on it yet.
        self._compile_graph[build_target]
        self._process_non_default_target(obj, "force-cargo-test-run", backend_file)
        backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
        backend_file.write_once("RUST_TESTS := %s\n" % " ".join(obj.names))
        backend_file.write_once("RUST_TEST_FEATURES := %s\n" % " ".join(obj.features))
+
+ def _process_simple_program(self, obj, backend_file):
+ if obj.is_unit_test:
+ backend_file.write("CPP_UNIT_TESTS += %s\n" % obj.program)
+ assert obj.cxx_link
+ else:
+ backend_file.write("SIMPLE_PROGRAMS += %s\n" % obj.program)
+ if not obj.cxx_link and not self.environment.bin_suffix:
+ backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
+ def _process_host_simple_program(self, program, backend_file):
+ backend_file.write("HOST_SIMPLE_PROGRAMS += %s\n" % program)
+
+ def _process_test_support_file(self, obj):
+ # Ensure test support programs and libraries are tracked by an
+ # install manifest for the benefit of the test packager.
+ if not obj.install_target.startswith("_tests"):
+ return
+
+ dest_basename = None
+ if isinstance(obj, BaseLibrary):
+ dest_basename = obj.lib_name
+ elif isinstance(obj, BaseProgram):
+ dest_basename = obj.program
+ if dest_basename is None:
+ return
+
+ self._install_manifests["_tests"].add_optional_exists(
+ mozpath.join(obj.install_target[len("_tests") + 1 :], dest_basename)
+ )
+
    def _process_test_manifest(self, obj, backend_file):
        """Register a test manifest: install its files and record it so a
        master manifest per flavor can be written later."""
        # Much of the logic in this function could be moved to CommonBackend.
        for source in obj.source_relpaths:
            self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))

        # Don't allow files to be defined multiple times unless it is allowed.
        # We currently allow duplicates for non-test files or test files if
        # the manifest is listed as a duplicate.
        for source, (dest, is_test) in obj.installs.items():
            try:
                self._install_manifests["_test_files"].add_link(source, dest)
            except ValueError:
                # Duplicate destination: fatal only for test files from
                # manifests that didn't opt into duplication.
                if not obj.dupe_manifest and is_test:
                    raise

        for base, pattern, dest in obj.pattern_installs:
            try:
                self._install_manifests["_test_files"].add_pattern_link(
                    base, pattern, dest
                )
            except ValueError:
                if not obj.dupe_manifest:
                    raise

        for dest in obj.external_installs:
            try:
                self._install_manifests["_test_files"].add_optional_exists(dest)
            except ValueError:
                if not obj.dupe_manifest:
                    raise

        # Group manifests by flavor; the set collects per-directory manifest
        # paths for the flavor's master manifest.
        m = self._test_manifests.setdefault(obj.flavor, (obj.install_prefix, set()))
        m[1].add(obj.manifest_obj_relpath)

        try:
            from reftest import ReftestManifest

            if isinstance(obj.manifest, ReftestManifest):
                # Mark included files as part of the build backend so changes
                # result in re-config.
                self.backend_input_files |= obj.manifest.manifests
        except ImportError:
            # Ignore errors caused by the reftest module not being present.
            # This can happen when building SpiderMonkey standalone, for example.
            pass
+
    def _process_local_include(self, local_include, backend_file):
        """Append an include directory to LOCAL_INCLUDES.

        The path is prettified relative to the backend file and shell-quoted
        when it contains characters that need escaping.
        """
        d, path = self._pretty_path_parts(local_include, backend_file)
        if isinstance(local_include, ObjDirPath) and not d:
            # path doesn't start with a slash in this case
            d = "$(CURDIR)/"
        elif d == "$(DEPTH)":
            d = "$(topobjdir)"
        quoted_path = shell_quote(path) if path else path
        if quoted_path != path:
            # Quoting was required: splice the (make-variable) directory
            # prefix just inside the opening quote character so make still
            # expands it.
            path = quoted_path[0] + d + quoted_path[1:]
        else:
            path = d + path
        backend_file.write("LOCAL_INCLUDES += -I%s\n" % path)
+
+ def _process_per_source_flag(self, per_source_flag, backend_file):
+ for flag in per_source_flag.flags:
+ backend_file.write(
+ "%s_FLAGS += %s\n" % (mozpath.basename(per_source_flag.file_name), flag)
+ )
+
+ def _process_computed_flags(self, computed_flags, backend_file):
+ for var, flags in computed_flags.get_flags():
+ backend_file.write(
+ "COMPUTED_%s += %s\n"
+ % (var, " ".join(make_quote(shell_quote(f)) for f in flags))
+ )
+
+ def _process_non_default_target(self, libdef, target_name, backend_file):
+ backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
+ backend_file.write("MOZBUILD_NON_DEFAULT_TARGETS += %s\n" % target_name)
+
+ def _process_shared_library(self, libdef, backend_file):
+ backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+ backend_file.write("FORCE_SHARED_LIB := 1\n")
+ backend_file.write("IMPORT_LIBRARY := %s\n" % libdef.import_name)
+ backend_file.write("SHARED_LIBRARY := %s\n" % libdef.lib_name)
+ if libdef.soname:
+ backend_file.write("DSO_SONAME := %s\n" % libdef.soname)
+ if libdef.symbols_file:
+ if libdef.symbols_link_arg:
+ backend_file.write("EXTRA_DSO_LDOPTS += %s\n" % libdef.symbols_link_arg)
+ if not libdef.cxx_link:
+ backend_file.write("LIB_IS_C_ONLY := 1\n")
+ if libdef.output_category:
+ self._process_non_default_target(libdef, libdef.lib_name, backend_file)
+ # Override the install rule target for this library. This is hacky,
+ # but can go away as soon as we start building libraries in their
+ # final location (bug 1459764).
+ backend_file.write("SHARED_LIBRARY_TARGET := %s\n" % libdef.output_category)
+
+ def _process_static_library(self, libdef, backend_file):
+ backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+ backend_file.write("FORCE_STATIC_LIB := 1\n")
+ backend_file.write("REAL_LIBRARY := %s\n" % libdef.lib_name)
+ if libdef.no_expand_lib:
+ backend_file.write("NO_EXPAND_LIBS := 1\n")
+
+ def _process_sandboxed_wasm_library(self, libdef, backend_file):
+ backend_file.write("WASM_ARCHIVE := %s\n" % libdef.basename)
+
+ def _process_rust_library(self, libdef, backend_file):
+ backend_file.write_once(
+ "%s := %s\n" % (libdef.LIB_FILE_VAR, libdef.import_name)
+ )
+ backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
+ # Need to normalize the path so Cargo sees the same paths from all
+ # possible invocations of Cargo with this CARGO_TARGET_DIR. Otherwise,
+ # Cargo's dependency calculations don't work as we expect and we wind
+ # up recompiling lots of things.
+ target_dir = mozpath.normpath(backend_file.environment.topobjdir)
+ backend_file.write("CARGO_TARGET_DIR := %s\n" % target_dir)
+ if libdef.features:
+ backend_file.write(
+ "%s := %s\n" % (libdef.FEATURES_VAR, " ".join(libdef.features))
+ )
+ if libdef.output_category:
+ self._process_non_default_target(libdef, libdef.import_name, backend_file)
+
+ def _process_host_shared_library(self, libdef, backend_file):
+ backend_file.write("HOST_SHARED_LIBRARY = %s\n" % libdef.lib_name)
+
+ def _build_target_for_obj(self, obj):
+ if hasattr(obj, "output_category") and obj.output_category:
+ target_name = obj.output_category
+ else:
+ target_name = obj.KIND
+ if target_name == "wasm":
+ target_name = "target"
+ return "%s/%s" % (
+ mozpath.relpath(obj.objdir, self.environment.topobjdir),
+ target_name,
+ )
+
    def _process_linked_libraries(self, obj, backend_file):
        """Emit objects, list files, and library link variables for obj, and
        add the corresponding edges to the compile graph."""

        def pretty_relpath(lib, name):
            # Path of lib's file `name`, relative to obj's objdir.
            return os.path.normpath(
                mozpath.join(mozpath.relpath(lib.objdir, obj.objdir), name)
            )

        objs, shared_libs, os_libs, static_libs = self._expand_libs(obj)

        obj_target = obj.name
        if isinstance(obj, Program):
            obj_target = self._pretty_path(obj.output_path, backend_file)

        objs_ref = " \\\n    ".join(os.path.relpath(o, obj.objdir) for o in objs)
        # Don't bother with a list file if we're only linking objects built
        # in this directory or building a real static library. This
        # accommodates clang-plugin, where we would otherwise pass an
        # incorrect list file format to the host compiler as well as when
        # creating an archive with AR, which doesn't understand list files.
        # NOTE: `and` binds tighter than `or` here — the condition is
        # (local objs and not lib-ish) or (non-expanded static/wasm lib).
        if (
            objs == obj.objs
            and not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary))
            or isinstance(obj, (StaticLibrary, SandboxedWasmLibrary))
            and obj.no_expand_lib
        ):
            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, objs_ref))
            backend_file.write("%s: %s\n" % (obj_target, objs_ref))
        elif not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
            list_file_path = "%s.list" % obj.name.replace(".", "_")
            list_file_ref = self._make_list_file(
                obj.KIND, obj.objdir, objs, list_file_path
            )
            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, list_file_ref))
            backend_file.write_once("%s: %s\n" % (obj_target, list_file_path))
            backend_file.write("%s: %s\n" % (obj_target, objs_ref))

        if getattr(obj, "symbols_file", None):
            backend_file.write_once("%s: %s\n" % (obj_target, obj.symbols_file))

        for lib in shared_libs:
            # Shared libraries are only linked into target binaries.
            assert obj.KIND != "host" and obj.KIND != "wasm"
            backend_file.write_once(
                "SHARED_LIBS += %s\n" % pretty_relpath(lib, lib.import_name)
            )

        # We have to link any Rust libraries after all intermediate static
        # libraries have been listed to ensure that the Rust libraries are
        # searched after the C/C++ objects that might reference Rust symbols.
        var = "HOST_LIBS" if obj.KIND == "host" else "STATIC_LIBS"
        for lib in chain(
            (l for l in static_libs if not isinstance(l, BaseRustLibrary)),
            (l for l in static_libs if isinstance(l, BaseRustLibrary)),
        ):
            backend_file.write_once(
                "%s += %s\n" % (var, pretty_relpath(lib, lib.import_name))
            )

        for lib in os_libs:
            if obj.KIND == "target":
                backend_file.write_once("OS_LIBS += %s\n" % lib)
            elif obj.KIND == "host":
                backend_file.write_once("HOST_EXTRA_LIBS += %s\n" % lib)

        if not isinstance(obj, (StaticLibrary, HostLibrary)) or obj.no_expand_lib:
            # This will create the node even if there aren't any linked libraries.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]

            # Make the build target depend on all the target/host-objects that
            # recursively are linked into it.
            def recurse_libraries(obj):
                for lib in obj.linked_libraries:
                    if (
                        isinstance(lib, (StaticLibrary, HostLibrary))
                        and not lib.no_expand_lib
                    ):
                        # Expanded static libs contribute their own deps
                        # directly instead of appearing as a graph node.
                        recurse_libraries(lib)
                    elif not isinstance(lib, ExternalLibrary):
                        self._compile_graph[build_target].add(
                            self._build_target_for_obj(lib)
                        )
                relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
                objects_target = mozpath.join(relobjdir, "%s-objects" % obj.KIND)
                if objects_target in self._compile_graph:
                    self._compile_graph[build_target].add(objects_target)

            recurse_libraries(obj)

        # Process library-based defines
        self._process_defines(obj.lib_defines, backend_file)
+
+ def _add_install_target(self, backend_file, install_target, tier, dest, files):
+ self._no_skip[tier].add(backend_file.relobjdir)
+ for f in files:
+ backend_file.write("%s_FILES += %s\n" % (install_target, f))
+ backend_file.write("%s_DEST := %s\n" % (install_target, dest))
+ backend_file.write("%s_TARGET := %s\n" % (install_target, tier))
+ backend_file.write("INSTALL_TARGETS += %s\n" % install_target)
+
    def _process_final_target_files(self, obj, files, backend_file):
        """Route FINAL_TARGET_FILES entries to install manifests or, for
        objdir/absolute files, to INSTALL_TARGETS / hand-written misc rules."""
        target = obj.install_target
        path = mozpath.basedir(
            target, ("dist/bin", "dist/xpi-stage", "_tests", "dist/include")
        )
        if not path:
            raise Exception("Cannot install to " + target)

        # Exports are not interesting to artifact builds.
        if path == "dist/include" and self.environment.is_artifact_build:
            return

        manifest = path.replace("/", "_")
        install_manifest = self._install_manifests[manifest]
        reltarget = mozpath.relpath(target, path)

        for path, files in files.walk():
            target_var = (mozpath.join(target, path) if path else target).replace(
                "/", "_"
            )
            # We don't necessarily want to combine these, because non-wildcard
            # absolute files tend to be libraries, and we don't want to mix
            # those in with objdir headers that will be installed during export.
            # (See bug 1642882 for details.)
            objdir_files = []
            absolute_files = []

            for f in files:
                assert not isinstance(f, RenamedSourcePath)
                dest_dir = mozpath.join(reltarget, path)
                dest_file = mozpath.join(dest_dir, f.target_basename)
                if not isinstance(f, ObjDirPath):
                    if "*" in f:
                        if f.startswith("/") or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if "*" in basepath:
                                raise Exception(
                                    "Wildcards are only supported in the filename part"
                                    " of srcdir-relative or absolute paths."
                                )

                            install_manifest.add_pattern_link(basepath, wild, dest_dir)
                        else:
                            install_manifest.add_pattern_link(f.srcdir, f, dest_dir)
                    elif isinstance(f, AbsolutePath):
                        # Only shared-library artifacts may be installed from
                        # absolute paths (see the bug referenced above).
                        if not f.full_path.lower().endswith((".dll", ".pdb", ".so")):
                            raise Exception(
                                "Absolute paths installed to FINAL_TARGET_FILES must"
                                " only be shared libraries or associated debug"
                                " information."
                            )
                        install_manifest.add_optional_exists(dest_file)
                        absolute_files.append(f.full_path)
                    else:
                        install_manifest.add_link(f.full_path, dest_file)
                else:
                    # Objdir files are copied via INSTALL_TARGETS below.
                    install_manifest.add_optional_exists(dest_file)
                    objdir_files.append(self._pretty_path(f, backend_file))
            install_location = "$(DEPTH)/%s" % mozpath.join(target, path)
            if objdir_files:
                tier = "export" if obj.install_target == "dist/include" else "misc"
                # We cannot generate multilocale.txt during misc at the moment.
                if objdir_files[0] == "multilocale.txt":
                    tier = "libs"
                self._add_install_target(
                    backend_file, target_var, tier, install_location, objdir_files
                )
            if absolute_files:
                # Unfortunately, we can't use _add_install_target because on
                # Windows, the absolute file paths that we want to install
                # from often have spaces. So we write our own rule.
                self._no_skip["misc"].add(backend_file.relobjdir)
                backend_file.write(
                    "misc::\n%s\n"
                    % "\n".join(
                        "\t$(INSTALL) %s %s"
                        % (make_quote(shell_quote(f)), install_location)
                        for f in absolute_files
                    )
                )
+
+ def _process_final_target_pp_files(self, obj, files, backend_file, name):
+ # Bug 1177710 - We'd like to install these via manifests as
+ # preprocessed files. But they currently depend on non-standard flags
+ # being added via some Makefiles, so for now we just pass them through
+ # to the underlying Makefile.in.
+ #
+ # Note that if this becomes a manifest, OBJDIR_PP_FILES will likely
+ # still need to use PP_TARGETS internally because we can't have an
+ # install manifest for the root of the objdir.
+ for i, (path, files) in enumerate(files.walk()):
+ self._no_skip["misc"].add(backend_file.relobjdir)
+ var = "%s_%d" % (name, i)
+ for f in files:
+ backend_file.write(
+ "%s += %s\n" % (var, self._pretty_path(f, backend_file))
+ )
+ backend_file.write(
+ "%s_PATH := $(DEPTH)/%s\n"
+ % (var, mozpath.join(obj.install_target, path))
+ )
+ backend_file.write("%s_TARGET := misc\n" % var)
+ backend_file.write("PP_TARGETS += %s\n" % var)
+
+ def _write_localized_files_files(self, files, name, backend_file):
+ for f in files:
+ if not isinstance(f, ObjDirPath):
+ # The emitter asserts that all srcdir files start with `en-US/`
+ e, f = f.split("en-US/")
+ assert not e
+ if "*" in f:
+ # We can't use MERGE_FILE for wildcards because it takes
+ # only the first match internally. This is only used
+ # in one place in the tree currently so we'll hardcode
+ # that specific behavior for now.
+ backend_file.write(
+ "%s += $(wildcard $(LOCALE_SRCDIR)/%s)\n" % (name, f)
+ )
+ else:
+ backend_file.write("%s += $(call MERGE_FILE,%s)\n" % (name, f))
+ else:
+ # Objdir files are allowed from LOCALIZED_GENERATED_FILES
+ backend_file.write(
+ "%s += %s\n" % (name, self._pretty_path(f, backend_file))
+ )
+
+ def _process_localized_files(self, obj, files, backend_file):
+ target = obj.install_target
+ path = mozpath.basedir(target, ("dist/bin",))
+ if not path:
+ raise Exception("Cannot install localized files to " + target)
+ for i, (path, files) in enumerate(files.walk()):
+ name = "LOCALIZED_FILES_%d" % i
+ self._no_skip["misc"].add(backend_file.relobjdir)
+ self._write_localized_files_files(files, name + "_FILES", backend_file)
+ # Use FINAL_TARGET here because some l10n repack rules set
+ # XPI_NAME to generate langpacks.
+ backend_file.write("%s_DEST = $(FINAL_TARGET)/%s\n" % (name, path))
+ backend_file.write("%s_TARGET := misc\n" % name)
+ backend_file.write("INSTALL_TARGETS += %s\n" % name)
+
+ def _process_localized_pp_files(self, obj, files, backend_file):
+ target = obj.install_target
+ path = mozpath.basedir(target, ("dist/bin",))
+ if not path:
+ raise Exception("Cannot install localized files to " + target)
+ for i, (path, files) in enumerate(files.walk()):
+ name = "LOCALIZED_PP_FILES_%d" % i
+ self._no_skip["misc"].add(backend_file.relobjdir)
+ self._write_localized_files_files(files, name, backend_file)
+ # Use FINAL_TARGET here because some l10n repack rules set
+ # XPI_NAME to generate langpacks.
+ backend_file.write("%s_PATH = $(FINAL_TARGET)/%s\n" % (name, path))
+ backend_file.write("%s_TARGET := misc\n" % name)
+ # Localized files will have different content in different
+ # localizations, and some preprocessed files may not have
+ # any preprocessor directives.
+ backend_file.write(
+ "%s_FLAGS := --silence-missing-directive-warnings\n" % name
+ )
+ backend_file.write("PP_TARGETS += %s\n" % name)
+
+ def _process_objdir_files(self, obj, files, backend_file):
+ # We can't use an install manifest for the root of the objdir, since it
+ # would delete all the other files that get put there by the build
+ # system.
+ for i, (path, files) in enumerate(files.walk()):
+ self._no_skip["misc"].add(backend_file.relobjdir)
+ for f in files:
+ backend_file.write(
+ "OBJDIR_%d_FILES += %s\n" % (i, self._pretty_path(f, backend_file))
+ )
+ backend_file.write("OBJDIR_%d_DEST := $(topobjdir)/%s\n" % (i, path))
+ backend_file.write("OBJDIR_%d_TARGET := misc\n" % i)
+ backend_file.write("INSTALL_TARGETS += OBJDIR_%d\n" % i)
+
    def _process_chrome_manifest_entry(self, obj, backend_file):
        """Emit misc-tier buildlist commands that append chrome manifest
        entries (and register sub-manifests in the top-level manifest)."""
        fragment = Makefile()
        # NOTE(review): the target is spelled "misc:" so the dumped rule
        # presumably comes out as a double-colon rule ("misc::") — confirm
        # against Makefile.create_rule/dump formatting.
        rule = fragment.create_rule(targets=["misc:"])

        top_level = mozpath.join(obj.install_target, "chrome.manifest")
        if obj.path != top_level:
            # This entry lives in a sub-manifest; make sure the top-level
            # chrome.manifest references it.
            args = [
                mozpath.join("$(DEPTH)", top_level),
                make_quote(
                    shell_quote(
                        "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
                    )
                ),
            ]
            rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
        args = [
            mozpath.join("$(DEPTH)", obj.path),
            make_quote(shell_quote(str(obj.entry))),
        ]
        rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
        fragment.dump(backend_file.fh, removal_guard=False)

        self._no_skip["misc"].add(obj.relsrcdir)
+
+ def _write_manifests(self, dest, manifests):
+ man_dir = mozpath.join(self.environment.topobjdir, "_build_manifests", dest)
+
+ for k, manifest in manifests.items():
+ with self._write_file(mozpath.join(man_dir, k)) as fh:
+ manifest.write(fileobj=fh)
+
+ def _write_master_test_manifest(self, path, manifests):
+ with self._write_file(path) as master:
+ master.write(
+ "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n\n"
+ )
+
+ for manifest in sorted(manifests):
+ master.write("[include:%s]\n" % manifest)
+
    class Substitution(object):
        """BaseConfigSubstitution-like class for use with _create_makefile."""

        # Pure attribute bag; __slots__ keeps instances small and catches
        # typos on assignment.
        __slots__ = ("input_path", "output_path", "topsrcdir", "topobjdir", "config")
+
    def _create_makefile(self, obj, stub=False, extra=None):
        """Creates the given makefile. Makefiles are treated the same as
        config files, but some additional header and footer is added to the
        output.

        When the stub argument is True, no source file is used, and a stub
        makefile with the default header and footer only is created.

        ``extra`` is an optional dict of additional preprocessor context
        entries (e.g. generated variable values).
        """
        with self._get_preprocessor(obj) as pp:
            if extra:
                pp.context.update(extra)
            if not pp.context.get("autoconfmk", ""):
                pp.context["autoconfmk"] = "autoconf.mk"
            # The header lines below define the standard make variables every
            # generated Makefile relies on; their order is significant.
            pp.handleLine(
                "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n"
            )
            pp.handleLine("DEPTH := @DEPTH@\n")
            pp.handleLine("topobjdir := @topobjdir@\n")
            pp.handleLine("topsrcdir := @top_srcdir@\n")
            pp.handleLine("srcdir := @srcdir@\n")
            pp.handleLine("srcdir_rel := @srcdir_rel@\n")
            pp.handleLine("relativesrcdir := @relativesrcdir@\n")
            pp.handleLine("include $(DEPTH)/config/@autoconfmk@\n")
            if not stub:
                pp.do_include(obj.input_path)
            # Empty line to avoid failures when last line in Makefile.in ends
            # with a backslash.
            pp.handleLine("\n")
            pp.handleLine("include $(topsrcdir)/config/recurse.mk\n")
        if not stub:
            # Adding the Makefile.in here has the desired side-effect
            # that if the Makefile.in disappears, this will force
            # moz.build traversal. This means that when we remove empty
            # Makefile.in files, the old file will get replaced with
            # the autogenerated one automatically.
            self.backend_input_files.add(obj.input_path)

        self._makefile_out_count += 1
+
+ def _handle_linked_rust_crates(self, obj, extern_crate_file):
+ backend_file = self._get_backend_file_for(obj)
+
+ backend_file.write("RS_STATICLIB_CRATE_SRC := %s\n" % extern_crate_file)
+
    def _handle_ipdl_sources(
        self,
        ipdl_dir,
        sorted_ipdl_sources,
        sorted_nonstatic_ipdl_sources,
        sorted_static_ipdl_sources,
    ):
        """Write ipdlsrcs.mk: preprocessing rules for non-static .ipdl files
        plus the master ALL_IPDLSRCS / IPDLDIRS variables."""
        # Write out a master list of all IPDL source files.
        mk = Makefile()

        sorted_nonstatic_ipdl_basenames = list()
        for source in sorted_nonstatic_ipdl_sources:
            basename = os.path.basename(source)
            sorted_nonstatic_ipdl_basenames.append(basename)
            # Each non-static .ipdl is preprocessed into ipdl_dir under its
            # basename; remove first so we never write through a symlink.
            rule = mk.create_rule([basename])
            rule.add_dependencies([source])
            rule.add_commands(
                [
                    "$(RM) $@",
                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
                    "$< -o $@)",
                ]
            )

        mk.add_statement(
            "ALL_IPDLSRCS := %s %s"
            % (
                " ".join(sorted_nonstatic_ipdl_basenames),
                " ".join(sorted_static_ipdl_sources),
            )
        )

        # Preprocessed ipdl files are generated in ipdl_dir.
        mk.add_statement(
            "IPDLDIRS := %s %s"
            % (
                ipdl_dir,
                " ".join(
                    sorted(set(mozpath.dirname(p) for p in sorted_static_ipdl_sources))
                ),
            )
        )

        with self._write_file(mozpath.join(ipdl_dir, "ipdlsrcs.mk")) as ipdls:
            mk.dump(ipdls, removal_guard=False)
+
    def _handle_webidl_build(
        self,
        bindings_dir,
        unified_source_mapping,
        webidls,
        expected_build_output_files,
        global_define_files,
    ):
        """Write webidlsrcs.mk for the WebIDL bindings build and register the
        expected generated headers with the dist_include manifest."""
        include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")
        for f in expected_build_output_files:
            if f.startswith(include_dir):
                self._install_manifests["dist_include"].add_optional_exists(
                    mozpath.relpath(f, include_dir)
                )

        # We pass WebIDL info to make via a completely generated make file.
        mk = Makefile()
        mk.add_statement(
            "nonstatic_webidl_files := %s"
            % " ".join(sorted(webidls.all_non_static_basenames()))
        )
        mk.add_statement(
            "globalgen_sources := %s" % " ".join(sorted(global_define_files))
        )
        mk.add_statement(
            "test_sources := %s"
            % " ".join(sorted("%sBinding.cpp" % s for s in webidls.all_test_stems()))
        )

        # Add rules to preprocess bindings.
        # This should ideally be using PP_TARGETS. However, since the input
        # filenames match the output filenames, the existing PP_TARGETS rules
        # result in circular dependencies and other make weirdness. One
        # solution is to rename the input or output files respectively. See
        # bug 928195 comment 129.
        for source in sorted(webidls.all_preprocessed_sources()):
            basename = os.path.basename(source)
            rule = mk.create_rule([basename])
            # GLOBAL_DEPS would be used here, but due to the include order of
            # our makefiles it's not set early enough to be useful, so we use
            # WEBIDL_PP_DEPS, which has analogous content.
            rule.add_dependencies([source, "$(WEBIDL_PP_DEPS)"])
            rule.add_commands(
                [
                    # Remove the file before writing so bindings that go from
                    # static to preprocessed don't end up writing to a symlink,
                    # which would modify content in the source directory.
                    "$(RM) $@",
                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
                    "$< -o $@)",
                ]
            )

        self._add_unified_build_rules(
            mk,
            unified_source_mapping,
            unified_files_makefile_variable="unified_binding_cpp_files",
        )

        webidls_mk = mozpath.join(bindings_dir, "webidlsrcs.mk")
        with self._write_file(webidls_mk) as fh:
            mk.dump(fh, removal_guard=False)

        # Add the test directory to the compile graph.
        if self.environment.substs.get("ENABLE_TESTS"):
            # Bare subscript creates the graph node for the bindings test
            # objects even when nothing links against them yet.
            self._compile_graph[
                mozpath.join(
                    mozpath.relpath(bindings_dir, self.environment.topobjdir),
                    "test",
                    "target-objects",
                )
            ]
+
    def _format_generated_file_input_name(self, path, obj):
        """Return the make expression naming an input of a GENERATED_FILES
        entry, using MERGE_FILE/MERGE_RELATIVE_FILE for localized inputs."""
        if obj.localized:
            # Localized generated files can have locale-specific inputs, which
            # are indicated by paths starting with `en-US/` or containing
            # `locales/en-US/`.
            if "locales/en-US" in path:
                # We need an "absolute source path" relative to
                # topsrcdir, like "/source/path".
                if not path.startswith("/"):
                    # NOTE(review): `path` appears to be a Path-like object
                    # here (it has .full_path) and becomes a plain str after
                    # this — the later string ops rely on that duality.
                    path = "/" + mozpath.relpath(path.full_path, obj.topsrcdir)
                e, f = path.split("locales/en-US/", 1)
                assert f
                return "$(call MERGE_RELATIVE_FILE,{},{}locales)".format(
                    f, e if not e.startswith("/") else e[len("/") :]
                )
            elif path.startswith("en-US/"):
                e, f = path.split("en-US/", 1)
                assert not e
                return "$(call MERGE_FILE,%s)" % f
            return self._pretty_path(path, self._get_backend_file_for(obj))
        else:
            return self._pretty_path(path, self._get_backend_file_for(obj))
+
+ def _format_generated_file_output_name(self, path, obj):
+ if not isinstance(path, Path):
+ path = ObjDirPath(obj._context, "!" + path)
+ return self._pretty_path(path, self._get_backend_file_for(obj))
diff --git a/python/mozbuild/mozbuild/backend/static_analysis.py b/python/mozbuild/mozbuild/backend/static_analysis.py
new file mode 100644
index 0000000000..2b3ce96e75
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/static_analysis.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides a backend static-analysis, like clang-tidy and coverity.
+# The main difference between this and the default database backend is that this one
+# tracks folders that can be built in the non-unified environment and generates
# the corresponding build commands for the files.
+
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.compilation.database import CompileDBBackend
+
+
class StaticAnalysisBackend(CompileDBBackend):
    """Compilation-database backend for static analysis (clang-tidy, coverity).

    Unlike the default database backend, files under directories listed in
    ``build/non-unified-compat`` get stand-alone (non-unified) compile
    commands.
    """

    def _init(self):
        CompileDBBackend._init(self)
        self.non_unified_build = []

        # List of directories that can be built outside of the unified build
        # system, one srcdir-relative path per line.
        with open(
            mozpath.join(self.environment.topsrcdir, "build", "non-unified-compat")
        ) as fh:
            # Skip blank lines: joining an empty relative path would yield
            # topsrcdir itself, which every source file's path starts with,
            # effectively disabling unified commands for the whole tree.
            self.non_unified_build = [
                mozpath.join(self.environment.topsrcdir, line.strip())
                for line in fh
                if line.strip()
            ]

    def _build_cmd(self, cmd, filename, unified):
        """Return `cmd` plus either the file itself or its unified source.

        The file itself is used when it is built non-unified (`unified` is
        None) or when it lives under a non-unified-compat directory.
        """
        cmd = list(cmd)
        if unified is None or any(
            filename.startswith(path) for path in self.non_unified_build
        ):
            cmd.append(filename)
        else:
            cmd.append(unified)

        return cmd

    def _outputfile_path(self):
        """Return objdir/static-analysis/compile_commands.json, creating the
        directory if needed."""
        database_path = os.path.join(self.environment.topobjdir, "static-analysis")

        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists/os.mkdir pair.
        os.makedirs(database_path, exist_ok=True)

        return mozpath.join(database_path, "compile_commands.json")
diff --git a/python/mozbuild/mozbuild/backend/test_manifest.py b/python/mozbuild/mozbuild/backend/test_manifest.py
new file mode 100644
index 0000000000..ba1e5135f4
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/test_manifest.py
@@ -0,0 +1,110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+
+import mozpack.path as mozpath
+import six
+import six.moves.cPickle as pickle
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.frontend.data import TestManifest
+
+
class TestManifestBackend(PartialBackend):
    """Partial backend that generates test metadata files."""

    def _init(self):
        # test metadata keyed by srcdir-relative test path
        self.tests_by_path = defaultdict(list)
        # install tuples keyed by srcdir-relative source path
        self.installs_by_path = defaultdict(list)
        # paths whose installs are resolved lazily (see add_installs)
        self.deferred_installs = set()
        # per-manifest default sections, keyed by manifest path
        self.manifest_defaults = {}

        # Add config.status so performing a build will invalidate this backend.
        self.backend_input_files.add(
            mozpath.join(self.environment.topobjdir, "config.status")
        )

    def consume_object(self, obj):
        """Record tests, defaults and installs from each TestManifest object;
        all other object types are ignored."""
        if not isinstance(obj, TestManifest):
            return

        self.backend_input_files.add(obj.path)
        self.backend_input_files |= obj.context_all_paths
        for source in obj.source_relpaths:
            self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))
        try:
            from reftest import ReftestManifest

            if isinstance(obj.manifest, ReftestManifest):
                # Mark included files as part of the build backend so changes
                # result in re-config.
                self.backend_input_files |= obj.manifest.manifests
        except ImportError:
            # Ignore errors caused by the reftest module not being present.
            # This can happen when building SpiderMonkey standalone, for example.
            pass

        for test in obj.tests:
            self.add(test, obj.flavor, obj.topsrcdir)
        self.add_defaults(obj.manifest)
        self.add_installs(obj, obj.topsrcdir)

    def consume_finished(self):
        """Pickle the accumulated test metadata into the objdir."""
        topobjdir = self.environment.topobjdir

        # readmode="rb" because pickle comparison for change detection must
        # read the existing file as binary.
        with self._write_file(
            mozpath.join(topobjdir, "all-tests.pkl"), readmode="rb"
        ) as fh:
            pickle.dump(dict(self.tests_by_path), fh, protocol=2)

        with self._write_file(
            mozpath.join(topobjdir, "test-defaults.pkl"), readmode="rb"
        ) as fh:
            pickle.dump(self.manifest_defaults, fh, protocol=2)

        path = mozpath.join(topobjdir, "test-installs.pkl")
        with self._write_file(path, readmode="rb") as fh:
            # Only deferred installs are persisted here.
            pickle.dump(
                {
                    k: v
                    for k, v in self.installs_by_path.items()
                    if k in self.deferred_installs
                },
                fh,
                protocol=2,
            )

    def add(self, t, flavor, topsrcdir):
        """Record one test entry, annotated with srcdir-relative paths."""
        t = dict(t)
        t["flavor"] = flavor

        path = mozpath.normpath(t["path"])
        manifest = mozpath.normpath(t["manifest"])
        assert mozpath.basedir(path, [topsrcdir])
        assert mozpath.basedir(manifest, [topsrcdir])

        # +1 strips the path separator following topsrcdir.
        key = path[len(topsrcdir) + 1 :]
        t["file_relpath"] = key
        t["dir_relpath"] = mozpath.dirname(key)
        t["srcdir_relpath"] = key
        t["manifest_relpath"] = manifest[len(topsrcdir) + 1 :]

        self.tests_by_path[key].append(t)

    def add_defaults(self, manifest):
        """Record the default sections of manifest and its sub-manifests."""
        if not hasattr(manifest, "manifest_defaults"):
            return
        for sub_manifest, defaults in manifest.manifest_defaults.items():
            self.manifest_defaults[sub_manifest] = defaults

    def add_installs(self, obj, topsrcdir):
        """Record plain, pattern and deferred installs from a TestManifest."""
        for src, (dest, _) in six.iteritems(obj.installs):
            key = src[len(topsrcdir) + 1 :]
            self.installs_by_path[key].append((src, dest))
        for src, pat, dest in obj.pattern_installs:
            key = mozpath.join(src[len(topsrcdir) + 1 :], pat)
            self.installs_by_path[key].append((src, pat, dest))
        for path in obj.deferred_installs:
            # [2:] strips the "!/" prefix marking deferred objdir paths —
            # TODO confirm against the emitter's deferred_installs format.
            self.deferred_installs.add(path[2:])
diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py
new file mode 100644
index 0000000000..b9b30804b8
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/visualstudio.py
@@ -0,0 +1,712 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains a build backend for generating Visual Studio project
+# files.
+
+import errno
+import os
+import re
+import sys
+import uuid
+from pathlib import Path
+from xml.dom import getDOMImplementation
+
+from mozpack.files import FileFinder
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import (
+ Defines,
+ HostProgram,
+ HostSources,
+ Library,
+ LocalInclude,
+ Program,
+ SandboxedWasmLibrary,
+ Sources,
+ UnifiedSources,
+)
+from .common import CommonBackend
+
+MSBUILD_NAMESPACE = "http://schemas.microsoft.com/developer/msbuild/2003"
+MSNATVIS_NAMESPACE = "http://schemas.microsoft.com/vstudio/debugger/natvis/2010"
+
+
def get_id(name):
    """Return a deterministic uppercase GUID string derived from *name*.

    uuid5 is name-based, so regenerating the projects yields the same
    GUID for the same name every time.
    """
    # The Python 2 encode step that used to live here is dead code on
    # Python 3; uuid.uuid5 accepts str directly.
    return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
+
+
def visual_studio_product_to_solution_version(version):
    """Map a VS product year to (solution format version, comment version)."""
    versions = {
        "2017": ("12.00", "15"),
        "2019": ("12.00", "16"),
        "2022": ("12.00", "17"),
    }
    if version not in versions:
        raise Exception("Unknown version seen: %s" % version)
    return versions[version]
+
+
def visual_studio_product_to_platform_toolset_version(version):
    """Map a VS product year to its MSBuild platform toolset (e.g. v142)."""
    toolsets = {"2017": "v141", "2019": "v142", "2022": "v143"}
    if version not in toolsets:
        raise Exception("Unknown version seen: %s" % version)
    return toolsets[version]
+
+
+class VisualStudioBackend(CommonBackend):
+ """Generate Visual Studio project files.
+
+ This backend is used to produce Visual Studio projects and a solution
+ to foster developing Firefox with Visual Studio.
+
+ This backend is currently considered experimental. There are many things
+ not optimal about how it works.
+ """
+
    def _init(self):
        """Set up output locations and the per-directory accumulators."""
        CommonBackend._init(self)

        # These should eventually evolve into parameters.
        self._out_dir = os.path.join(self.environment.topobjdir, "msvc")
        self._projsubdir = "projects"

        # Default to 2017 when configure did not record MSVS_VERSION.
        self._version = self.environment.substs.get("MSVS_VERSION", "2017")

        # Populated by consume_object(), read in consume_finished(); all
        # keyed by relsrcdir except the last two (library basename /
        # program name -> relsrcdir).
        self._paths_to_sources = {}
        self._paths_to_includes = {}
        self._paths_to_defines = {}
        self._paths_to_configs = {}
        self._libs_to_paths = {}
        self._progs_to_paths = {}
+
+ def summary(self):
+ return ExecutionSummary(
+ "VisualStudio backend executed in {execution_time:.2f}s\n"
+ "Generated Visual Studio solution at {path:s}",
+ execution_time=self._execution_time,
+ path=os.path.join(self._out_dir, "mozilla.sln"),
+ )
+
    def consume_object(self, obj):
        """Accumulate state from one frontend object.

        Nothing is written here; projects are emitted in consume_finished().
        Always returns True so every object counts as consumed.
        """
        reldir = getattr(obj, "relsrcdir", None)

        # Remember the first config seen for each source directory.
        if hasattr(obj, "config") and reldir not in self._paths_to_configs:
            self._paths_to_configs[reldir] = obj.config

        if isinstance(obj, Sources):
            self._add_sources(reldir, obj)

        elif isinstance(obj, HostSources):
            self._add_sources(reldir, obj)

        elif isinstance(obj, UnifiedSources):
            # XXX we should be letting CommonBackend.consume_object call this
            # for us instead.
            self._process_unified_sources(obj)

        elif isinstance(obj, Library) and not isinstance(obj, SandboxedWasmLibrary):
            self._libs_to_paths[obj.basename] = reldir

        elif isinstance(obj, Program) or isinstance(obj, HostProgram):
            self._progs_to_paths[obj.program] = reldir

        elif isinstance(obj, Defines):
            self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)

        elif isinstance(obj, LocalInclude):
            includes = self._paths_to_includes.setdefault(reldir, [])
            includes.append(obj.path.full_path)

        # Just acknowledge everything.
        return True
+
+ def _add_sources(self, reldir, obj):
+ s = self._paths_to_sources.setdefault(reldir, set())
+ s.update(obj.files)
+
+ def _process_unified_sources(self, obj):
+ reldir = getattr(obj, "relsrcdir", None)
+
+ s = self._paths_to_sources.setdefault(reldir, set())
+ s.update(obj.files)
+
    def consume_finished(self):
        """Write every project, the property sheet, the mach wrappers and
        the solution file.

        Order matters only in that _write_solution() reads back the existing
        solution file (if any) before it is rewritten.
        """
        out_dir = self._out_dir
        out_proj_dir = os.path.join(self._out_dir, self._projsubdir)

        # One project per library and per program collected earlier.
        projects = self._write_projects_for_sources(
            self._libs_to_paths, "library", out_proj_dir
        )
        projects.update(
            self._write_projects_for_sources(
                self._progs_to_paths, "binary", out_proj_dir
            )
        )

        # Generate projects that can be used to build common targets.
        for target in ("export", "binaries", "tools", "full"):
            basename = "target_%s" % target
            command = "$(SolutionDir)\\mach.bat build"
            if target != "full":
                command += " %s" % target

            project_id = self._write_vs_project(
                out_proj_dir,
                basename,
                target,
                build_command=command,
                clean_command="$(SolutionDir)\\mach.bat clobber",
            )

            projects[basename] = (project_id, basename, target)

        # A project that can be used to regenerate the visual studio projects.
        basename = "target_vs"
        project_id = self._write_vs_project(
            out_proj_dir,
            basename,
            "visual-studio",
            build_command="$(SolutionDir)\\mach.bat build-backend -b VisualStudio",
        )
        projects[basename] = (project_id, basename, "visual-studio")

        # Write out a shared property file with common variables.
        props_path = os.path.join(out_proj_dir, "mozilla.props")
        with self._write_file(props_path, readmode="rb") as fh:
            self._write_props(fh)

        # Generate some wrapper scripts that allow us to invoke mach inside
        # a MozillaBuild-like environment. We currently only use the batch
        # script. We'd like to use the PowerShell script. However, it seems
        # to buffer output from within Visual Studio (surely this is
        # configurable) and the default execution policy of PowerShell doesn't
        # allow custom scripts to be executed.
        with self._write_file(os.path.join(out_dir, "mach.bat"), readmode="rb") as fh:
            self._write_mach_batch(fh)

        with self._write_file(os.path.join(out_dir, "mach.ps1"), readmode="rb") as fh:
            self._write_mach_powershell(fh)

        # Write out a solution file to tie it all together.
        solution_path = os.path.join(out_dir, "mozilla.sln")
        with self._write_file(solution_path, readmode="rb") as fh:
            self._write_solution(fh, projects)
+
    def _write_projects_for_sources(self, sources, prefix, out_dir):
        """Write one .vcxproj per entry of *sources* (item -> relsrcdir).

        *prefix* is "library" or "binary"; it namespaces the project file
        names and, for binaries, enables debugger configuration. Returns a
        dict of basename -> (project_id, basename, item).
        """
        projects = {}
        for item, path in sorted(sources.items()):
            config = self._paths_to_configs.get(path, None)
            # NOTE(review): this rebinding shadows the *sources* parameter.
            # Harmless because sorted(sources.items()) above was evaluated
            # once before the loop, but a distinct name would be clearer.
            sources = self._paths_to_sources.get(path, set())
            sources = set(os.path.join("$(TopSrcDir)", path, s) for s in sources)
            sources = set(os.path.normpath(s) for s in sources)

            finder = FileFinder(os.path.join(self.environment.topsrcdir, path))

            # Pick up headers living next to the sources.
            headers = [t[0] for t in finder.find("*.h")]
            headers = [
                os.path.normpath(os.path.join("$(TopSrcDir)", path, f)) for f in headers
            ]

            includes = [
                os.path.join("$(TopSrcDir)", path),
                os.path.join("$(TopObjDir)", path),
            ]
            includes.extend(self._paths_to_includes.get(path, []))
            includes.append("$(TopObjDir)\\dist\\include\\nss")
            includes.append("$(TopObjDir)\\dist\\include")

            # Harvest -I flags from the per-directory compiler flags.
            for v in (
                "NSPR_CFLAGS",
                "NSS_CFLAGS",
                "MOZ_JPEG_CFLAGS",
                "MOZ_PNG_CFLAGS",
                "MOZ_ZLIB_CFLAGS",
                "MOZ_PIXMAN_CFLAGS",
            ):
                if not config:
                    break

                args = config.substs.get(v, [])

                for i, arg in enumerate(args):
                    if arg.startswith("-I"):
                        includes.append(os.path.normpath(arg[2:]))

            # Pull in system defaults.
            includes.append("$(DefaultIncludes)")

            includes = [os.path.normpath(i) for i in includes]

            defines = []
            for k, v in self._paths_to_defines.get(path, {}).items():
                if v is True:
                    defines.append(k)
                else:
                    defines.append("%s=%s" % (k, v))

            debugger = None
            if prefix == "binary":
                # The app binary gets profile/launcher arguments; other
                # binaries are launched bare.
                if item.startswith(self.environment.substs["MOZ_APP_NAME"]):
                    app_args = "-no-remote -profile $(TopObjDir)\\tmp\\profile-default"
                    if self.environment.substs.get("MOZ_LAUNCHER_PROCESS", False):
                        app_args += " -wait-for-browser"
                    debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, app_args)
                else:
                    debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, "")

            basename = "%s_%s" % (prefix, item)

            project_id = self._write_vs_project(
                out_dir,
                basename,
                item,
                includes=includes,
                forced_includes=["$(TopObjDir)\\dist\\include\\mozilla-config.h"],
                defines=defines,
                headers=headers,
                sources=sources,
                debugger=debugger,
            )

            projects[basename] = (project_id, basename, item)

        return projects
+
    def _write_solution(self, fh, projects):
        """Write the mozilla.sln solution referencing *projects*.

        *projects* maps key -> (project_id, basename, display name). Keys
        are prefixed "library_", "binary_" or "target_", which determines
        the solution folder each project lands in.
        """
        # Visual Studio appears to write out its current version in the
        # solution file. Instead of trying to figure out what version it will
        # write, try to parse the version out of the existing file and use it
        # verbatim.
        vs_version = None
        try:
            with open(fh.name, "rb") as sfh:
                for line in sfh:
                    if line.startswith(b"VisualStudioVersion = "):
                        vs_version = line.split(b" = ", 1)[1].strip()
        except IOError as e:
            # A missing solution file is expected on the first run.
            if e.errno != errno.ENOENT:
                raise

        format_version, comment_version = visual_studio_product_to_solution_version(
            self._version
        )
        # This is a Visual C++ Project type.
        project_type = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"

        # Visual Studio seems to require this header.
        fh.write(
            "Microsoft Visual Studio Solution File, Format Version %s\r\n"
            % format_version
        )
        fh.write("# Visual Studio %s\r\n" % comment_version)

        if vs_version:
            fh.write("VisualStudioVersion = %s\r\n" % vs_version)

        # Corresponds to VS2013.
        fh.write("MinimumVisualStudioVersion = 12.0.31101.0\r\n")

        binaries_id = projects["target_binaries"][0]

        # Write out entries for each project.
        for key in sorted(projects):
            project_id, basename, name = projects[key]
            path = os.path.join(self._projsubdir, "%s.vcxproj" % basename)

            fh.write(
                'Project("{%s}") = "%s", "%s", "{%s}"\r\n'
                % (project_type, name, path, project_id)
            )

            # Make all libraries depend on the binaries target.
            if key.startswith("library_"):
                fh.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
                fh.write("\t\t{%s} = {%s}\r\n" % (binaries_id, binaries_id))
                fh.write("\tEndProjectSection\r\n")

            fh.write("EndProject\r\n")

        # Write out solution folders for organizing things.

        # This is the UUID you use for solution folders.
        container_id = "2150E333-8FDC-42A3-9474-1A3956D46DE8"

        def write_container(desc):
            # Emit a solution-folder pseudo-project and return its GUID.
            cid = get_id(desc)
            fh.write(
                'Project("{%s}") = "%s", "%s", "{%s}"\r\n'
                % (container_id, desc, desc, cid)
            )
            fh.write("EndProject\r\n")

            return cid

        library_id = write_container("Libraries")
        target_id = write_container("Build Targets")
        binary_id = write_container("Binaries")

        fh.write("Global\r\n")

        # Make every project a member of our one configuration.
        fh.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
        fh.write("\t\tBuild|Win32 = Build|Win32\r\n")
        fh.write("\tEndGlobalSection\r\n")

        # Set every project's active configuration to the one configuration and
        # set up the default build project.
        fh.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
        for name, project in sorted(projects.items()):
            fh.write("\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n" % project[0])

            # Only build the full build target by default.
            # It's important we don't write multiple entries here because they
            # conflict!
            if name == "target_full":
                fh.write("\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n" % project[0])

        fh.write("\tEndGlobalSection\r\n")

        fh.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
        fh.write("\t\tHideSolutionNode = FALSE\r\n")
        fh.write("\tEndGlobalSection\r\n")

        # Associate projects with containers.
        fh.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
        for key in sorted(projects):
            project_id = projects[key][0]

            if key.startswith("library_"):
                container_id = library_id
            elif key.startswith("target_"):
                container_id = target_id
            elif key.startswith("binary_"):
                container_id = binary_id
            else:
                raise Exception("Unknown project type: %s" % key)

            fh.write("\t\t{%s} = {%s}\r\n" % (project_id, container_id))
        fh.write("\tEndGlobalSection\r\n")

        fh.write("EndGlobal\r\n")
+
    def _write_props(self, fh):
        """Write mozilla.props, the property sheet imported by every project.

        Defines the user macros TopObjDir, TopSrcDir, PYTHON, MACH and
        DefaultIncludes, and registers the gecko.natvis visualizer file.
        """
        impl = getDOMImplementation()
        doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None)

        project = doc.documentElement
        project.setAttribute("xmlns", MSBUILD_NAMESPACE)
        project.setAttribute("ToolsVersion", "4.0")

        ig = project.appendChild(doc.createElement("ImportGroup"))
        ig.setAttribute("Label", "PropertySheets")

        pg = project.appendChild(doc.createElement("PropertyGroup"))
        pg.setAttribute("Label", "UserMacros")

        ig = project.appendChild(doc.createElement("ItemGroup"))

        def add_var(k, v):
            # Each macro gets a property definition plus a BuildMacro item;
            # presumably the latter is what surfaces it in the VS UI.
            e = pg.appendChild(doc.createElement(k))
            e.appendChild(doc.createTextNode(v))

            e = ig.appendChild(doc.createElement("BuildMacro"))
            e.setAttribute("Include", k)

            e = e.appendChild(doc.createElement("Value"))
            e.appendChild(doc.createTextNode("$(%s)" % k))

        natvis = ig.appendChild(doc.createElement("Natvis"))
        natvis.setAttribute("Include", "../../../toolkit/library/gecko.natvis")

        add_var("TopObjDir", os.path.normpath(self.environment.topobjdir))
        add_var("TopSrcDir", os.path.normpath(self.environment.topsrcdir))
        add_var("PYTHON", "$(TopObjDir)\\_virtualenv\\Scripts\\python.exe")
        add_var("MACH", "$(TopSrcDir)\\mach")

        # From MozillaBuild.
        add_var("DefaultIncludes", os.environ.get("INCLUDE", ""))

        # UTF-8 BOM first, then the XML body with CRLF newlines.
        fh.write(b"\xef\xbb\xbf")
        doc.writexml(fh, addindent=" ", newl="\r\n")
+
+ def _create_natvis_type(
+ self, doc, visualizer, name, displayString, stringView=None
+ ):
+
+ t = visualizer.appendChild(doc.createElement("Type"))
+ t.setAttribute("Name", name)
+
+ ds = t.appendChild(doc.createElement("DisplayString"))
+ ds.appendChild(doc.createTextNode(displayString))
+
+ if stringView is not None:
+ sv = t.appendChild(doc.createElement("DisplayString"))
+ sv.appendChild(doc.createTextNode(stringView))
+
+ def _create_natvis_simple_string_type(self, doc, visualizer, name):
+ self._create_natvis_type(
+ doc, visualizer, name + "<char16_t>", "{mData,su}", "mData,su"
+ )
+ self._create_natvis_type(
+ doc, visualizer, name + "<char>", "{mData,s}", "mData,s"
+ )
+
+ def _create_natvis_string_tuple_type(self, doc, visualizer, chartype, formatstring):
+ t = visualizer.appendChild(doc.createElement("Type"))
+ t.setAttribute("Name", "nsTSubstringTuple<" + chartype + ">")
+
+ ds1 = t.appendChild(doc.createElement("DisplayString"))
+ ds1.setAttribute("Condition", "mHead != nullptr")
+ ds1.appendChild(
+ doc.createTextNode("{mHead,na} {mFragB->mData," + formatstring + "}")
+ )
+
+ ds2 = t.appendChild(doc.createElement("DisplayString"))
+ ds2.setAttribute("Condition", "mHead == nullptr")
+ ds2.appendChild(
+ doc.createTextNode(
+ "{mFragA->mData,"
+ + formatstring
+ + "} {mFragB->mData,"
+ + formatstring
+ + "}"
+ )
+ )
+
+ def _relevant_environment_variables(self):
+ # Write out the environment variables, presumably coming from
+ # MozillaBuild.
+ for k, v in sorted(os.environ.items()):
+ if not re.match("^[a-zA-Z0-9_]+$", k):
+ continue
+
+ if k in ("OLDPWD", "PS1"):
+ continue
+
+ if k.startswith("_"):
+ continue
+
+ yield k, v
+
+ yield "TOPSRCDIR", self.environment.topsrcdir
+ yield "TOPOBJDIR", self.environment.topobjdir
+
    def _write_mach_powershell(self, fh):
        """Write a PowerShell wrapper that runs mach via MozillaBuild's bash.

        Raises KeyError if the MOZILLABUILD environment variable is unset.
        *fh* receives bytes.
        """
        for k, v in self._relevant_environment_variables():
            fh.write(b'$env:%s = "%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8")))

        # bash wants forward slashes; a relative srcdir path sidesteps
        # drive-letter translation.
        relpath = os.path.relpath(
            self.environment.topsrcdir, self.environment.topobjdir
        ).replace("\\", "/")

        fh.write(
            b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath.encode("utf-8")
        )
        fh.write(b"$bashargs = $bashargs + $args\r\n")

        fh.write(b"$expanded = $bashargs -join ' '\r\n")
        fh.write(b'$procargs = "-c", $expanded\r\n')

        # Newer MozillaBuild ships bash under msys2/, older under msys/.
        if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists():
            bash_path = rb"msys2\usr\bin\bash"
        else:
            bash_path = rb"msys\bin\bash"

        fh.write(
            b"Start-Process -WorkingDirectory $env:TOPOBJDIR "
            b"-FilePath $env:MOZILLABUILD\\%b "
            b"-ArgumentList $procargs "
            b"-Wait -NoNewWindow\r\n" % bash_path
        )
+
    def _write_mach_batch(self, fh):
        """Write out a batch script that builds the tree.

        The script "bootstraps" into the MozillaBuild environment by setting
        the environment variables that are active in the current MozillaBuild
        environment. Then, it builds the tree.

        Raises KeyError if MOZILLABUILD is not in the environment.
        """
        for k, v in self._relevant_environment_variables():
            fh.write(b'SET "%s=%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8")))

        fh.write(b"cd %TOPOBJDIR%\r\n")

        # We need to convert Windows-native paths to msys paths. Easiest way is
        # relative paths, since munging c:\ to /c/ is slightly more
        # complicated.
        relpath = os.path.relpath(
            self.environment.topsrcdir, self.environment.topobjdir
        ).replace("\\", "/")

        # Same msys2-vs-msys detection as _write_mach_powershell().
        if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists():
            bash_path = rb"msys2\usr\bin\bash"
        else:
            bash_path = rb"msys\bin\bash"

        # We go through mach because it has the logic for choosing the most
        # appropriate build tool.
        fh.write(
            b'"%%MOZILLABUILD%%\\%b" '
            b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"'
            % (bash_path, relpath.encode("utf-8"))
        )
+
    def _write_vs_project(self, out_dir, basename, name, **kwargs):
        """Write <basename>.vcxproj plus a stub .user file into *out_dir*.

        Extra keyword arguments are forwarded to write_vs_project().
        Returns the project's GUID.
        """
        root = "%s.vcxproj" % basename
        project_id = get_id(basename)

        with self._write_file(os.path.join(out_dir, root), readmode="rb") as fh:
            project_id, name = VisualStudioBackend.write_vs_project(
                fh, self._version, project_id, name, **kwargs
            )

        with self._write_file(
            os.path.join(out_dir, "%s.user" % root), readmode="rb"
        ) as fh:
            # NOTE(review): these writes pass str where other writers in this
            # file emit bytes; presumably self._write_file accepts both --
            # confirm against the base class.
            fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
            fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' % MSBUILD_NAMESPACE)
            fh.write("</Project>\r\n")

        return project_id
+
+ @staticmethod
+ def write_vs_project(
+ fh,
+ version,
+ project_id,
+ name,
+ includes=[],
+ forced_includes=[],
+ defines=[],
+ build_command=None,
+ clean_command=None,
+ debugger=None,
+ headers=[],
+ sources=[],
+ ):
+
+ impl = getDOMImplementation()
+ doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None)
+
+ project = doc.documentElement
+ project.setAttribute("DefaultTargets", "Build")
+ project.setAttribute("ToolsVersion", "4.0")
+ project.setAttribute("xmlns", MSBUILD_NAMESPACE)
+
+ ig = project.appendChild(doc.createElement("ItemGroup"))
+ ig.setAttribute("Label", "ProjectConfigurations")
+
+ pc = ig.appendChild(doc.createElement("ProjectConfiguration"))
+ pc.setAttribute("Include", "Build|Win32")
+
+ c = pc.appendChild(doc.createElement("Configuration"))
+ c.appendChild(doc.createTextNode("Build"))
+
+ p = pc.appendChild(doc.createElement("Platform"))
+ p.appendChild(doc.createTextNode("Win32"))
+
+ pg = project.appendChild(doc.createElement("PropertyGroup"))
+ pg.setAttribute("Label", "Globals")
+
+ n = pg.appendChild(doc.createElement("ProjectName"))
+ n.appendChild(doc.createTextNode(name))
+
+ k = pg.appendChild(doc.createElement("Keyword"))
+ k.appendChild(doc.createTextNode("MakeFileProj"))
+
+ g = pg.appendChild(doc.createElement("ProjectGuid"))
+ g.appendChild(doc.createTextNode("{%s}" % project_id))
+
+ rn = pg.appendChild(doc.createElement("RootNamespace"))
+ rn.appendChild(doc.createTextNode("mozilla"))
+
+ pts = pg.appendChild(doc.createElement("PlatformToolset"))
+ pts.appendChild(
+ doc.createTextNode(
+ visual_studio_product_to_platform_toolset_version(version)
+ )
+ )
+
+ i = project.appendChild(doc.createElement("Import"))
+ i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.Default.props")
+
+ ig = project.appendChild(doc.createElement("ImportGroup"))
+ ig.setAttribute("Label", "ExtensionTargets")
+
+ ig = project.appendChild(doc.createElement("ImportGroup"))
+ ig.setAttribute("Label", "ExtensionSettings")
+
+ ig = project.appendChild(doc.createElement("ImportGroup"))
+ ig.setAttribute("Label", "PropertySheets")
+ i = ig.appendChild(doc.createElement("Import"))
+ i.setAttribute("Project", "mozilla.props")
+
+ pg = project.appendChild(doc.createElement("PropertyGroup"))
+ pg.setAttribute("Label", "Configuration")
+ ct = pg.appendChild(doc.createElement("ConfigurationType"))
+ ct.appendChild(doc.createTextNode("Makefile"))
+
+ pg = project.appendChild(doc.createElement("PropertyGroup"))
+ pg.setAttribute("Condition", "'$(Configuration)|$(Platform)'=='Build|Win32'")
+
+ if build_command:
+ n = pg.appendChild(doc.createElement("NMakeBuildCommandLine"))
+ n.appendChild(doc.createTextNode(build_command))
+
+ if clean_command:
+ n = pg.appendChild(doc.createElement("NMakeCleanCommandLine"))
+ n.appendChild(doc.createTextNode(clean_command))
+
+ if includes:
+ n = pg.appendChild(doc.createElement("NMakeIncludeSearchPath"))
+ n.appendChild(doc.createTextNode(";".join(includes)))
+
+ if forced_includes:
+ n = pg.appendChild(doc.createElement("NMakeForcedIncludes"))
+ n.appendChild(doc.createTextNode(";".join(forced_includes)))
+
+ if defines:
+ n = pg.appendChild(doc.createElement("NMakePreprocessorDefinitions"))
+ n.appendChild(doc.createTextNode(";".join(defines)))
+
+ if debugger:
+ n = pg.appendChild(doc.createElement("LocalDebuggerCommand"))
+ n.appendChild(doc.createTextNode(debugger[0]))
+
+ n = pg.appendChild(doc.createElement("LocalDebuggerCommandArguments"))
+ n.appendChild(doc.createTextNode(debugger[1]))
+
+ # Sets IntelliSense to use c++17 Language Standard
+ n = pg.appendChild(doc.createElement("AdditionalOptions"))
+ n.appendChild(doc.createTextNode("/std:c++17"))
+
+ i = project.appendChild(doc.createElement("Import"))
+ i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.props")
+
+ i = project.appendChild(doc.createElement("Import"))
+ i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.targets")
+
+ # Now add files to the project.
+ ig = project.appendChild(doc.createElement("ItemGroup"))
+ for header in sorted(headers or []):
+ n = ig.appendChild(doc.createElement("ClInclude"))
+ n.setAttribute("Include", header)
+
+ ig = project.appendChild(doc.createElement("ItemGroup"))
+ for source in sorted(sources or []):
+ n = ig.appendChild(doc.createElement("ClCompile"))
+ n.setAttribute("Include", source)
+
+ fh.write(b"\xef\xbb\xbf")
+ doc.writexml(fh, addindent=" ", newl="\r\n")
+
+ return project_id, name