Diffstat (limited to 'python/mozbuild/mozbuild/frontend')
-rw-r--r--  python/mozbuild/mozbuild/frontend/__init__.py      |    0
-rw-r--r--  python/mozbuild/mozbuild/frontend/context.py       | 3144
-rw-r--r--  python/mozbuild/mozbuild/frontend/data.py          | 1369
-rw-r--r--  python/mozbuild/mozbuild/frontend/emitter.py       | 1892
-rw-r--r--  python/mozbuild/mozbuild/frontend/gyp_reader.py    |  497
-rw-r--r--  python/mozbuild/mozbuild/frontend/mach_commands.py |  338
-rw-r--r--  python/mozbuild/mozbuild/frontend/reader.py        | 1432
-rw-r--r--  python/mozbuild/mozbuild/frontend/sandbox.py       |  313
8 files changed, 8985 insertions, 0 deletions
diff --git a/python/mozbuild/mozbuild/frontend/__init__.py b/python/mozbuild/mozbuild/frontend/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/__init__.py
diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py
new file mode 100644
index 0000000000..1e241c5656
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/context.py
@@ -0,0 +1,3144 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+######################################################################
+# DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. #
+######################################################################
+
+r"""This module contains the data structure (context) holding the configuration
+from a moz.build. The data emitted by the frontend derives from those contexts.
+
+It also defines the set of variables and functions available in moz.build.
+If you are looking for the absolute authority on what moz.build files can
+contain, you've come to the right place.
+"""
+
+import itertools
+import operator
+import os
+from collections import Counter, OrderedDict
+from types import FunctionType
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.util import (
+ HierarchicalStringList,
+ ImmutableStrictOrderingOnAppendList,
+ KeyedDefaultDict,
+ List,
+ ReadOnlyKeyedDefaultDict,
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithAction,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ TypedList,
+ TypedNamedTuple,
+ memoize,
+ memoized_property,
+)
+
+from .. import schedules
+from ..testing import read_manifestparser_manifest, read_reftest_manifest
+
+
+class ContextDerivedValue(object):
+ """Classes deriving from this one receive a special treatment in a
+ Context. See Context documentation.
+ """
+
+ __slots__ = ()
+
+
+class Context(KeyedDefaultDict):
+ """Represents a moz.build configuration context.
+
+ Instances of this class are filled by the execution of sandboxes.
+ At the core, a Context is a dict, with a defined set of possible keys we'll
+ call variables. Each variable is associated with a type.
+
+ When reading a value for a given key, we first try to read the existing
+ value. If a value is not found and it is defined in the allowed variables
+ set, we return a new instance of the class for that variable. We don't
+ assign default instances until they are accessed because this makes
+ debugging the end-result much simpler. Instead of a data structure with
+ lots of empty/default values, you have a data structure with only the
+ values that were read or touched.
+
+ Instances of variable classes are created by invoking ``class_name()``,
+ except when class_name derives from ``ContextDerivedValue`` or
+ ``SubContext``, in which case ``class_name(instance_of_the_context)`` or
+ ``class_name(self)`` is invoked. A value is added to those calls when
+ instances are created during assignment (setitem).
+
+ allowed_variables is a dict of the variables that can be set and read in
+ this context instance. Keys in this dict are the strings representing keys
+ in this context which are valid. Values are tuples of the stored type,
+ the assigned (input) type, and a docstring describing the purpose of the
+ variable (see the comment above the VARIABLES declaration in this module).
+
+ config is the ConfigEnvironment for this context.
+ """
+
+ def __init__(self, allowed_variables={}, config=None, finder=None):
+ self._allowed_variables = allowed_variables
+ self.main_path = None
+ self.current_path = None
+ # There aren't going to be enough paths for the performance of scanning
+ # a list to be a problem.
+ self._all_paths = []
+ self.config = config
+ self._sandbox = None
+ self._finder = finder
+ KeyedDefaultDict.__init__(self, self._factory)
+
+ def push_source(self, path):
+ """Adds the given path as source of the data from this context and make
+ it the current path for the context."""
+ assert os.path.isabs(path)
+ if not self.main_path:
+ self.main_path = path
+ else:
+ # Callers shouldn't push after main_path has been popped.
+ assert self.current_path
+ self.current_path = path
+ # The same file can be pushed twice, so don't remove any previous
+ # occurrence.
+ self._all_paths.append(path)
+
+ def pop_source(self):
+ """Get back to the previous current path for the context."""
+ assert self.main_path
+ assert self.current_path
+ last = self._all_paths.pop()
+ # Keep the popped path in the list of all paths, but before the main
+ # path so that it's not popped again.
+ self._all_paths.insert(0, last)
+ if last == self.main_path:
+ self.current_path = None
+ else:
+ self.current_path = self._all_paths[-1]
+ return last
+
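+ # Illustrative sketch of the source stack (paths are hypothetical):
+ #
+ #     ctx.push_source('/src/foo/moz.build')        # becomes main_path
+ #     ctx.push_source('/src/foo/common.mozbuild')
+ #     ctx.source_stack   # both paths, main_path first
+ #     ctx.pop_source()   # -> '/src/foo/common.mozbuild'
+ #     ctx.pop_source()   # -> '/src/foo/moz.build'; current_path is None
+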
+ def add_source(self, path):
+ """Adds the given path as source of the data from this context."""
+ assert os.path.isabs(path)
+ if not self.main_path:
+ self.main_path = self.current_path = path
+ # Insert at the beginning of the list so that it's always before the
+ # main path.
+ if path not in self._all_paths:
+ self._all_paths.insert(0, path)
+
+ @property
+ def error_is_fatal(self):
+ """Returns True if the error function should be fatal."""
+ return self.config and getattr(self.config, "error_is_fatal", True)
+
+ @property
+ def all_paths(self):
+ """Returns all paths ever added to the context."""
+ return set(self._all_paths)
+
+ @property
+ def source_stack(self):
+ """Returns the current stack of pushed sources."""
+ if not self.current_path:
+ return []
+ return self._all_paths[self._all_paths.index(self.main_path) :]
+
+ @memoized_property
+ def objdir(self):
+ return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip("/")
+
+ @memoize
+ def _srcdir(self, path):
+ return mozpath.join(self.config.topsrcdir, self._relsrcdir(path)).rstrip("/")
+
+ @property
+ def srcdir(self):
+ return self._srcdir(self.current_path or self.main_path)
+
+ @memoize
+ def _relsrcdir(self, path):
+ return mozpath.relpath(mozpath.dirname(path), self.config.topsrcdir)
+
+ @property
+ def relsrcdir(self):
+ assert self.main_path
+ return self._relsrcdir(self.current_path or self.main_path)
+
+ @memoized_property
+ def relobjdir(self):
+ assert self.main_path
+ return mozpath.relpath(mozpath.dirname(self.main_path), self.config.topsrcdir)
+
+ def _factory(self, key):
+ """Function called when requesting a missing key."""
+ defaults = self._allowed_variables.get(key)
+ if not defaults:
+ raise KeyError("global_ns", "get_unknown", key)
+
+ # The first element of the tuple is the stored type. Types deriving
+ # from ContextDerivedValue need the context passed to their
+ # constructor; anything else is instantiated without arguments.
+ default = defaults[0]
+ if issubclass(default, ContextDerivedValue):
+ return default(self)
+ else:
+ return default()
+
+ def _validate(self, key, value, is_template=False):
+ """Validates whether the key is allowed and if the value's type
+ matches.
+ """
+ stored_type, input_type, docs = self._allowed_variables.get(
+ key, (None, None, None)
+ )
+
+ if stored_type is None or (not is_template and key in TEMPLATE_VARIABLES):
+ raise KeyError("global_ns", "set_unknown", key, value)
+
+ # If the incoming value is not the type we store, we try to convert
+ # it to that type. This relies on proper coercion rules existing. This
+ # is the responsibility of whoever defined the symbols: a type should
+ # not be in the allowed set if the constructor function for the stored
+ # type does not accept an instance of that type.
+ if not isinstance(value, (stored_type, input_type)):
+ raise ValueError("global_ns", "set_type", key, value, input_type)
+
+ return stored_type
+
+ def __setitem__(self, key, value):
+ stored_type = self._validate(key, value)
+
+ if not isinstance(value, stored_type):
+ if issubclass(stored_type, ContextDerivedValue):
+ value = stored_type(self, value)
+ else:
+ value = stored_type(value)
+
+ return KeyedDefaultDict.__setitem__(self, key, value)
+
+ def update(self, iterable={}, **kwargs):
+ """Like dict.update(), but using the context's setitem.
+
+ This function is transactional: if setitem fails for one of the values,
+ the context is not updated at all."""
+ if isinstance(iterable, dict):
+ iterable = iterable.items()
+
+ update = {}
+ for key, value in itertools.chain(iterable, kwargs.items()):
+ stored_type = self._validate(key, value)
+ # Don't create an instance of stored_type if coercion is needed,
+ # until all values are validated.
+ update[key] = (value, stored_type)
+ for key, (value, stored_type) in update.items():
+ if not isinstance(value, stored_type):
+ update[key] = stored_type(value)
+ else:
+ update[key] = value
+ KeyedDefaultDict.update(self, update)
+
+
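+# A minimal usage sketch of Context; the 'FOO' schema here is hypothetical
+# (the real one is the VARIABLES dict defined later in this module):
+#
+#     ctx = Context(allowed_variables={'FOO': (list, tuple, 'docs')})
+#     ctx['FOO']            # missing key -> _factory creates a new list()
+#     ctx['FOO'] = ('a',)   # tuple input is coerced to the stored list type
+#     ctx.update(FOO=['a', 'b'])   # transactional: validates before writing
+
+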
+class TemplateContext(Context):
+ def __init__(self, template=None, allowed_variables={}, config=None):
+ self.template = template
+ super(TemplateContext, self).__init__(allowed_variables, config)
+
+ def _validate(self, key, value):
+ return Context._validate(self, key, value, True)
+
+
+class SubContext(Context, ContextDerivedValue):
+ """A Context derived from another Context.
+
+ Sub-contexts are intended to be used as context managers.
+
+ Sub-contexts inherit paths and other relevant state from the parent
+ context.
+ """
+
+ def __init__(self, parent):
+ assert isinstance(parent, Context)
+
+ Context.__init__(self, allowed_variables=self.VARIABLES, config=parent.config)
+
+ # Copy state from parent.
+ for p in parent.source_stack:
+ self.push_source(p)
+ self._sandbox = parent._sandbox
+
+ def __enter__(self):
+ if not self._sandbox or self._sandbox() is None:
+ raise Exception("a sandbox is required")
+
+ self._sandbox().push_subcontext(self)
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._sandbox().pop_subcontext(self)
+
+
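+# Sub-contexts surface in moz.build files as context-manager functions; see
+# the Files subcontext below, e.g.:
+#
+#     with Files('**.py'):
+#         BUG_COMPONENT = ('Firefox Build System', 'General')
+
+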
+class InitializedDefines(ContextDerivedValue, OrderedDict):
+ def __init__(self, context, value=None):
+ OrderedDict.__init__(self)
+ for define in context.config.substs.get("MOZ_DEBUG_DEFINES", ()):
+ self[define] = 1
+ if value:
+ if not isinstance(value, OrderedDict):
+ raise ValueError("Can only initialize with another OrderedDict")
+ self.update(value)
+
+ def update(self, *other, **kwargs):
+ # Since iteration over non-ordered dicts is non-deterministic, this dict
+ # will be populated in an unpredictable order unless the argument to
+ # update() is also ordered. (It's important that we maintain this
+ # invariant so we can be sure that running `./mach build-backend` twice
+ # in a row without updating any files in the workspace generates exactly
+ # the same output.)
+ if kwargs:
+ raise ValueError("Cannot call update() with kwargs")
+ if other:
+ if not isinstance(other[0], OrderedDict):
+ raise ValueError("Can only call update() with another OrderedDict")
+ return super(InitializedDefines, self).update(*other, **kwargs)
+ raise ValueError("No arguments passed to update()")
+
+
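+# The DEFINES variable (see VARIABLES below) is backed by this class.
+# Illustrative sketch of the ordering constraint enforced by update():
+#
+#     DEFINES['MOZ_FOO'] = 1                   # ordered insertion
+#     DEFINES.update({'A': 1})                 # raises: plain dict
+#     DEFINES.update(OrderedDict([('A', 1)]))  # accepted
+
+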
+class BaseCompileFlags(ContextDerivedValue, dict):
+ def __init__(self, context):
+ self._context = context
+
+ klass_name = self.__class__.__name__
+ for k, v, build_vars in self.flag_variables:
+ if not isinstance(k, six.text_type):
+ raise ValueError("Flag %s for %s is not a string" % (k, klass_name))
+ if not isinstance(build_vars, tuple):
+ raise ValueError(
+ "Build variables `%s` for %s in %s is not a tuple"
+ % (build_vars, k, klass_name)
+ )
+
+ self._known_keys = set(k for k, v, _ in self.flag_variables)
+
+ # Providing defaults here doesn't play well with multiple templates
+ # modifying COMPILE_FLAGS from the same moz.build, because the merge
+ # done after the template runs can't tell which values coming from
+ # a template were set and which were provided as defaults.
+ template_name = getattr(context, "template", None)
+ if template_name in (None, "Gyp"):
+ dict.__init__(
+ self,
+ (
+ (k, v if v is None else TypedList(six.text_type)(v))
+ for k, v, _ in self.flag_variables
+ ),
+ )
+ else:
+ dict.__init__(self)
+
+
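+# Each subclass defines self.flag_variables as a tuple of 3-tuples:
+#
+#     (flag_category, default_value_or_None, (BUILD_VARIABLES, ...))
+#
+# e.g. ('DEBUG', self._debug_flags(), ('ASFLAGS', 'SFLAGS')) maps the DEBUG
+# category onto the ASFLAGS and SFLAGS build variables. A None default means
+# the value is resolved later by the emitter.
+
+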
+class HostCompileFlags(BaseCompileFlags):
+ def __init__(self, context):
+ self._context = context
+ main_src_dir = mozpath.dirname(context.main_path)
+
+ self.flag_variables = (
+ (
+ "HOST_CXXFLAGS",
+ context.config.substs.get("HOST_CXXFLAGS"),
+ ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS"),
+ ),
+ (
+ "HOST_CFLAGS",
+ context.config.substs.get("HOST_CFLAGS"),
+ ("HOST_CFLAGS", "HOST_C_LDFLAGS"),
+ ),
+ (
+ "HOST_OPTIMIZE",
+ self._optimize_flags(),
+ ("HOST_CFLAGS", "HOST_CXXFLAGS", "HOST_C_LDFLAGS", "HOST_CXX_LDFLAGS"),
+ ),
+ ("RTL", None, ("HOST_CFLAGS", "HOST_C_LDFLAGS")),
+ ("HOST_DEFINES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")),
+ ("MOZBUILD_HOST_CFLAGS", [], ("HOST_CFLAGS", "HOST_C_LDFLAGS")),
+ ("MOZBUILD_HOST_CXXFLAGS", [], ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS")),
+ (
+ "BASE_INCLUDES",
+ ["-I%s" % main_src_dir, "-I%s" % context.objdir],
+ ("HOST_CFLAGS", "HOST_CXXFLAGS"),
+ ),
+ ("LOCAL_INCLUDES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")),
+ (
+ "EXTRA_INCLUDES",
+ ["-I%s/dist/include" % context.config.topobjdir],
+ ("HOST_CFLAGS", "HOST_CXXFLAGS"),
+ ),
+ (
+ "WARNINGS_CFLAGS",
+ context.config.substs.get("WARNINGS_HOST_CFLAGS"),
+ ("HOST_CFLAGS",),
+ ),
+ (
+ "WARNINGS_CXXFLAGS",
+ context.config.substs.get("WARNINGS_HOST_CXXFLAGS"),
+ ("HOST_CXXFLAGS",),
+ ),
+ )
+ BaseCompileFlags.__init__(self, context)
+
+ def _optimize_flags(self):
+ optimize_flags = []
+ if self._context.config.substs.get("CROSS_COMPILE"):
+ optimize_flags += self._context.config.substs.get("HOST_OPTIMIZE_FLAGS")
+ elif self._context.config.substs.get("MOZ_OPTIMIZE"):
+ optimize_flags += self._context.config.substs.get("MOZ_OPTIMIZE_FLAGS")
+ return optimize_flags
+
+
+class AsmFlags(BaseCompileFlags):
+ def __init__(self, context):
+ self._context = context
+ self.flag_variables = (
+ ("DEFINES", None, ("SFLAGS",)),
+ ("LIBRARY_DEFINES", None, ("SFLAGS",)),
+ ("OS", context.config.substs.get("ASFLAGS"), ("ASFLAGS", "SFLAGS")),
+ ("DEBUG", self._debug_flags(), ("ASFLAGS", "SFLAGS")),
+ ("LOCAL_INCLUDES", None, ("SFLAGS",)),
+ ("MOZBUILD", None, ("ASFLAGS", "SFLAGS")),
+ )
+ BaseCompileFlags.__init__(self, context)
+
+ def _debug_flags(self):
+ debug_flags = []
+ if self._context.config.substs.get(
+ "MOZ_DEBUG"
+ ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"):
+ if self._context.get("USE_NASM"):
+ if self._context.config.substs.get("OS_ARCH") == "WINNT":
+ debug_flags += ["-F", "cv8"]
+ elif self._context.config.substs.get("OS_ARCH") != "Darwin":
+ debug_flags += ["-F", "dwarf"]
+ elif (
+ self._context.config.substs.get("OS_ARCH") == "WINNT"
+ and self._context.config.substs.get("CPU_ARCH") == "aarch64"
+ ):
+ # armasm64 accepts a paucity of options compared to ml/ml64.
+ pass
+ else:
+ debug_flags += self._context.config.substs.get(
+ "MOZ_DEBUG_FLAGS", ""
+ ).split()
+ return debug_flags
+
+
+class LinkFlags(BaseCompileFlags):
+ def __init__(self, context):
+ self._context = context
+
+ self.flag_variables = (
+ ("OS", self._os_ldflags(), ("LDFLAGS",)),
+ (
+ "MOZ_HARDENING_LDFLAGS",
+ context.config.substs.get("MOZ_HARDENING_LDFLAGS"),
+ ("LDFLAGS",),
+ ),
+ ("DEFFILE", None, ("LDFLAGS",)),
+ ("MOZBUILD", None, ("LDFLAGS",)),
+ (
+ "FIX_LINK_PATHS",
+ context.config.substs.get("MOZ_FIX_LINK_PATHS"),
+ ("LDFLAGS",),
+ ),
+ (
+ "OPTIMIZE",
+ (
+ context.config.substs.get("MOZ_OPTIMIZE_LDFLAGS", [])
+ if context.config.substs.get("MOZ_OPTIMIZE")
+ else []
+ ),
+ ("LDFLAGS",),
+ ),
+ (
+ "CETCOMPAT",
+ (
+ context.config.substs.get("MOZ_CETCOMPAT_LDFLAGS")
+ if context.config.substs.get("NIGHTLY_BUILD")
+ else []
+ ),
+ ("LDFLAGS",),
+ ),
+ )
+ BaseCompileFlags.__init__(self, context)
+
+ def _os_ldflags(self):
+ flags = self._context.config.substs.get("OS_LDFLAGS", [])[:]
+
+ if self._context.config.substs.get(
+ "MOZ_DEBUG"
+ ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"):
+ flags += self._context.config.substs.get("MOZ_DEBUG_LDFLAGS", [])
+
+ # TODO: This is pretty convoluted, and isn't really a per-context thing,
+ # configure would be a better place to aggregate these.
+ if all(
+ [
+ self._context.config.substs.get("OS_ARCH") == "WINNT",
+ not self._context.config.substs.get("GNU_CC"),
+ not self._context.config.substs.get("MOZ_DEBUG"),
+ ]
+ ):
+ if self._context.config.substs.get("MOZ_OPTIMIZE"):
+ flags.append("-OPT:REF,ICF")
+
+ return flags
+
+
+class TargetCompileFlags(BaseCompileFlags):
+ """Base class that encapsulates some common logic between CompileFlags and
+ WasmCompileFlags.
+ """
+
+ def _debug_flags(self):
+ if self._context.config.substs.get(
+ "MOZ_DEBUG"
+ ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"):
+ return self._context.config.substs.get("MOZ_DEBUG_FLAGS", "").split()
+ return []
+
+ def _warnings_as_errors(self):
+ warnings_as_errors = self._context.config.substs.get("WARNINGS_AS_ERRORS")
+ if warnings_as_errors:
+ return [warnings_as_errors]
+
+ def _optimize_flags(self):
+ if not self._context.config.substs.get("MOZ_OPTIMIZE"):
+ return []
+ optimize_flags = None
+ if self._context.config.substs.get("MOZ_PGO"):
+ optimize_flags = self._context.config.substs.get("MOZ_PGO_OPTIMIZE_FLAGS")
+ if not optimize_flags:
+ # If MOZ_PGO_OPTIMIZE_FLAGS is empty we fall back to
+ # MOZ_OPTIMIZE_FLAGS. Presently this occurs on Windows.
+ optimize_flags = self._context.config.substs.get("MOZ_OPTIMIZE_FLAGS")
+ return optimize_flags
+
+ def __setitem__(self, key, value):
+ if key not in self._known_keys:
+ raise ValueError(
+ "Invalid value. `%s` is not a compile flags category." % key
+ )
+ if key in self and self[key] is None:
+ raise ValueError(
+ "`%s` may not be set in COMPILE_FLAGS from moz.build, this "
+ "value is resolved from the emitter." % key
+ )
+ if not (
+ isinstance(value, list)
+ and all(isinstance(v, six.string_types) for v in value)
+ ):
+ raise ValueError(
+ "A list of strings must be provided as a value for a compile "
+ "flags category."
+ )
+ dict.__setitem__(self, key, value)
+
+
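+# Illustrative sketch of hypothetical moz.build usage: categories with a
+# non-None default may be overridden with a list of strings, e.g.
+#
+#     COMPILE_FLAGS['OPTIMIZE'] = ['-O1']
+#
+# while emitter-resolved categories (those defaulting to None, such as
+# DEFINES) raise if assigned from moz.build.
+
+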
+class CompileFlags(TargetCompileFlags):
+ def __init__(self, context):
+ main_src_dir = mozpath.dirname(context.main_path)
+ self._context = context
+
+ self.flag_variables = (
+ ("STL", context.config.substs.get("STL_FLAGS"), ("CXXFLAGS",)),
+ (
+ "VISIBILITY",
+ context.config.substs.get("VISIBILITY_FLAGS"),
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ "MOZ_HARDENING_CFLAGS",
+ context.config.substs.get("MOZ_HARDENING_CFLAGS"),
+ ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ ("DEFINES", None, ("CXXFLAGS", "CFLAGS")),
+ ("LIBRARY_DEFINES", None, ("CXXFLAGS", "CFLAGS")),
+ (
+ "BASE_INCLUDES",
+ ["-I%s" % main_src_dir, "-I%s" % context.objdir],
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ ("LOCAL_INCLUDES", None, ("CXXFLAGS", "CFLAGS")),
+ (
+ "EXTRA_INCLUDES",
+ ["-I%s/dist/include" % context.config.topobjdir],
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ "OS_INCLUDES",
+ list(
+ itertools.chain(
+ *(
+ context.config.substs.get(v, [])
+ for v in (
+ "NSPR_CFLAGS",
+ "NSS_CFLAGS",
+ "MOZ_JPEG_CFLAGS",
+ "MOZ_PNG_CFLAGS",
+ "MOZ_ZLIB_CFLAGS",
+ "MOZ_PIXMAN_CFLAGS",
+ "MOZ_ICU_CFLAGS",
+ )
+ )
+ )
+ ),
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ ("RTL", None, ("CXXFLAGS", "CFLAGS")),
+ (
+ "OS_COMPILE_CFLAGS",
+ context.config.substs.get("OS_COMPILE_CFLAGS"),
+ ("CFLAGS",),
+ ),
+ (
+ "OS_COMPILE_CXXFLAGS",
+ context.config.substs.get("OS_COMPILE_CXXFLAGS"),
+ ("CXXFLAGS",),
+ ),
+ (
+ "OS_CPPFLAGS",
+ context.config.substs.get("OS_CPPFLAGS"),
+ ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "OS_CFLAGS",
+ context.config.substs.get("OS_CFLAGS"),
+ ("CFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "OS_CXXFLAGS",
+ context.config.substs.get("OS_CXXFLAGS"),
+ ("CXXFLAGS", "CXX_LDFLAGS"),
+ ),
+ (
+ "DEBUG",
+ self._debug_flags(),
+ ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "CLANG_PLUGIN",
+ context.config.substs.get("CLANG_PLUGIN_FLAGS"),
+ ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "OPTIMIZE",
+ self._optimize_flags(),
+ ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "FRAMEPTR",
+ context.config.substs.get("MOZ_FRAMEPTR_FLAGS"),
+ ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "WARNINGS_AS_ERRORS",
+ self._warnings_as_errors(),
+ ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
+ ),
+ (
+ "WARNINGS_CFLAGS",
+ context.config.substs.get("WARNINGS_CFLAGS"),
+ ("CFLAGS",),
+ ),
+ (
+ "WARNINGS_CXXFLAGS",
+ context.config.substs.get("WARNINGS_CXXFLAGS"),
+ ("CXXFLAGS",),
+ ),
+ ("MOZBUILD_CFLAGS", None, ("CFLAGS",)),
+ ("MOZBUILD_CXXFLAGS", None, ("CXXFLAGS",)),
+ (
+ "COVERAGE",
+ context.config.substs.get("COVERAGE_CFLAGS"),
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ "PASS_MANAGER",
+ context.config.substs.get("MOZ_PASS_MANAGER_FLAGS"),
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ "FILE_PREFIX_MAP",
+ context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"),
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ # See bug 414641
+ "NO_STRICT_ALIASING",
+ ["-fno-strict-aliasing"],
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ (
+ # Disable floating-point contraction by default.
+ "FP_CONTRACT",
+ (
+ ["-Xclang"]
+ if context.config.substs.get("CC_TYPE") == "clang-cl"
+ else []
+ )
+ + ["-ffp-contract=off"],
+ ("CXXFLAGS", "CFLAGS"),
+ ),
+ )
+
+ TargetCompileFlags.__init__(self, context)
+
+
+class WasmFlags(TargetCompileFlags):
+ def __init__(self, context):
+ main_src_dir = mozpath.dirname(context.main_path)
+ self._context = context
+
+ self.flag_variables = (
+ ("LIBRARY_DEFINES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")),
+ (
+ "BASE_INCLUDES",
+ ["-I%s" % main_src_dir, "-I%s" % context.objdir],
+ ("WASM_CXXFLAGS", "WASM_CFLAGS"),
+ ),
+ ("LOCAL_INCLUDES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")),
+ (
+ "EXTRA_INCLUDES",
+ ["-I%s/dist/include" % context.config.topobjdir],
+ ("WASM_CXXFLAGS", "WASM_CFLAGS"),
+ ),
+ (
+ "OS_INCLUDES",
+ list(
+ itertools.chain(
+ *(
+ context.config.substs.get(v, [])
+ for v in (
+ "NSPR_CFLAGS",
+ "NSS_CFLAGS",
+ "MOZ_JPEG_CFLAGS",
+ "MOZ_PNG_CFLAGS",
+ "MOZ_ZLIB_CFLAGS",
+ "MOZ_PIXMAN_CFLAGS",
+ )
+ )
+ )
+ ),
+ ("WASM_CXXFLAGS", "WASM_CFLAGS"),
+ ),
+ ("DEBUG", self._debug_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")),
+ (
+ "CLANG_PLUGIN",
+ context.config.substs.get("CLANG_PLUGIN_FLAGS"),
+ ("WASM_CFLAGS", "WASM_CXXFLAGS"),
+ ),
+ ("OPTIMIZE", self._optimize_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")),
+ (
+ "WARNINGS_AS_ERRORS",
+ self._warnings_as_errors(),
+ ("WASM_CXXFLAGS", "WASM_CFLAGS"),
+ ),
+ ("MOZBUILD_CFLAGS", None, ("WASM_CFLAGS",)),
+ ("MOZBUILD_CXXFLAGS", None, ("WASM_CXXFLAGS",)),
+ ("WASM_CFLAGS", context.config.substs.get("WASM_CFLAGS"), ("WASM_CFLAGS",)),
+ (
+ "WASM_CXXFLAGS",
+ context.config.substs.get("WASM_CXXFLAGS"),
+ ("WASM_CXXFLAGS",),
+ ),
+ ("WASM_DEFINES", None, ("WASM_CFLAGS", "WASM_CXXFLAGS")),
+ ("MOZBUILD_WASM_CFLAGS", None, ("WASM_CFLAGS",)),
+ ("MOZBUILD_WASM_CXXFLAGS", None, ("WASM_CXXFLAGS",)),
+ (
+ "NEWPM",
+ context.config.substs.get("MOZ_NEW_PASS_MANAGER_FLAGS"),
+ ("WASM_CFLAGS", "WASM_CXXFLAGS"),
+ ),
+ (
+ "FILE_PREFIX_MAP",
+ context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"),
+ ("WASM_CFLAGS", "WASM_CXXFLAGS"),
+ ),
+ ("STL", context.config.substs.get("STL_FLAGS"), ("WASM_CXXFLAGS",)),
+ )
+
+ TargetCompileFlags.__init__(self, context)
+
+ def _debug_flags(self):
+ substs = self._context.config.substs
+ if substs.get("MOZ_DEBUG") or substs.get("MOZ_DEBUG_SYMBOLS"):
+ return ["-g"]
+ return []
+
+ def _optimize_flags(self):
+ if not self._context.config.substs.get("MOZ_OPTIMIZE"):
+ return []
+
+ # We don't want `MOZ_{PGO_,}OPTIMIZE_FLAGS` here because they may contain
+ # optimization flags that aren't suitable for wasm (e.g. -freorder-blocks).
+ # Just optimize for size in all cases; we may want to make this
+ # configurable.
+ return ["-Os"]
+
+
+class FinalTargetValue(ContextDerivedValue, six.text_type):
+ def __new__(cls, context, value=""):
+ if not value:
+ value = "dist/"
+ if context["XPI_NAME"]:
+ value += "xpi-stage/" + context["XPI_NAME"]
+ else:
+ value += "bin"
+ if context["DIST_SUBDIR"]:
+ value += "/" + context["DIST_SUBDIR"]
+ return six.text_type.__new__(cls, value)
+
+
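+# Illustrative sketch: with XPI_NAME 'test' and DIST_SUBDIR 'browser' set in
+# the context, FinalTargetValue(ctx) is 'dist/xpi-stage/test/browser'; with
+# neither set, it defaults to 'dist/bin'.
+
+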
+def Enum(*values):
+ assert len(values)
+ default = values[0]
+
+ class EnumClass(object):
+ def __new__(cls, value=None):
+ if value is None:
+ return default
+ if value in values:
+ return value
+ raise ValueError(
+ "Invalid value. Allowed values are: %s"
+ % ", ".join(repr(v) for v in values)
+ )
+
+ return EnumClass
+
+
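+# Illustrative sketch of Enum (the values here are hypothetical):
+#
+#     Mode = Enum('on', 'off')
+#     Mode()       # -> 'on' (the first value is the default)
+#     Mode('off')  # -> 'off'
+#     Mode('x')    # raises ValueError listing the allowed values
+
+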
+class PathMeta(type):
+ """Meta class for the Path family of classes.
+
+ It handles calling __new__ with the right arguments in cases where a Path
+ is instantiated with another instance of Path instead of having received a
+ context.
+
+ It also makes Path(context, value) instantiate one of the
+ subclasses depending on the value, allowing callers to do
+ standard type checking (isinstance(path, ObjDirPath)) instead
+ of checking the value itself (path.startswith('!')).
+ """
+
+ def __call__(cls, context, value=None):
+ if isinstance(context, Path):
+ assert value is None
+ value = context
+ context = context.context
+ else:
+ assert isinstance(context, Context)
+ if isinstance(value, Path):
+ context = value.context
+ if not issubclass(cls, (SourcePath, ObjDirPath, AbsolutePath)):
+ if value.startswith("!"):
+ cls = ObjDirPath
+ elif value.startswith("%"):
+ cls = AbsolutePath
+ else:
+ cls = SourcePath
+ return super(PathMeta, cls).__call__(context, value)
+
+
+class Path(six.with_metaclass(PathMeta, ContextDerivedValue, six.text_type)):
+ """Stores and resolves a source path relative to a given context
+
+ This class is used as a backing type for some of the sandbox variables.
+ It expresses paths relative to a context. Supported paths are:
+ - '/topsrcdir/relative/paths'
+ - 'srcdir/relative/paths'
+ - '!/topobjdir/relative/paths'
+ - '!objdir/relative/paths'
+ - '%/filesystem/absolute/paths'
+ """
+
+ def __new__(cls, context, value=None):
+ self = super(Path, cls).__new__(cls, value)
+ self.context = context
+ self.srcdir = context.srcdir
+ return self
+
+ def join(self, *p):
+ """ContextDerived equivalent of `mozpath.join(self, *p)`, returning a
+ new Path instance.
+ """
+ return Path(self.context, mozpath.join(self, *p))
+
+ def __cmp__(self, other):
+ # We expect this function to never be called to avoid issues in the
+ # switch from Python 2 to 3.
+ raise AssertionError()
+
+ def _cmp(self, other, op):
+ if isinstance(other, Path) and self.srcdir != other.srcdir:
+ return op(self.full_path, other.full_path)
+ return op(six.text_type(self), other)
+
+ def __eq__(self, other):
+ return self._cmp(other, operator.eq)
+
+ def __ne__(self, other):
+ return self._cmp(other, operator.ne)
+
+ def __lt__(self, other):
+ return self._cmp(other, operator.lt)
+
+ def __gt__(self, other):
+ return self._cmp(other, operator.gt)
+
+ def __le__(self, other):
+ return self._cmp(other, operator.le)
+
+ def __ge__(self, other):
+ return self._cmp(other, operator.ge)
+
+ def __repr__(self):
+ return "<%s (%s)%s>" % (self.__class__.__name__, self.srcdir, self)
+
+ def __hash__(self):
+ return hash(self.full_path)
+
+ @memoized_property
+ def target_basename(self):
+ return mozpath.basename(self.full_path)
+
+
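+# Illustrative sketch of the prefix-based dispatch (assuming ctx is a Context
+# with a valid config):
+#
+#     Path(ctx, 'foo.cpp')    # -> SourcePath, relative to ctx.srcdir
+#     Path(ctx, '/foo.cpp')   # -> SourcePath, relative to topsrcdir
+#     Path(ctx, '!foo.h')     # -> ObjDirPath, relative to ctx.objdir
+#     Path(ctx, '%/usr/lib')  # -> AbsolutePath
+
+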
+class SourcePath(Path):
+ """Like Path, but limited to paths in the source directory."""
+
+ def __new__(cls, context, value=None):
+ if value.startswith("!"):
+ raise ValueError(f'Object directory paths are not allowed\nPath: "{value}"')
+ if value.startswith("%"):
+ raise ValueError(
+ f'Filesystem absolute paths are not allowed\nPath: "{value}"'
+ )
+ self = super(SourcePath, cls).__new__(cls, context, value)
+
+ if value.startswith("/"):
+ path = mozpath.join(context.config.topsrcdir, value[1:])
+ else:
+ path = mozpath.join(self.srcdir, value)
+ self.full_path = mozpath.normpath(path)
+ return self
+
+ @memoized_property
+ def translated(self):
+ """Returns the corresponding path in the objdir.
+
+ Ideally, we wouldn't need this function, but because source paths
+ under topsrcdir and paths under the external source dir both end up
+ mixed in the objdir (aka pseudo-rework), it is needed.
+ return ObjDirPath(self.context, "!%s" % self).full_path
+
+
+class RenamedSourcePath(SourcePath):
+ """Like SourcePath, but with a different base name when installed.
+
+ The constructor takes a tuple of (source, target_basename).
+
+ This class is not meant to be exposed to moz.build sandboxes as of now,
+ and is not supported by the RecursiveMake backend.
+ """
+
+ def __new__(cls, context, value):
+ assert isinstance(value, tuple)
+ source, target_basename = value
+ self = super(RenamedSourcePath, cls).__new__(cls, context, source)
+ self._target_basename = target_basename
+ return self
+
+ @property
+ def target_basename(self):
+ return self._target_basename
+
+
+class ObjDirPath(Path):
+ """Like Path, but limited to paths in the object directory."""
+
+ def __new__(cls, context, value=None):
+ if not value.startswith("!"):
+ raise ValueError("Object directory paths must start with ! prefix")
+ self = super(ObjDirPath, cls).__new__(cls, context, value)
+
+ if value.startswith("!/"):
+ path = mozpath.join(context.config.topobjdir, value[2:])
+ else:
+ path = mozpath.join(context.objdir, value[1:])
+ self.full_path = mozpath.normpath(path)
+ return self
+
+
+class AbsolutePath(Path):
+ """Like Path, but allows arbitrary paths outside the source and object directories."""
+
+ def __new__(cls, context, value=None):
+ if not value.startswith("%"):
+ raise ValueError("Absolute paths must start with % prefix")
+ if not os.path.isabs(value[1:]):
+ raise ValueError("Path '%s' is not absolute" % value[1:])
+ self = super(AbsolutePath, cls).__new__(cls, context, value)
+ self.full_path = mozpath.normpath(value[1:])
+ return self
+
+
+@memoize
+def ContextDerivedTypedList(klass, base_class=List):
+ """Specialized TypedList for use with ContextDerivedValue types."""
+ assert issubclass(klass, ContextDerivedValue)
+
+ class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
+ def __init__(self, context, iterable=[], **kwargs):
+ self.context = context
+ super(_TypedList, self).__init__(iterable, **kwargs)
+
+ def normalize(self, e):
+ if not isinstance(e, klass):
+ e = klass(self.context, e)
+ return e
+
+ return _TypedList
+
+
+@memoize
+def ContextDerivedTypedListWithItems(type, base_class=List):
+ """Specialized TypedList for use with ContextDerivedValue types."""
+
+ class _TypedListWithItems(ContextDerivedTypedList(type, base_class)):
+ def __getitem__(self, name):
+ name = self.normalize(name)
+ return super(_TypedListWithItems, self).__getitem__(name)
+
+ return _TypedListWithItems
+
+
+@memoize
+def ContextDerivedTypedRecord(*fields):
+ """Factory for objects with certain properties and dynamic
+ type checks.
+
+ This API is extremely similar to the TypedNamedTuple API,
+ except that properties may be mutated. This supports syntax like:
+
+ .. code-block:: python
+
+ VARIABLE_NAME.property += [
+ 'item1',
+ 'item2',
+ ]
+ """
+
+ class _TypedRecord(ContextDerivedValue):
+ __slots__ = tuple([name for name, _ in fields])
+
+ def __init__(self, context):
+ for fname, ftype in self._fields.items():
+ if issubclass(ftype, ContextDerivedValue):
+ setattr(self, fname, self._fields[fname](context))
+ else:
+ setattr(self, fname, self._fields[fname]())
+
+ def __setattr__(self, name, value):
+ if name in self._fields and not isinstance(value, self._fields[name]):
+ value = self._fields[name](value)
+ object.__setattr__(self, name, value)
+
+ _TypedRecord._fields = dict(fields)
+ return _TypedRecord
+
+
+class Schedules(object):
+ """Similar to a ContextDerivedTypedRecord, but with different behavior
+ for the properties:
+
+ * VAR.inclusive can only be appended to (+=), and can only contain values
+ from mozbuild.schedules.INCLUSIVE_COMPONENTS
+
+ * VAR.exclusive can only be assigned to (no +=), and can only contain
+ values from mozbuild.schedules.ALL_COMPONENTS
+ """
+
+ __slots__ = ("_exclusive", "_inclusive")
+
+ def __init__(self, inclusive=None, exclusive=None):
+ if inclusive is None:
+ self._inclusive = TypedList(Enum(*schedules.INCLUSIVE_COMPONENTS))()
+ else:
+ self._inclusive = inclusive
+ if exclusive is None:
+ self._exclusive = ImmutableStrictOrderingOnAppendList(
+ schedules.EXCLUSIVE_COMPONENTS
+ )
+ else:
+ self._exclusive = exclusive
+
+ # inclusive is mutable but cannot be assigned to (+= only)
+ @property
+ def inclusive(self):
+ return self._inclusive
+
+ @inclusive.setter
+ def inclusive(self, value):
+ if value is not self._inclusive:
+ raise AttributeError("Cannot assign to this value - use += instead")
+ unexpected = [v for v in value if v not in schedules.INCLUSIVE_COMPONENTS]
+ if unexpected:
+ raise Exception(
+ "unexpected inclusive component(s) " + ", ".join(unexpected)
+ )
+
+ # exclusive is immutable but can be set (= only)
+ @property
+ def exclusive(self):
+ return self._exclusive
+
+ @exclusive.setter
+ def exclusive(self, value):
+ if not isinstance(value, (tuple, list)):
+ raise Exception("expected a tuple or list")
+ unexpected = [v for v in value if v not in schedules.ALL_COMPONENTS]
+ if unexpected:
+ raise Exception(
+ "unexpected exclusive component(s) " + ", ".join(unexpected)
+ )
+ self._exclusive = ImmutableStrictOrderingOnAppendList(sorted(value))
+
+ # components provides a synthetic summary of all components
+ @property
+ def components(self):
+ return list(sorted(set(self._inclusive) | set(self._exclusive)))
+
+ # The `Files` context uses | to combine SCHEDULES from multiple levels; at this
+ # point the immutability is no longer needed so we use plain lists
+ def __or__(self, other):
+ inclusive = self._inclusive + other._inclusive
+ if other._exclusive == self._exclusive:
+ exclusive = self._exclusive
+ elif self._exclusive == schedules.EXCLUSIVE_COMPONENTS:
+ exclusive = other._exclusive
+ elif other._exclusive == schedules.EXCLUSIVE_COMPONENTS:
+ exclusive = self._exclusive
+ else:
+ # in a case where two SCHEDULES.exclusive set different values, take
+ # the later one; this acts the way we expect assignment to work.
+ exclusive = other._exclusive
+ return Schedules(inclusive=inclusive, exclusive=exclusive)
+
+
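+# Illustrative sketch of combining Schedules with | ('windows' here is
+# hypothetical; real component names live in mozbuild.schedules):
+#
+#     a = Schedules(); a.exclusive = ['android']
+#     b = Schedules(); b.exclusive = ['windows']
+#     (a | b).exclusive   # -> ['windows']: the later assignment wins
+
+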
+@memoize
+def ContextDerivedTypedHierarchicalStringList(type):
+ """Specialized HierarchicalStringList for use with ContextDerivedValue
+ types."""
+
+ class _TypedListWithItems(ContextDerivedValue, HierarchicalStringList):
+ __slots__ = ("_strings", "_children", "_context")
+
+ def __init__(self, context):
+ self._strings = ContextDerivedTypedList(type, StrictOrderingOnAppendList)(
+ context
+ )
+ self._children = {}
+ self._context = context
+
+ def _get_exportvariable(self, name):
+ child = self._children.get(name)
+ if not child:
+ child = self._children[name] = _TypedListWithItems(self._context)
+ return child
+
+ return _TypedListWithItems
+
+
+def OrderedPathListWithAction(action):
+ """Returns a class which behaves as a StrictOrderingOnAppendList, but
+ invokes the given callable with each input and a context as it is
+ read, storing a tuple including the result and the original item.
+
+ This is used to extend moz.build reading to make more data available in
+ filesystem-reading mode.
+ """
+
+ class _OrderedListWithAction(
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendListWithAction)
+ ):
+ def __init__(self, context, *args):
+ def _action(item):
+ return item, action(context, item)
+
+ super(_OrderedListWithAction, self).__init__(context, action=_action, *args)
+
+ return _OrderedListWithAction
+
+
+ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
+ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
+
+BugzillaComponent = TypedNamedTuple(
+ "BugzillaComponent", [("product", six.text_type), ("component", six.text_type)]
+)
+SchedulingComponents = ContextDerivedTypedRecord(
+ ("inclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
+ ("exclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
+)
+
+GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory(
+ {"script": six.text_type, "inputs": list, "force": bool, "flags": list}
+)
+
+
+class Files(SubContext):
+ """Metadata attached to files.
+
+ It is common to want to annotate files with metadata, such as which
+ Bugzilla component tracks issues with certain files. This sub-context is
+ where we stick that metadata.
+
+ The argument to this sub-context is a file matching pattern that is applied
+ against the host file's directory. If the pattern matches a file whose info
+ is currently being sought, the metadata attached to this instance will be
+ applied to that file.
+
+ Patterns are collections of filename characters with ``/`` used as the
+ directory separator (UNIX-style paths) and ``*`` and ``**`` used to denote
+ wildcard matching.
+
+ Patterns without the ``*`` character are literal matches and will match at
+ most one entity.
+
+ Patterns with ``*`` or ``**`` are wildcard matches. ``*`` matches files
+ at least within a single directory. ``**`` matches files across several
+ directories.
+
+ ``foo.html``
+ Will match only the ``foo.html`` file in the current directory.
+ ``*.jsm``
+ Will match all ``.jsm`` files in the current directory.
+ ``**/*.cpp``
+ Will match all ``.cpp`` files in this and all child directories.
+ ``foo/*.css``
+ Will match all ``.css`` files in the ``foo/`` directory.
+ ``bar/*``
+ Will match all files in the ``bar/`` directory and all of its
+ children directories.
+ ``bar/**``
+ This is equivalent to ``bar/*`` above.
+ ``bar/**/foo``
+ Will match all ``foo`` files in the ``bar/`` directory and all of its
+ children directories.
+
+ The difference in behavior between ``*`` and ``**`` is only evident if
+ a pattern follows the ``*`` or ``**``. A pattern ending with ``*`` is
+ greedy. ``**`` is needed when you need an additional pattern after the
+ wildcard. e.g. ``**/foo``.
+ """
+
+ VARIABLES = {
+ "BUG_COMPONENT": (
+ BugzillaComponent,
+ tuple,
+ """The bug component that tracks changes to these files.
+
+ Values are a 2-tuple of unicode describing the Bugzilla product and
+ component. e.g. ``('Firefox Build System', 'General')``.
+ """,
+ ),
+ "FINAL": (
+ bool,
+ bool,
+ """Mark variable assignments as finalized.
+
+ During normal processing, values from newer Files contexts
+ overwrite previously set values. Last write wins. This behavior is
+ not always desired. ``FINAL`` provides a mechanism to prevent
+ further updates to a variable.
+
+ When ``FINAL`` is set, the value of all variables defined in this
+ context are marked as frozen and all subsequent writes to them
+ are ignored during metadata reading.
+
+ See :ref:`mozbuild_files_metadata_finalizing` for more info.
+ """,
+ ),
+ "SCHEDULES": (
+ Schedules,
+ list,
+ """Maps source files to the CI tasks that should be scheduled when
+ they change. The tasks are grouped by named components, and those
+ names appear again in the taskgraph configuration
+ (``$topsrcdir/taskgraph/``).
+
+ Some components are "inclusive", meaning that changes to most files
+ do not schedule them, aside from those described in a Files
+ subcontext. For example, py-lint tasks need not be scheduled for
+ most changes, but should be scheduled when any Python file changes.
+ Such components are named by appending to ``SCHEDULES.inclusive``::
+
+ with Files('**.py'):
+ SCHEDULES.inclusive += ['py-lint']
+
+ Other components are 'exclusive', meaning that changes to most
+ files schedule them, but some files affect only one or two
+ components. For example, most files schedule builds and tests of
+ Firefox for Android, OS X, Windows, and Linux, but files under
+ `mobile/android/` affect Android builds and tests exclusively, so
+ builds for other operating systems are not needed. Test suites
+ provide another example: most files schedule reftests, but changes
+ to reftest scripts need only schedule reftests and no other suites.
+
+ Exclusive components are named by setting ``SCHEDULES.exclusive``::
+
+ with Files('mobile/android/**'):
+ SCHEDULES.exclusive = ['android']
+ """,
+ ),
+ }
+
+ def __init__(self, parent, *patterns):
+ super(Files, self).__init__(parent)
+ self.patterns = patterns
+ self.finalized = set()
+
+ def __iadd__(self, other):
+ assert isinstance(other, Files)
+
+ for k, v in other.items():
+ if k == "SCHEDULES" and "SCHEDULES" in self:
+ self["SCHEDULES"] = self["SCHEDULES"] | v
+ continue
+
+ # Ignore updates to finalized flags.
+ if k in self.finalized:
+ continue
+
+ # Only finalize variables defined in this instance.
+ if k == "FINAL":
+ self.finalized |= set(other) - {"FINAL"}
+ continue
+
+ self[k] = v
+
+ return self
+
+ def asdict(self):
+ """Return this instance as a dict with built-in data structures.
+
+ Call this to obtain an object suitable for serializing.
+ """
+ d = {}
+ if "BUG_COMPONENT" in self:
+ bc = self["BUG_COMPONENT"]
+ d["bug_component"] = (bc.product, bc.component)
+
+ return d
+
+ @staticmethod
+ def aggregate(files):
+ """Given a mapping of path to Files, obtain aggregate results.
+
+ Consumers may want to extract useful information from a collection of
+ Files describing paths. e.g. given the files info data for N paths,
+ recommend a single bug component based on the most frequent one. This
+ function provides logic for deriving aggregate knowledge from a
+ collection of path File metadata.
+
+ Note: the intent of this function is to operate on the result of
+ :py:func:`mozbuild.frontend.reader.BuildReader.files_info`. The
+ :py:func:`mozbuild.frontend.context.Files` instances passed in are
+ thus the "collapsed" (``__iadd__``ed) results of all ``Files`` from all
+ moz.build files relevant to a specific path, not individual ``Files``
+ instances from a single moz.build file.
+ """
+ d = {}
+
+ bug_components = Counter()
+
+ for f in files.values():
+ bug_component = f.get("BUG_COMPONENT")
+ if bug_component:
+ bug_components[bug_component] += 1
+
+ d["bug_component_counts"] = []
+ for c, count in bug_components.most_common():
+ component = (c.product, c.component)
+ d["bug_component_counts"].append((c, count))
+
+ if "recommended_bug_component" not in d:
+ d["recommended_bug_component"] = component
+ recommended_count = count
+ elif count == recommended_count:
+ # Don't recommend a component if it doesn't have a clear lead.
+ d["recommended_bug_component"] = None
+
+ # In case there are no bug components.
+ d.setdefault("recommended_bug_component", None)
+
+ return d
+
+
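+# Illustrative sketch of Files.aggregate() output (values hypothetical):
+#
+#     Files.aggregate(files_by_path)
+#     # -> {'bug_component_counts': [(BugzillaComponent(...), 2), ...],
+#     #     'recommended_bug_component': ('Product', 'Component') or None}
+
+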
+# This defines functions that create sub-contexts.
+#
+# Values are classes that are SubContexts. The class name will be turned into
+# a function that when called emits an instance of that class.
+#
+# Arbitrary arguments can be passed to the class constructor. The first
+# argument is always the parent context. It is up to each class to perform
+# argument validation.
+SUBCONTEXTS = [Files]
+
+for cls in SUBCONTEXTS:
+ if not issubclass(cls, SubContext):
+ raise ValueError("SUBCONTEXTS entry not a SubContext class: %s" % cls)
+
+ if not hasattr(cls, "VARIABLES"):
+ raise ValueError("SUBCONTEXTS entry does not have VARIABLES: %s" % cls)
+
+SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS}
+
+
+# This defines the set of mutable global variables.
+#
+# Each variable is a tuple of:
+#
+# (storage_type, input_types, docs)
+
+VARIABLES = {
+ "SOURCES": (
+ ContextDerivedTypedListWithItems(
+ Path,
+ StrictOrderingOnAppendListWithFlagsFactory({"no_pgo": bool, "flags": List}),
+ ),
+ list,
+ """Source code files.
+
+ This variable contains a list of source code files to compile.
+ Accepts assembler, C, C++, Objective C/C++.
+ """,
+ ),
+ "FILES_PER_UNIFIED_FILE": (
+ int,
+ int,
+ """The number of source files to compile into each unified source file.
+
+ """,
+ ),
+ "IS_RUST_LIBRARY": (
+ bool,
+ bool,
+ """Whether the current library defined by this moz.build is built by Rust.
+
+ The library defined by this moz.build should have a build definition in
+ a Cargo.toml file that exists in this moz.build's directory.
+ """,
+ ),
+ "IS_GKRUST": (
+ bool,
+ bool,
+ """Whether the current library defined by this moz.build is gkrust.
+
+ Indicates whether the current library contains rust for libxul.
+ """,
+ ),
+ "RUST_LIBRARY_FEATURES": (
+ List,
+ list,
+ """Cargo features to activate for this library.
+
+ This variable should not be used directly; you should be using the
+ RustLibrary template instead.
+ """,
+ ),
+ "HOST_RUST_LIBRARY_FEATURES": (
+ List,
+ list,
+ """Cargo features to activate for this host library.
+
+ This variable should not be used directly; you should be using the
+ HostRustLibrary template instead.
+ """,
+ ),
+ "RUST_TESTS": (
+ TypedList(six.text_type),
+ list,
+ """Names of Rust tests to build and run via `cargo test`.
+ """,
+ ),
+ "RUST_TEST_FEATURES": (
+ TypedList(six.text_type),
+ list,
+ """Cargo features to activate for RUST_TESTS.
+ """,
+ ),
+ "UNIFIED_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files that can be compiled together.
+
+ This variable contains a list of source code files to compile,
+ that can be concatenated all together and built as a single source
+ file. This can help make the build faster and reduce the debug info
+ size.
+ """,
+ ),
+ "GENERATED_FILES": (
+ GeneratedFilesList,
+ list,
+ """Generic generated files.
+
+ Unless you have a reason not to, use the GeneratedFile template rather
+ than referencing GENERATED_FILES directly. The GeneratedFile template
+ has all the same arguments as the attributes listed below (``script``,
+ ``inputs``, ``flags``, ``force``), plus an additional ``entry_point``
+ argument to specify a particular function to run in the given script.
+
+ This variable contains a list of files for the build system to
+ generate at export time. The generation method may be declared
+ with optional ``script``, ``inputs``, ``flags``, and ``force``
+ attributes on individual entries.
+ If the optional ``script`` attribute is not present on an entry, it
+ is assumed that rules for generating the file are present in
+ the associated Makefile.in.
+
+ Example::
+
+ GENERATED_FILES += ['bar.c', 'baz.c', 'foo.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py'
+ bar.inputs = ['datafile-for-bar']
+ foo = GENERATED_FILES['foo.c']
+ foo.script = 'generate.py'
+ foo.inputs = ['datafile-for-foo']
+
+ This definition will generate bar.c by calling the main method of
+ generate.py with an open (for writing) file object for bar.c, and
+ the string ``datafile-for-bar``. In a similar fashion, the main
+ method of generate.py will also be called with an open
+ (for writing) file object for foo.c and the string
+ ``datafile-for-foo``. Please note that only string arguments are
+ supported for passing to scripts, and that all arguments provided
+ to the script should be filenames relative to the directory in which
+ the moz.build file is located.
+
+ To enable using the same script for generating multiple files with
+ slightly different non-filename parameters, alternative entry points
+ into ``script`` can be specified::
+
+ GENERATED_FILES += ['bar.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py:make_bar'
+
+ The chosen script entry point may optionally return a set of strings,
+ indicating extra files the output depends on.
+
+ When the ``flags`` attribute is present, the given list of flags is
+ passed as extra arguments following the inputs.
+
+ When the ``force`` attribute is present, the file is generated every
+ build, regardless of whether it is stale. This is special to the
+ RecursiveMake backend and intended for special situations only (e.g.,
+ localization). Please consult a build peer (on the #build channel at
+ https://chat.mozilla.org) before using ``force``.
+ """,
+ ),
+ "DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare.
+
+ These are passed in to the compiler as ``-Dkey='value'`` for string
+ values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
+ value is True. Note that for string values, the outer-level of
+ single-quotes will be consumed by the shell. If you want to have
+ a string-literal in the program, the value needs to have
+ double-quotes.
+
+ Example::
+
+ DEFINES['NS_NO_XPCOM'] = True
+ DEFINES['MOZ_EXTENSIONS_DB_SCHEMA'] = 15
+ DEFINES['DLL_SUFFIX'] = '".so"'
+
+ This will result in the compiler flags ``-DNS_NO_XPCOM``,
+ ``-DMOZ_EXTENSIONS_DB_SCHEMA=15``, and ``-DDLL_SUFFIX='".so"'``,
+ respectively.
+
+ Note that these entries are not necessarily passed to the assembler.
+ Whether they are depends on the type of assembly file. As an
+ alternative, you may add a ``-DKEY=value`` entry to ``ASFLAGS``.
+ """,
+ ),
+ "DELAYLOAD_DLLS": (
+ List,
+ list,
+ """Delay-loaded DLLs.
+
+ This variable contains a list of DLL files which the module being linked
+ should load lazily. This only has an effect when building with MSVC.
+ """,
+ ),
+ "DIRS": (
+ ContextDerivedTypedList(SourcePath),
+ list,
+ """Child directories to descend into looking for build frontend files.
+
+ This works similarly to the ``DIRS`` variable in make files. Each str
+ value in the list is the name of a child directory. When this file is
+ done parsing, the build reader will descend into each listed directory
+ and read the frontend file there. If there is no frontend file, an error
+ is raised.
+
+ Values are relative paths. They can be multiple directory levels
+ above or below. Use ``..`` for parent directories and ``/`` for path
+ delimiters.
+ """,
+ ),
+ "FINAL_TARGET_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed into the application directory.
+
+ ``FINAL_TARGET_FILES`` will copy (or symlink, if the platform supports it)
+ the contents of its files to the directory specified by
+ ``FINAL_TARGET`` (typically ``dist/bin``). Files that are destined for a
+ subdirectory can be specified using attribute access or dict access.
+ For example, to export ``foo.png`` to the top-level directory and
+ ``bar.svg`` to the directory ``images/do-not-use``, append to
+ ``FINAL_TARGET_FILES`` like so::
+
+ FINAL_TARGET_FILES += ['foo.png']
+ FINAL_TARGET_FILES.images['do-not-use'] += ['bar.svg']
+ """,
+ ),
+ "FINAL_TARGET_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``FINAL_TARGET_FILES``, with preprocessing.
+ """,
+ ),
+ "LOCALIZED_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of locale-dependent files to be installed into the application
+ directory.
+
+ This functions similarly to ``FINAL_TARGET_FILES``, but the files are
+ sourced from the locale directory and will vary per localization.
+ For an en-US build, this is functionally equivalent to
+ ``FINAL_TARGET_FILES``. For a build with ``--enable-ui-locale``,
+ the file will be taken from ``$LOCALE_SRCDIR``, with the leading
+ ``en-US`` removed. For a l10n repack of an en-US build, the file
+ will be taken from the first location where it exists from:
+ * the merged locale directory if it exists
+ * ``$LOCALE_SRCDIR`` with the leading ``en-US`` removed
+ * the in-tree en-US location
+
+ Source directory paths specified here must include a leading ``en-US``.
+ Wildcards are allowed, and will be expanded at the time of locale packaging to match
+ files in the locale directory.
+
+ Object directory paths are allowed here only if the path matches an entry in
+ ``LOCALIZED_GENERATED_FILES``.
+
+ Files that are missing from a locale will typically have the en-US
+ version used, but for wildcard expansions only files from the
+ locale directory will be used, even if that means no files will
+ be copied.
+
+ Example::
+
+ LOCALIZED_FILES.foo += [
+ 'en-US/foo.js',
+ 'en-US/things/*.ini',
+ ]
+
+ If this was placed in ``toolkit/locales/moz.build``, it would copy
+ ``toolkit/locales/en-US/foo.js`` and
+ ``toolkit/locales/en-US/things/*.ini`` to ``$(DIST)/bin/foo`` in an
+ en-US build, and in a build of a different locale (or a repack),
+ it would copy ``$(LOCALE_SRCDIR)/toolkit/foo.js`` and
+ ``$(LOCALE_SRCDIR)/toolkit/things/*.ini``.
+ """,
+ ),
+ "LOCALIZED_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``LOCALIZED_FILES``, with preprocessing.
+
+ Note that the ``AB_CD`` define is available and expands to the current
+ locale being packaged, as with preprocessed entries in jar manifests.
+ """,
+ ),
+ "LOCALIZED_GENERATED_FILES": (
+ GeneratedFilesList,
+ list,
+ """Like ``GENERATED_FILES``, but for files whose content varies based on the locale in use.
+
+ For simple cases of text substitution, prefer ``LOCALIZED_PP_FILES``.
+
+ Refer to the documentation of ``GENERATED_FILES``; for the most part things work the same.
+ The two major differences are:
+ 1. The function in the Python script will be passed an additional keyword argument `locale`
+ which provides the locale in use, i.e. ``en-US``.
+ 2. The ``inputs`` list may contain paths to files that will be taken from the locale
+ source directory (see ``LOCALIZED_FILES`` for a discussion of the specifics). Paths
+ in ``inputs`` starting with ``en-US/`` or containing ``locales/en-US/`` are considered
+ localized files.
+
+ To place the generated output file in a specific location, list its objdir path in
+ ``LOCALIZED_FILES``.
+
+ In addition, ``LOCALIZED_GENERATED_FILES`` can use the special substitutions ``{AB_CD}``
+ and ``{AB_rCD}`` in their output paths. ``{AB_CD}`` expands to the current locale during
+ multi-locale builds and single-locale repacks and ``{AB_rCD}`` expands to an
+ Android-specific encoding of the current locale. Both expand to the empty string when the
+ current locale is ``en-US``.
+ """,
+ ),
+ "OBJDIR_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed anywhere in the objdir. Use sparingly.
+
+ ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying
+ anywhere in the object directory. This is intended for various one-off
+ cases, not for general use. If you wish to add entries to OBJDIR_FILES,
+ please consult a build peer (on the #build channel at https://chat.mozilla.org).
+ """,
+ ),
+ "OBJDIR_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
+ """,
+ ),
+ "FINAL_LIBRARY": (
+ six.text_type,
+ six.text_type,
+ """Library in which the objects of the current directory will be linked.
+
+ This variable contains the name of a library, defined elsewhere with
+ ``LIBRARY_NAME``, in which the objects of the current directory will be
+ linked.
+ """,
+ ),
+ "CPP_UNIT_TESTS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of C++ unit test names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "FORCE_SHARED_LIB": (
+ bool,
+ bool,
+ """Whether the library in this directory is a shared library.
+ """,
+ ),
+ "FORCE_STATIC_LIB": (
+ bool,
+ bool,
+ """Whether the library in this directory is a static library.
+ """,
+ ),
+ "USE_STATIC_LIBS": (
+ bool,
+ bool,
+ """Whether the code in this directory is a built against the static
+ runtime library.
+
+ This variable only has an effect when building with MSVC.
+ """,
+ ),
+ "HOST_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files to compile with the host compiler.
+
+ This variable contains a list of source code files to compile with
+ the host compiler.
+ """,
+ ),
+ "WASM_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files to compile with the wasm compiler.
+ """,
+ ),
+ "HOST_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """Name of target library generated when cross compiling.
+ """,
+ ),
+ "LIBRARY_DEFINES": (
+ OrderedDict,
+ dict,
+ """Dictionary of compiler defines to declare for the entire library.
+
+ This variable works like DEFINES, except that declarations apply to all
+ libraries that link into this library via FINAL_LIBRARY.
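+
+ A short sketch (``MOZ_STATIC_WIDGET`` is an illustrative define name)::
+
+ LIBRARY_DEFINES['MOZ_STATIC_WIDGET'] = True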
+ """,
+ ),
+ "LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The code name of the library generated for a directory.
+
+ By default, STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
+ In ``example/components/moz.build``::
+
+ LIBRARY_NAME = 'xpcomsample'
+
+ would generate ``example/components/libxpcomsample.so`` on Linux, or
+ ``example/components/xpcomsample.lib`` on Windows.
+ """,
+ ),
+ "SHARED_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the static library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_SHARED_LIB.
+ """,
+ ),
+ "SANDBOXED_WASM_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the static sandboxed wasm library generated for a directory.
+ """,
+ ),
+ "SHARED_LIBRARY_OUTPUT_CATEGORY": (
+ six.text_type,
+ six.text_type,
+ """The output category for this context's shared library. If set this will
+ correspond to the build command that will build this shared library, and
+ the library will not be built as part of the default build.
+ """,
+ ),
+ "RUST_LIBRARY_OUTPUT_CATEGORY": (
+ six.text_type,
+ six.text_type,
+ """The output category for this context's rust library. If set this will
+ correspond to the build command that will build this rust library, and
+ the library will not be built as part of the default build.
+ """,
+ ),
+ "IS_FRAMEWORK": (
+ bool,
+ bool,
+ """Whether the library to build should be built as a framework on OSX.
+
+ This implies the name of the library won't be prefixed nor suffixed.
+ Implies FORCE_SHARED_LIB.
+ """,
+ ),
+ "STATIC_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the static library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_STATIC_LIB.
+ """,
+ ),
+ "USE_LIBS": (
+ StrictOrderingOnAppendList,
+ list,
+ """List of libraries to link to programs and libraries.
+ """,
+ ),
+ "HOST_USE_LIBS": (
+ StrictOrderingOnAppendList,
+ list,
+ """List of libraries to link to host programs and libraries.
+ """,
+ ),
+ "HOST_OS_LIBS": (
+ List,
+ list,
+ """List of system libraries for host programs and libraries.
+ """,
+ ),
+ "LOCAL_INCLUDES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Additional directories to be searched for include files by the compiler.
+ """,
+ ),
+ "NO_PGO": (
+ bool,
+ bool,
+ """Whether profile-guided optimization is disable in this directory.
+ """,
+ ),
+ "OS_LIBS": (
+ List,
+ list,
+ """System link libraries.
+
+ This variable contains a list of system libraries to link against.
+ """,
+ ),
+ "RCFILE": (
+ Path,
+ six.text_type,
+ """The program .rc file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "RCINCLUDE": (
+ Path,
+ six.text_type,
+ """The resource script file to be included in the default .res file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "DEFFILE": (
+ Path,
+ six.text_type,
+ """The program .def (module definition) file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "SYMBOLS_FILE": (
+ Path,
+ six.text_type,
+ """A file containing a list of symbols to export from a shared library.
+
+ The given file contains a list of symbols to be exported, and is
+ preprocessed.
+ A special marker "@DATA@" must be added after a symbol name if it
+ points to data instead of code, so that the Windows linker can treat
+ them correctly.
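+
+ A hypothetical sketch: with a ``foo.symbols`` file containing::
+
+ my_exported_function
+ my_exported_table @DATA@
+
+ the moz.build file would set::
+
+ SYMBOLS_FILE = 'foo.symbols'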
+ """,
+ ),
+ "SIMPLE_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of executable names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "SONAME": (
+ six.text_type,
+ six.text_type,
+ """The soname of the shared object currently being linked
+
+ soname is the "logical name" of a shared object, often used to provide
+ version backwards compatibility. This variable makes sense only for
+ shared objects, and is supported only on some unix platforms.
+ """,
+ ),
+ "HOST_SIMPLE_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of host executable names.
+
+ Each name in this variable corresponds to a host executable built
+ from the corresponding source file with the same base name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will
+ be automatically appended to each name. If a name already ends with
+ ``HOST_BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "RUST_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of Rust host executable names.
+
+ Each name in this variable corresponds to an executable built from
+ the Cargo.toml in the same directory.
+ """,
+ ),
+ "HOST_RUST_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of Rust executable names.
+
+ Each name in this variable corresponds to an executable built from
+ the Cargo.toml in the same directory.
+ """,
+ ),
+ "CONFIGURE_SUBST_FILES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """Output files that will be generated using configure-like substitution.
+
+ This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this
+ list, we will search for a file in the srcdir having the name
+ ``{path}.in``. The contents of this file will be read and variable
+ patterns like ``@foo@`` will be substituted with the values of the
+ ``AC_SUBST`` variables declared during configure.
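+
+ For example (an illustrative path)::
+
+ CONFIGURE_SUBST_FILES += ['installer/Makefile']
+
+ would read ``installer/Makefile.in`` from the srcdir and write
+ ``installer/Makefile`` in the objdir with the ``@foo@`` patterns
+ substituted.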
+ """,
+ ),
+ "CONFIGURE_DEFINE_FILES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """Output files generated from configure/config.status.
+
+ This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
+ similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
+ into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
+ """,
+ ),
+ "EXPORTS": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be exported, and in which subdirectories.
+
+ ``EXPORTS`` is generally used to list the include files to be exported to
+ ``dist/include``, but it can be used for other files as well. This variable
+ behaves as a list when appending filenames for export in the top-level
+ directory. Files can also be appended to a field to indicate which
+ subdirectory they should be exported to. For example, to export
+ ``foo.h`` to the top-level directory, and ``bar.h`` to ``mozilla/dom/``,
+ append to ``EXPORTS`` like so::
+
+ EXPORTS += ['foo.h']
+ EXPORTS.mozilla.dom += ['bar.h']
+
+ Entries in ``EXPORTS`` are paths, so objdir paths may be used, but
+ any files listed from the objdir must also be listed in
+ ``GENERATED_FILES``.
+ """,
+ ),
+ "PROGRAM": (
+ six.text_type,
+ six.text_type,
+ """Compiled executable name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``PROGRAM``. If ``PROGRAM`` already ends with
+ ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
+ """,
+ ),
+ "HOST_PROGRAM": (
+ six.text_type,
+ six.text_type,
+ """Compiled host executable name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``HOST_PROGRAM``. If ``HOST_PROGRAM`` already
+ ends with ``HOST_BIN_SUFFIX``, ``HOST_PROGRAM`` will remain unchanged.
+ """,
+ ),
+ "DIST_INSTALL": (
+ Enum(None, False, True),
+ bool,
+ """Whether to install certain files into the dist directory.
+
+ By default, some file types are installed in the dist directory, and
+ some aren't. Set this variable to True to force the installation of
+ files that wouldn't be installed by default. Set this variable to
+ False to prevent the installation of files that would be installed by
+ default.
+
+ This is confusing for historical reasons, but eventually, the behavior
+ will be made explicit.
+ """,
+ ),
+ "JAR_MANIFESTS": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """JAR manifest files that should be processed as part of the build.
+
+ JAR manifests are files in the tree that define how to package files
+ into JARs and how chrome registration is performed. For more info,
+ see :ref:`jar_manifests`.
+ """,
+ ),
+ # IDL Generation.
+ "XPIDL_SOURCES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """XPCOM Interface Definition Files (xpidl).
+
+ This is a list of files that define XPCOM interface definitions.
+ Entries must be files that exist. Entries are almost certainly ``.idl``
+ files.
+ """,
+ ),
+ "XPIDL_MODULE": (
+ six.text_type,
+ six.text_type,
+ """XPCOM Interface Definition Module Name.
+
+ This is the name of the ``.xpt`` file that is created by linking
+ ``XPIDL_SOURCES`` together. If unspecified, it defaults to the same
+ name as ``MODULE``.
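+
+ A typical pairing (``nsIFoo.idl`` and ``foo`` are illustrative names)::
+
+ XPIDL_SOURCES += ['nsIFoo.idl']
+ XPIDL_MODULE = 'foo'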
+ """,
+ ),
+ "XPCOM_MANIFESTS": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """XPCOM Component Manifest Files.
+
+ This is a list of files that define XPCOM components to be added
+ to the component registry.
+ """,
+ ),
+ "PREPROCESSED_IPDL_SOURCES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed IPDL source files.
+
+ These files will be preprocessed, then parsed and converted to
+ ``.cpp`` files.
+ """,
+ ),
+ "IPDL_SOURCES": (
+ StrictOrderingOnAppendList,
+ list,
+ """IPDL source files.
+
+ These are ``.ipdl`` files that will be parsed and converted to
+ ``.cpp`` files.
+ """,
+ ),
+ "WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """,
+ ),
+ "GENERATED_EVENTS_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """WebIDL source files for generated events.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """,
+ ),
+ "TEST_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Test WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files
+ if tests are enabled.
+ """,
+ ),
+ "GENERATED_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Generated WebIDL source files.
+
+ These will be generated from some other files.
+ """,
+ ),
+ "PREPROCESSED_TEST_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed test WebIDL source files.
+
+ These will be preprocessed, then parsed and converted to ``.cpp``
+ and ``.h`` files if tests are enabled.
+ """,
+ ),
+ "PREPROCESSED_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed WebIDL source files.
+
+ These will be preprocessed before being parsed and converted.
+ """,
+ ),
+ "WEBIDL_EXAMPLE_INTERFACES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Names of example WebIDL interfaces to build as part of the build.
+
+ Names in this list correspond to WebIDL interface names defined in
+ WebIDL files included in the build from one of the \*WEBIDL_FILES
+ variables.
+ """,
+ ),
+ # Test declaration.
+ "A11Y_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining a11y tests.
+ """,
+ ),
+ "BROWSER_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining browser chrome tests.
+ """,
+ ),
+ "ANDROID_INSTRUMENTATION_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining Android instrumentation tests.
+ """,
+ ),
+ "FIREFOX_UI_FUNCTIONAL_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining firefox-ui-functional tests.
+ """,
+ ),
+ "MARIONETTE_LAYOUT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining marionette-layout tests.
+ """,
+ ),
+ "MARIONETTE_UNIT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining marionette-unit tests.
+ """,
+ ),
+ "METRO_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining metro browser chrome tests.
+ """,
+ ),
+ "MOCHITEST_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining mochitest chrome tests.
+ """,
+ ),
+ "MOCHITEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining mochitest tests.
+ """,
+ ),
+ "REFTEST_MANIFESTS": (
+ ReftestManifestList,
+ list,
+ """List of manifest files defining reftests.
+
+ These are commonly named reftest.list.
+ """,
+ ),
+ "CRASHTEST_MANIFESTS": (
+ ReftestManifestList,
+ list,
+ """List of manifest files defining crashtests.
+
+ These are commonly named crashtests.list.
+ """,
+ ),
+ "XPCSHELL_TESTS_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining xpcshell tests.
+ """,
+ ),
+ "PYTHON_UNITTEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining python unit tests.
+ """,
+ ),
+ "PERFTESTS_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining MozPerftest performance tests.
+ """,
+ ),
+ "CRAMTEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining cram unit tests.
+ """,
+ ),
+ "TELEMETRY_TESTS_CLIENT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining telemetry client tests.
+ """,
+ ),
+ # The following variables are used to control the target of installed files.
+ "XPI_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of an extension XPI to generate.
+
+ When this variable is present, the results of this directory will end up
+ being packaged into an extension instead of the main dist/bin results.
+ """,
+ ),
+ "DIST_SUBDIR": (
+ six.text_type,
+ six.text_type,
+ """The name of an alternate directory to install files to.
+
+ When this variable is present, the results of this directory will end up
+ being placed in the $(DIST_SUBDIR) subdirectory of where it would
+ otherwise be placed.
+ """,
+ ),
+ "FINAL_TARGET": (
+ FinalTargetValue,
+ six.text_type,
+ """The name of the directory to install targets to.
+
+ The directory is relative to the top of the object directory. The
+ default value is dependent on the values of XPI_NAME and DIST_SUBDIR. If
+ neither are present, the result is dist/bin. If XPI_NAME is present, the
+ result is dist/xpi-stage/$(XPI_NAME). If DIST_SUBDIR is present, then
+ the $(DIST_SUBDIR) directory of the otherwise default value is used.
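+
+ For example, with hypothetical values ``XPI_NAME = 'test-addon'`` and
+ ``DIST_SUBDIR = 'browser'``, the default would be
+ ``dist/xpi-stage/test-addon/browser``.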
+ """,
+ ),
+ "USE_EXTENSION_MANIFEST": (
+ bool,
+ bool,
+ """Controls the name of the manifest for JAR files.
+
+ By default, the name of the manifest is ${JAR_MANIFEST}.manifest.
+ Setting this variable to ``True`` changes the name of the manifest to
+ chrome.manifest.
+ """,
+ ),
+ "GYP_DIRS": (
+ StrictOrderingOnAppendListWithFlagsFactory(
+ {
+ "variables": dict,
+ "input": six.text_type,
+ "sandbox_vars": dict,
+ "no_chromium": bool,
+ "no_unified": bool,
+ "non_unified_sources": StrictOrderingOnAppendList,
+ "action_overrides": dict,
+ }
+ ),
+ list,
+ """Defines a list of object directories handled by gyp configurations.
+
+ Elements of this list give the relative object directory. For each
+ element of the list, GYP_DIRS may be accessed as a dictionary
+ (GYP_DIRS[foo]). The object this returns has attributes that need to be
+ set to further specify gyp processing:
+ - input, gives the path to the root gyp configuration file for that
+ object directory.
+ - variables, a dictionary containing variables and values to pass
+ to the gyp processor.
+ - sandbox_vars, a dictionary containing variables and values to
+ pass to the mozbuild processor on top of those derived from gyp
+ configuration.
+ - no_chromium, a boolean which if set to True disables some
+ special handling that emulates gyp_chromium.
+ - no_unified, a boolean which if set to True disables source
+ file unification entirely.
+ - non_unified_sources, a list containing source files, relative to
+ the current moz.build, that should be excluded from source file
+ unification.
+ - action_overrides, a dict of action_name to values of the `script`
+ attribute to use for GENERATED_FILES for the specified action.
+
+ Typical use looks like::
+ GYP_DIRS += ['foo', 'bar']
+ GYP_DIRS['foo'].input = 'foo/foo.gyp'
+ GYP_DIRS['foo'].variables = {
+ 'foo': 'bar',
+ (...)
+ }
+ (...)
+ """,
+ ),
+ "SPHINX_TREES": (
+ dict,
+ dict,
+ """Describes what the Sphinx documentation tree will look like.
+
+ Keys are relative directories inside the final Sphinx documentation
+ tree to install files into. Values are directories (relative to this
+ file) whose content to copy into the Sphinx documentation tree.
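+
+ For example, to copy the local ``docs`` directory into the ``myproject``
+ subtree of the generated documentation (illustrative names)::
+
+ SPHINX_TREES['myproject'] = 'docs'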
+ """,
+ ),
+ "SPHINX_PYTHON_PACKAGE_DIRS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Directories containing Python packages that Sphinx documents.
+ """,
+ ),
+ "COMPILE_FLAGS": (
+ CompileFlags,
+ dict,
+ """Recipe for compile flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "LINK_FLAGS": (
+ LinkFlags,
+ dict,
+ """Recipe for linker flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "WASM_FLAGS": (
+ WasmFlags,
+ dict,
+ """Recipe for wasm flags for this context. Not to be
+ manipulated directly.
+ """,
+ ),
+ "ASM_FLAGS": (
+ AsmFlags,
+ dict,
+ """Recipe for linker flags for this context. Not to be
+ manipulated directly.
+ """,
+ ),
+ "CFLAGS": (
+ List,
+ list,
+ """Flags passed to the C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_COMPILE_FLAGS": (
+ HostCompileFlags,
+ dict,
+ """Recipe for host compile flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "HOST_DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare for host compilation.
+ See ``DEFINES`` for specifics.
+ """,
+ ),
+ "WASM_CFLAGS": (
+ List,
+ list,
+ """Flags passed to the C-to-wasm compiler for all of the C
+ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WASM_CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the C++-to-wasm compiler for all of the
+ C++ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WASM_DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare for wasm compilation.
+ See ``DEFINES`` for specifics.
+ """,
+ ),
+ "CMFLAGS": (
+ List,
+ list,
+ """Flags passed to the Objective-C compiler for all of the Objective-C
+ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "CMMFLAGS": (
+ List,
+ list,
+ """Flags passed to the Objective-C++ compiler for all of the
+ Objective-C++ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "ASFLAGS": (
+ List,
+ list,
+ """Flags passed to the assembler for all of the assembly source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the assembler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_CFLAGS": (
+ List,
+ list,
+ """Flags passed to the host C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the host C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "LDFLAGS": (
+ List,
+ list,
+ """Flags passed to the linker when linking all of the libraries and
+ executables declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "EXTRA_DSO_LDOPTS": (
+ List,
+ list,
+ """Flags passed to the linker when linking a shared library.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WIN32_EXE_LDFLAGS": (
+ List,
+ list,
+ """Flags passed to the linker when linking a Windows .exe executable
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+
+ This variable only has an effect on Windows.
+ """,
+ ),
+ "TEST_HARNESS_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed for test harnesses.
+
+ ``TEST_HARNESS_FILES`` can be used to install files to any directory
+ under $objdir/_tests. Files can be appended to a field to indicate
+ which subdirectory they should be exported to. For example,
+ to export ``foo.py`` to ``_tests/foo``, append to
+ ``TEST_HARNESS_FILES`` like so::
+
+ TEST_HARNESS_FILES.foo += ['foo.py']
+
+ Files from topsrcdir and the objdir can also be installed by prefixing
+ the path(s) with a '/' character and a '!' character, respectively::
+
+ TEST_HARNESS_FILES.path += ['/build/bar.py', '!quux.py']
+ """,
+ ),
+ "NO_EXPAND_LIBS": (
+ bool,
+ bool,
+ """Forces to build a real static library, and no corresponding fake
+ library.
+ """,
+ ),
+ "USE_NASM": (
+ bool,
+ bool,
+ """Use the nasm assembler to assemble assembly files from SOURCES.
+
+ By default, the build will use the toolchain assembler, $(AS), to
+ assemble source files in assembly language (.s or .asm files). Setting
+ this value to ``True`` will cause it to use nasm instead.
+
+ If nasm is not available on this system, or does not support the
+ current target architecture, an error will be raised.
+ """,
+ ),
+ "USE_INTEGRATED_CLANGCL_AS": (
+ bool,
+ bool,
+ """Use the integrated clang-cl assembler to assemble assembly files from SOURCES.
+
+ This allows using clang-cl to assemble assembly files which is useful
+ on platforms like aarch64 where the alternative is to have to run a
+ pre-processor to generate files with suitable syntax.
+ """,
+ ),
+}
+
+# Sanity check: we don't want any variable above to have a list as storage type.
+for name, (storage_type, input_types, docs) in VARIABLES.items():
+ if storage_type == list:
+ raise RuntimeError('%s has a "list" storage type. Use "List" instead.' % name)
+
+# Set of variables that are only allowed in templates:
+TEMPLATE_VARIABLES = {
+ "CPP_UNIT_TESTS",
+ "FORCE_SHARED_LIB",
+ "HOST_PROGRAM",
+ "HOST_LIBRARY_NAME",
+ "HOST_SIMPLE_PROGRAMS",
+ "IS_FRAMEWORK",
+ "IS_GKRUST",
+ "LIBRARY_NAME",
+ "PROGRAM",
+ "SIMPLE_PROGRAMS",
+}
+
+# Add a note to template variable documentation.
+for name in TEMPLATE_VARIABLES:
+ if name not in VARIABLES:
+ raise RuntimeError("%s is in TEMPLATE_VARIABLES but not in VARIABLES." % name)
+ storage_type, input_types, docs = VARIABLES[name]
+ docs += "This variable is only available in templates.\n"
+ VARIABLES[name] = (storage_type, input_types, docs)
+
+
+# The set of functions exposed to the sandbox.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding function from a given sandbox,
+# (argument types), docs)
+#
+# The first element is an attribute on Sandbox that should be a function type.
+#
+FUNCTIONS = {
+ "include": (
+ lambda self: self._include,
+ (SourcePath,),
+ """Include another mozbuild file in the context of this one.
+
+ This is similar to a ``#include`` in C languages. The filename passed to
+ the function will be read and its contents will be evaluated within the
+ context of the calling file.
+
+ If a relative path is given, it is evaluated as relative to the file
+ currently being processed. If there is a chain of multiple include(),
+ the relative path computation is from the most recent/active file.
+
+ If an absolute path is given, it is evaluated from ``TOPSRCDIR``. In
+ other words, ``include('/foo')`` references the path
+ ``TOPSRCDIR + '/foo'``.
+
+ Example usage
+ ^^^^^^^^^^^^^
+
+ Include ``sibling.build`` from the current directory::
+
+ include('sibling.build')
+
+ Include ``foo.build`` from a path within the top source directory::
+
+ include('/elsewhere/foo.build')
+ """,
+ ),
+ "export": (
+ lambda self: self._export,
+ (str,),
+ """Make the specified variable available to all child directories.
+
+ The variable specified by the argument string is added to the
+ environment of all directories specified in the DIRS and TEST_DIRS
+ variables. If those directories themselves have child directories,
+ the variable will be exported to all of them.
+
+ The value used for the variable is the final value at the end of the
+ moz.build file, so it is possible (but not recommended style) to place
+ the export before the definition of the variable.
+
+ This function is limited to the upper-case variables that have special
+ meaning in moz.build files.
+
+ NOTE: Please consult with a build peer (on the #build channel at
+ https://chat.mozilla.org) before adding a new use of this function.
+
+ Example usage
+ ^^^^^^^^^^^^^
+
+ To make all children directories install as the given extension::
+
+ XPI_NAME = 'cool-extension'
+ export('XPI_NAME')
+ """,
+ ),
+ "warning": (
+ lambda self: self._warning,
+ (str,),
+ """Issue a warning.
+
+ Warnings are string messages that are printed during execution.
+
+ Warnings do not abort execution; processing continues after they
+ are printed.
+ """,
+ ),
+ "error": (
+ lambda self: self._error,
+ (str,),
+ """Issue a fatal error.
+
+ If this function is called, processing is aborted immediately.
+ """,
+ ),
+ "template": (
+ lambda self: self._template_decorator,
+ (FunctionType,),
+ """Decorator for template declarations.
+
+ Templates are a special kind of function that can be declared in
+ mozbuild files. Uppercase variables assigned in the function scope
+ are considered to be the result of the template.
+
+ Unlike traditional Python functions:
+ - return values from template functions are ignored,
+ - template functions don't have access to the global scope.
+
+ Example template
+ ^^^^^^^^^^^^^^^^
+
+ The following ``Program`` template sets two variables ``PROGRAM`` and
+ ``USE_LIBS``. ``PROGRAM`` is set to the argument given on the template
+ invocation, and ``USE_LIBS`` to contain "mozglue"::
+
+ @template
+ def Program(name):
+ PROGRAM = name
+ USE_LIBS += ['mozglue']
+
+ Template invocation
+ ^^^^^^^^^^^^^^^^^^^
+
+ A template is invoked in the form of a function call::
+
+ Program('myprog')
+
+ The result of the template, being all the uppercase variables it sets,
+ is mixed into the existing set of variables defined in the mozbuild file
+ invoking the template::
+
+ FINAL_TARGET = 'dist/other'
+ USE_LIBS += ['mylib']
+ Program('myprog')
+ USE_LIBS += ['otherlib']
+
+ The above mozbuild results in the following variables set:
+
+ - ``FINAL_TARGET`` is 'dist/other'
+ - ``USE_LIBS`` is ['mylib', 'mozglue', 'otherlib']
+ - ``PROGRAM`` is 'myprog'
+
+ """,
+ ),
+}
+
+
+TestDirsPlaceHolder = List()
+
+
+# Special variables. These complement VARIABLES.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding value from a given context, type, docs)
+#
+SPECIAL_VARIABLES = {
+ "TOPSRCDIR": (
+ lambda context: context.config.topsrcdir,
+ str,
+ """Constant defining the top source directory.
+
+ The top source directory is the parent directory containing the source
+ code and all build files. It is typically the root directory of a
+ cloned repository.
+ """,
+ ),
+ "TOPOBJDIR": (
+ lambda context: context.config.topobjdir,
+ str,
+ """Constant defining the top object directory.
+
+ The top object directory is the parent directory which will contain
+ the output of the build. This is commonly referred to as "the object
+ directory."
+ """,
+ ),
+ "RELATIVEDIR": (
+ lambda context: context.relsrcdir,
+ str,
+ """Constant defining the relative path of this file.
+
+ The relative path is from ``TOPSRCDIR``. This is defined as relative
+ to the main file being executed, regardless of whether additional
+ files have been included using ``include()``.
+ """,
+ ),
+ "SRCDIR": (
+ lambda context: context.srcdir,
+ str,
+ """Constant defining the source directory of this file.
+
+ This is the path inside ``TOPSRCDIR`` where this file is located. It
+ is the same as ``TOPSRCDIR + RELATIVEDIR``.
+ """,
+ ),
+ "OBJDIR": (
+ lambda context: context.objdir,
+ str,
+ """The path to the object directory for this file.
+
+ It is the same as ``TOPOBJDIR + RELATIVEDIR``.
+ """,
+ ),
+ "CONFIG": (
+ lambda context: ReadOnlyKeyedDefaultDict(
+ lambda key: context.config.substs.get(key)
+ ),
+ dict,
+ """Dictionary containing the current configuration variables.
+
+ All the variables defined by the configuration system are available
+ through this object. e.g. ``ENABLE_TESTS``, ``CFLAGS``, etc.
+
+ Values in this container are read-only. Attempts at changing values
+ will result in a run-time error.
+
+ Access to an unknown variable will return None.
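+
+ For example, to set a define only when a configure option is enabled
+ (``ENABLE_EXTRA_CHECKS`` is an illustrative define name)::
+
+ if CONFIG['MOZ_DEBUG']:
+     DEFINES['ENABLE_EXTRA_CHECKS'] = True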
+ """,
+ ),
+ "EXTRA_COMPONENTS": (
+ lambda context: context["FINAL_TARGET_FILES"].components._strings,
+ list,
+ """Additional component files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/components/``.
+ """,
+ ),
+ "EXTRA_PP_COMPONENTS": (
+ lambda context: context["FINAL_TARGET_PP_FILES"].components._strings,
+ list,
+ """Javascript XPCOM files.
+
+ This variable contains a list of files to preprocess. Generated
+ files will be installed in the ``/components`` directory of the distribution.
+ """,
+ ),
+ "JS_PREFERENCE_FILES": (
+ lambda context: context["FINAL_TARGET_FILES"].defaults.pref._strings,
+ list,
+ """Exported JavaScript files.
+
+ A list of files copied into the dist directory for packaging and installation.
+ The destination is the GRE or application preferences directory,
+ depending on what is being built.
+ """,
+ ),
+ "JS_PREFERENCE_PP_FILES": (
+ lambda context: context["FINAL_TARGET_PP_FILES"].defaults.pref._strings,
+ list,
+ """Like JS_PREFERENCE_FILES, preprocessed..
+ """,
+ ),
+ "RESOURCE_FILES": (
+ lambda context: context["FINAL_TARGET_FILES"].res,
+ list,
+ """List of resources to be exported, and in which subdirectories.
+
+ ``RESOURCE_FILES`` is used to list the resource files to be exported to
+ ``dist/bin/res``, but it can be used for other files as well. This variable
+ behaves as a list when appending filenames for resources in the top-level
+ directory. Files can also be appended to a field to indicate which
+ subdirectory they should be exported to. For example, to export
+ ``foo.res`` to the top-level directory, and ``bar.res`` to ``fonts/``,
+ append to ``RESOURCE_FILES`` like so::
+
+ RESOURCE_FILES += ['foo.res']
+ RESOURCE_FILES.fonts += ['bar.res']
+ """,
+ ),
+ "CONTENT_ACCESSIBLE_FILES": (
+ lambda context: context["FINAL_TARGET_FILES"].contentaccessible,
+ list,
+ """List of files which can be accessed by web content through resource:// URIs.
+
+ ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
+ to ``dist/bin/contentaccessible``. Files can also be appended to a
+ field to indicate which subdirectory they should be exported to.
+ """,
+ ),
+ "EXTRA_JS_MODULES": (
+ lambda context: context["FINAL_TARGET_FILES"].modules,
+ list,
+ """Additional JavaScript files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/modules``.
+ """,
+ ),
+ "EXTRA_PP_JS_MODULES": (
+ lambda context: context["FINAL_TARGET_PP_FILES"].modules,
+ list,
+ """Additional JavaScript files to distribute.
+
+ This variable contains a list of files to copy into
+ ``$(FINAL_TARGET)/modules``, after preprocessing.
+ """,
+ ),
+ "TESTING_JS_MODULES": (
+ lambda context: context["TEST_HARNESS_FILES"].modules,
+ list,
+ """JavaScript modules to install in the test-only destination.
+
+ Some JavaScript modules (JSMs) are test-only and not distributed
+ with Firefox. This variable defines them.
+
+ To install modules in a subdirectory, use properties of this
+ variable to control the final destination. e.g.
+
+ ``TESTING_JS_MODULES.foo += ['module.jsm']``.
+ """,
+ ),
+ "TEST_DIRS": (
+ lambda context: context["DIRS"]
+ if context.config.substs.get("ENABLE_TESTS")
+ else TestDirsPlaceHolder,
+ list,
+ """Like DIRS but only for directories that contain test-only code.
+
+ If tests are not enabled, this variable will be ignored.
+
+ This variable may go away once the transition away from Makefiles is
+ complete.
+ """,
+ ),
+}
+
+# Deprecation hints.
+DEPRECATION_HINTS = {
+ "ASM_FLAGS": """
+ Please use
+
+ ASFLAGS
+
+ instead of manipulating ASM_FLAGS directly.
+ """,
+ "CPP_UNIT_TESTS": """
+ Please use
+
+ CppUnitTests(['foo', 'bar'])
+
+ instead of
+
+ CPP_UNIT_TESTS += ['foo', 'bar']
+ """,
+ "DISABLE_STL_WRAPPING": """
+ Please use
+
+ DisableStlWrapping()
+
+ instead of
+
+ DISABLE_STL_WRAPPING = True
+ """,
+ "HOST_PROGRAM": """
+ Please use
+
+ HostProgram('foo')
+
+ instead of
+
+ HOST_PROGRAM = 'foo'
+ """,
+ "HOST_LIBRARY_NAME": """
+ Please use
+
+ HostLibrary('foo')
+
+ instead of
+
+ HOST_LIBRARY_NAME = 'foo'
+ """,
+ "HOST_SIMPLE_PROGRAMS": """
+ Please use
+
+ HostSimplePrograms(['foo', 'bar'])
+
+ instead of
+
+ HOST_SIMPLE_PROGRAMS += ['foo', 'bar']"
+ """,
+ "LIBRARY_NAME": """
+ Please use
+
+ Library('foo')
+
+ instead of
+
+ LIBRARY_NAME = 'foo'
+ """,
+ "NO_VISIBILITY_FLAGS": """
+ Please use
+
+ NoVisibilityFlags()
+
+ instead of
+
+ NO_VISIBILITY_FLAGS = True
+ """,
+ "PROGRAM": """
+ Please use
+
+ Program('foo')
+
+ instead of
+
+ PROGRAM = 'foo'"
+ """,
+ "SIMPLE_PROGRAMS": """
+ Please use
+
+ SimplePrograms(['foo', 'bar'])
+
+ instead of
+
+ SIMPLE_PROGRAMS += ['foo', 'bar']"
+ """,
+ "ALLOW_COMPILER_WARNINGS": """
+ Please use
+
+ AllowCompilerWarnings()
+
+ instead of
+
+ ALLOW_COMPILER_WARNINGS = True
+ """,
+ "FORCE_SHARED_LIB": """
+ Please use
+
+ SharedLibrary('foo')
+
+ instead of
+
+ Library('foo') [ or LIBRARY_NAME = 'foo' ]
+ FORCE_SHARED_LIB = True
+ """,
+ "IS_FRAMEWORK": """
+ Please use
+
+ Framework('foo')
+
+ instead of
+
+ Library('foo') [ or LIBRARY_NAME = 'foo' ]
+ IS_FRAMEWORK = True
+ """,
+ "IS_GKRUST": """
+ Please use
+
+ RustLibrary('gkrust', ... is_gkrust=True)
+
+ instead of
+
+ RustLibrary('gkrust') [ or LIBRARY_NAME = 'gkrust' ]
+ IS_GKRUST = True
+ """,
+ "TOOL_DIRS": "Please use the DIRS variable instead.",
+ "TEST_TOOL_DIRS": "Please use the TEST_DIRS variable instead.",
+ "PARALLEL_DIRS": "Please use the DIRS variable instead.",
+ "NO_DIST_INSTALL": """
+ Please use
+
+ DIST_INSTALL = False
+
+ instead of
+
+ NO_DIST_INSTALL = True
+ """,
+ "GENERATED_SOURCES": """
+ Please use
+
+ SOURCES += [ '!foo.cpp' ]
+
+ instead of
+
+ GENERATED_SOURCES += [ 'foo.cpp' ]
+ """,
+ "GENERATED_INCLUDES": """
+ Please use
+
+ LOCAL_INCLUDES += [ '!foo' ]
+
+ instead of
+
+ GENERATED_INCLUDES += [ 'foo' ]
+ """,
+ "DIST_FILES": """
+ Please use
+
+ FINAL_TARGET_PP_FILES += [ 'foo' ]
+
+ instead of
+
+ DIST_FILES += [ 'foo' ]
+ """,
+}
+
+# Make sure that all template variables have a deprecation hint.
+for name in TEMPLATE_VARIABLES:
+ if name not in DEPRECATION_HINTS:
+ raise RuntimeError("Missing deprecation hint for %s" % name)
diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py
new file mode 100644
index 0000000000..84a47f90cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -0,0 +1,1369 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Data structures representing Mozilla's source tree.
+
+The frontend files are parsed into static data structures. These data
+structures are defined in this module.
+
+All data structures of interest are children of the TreeMetadata class.
+
+ Logic for populating these data structures is not defined in this module.
+Instead, what we have here are dumb container classes. The emitter module
+contains the code for converting executed mozbuild files into these data
+structures.
+"""
+
+from collections import OrderedDict, defaultdict
+
+import mozpack.path as mozpath
+import six
+from mozpack.chrome.manifest import ManifestEntry
+
+from mozbuild.frontend.context import ObjDirPath, SourcePath
+
+from ..testing import all_test_flavors
+from ..util import group_unified_files
+from .context import FinalTargetValue
+
+
+class TreeMetadata(object):
+ """Base class for all data being captured."""
+
+ __slots__ = ()
+
+ def to_dict(self):
+ return {k.lower(): getattr(self, k) for k in self.DICT_ATTRS}
+
+
+class ContextDerived(TreeMetadata):
+ """Build object derived from a single Context instance.
+
+ It holds fields common to all context derived classes. This class is likely
+ never instantiated directly but is instead derived from.
+ """
+
+ __slots__ = (
+ "context_main_path",
+ "context_all_paths",
+ "topsrcdir",
+ "topobjdir",
+ "relsrcdir",
+ "srcdir",
+ "objdir",
+ "config",
+ "_context",
+ )
+
+ def __init__(self, context):
+ TreeMetadata.__init__(self)
+
+ # Capture the files that were evaluated to fill this context.
+ self.context_main_path = context.main_path
+ self.context_all_paths = context.all_paths
+
+ # Basic directory state.
+ self.topsrcdir = context.config.topsrcdir
+ self.topobjdir = context.config.topobjdir
+
+ self.relsrcdir = context.relsrcdir
+ self.srcdir = context.srcdir
+ self.objdir = context.objdir
+
+ self.config = context.config
+
+ self._context = context
+
+ @property
+ def install_target(self):
+ return self._context["FINAL_TARGET"]
+
+ @property
+ def installed(self):
+ return self._context["DIST_INSTALL"] is not False
+
+ @property
+ def defines(self):
+ defines = self._context["DEFINES"]
+ return Defines(self._context, defines) if defines else None
+
+ @property
+ def relobjdir(self):
+ return mozpath.relpath(self.objdir, self.topobjdir)
+
+
+class HostMixin(object):
+ @property
+ def defines(self):
+ defines = self._context["HOST_DEFINES"]
+ return HostDefines(self._context, defines) if defines else None
+
+
+class DirectoryTraversal(ContextDerived):
+ """Describes how directory traversal for building should work.
+
+ This build object is likely only of interest to the recursive make backend.
+ Other build backends should (ideally) not attempt to mimic the behavior of
+ the recursive make backend. The only reason this exists is to support the
+ existing recursive make backend until the transition to mozbuild frontend
+ files is complete and we move to a more optimal build backend.
+
+ Fields in this class correspond to similarly named variables in the
+ frontend files.
+ """
+
+ __slots__ = ("dirs",)
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.dirs = []
+
+
+class BaseConfigSubstitution(ContextDerived):
+ """Base class describing autogenerated files as part of config.status."""
+
+ __slots__ = ("input_path", "output_path", "relpath")
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.input_path = None
+ self.output_path = None
+ self.relpath = None
+
+
+class ConfigFileSubstitution(BaseConfigSubstitution):
+ """Describes a config file that will be generated using substitutions."""
+
+
+class VariablePassthru(ContextDerived):
+ """A dict of variables to pass through to backend.mk unaltered.
+
+ The purpose of this object is to facilitate rapid transitioning of
+ variables from Makefile.in to moz.build. In the ideal world, this class
+ does not exist and every variable has a richer class representing it.
+ As long as we rely on this class, we lose the ability to have flexibility
+ in our build backends since we will continue to be tied to our rules.mk.
+ """
+
+ __slots__ = "variables"
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.variables = {}
+
+
+class ComputedFlags(ContextDerived):
+ """Aggregate flags for consumption by various backends."""
+
+ __slots__ = ("flags",)
+
+ def __init__(self, context, reader_flags):
+ ContextDerived.__init__(self, context)
+ self.flags = reader_flags
+
+ def resolve_flags(self, key, value):
+ # Bypass checks done by CompileFlags that would keep us from
+ # setting a value here.
+ dict.__setitem__(self.flags, key, value)
+
+ def get_flags(self):
+ flags = defaultdict(list)
+ for key, _, dest_vars in self.flags.flag_variables:
+ value = self.flags.get(key)
+ if value:
+ for dest_var in dest_vars:
+ flags[dest_var].extend(value)
+ return sorted(flags.items())
+
+
+class XPIDLModule(ContextDerived):
+ """Describes an XPIDL module to be compiled."""
+
+ __slots__ = ("name", "idl_files")
+
+ def __init__(self, context, name, idl_files):
+ ContextDerived.__init__(self, context)
+
+ assert all(isinstance(idl, SourcePath) for idl in idl_files)
+ self.name = name
+ self.idl_files = idl_files
+
+
+class BaseDefines(ContextDerived):
+ """Context derived container object for DEFINES/HOST_DEFINES,
+ which are OrderedDicts.
+ """
+
+ __slots__ = "defines"
+
+ def __init__(self, context, defines):
+ ContextDerived.__init__(self, context)
+ self.defines = defines
+
+ def get_defines(self):
+ for define, value in six.iteritems(self.defines):
+ if value is True:
+ yield ("-D%s" % define)
+ elif value is False:
+ yield ("-U%s" % define)
+ else:
+ yield ("-D%s=%s" % (define, value))
+
+ def update(self, more_defines):
+ if isinstance(more_defines, Defines):
+ self.defines.update(more_defines.defines)
+ else:
+ self.defines.update(more_defines)
+
+
+class Defines(BaseDefines):
+ pass
+
+
+class HostDefines(BaseDefines):
+ pass
+
+
+class WasmDefines(BaseDefines):
+ pass
+
+
+class WebIDLCollection(ContextDerived):
+ """Collects WebIDL info referenced during the build."""
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.sources = set()
+ self.generated_sources = set()
+ self.generated_events_sources = set()
+ self.preprocessed_sources = set()
+ self.test_sources = set()
+ self.preprocessed_test_sources = set()
+ self.example_interfaces = set()
+
+ def all_regular_sources(self):
+ return (
+ self.sources
+ | self.generated_sources
+ | self.generated_events_sources
+ | self.preprocessed_sources
+ )
+
+ def all_regular_basenames(self):
+ return [mozpath.basename(source) for source in self.all_regular_sources()]
+
+ def all_regular_stems(self):
+ return [mozpath.splitext(b)[0] for b in self.all_regular_basenames()]
+
+ def all_regular_bindinggen_stems(self):
+ for stem in self.all_regular_stems():
+ yield "%sBinding" % stem
+
+ for source in self.generated_events_sources:
+ yield mozpath.splitext(mozpath.basename(source))[0]
+
+ def all_regular_cpp_basenames(self):
+ for stem in self.all_regular_bindinggen_stems():
+ yield "%s.cpp" % stem
+
+ def all_test_sources(self):
+ return self.test_sources | self.preprocessed_test_sources
+
+ def all_test_basenames(self):
+ return [mozpath.basename(source) for source in self.all_test_sources()]
+
+ def all_test_stems(self):
+ return [mozpath.splitext(b)[0] for b in self.all_test_basenames()]
+
+ def all_test_cpp_basenames(self):
+ return sorted("%sBinding.cpp" % s for s in self.all_test_stems())
+
+ def all_static_sources(self):
+ return self.sources | self.generated_events_sources | self.test_sources
+
+ def all_non_static_sources(self):
+ return self.generated_sources | self.all_preprocessed_sources()
+
+ def all_non_static_basenames(self):
+ return [mozpath.basename(s) for s in self.all_non_static_sources()]
+
+ def all_preprocessed_sources(self):
+ return self.preprocessed_sources | self.preprocessed_test_sources
+
+ def all_sources(self):
+ return set(self.all_regular_sources()) | set(self.all_test_sources())
+
+ def all_basenames(self):
+ return [mozpath.basename(source) for source in self.all_sources()]
+
+ def all_stems(self):
+ return [mozpath.splitext(b)[0] for b in self.all_basenames()]
+
+ def generated_events_basenames(self):
+ return [mozpath.basename(s) for s in self.generated_events_sources]
+
+ def generated_events_stems(self):
+ return [mozpath.splitext(b)[0] for b in self.generated_events_basenames()]
+
+ @property
+ def unified_source_mapping(self):
+ # Bindings are compiled in unified mode to speed up compilation and
+ # to reduce linker memory size. Note that test bindings are separated
+ # from regular ones so test bindings aren't shipped.
+ return list(
+ group_unified_files(
+ sorted(self.all_regular_cpp_basenames()),
+ unified_prefix="UnifiedBindings",
+ unified_suffix="cpp",
+ files_per_unified_file=32,
+ )
+ )
+
+ def all_source_files(self):
+ from mozwebidlcodegen import WebIDLCodegenManager
+
+ return sorted(list(WebIDLCodegenManager.GLOBAL_DEFINE_FILES)) + sorted(
+ set(p for p, _ in self.unified_source_mapping)
+ )
+
+
+class IPDLCollection(ContextDerived):
+ """Collects IPDL files during the build."""
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.sources = set()
+ self.preprocessed_sources = set()
+
+ def all_sources(self):
+ return self.sources | self.preprocessed_sources
+
+ def all_regular_sources(self):
+ return self.sources
+
+ def all_preprocessed_sources(self):
+ return self.preprocessed_sources
+
+ def all_source_files(self):
+ # Source files generated by IPDL are built as generated UnifiedSources
+ # from the context which included the IPDL file, rather than the context
+ # which builds the IPDLCollection, so we report no files here.
+ return []
+
+
+class XPCOMComponentManifests(ContextDerived):
+ """Collects XPCOM manifest files during the build."""
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.manifests = set()
+
+ def all_sources(self):
+ return self.manifests
+
+ def all_source_files(self):
+ return []
+
+
+class LinkageWrongKindError(Exception):
+ """Error thrown when trying to link objects of the wrong kind"""
+
+
+class Linkable(ContextDerived):
+ """Generic context derived container object for programs and libraries"""
+
+ __slots__ = (
+ "cxx_link",
+ "lib_defines",
+ "linked_libraries",
+ "linked_system_libs",
+ "sources",
+ )
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+ self.cxx_link = False
+ self.linked_libraries = []
+ self.linked_system_libs = []
+ self.lib_defines = Defines(context, OrderedDict())
+ self.sources = defaultdict(list)
+
+ def link_library(self, obj):
+ assert isinstance(obj, BaseLibrary)
+ if obj.KIND != self.KIND:
+ raise LinkageWrongKindError("%s != %s" % (obj.KIND, self.KIND))
+ self.linked_libraries.append(obj)
+ if obj.cxx_link and not isinstance(obj, SharedLibrary):
+ self.cxx_link = True
+ obj.refs.append(self)
+
+ def link_system_library(self, lib):
+ # The '$' check is here as a special temporary rule, allowing the
+ # inherited use of make variables, most notably in TK_LIBS.
+ if not lib.startswith("$") and not lib.startswith("-"):
+ type_var = "HOST_CC_TYPE" if self.KIND == "host" else "CC_TYPE"
+ compiler_type = self.config.substs.get(type_var)
+ if compiler_type in ("gcc", "clang"):
+ lib = "-l%s" % lib
+ elif self.KIND == "host":
+ lib = "%s%s%s" % (
+ self.config.host_import_prefix,
+ lib,
+ self.config.host_import_suffix,
+ )
+ else:
+ lib = "%s%s%s" % (
+ self.config.import_prefix,
+ lib,
+ self.config.import_suffix,
+ )
+ self.linked_system_libs.append(lib)
+
+ def source_files(self):
+ all_sources = []
+ # This is ordered for reproducibility and consistency with
+ # config/rules.mk.
+ for suffix in (".c", ".S", ".cpp", ".m", ".mm", ".s"):
+ all_sources += self.sources.get(suffix, [])
+ return all_sources
+
+ def _get_objs(self, sources):
+ obj_prefix = ""
+ if self.KIND == "host":
+ obj_prefix = "host_"
+
+ return [
+ mozpath.join(
+ self.objdir,
+ "%s%s.%s"
+ % (
+ obj_prefix,
+ mozpath.splitext(mozpath.basename(f))[0],
+ self._obj_suffix(),
+ ),
+ )
+ for f in sources
+ ]
+
+ def _obj_suffix(self):
+ """Can be overridden by a base class for custom behavior."""
+ return self.config.substs.get("OBJ_SUFFIX", "")
+
+ @property
+ def objs(self):
+ return self._get_objs(self.source_files())
+
+
+class BaseProgram(Linkable):
+ """Context derived container object for programs, which is a unicode
+ string.
+
+ This class handles automatically appending a binary suffix to the program
+ name.
+ If the suffix is not defined, the program name is unchanged.
+ Otherwise, if the program name already ends with the given suffix, it is
+ unchanged. Otherwise, the suffix is appended to the program name.
+ """
+
+ __slots__ = "program"
+
+ DICT_ATTRS = {"install_target", "KIND", "program", "relobjdir"}
+
+ def __init__(self, context, program, is_unit_test=False):
+ Linkable.__init__(self, context)
+
+ bin_suffix = context.config.substs.get(self.SUFFIX_VAR, "")
+ if not program.endswith(bin_suffix):
+ program += bin_suffix
+ self.program = program
+ self.is_unit_test = is_unit_test
+
+ @property
+ def output_path(self):
+ if self.installed:
+ return ObjDirPath(
+ self._context, "!/" + mozpath.join(self.install_target, self.program)
+ )
+ else:
+ return ObjDirPath(self._context, "!" + self.program)
+
+ def __repr__(self):
+ return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.program)
+
+ @property
+ def name(self):
+ return self.program
+
+
+class Program(BaseProgram):
+ """Context derived container object for PROGRAM"""
+
+ SUFFIX_VAR = "BIN_SUFFIX"
+ KIND = "target"
+
+
+class HostProgram(HostMixin, BaseProgram):
+ """Context derived container object for HOST_PROGRAM"""
+
+ SUFFIX_VAR = "HOST_BIN_SUFFIX"
+ KIND = "host"
+
+ @property
+ def install_target(self):
+ return "dist/host/bin"
+
+
+class SimpleProgram(BaseProgram):
+ """Context derived container object for each program in SIMPLE_PROGRAMS"""
+
+ SUFFIX_VAR = "BIN_SUFFIX"
+ KIND = "target"
+
+ def source_files(self):
+ for srcs in self.sources.values():
+ for f in srcs:
+ if (
+ mozpath.basename(mozpath.splitext(f)[0])
+ == mozpath.splitext(self.program)[0]
+ ):
+ return [f]
+ return []
+
+
+class HostSimpleProgram(HostMixin, BaseProgram):
+ """Context derived container object for each program in
+ HOST_SIMPLE_PROGRAMS"""
+
+ SUFFIX_VAR = "HOST_BIN_SUFFIX"
+ KIND = "host"
+
+ def source_files(self):
+ for srcs in self.sources.values():
+ for f in srcs:
+ if (
+ "host_%s" % mozpath.basename(mozpath.splitext(f)[0])
+ == mozpath.splitext(self.program)[0]
+ ):
+ return [f]
+ return []
+
+
+def cargo_output_directory(context, target_var):
+ # cargo creates several directories and places its build artifacts
+ # in those directories. The directory structure depends not only
+ # on the target, but also on what sort of build we are doing.
+ rust_build_kind = "release"
+ if context.config.substs.get("MOZ_DEBUG_RUST"):
+ rust_build_kind = "debug"
+ return mozpath.join(context.config.substs[target_var], rust_build_kind)
+
+
+# Rust programs aren't really Linkable, since Cargo handles all the details
+# of linking things.
+class BaseRustProgram(ContextDerived):
+ __slots__ = (
+ "name",
+ "cargo_file",
+ "location",
+ "SUFFIX_VAR",
+ "KIND",
+ "TARGET_SUBST_VAR",
+ )
+
+ def __init__(self, context, name, cargo_file):
+ ContextDerived.__init__(self, context)
+ self.name = name
+ self.cargo_file = cargo_file
+ # Skip setting properties below which depend on cargo
+ # when we don't have a compile environment. The required
+ # config keys won't be available, but the instance variables
+ # that we don't set should never be accessed by the actual
+ # build in that case.
+ if not context.config.substs.get("COMPILE_ENVIRONMENT"):
+ return
+ cargo_dir = cargo_output_directory(context, self.TARGET_SUBST_VAR)
+ exe_file = "%s%s" % (name, context.config.substs.get(self.SUFFIX_VAR, ""))
+ self.location = mozpath.join(cargo_dir, exe_file)
+
+
+class RustProgram(BaseRustProgram):
+ SUFFIX_VAR = "BIN_SUFFIX"
+ KIND = "target"
+ TARGET_SUBST_VAR = "RUST_TARGET"
+
+
+class HostRustProgram(BaseRustProgram):
+ SUFFIX_VAR = "HOST_BIN_SUFFIX"
+ KIND = "host"
+ TARGET_SUBST_VAR = "RUST_HOST_TARGET"
+
+
+class RustTests(ContextDerived):
+ __slots__ = ("names", "features", "output_category")
+
+ def __init__(self, context, names, features):
+ ContextDerived.__init__(self, context)
+ self.names = names
+ self.features = features
+ self.output_category = "rusttests"
+
+
+class BaseLibrary(Linkable):
+ """Generic context derived container object for libraries."""
+
+ __slots__ = ("basename", "lib_name", "import_name", "refs")
+
+ def __init__(self, context, basename):
+ Linkable.__init__(self, context)
+
+ self.basename = self.lib_name = basename
+ if self.lib_name:
+ self.lib_name = "%s%s%s" % (
+ context.config.lib_prefix,
+ self.lib_name,
+ context.config.lib_suffix,
+ )
+ self.import_name = self.lib_name
+
+ self.refs = []
+
+ def __repr__(self):
+ return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.lib_name)
+
+ @property
+ def name(self):
+ return self.lib_name
+
+
+class Library(BaseLibrary):
+ """Context derived container object for a library"""
+
+ KIND = "target"
+ __slots__ = ()
+
+ def __init__(self, context, basename, real_name=None):
+ BaseLibrary.__init__(self, context, real_name or basename)
+ self.basename = basename
+
+
+class StaticLibrary(Library):
+ """Context derived container object for a static library"""
+
+ __slots__ = ("link_into", "no_expand_lib")
+
+ def __init__(
+ self, context, basename, real_name=None, link_into=None, no_expand_lib=False
+ ):
+ Library.__init__(self, context, basename, real_name)
+ self.link_into = link_into
+ self.no_expand_lib = no_expand_lib
+
+
+class SandboxedWasmLibrary(Library):
+ """Context derived container object for a static sandboxed wasm library"""
+
+ # This is a real static library; make it known to the build system.
+ no_expand_lib = True
+ KIND = "wasm"
+
+ def __init__(self, context, basename, real_name=None):
+ Library.__init__(self, context, basename, real_name)
+
+ # Wasm libraries are not going to compile unless we have a compiler
+ # for them.
+ assert context.config.substs["WASM_CC"] and context.config.substs["WASM_CXX"]
+
+ self.lib_name = "%s%s%s" % (
+ context.config.dll_prefix,
+ real_name or basename,
+ context.config.dll_suffix,
+ )
+
+ def _obj_suffix(self):
+ """Can be overridden by a base class for custom behavior."""
+ return self.config.substs.get("WASM_OBJ_SUFFIX", "")
+
+
+class BaseRustLibrary(object):
+ slots = (
+ "cargo_file",
+ "crate_type",
+ "dependencies",
+ "deps_path",
+ "features",
+ "output_category",
+ "is_gkrust",
+ )
+
+ def init(
+ self,
+ context,
+ basename,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust,
+ ):
+ self.is_gkrust = is_gkrust
+ self.cargo_file = cargo_file
+ self.crate_type = crate_type
+ # We need to adjust our naming here because cargo replaces '-' in
+ # package names defined in Cargo.toml with underscores in actual
+ # filenames. But we need to keep the basename consistent because
+ # many other things in the build system depend on that.
+ assert self.crate_type == "staticlib"
+ self.lib_name = "%s%s%s" % (
+ context.config.lib_prefix,
+ basename.replace("-", "_"),
+ context.config.lib_suffix,
+ )
+ self.dependencies = dependencies
+ self.features = features
+ self.output_category = context.get("RUST_LIBRARY_OUTPUT_CATEGORY")
+ # Skip setting properties below which depend on cargo
+ # when we don't have a compile environment. The required
+ # config keys won't be available, but the instance variables
+ # that we don't set should never be accessed by the actual
+ # build in that case.
+ if not context.config.substs.get("COMPILE_ENVIRONMENT"):
+ return
+ build_dir = mozpath.join(
+ context.config.topobjdir,
+ cargo_output_directory(context, self.TARGET_SUBST_VAR),
+ )
+ self.import_name = mozpath.join(build_dir, self.lib_name)
+ self.deps_path = mozpath.join(build_dir, "deps")
+
+
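The dash-to-underscore substitution in init above matches what cargo does on disk: a package named with dashes in Cargo.toml produces an artifact with underscores. A sketch of just that rule, with assumed lib_prefix/lib_suffix values:

    # cargo writes a package named "mozglue-static" out as libmozglue_static.a,
    # so the build system applies the same substitution (affixes assumed here).
    lib_prefix, lib_suffix = "lib", ".a"

    def rust_lib_name(basename):
        return "%s%s%s" % (lib_prefix, basename.replace("-", "_"), lib_suffix)

    assert rust_lib_name("mozglue-static") == "libmozglue_static.a"
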
+class RustLibrary(StaticLibrary, BaseRustLibrary):
+ """Context derived container object for a rust static library"""
+
+ KIND = "target"
+ TARGET_SUBST_VAR = "RUST_TARGET"
+ FEATURES_VAR = "RUST_LIBRARY_FEATURES"
+ LIB_FILE_VAR = "RUST_LIBRARY_FILE"
+ __slots__ = BaseRustLibrary.slots
+
+ def __init__(
+ self,
+ context,
+ basename,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust=False,
+ link_into=None,
+ ):
+ StaticLibrary.__init__(
+ self,
+ context,
+ basename,
+ link_into=link_into,
+            # A Rust library is a real static library; make
+            # it known to the build system.
+ no_expand_lib=True,
+ )
+ BaseRustLibrary.init(
+ self,
+ context,
+ basename,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust,
+ )
+
+
+class SharedLibrary(Library):
+ """Context derived container object for a shared library"""
+
+ __slots__ = (
+ "soname",
+ "variant",
+ "symbols_file",
+ "output_category",
+ "symbols_link_arg",
+ )
+
+ DICT_ATTRS = {
+ "basename",
+ "import_name",
+ "install_target",
+ "lib_name",
+ "relobjdir",
+ "soname",
+ }
+
+ FRAMEWORK = 1
+ MAX_VARIANT = 2
+
+ def __init__(
+ self,
+ context,
+ basename,
+ real_name=None,
+ soname=None,
+ variant=None,
+ symbols_file=False,
+ ):
+ assert variant in range(1, self.MAX_VARIANT) or variant is None
+ Library.__init__(self, context, basename, real_name)
+ self.variant = variant
+ self.lib_name = real_name or basename
+ self.output_category = context.get("SHARED_LIBRARY_OUTPUT_CATEGORY")
+ assert self.lib_name
+
+ if variant == self.FRAMEWORK:
+ self.import_name = self.lib_name
+ else:
+ self.import_name = "%s%s%s" % (
+ context.config.import_prefix,
+ self.lib_name,
+ context.config.import_suffix,
+ )
+ self.lib_name = "%s%s%s" % (
+ context.config.dll_prefix,
+ self.lib_name,
+ context.config.dll_suffix,
+ )
+ if soname:
+ self.soname = "%s%s%s" % (
+ context.config.dll_prefix,
+ soname,
+ context.config.dll_suffix,
+ )
+ else:
+ self.soname = self.lib_name
+
+ if symbols_file is False:
+ # No symbols file.
+ self.symbols_file = None
+ elif symbols_file is True:
+ # Symbols file with default name.
+ if context.config.substs["OS_TARGET"] == "WINNT":
+ self.symbols_file = "%s.def" % self.lib_name
+ else:
+ self.symbols_file = "%s.symbols" % self.lib_name
+ else:
+ # Explicitly provided name.
+ self.symbols_file = symbols_file
+
+ if self.symbols_file:
+ os_target = context.config.substs["OS_TARGET"]
+ if os_target == "Darwin":
+ self.symbols_link_arg = (
+ "-Wl,-exported_symbols_list," + self.symbols_file
+ )
+ elif os_target == "SunOS":
+ self.symbols_link_arg = (
+ "-z gnu-version-script-compat -Wl,--version-script,"
+ + self.symbols_file
+ )
+ elif os_target == "WINNT":
+ if context.config.substs.get("GNU_CC"):
+ self.symbols_link_arg = self.symbols_file
+ else:
+ self.symbols_link_arg = "-DEF:" + self.symbols_file
+ elif context.config.substs.get("GCC_USE_GNU_LD"):
+ self.symbols_link_arg = "-Wl,--version-script," + self.symbols_file
+
+
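SharedLibrary.__init__ folds three shapes of SYMBOLS_FILE input (False, True, an explicit name) into one attribute. A standalone sketch of that dispatch; the lib_name values are invented, and the explicit case is simplified (the real code additionally checks that the path exists or is generated):

    def pick_symbols_file(symbols_file, lib_name, os_target):
        # Mirrors the branch above: False -> no symbols file, True -> a
        # default name derived from the library, else use the given name.
        if symbols_file is False:
            return None
        if symbols_file is True:
            suffix = ".def" if os_target == "WINNT" else ".symbols"
            return lib_name + suffix
        return symbols_file

    assert pick_symbols_file(True, "xul.dll", "WINNT") == "xul.dll.def"
    assert pick_symbols_file(False, "libxul.so", "Linux") is None
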
+class HostSharedLibrary(HostMixin, Library):
+ """Context derived container object for a host shared library.
+
+    This class supports fewer features than SharedLibrary does for target
+    shared libraries. It currently has just enough build system support to
+    build the clang plugin."""
+
+ KIND = "host"
+
+ def __init__(self, context, basename):
+ Library.__init__(self, context, basename)
+ self.lib_name = "%s%s%s" % (
+ context.config.host_dll_prefix,
+ self.basename,
+ context.config.host_dll_suffix,
+ )
+
+
+class ExternalLibrary(object):
+ """Empty mixin for libraries built by an external build system."""
+
+
+class ExternalStaticLibrary(StaticLibrary, ExternalLibrary):
+ """Context derived container for static libraries built by an external
+ build system."""
+
+
+class ExternalSharedLibrary(SharedLibrary, ExternalLibrary):
+ """Context derived container for shared libraries built by an external
+ build system."""
+
+
+class HostLibrary(HostMixin, BaseLibrary):
+ """Context derived container object for a host library"""
+
+ KIND = "host"
+ no_expand_lib = False
+
+
+class HostRustLibrary(HostLibrary, BaseRustLibrary):
+ """Context derived container object for a host rust library"""
+
+ KIND = "host"
+ TARGET_SUBST_VAR = "RUST_HOST_TARGET"
+ FEATURES_VAR = "HOST_RUST_LIBRARY_FEATURES"
+ LIB_FILE_VAR = "HOST_RUST_LIBRARY_FILE"
+ __slots__ = BaseRustLibrary.slots
+ no_expand_lib = True
+
+ def __init__(
+ self,
+ context,
+ basename,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust,
+ ):
+ HostLibrary.__init__(self, context, basename)
+ BaseRustLibrary.init(
+ self,
+ context,
+ basename,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust,
+ )
+
+
+class TestManifest(ContextDerived):
+ """Represents a manifest file containing information about tests."""
+
+ __slots__ = (
+ # The type of test manifest this is.
+ "flavor",
+ # Maps source filename to destination filename. The destination
+ # path is relative from the tests root directory. Values are 2-tuples
+ # of (destpath, is_test_file) where the 2nd item is True if this
+ # item represents a test file (versus a support file).
+ "installs",
+ # A list of pattern matching installs to perform. Entries are
+ # (base, pattern, dest).
+ "pattern_installs",
+ # Where all files for this manifest flavor are installed in the unified
+ # test package directory.
+ "install_prefix",
+ # Set of files provided by an external mechanism.
+ "external_installs",
+ # Set of files required by multiple test directories, whose installation
+ # will be resolved when running tests.
+ "deferred_installs",
+ # The full path of this manifest file.
+ "path",
+ # The directory where this manifest is defined.
+ "directory",
+ # The parsed manifestparser.TestManifest instance.
+ "manifest",
+ # List of tests. Each element is a dict of metadata.
+ "tests",
+ # The relative path of the parsed manifest within the srcdir.
+ "manifest_relpath",
+ # The relative path of the parsed manifest within the objdir.
+ "manifest_obj_relpath",
+ # The relative paths to all source files for this manifest.
+ "source_relpaths",
+ # If this manifest is a duplicate of another one, this is the
+ # manifestparser.TestManifest of the other one.
+ "dupe_manifest",
+ )
+
+ def __init__(
+ self,
+ context,
+ path,
+ manifest,
+ flavor=None,
+ install_prefix=None,
+ relpath=None,
+ sources=(),
+ dupe_manifest=False,
+ ):
+ ContextDerived.__init__(self, context)
+
+ assert flavor in all_test_flavors()
+
+ self.path = path
+ self.directory = mozpath.dirname(path)
+ self.manifest = manifest
+ self.flavor = flavor
+ self.install_prefix = install_prefix
+ self.manifest_relpath = relpath
+ self.manifest_obj_relpath = relpath
+ self.source_relpaths = sources
+ self.dupe_manifest = dupe_manifest
+ self.installs = {}
+ self.pattern_installs = []
+ self.tests = []
+ self.external_installs = set()
+ self.deferred_installs = set()
+
+
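The installs attribute above maps absolute source paths to (destination, is_test_file) pairs. A sketch of the shape a populated manifest might carry; the paths are invented for illustration:

    # Hypothetical TestManifest.installs contents after emission.
    installs = {
        "/src/dom/tests/test_foo.html": ("mochitest/tests/dom/test_foo.html", True),
        "/src/dom/tests/head.js": ("mochitest/tests/dom/head.js", False),
    }
    test_files = [src for src, (dest, is_test) in installs.items() if is_test]
    assert test_files == ["/src/dom/tests/test_foo.html"]
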
+class LocalInclude(ContextDerived):
+ """Describes an individual local include path."""
+
+ __slots__ = ("path",)
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.path = path
+
+
+class PerSourceFlag(ContextDerived):
+ """Describes compiler flags specified for individual source files."""
+
+ __slots__ = ("file_name", "flags")
+
+ def __init__(self, context, file_name, flags):
+ ContextDerived.__init__(self, context)
+
+ self.file_name = file_name
+ self.flags = flags
+
+
+class JARManifest(ContextDerived):
+ """Describes an individual JAR manifest file and how to process it.
+
+ This class isn't very useful for optimizing backends yet because we don't
+ capture defines. We can't capture defines safely until all of them are
+ defined in moz.build and not Makefile.in files.
+ """
+
+ __slots__ = ("path",)
+
+ def __init__(self, context, path):
+ ContextDerived.__init__(self, context)
+
+ self.path = path
+
+
+class BaseSources(ContextDerived):
+ """Base class for files to be compiled during the build."""
+
+ __slots__ = ("files", "static_files", "generated_files", "canonical_suffix")
+
+ def __init__(self, context, static_files, generated_files, canonical_suffix):
+ ContextDerived.__init__(self, context)
+
+ # Sorted so output is consistent and we don't bump mtimes, but always
+ # order generated files after static ones to be consistent across build
+ # environments, which may have different objdir paths relative to
+ # topsrcdir.
+ self.static_files = sorted(static_files)
+ self.generated_files = sorted(generated_files)
+ self.files = self.static_files + self.generated_files
+ self.canonical_suffix = canonical_suffix
+
+
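The ordering contract in BaseSources (each group sorted, generated after static) is what keeps the final file list identical across build environments. A two-line sketch with invented names:

    static_files = ["b.cpp", "a.cpp"]
    generated_files = ["/objdir/gen.cpp"]
    # Mirrors BaseSources.__init__: sort within each group, generated last.
    files = sorted(static_files) + sorted(generated_files)
    assert files == ["a.cpp", "b.cpp", "/objdir/gen.cpp"]
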
+class Sources(BaseSources):
+ """Represents files to be compiled during the build."""
+
+ def __init__(self, context, static_files, generated_files, canonical_suffix):
+ BaseSources.__init__(
+ self, context, static_files, generated_files, canonical_suffix
+ )
+
+
+class PgoGenerateOnlySources(BaseSources):
+ """Represents files to be compiled during the build.
+
+ These files are only used during the PGO generation phase."""
+
+ def __init__(self, context, files):
+ BaseSources.__init__(self, context, files, [], ".cpp")
+
+
+class HostSources(HostMixin, BaseSources):
+ """Represents files to be compiled for the host during the build."""
+
+ def __init__(self, context, static_files, generated_files, canonical_suffix):
+ BaseSources.__init__(
+ self, context, static_files, generated_files, canonical_suffix
+ )
+
+
+class WasmSources(BaseSources):
+ """Represents files to be compiled with the wasm compiler during the build."""
+
+ def __init__(self, context, static_files, generated_files, canonical_suffix):
+ BaseSources.__init__(
+ self, context, static_files, generated_files, canonical_suffix
+ )
+
+
+class UnifiedSources(BaseSources):
+ """Represents files to be compiled in a unified fashion during the build."""
+
+ __slots__ = ("have_unified_mapping", "unified_source_mapping")
+
+ def __init__(self, context, static_files, generated_files, canonical_suffix):
+ BaseSources.__init__(
+ self, context, static_files, generated_files, canonical_suffix
+ )
+
+ unified_build = context.config.substs.get("ENABLE_UNIFIED_BUILD", False)
+ files_per_unified_file = (
+ context.get("FILES_PER_UNIFIED_FILE", 16) if unified_build else 1
+ )
+
+ self.have_unified_mapping = files_per_unified_file > 1
+
+ if self.have_unified_mapping:
+ # On Windows, path names have a maximum length of 255 characters,
+ # so avoid creating extremely long path names.
+ unified_prefix = context.relsrcdir
+ if len(unified_prefix) > 20:
+ unified_prefix = unified_prefix[-20:].split("/", 1)[-1]
+ unified_prefix = unified_prefix.replace("/", "_")
+
+ suffix = self.canonical_suffix[1:]
+ unified_prefix = "Unified_%s_%s" % (suffix, unified_prefix)
+ self.unified_source_mapping = list(
+ group_unified_files(
+ # NOTE: self.files is already (partially) sorted, and we
+ # intentionally do not re-sort it here to avoid a dependency
+ # on the build environment's objdir path.
+ self.files,
+ unified_prefix=unified_prefix,
+ unified_suffix=suffix,
+ files_per_unified_file=files_per_unified_file,
+ )
+ )
+
+
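The prefix truncation above exists to keep unified file names short enough for Windows path limits. A sketch of just that transformation; the directories are made up:

    def unified_prefix_for(relsrcdir, suffix):
        # Mirrors UnifiedSources: keep at most the trailing 20 characters,
        # drop a possibly truncated leading component, flatten separators.
        prefix = relsrcdir
        if len(prefix) > 20:
            prefix = prefix[-20:].split("/", 1)[-1]
        return "Unified_%s_%s" % (suffix, prefix.replace("/", "_"))

    assert unified_prefix_for("gfx/layers", "cpp") == "Unified_cpp_gfx_layers"
    assert unified_prefix_for("dom/media/webrtc/transport", "cpp") == (
        "Unified_cpp_webrtc_transport"
    )
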
+class InstallationTarget(ContextDerived):
+ """Describes the rules that affect where files get installed to."""
+
+ __slots__ = ("xpiname", "subdir", "target", "enabled")
+
+ def __init__(self, context):
+ ContextDerived.__init__(self, context)
+
+ self.xpiname = context.get("XPI_NAME", "")
+ self.subdir = context.get("DIST_SUBDIR", "")
+ self.target = context["FINAL_TARGET"]
+ self.enabled = context["DIST_INSTALL"] is not False
+
+ def is_custom(self):
+ """Returns whether or not the target is not derived from the default
+ given xpiname and subdir."""
+
+ return (
+ FinalTargetValue(dict(XPI_NAME=self.xpiname, DIST_SUBDIR=self.subdir))
+ == self.target
+ )
+
+
+class FinalTargetFiles(ContextDerived):
+ """Sandbox container object for FINAL_TARGET_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
+ """
+
+ __slots__ = "files"
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+
+class FinalTargetPreprocessedFiles(ContextDerived):
+ """Sandbox container object for FINAL_TARGET_PP_FILES, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing
+ FINAL_TARGET_PP_FILES.
+ """
+
+ __slots__ = "files"
+
+ def __init__(self, sandbox, files):
+ ContextDerived.__init__(self, sandbox)
+ self.files = files
+
+
+class LocalizedFiles(FinalTargetFiles):
+ """Sandbox container object for LOCALIZED_FILES, which is a
+ HierarchicalStringList.
+ """
+
+ pass
+
+
+class LocalizedPreprocessedFiles(FinalTargetPreprocessedFiles):
+ """Sandbox container object for LOCALIZED_PP_FILES, which is a
+ HierarchicalStringList.
+ """
+
+ pass
+
+
+class ObjdirFiles(FinalTargetFiles):
+ """Sandbox container object for OBJDIR_FILES, which is a
+ HierarchicalStringList.
+ """
+
+ @property
+ def install_target(self):
+ return ""
+
+
+class ObjdirPreprocessedFiles(FinalTargetPreprocessedFiles):
+ """Sandbox container object for OBJDIR_PP_FILES, which is a
+ HierarchicalStringList.
+ """
+
+ @property
+ def install_target(self):
+ return ""
+
+
+class TestHarnessFiles(FinalTargetFiles):
+ """Sandbox container object for TEST_HARNESS_FILES,
+ which is a HierarchicalStringList.
+ """
+
+ @property
+ def install_target(self):
+ return "_tests"
+
+
+class Exports(FinalTargetFiles):
+ """Context derived container object for EXPORTS, which is a
+ HierarchicalStringList.
+
+ We need an object derived from ContextDerived for use in the backend, so
+ this object fills that role. It just has a reference to the underlying
+ HierarchicalStringList, which is created when parsing EXPORTS.
+ """
+
+ @property
+ def install_target(self):
+ return "dist/include"
+
+
+class GeneratedFile(ContextDerived):
+ """Represents a generated file."""
+
+ __slots__ = (
+ "script",
+ "method",
+ "outputs",
+ "inputs",
+ "flags",
+ "required_before_export",
+ "required_before_compile",
+ "required_during_compile",
+ "localized",
+ "force",
+ "py2",
+ )
+
+ def __init__(
+ self,
+ context,
+ script,
+ method,
+ outputs,
+ inputs,
+ flags=(),
+ localized=False,
+ force=False,
+ py2=False,
+ required_during_compile=None,
+ ):
+ ContextDerived.__init__(self, context)
+ self.script = script
+ self.method = method
+ self.outputs = outputs if isinstance(outputs, tuple) else (outputs,)
+ self.inputs = inputs
+ self.flags = flags
+ self.localized = localized
+ self.force = force
+ self.py2 = py2
+
+ if self.config.substs.get("MOZ_WIDGET_TOOLKIT") == "android":
+ # In GeckoView builds we process Jinja files during pre-export
+ self.required_before_export = [
+ f for f in self.inputs if f.endswith(".jinja")
+ ]
+ else:
+ self.required_before_export = False
+
+ suffixes = [
+ ".h",
+ ".py",
+ ".rs",
+ # We need to compile Java to generate JNI wrappers for native code
+ # compilation to consume.
+ "android_apks",
+ ".profdata",
+ ".webidl",
+ ]
+
+ try:
+ lib_suffix = context.config.substs["LIB_SUFFIX"]
+ suffixes.append("." + lib_suffix)
+ except KeyError:
+ # Tests may not define LIB_SUFFIX
+ pass
+
+ suffixes = tuple(suffixes)
+
+ self.required_before_compile = [
+ f
+ for f in self.outputs
+ if f.endswith(suffixes) or "stl_wrappers/" in f or "xpidl.stub" in f
+ ]
+
+ if required_during_compile is None:
+ self.required_during_compile = [
+ f
+ for f in self.outputs
+ if f.endswith(
+ (".asm", ".c", ".cpp", ".inc", ".m", ".mm", ".def", "symverscript")
+ )
+ ]
+ else:
+ self.required_during_compile = required_during_compile
+
+
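The suffix lists above partition outputs by the build phase at which they must exist. A sketch of the before-compile filter, with assumed output names and a subset of the suffix list (LIB_SUFFIX omitted for brevity):

    suffixes = (".h", ".py", ".rs", "android_apks", ".profdata", ".webidl")
    outputs = ("nsGkAtoms.h", "metrics.rs", "table.inc", "xpidl.stub.cpp")
    # "table.inc" falls through to the required-during-compile bucket instead.
    required_before_compile = [
        f
        for f in outputs
        if f.endswith(suffixes) or "stl_wrappers/" in f or "xpidl.stub" in f
    ]
    assert required_before_compile == ["nsGkAtoms.h", "metrics.rs", "xpidl.stub.cpp"]
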
+class ChromeManifestEntry(ContextDerived):
+ """Represents a chrome.manifest entry."""
+
+ __slots__ = ("path", "entry")
+
+ def __init__(self, context, manifest_path, entry):
+ ContextDerived.__init__(self, context)
+ assert isinstance(entry, ManifestEntry)
+ self.path = mozpath.join(self.install_target, manifest_path)
+ # Ensure the entry is relative to the directory containing the
+ # manifest path.
+ entry = entry.rebase(mozpath.dirname(manifest_path))
+ # Then add the install_target to the entry base directory.
+ self.entry = entry.move(mozpath.dirname(self.path))
diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py
new file mode 100644
index 0000000000..8d62072421
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -0,0 +1,1892 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import sys
+import time
+import traceback
+from collections import OrderedDict, defaultdict
+
+import mozinfo
+import mozpack.path as mozpath
+import six
+import toml
+from mach.mixin.logging import LoggingMixin
+from mozpack.chrome.manifest import Manifest
+
+from mozbuild.base import ExecutionSummary
+from mozbuild.util import OrderedDefaultDict, memoize
+
+from ..testing import REFTEST_FLAVORS, TEST_MANIFESTS, SupportFilesConverter
+from .context import Context, ObjDirPath, Path, SourcePath, SubContext
+from .data import (
+ BaseRustProgram,
+ ChromeManifestEntry,
+ ComputedFlags,
+ ConfigFileSubstitution,
+ Defines,
+ DirectoryTraversal,
+ Exports,
+ ExternalSharedLibrary,
+ ExternalStaticLibrary,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostRustLibrary,
+ HostRustProgram,
+ HostSharedLibrary,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ IPDLCollection,
+ JARManifest,
+ Library,
+ Linkable,
+ LocalInclude,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ Program,
+ RustLibrary,
+ RustProgram,
+ RustTests,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestHarnessFiles,
+ TestManifest,
+ UnifiedSources,
+ VariablePassthru,
+ WasmDefines,
+ WasmSources,
+ WebIDLCollection,
+ XPCOMComponentManifests,
+ XPIDLModule,
+)
+from .reader import SandboxValidationError
+
+
+class TreeMetadataEmitter(LoggingMixin):
+ """Converts the executed mozbuild files into data structures.
+
+ This is a bridge between reader.py and data.py. It takes what was read by
+ reader.BuildReader and converts it into the classes defined in the data
+ module.
+ """
+
+ def __init__(self, config):
+ self.populate_logger()
+
+ self.config = config
+
+ mozinfo.find_and_update_from_json(config.topobjdir)
+
+ self.info = dict(mozinfo.info)
+
+ self._libs = OrderedDefaultDict(list)
+ self._binaries = OrderedDict()
+ self._compile_dirs = set()
+ self._host_compile_dirs = set()
+ self._wasm_compile_dirs = set()
+ self._asm_compile_dirs = set()
+ self._compile_flags = dict()
+ self._compile_as_flags = dict()
+ self._linkage = []
+ self._static_linking_shared = set()
+ self._crate_verified_local = set()
+ self._crate_directories = dict()
+ self._idls = defaultdict(set)
+
+ # Keep track of external paths (third party build systems), starting
+ # from what we run a subconfigure in. We'll eliminate some directories
+ # as we traverse them with moz.build (e.g. js/src).
+ subconfigures = os.path.join(self.config.topobjdir, "subconfigures")
+ paths = []
+ if os.path.exists(subconfigures):
+            with open(subconfigures) as f:
+                paths = f.read().splitlines()
+ self._external_paths = set(mozpath.normsep(d) for d in paths)
+
+ self._emitter_time = 0.0
+ self._object_count = 0
+ self._test_files_converter = SupportFilesConverter()
+
+ def summary(self):
+ return ExecutionSummary(
+ "Processed into {object_count:d} build config descriptors in "
+ "{execution_time:.2f}s",
+ execution_time=self._emitter_time,
+ object_count=self._object_count,
+ )
+
+ def emit(self, output, emitfn=None):
+ """Convert the BuildReader output into data structures.
+
+ The return value from BuildReader.read_topsrcdir() (a generator) is
+ typically fed into this function.
+ """
+ contexts = {}
+ emitfn = emitfn or self.emit_from_context
+
+ def emit_objs(objs):
+ for o in objs:
+ self._object_count += 1
+ yield o
+
+ for out in output:
+ # Nothing in sub-contexts is currently of interest to us. Filter
+ # them all out.
+ if isinstance(out, SubContext):
+ continue
+
+ if isinstance(out, Context):
+ # Keep all contexts around, we will need them later.
+ contexts[os.path.normcase(out.objdir)] = out
+
+ start = time.monotonic()
+ # We need to expand the generator for the timings to work.
+ objs = list(emitfn(out))
+ self._emitter_time += time.monotonic() - start
+
+ for o in emit_objs(objs):
+ yield o
+
+ else:
+ raise Exception("Unhandled output type: %s" % type(out))
+
+ # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
+ if self.config.substs.get("COMPILE_ENVIRONMENT"):
+ start = time.monotonic()
+ objs = list(self._emit_libs_derived(contexts))
+ self._emitter_time += time.monotonic() - start
+
+ for o in emit_objs(objs):
+ yield o
+
+ def _emit_libs_derived(self, contexts):
+
+ # First aggregate idl sources.
+ webidl_attrs = [
+ ("GENERATED_EVENTS_WEBIDL_FILES", lambda c: c.generated_events_sources),
+ ("GENERATED_WEBIDL_FILES", lambda c: c.generated_sources),
+ ("PREPROCESSED_TEST_WEBIDL_FILES", lambda c: c.preprocessed_test_sources),
+ ("PREPROCESSED_WEBIDL_FILES", lambda c: c.preprocessed_sources),
+ ("TEST_WEBIDL_FILES", lambda c: c.test_sources),
+ ("WEBIDL_FILES", lambda c: c.sources),
+ ("WEBIDL_EXAMPLE_INTERFACES", lambda c: c.example_interfaces),
+ ]
+ ipdl_attrs = [
+ ("IPDL_SOURCES", lambda c: c.sources),
+ ("PREPROCESSED_IPDL_SOURCES", lambda c: c.preprocessed_sources),
+ ]
+ xpcom_attrs = [("XPCOM_MANIFESTS", lambda c: c.manifests)]
+
+ idl_sources = {}
+ for root, cls, attrs in (
+ (self.config.substs.get("WEBIDL_ROOT"), WebIDLCollection, webidl_attrs),
+ (self.config.substs.get("IPDL_ROOT"), IPDLCollection, ipdl_attrs),
+ (
+ self.config.substs.get("XPCOM_ROOT"),
+ XPCOMComponentManifests,
+ xpcom_attrs,
+ ),
+ ):
+ if root:
+ collection = cls(contexts[os.path.normcase(root)])
+ for var, src_getter in attrs:
+ src_getter(collection).update(self._idls[var])
+
+ idl_sources[root] = collection.all_source_files()
+ if isinstance(collection, WebIDLCollection):
+ # Test webidl sources are added here as a somewhat special
+ # case.
+ idl_sources[mozpath.join(root, "test")] = [
+ s for s in collection.all_test_cpp_basenames()
+ ]
+
+ yield collection
+
+ # Next do FINAL_LIBRARY linkage.
+ for lib in (l for libs in self._libs.values() for l in libs):
+ if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
+ continue
+ if lib.link_into not in self._libs:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
+ % lib.link_into,
+ contexts[os.path.normcase(lib.objdir)],
+ )
+ candidates = self._libs[lib.link_into]
+
+            # When there are multiple candidates but they all live in the same
+            # directory and each has a different type, we want all of them to
+            # have the library linked. The typical use case is building both a
+            # static and a shared library in a directory, and having that as a
+            # FINAL_LIBRARY.
+ if (
+ len(set(type(l) for l in candidates)) == len(candidates)
+ and len(set(l.objdir for l in candidates)) == 1
+ ):
+ for c in candidates:
+ c.link_library(lib)
+ else:
+ raise SandboxValidationError(
+ 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
+ "multiple places:\n %s"
+ % (lib.link_into, "\n ".join(l.objdir for l in candidates)),
+ contexts[os.path.normcase(lib.objdir)],
+ )
+
+ # ...and USE_LIBS linkage.
+ for context, obj, variable in self._linkage:
+ self._link_libraries(context, obj, variable, idl_sources)
+
+ def recurse_refs(lib):
+ for o in lib.refs:
+ yield o
+ if isinstance(o, StaticLibrary):
+ for q in recurse_refs(o):
+ yield q
+
+        # Check that all static libraries referring to shared libraries in
+ # USE_LIBS are linked into a shared library or program.
+ for lib in self._static_linking_shared:
+ if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
+ shared_libs = sorted(
+ l.basename
+ for l in lib.linked_libraries
+ if isinstance(l, SharedLibrary)
+ )
+ raise SandboxValidationError(
+ 'The static "%s" library is not used in a shared library '
+ "or a program, but USE_LIBS contains the following shared "
+ "library names:\n %s\n\nMaybe you can remove the "
+ 'static "%s" library?'
+ % (lib.basename, "\n ".join(shared_libs), lib.basename),
+ contexts[os.path.normcase(lib.objdir)],
+ )
+
+ @memoize
+ def rust_libraries(obj):
+ libs = []
+ for o in obj.linked_libraries:
+ if isinstance(o, (HostRustLibrary, RustLibrary)):
+ libs.append(o)
+ elif isinstance(o, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
+ libs.extend(rust_libraries(o))
+ return libs
+
+ def check_rust_libraries(obj):
+ rust_libs = set(rust_libraries(obj))
+ if len(rust_libs) <= 1:
+ return
+ if isinstance(obj, (Library, HostLibrary)):
+ what = '"%s" library' % obj.basename
+ else:
+ what = '"%s" program' % obj.name
+ raise SandboxValidationError(
+ "Cannot link the following Rust libraries into the %s:\n"
+ "%s\nOnly one is allowed."
+ % (
+ what,
+ "\n".join(
+ " - %s" % r.basename
+ for r in sorted(rust_libs, key=lambda r: r.basename)
+ ),
+ ),
+ contexts[os.path.normcase(obj.objdir)],
+ )
+
+ # Propagate LIBRARY_DEFINES to all child libraries recursively.
+ def propagate_defines(outerlib, defines):
+ outerlib.lib_defines.update(defines)
+ for lib in outerlib.linked_libraries:
+ # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
+ # paths.
+ if (
+ isinstance(lib, StaticLibrary)
+ and lib.link_into == outerlib.basename
+ ):
+ propagate_defines(lib, defines)
+
+ for lib in (l for libs in self._libs.values() for l in libs):
+ if isinstance(lib, Library):
+ propagate_defines(lib, lib.lib_defines)
+ check_rust_libraries(lib)
+ yield lib
+
+ for lib in (l for libs in self._libs.values() for l in libs):
+ lib_defines = list(lib.lib_defines.get_defines())
+ if lib_defines:
+ objdir_flags = self._compile_flags[lib.objdir]
+ objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)
+
+ objdir_flags = self._compile_as_flags.get(lib.objdir)
+ if objdir_flags:
+ objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)
+
+ for flags_obj in self._compile_flags.values():
+ yield flags_obj
+
+ for flags_obj in self._compile_as_flags.values():
+ yield flags_obj
+
+ for obj in self._binaries.values():
+ if isinstance(obj, Linkable):
+ check_rust_libraries(obj)
+ yield obj
+
+ LIBRARY_NAME_VAR = {
+ "host": "HOST_LIBRARY_NAME",
+ "target": "LIBRARY_NAME",
+ "wasm": "SANDBOXED_WASM_LIBRARY_NAME",
+ }
+
+ ARCH_VAR = {"host": "HOST_OS_ARCH", "target": "OS_TARGET"}
+
+ STDCXXCOMPAT_NAME = {"host": "host_stdc++compat", "target": "stdc++compat"}
+
+ def _link_libraries(self, context, obj, variable, extra_sources):
+ """Add linkage declarations to a given object."""
+ assert isinstance(obj, Linkable)
+
+ if context.objdir in extra_sources:
+ # All "extra sources" are .cpp for the moment, and happen to come
+ # first in order.
+ obj.sources[".cpp"] = extra_sources[context.objdir] + obj.sources[".cpp"]
+
+ for path in context.get(variable, []):
+ self._link_library(context, obj, variable, path)
+
+ # Link system libraries from OS_LIBS/HOST_OS_LIBS.
+ for lib in context.get(variable.replace("USE", "OS"), []):
+ obj.link_system_library(lib)
+
+ # We have to wait for all the self._link_library calls above to have
+ # happened for obj.cxx_link to be final.
+ # FIXME: Theoretically, HostSharedLibrary shouldn't be here (bug
+ # 1474022).
+ if (
+ not isinstance(
+ obj, (StaticLibrary, HostLibrary, HostSharedLibrary, BaseRustProgram)
+ )
+ and obj.cxx_link
+ ):
+ if (
+ context.config.substs.get("MOZ_STDCXX_COMPAT")
+ and context.config.substs.get(self.ARCH_VAR.get(obj.KIND)) == "Linux"
+ ):
+ self._link_library(
+ context, obj, variable, self.STDCXXCOMPAT_NAME[obj.KIND]
+ )
+ if obj.KIND == "target":
+ for lib in context.config.substs.get("STLPORT_LIBS", []):
+ obj.link_system_library(lib)
+
+ def _link_library(self, context, obj, variable, path):
+ force_static = path.startswith("static:") and obj.KIND == "target"
+ if force_static:
+ path = path[7:]
+ name = mozpath.basename(path)
+ dir = mozpath.dirname(path)
+ candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
+ if dir:
+ if dir.startswith("/"):
+ dir = mozpath.normpath(mozpath.join(obj.topobjdir, dir[1:]))
+ else:
+ dir = mozpath.normpath(mozpath.join(obj.objdir, dir))
+ dir = mozpath.relpath(dir, obj.topobjdir)
+ candidates = [l for l in candidates if l.relobjdir == dir]
+ if not candidates:
+ # If the given directory is under one of the external
+ # (third party) paths, use a fake library reference to
+ # there.
+ for d in self._external_paths:
+ if dir.startswith("%s/" % d):
+ candidates = [
+ self._get_external_library(dir, name, force_static)
+ ]
+ break
+
+ if not candidates:
+ raise SandboxValidationError(
+ '%s contains "%s", but there is no "%s" %s in %s.'
+ % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
+ context,
+ )
+
+ if len(candidates) > 1:
+ # If there's more than one remaining candidate, it could be
+ # that there are instances for the same library, in static and
+ # shared form.
+ libs = {}
+ for l in candidates:
+ key = mozpath.join(l.relobjdir, l.basename)
+ if force_static:
+ if isinstance(l, StaticLibrary):
+ libs[key] = l
+ else:
+ if key in libs and isinstance(l, SharedLibrary):
+ libs[key] = l
+ if key not in libs:
+ libs[key] = l
+ candidates = list(libs.values())
+ if force_static and not candidates:
+ if dir:
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is no static '
+ '"%s" %s in %s.'
+ % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
+ context,
+ )
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is no static "%s" '
+ "%s in the tree"
+ % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]),
+ context,
+ )
+
+ if not candidates:
+ raise SandboxValidationError(
+ '%s contains "%s", which does not match any %s in the tree.'
+ % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
+ context,
+ )
+
+ elif len(candidates) > 1:
+ paths = (mozpath.join(l.relsrcdir, "moz.build") for l in candidates)
+ raise SandboxValidationError(
+ '%s contains "%s", which matches a %s defined in multiple '
+ "places:\n %s"
+ % (
+ variable,
+ path,
+ self.LIBRARY_NAME_VAR[obj.KIND],
+ "\n ".join(paths),
+ ),
+ context,
+ )
+
+ elif force_static and not isinstance(candidates[0], StaticLibrary):
+ raise SandboxValidationError(
+ '%s contains "static:%s", but there is only a shared "%s" '
+ "in %s. You may want to add FORCE_STATIC_LIB=True in "
+ '%s/moz.build, or remove "static:".'
+ % (
+ variable,
+ path,
+ name,
+ candidates[0].relobjdir,
+ candidates[0].relobjdir,
+ ),
+ context,
+ )
+
+ elif isinstance(obj, StaticLibrary) and isinstance(
+ candidates[0], SharedLibrary
+ ):
+ self._static_linking_shared.add(obj)
+ obj.link_library(candidates[0])
+
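The "static:" prefix handling at the top of _link_library is plain string surgery before the library lookup. A standalone sketch (posixpath stands in for mozpath, which behaves the same for these calls):

    import posixpath

    def parse_use_libs_entry(path, kind="target"):
        # "static:" only forces static linkage for target linkables,
        # and is stripped before the name/directory split.
        force_static = path.startswith("static:") and kind == "target"
        if force_static:
            path = path[len("static:"):]
        return force_static, posixpath.dirname(path), posixpath.basename(path)

    assert parse_use_libs_entry("static:/xpcom/glue/nss") == (True, "/xpcom/glue", "nss")
    assert parse_use_libs_entry("nspr", kind="host") == (False, "", "nspr")
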
+ @memoize
+ def _get_external_library(self, dir, name, force_static):
+ # Create ExternalStaticLibrary or ExternalSharedLibrary object with a
+ # context more or less truthful about where the external library is.
+ context = Context(config=self.config)
+ context.add_source(mozpath.join(self.config.topsrcdir, dir, "dummy"))
+ if force_static:
+ return ExternalStaticLibrary(context, name)
+ else:
+ return ExternalSharedLibrary(context, name)
+
+ def _parse_cargo_file(self, context):
+ """Parse the Cargo.toml file in context and return a Python object
+ representation of it. Raise a SandboxValidationError if the Cargo.toml
+ file does not exist. Return a tuple of (config, cargo_file)."""
+ cargo_file = mozpath.join(context.srcdir, "Cargo.toml")
+ if not os.path.exists(cargo_file):
+ raise SandboxValidationError(
+ "No Cargo.toml file found in %s" % cargo_file, context
+ )
+ with open(cargo_file, "r") as f:
+ return toml.load(f), cargo_file
+
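toml.load returns plain dicts and lists, so everything _rust_library checks afterwards is ordinary key access. A sketch of the shape it expects, parsed here from an inline string with toml.loads (the crate name and dependency path are invented):

    import toml

    config = toml.loads(
        """
        [package]
        name = "mozglue-static"

        [lib]
        crate-type = ["staticlib"]

        [dependencies]
        arrayvec = { path = "../arrayvec" }
        """
    )
    assert config["package"]["name"] == "mozglue-static"
    assert config["lib"]["crate-type"][0] == "staticlib"
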
+ def _verify_deps(
+ self, context, crate_dir, crate_name, dependencies, description="Dependency"
+ ):
+ """Verify that a crate's dependencies all specify local paths."""
+ for dep_crate_name, values in six.iteritems(dependencies):
+ # A simple version number.
+ if isinstance(values, (six.binary_type, six.text_type)):
+ raise SandboxValidationError(
+ "%s %s of crate %s does not list a path"
+ % (description, dep_crate_name, crate_name),
+ context,
+ )
+
+ dep_path = values.get("path", None)
+ if not dep_path:
+ raise SandboxValidationError(
+ "%s %s of crate %s does not list a path"
+ % (description, dep_crate_name, crate_name),
+ context,
+ )
+
+ # Try to catch the case where somebody listed a
+ # local path for development.
+ if os.path.isabs(dep_path):
+ raise SandboxValidationError(
+ "%s %s of crate %s has a non-relative path"
+ % (description, dep_crate_name, crate_name),
+ context,
+ )
+
+ if not os.path.exists(
+ mozpath.join(context.config.topsrcdir, crate_dir, dep_path)
+ ):
+ raise SandboxValidationError(
+ "%s %s of crate %s refers to a non-existent path"
+ % (description, dep_crate_name, crate_name),
+ context,
+ )
+
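Concretely, only dependencies given as tables with a relative, existing path pass the checks above. A data-only sketch of the three shapes (no filesystem check here; names and values invented):

    import os

    deps = {
        "local": {"path": "../local-crate"},       # relative path: accepted
        "versioned": "0.2.1",                      # bare version: rejected
        "absolute": {"path": "/home/dev/crate"},   # absolute path: rejected
    }
    rejected = [
        name
        for name, value in deps.items()
        if isinstance(value, str) or os.path.isabs(value.get("path", ""))
    ]
    assert rejected == ["versioned", "absolute"]
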
+ def _rust_library(
+ self, context, libname, static_args, is_gkrust=False, cls=RustLibrary
+ ):
+ # We need to note any Rust library for linking purposes.
+ config, cargo_file = self._parse_cargo_file(context)
+ crate_name = config["package"]["name"]
+
+ if crate_name != libname:
+ raise SandboxValidationError(
+ "library %s does not match Cargo.toml-defined package %s"
+ % (libname, crate_name),
+ context,
+ )
+
+ # Check that the [lib.crate-type] field is correct
+ lib_section = config.get("lib", None)
+ if not lib_section:
+ raise SandboxValidationError(
+ "Cargo.toml for %s has no [lib] section" % libname, context
+ )
+
+ crate_type = lib_section.get("crate-type", None)
+ if not crate_type:
+ raise SandboxValidationError(
+ "Can't determine a crate-type for %s from Cargo.toml" % libname, context
+ )
+
+ crate_type = crate_type[0]
+ if crate_type != "staticlib":
+ raise SandboxValidationError(
+ "crate-type %s is not permitted for %s" % (crate_type, libname), context
+ )
+
+ dependencies = set(six.iterkeys(config.get("dependencies", {})))
+
+ features = context.get(cls.FEATURES_VAR, [])
+ unique_features = set(features)
+ if len(features) != len(unique_features):
+ raise SandboxValidationError(
+ "features for %s should not contain duplicates: %s"
+ % (libname, features),
+ context,
+ )
+
+ return cls(
+ context,
+ libname,
+ cargo_file,
+ crate_type,
+ dependencies,
+ features,
+ is_gkrust,
+ **static_args,
+ )
+
+ def _handle_linkables(self, context, passthru, generated_files):
+ linkables = []
+ host_linkables = []
+ wasm_linkables = []
+
+ def add_program(prog, var):
+ if var.startswith("HOST_"):
+ host_linkables.append(prog)
+ else:
+ linkables.append(prog)
+
+ def check_unique_binary(program, kind):
+ if program in self._binaries:
+ raise SandboxValidationError(
+ 'Cannot use "%s" as %s name, '
+ "because it is already used in %s"
+ % (program, kind, self._binaries[program].relsrcdir),
+ context,
+ )
+
+ for kind, cls in [("PROGRAM", Program), ("HOST_PROGRAM", HostProgram)]:
+ program = context.get(kind)
+ if program:
+ check_unique_binary(program, kind)
+ self._binaries[program] = cls(context, program)
+ self._linkage.append(
+ (
+ context,
+ self._binaries[program],
+ kind.replace("PROGRAM", "USE_LIBS"),
+ )
+ )
+ add_program(self._binaries[program], kind)
+
+ all_rust_programs = []
+ for kind, cls in [
+ ("RUST_PROGRAMS", RustProgram),
+ ("HOST_RUST_PROGRAMS", HostRustProgram),
+ ]:
+ programs = context[kind]
+ if not programs:
+ continue
+
+ all_rust_programs.append((programs, kind, cls))
+
+ # Verify Rust program definitions.
+ if all_rust_programs:
+ config, cargo_file = self._parse_cargo_file(context)
+ bin_section = config.get("bin", None)
+ if not bin_section:
+ raise SandboxValidationError(
+ "Cargo.toml in %s has no [bin] section" % context.srcdir, context
+ )
+
+ defined_binaries = {b["name"] for b in bin_section}
+
+ for programs, kind, cls in all_rust_programs:
+ for program in programs:
+ if program not in defined_binaries:
+ raise SandboxValidationError(
+ "Cannot find Cargo.toml definition for %s" % program,
+ context,
+ )
+
+ check_unique_binary(program, kind)
+ self._binaries[program] = cls(context, program, cargo_file)
+ add_program(self._binaries[program], kind)
+
+ for kind, cls in [
+ ("SIMPLE_PROGRAMS", SimpleProgram),
+ ("CPP_UNIT_TESTS", SimpleProgram),
+ ("HOST_SIMPLE_PROGRAMS", HostSimpleProgram),
+ ]:
+ for program in context[kind]:
+ if program in self._binaries:
+ raise SandboxValidationError(
+ 'Cannot use "%s" in %s, '
+ "because it is already used in %s"
+ % (program, kind, self._binaries[program].relsrcdir),
+ context,
+ )
+ self._binaries[program] = cls(
+ context, program, is_unit_test=kind == "CPP_UNIT_TESTS"
+ )
+ self._linkage.append(
+ (
+ context,
+ self._binaries[program],
+ "HOST_USE_LIBS"
+ if kind == "HOST_SIMPLE_PROGRAMS"
+ else "USE_LIBS",
+ )
+ )
+ add_program(self._binaries[program], kind)
+
+ host_libname = context.get("HOST_LIBRARY_NAME")
+ libname = context.get("LIBRARY_NAME")
+
+ if host_libname:
+ if host_libname == libname:
+ raise SandboxValidationError(
+ "LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value",
+ context,
+ )
+
+ is_rust_library = context.get("IS_RUST_LIBRARY")
+ if is_rust_library:
+ lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary)
+ elif context.get("FORCE_SHARED_LIB"):
+ lib = HostSharedLibrary(context, host_libname)
+ else:
+ lib = HostLibrary(context, host_libname)
+ self._libs[host_libname].append(lib)
+ self._linkage.append((context, lib, "HOST_USE_LIBS"))
+ host_linkables.append(lib)
+
+ final_lib = context.get("FINAL_LIBRARY")
+ if not libname and final_lib:
+ # If no LIBRARY_NAME is given, create one.
+ libname = context.relsrcdir.replace("/", "_")
+
+ static_lib = context.get("FORCE_STATIC_LIB")
+ shared_lib = context.get("FORCE_SHARED_LIB")
+
+ static_name = context.get("STATIC_LIBRARY_NAME")
+ shared_name = context.get("SHARED_LIBRARY_NAME")
+
+ is_framework = context.get("IS_FRAMEWORK")
+
+ soname = context.get("SONAME")
+
+ lib_defines = context.get("LIBRARY_DEFINES")
+
+ wasm_lib = context.get("SANDBOXED_WASM_LIBRARY_NAME")
+
+ shared_args = {}
+ static_args = {}
+
+ if final_lib:
+ if static_lib:
+ raise SandboxValidationError(
+ "FINAL_LIBRARY implies FORCE_STATIC_LIB. "
+ "Please remove the latter.",
+ context,
+ )
+ if shared_lib:
+ raise SandboxValidationError(
+ "FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. "
+ "Please remove one.",
+ context,
+ )
+ if is_framework:
+ raise SandboxValidationError(
+ "FINAL_LIBRARY conflicts with IS_FRAMEWORK. " "Please remove one.",
+ context,
+ )
+ static_args["link_into"] = final_lib
+ static_lib = True
+
+ if libname:
+ if is_framework:
+ if soname:
+ raise SandboxValidationError(
+ "IS_FRAMEWORK conflicts with SONAME. " "Please remove one.",
+ context,
+ )
+ shared_lib = True
+ shared_args["variant"] = SharedLibrary.FRAMEWORK
+
+ if not static_lib and not shared_lib:
+ static_lib = True
+
+ if static_name:
+ if not static_lib:
+ raise SandboxValidationError(
+ "STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB", context
+ )
+ static_args["real_name"] = static_name
+
+ if shared_name:
+ if not shared_lib:
+ raise SandboxValidationError(
+ "SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB", context
+ )
+ shared_args["real_name"] = shared_name
+
+ if soname:
+ if not shared_lib:
+ raise SandboxValidationError(
+ "SONAME requires FORCE_SHARED_LIB", context
+ )
+ shared_args["soname"] = soname
+
+ if context.get("NO_EXPAND_LIBS"):
+ if not static_lib:
+ raise SandboxValidationError(
+ "NO_EXPAND_LIBS can only be set for static libraries.", context
+ )
+ static_args["no_expand_lib"] = True
+
+ if shared_lib and static_lib:
+ if not static_name and not shared_name:
+ raise SandboxValidationError(
+ "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+ "but neither STATIC_LIBRARY_NAME or "
+ "SHARED_LIBRARY_NAME is set. At least one is required.",
+ context,
+ )
+ if static_name and not shared_name and static_name == libname:
+ raise SandboxValidationError(
+ "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+ "but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, "
+ "and SHARED_LIBRARY_NAME is unset. Please either "
+ "change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set "
+ "SHARED_LIBRARY_NAME.",
+ context,
+ )
+ if shared_name and not static_name and shared_name == libname:
+ raise SandboxValidationError(
+ "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+ "but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, "
+ "and STATIC_LIBRARY_NAME is unset. Please either "
+ "change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set "
+ "STATIC_LIBRARY_NAME.",
+ context,
+ )
+ if shared_name and static_name and shared_name == static_name:
+ raise SandboxValidationError(
+ "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+ "but SHARED_LIBRARY_NAME is the same as "
+ "STATIC_LIBRARY_NAME. Please change one of them.",
+ context,
+ )
+
+ symbols_file = context.get("SYMBOLS_FILE")
+ if symbols_file:
+ if not shared_lib:
+ raise SandboxValidationError(
+ "SYMBOLS_FILE can only be used with a SHARED_LIBRARY.", context
+ )
+ if context.get("DEFFILE"):
+ raise SandboxValidationError(
+ "SYMBOLS_FILE cannot be used along DEFFILE.", context
+ )
+ if isinstance(symbols_file, SourcePath):
+ if not os.path.exists(symbols_file.full_path):
+ raise SandboxValidationError(
+ "Path specified in SYMBOLS_FILE does not exist: %s "
+ "(resolved to %s)" % (symbols_file, symbols_file.full_path),
+ context,
+ )
+ shared_args["symbols_file"] = True
+ else:
+ if symbols_file.target_basename not in generated_files:
+ raise SandboxValidationError(
+                        "Objdir file specified in SYMBOLS_FILE not in "
+                        "GENERATED_FILES: %s" % (symbols_file,),
+ context,
+ )
+ shared_args["symbols_file"] = symbols_file.target_basename
+
+ if shared_lib:
+ lib = SharedLibrary(context, libname, **shared_args)
+ self._libs[libname].append(lib)
+ self._linkage.append((context, lib, "USE_LIBS"))
+ linkables.append(lib)
+ if not lib.installed:
+ generated_files.add(lib.lib_name)
+ if symbols_file and isinstance(symbols_file, SourcePath):
+ script = mozpath.join(
+ mozpath.dirname(mozpath.dirname(__file__)),
+ "action",
+ "generate_symbols_file.py",
+ )
+ defines = ()
+ if lib.defines:
+ defines = lib.defines.get_defines()
+ yield GeneratedFile(
+ context,
+ script,
+ "generate_symbols_file",
+ lib.symbols_file,
+ [symbols_file],
+ defines,
+ required_during_compile=[lib.symbols_file],
+ )
+ if static_lib:
+ is_rust_library = context.get("IS_RUST_LIBRARY")
+ if is_rust_library:
+ lib = self._rust_library(
+ context,
+ libname,
+ static_args,
+ is_gkrust=bool(context.get("IS_GKRUST")),
+ )
+ else:
+ lib = StaticLibrary(context, libname, **static_args)
+ self._libs[libname].append(lib)
+ self._linkage.append((context, lib, "USE_LIBS"))
+ linkables.append(lib)
+
+ if lib_defines:
+ if not libname:
+ raise SandboxValidationError(
+ "LIBRARY_DEFINES needs a " "LIBRARY_NAME to take effect",
+ context,
+ )
+ lib.lib_defines.update(lib_defines)
+
+ if wasm_lib:
+ if wasm_lib == libname:
+ raise SandboxValidationError(
+ "SANDBOXED_WASM_LIBRARY_NAME and LIBRARY_NAME must have a "
+ "different value.",
+ context,
+ )
+ if wasm_lib == host_libname:
+ raise SandboxValidationError(
+ "SANDBOXED_WASM_LIBRARY_NAME and HOST_LIBRARY_NAME must "
+ "have a different value.",
+ context,
+ )
+ if wasm_lib == shared_name:
+ raise SandboxValidationError(
+ "SANDBOXED_WASM_LIBRARY_NAME and SHARED_NAME must have a "
+ "different value.",
+ context,
+ )
+ if wasm_lib == static_name:
+ raise SandboxValidationError(
+ "SANDBOXED_WASM_LIBRARY_NAME and STATIC_NAME must have a "
+ "different value.",
+ context,
+ )
+ lib = SandboxedWasmLibrary(context, wasm_lib)
+ self._libs[libname].append(lib)
+ wasm_linkables.append(lib)
+ self._wasm_compile_dirs.add(context.objdir)
+
+ seen = {}
+ for symbol in ("SOURCES", "UNIFIED_SOURCES"):
+ for src in context.get(symbol, []):
+ basename = os.path.splitext(os.path.basename(src))[0]
+ if basename in seen:
+ other_src, where = seen[basename]
+ extra = ""
+ if "UNIFIED_SOURCES" in (symbol, where):
+ extra = " in non-unified builds"
+ raise SandboxValidationError(
+ f"{src} from {symbol} would have the same object name "
+ f"as {other_src} from {where}{extra}.",
+ context,
+ )
+ seen[basename] = (src, symbol)
+
+ # Only emit sources if we have linkables defined in the same context.
+ # Note the linkables are not emitted in this function, but much later,
+ # after aggregation (because of e.g. USE_LIBS processing).
+ if not (linkables or host_linkables or wasm_linkables):
+ return
+
+ # TODO: objdirs with only host things in them shouldn't need target
+ # flags, but there's at least one Makefile.in (in
+ # build/unix/elfhack) that relies on the value of LDFLAGS being
+ # passed to one-off rules.
+ self._compile_dirs.add(context.objdir)
+
+ if host_linkables or any(
+ isinstance(l, (RustLibrary, RustProgram)) for l in linkables
+ ):
+ self._host_compile_dirs.add(context.objdir)
+
+ sources = defaultdict(list)
+ gen_sources = defaultdict(list)
+ all_flags = {}
+ for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES", "WASM_SOURCES"):
+ srcs = sources[symbol]
+ gen_srcs = gen_sources[symbol]
+ context_srcs = context.get(symbol, [])
+ seen_sources = set()
+ for f in context_srcs:
+ if f in seen_sources:
+ raise SandboxValidationError(
+ "Source file should only "
+ "be added to %s once: %s" % (symbol, f),
+ context,
+ )
+ seen_sources.add(f)
+ full_path = f.full_path
+ if isinstance(f, SourcePath):
+ srcs.append(full_path)
+ else:
+ assert isinstance(f, Path)
+ gen_srcs.append(full_path)
+ if symbol == "SOURCES":
+ context_flags = context_srcs[f]
+ if context_flags:
+ all_flags[full_path] = context_flags
+
+ if isinstance(f, SourcePath) and not os.path.exists(full_path):
+ raise SandboxValidationError(
+ "File listed in %s does not "
+ "exist: '%s'" % (symbol, full_path),
+ context,
+ )
+
+ # Process the .cpp files generated by IPDL as generated sources within
+ # the context which declared the IPDL_SOURCES attribute.
+ ipdl_root = self.config.substs.get("IPDL_ROOT")
+ for symbol in ("IPDL_SOURCES", "PREPROCESSED_IPDL_SOURCES"):
+ context_srcs = context.get(symbol, [])
+ for f in context_srcs:
+ root, ext = mozpath.splitext(mozpath.basename(f))
+
+ suffix_map = {
+ ".ipdlh": [".cpp"],
+ ".ipdl": [".cpp", "Child.cpp", "Parent.cpp"],
+ }
+ if ext not in suffix_map:
+ raise SandboxValidationError(
+ "Unexpected extension for IPDL source %s" % ext
+ )
+
+ gen_sources["UNIFIED_SOURCES"].extend(
+ mozpath.join(ipdl_root, root + suffix) for suffix in suffix_map[ext]
+ )
+
+ no_pgo = context.get("NO_PGO")
+ no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo]
+ if no_pgo:
+ if no_pgo_sources:
+ raise SandboxValidationError(
+ "NO_PGO and SOURCES[...].no_pgo " "cannot be set at the same time",
+ context,
+ )
+ passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
+ if no_pgo_sources:
+ passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources
+
+ # A map from "canonical suffixes" for a particular source file
+ # language to the range of suffixes associated with that language.
+ #
+ # We deliberately don't list the canonical suffix in the suffix list
+ # in the definition; we'll add it in programmatically after defining
+ # things.
+ suffix_map = {
+ ".s": set([".asm"]),
+ ".c": set(),
+ ".m": set(),
+ ".mm": set(),
+ ".cpp": set([".cc", ".cxx"]),
+ ".S": set(),
+ }
+
+ # The inverse of the above, mapping suffixes to their canonical suffix.
+ canonicalized_suffix_map = {}
+ for suffix, alternatives in six.iteritems(suffix_map):
+ alternatives.add(suffix)
+ for a in alternatives:
+ canonicalized_suffix_map[a] = suffix
+
+ # A map from moz.build variables to the canonical suffixes of file
+ # kinds that can be listed therein.
+ all_suffixes = list(suffix_map.keys())
+ varmap = dict(
+ SOURCES=(Sources, all_suffixes),
+ HOST_SOURCES=(HostSources, [".c", ".mm", ".cpp"]),
+ UNIFIED_SOURCES=(UnifiedSources, [".c", ".mm", ".m", ".cpp"]),
+ )
+ # Only include a WasmSources context if there are any WASM_SOURCES.
+ # (This is going to matter later because we inject an extra .c file to
+ # compile with the wasm compiler if, and only if, there are any WASM
+ # sources.)
+ if sources["WASM_SOURCES"] or gen_sources["WASM_SOURCES"]:
+ varmap["WASM_SOURCES"] = (WasmSources, [".c", ".cpp"])
+ # Track whether there are any C++ source files.
+ # Technically this won't do the right thing for SIMPLE_PROGRAMS in
+ # a directory with mixed C and C++ source, but it's not that important.
+ cxx_sources = defaultdict(bool)
+
+ # Source files to track for linkables associated with this context.
+ ctxt_sources = defaultdict(lambda: defaultdict(list))
+
+ for variable, (klass, suffixes) in varmap.items():
+ # Group static and generated files by their canonical suffixes, and
+ # ensure we haven't been given filetypes that we don't recognize.
+ by_canonical_suffix = defaultdict(lambda: {"static": [], "generated": []})
+ for srcs, key in (
+ (sources[variable], "static"),
+ (gen_sources[variable], "generated"),
+ ):
+ for f in srcs:
+ canonical_suffix = canonicalized_suffix_map.get(
+ mozpath.splitext(f)[1]
+ )
+ if canonical_suffix not in suffixes:
+ raise SandboxValidationError(
+ "%s has an unknown file type." % f, context
+ )
+ by_canonical_suffix[canonical_suffix][key].append(f)
+
+ # Yield an object for each canonical suffix, grouping generated and
+ # static sources together to allow them to be unified together.
+ for canonical_suffix in sorted(by_canonical_suffix.keys()):
+ if canonical_suffix in (".cpp", ".mm"):
+ cxx_sources[variable] = True
+ elif canonical_suffix in (".s", ".S"):
+ self._asm_compile_dirs.add(context.objdir)
+ src_group = by_canonical_suffix[canonical_suffix]
+ obj = klass(
+ context,
+ src_group["static"],
+ src_group["generated"],
+ canonical_suffix,
+ )
+ srcs = list(obj.files)
+ if isinstance(obj, UnifiedSources) and obj.have_unified_mapping:
+ srcs = sorted(dict(obj.unified_source_mapping).keys())
+ ctxt_sources[variable][canonical_suffix] += srcs
+ yield obj
+
+ if ctxt_sources:
+ for linkable in linkables:
+ for target_var in ("SOURCES", "UNIFIED_SOURCES"):
+ for suffix, srcs in ctxt_sources[target_var].items():
+ linkable.sources[suffix] += srcs
+ for host_linkable in host_linkables:
+ for suffix, srcs in ctxt_sources["HOST_SOURCES"].items():
+ host_linkable.sources[suffix] += srcs
+ for wasm_linkable in wasm_linkables:
+ for suffix, srcs in ctxt_sources["WASM_SOURCES"].items():
+ wasm_linkable.sources[suffix] += srcs
+
+ for f, flags in sorted(six.iteritems(all_flags)):
+ if flags.flags:
+ yield PerSourceFlag(context, f, flags.flags)
+
+ # If there are any C++ sources, set all the linkables defined here
+ # to require the C++ linker.
+ for vars, linkable_items in (
+ (("SOURCES", "UNIFIED_SOURCES"), linkables),
+ (("HOST_SOURCES",), host_linkables),
+ ):
+ for var in vars:
+ if cxx_sources[var]:
+ for l in linkable_items:
+ l.cxx_link = True
+ break
+
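The duplicate-basename guard near the top of _handle_linkables exists because object files are named after a source's basename, so two sources sharing one basename would produce clashing object files. A sketch of the collision it detects (paths invented):

    import os

    seen = {}
    collision = None
    for symbol, src in [("SOURCES", "a/init.cpp"), ("UNIFIED_SOURCES", "b/init.cpp")]:
        basename = os.path.splitext(os.path.basename(src))[0]  # both yield "init"
        if basename in seen:
            collision = (src, symbol) + seen[basename]
        else:
            seen[basename] = (src, symbol)
    assert collision == ("b/init.cpp", "UNIFIED_SOURCES", "a/init.cpp", "SOURCES")
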
+ def emit_from_context(self, context):
+ """Convert a Context to tree metadata objects.
+
+ This is a generator of mozbuild.frontend.data.ContextDerived instances.
+ """
+
+ # We only want to emit an InstallationTarget if one of the consulted
+ # variables is defined. Later on, we look up FINAL_TARGET, which has
+ # the side-effect of populating it. So, we need to do this lookup
+ # early.
+ if any(k in context for k in ("FINAL_TARGET", "XPI_NAME", "DIST_SUBDIR")):
+ yield InstallationTarget(context)
+
+ # We always emit a directory traversal descriptor. This is needed by
+ # the recursive make backend.
+ for o in self._emit_directory_traversal_from_context(context):
+ yield o
+
+ for obj in self._process_xpidl(context):
+ yield obj
+
+ computed_flags = ComputedFlags(context, context["COMPILE_FLAGS"])
+ computed_link_flags = ComputedFlags(context, context["LINK_FLAGS"])
+ computed_host_flags = ComputedFlags(context, context["HOST_COMPILE_FLAGS"])
+ computed_as_flags = ComputedFlags(context, context["ASM_FLAGS"])
+ computed_wasm_flags = ComputedFlags(context, context["WASM_FLAGS"])
+
+ # Proxy some variables as-is until we have richer classes to represent
+ # them. We should aim to keep this set small because it violates the
+ # desired abstraction of the build definition away from makefiles.
+ passthru = VariablePassthru(context)
+ varlist = [
+ "EXTRA_DSO_LDOPTS",
+ "RCFILE",
+ "RCINCLUDE",
+ "WIN32_EXE_LDFLAGS",
+ "USE_EXTENSION_MANIFEST",
+ ]
+ for v in varlist:
+ if v in context and context[v]:
+ passthru.variables[v] = context[v]
+
+ if (
+ context.config.substs.get("OS_TARGET") == "WINNT"
+ and context["DELAYLOAD_DLLS"]
+ ):
+ if context.config.substs.get("CC_TYPE") != "clang":
+ context["LDFLAGS"].extend(
+ [("-DELAYLOAD:%s" % dll) for dll in context["DELAYLOAD_DLLS"]]
+ )
+ else:
+ context["LDFLAGS"].extend(
+ [
+ ("-Wl,-Xlink=-DELAYLOAD:%s" % dll)
+ for dll in context["DELAYLOAD_DLLS"]
+ ]
+ )
+ context["OS_LIBS"].append("delayimp")
+
+ for v in ["CMFLAGS", "CMMFLAGS"]:
+ if v in context and context[v]:
+ passthru.variables["MOZBUILD_" + v] = context[v]
+
+ for v in ["CXXFLAGS", "CFLAGS"]:
+ if v in context and context[v]:
+ computed_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ for v in ["WASM_CFLAGS", "WASM_CXXFLAGS"]:
+ if v in context and context[v]:
+ computed_wasm_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ for v in ["HOST_CXXFLAGS", "HOST_CFLAGS"]:
+ if v in context and context[v]:
+ computed_host_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ if "LDFLAGS" in context and context["LDFLAGS"]:
+ computed_link_flags.resolve_flags("MOZBUILD", context["LDFLAGS"])
+
+ deffile = context.get("DEFFILE")
+ if deffile and context.config.substs.get("OS_TARGET") == "WINNT":
+ if isinstance(deffile, SourcePath):
+ if not os.path.exists(deffile.full_path):
+ raise SandboxValidationError(
+ "Path specified in DEFFILE does not exist: %s "
+ "(resolved to %s)" % (deffile, deffile.full_path),
+ context,
+ )
+ path = mozpath.relpath(deffile.full_path, context.objdir)
+ else:
+ path = deffile.target_basename
+
+ if context.config.substs.get("GNU_CC"):
+ computed_link_flags.resolve_flags("DEFFILE", [path])
+ else:
+ computed_link_flags.resolve_flags("DEFFILE", ["-DEF:" + path])
+
+ dist_install = context["DIST_INSTALL"]
+ if dist_install is True:
+ passthru.variables["DIST_INSTALL"] = True
+ elif dist_install is False:
+ passthru.variables["NO_DIST_INSTALL"] = True
+
+ # Ideally, this should be done in templates, but this is difficult at
+ # the moment because USE_STATIC_LIBS can be set after a template
+ # returns. Eventually, with context-based templates, it will be
+ # possible.
+ if context.config.substs.get(
+ "OS_ARCH"
+ ) == "WINNT" and not context.config.substs.get("GNU_CC"):
+ use_static_lib = context.get(
+ "USE_STATIC_LIBS"
+ ) and not context.config.substs.get("MOZ_ASAN")
+ rtl_flag = "-MT" if use_static_lib else "-MD"
+ if context.config.substs.get("MOZ_DEBUG") and not context.config.substs.get(
+ "MOZ_NO_DEBUG_RTL"
+ ):
+ rtl_flag += "d"
+ computed_flags.resolve_flags("RTL", [rtl_flag])
+ if not context.config.substs.get("CROSS_COMPILE"):
+ computed_host_flags.resolve_flags("RTL", [rtl_flag])
+
+ generated_files = set()
+ localized_generated_files = set()
+ for obj in self._process_generated_files(context):
+ for f in obj.outputs:
+ generated_files.add(f)
+ if obj.localized:
+ localized_generated_files.add(f)
+ yield obj
+
+ for path in context["CONFIGURE_SUBST_FILES"]:
+ sub = self._create_substitution(ConfigFileSubstitution, context, path)
+ generated_files.add(str(sub.relpath))
+ yield sub
+
+ for defines_var, cls, backend_flags in (
+ ("DEFINES", Defines, (computed_flags, computed_as_flags)),
+ ("HOST_DEFINES", HostDefines, (computed_host_flags,)),
+ ("WASM_DEFINES", WasmDefines, (computed_wasm_flags,)),
+ ):
+ defines = context.get(defines_var)
+ if defines:
+ defines_obj = cls(context, defines)
+ if isinstance(defines_obj, Defines):
+ # DEFINES have consumers outside the compile command line,
+ # HOST_DEFINES do not.
+ yield defines_obj
+ else:
+                # If we don't have explicitly set defines, we still need to
+                # make sure any initialized values end up in the computed flags.
+ defines_obj = cls(context, context[defines_var])
+
+ defines_from_obj = list(defines_obj.get_defines())
+ if defines_from_obj:
+ for flags in backend_flags:
+ flags.resolve_flags(defines_var, defines_from_obj)
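+            # Illustrative: DEFINES["NDEBUG"] = True and DEFINES["VERSION"] = 42
+            # in a moz.build resolve here to ["-DNDEBUG", "-DVERSION=42"].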
+
+ idl_vars = (
+ "GENERATED_EVENTS_WEBIDL_FILES",
+ "GENERATED_WEBIDL_FILES",
+ "PREPROCESSED_TEST_WEBIDL_FILES",
+ "PREPROCESSED_WEBIDL_FILES",
+ "TEST_WEBIDL_FILES",
+ "WEBIDL_FILES",
+ "IPDL_SOURCES",
+ "PREPROCESSED_IPDL_SOURCES",
+ "XPCOM_MANIFESTS",
+ )
+ for context_var in idl_vars:
+ for name in context.get(context_var, []):
+ self._idls[context_var].add(mozpath.join(context.srcdir, name))
+ # WEBIDL_EXAMPLE_INTERFACES do not correspond to files.
+ for name in context.get("WEBIDL_EXAMPLE_INTERFACES", []):
+ self._idls["WEBIDL_EXAMPLE_INTERFACES"].add(name)
+
+ local_includes = []
+ for local_include in context.get("LOCAL_INCLUDES", []):
+ full_path = local_include.full_path
+ if not isinstance(local_include, ObjDirPath):
+ if not os.path.exists(full_path):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)"
+ % (local_include, full_path),
+ context,
+ )
+ if not os.path.isdir(full_path):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES "
+ "is a filename, but a directory is required: %s "
+ "(resolved to %s)" % (local_include, full_path),
+ context,
+ )
+ if (
+ full_path == context.config.topsrcdir
+ or full_path == context.config.topobjdir
+ ):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES "
+ "(%s) resolves to the topsrcdir or topobjdir (%s), which is "
+ "not allowed" % (local_include, full_path),
+ context,
+ )
+ include_obj = LocalInclude(context, local_include)
+ local_includes.append(include_obj.path.full_path)
+ yield include_obj
+
+ computed_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_as_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_host_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_wasm_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+
+ for obj in self._handle_linkables(context, passthru, generated_files):
+ yield obj
+
+ generated_files.update(
+ [
+ "%s%s" % (k, self.config.substs.get("BIN_SUFFIX", ""))
+ for k in self._binaries.keys()
+ ]
+ )
+
+ components = []
+ for var, cls in (
+ ("EXPORTS", Exports),
+ ("FINAL_TARGET_FILES", FinalTargetFiles),
+ ("FINAL_TARGET_PP_FILES", FinalTargetPreprocessedFiles),
+ ("LOCALIZED_FILES", LocalizedFiles),
+ ("LOCALIZED_PP_FILES", LocalizedPreprocessedFiles),
+ ("OBJDIR_FILES", ObjdirFiles),
+ ("OBJDIR_PP_FILES", ObjdirPreprocessedFiles),
+ ("TEST_HARNESS_FILES", TestHarnessFiles),
+ ):
+ all_files = context.get(var)
+ if not all_files:
+ continue
+ if dist_install is False and var != "TEST_HARNESS_FILES":
+ raise SandboxValidationError(
+ "%s cannot be used with DIST_INSTALL = False" % var, context
+ )
+ has_prefs = False
+ has_resources = False
+ for base, files in all_files.walk():
+ if var == "TEST_HARNESS_FILES" and not base:
+ raise SandboxValidationError(
+ "Cannot install files to the root of TEST_HARNESS_FILES",
+ context,
+ )
+ if base == "components":
+ components.extend(files)
+ if base == "defaults/pref":
+ has_prefs = True
+ if mozpath.split(base)[0] == "res":
+ has_resources = True
+ for f in files:
+ if (
+ var
+ in (
+ "FINAL_TARGET_PP_FILES",
+ "OBJDIR_PP_FILES",
+ "LOCALIZED_PP_FILES",
+ )
+ and not isinstance(f, SourcePath)
+ ):
+ raise SandboxValidationError(
+ ("Only source directory paths allowed in " + "%s: %s")
+ % (var, f),
+ context,
+ )
+ if var.startswith("LOCALIZED_"):
+                        if isinstance(f, SourcePath) and not (
+                            f.startswith("en-US/") or "locales/en-US/" in f
+                        ):
+                            raise SandboxValidationError(
+                                "%s paths must start with `en-US/` or "
+                                "contain `locales/en-US/`: %s" % (var, f),
+                                context,
+                            )
+
+ if not isinstance(f, ObjDirPath):
+ path = f.full_path
+ if "*" not in path and not os.path.exists(path):
+ raise SandboxValidationError(
+ "File listed in %s does not exist: %s" % (var, path),
+ context,
+ )
+ else:
+ # TODO: Bug 1254682 - The '/' check is to allow
+ # installing files generated from other directories,
+ # which is done occasionally for tests. However, it
+ # means we don't fail early if the file isn't actually
+ # created by the other moz.build file.
+ if f.target_basename not in generated_files and "/" not in f:
+ raise SandboxValidationError(
+                                "Objdir file listed in %s not in "
+                                "GENERATED_FILES: %s"
+ % (var, f),
+ context,
+ )
+
+ if var.startswith("LOCALIZED_"):
+ # Further require that LOCALIZED_FILES are from
+ # LOCALIZED_GENERATED_FILES.
+ if f.target_basename not in localized_generated_files:
+ raise SandboxValidationError(
+                                    "Objdir file listed in %s not in "
+                                    "LOCALIZED_GENERATED_FILES: %s"
+ % (var, f),
+ context,
+ )
+ else:
+ # Additionally, don't allow LOCALIZED_GENERATED_FILES to be used
+ # in anything *but* LOCALIZED_FILES.
+ if f.target_basename in localized_generated_files:
+ raise SandboxValidationError(
+ (
+ "Outputs of LOCALIZED_GENERATED_FILES cannot "
+ "be used in %s: %s"
+ )
+ % (var, f),
+ context,
+ )
+
+ # Addons (when XPI_NAME is defined) and Applications (when
+ # DIST_SUBDIR is defined) use a different preferences directory
+        # (defaults/preferences) from the one the GRE uses (defaults/pref).
+ # Hence, we move the files from the latter to the former in that
+ # case.
+ if has_prefs and (context.get("XPI_NAME") or context.get("DIST_SUBDIR")):
+ all_files.defaults.preferences += all_files.defaults.pref
+ del all_files.defaults._children["pref"]
+
+ if has_resources and (
+ context.get("DIST_SUBDIR") or context.get("XPI_NAME")
+ ):
+ raise SandboxValidationError(
+ "RESOURCES_FILES cannot be used with DIST_SUBDIR or " "XPI_NAME.",
+ context,
+ )
+
+ yield cls(context, all_files)
+
+ for c in components:
+ if c.endswith(".manifest"):
+ yield ChromeManifestEntry(
+ context,
+ "chrome.manifest",
+ Manifest("components", mozpath.basename(c)),
+ )
+
+ rust_tests = context.get("RUST_TESTS", [])
+ if rust_tests:
+ # TODO: more sophisticated checking of the declared name vs.
+ # contents of the Cargo.toml file.
+ features = context.get("RUST_TEST_FEATURES", [])
+
+ yield RustTests(context, rust_tests, features)
+
+ for obj in self._process_test_manifests(context):
+ yield obj
+
+ for obj in self._process_jar_manifests(context):
+ yield obj
+
+ computed_as_flags.resolve_flags("MOZBUILD", context.get("ASFLAGS"))
+
+ if context.get("USE_NASM") is True:
+ nasm = context.config.substs.get("NASM")
+ if not nasm:
+ raise SandboxValidationError("nasm is not available", context)
+ passthru.variables["AS"] = nasm
+ passthru.variables["AS_DASH_C_FLAG"] = ""
+ passthru.variables["ASOUTOPTION"] = "-o "
+ computed_as_flags.resolve_flags(
+ "OS", context.config.substs.get("NASM_ASFLAGS", [])
+ )
+
+ if context.get("USE_INTEGRATED_CLANGCL_AS") is True:
+ if context.config.substs.get("CC_TYPE") != "clang-cl":
+ raise SandboxValidationError("clang-cl is not available", context)
+ passthru.variables["AS"] = context.config.substs.get("CC")
+ passthru.variables["AS_DASH_C_FLAG"] = "-c"
+ passthru.variables["ASOUTOPTION"] = "-o "
+
+ if passthru.variables:
+ yield passthru
+
+ if context.objdir in self._compile_dirs:
+ self._compile_flags[context.objdir] = computed_flags
+ yield computed_link_flags
+
+ if context.objdir in self._asm_compile_dirs:
+ self._compile_as_flags[context.objdir] = computed_as_flags
+
+ if context.objdir in self._host_compile_dirs:
+ yield computed_host_flags
+
+ if context.objdir in self._wasm_compile_dirs:
+ yield computed_wasm_flags
+
+ def _create_substitution(self, cls, context, path):
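+        # e.g. (illustrative) for a CONFIGURE_SUBST_FILES entry "config.h",
+        # this reads the input template "config.h.in" and writes the
+        # substituted "config.h" into the objdir.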
+ sub = cls(context)
+ sub.input_path = "%s.in" % path.full_path
+ sub.output_path = path.translated
+ sub.relpath = path
+
+ return sub
+
+ def _process_xpidl(self, context):
+ # XPIDL source files get processed and turned into .h and .xpt files.
+ # If there are multiple XPIDL files in a directory, they get linked
+ # together into a final .xpt, which has the name defined by
+ # XPIDL_MODULE.
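+        #
+        # Illustrative moz.build input (hypothetical names):
+        #   XPIDL_SOURCES += ["nsIFoo.idl", "nsIBar.idl"]
+        #   XPIDL_MODULE = "foo"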
+ xpidl_module = context["XPIDL_MODULE"]
+
+ if not xpidl_module:
+ if context["XPIDL_SOURCES"]:
+ raise SandboxValidationError(
+ "XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.",
+ context,
+ )
+ return
+
+ if not context["XPIDL_SOURCES"]:
+ raise SandboxValidationError(
+ "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES",
+ context,
+ )
+
+ if context["DIST_INSTALL"] is False:
+ self.log(
+ logging.WARN,
+ "mozbuild_warning",
+ dict(path=context.main_path),
+ "{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.",
+ )
+
+ for idl in context["XPIDL_SOURCES"]:
+ if not os.path.exists(idl.full_path):
+ raise SandboxValidationError(
+ "File %s from XPIDL_SOURCES " "does not exist" % idl.full_path,
+ context,
+ )
+
+ yield XPIDLModule(context, xpidl_module, context["XPIDL_SOURCES"])
+
+ def _process_generated_files(self, context):
+ for path in context["CONFIGURE_DEFINE_FILES"]:
+ script = mozpath.join(
+ mozpath.dirname(mozpath.dirname(__file__)),
+ "action",
+ "process_define_files.py",
+ )
+ yield GeneratedFile(
+ context,
+ script,
+ "process_define_file",
+ six.text_type(path),
+ [Path(context, path + ".in")],
+ )
+
+ generated_files = context.get("GENERATED_FILES") or []
+ localized_generated_files = context.get("LOCALIZED_GENERATED_FILES") or []
+ if not (generated_files or localized_generated_files):
+ return
+
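+        # Illustrative moz.build input this loop consumes (hypothetical
+        # names):
+        #   GENERATED_FILES += ["quotes.h"]
+        #   quotes = GENERATED_FILES["quotes.h"]
+        #   quotes.script = "gen_quotes.py"
+        #   quotes.inputs = ["quotes.txt"]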
+ for (localized, gen) in (
+ (False, generated_files),
+ (True, localized_generated_files),
+ ):
+ for f in gen:
+ flags = gen[f]
+ outputs = f
+ inputs = []
+ if flags.script:
+ method = "main"
+ script = SourcePath(context, flags.script).full_path
+
+ # Deal with cases like "C:\\path\\to\\script.py:function".
+ if ".py:" in script:
+ script, method = script.rsplit(".py:", 1)
+ script += ".py"
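+                    # e.g. (illustrative) "gen.py:make_header" splits into
+                    # script "gen.py" and method "make_header"; a bare
+                    # "gen.py" keeps the default method "main".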
+
+ if not os.path.exists(script):
+ raise SandboxValidationError(
+ "Script for generating %s does not exist: %s" % (f, script),
+ context,
+ )
+ if os.path.splitext(script)[1] != ".py":
+ raise SandboxValidationError(
+ "Script for generating %s does not end in .py: %s"
+ % (f, script),
+ context,
+ )
+ else:
+ script = None
+ method = None
+
+ for i in flags.inputs:
+ p = Path(context, i)
+ if isinstance(p, SourcePath) and not os.path.exists(p.full_path):
+ raise SandboxValidationError(
+ "Input for generating %s does not exist: %s"
+ % (f, p.full_path),
+ context,
+ )
+ inputs.append(p)
+
+ yield GeneratedFile(
+ context,
+ script,
+ method,
+ outputs,
+ inputs,
+ flags.flags,
+ localized=localized,
+ force=flags.force,
+ )
+
+ def _process_test_manifests(self, context):
+ for prefix, info in TEST_MANIFESTS.items():
+ for path, manifest in context.get("%s_MANIFESTS" % prefix, []):
+ for obj in self._process_test_manifest(context, info, path, manifest):
+ yield obj
+
+ for flavor in REFTEST_FLAVORS:
+ for path, manifest in context.get("%s_MANIFESTS" % flavor.upper(), []):
+ for obj in self._process_reftest_manifest(
+ context, flavor, path, manifest
+ ):
+ yield obj
+
+ def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
+ flavor, install_root, install_subdir, package_tests = info
+
+ path = manifest_path.full_path
+ manifest_dir = mozpath.dirname(path)
+ manifest_reldir = mozpath.dirname(
+ mozpath.relpath(path, context.config.topsrcdir)
+ )
+ manifest_sources = [
+ mozpath.relpath(pth, context.config.topsrcdir)
+ for pth in mpmanifest.source_files
+ ]
+ install_prefix = mozpath.join(install_root, install_subdir)
+
+ try:
+ if not mpmanifest.tests:
+ raise SandboxValidationError("Empty test manifest: %s" % path, context)
+
+ defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
+ obj = TestManifest(
+ context,
+ path,
+ mpmanifest,
+ flavor=flavor,
+ install_prefix=install_prefix,
+ relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
+ sources=manifest_sources,
+ dupe_manifest="dupe-manifest" in defaults,
+ )
+
+ filtered = mpmanifest.tests
+
+ missing = [t["name"] for t in filtered if not os.path.exists(t["path"])]
+ if missing:
+ raise SandboxValidationError(
+                    "Test manifest (%s) lists "
+                    "tests that do not exist: %s" % (path, ", ".join(missing)),
+ context,
+ )
+
+ out_dir = mozpath.join(install_prefix, manifest_reldir)
+
+ def process_support_files(test):
+ install_info = self._test_files_converter.convert_support_files(
+ test, install_root, manifest_dir, out_dir
+ )
+
+ obj.pattern_installs.extend(install_info.pattern_installs)
+ for source, dest in install_info.installs:
+ obj.installs[source] = (dest, False)
+ obj.external_installs |= install_info.external_installs
+ for install_path in install_info.deferred_installs:
+ if all(
+ [
+ "*" not in install_path,
+ not os.path.isfile(
+ mozpath.join(context.config.topsrcdir, install_path[2:])
+ ),
+ install_path not in install_info.external_installs,
+ ]
+ ):
+ raise SandboxValidationError(
+ "Error processing test "
+ "manifest %s: entry in support-files not present "
+ "in the srcdir: %s" % (path, install_path),
+ context,
+ )
+
+ obj.deferred_installs |= install_info.deferred_installs
+
+ for test in filtered:
+ obj.tests.append(test)
+
+ # Some test files are compiled and should not be copied into the
+ # test package. They function as identifiers rather than files.
+ if package_tests:
+ manifest_relpath = mozpath.relpath(
+ test["path"], mozpath.dirname(test["manifest"])
+ )
+ obj.installs[mozpath.normpath(test["path"])] = (
+ (mozpath.join(out_dir, manifest_relpath)),
+ True,
+ )
+
+ process_support_files(test)
+
+            # Iterate over values only so we don't clobber `path`, which the
+            # error messages below still reference.
+            for m_defaults in mpmanifest.manifest_defaults.values():
+ process_support_files(m_defaults)
+
+ # We also copy manifests into the output directory,
+ # including manifests from [include:foo] directives.
+ for mpath in mpmanifest.manifests():
+ mpath = mozpath.normpath(mpath)
+ out_path = mozpath.join(out_dir, mozpath.basename(mpath))
+ obj.installs[mpath] = (out_path, False)
+
+ # Some manifests reference files that are auto generated as
+ # part of the build or shouldn't be installed for some
+ # reason. Here, we prune those files from the install set.
+ # FUTURE we should be able to detect autogenerated files from
+ # other build metadata. Once we do that, we can get rid of this.
+ for f in defaults.get("generated-files", "").split():
+                # Convert the KeyError into a SandboxValidationError,
+                # otherwise the stack trace isn't informative.
+ try:
+ del obj.installs[mozpath.join(manifest_dir, f)]
+ except KeyError:
+ raise SandboxValidationError(
+ "Error processing test "
+ "manifest %s: entry in generated-files not present "
+ "elsewhere in manifest: %s" % (path, f),
+ context,
+ )
+
+ yield obj
+        except Exception:
+ raise SandboxValidationError(
+ "Error processing test "
+ "manifest file %s: %s"
+ % (path, "\n".join(traceback.format_exception(*sys.exc_info()))),
+ context,
+ )
+
+ def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
+ manifest_full_path = manifest_path.full_path
+ manifest_reldir = mozpath.dirname(
+ mozpath.relpath(manifest_full_path, context.config.topsrcdir)
+ )
+
+        # reftest manifests don't come from the manifest parser, but they are
+ # similar enough that we can use the same emitted objects. Note
+ # that we don't perform any installs for reftests.
+ obj = TestManifest(
+ context,
+ manifest_full_path,
+ manifest,
+ flavor=flavor,
+ install_prefix="%s/" % flavor,
+ relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
+ )
+        obj.tests = sorted(manifest.tests, key=lambda t: t["path"])
+
+ yield obj
+
+ def _process_jar_manifests(self, context):
+ jar_manifests = context.get("JAR_MANIFESTS", [])
+ if len(jar_manifests) > 1:
+ raise SandboxValidationError(
+ "While JAR_MANIFESTS is a list, "
+ "it is currently limited to one value.",
+ context,
+ )
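+
+        # Illustrative declaration consumed here, from a moz.build file:
+        #   JAR_MANIFESTS += ["jar.mn"]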
+
+ for path in jar_manifests:
+ yield JARManifest(context, path)
+
+ # Temporary test to look for jar.mn files that creep in without using
+        # the new declaration. Before, we didn't require jar.mn files to be
+        # declared anywhere (they were discovered). This will detect people
+ # relying on the old behavior.
+ if os.path.exists(os.path.join(context.srcdir, "jar.mn")):
+ if "jar.mn" not in jar_manifests:
+ raise SandboxValidationError(
+ "A jar.mn exists but it "
+ "is not referenced in the moz.build file. "
+ "Please define JAR_MANIFESTS.",
+ context,
+ )
+
+ def _emit_directory_traversal_from_context(self, context):
+ o = DirectoryTraversal(context)
+ o.dirs = context.get("DIRS", [])
+
+ # Some paths have a subconfigure, yet also have a moz.build. Those
+ # shouldn't end up in self._external_paths.
+ if o.objdir:
+ self._external_paths -= {o.relobjdir}
+
+ yield o
diff --git a/python/mozbuild/mozbuild/frontend/gyp_reader.py b/python/mozbuild/mozbuild/frontend/gyp_reader.py
new file mode 100644
index 0000000000..cd69dfddce
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -0,0 +1,497 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import time
+
+import gyp
+import gyp.msvs_emulation
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+
+from mozbuild import shellutil
+from mozbuild.util import expand_variables
+
+from .context import VARIABLES, ObjDirPath, SourcePath, TemplateContext
+from .sandbox import alphabetical_sorted
+
+# Define this module as gyp.generator.mozbuild so that gyp can use it
+# as a generator under the name "mozbuild".
+sys.modules["gyp.generator.mozbuild"] = sys.modules[__name__]
+
+# build/gyp_chromium does this:
+# script_dir = os.path.dirname(os.path.realpath(__file__))
+# chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+# sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+# We're not importing gyp_chromium, but we want both script_dir and
+# chrome_src for the default includes, so go backwards from the pylib
+# directory, which is the parent directory of gyp module.
+chrome_src = mozpath.abspath(
+ mozpath.join(mozpath.dirname(gyp.__file__), "../../../../..")
+)
+script_dir = mozpath.join(chrome_src, "build")
+
+
+# Default variables gyp uses when evaluating gyp files.
+generator_default_variables = {}
+for dirname in [
+ "INTERMEDIATE_DIR",
+ "SHARED_INTERMEDIATE_DIR",
+ "PRODUCT_DIR",
+ "LIB_DIR",
+ "SHARED_LIB_DIR",
+]:
+ # Some gyp steps fail if these are empty(!).
+ generator_default_variables[dirname] = "$" + dirname
+
+for unused in [
+ "RULE_INPUT_PATH",
+ "RULE_INPUT_ROOT",
+ "RULE_INPUT_NAME",
+ "RULE_INPUT_DIRNAME",
+ "RULE_INPUT_EXT",
+ "EXECUTABLE_PREFIX",
+ "EXECUTABLE_SUFFIX",
+ "STATIC_LIB_PREFIX",
+ "STATIC_LIB_SUFFIX",
+ "SHARED_LIB_PREFIX",
+ "SHARED_LIB_SUFFIX",
+ "LINKER_SUPPORTS_ICF",
+]:
+ generator_default_variables[unused] = ""
+
+
+class GypContext(TemplateContext):
+ """Specialized Context for use with data extracted from Gyp.
+
+ config is the ConfigEnvironment for this context.
+ relobjdir is the object directory that will be used for this context,
+ relative to the topobjdir defined in the ConfigEnvironment.
+ """
+
+ def __init__(self, config, relobjdir):
+ self._relobjdir = relobjdir
+ TemplateContext.__init__(
+ self, template="Gyp", allowed_variables=VARIABLES, config=config
+ )
+
+
+def handle_actions(actions, context, action_overrides):
+ idir = "$INTERMEDIATE_DIR/"
+ for action in actions:
+ name = action["action_name"]
+ if name not in action_overrides:
+ raise RuntimeError("GYP action %s not listed in action_overrides" % name)
+ outputs = action["outputs"]
+ if len(outputs) > 1:
+ raise NotImplementedError(
+ "GYP actions with more than one output not supported: %s" % name
+ )
+ output = outputs[0]
+ if not output.startswith(idir):
+ raise NotImplementedError(
+ "GYP actions outputting to somewhere other than "
+ "<(INTERMEDIATE_DIR) not supported: %s" % output
+ )
+ output = output[len(idir) :]
+ context["GENERATED_FILES"] += [output]
+ g = context["GENERATED_FILES"][output]
+ g.script = action_overrides[name]
+ g.inputs = action["inputs"]
+
+
+def handle_copies(copies, context):
+ dist = "$PRODUCT_DIR/dist/"
+ for copy in copies:
+ dest = copy["destination"]
+ if not dest.startswith(dist):
+ raise NotImplementedError(
+ "GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s"
+ % dest
+ )
+ dest_paths = dest[len(dist) :].split("/")
+ exports = context["EXPORTS"]
+ while dest_paths:
+ exports = getattr(exports, dest_paths.pop(0))
+ exports += sorted(copy["files"], key=lambda x: x.lower())
+
+
+def process_gyp_result(
+ gyp_result,
+ gyp_dir_attrs,
+ path,
+ config,
+ output,
+ non_unified_sources,
+ action_overrides,
+):
+ flat_list, targets, data = gyp_result
+ no_chromium = gyp_dir_attrs.no_chromium
+ no_unified = gyp_dir_attrs.no_unified
+
+    # Process all targets from the given gyp files and their dependencies.
+ # The path given to AllTargets needs to use os.sep, while the frontend code
+ # gives us paths normalized with forward slash separator.
+ for target in sorted(
+ gyp.common.AllTargets(flat_list, targets, path.replace("/", os.sep))
+ ):
+ build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
+
+ # Each target is given its own objdir. The base of that objdir
+ # is derived from the relative path from the root gyp file path
+ # to the current build_file, placed under the given output
+ # directory. Since several targets can be in a given build_file,
+ # separate them in subdirectories using the build_file basename
+ # and the target_name.
+ reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path))
+ subdir = "%s_%s" % (
+ mozpath.splitext(mozpath.basename(build_file))[0],
+ target_name,
+ )
+ # Emit a context for each target.
+ context = GypContext(
+ config,
+ mozpath.relpath(mozpath.join(output, reldir, subdir), config.topobjdir),
+ )
+ context.add_source(mozpath.abspath(build_file))
+ # The list of included files returned by gyp are relative to build_file
+ for f in data[build_file]["included_files"]:
+ context.add_source(
+ mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f))
+ )
+
+ spec = targets[target]
+
+ # Derive which gyp configuration to use based on MOZ_DEBUG.
+ c = "Debug" if config.substs.get("MOZ_DEBUG") else "Release"
+ if c not in spec["configurations"]:
+ raise RuntimeError(
+ "Missing %s gyp configuration for target %s "
+ "in %s" % (c, target_name, build_file)
+ )
+ target_conf = spec["configurations"][c]
+
+ if "actions" in spec:
+ handle_actions(spec["actions"], context, action_overrides)
+ if "copies" in spec:
+ handle_copies(spec["copies"], context)
+
+ use_libs = []
+ libs = []
+
+ def add_deps(s):
+ for t in s.get("dependencies", []) + s.get("dependencies_original", []):
+ ty = targets[t]["type"]
+ if ty in ("static_library", "shared_library"):
+ l = targets[t]["target_name"]
+ if l not in use_libs:
+ use_libs.append(l)
+ # Manually expand out transitive dependencies--
+ # gyp won't do this for static libs or none targets.
+ if ty in ("static_library", "none"):
+ add_deps(targets[t])
+ libs.extend(spec.get("libraries", []))
+
+ # XXX: this sucks, but webrtc breaks with this right now because
+ # it builds a library called 'gtest' and we just get lucky
+ # that it isn't in USE_LIBS by that name anywhere.
+ if no_chromium:
+ add_deps(spec)
+
+ os_libs = []
+ for l in libs:
+ if l.startswith("-"):
+ if l.startswith("-l"):
+ # Remove "-l" for consumption in OS_LIBS. Other flags
+ # are passed through unchanged.
+ l = l[2:]
+ if l not in os_libs:
+ os_libs.append(l)
+ elif l.endswith(".lib"):
+ l = l[:-4]
+ if l not in os_libs:
+ os_libs.append(l)
+ elif l:
+ # For library names passed in from moz.build.
+ l = os.path.basename(l)
+ if l not in use_libs:
+ use_libs.append(l)
+
+ if spec["type"] == "none":
+ if not ("actions" in spec or "copies" in spec):
+ continue
+ elif spec["type"] in ("static_library", "shared_library", "executable"):
+ # Remove leading 'lib' from the target_name if any, and use as
+ # library name.
+ name = six.ensure_text(spec["target_name"])
+ if spec["type"] in ("static_library", "shared_library"):
+ if name.startswith("lib"):
+ name = name[3:]
+ context["LIBRARY_NAME"] = name
+ else:
+ context["PROGRAM"] = name
+ if spec["type"] == "shared_library":
+ context["FORCE_SHARED_LIB"] = True
+ elif (
+ spec["type"] == "static_library"
+ and spec.get("variables", {}).get("no_expand_libs", "0") == "1"
+ ):
+ # PSM links a NSS static library, but our folded libnss
+ # doesn't actually export everything that all of the
+ # objects within would need, so that one library
+ # should be built as a real static library.
+ context["NO_EXPAND_LIBS"] = True
+ if use_libs:
+ context["USE_LIBS"] = sorted(use_libs, key=lambda s: s.lower())
+ if os_libs:
+ context["OS_LIBS"] = os_libs
+ # gyp files contain headers and asm sources in sources lists.
+ sources = []
+ unified_sources = []
+ extensions = set()
+ use_defines_in_asflags = False
+ for f in spec.get("sources", []):
+ ext = mozpath.splitext(f)[-1]
+ extensions.add(ext)
+ if f.startswith("$INTERMEDIATE_DIR/"):
+ s = ObjDirPath(context, f.replace("$INTERMEDIATE_DIR/", "!"))
+ else:
+ s = SourcePath(context, f)
+ if ext == ".h":
+ continue
+ if ext == ".def":
+ context["SYMBOLS_FILE"] = s
+ elif ext != ".S" and not no_unified and s not in non_unified_sources:
+ unified_sources.append(s)
+ else:
+ sources.append(s)
+ # The Mozilla build system doesn't use DEFINES for building
+ # ASFILES.
+ if ext == ".s":
+ use_defines_in_asflags = True
+
+ # The context expects alphabetical order when adding sources
+ context["SOURCES"] = alphabetical_sorted(sources)
+ context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)
+
+ defines = target_conf.get("defines", [])
+ if config.substs["CC_TYPE"] == "clang-cl" and no_chromium:
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
+ # Hack: MsvsSettings._TargetConfig tries to compare a str to an int,
+ # so convert manually.
+ msvs_settings.vs_version.short_name = int(
+ msvs_settings.vs_version.short_name
+ )
+ defines.extend(msvs_settings.GetComputedDefines(c))
+ for define in defines:
+ if "=" in define:
+ name, value = define.split("=", 1)
+ context["DEFINES"][name] = value
+ else:
+ context["DEFINES"][define] = True
+
+ product_dir_dist = "$PRODUCT_DIR/dist/"
+ for include in target_conf.get("include_dirs", []):
+ if include.startswith(product_dir_dist):
+ # special-case includes of <(PRODUCT_DIR)/dist/ to match
+ # handle_copies above. This is used for NSS' exports.
+ include = "!/dist/include/" + include[len(product_dir_dist) :]
+ elif include.startswith(config.topobjdir):
+ # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
+ include = "!/" + mozpath.relpath(include, config.topobjdir)
+ else:
+ # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
+ #
+ # NB: gyp files sometimes have actual absolute paths (e.g.
+ # /usr/include32) and sometimes paths that moz.build considers
+ # absolute, i.e. starting from topsrcdir. There's no good way
+ # to tell them apart here, and the actual absolute paths are
+ # likely bogus. In any event, actual absolute paths will be
+ # filtered out by trying to find them in topsrcdir.
+ #
+ # We do allow !- and %-prefixed paths, assuming they come
+ # from moz.build and will be handled the same way as if they
+ # were given to LOCAL_INCLUDES in moz.build.
+ if include.startswith("/"):
+ resolved = mozpath.abspath(
+ mozpath.join(config.topsrcdir, include[1:])
+ )
+ elif not include.startswith(("!", "%")):
+ resolved = mozpath.abspath(
+ mozpath.join(mozpath.dirname(build_file), include)
+ )
+ if not include.startswith(("!", "%")) and not os.path.exists(
+ resolved
+ ):
+ continue
+ context["LOCAL_INCLUDES"] += [include]
+
+ context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
+ if use_defines_in_asflags and defines:
+ context["ASFLAGS"] += ["-D" + d for d in defines]
+ if config.substs["OS_TARGET"] == "SunOS":
+ context["LDFLAGS"] = target_conf.get("ldflags", [])
+ flags = target_conf.get("cflags_mozilla", [])
+ if flags:
+ suffix_map = {
+ ".c": "CFLAGS",
+ ".cpp": "CXXFLAGS",
+ ".cc": "CXXFLAGS",
+ ".m": "CMFLAGS",
+ ".mm": "CMMFLAGS",
+ }
+ variables = (suffix_map[e] for e in extensions if e in suffix_map)
+ for var in variables:
+ for f in flags:
+ # We may be getting make variable references out of the
+ # gyp data, and we don't want those in emitted data, so
+ # substitute them with their actual value.
+ f = expand_variables(f, config.substs).split()
+ if not f:
+ continue
+ # the result may be a string or a list.
+ if isinstance(f, six.string_types):
+ context[var].append(f)
+ else:
+ context[var].extend(f)
+ else:
+ # Ignore other types because we don't have
+ # anything using them, and we're not testing them. They can be
+ # added when that becomes necessary.
+ raise NotImplementedError("Unsupported gyp target type: %s" % spec["type"])
+
+ if not no_chromium:
+ # Add some features to all contexts. Put here in case LOCAL_INCLUDES
+ # order matters.
+ context["LOCAL_INCLUDES"] += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ ]
+ # These get set via VC project file settings for normal GYP builds.
+ if config.substs["OS_TARGET"] == "WINNT":
+ context["DEFINES"]["UNICODE"] = True
+ context["DEFINES"]["_UNICODE"] = True
+ context["COMPILE_FLAGS"]["OS_INCLUDES"] = []
+
+ for key, value in gyp_dir_attrs.sandbox_vars.items():
+ if context.get(key) and isinstance(context[key], list):
+            # If we have a key from sandbox_vars that's also been
+            # populated here, we use the value from sandbox_vars as our
+ # basis rather than overriding outright.
+ context[key] = value + context[key]
+ elif context.get(key) and isinstance(context[key], dict):
+ context[key].update(value)
+ else:
+ context[key] = value
+
+ yield context
+
+
+# A version of gyp.Load that doesn't return the generator (because module objects
+# aren't Pickle-able, and we don't use it anyway).
+def load_gyp(*args):
+ _, flat_list, targets, data = gyp.Load(*args)
+ return flat_list, targets, data
+
+
+class GypProcessor(object):
+ """Reads a gyp configuration in the background using the given executor and
+ emits GypContexts for the backend to process.
+
+ config is a ConfigEnvironment, path is the path to a root gyp configuration
+ file, and output is the base path under which the objdir for the various
+ gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
+ from moz.build.
+ """
+
+ def __init__(
+ self,
+ config,
+ gyp_dir_attrs,
+ path,
+ output,
+ executor,
+ action_overrides,
+ non_unified_sources,
+ ):
+ self._path = path
+ self._config = config
+ self._output = output
+ self._non_unified_sources = non_unified_sources
+ self._gyp_dir_attrs = gyp_dir_attrs
+ self._action_overrides = action_overrides
+ self.execution_time = 0.0
+ self._results = []
+
+ # gyp expects plain str instead of unicode. The frontend code gives us
+ # unicode strings, so convert them.
+ if config.substs["CC_TYPE"] == "clang-cl":
+ # This isn't actually used anywhere in this generator, but it's needed
+ # to override the registry detection of VC++ in gyp.
+ os.environ.update(
+ {
+ "GYP_MSVS_OVERRIDE_PATH": "fake_path",
+ "GYP_MSVS_VERSION": config.substs["MSVS_VERSION"],
+ }
+ )
+
+ params = {
+ "parallel": False,
+ "generator_flags": {},
+ "build_files": [path],
+ "root_targets": None,
+ }
+ # The NSS gyp configuration uses CC and CFLAGS to determine the
+ # floating-point ABI on arm.
+ os.environ.update(
+ CC=config.substs["CC"],
+ CFLAGS=shellutil.quote(*config.substs["CC_BASE_FLAGS"]),
+ )
+
+ if gyp_dir_attrs.no_chromium:
+ includes = []
+ depth = mozpath.dirname(path)
+ else:
+ depth = chrome_src
+ # Files that gyp_chromium always includes
+ includes = [mozpath.join(script_dir, "gyp_includes", "common.gypi")]
+ finder = FileFinder(chrome_src)
+ includes.extend(
+ mozpath.join(chrome_src, name)
+ for name, _ in finder.find("*/supplement.gypi")
+ )
+
+ str_vars = dict(gyp_dir_attrs.variables)
+ str_vars["python"] = sys.executable
+ self._gyp_loader_future = executor.submit(
+ load_gyp, [path], "mozbuild", str_vars, includes, depth, params
+ )
+
+ @property
+ def results(self):
+ if self._results:
+ for res in self._results:
+ yield res
+ else:
+ # We report our execution time as the time spent blocked in a call
+            # to `result`, which is the only case in which a gyp processor will
+ # contribute significantly to total wall time.
+ t0 = time.monotonic()
+ flat_list, targets, data = self._gyp_loader_future.result()
+ self.execution_time += time.monotonic() - t0
+ results = []
+ for res in process_gyp_result(
+ (flat_list, targets, data),
+ self._gyp_dir_attrs,
+ self._path,
+ self._config,
+ self._output,
+ self._non_unified_sources,
+ self._action_overrides,
+ ):
+ results.append(res)
+ yield res
+ self._results = results
diff --git a/python/mozbuild/mozbuild/frontend/mach_commands.py b/python/mozbuild/mozbuild/frontend/mach_commands.py
new file mode 100644
index 0000000000..6d379977df
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/mach_commands.py
@@ -0,0 +1,338 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import sys
+from collections import defaultdict
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument, SubCommand
+
+TOPSRCDIR = os.path.abspath(os.path.join(__file__, "../../../../../"))
+
+
+class InvalidPathException(Exception):
+ """Represents an error due to an invalid path."""
+
+
+@Command(
+ "mozbuild-reference",
+ category="build-dev",
+ description="View reference documentation on mozbuild files.",
+ virtualenv_name="docs",
+)
+@CommandArgument(
+ "symbol",
+ default=None,
+ nargs="*",
+ help="Symbol to view help on. If not specified, all will be shown.",
+)
+@CommandArgument(
+ "--name-only",
+ "-n",
+ default=False,
+ action="store_true",
+ help="Print symbol names only.",
+)
+def reference(command_context, symbol, name_only=False):
+ import mozbuild.frontend.context as m
+ from mozbuild.sphinx import (
+ format_module,
+ function_reference,
+ special_reference,
+ variable_reference,
+ )
+
+ if name_only:
+ for s in sorted(m.VARIABLES.keys()):
+ print(s)
+
+ for s in sorted(m.FUNCTIONS.keys()):
+ print(s)
+
+ for s in sorted(m.SPECIAL_VARIABLES.keys()):
+ print(s)
+
+ return 0
+
+    if symbol:
+ for s in symbol:
+ if s in m.VARIABLES:
+ for line in variable_reference(s, *m.VARIABLES[s]):
+ print(line)
+ continue
+ elif s in m.FUNCTIONS:
+ for line in function_reference(s, *m.FUNCTIONS[s]):
+ print(line)
+ continue
+ elif s in m.SPECIAL_VARIABLES:
+ for line in special_reference(s, *m.SPECIAL_VARIABLES[s]):
+ print(line)
+ continue
+
+ print("Could not find symbol: %s" % s)
+ return 1
+
+ return 0
+
+ for line in format_module(m):
+ print(line)
+
+ return 0
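+
+# Example invocations (illustrative):
+#   ./mach mozbuild-reference SOURCES
+#   ./mach mozbuild-reference --name-only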
+
+
+@Command(
+ "file-info", category="build-dev", description="Query for metadata about files."
+)
+def file_info(command_context):
+ """Show files metadata derived from moz.build files.
+
+ moz.build files contain "Files" sub-contexts for declaring metadata
+ against file patterns. This command suite is used to query that data.
+ """
+
+
+@SubCommand(
+ "file-info",
+ "bugzilla-component",
+ "Show Bugzilla component info for files listed.",
+)
+@CommandArgument("-r", "--rev", help="Version control revision to look up info from")
+@CommandArgument(
+ "--format",
+ choices={"json", "plain"},
+ default="plain",
+ help="Output format",
+ dest="fmt",
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_bugzilla(command_context, paths, rev=None, fmt=None):
+ """Show Bugzilla component for a set of files.
+
+ Given a requested set of files (which can be specified using
+ wildcards), print the Bugzilla component for each file.
+ """
+ components = defaultdict(set)
+ try:
+ for p, m in _get_files_info(command_context, paths, rev=rev).items():
+ components[m.get("BUG_COMPONENT")].add(p)
+ except InvalidPathException as e:
+ print(e)
+ return 1
+
+ if fmt == "json":
+ data = {}
+ for component, files in components.items():
+ if not component:
+ continue
+ for f in files:
+ data[f] = [component.product, component.component]
+
+ json.dump(data, sys.stdout, sort_keys=True, indent=2)
+ return
+ elif fmt == "plain":
+ comp_to_file = sorted(
+ (
+ "UNKNOWN"
+ if component is None
+ else "%s :: %s" % (component.product, component.component),
+ sorted(files),
+ )
+ for component, files in components.items()
+ )
+ for component, files in comp_to_file:
+ print(component)
+ for f in files:
+ print(" %s" % f)
+ else:
+ print("unhandled output format: %s" % fmt)
+ return 1
+
+
+@SubCommand(
+ "file-info", "missing-bugzilla", "Show files missing Bugzilla component info"
+)
+@CommandArgument("-r", "--rev", help="Version control revision to look up info from")
+@CommandArgument(
+ "--format",
+ choices={"json", "plain"},
+ dest="fmt",
+ default="plain",
+ help="Output format",
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_missing_bugzilla(command_context, paths, rev=None, fmt=None):
+ missing = set()
+
+ try:
+ for p, m in _get_files_info(command_context, paths, rev=rev).items():
+ if "BUG_COMPONENT" not in m:
+ missing.add(p)
+ except InvalidPathException as e:
+ print(e)
+ return 1
+
+ if fmt == "json":
+ json.dump({"missing": sorted(missing)}, sys.stdout, indent=2)
+ return
+ elif fmt == "plain":
+ for f in sorted(missing):
+ print(f)
+ else:
+ print("unhandled output format: %s" % fmt)
+ return 1
+
+
+@SubCommand(
+ "file-info",
+ "bugzilla-automation",
+ "Perform Bugzilla metadata analysis as required for automation",
+)
+@CommandArgument("out_dir", help="Where to write files")
+def bugzilla_automation(command_context, out_dir):
+ """Analyze and validate Bugzilla metadata as required by automation.
+
+ This will write out JSON and gzipped JSON files for Bugzilla metadata.
+
+ The exit code will be non-0 if Bugzilla metadata fails validation.
+ """
+ import gzip
+
+ missing_component = set()
+ seen_components = set()
+ component_by_path = {}
+
+ # TODO operate in VCS space. This requires teaching the VCS reader
+ # to understand wildcards and/or for the relative path issue in the
+ # VCS finder to be worked out.
+ for p, m in sorted(_get_files_info(command_context, ["**"]).items()):
+ if "BUG_COMPONENT" not in m:
+ missing_component.add(p)
+ print(
+ "FileToBugzillaMappingError: Missing Bugzilla component: "
+ "%s - Set the BUG_COMPONENT in the moz.build file to fix "
+ "the issue." % p
+ )
+ continue
+
+ c = m["BUG_COMPONENT"]
+ seen_components.add(c)
+ component_by_path[p] = [c.product, c.component]
+
+ print("Examined %d files" % len(component_by_path))
+
+    # We also emit a normalized version of the file-to-components mapping
+ # that requires far less storage space by eliminating redundant strings.
+ indexed_components = {
+ i: [c.product, c.component] for i, c in enumerate(sorted(seen_components))
+ }
+ components_index = {tuple(v): k for k, v in indexed_components.items()}
+ normalized_component = {"components": indexed_components, "paths": {}}
+
+ for p, c in component_by_path.items():
+ d = normalized_component["paths"]
+ while "/" in p:
+ base, p = p.split("/", 1)
+ d = d.setdefault(base, {})
+
+ d[p] = components_index[tuple(c)]
+
+ if not os.path.exists(out_dir):
+ os.makedirs(out_dir)
+
+ components_json = os.path.join(out_dir, "components.json")
+ print("Writing %s" % components_json)
+ with open(components_json, "w") as fh:
+ json.dump(component_by_path, fh, sort_keys=True, indent=2)
+
+ missing_json = os.path.join(out_dir, "missing.json")
+ print("Writing %s" % missing_json)
+ with open(missing_json, "w") as fh:
+ json.dump({"missing": sorted(missing_component)}, fh, indent=2)
+
+ indexed_components_json = os.path.join(out_dir, "components-normalized.json")
+ print("Writing %s" % indexed_components_json)
+ with open(indexed_components_json, "w") as fh:
+ # Don't indent so file is as small as possible.
+ json.dump(normalized_component, fh, sort_keys=True)
+
+ # Write compressed versions of JSON files.
+ for p in (components_json, indexed_components_json, missing_json):
+ gzip_path = "%s.gz" % p
+ print("Writing %s" % gzip_path)
+ with open(p, "rb") as ifh, gzip.open(gzip_path, "wb") as ofh:
+ while True:
+ data = ifh.read(32768)
+ if not data:
+ break
+ ofh.write(data)
+
+ # Causes CI task to fail if files are missing Bugzilla annotation.
+ if missing_component:
+ return 1
+
+
+def _get_files_info(command_context, paths, rev=None):
+ reader = command_context.mozbuild_reader(config_mode="empty", vcs_revision=rev)
+
+ # Normalize to relative from topsrcdir.
+ relpaths = []
+ for p in paths:
+ a = mozpath.abspath(p)
+ if not mozpath.basedir(a, [command_context.topsrcdir]):
+ raise InvalidPathException("path is outside topsrcdir: %s" % p)
+
+ relpaths.append(mozpath.relpath(a, command_context.topsrcdir))
+
+ # Expand wildcards.
+    # One variable is for ordering. The other is for membership tests.
+ # (Membership testing on a list can be slow.)
+ allpaths = []
+ all_paths_set = set()
+ for p in relpaths:
+ if "*" not in p:
+ if p not in all_paths_set:
+ if not os.path.exists(mozpath.join(command_context.topsrcdir, p)):
+ print("(%s does not exist; ignoring)" % p, file=sys.stderr)
+ continue
+
+ all_paths_set.add(p)
+ allpaths.append(p)
+ continue
+
+ if rev:
+ raise InvalidPathException("cannot use wildcard in version control mode")
+
+ # finder is rooted at / for now.
+ # TODO bug 1171069 tracks changing to relative.
+ search = mozpath.join(command_context.topsrcdir, p)[1:]
+ for path, f in reader.finder.find(search):
+ path = path[len(command_context.topsrcdir) :]
+ if path not in all_paths_set:
+ all_paths_set.add(path)
+ allpaths.append(path)
+
+ return reader.files_info(allpaths)
+
+
+@SubCommand(
+ "file-info", "schedules", "Show the combined SCHEDULES for the files listed."
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_schedules(command_context, paths):
+ """Show what is scheduled by the given files.
+
+ Given a requested set of files (which can be specified using
+ wildcards), print the total set of scheduled components.
+ """
+ from mozbuild.frontend.reader import BuildReader, EmptyConfig
+
+ config = EmptyConfig(TOPSRCDIR)
+ reader = BuildReader(config)
+ schedules = set()
+ for p, m in reader.files_info(paths).items():
+ schedules |= set(m["SCHEDULES"].components)
+
+ print(", ".join(schedules))
diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py
new file mode 100644
index 0000000000..9d624b37ec
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -0,0 +1,1432 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains code for reading metadata from the build system into
+# data structures.
+
+r"""Read build frontend files into data structures.
+
+In terms of code architecture, the main interface is BuildReader. BuildReader
+starts with a root mozbuild file. It creates a new execution environment for
+this file, which is represented by the Sandbox class. The Sandbox class is used
+to fill a Context, representing the output of an individual mozbuild file.
+
+The BuildReader contains basic logic for traversing a tree of mozbuild files.
+It does this by examining specific variables populated during execution.
+"""
+
+import ast
+import inspect
+import logging
+import os
+import sys
+import textwrap
+import time
+import traceback
+import types
+from collections import OrderedDict, defaultdict
+from concurrent.futures.process import ProcessPoolExecutor
+from io import StringIO
+from itertools import chain
+from multiprocessing import cpu_count
+
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+from six import string_types
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.base import ExecutionSummary
+from mozbuild.util import (
+ EmptyValue,
+ HierarchicalStringList,
+ ReadOnlyDefaultDict,
+ memoize,
+)
+
+from .context import (
+ DEPRECATION_HINTS,
+ FUNCTIONS,
+ SPECIAL_VARIABLES,
+ SUBCONTEXTS,
+ VARIABLES,
+ Context,
+ ContextDerivedValue,
+ Files,
+ SourcePath,
+ SubContext,
+ TemplateContext,
+)
+from .sandbox import (
+ Sandbox,
+ SandboxError,
+ SandboxExecutionError,
+ SandboxLoadError,
+ default_finder,
+)
+
+if six.PY2:
+ type_type = types.TypeType
+else:
+ type_type = type
+
+
+def log(logger, level, action, params, formatter):
+ logger.log(level, formatter, extra={"action": action, "params": params})
+
+
+class EmptyConfig(object):
+ """A config object that is empty.
+
+ This config object is suitable for using with a BuildReader on a vanilla
+ checkout, without any existing configuration. The config is simply
+ bootstrapped from a top source directory path.
+ """
+
+ class PopulateOnGetDict(ReadOnlyDefaultDict):
+ """A variation on ReadOnlyDefaultDict that populates during .get().
+
+ This variation is needed because CONFIG uses .get() to access members.
+ Without it, None (instead of our EmptyValue types) would be returned.
+ """
+
+ def get(self, key, default=None):
+ return self[key]
+
+ default_substs = {
+ # These 2 variables are used semi-frequently and it isn't worth
+ # changing all the instances.
+ "MOZ_APP_NAME": "empty",
+ "MOZ_CHILD_PROCESS_NAME": "empty",
+ # Needed to prevent js/src's config.status from loading.
+ "JS_STANDALONE": "1",
+ }
+
+ def __init__(self, topsrcdir, substs=None):
+ self.topsrcdir = topsrcdir
+ self.topobjdir = ""
+
+ self.substs = self.PopulateOnGetDict(EmptyValue, substs or self.default_substs)
+ self.defines = self.substs
+ self.error_is_fatal = False
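+
+    # A minimal sketch of the populate-on-get behavior (hypothetical path):
+    #   config = EmptyConfig("/src/mozilla")
+    #   config.substs.get("MOZ_APP_NAME")  # -> "empty"
+    #   config.substs.get("NOT_SET")       # -> a falsy EmptyValue, not None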
+
+
+def is_read_allowed(path, config):
+ """Whether we are allowed to load a mozbuild file at the specified path.
+
+ This is used as cheap security to ensure the build is isolated to known
+ source directories.
+
+ We are allowed to read from the main source directory and any defined
+ external source directories. The latter is to allow 3rd party applications
+ to hook into our build system.
+ """
+ assert os.path.isabs(path)
+ assert os.path.isabs(config.topsrcdir)
+
+ path = mozpath.normpath(path)
+ topsrcdir = mozpath.normpath(config.topsrcdir)
+
+ if mozpath.basedir(path, [topsrcdir]):
+ return True
+
+ return False
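+
+
+# Example (illustrative, with topsrcdir == "/src/mozilla"):
+#   is_read_allowed("/src/mozilla/dom/moz.build", config)  -> True
+#   is_read_allowed("/tmp/elsewhere/moz.build", config)    -> False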
+
+
+class SandboxCalledError(SandboxError):
+ """Represents an error resulting from calling the error() function."""
+
+ def __init__(self, file_stack, message):
+ SandboxError.__init__(self, file_stack)
+ self.message = message
+
+
+class MozbuildSandbox(Sandbox):
+ """Implementation of a Sandbox tailored for mozbuild files.
+
+ We expose a few useful functions and expose the set of variables defining
+ Mozilla's build system.
+
+ context is a Context instance.
+
+ metadata is a dict of metadata that can be used during the sandbox
+ evaluation.
+ """
+
+ def __init__(self, context, metadata={}, finder=default_finder):
+ assert isinstance(context, Context)
+
+ Sandbox.__init__(self, context, finder=finder)
+
+ self._log = logging.getLogger(__name__)
+
+ self.metadata = dict(metadata)
+ exports = self.metadata.get("exports", {})
+ self.exports = set(exports.keys())
+ context.update(exports)
+ self.templates = self.metadata.setdefault("templates", {})
+ self.special_variables = self.metadata.setdefault(
+ "special_variables", SPECIAL_VARIABLES
+ )
+ self.functions = self.metadata.setdefault("functions", FUNCTIONS)
+ self.subcontext_types = self.metadata.setdefault("subcontexts", SUBCONTEXTS)
+
+ def __getitem__(self, key):
+ if key in self.special_variables:
+ return self.special_variables[key][0](self._context)
+ if key in self.functions:
+ return self._create_function(self.functions[key])
+ if key in self.subcontext_types:
+ return self._create_subcontext(self.subcontext_types[key])
+ if key in self.templates:
+ return self._create_template_wrapper(self.templates[key])
+ return Sandbox.__getitem__(self, key)
+
+ def __contains__(self, key):
+ if any(
+ key in d
+ for d in (
+ self.special_variables,
+ self.functions,
+ self.subcontext_types,
+ self.templates,
+ )
+ ):
+ return True
+
+ return Sandbox.__contains__(self, key)
+
+ def __setitem__(self, key, value):
+ if key in self.special_variables and value is self[key]:
+ return
+ if (
+ key in self.special_variables
+ or key in self.functions
+ or key in self.subcontext_types
+ ):
+ raise KeyError('Cannot set "%s" because it is a reserved keyword' % key)
+ if key in self.exports:
+ self._context[key] = value
+ self.exports.remove(key)
+ return
+ Sandbox.__setitem__(self, key, value)
+
+ def exec_file(self, path):
+ """Override exec_file to normalize paths and restrict file loading.
+
+ Paths will be rejected if they do not fall under topsrcdir or one of
+ the external roots.
+ """
+
+        # realpath() would be needed for true security, but this check isn't
+        # a security measure, so it is omitted.
+ if not is_read_allowed(path, self._context.config):
+ raise SandboxLoadError(
+ self._context.source_stack, sys.exc_info()[2], illegal_path=path
+ )
+
+ Sandbox.exec_file(self, path)
+
+ def _export(self, varname):
+ """Export the variable to all subdirectories of the current path."""
+
+ exports = self.metadata.setdefault("exports", dict())
+ if varname in exports:
+ raise Exception("Variable has already been exported: %s" % varname)
+
+ try:
+ # Doing a regular self._context[varname] causes a set as a side
+ # effect. By calling the dict method instead, we don't have any
+ # side effects.
+ exports[varname] = dict.__getitem__(self._context, varname)
+ except KeyError:
+ self.last_name_error = KeyError("global_ns", "get_unknown", varname)
+ raise self.last_name_error
+
+ def recompute_exports(self):
+ """Recompute the variables to export to subdirectories with the current
+ values in the subdirectory."""
+
+ if "exports" in self.metadata:
+ for key in self.metadata["exports"]:
+ self.metadata["exports"][key] = self[key]
+
+ def _include(self, path):
+ """Include and exec another file within the context of this one."""
+
+ # path is a SourcePath
+ self.exec_file(path.full_path)
+
+ def _warning(self, message):
+ # FUTURE consider capturing warnings in a variable instead of printing.
+ print("WARNING: %s" % message, file=sys.stderr)
+
+ def _error(self, message):
+ if self._context.error_is_fatal:
+ raise SandboxCalledError(self._context.source_stack, message)
+ else:
+ self._warning(message)
+
+ def _template_decorator(self, func):
+ """Registers a template function."""
+
+ if not inspect.isfunction(func):
+ raise Exception(
+ "`template` is a function decorator. You must "
+ "use it as `@template` preceding a function declaration."
+ )
+
+ name = func.__name__
+
+ if name in self.templates:
+ raise KeyError(
+ 'A template named "%s" was already declared in %s.'
+ % (name, self.templates[name].path)
+ )
+
+ if name.islower() or name.isupper() or name[0].islower():
+ raise NameError("Template function names must be CamelCase.")
+
+ self.templates[name] = TemplateFunction(func, self)
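+
+        # In a moz.build file, a template is declared and used like so
+        # (illustrative):
+        #   @template
+        #   def MyLibrary(name):
+        #       LIBRARY_NAME = name
+        #
+        #   MyLibrary("foo")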
+
+ @memoize
+ def _create_subcontext(self, cls):
+ """Return a function object that creates SubContext instances."""
+
+ def fn(*args, **kwargs):
+ return cls(self._context, *args, **kwargs)
+
+ return fn
+
+ @memoize
+ def _create_function(self, function_def):
+ """Returns a function object for use within the sandbox for the given
+ function definition.
+
+ The wrapper function does type coercion on the function arguments
+ """
+ func, args_def, doc = function_def
+
+ def function(*args):
+ def coerce(arg, type):
+ if not isinstance(arg, type):
+ if issubclass(type, ContextDerivedValue):
+ arg = type(self._context, arg)
+ else:
+ arg = type(arg)
+ return arg
+
+ args = [coerce(arg, type) for arg, type in zip(args, args_def)]
+ return func(self)(*args)
+
+ return function
+
+ @memoize
+ def _create_template_wrapper(self, template):
+ """Returns a function object for use within the sandbox for the given
+        TemplateFunction instance.
+
+ When a moz.build file contains a reference to a template call, the
+ sandbox needs a function to execute. This is what this method returns.
+ That function creates a new sandbox for execution of the template.
+ After the template is executed, the data from its execution is merged
+ with the context of the calling sandbox.
+ """
+
+ def template_wrapper(*args, **kwargs):
+ context = TemplateContext(
+ template=template.name,
+ allowed_variables=self._context._allowed_variables,
+ config=self._context.config,
+ )
+ context.add_source(self._context.current_path)
+ for p in self._context.all_paths:
+ context.add_source(p)
+
+ sandbox = MozbuildSandbox(
+ context,
+ metadata={
+ # We should arguably set these defaults to something else.
+ # Templates, for example, should arguably come from the state
+ # of the sandbox from when the template was declared, not when
+ # it was instantiated. Bug 1137319.
+ "functions": self.metadata.get("functions", {}),
+ "special_variables": self.metadata.get("special_variables", {}),
+ "subcontexts": self.metadata.get("subcontexts", {}),
+ "templates": self.metadata.get("templates", {}),
+ },
+ finder=self._finder,
+ )
+
+ template.exec_in_sandbox(sandbox, *args, **kwargs)
+
+            # This is gross, but allows the merge to happen. Eventually, the
+            # merging will go away and template contexts will be emitted
+            # independently.
+ klass = self._context.__class__
+ self._context.__class__ = TemplateContext
+ # The sandbox will do all the necessary checks for these merges.
+ for key, value in context.items():
+ if isinstance(value, dict):
+ self[key].update(value)
+ elif isinstance(value, (list, HierarchicalStringList)):
+ self[key] += value
+ else:
+ self[key] = value
+ self._context.__class__ = klass
+
+ for p in context.all_paths:
+ self._context.add_source(p)
+
+ return template_wrapper
+
+
+class TemplateFunction(object):
+ def __init__(self, func, sandbox):
+ self.path = func.__code__.co_filename
+ self.name = func.__name__
+
+ code = func.__code__
+ firstlineno = code.co_firstlineno
+ lines = sandbox._current_source.splitlines(True)
+ if lines:
+ # Older versions of python 2.7 had a buggy inspect.getblock() that
+ # would ignore the last line if it didn't terminate with a newline.
+ if not lines[-1].endswith("\n"):
+ lines[-1] += "\n"
+ lines = inspect.getblock(lines[firstlineno - 1 :])
+
+ # The code lines we get out of inspect.getsourcelines look like
+ # @template
+ # def Template(*args, **kwargs):
+ # VAR = 'value'
+ # ...
+ func_ast = ast.parse("".join(lines), self.path)
+ # Remove decorators
+ func_ast.body[0].decorator_list = []
+ # Adjust line numbers accordingly
+ ast.increment_lineno(func_ast, firstlineno - 1)
+
+ # When using a custom dictionary for function globals/locals, Cpython
+ # actually never calls __getitem__ and __setitem__, so we need to
+ # modify the AST so that accesses to globals are properly directed
+ # to a dict. AST wants binary_type for this in Py2 and text_type for
+ # this in Py3, so cast to str.
+ self._global_name = str("_data")
+ # In case '_data' is a name used for a variable in the function code,
+ # prepend more underscores until we find an unused name.
+ while (
+ self._global_name in code.co_names or self._global_name in code.co_varnames
+ ):
+ self._global_name += str("_")
+ func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast)
+
+ # Execute the rewritten code. That code now looks like:
+ # def Template(*args, **kwargs):
+ # _data['VAR'] = 'value'
+ # ...
+ # The result of executing this code is the creation of a 'Template'
+ # function object in the global namespace.
+ glob = {"__builtins__": sandbox._builtins}
+ func = types.FunctionType(
+ compile(func_ast, self.path, "exec"),
+ glob,
+ self.name,
+ func.__defaults__,
+ func.__closure__,
+ )
+ func()
+
+ self._func = glob[self.name]
+
+ def exec_in_sandbox(self, sandbox, *args, **kwargs):
+ """Executes the template function in the given sandbox."""
+ # Create a new function object associated with the execution sandbox
+ glob = {self._global_name: sandbox, "__builtins__": sandbox._builtins}
+ func = types.FunctionType(
+ self._func.__code__,
+ glob,
+ self.name,
+ self._func.__defaults__,
+ self._func.__closure__,
+ )
+ sandbox.exec_function(func, args, kwargs, self.path, becomes_current_path=False)
+
+ class RewriteName(ast.NodeTransformer):
+ """AST Node Transformer to rewrite variable accesses to go through
+ a dict.
+ """
+
+ def __init__(self, sandbox, global_name):
+ self._sandbox = sandbox
+ self._global_name = global_name
+
+ def visit_Str(self, node):
+ node.s = six.ensure_text(node.s)
+ return node
+
+ def visit_Name(self, node):
+ # Modify uppercase variable references and names known to the
+ # sandbox as if they were retrieved from a dict instead.
+ if not node.id.isupper() and node.id not in self._sandbox:
+ return node
+
+ def c(new_node):
+ return ast.copy_location(new_node, node)
+
+ return c(
+ ast.Subscript(
+ value=c(ast.Name(id=self._global_name, ctx=ast.Load())),
+ slice=c(ast.Index(value=c(ast.Str(s=node.id)))),
+ ctx=node.ctx,
+ )
+ )
+
+
+class SandboxValidationError(Exception):
+ """Represents an error encountered when validating sandbox results."""
+
+ def __init__(self, message, context):
+ Exception.__init__(self, message)
+ self.context = context
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = "=" * 30
+ s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim))
+
+ s.write("The error occurred while processing the following file or ")
+ s.write("one of the files it includes:\n")
+ s.write("\n")
+ s.write(" %s/moz.build\n" % self.context.srcdir)
+ s.write("\n")
+
+ s.write("The error occurred when validating the result of ")
+ s.write("the execution. The reported error is:\n")
+ s.write("\n")
+ s.write(
+ "".join(
+ " %s\n" % l
+ for l in super(SandboxValidationError, self).__str__().splitlines()
+ )
+ )
+ s.write("\n")
+
+ return s.getvalue()
+
+
+class BuildReaderError(Exception):
+ """Represents errors encountered during BuildReader execution.
+
+ The main purpose of this class is to facilitate user-actionable error
+ messages. Execution errors should say:
+
+ - Why they failed
+ - Where they failed
+ - What can be done to prevent the error
+
+ A lot of the code in this class should arguably be inside sandbox.py.
+ However, extraction is somewhat difficult given the additions
+ MozbuildSandbox has over Sandbox (e.g. the concept of included files -
+ which affect error messages, of course).
+ """
+
+ def __init__(
+ self,
+ file_stack,
+ trace,
+ sandbox_exec_error=None,
+ sandbox_load_error=None,
+ validation_error=None,
+ other_error=None,
+ sandbox_called_error=None,
+ ):
+
+ self.file_stack = file_stack
+ self.trace = trace
+ self.sandbox_called_error = sandbox_called_error
+ self.sandbox_exec = sandbox_exec_error
+ self.sandbox_load = sandbox_load_error
+ self.validation_error = validation_error
+ self.other = other_error
+
+ @property
+ def main_file(self):
+ return self.file_stack[-1]
+
+ @property
+ def actual_file(self):
+ # We report the file that called out to the file that couldn't load.
+ if self.sandbox_load is not None:
+ if len(self.sandbox_load.file_stack) > 1:
+ return self.sandbox_load.file_stack[-2]
+
+ if len(self.file_stack) > 1:
+ return self.file_stack[-2]
+
+ if self.sandbox_error is not None and len(self.sandbox_error.file_stack):
+ return self.sandbox_error.file_stack[-1]
+
+ return self.file_stack[-1]
+
+ @property
+ def sandbox_error(self):
+ return self.sandbox_exec or self.sandbox_load or self.sandbox_called_error
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = "=" * 30
+ s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim))
+
+ s.write("The error occurred while processing the following file:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.actual_file)
+ s.write("\n")
+
+ if self.actual_file != self.main_file and not self.sandbox_load:
+ s.write("This file was included as part of processing:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.main_file)
+ s.write("\n")
+
+ if self.sandbox_error is not None:
+ self._print_sandbox_error(s)
+ elif self.validation_error is not None:
+ s.write("The error occurred when validating the result of ")
+ s.write("the execution. The reported error is:\n")
+ s.write("\n")
+ s.write(
+ "".join(
+ " %s\n" % l
+ for l in six.text_type(self.validation_error).splitlines()
+ )
+ )
+ s.write("\n")
+ else:
+ s.write("The error appears to be part of the %s " % __name__)
+ s.write("Python module itself! It is possible you have stumbled ")
+ s.write("across a legitimate bug.\n")
+ s.write("\n")
+
+ for l in traceback.format_exception(
+ type(self.other), self.other, self.trace
+ ):
+ s.write(six.ensure_text(l))
+
+ return s.getvalue()
+
+ def _print_sandbox_error(self, s):
+ # Try to find the frame of the executed code.
+ script_frame = None
+
+ # We don't currently capture the trace for SandboxCalledError.
+ # Therefore, we don't get line numbers from the moz.build file.
+ # FUTURE capture this.
+ trace = getattr(self.sandbox_error, "trace", None)
+ frames = []
+ if trace:
+ frames = traceback.extract_tb(trace)
+ for frame in frames:
+ if frame[0] == self.actual_file:
+ script_frame = frame
+
+ # Reset if we enter a new execution context. This prevents errors
+ # in this module from being attributed to a script.
+ elif frame[0] == __file__ and frame[2] == "exec_function":
+ script_frame = None
+
+ if script_frame is not None:
+ s.write("The error was triggered on line %d " % script_frame[1])
+ s.write("of this file:\n")
+ s.write("\n")
+ s.write(" %s\n" % script_frame[3])
+ s.write("\n")
+
+ if self.sandbox_called_error is not None:
+ self._print_sandbox_called_error(s)
+ return
+
+ if self.sandbox_load is not None:
+ self._print_sandbox_load_error(s)
+ return
+
+ self._print_sandbox_exec_error(s)
+
+ def _print_sandbox_called_error(self, s):
+ assert self.sandbox_called_error is not None
+
+ s.write("A moz.build file called the error() function.\n")
+ s.write("\n")
+ s.write("The error it encountered is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_called_error.message)
+ s.write("\n")
+ s.write("Correct the error condition and try again.\n")
+
+ def _print_sandbox_load_error(self, s):
+ assert self.sandbox_load is not None
+
+ if self.sandbox_load.illegal_path is not None:
+ s.write("The underlying problem is an illegal file access. ")
+ s.write("This is likely due to trying to access a file ")
+ s.write("outside of the top source directory.\n")
+ s.write("\n")
+ s.write("The path whose access was denied is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.illegal_path)
+ s.write("\n")
+ s.write("Modify the script to not access this file and ")
+ s.write("try again.\n")
+ return
+
+ if self.sandbox_load.read_error is not None:
+ if not os.path.exists(self.sandbox_load.read_error):
+ s.write("The underlying problem is we referenced a path ")
+ s.write("that does not exist. That path is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.read_error)
+ s.write("\n")
+ s.write("Either create the file if it needs to exist or ")
+ s.write("do not reference it.\n")
+ else:
+ s.write("The underlying problem is a referenced path could ")
+ s.write("not be read. The trouble path is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.read_error)
+ s.write("\n")
+ s.write("It is possible the path is not correct. Is it ")
+ s.write("pointing to a directory? It could also be a file ")
+ s.write("permissions issue. Ensure that the file is ")
+ s.write("readable.\n")
+
+ return
+
+ # This module is buggy if you see this.
+ raise AssertionError("SandboxLoadError with unhandled properties!")
+
+ def _print_sandbox_exec_error(self, s):
+ assert self.sandbox_exec is not None
+
+ inner = self.sandbox_exec.exc_value
+
+ if isinstance(inner, SyntaxError):
+ s.write("The underlying problem is a Python syntax error ")
+ s.write("on line %d:\n" % inner.lineno)
+ s.write("\n")
+ s.write(" %s\n" % inner.text)
+ if inner.offset:
+ s.write((" " * (inner.offset + 4)) + "^\n")
+ s.write("\n")
+ s.write("Fix the syntax error and try again.\n")
+ return
+
+ if isinstance(inner, KeyError):
+ self._print_keyerror(inner, s)
+ elif isinstance(inner, ValueError):
+ self._print_valueerror(inner, s)
+ else:
+ self._print_exception(inner, s)
+
+ def _print_keyerror(self, inner, s):
+ if not inner.args or inner.args[0] not in ("global_ns", "local_ns"):
+ self._print_exception(inner, s)
+ return
+
+ if inner.args[0] == "global_ns":
+ import difflib
+
+ verb = None
+ if inner.args[1] == "get_unknown":
+ verb = "read"
+ elif inner.args[1] == "set_unknown":
+ verb = "write"
+ elif inner.args[1] == "reassign":
+ s.write("The underlying problem is an attempt to reassign ")
+ s.write("a reserved UPPERCASE variable.\n")
+ s.write("\n")
+ s.write("The reassigned variable causing the error is:\n")
+ s.write("\n")
+ s.write(" %s\n" % inner.args[2])
+ s.write("\n")
+ s.write('Maybe you meant "+=" instead of "="?\n')
+ return
+ else:
+ raise AssertionError("Unhandled global_ns: %s" % inner.args[1])
+
+ s.write("The underlying problem is an attempt to %s " % verb)
+ s.write("a reserved UPPERCASE variable that does not exist.\n")
+ s.write("\n")
+ s.write("The variable %s causing the error is:\n" % verb)
+ s.write("\n")
+ s.write(" %s\n" % inner.args[2])
+ s.write("\n")
+ close_matches = difflib.get_close_matches(
+ inner.args[2], VARIABLES.keys(), 2
+ )
+ if close_matches:
+ s.write("Maybe you meant %s?\n" % " or ".join(close_matches))
+ s.write("\n")
+
+ if inner.args[2] in DEPRECATION_HINTS:
+ s.write(
+ "%s\n" % textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip()
+ )
+ return
+
+ s.write("Please change the file to not use this variable.\n")
+ s.write("\n")
+ s.write("For reference, the set of valid variables is:\n")
+ s.write("\n")
+ s.write(", ".join(sorted(VARIABLES.keys())) + "\n")
+ return
+
+ s.write("The underlying problem is a reference to an undefined ")
+ s.write("local variable:\n")
+ s.write("\n")
+ s.write(" %s\n" % inner.args[2])
+ s.write("\n")
+ s.write("Please change the file to not reference undefined ")
+ s.write("variables and try again.\n")
+
+ def _print_valueerror(self, inner, s):
+ if not inner.args or inner.args[0] not in ("global_ns", "local_ns"):
+ self._print_exception(inner, s)
+ return
+
+ assert inner.args[1] == "set_type"
+
+ s.write("The underlying problem is an attempt to write an illegal ")
+ s.write("value to a special variable.\n")
+ s.write("\n")
+ s.write("The variable whose value was rejected is:\n")
+ s.write("\n")
+ s.write(" %s" % inner.args[2])
+ s.write("\n")
+ s.write("The value being written to it was of the following type:\n")
+ s.write("\n")
+ s.write(" %s\n" % type(inner.args[3]).__name__)
+ s.write("\n")
+ s.write("This variable expects the following type(s):\n")
+ s.write("\n")
+ if type(inner.args[4]) == type_type:
+ s.write(" %s\n" % inner.args[4].__name__)
+ else:
+ for t in inner.args[4]:
+ s.write(" %s\n" % t.__name__)
+ s.write("\n")
+ s.write("Change the file to write a value of the appropriate type ")
+ s.write("and try again.\n")
+
+ def _print_exception(self, e, s):
+ s.write("An error was encountered as part of executing the file ")
+ s.write("itself. The error appears to be the fault of the script.\n")
+ s.write("\n")
+ s.write("The error as reported by Python is:\n")
+ s.write("\n")
+ s.write(" %s\n" % traceback.format_exception_only(type(e), e))
+
+
+class BuildReader(object):
+ """Read a tree of mozbuild files into data structures.
+
+ This is where the build system starts. You give it a tree configuration
+ (the output of configuration) and it executes the moz.build files and
+ collects the data they define.
+
+ The reader can optionally call a callable after each sandbox is evaluated
+ but before its evaluated content is processed. This gives callers the
+ opportunity to modify contexts before side-effects occur from their
+ content. This callback receives the ``Context`` containing the result of
+ each sandbox evaluation. Its return value is ignored.
+ """
+
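+ # As a rough usage sketch (``config`` here stands for the tree
+ # configuration mentioned above; building it is out of scope for this
+ # class):
+ #
+ #     reader = BuildReader(config)
+ #     for context in reader.read_topsrcdir():
+ #         print(context.main_path)
+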
+ def __init__(self, config, finder=default_finder):
+ self.config = config
+
+ self._log = logging.getLogger(__name__)
+ self._read_files = set()
+ self._execution_stack = []
+ self.finder = finder
+
+ # Finder patterns to ignore when searching for moz.build files.
+ ignores = {
+ # Ignore fake moz.build files used for testing moz.build.
+ "python/mozbuild/mozbuild/test",
+ "testing/mozbase/moztest/tests/data",
+ # Ignore object directories.
+ "obj*",
+ }
+
+ self._relevant_mozbuild_finder = FileFinder(
+ self.config.topsrcdir, ignore=ignores
+ )
+
+ # Also ignore any other directories that could be objdirs, they don't
+ # necessarily start with the string 'obj'.
+ for path, f in self._relevant_mozbuild_finder.find("*/config.status"):
+ self._relevant_mozbuild_finder.ignore.add(os.path.dirname(path))
+
+ max_workers = cpu_count()
+ if sys.platform.startswith("win"):
+ # In Python 3, on Windows, ProcessPoolExecutor uses
+ # _winapi.WaitForMultipleObjects, which doesn't work on a large
+ # number of objects. It also has some automatic capping to avoid
+ # _winapi.WaitForMultipleObjects being unhappy as a consequence,
+ # but that capping is actually insufficient in Python 3.7 and 3.8
+ # (as well as nonexistent in older versions). So we cap ourselves
+ # to 60, see https://bugs.python.org/issue26903#msg365886.
+ max_workers = min(max_workers, 60)
+ self._gyp_worker_pool = ProcessPoolExecutor(max_workers=max_workers)
+ self._gyp_processors = []
+ self._execution_time = 0.0
+ self._file_count = 0
+ self._gyp_execution_time = 0.0
+ self._gyp_file_count = 0
+
+ def summary(self):
+ return ExecutionSummary(
+ "Finished reading {file_count:d} moz.build files in "
+ "{execution_time:.2f}s",
+ file_count=self._file_count,
+ execution_time=self._execution_time,
+ )
+
+ def gyp_summary(self):
+ return ExecutionSummary(
+ "Read {file_count:d} gyp files in parallel contributing "
+ "{execution_time:.2f}s to total wall time",
+ file_count=self._gyp_file_count,
+ execution_time=self._gyp_execution_time,
+ )
+
+ def read_topsrcdir(self):
+ """Read the tree of linked moz.build files.
+
+ This starts with the tree's top-most moz.build file and descends into
+ all linked moz.build files until all relevant files have been evaluated.
+
+ This is a generator of Context instances. As each moz.build file is
+ read, a new Context is created and emitted.
+ """
+ path = mozpath.join(self.config.topsrcdir, "moz.build")
+ for r in self.read_mozbuild(path, self.config):
+ yield r
+ all_gyp_paths = set()
+ for g in self._gyp_processors:
+ for gyp_context in g.results:
+ all_gyp_paths |= gyp_context.all_paths
+ yield gyp_context
+ self._gyp_execution_time += g.execution_time
+ self._gyp_file_count += len(all_gyp_paths)
+ self._gyp_worker_pool.shutdown()
+
+ def all_mozbuild_paths(self):
+ """Iterator over all available moz.build files.
+
+ This method has little to do with the reader. It should arguably belong
+ elsewhere.
+ """
+ # In the future, we may traverse moz.build files by looking
+ # for DIRS references in the AST, even if a directory is added behind
+ # a conditional. For now, just walk the filesystem.
+ for path, f in self._relevant_mozbuild_finder.find("**/moz.build"):
+ yield path
+
+ def find_variables_from_ast(self, variables, path=None):
+ """Finds all assignments to the specified variables by parsing
+ moz.build abstract syntax trees.
+
+ This function only supports two cases, as detailed below.
+
+ 1) A dict. Keys and values should both be strings, e.g.:
+
+ VARIABLE['foo'] = 'bar'
+
+ This is an `Assign` node with a `Subscript` target. The `Subscript`'s
+ value is a `Name` node with id "VARIABLE". The slice of this target is
+ an `Index` node and its value is a `Str` with value "foo".
+
+ 2) A simple list. Values should be strings, e.g.:
+
+ VARIABLE += ['foo']
+
+ This is an `AugAssign` node with a `Name` target with id "VARIABLE".
+ The value is a `List` node containing one `Str` element whose value is
+ "foo".
+
+ With a little work, this function could support other types of
+ assignment. But if we end up writing a lot of AST code, it might be
+ best to import a high-level AST manipulation library into the tree.
+
+ Args:
+ variables (list): A list of variable assignments to capture.
+ path (str): A path relative to the source dir. If specified, only
+ `moz.build` files relevant to this path will be parsed. Otherwise
+ all `moz.build` files are parsed.
+
+ Returns:
+ A generator that generates tuples of the form `(<moz.build path>,
+ <variable name>, <key>, <value>)`. The `key` will only be
+ defined if the variable is an object, otherwise it is `None`.
+ """
+
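+ # Illustrative only -- e.g. to find every XPIDL_MODULE assignment
+ # in the tree (XPIDL_MODULE being one string-valued moz.build
+ # variable):
+ #
+ #     for path, name, key, value in reader.find_variables_from_ast(
+ #         ["XPIDL_MODULE"]
+ #     ):
+ #         print(path, value)
+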
+ if isinstance(variables, string_types):
+ variables = [variables]
+
+ def assigned_variable(node):
+ # This is not correct, but we don't care yet.
+ if hasattr(node, "targets"):
+ # Nothing in moz.build does multi-assignment (yet). So error if
+ # we see it.
+ assert len(node.targets) == 1
+
+ target = node.targets[0]
+ else:
+ target = node.target
+
+ if isinstance(target, ast.Subscript):
+ if not isinstance(target.value, ast.Name):
+ return None, None
+ name = target.value.id
+ elif isinstance(target, ast.Name):
+ name = target.id
+ else:
+ return None, None
+
+ if name not in variables:
+ return None, None
+
+ key = None
+ if isinstance(target, ast.Subscript):
+ # We need to branch to deal with Python version differences.
+ if isinstance(target.slice, ast.Constant):
+ # Python >= 3.9
+ assert isinstance(target.slice.value, str)
+ key = target.slice.value
+ else:
+ # Others
+ assert isinstance(target.slice, ast.Index)
+ assert isinstance(target.slice.value, ast.Str)
+ key = target.slice.value.s
+
+ return name, key
+
+ def assigned_values(node):
+ value = node.value
+ if isinstance(value, ast.List):
+ for v in value.elts:
+ assert isinstance(v, ast.Str)
+ yield v.s
+ else:
+ assert isinstance(value, ast.Str)
+ yield value.s
+
+ assignments = []
+
+ class Visitor(ast.NodeVisitor):
+ def helper(self, node):
+ name, key = assigned_variable(node)
+ if not name:
+ return
+
+ for v in assigned_values(node):
+ assignments.append((name, key, v))
+
+ def visit_Assign(self, node):
+ self.helper(node)
+
+ def visit_AugAssign(self, node):
+ self.helper(node)
+
+ if path:
+ mozbuild_paths = chain(*self._find_relevant_mozbuilds([path]).values())
+ else:
+ mozbuild_paths = self.all_mozbuild_paths()
+
+ for p in mozbuild_paths:
+ assignments[:] = []
+ full = os.path.join(self.config.topsrcdir, p)
+
+ with open(full, "rb") as fh:
+ source = fh.read()
+
+ tree = ast.parse(source, full)
+ Visitor().visit(tree)
+
+ for name, key, value in assignments:
+ yield p, name, key, value
+
+ def read_mozbuild(self, path, config, descend=True, metadata={}):
+ """Read and process a mozbuild file, descending into children.
+
+ This starts with a single mozbuild file, executes it, and descends into
+ other referenced files per our traversal logic.
+
+ The traversal logic is to iterate over the ``*DIRS`` variables, treating
+ each element as a relative directory path. For each encountered
+ directory, we will open the moz.build file located in that
+ directory in a new Sandbox and process it.
+
+ If descend is True (the default), we will descend into child
+ directories and files per variable values.
+
+ Arbitrary metadata in the form of a dict can be passed into this
+ function. This feature is intended to facilitate the build reader
+ injecting state and annotations into moz.build files that are
+ independent of the sandbox's execution context.
+
+ Traversal is performed depth first (for no particular reason).
+ """
+ self._execution_stack.append(path)
+ try:
+ for s in self._read_mozbuild(
+ path, config, descend=descend, metadata=metadata
+ ):
+ yield s
+
+ except BuildReaderError as bre:
+ raise bre
+
+ except SandboxCalledError as sce:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_called_error=sce
+ )
+
+ except SandboxExecutionError as se:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_exec_error=se
+ )
+
+ except SandboxLoadError as sle:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_load_error=sle
+ )
+
+ except SandboxValidationError as ve:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], validation_error=ve
+ )
+
+ except Exception as e:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], other_error=e
+ )
+
+ def _read_mozbuild(self, path, config, descend, metadata):
+ path = mozpath.normpath(path)
+ log(
+ self._log,
+ logging.DEBUG,
+ "read_mozbuild",
+ {"path": path},
+ "Reading file: {path}".format(path=path),
+ )
+
+ if path in self._read_files:
+ log(
+ self._log,
+ logging.WARNING,
+ "read_already",
+ {"path": path},
+ "File already read. Skipping: {path}".format(path=path),
+ )
+ return
+
+ self._read_files.add(path)
+
+ time_start = time.monotonic()
+
+ topobjdir = config.topobjdir
+
+ relpath = mozpath.relpath(path, config.topsrcdir)
+ reldir = mozpath.dirname(relpath)
+
+ if mozpath.dirname(relpath) == "js/src" and not config.substs.get(
+ "JS_STANDALONE"
+ ):
+ config = ConfigEnvironment.from_config_status(
+ mozpath.join(topobjdir, reldir, "config.status")
+ )
+ config.topobjdir = topobjdir
+
+ context = Context(VARIABLES, config, self.finder)
+ sandbox = MozbuildSandbox(context, metadata=metadata, finder=self.finder)
+ sandbox.exec_file(path)
+ self._execution_time += time.monotonic() - time_start
+ self._file_count += len(context.all_paths)
+
+ # Yield main context before doing any processing. This gives immediate
+ # consumers an opportunity to change state before our remaining
+ # processing is performed.
+ yield context
+
+ # We need the list of directories pre-gyp processing for later.
+ dirs = list(context.get("DIRS", []))
+
+ curdir = mozpath.dirname(path)
+
+ for target_dir in context.get("GYP_DIRS", []):
+ gyp_dir = context["GYP_DIRS"][target_dir]
+ for v in ("input", "variables"):
+ if not getattr(gyp_dir, v):
+ raise SandboxValidationError(
+ "Missing value for " 'GYP_DIRS["%s"].%s' % (target_dir, v),
+ context,
+ )
+
+ # The make backend assumes contexts for sub-directories are
+ # emitted after their parent, so accumulate the gyp contexts.
+ # We could emit the parent context before processing gyp
+ # configuration, but we need to add the gyp objdirs to that context
+ # first.
+ from .gyp_reader import GypProcessor
+
+ non_unified_sources = set()
+ for s in gyp_dir.non_unified_sources:
+ source = SourcePath(context, s)
+ if not self.finder.get(source.full_path):
+ raise SandboxValidationError("Cannot find %s." % source, context)
+ non_unified_sources.add(source)
+ action_overrides = {}
+ for action, script in six.iteritems(gyp_dir.action_overrides):
+ action_overrides[action] = SourcePath(context, script)
+
+ gyp_processor = GypProcessor(
+ context.config,
+ gyp_dir,
+ mozpath.join(curdir, gyp_dir.input),
+ mozpath.join(context.objdir, target_dir),
+ self._gyp_worker_pool,
+ action_overrides,
+ non_unified_sources,
+ )
+ self._gyp_processors.append(gyp_processor)
+
+ for subcontext in sandbox.subcontexts:
+ yield subcontext
+
+ # Traverse into referenced files.
+
+ # It's very tempting to use a set here. Unfortunately, the recursive
+ # make backend needs order preserved. Once we autogenerate all backend
+ # files, we should be able to convert this to a set.
+ recurse_info = OrderedDict()
+ for d in dirs:
+ if d in recurse_info:
+ raise SandboxValidationError(
+ "Directory (%s) registered multiple times"
+ % (mozpath.relpath(d.full_path, context.srcdir)),
+ context,
+ )
+
+ recurse_info[d] = {}
+ for key in sandbox.metadata:
+ if key == "exports":
+ sandbox.recompute_exports()
+
+ recurse_info[d][key] = dict(sandbox.metadata[key])
+
+ for path, child_metadata in recurse_info.items():
+ child_path = path.join("moz.build").full_path
+
+ # Ensure we don't break out of the topsrcdir. We don't do realpath
+ # because it isn't necessary. If there are symlinks in the srcdir,
+ # that's not our problem. We're not a hosted application: we don't
+ # need to worry about security too much.
+ if not is_read_allowed(child_path, context.config):
+ raise SandboxValidationError(
+ "Attempting to process file outside of allowed paths: %s"
+ % child_path,
+ context,
+ )
+
+ if not descend:
+ continue
+
+ for res in self.read_mozbuild(
+ child_path, context.config, metadata=child_metadata
+ ):
+ yield res
+
+ self._execution_stack.pop()
+
+ def _find_relevant_mozbuilds(self, paths):
+ """Given a set of filesystem paths, find all relevant moz.build files.
+
+ We assume that a moz.build file in the directory ancestry of a given path
+ is relevant to that path. Let's say we have the following files on disk::
+
+ moz.build
+ foo/moz.build
+ foo/baz/moz.build
+ foo/baz/file1
+ other/moz.build
+ other/file2
+
+ If ``foo/baz/file1`` is passed in, the relevant moz.build files are
+ ``moz.build``, ``foo/moz.build``, and ``foo/baz/moz.build``. For
+ ``other/file2``, the relevant moz.build files are ``moz.build`` and
+ ``other/moz.build``.
+
+ Returns a dict of input paths to a list of relevant moz.build files.
+ The root moz.build file is first and the leaf-most moz.build is last.
+ """
+ root = self.config.topsrcdir
+ result = {}
+
+ @memoize
+ def exists(path):
+ return self._relevant_mozbuild_finder.get(path) is not None
+
+ def itermozbuild(path):
+ subpath = ""
+ yield "moz.build"
+ for part in mozpath.split(path):
+ subpath = mozpath.join(subpath, part)
+ yield mozpath.join(subpath, "moz.build")
+
+ for path in sorted(paths):
+ path = mozpath.normpath(path)
+ if os.path.isabs(path):
+ if not mozpath.basedir(path, [root]):
+ raise Exception("Path outside topsrcdir: %s" % path)
+ path = mozpath.relpath(path, root)
+
+ result[path] = [p for p in itermozbuild(path) if exists(p)]
+
+ return result
+
+ def read_relevant_mozbuilds(self, paths):
+ """Read and process moz.build files relevant for a set of paths.
+
+ For an iterable of relative-to-root filesystem paths ``paths``,
+ find all moz.build files that may apply to them based on filesystem
+ hierarchy and read those moz.build files.
+
+ The return value is a 2-tuple. The first item is a dict mapping each
+ input filesystem path to a list of Context instances that are relevant
+ to that path. The second item is a list of all Context instances. Each
+ Context instance is in both data structures.
+ """
+ relevants = self._find_relevant_mozbuilds(paths)
+
+ topsrcdir = self.config.topsrcdir
+
+ # Source moz.build file to directories to traverse.
+ dirs = defaultdict(set)
+ # Relevant path to absolute paths of relevant contexts.
+ path_mozbuilds = {}
+
+ # There is room to improve this code (and the code in
+ # _find_relevant_mozbuilds) to better handle multiple files in the same
+ # directory. Bug 1136966 tracks.
+ for path, mbpaths in relevants.items():
+ path_mozbuilds[path] = [mozpath.join(topsrcdir, p) for p in mbpaths]
+
+ for i, mbpath in enumerate(mbpaths[0:-1]):
+ source_dir = mozpath.dirname(mbpath)
+ target_dir = mozpath.dirname(mbpaths[i + 1])
+
+ d = mozpath.normpath(mozpath.join(topsrcdir, mbpath))
+ dirs[d].add(mozpath.relpath(target_dir, source_dir))
+
+ # Exporting doesn't work reliably in tree traversal mode. Override
+ # the function to no-op.
+ functions = dict(FUNCTIONS)
+
+ def export(sandbox):
+ return lambda varname: None
+
+ functions["export"] = tuple([export] + list(FUNCTIONS["export"][1:]))
+
+ metadata = {
+ "functions": functions,
+ }
+
+ contexts = defaultdict(list)
+ all_contexts = []
+ for context in self.read_mozbuild(
+ mozpath.join(topsrcdir, "moz.build"), self.config, metadata=metadata
+ ):
+ # Explicitly set directory traversal variables to override default
+ # traversal rules.
+ if not isinstance(context, SubContext):
+ for v in ("DIRS", "GYP_DIRS"):
+ context[v][:] = []
+
+ context["DIRS"] = sorted(dirs[context.main_path])
+
+ contexts[context.main_path].append(context)
+ all_contexts.append(context)
+
+ result = {}
+ for path, paths in path_mozbuilds.items():
+ result[path] = six.moves.reduce(
+ lambda x, y: x + y, (contexts[p] for p in paths), []
+ )
+
+ return result, all_contexts
+
+ def files_info(self, paths):
+ """Obtain aggregate data from Files for a set of files.
+
+ Given a set of input paths, determine which moz.build files may
+ define metadata for them, evaluate those moz.build files, and
+ apply file metadata rules defined within to determine metadata
+ values for each file requested.
+
+ Essentially, for each input path:
+
+ 1. Determine the set of moz.build files relevant to that file by
+ looking for moz.build files in ancestor directories.
+ 2. Evaluate moz.build files starting with the most distant.
+ 3. Iterate over Files sub-contexts.
+ 4. If the file pattern matches the file we're seeking info on,
+ apply attribute updates.
+ 5. Return the most recent value of attributes.
+ """
+ paths, _ = self.read_relevant_mozbuilds(paths)
+
+ r = {}
+
+ # Only do wildcard matching if the '*' character is present.
+ # Otherwise, mozpath.match will match directories, which we've
+ # arbitrarily chosen to not allow.
+ def path_matches_pattern(relpath, pattern):
+ if pattern == relpath:
+ return True
+
+ return "*" in pattern and mozpath.match(relpath, pattern)
+
+ for path, ctxs in paths.items():
+ # Should be normalized by read_relevant_mozbuilds.
+ assert "\\" not in path
+
+ flags = Files(Context())
+
+ for ctx in ctxs:
+ if not isinstance(ctx, Files):
+ continue
+
+ # read_relevant_mozbuilds() normalizes paths and ensures that
+ # the contexts have paths in the ancestry of the path. When
+ # iterating over tens of thousands of paths, mozpath.relpath()
+ # can be very expensive. So, given our assumptions about paths,
+ # we implement an optimized version.
+ ctx_rel_dir = ctx.relsrcdir
+ if ctx_rel_dir:
+ assert path.startswith(ctx_rel_dir)
+ relpath = path[len(ctx_rel_dir) + 1 :]
+ else:
+ relpath = path
+
+ if any(path_matches_pattern(relpath, p) for p in ctx.patterns):
+ flags += ctx
+
+ r[path] = flags
+
+ return r
diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py
new file mode 100644
index 0000000000..088e817cb0
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/sandbox.py
@@ -0,0 +1,313 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Python sandbox implementation for build files.
+
+This module contains classes for Python sandboxes that execute in a
+highly-controlled environment.
+
+The main class is `Sandbox`. This provides an execution environment for Python
+code and is used to fill a Context instance for the takeaway information from
+the execution.
+
+Code in this module takes a different approach to exception handling compared
+to what you'd see elsewhere in Python. Arguments to built-in exceptions like
+KeyError are machine parseable. This machine-friendly data is used to present
+user-friendly error messages in the case of errors.
+"""
+
+import os
+import sys
+import weakref
+
+import six
+from mozpack.files import FileFinder
+
+from mozbuild.util import ReadOnlyDict, exec_
+
+from .context import Context
+
+default_finder = FileFinder("/")
+
+
+def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False):
+ """sorted() replacement for the sandbox, ordering alphabetically by
+ default.
+ """
+ return sorted(iterable, key=key, reverse=reverse)
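+
+
+# Note that the default ordering is case-insensitive, e.g.
+# alphabetical_sorted(["Zebra", "apple"]) == ["apple", "Zebra"], whereas
+# the built-in sorted() would put "Zebra" first (ASCII ordering).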
+
+
+class SandboxError(Exception):
+ def __init__(self, file_stack):
+ self.file_stack = file_stack
+
+
+class SandboxExecutionError(SandboxError):
+ """Represents errors encountered during execution of a Sandbox.
+
+ This is a simple container exception. Its purpose is to capture state
+ so something else can report on it.
+ """
+
+ def __init__(self, file_stack, exc_type, exc_value, trace):
+ SandboxError.__init__(self, file_stack)
+
+ self.exc_type = exc_type
+ self.exc_value = exc_value
+ self.trace = trace
+
+
+class SandboxLoadError(SandboxError):
+ """Represents errors encountered when loading a file for execution.
+
+ This exception represents errors in a Sandbox that occurred as part of
+ loading a file. The error could have occurred in the course of executing
+ a file. If so, the file_stack will be non-empty and the file that caused
+ the load will be on top of the stack.
+ """
+
+ def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
+ SandboxError.__init__(self, file_stack)
+
+ self.trace = trace
+ self.illegal_path = illegal_path
+ self.read_error = read_error
+
+
+class Sandbox(dict):
+ """Represents a sandbox for executing Python code.
+
+ This class provides a sandbox for execution of a single mozbuild frontend
+ file. The results of that execution are stored in the Context instance given
+ as the ``context`` argument.
+
+ Sandbox is effectively a glorified wrapper around compile() + exec(). You
+ point it at some Python code and it executes it. The main difference from
+ executing Python code like normal is that the executed code is very limited
+ in what it can do: the sandbox only exposes a very limited set of Python
+ functionality. Only specific types and functions are available. This
+ prevents executed code from doing things like importing modules, opening
+ files, etc.
+
+ Sandbox instances act as the global namespace for the sandboxed execution
+ itself. They shall not be used to access the results of the execution.
+ Those results are available in the given Context instance after execution.
+
+ The Sandbox itself is responsible for enforcing rules such as forbidding
+ reassignment of variables.
+
+ Implementation note: Sandbox derives from dict because exec() insists that
+ what it is given for namespaces is a dict.
+ """
+
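+ # A minimal sketch of the contract (hypothetical; real Contexts are
+ # built with the VARIABLES definitions from context.py):
+ #
+ #     sandbox = Sandbox(context)
+ #     sandbox.exec_source("DIRS += ['foo']", "/srcdir/moz.build")
+ #     # The result lands in ``context``, not in the sandbox:
+ #     # context["DIRS"] == ["foo"]
+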
+ # The default set of builtins.
+ BUILTINS = ReadOnlyDict(
+ {
+ # Only real Python built-ins should go here.
+ "None": None,
+ "False": False,
+ "True": True,
+ "sorted": alphabetical_sorted,
+ "int": int,
+ "set": set,
+ "tuple": tuple,
+ }
+ )
+
+ def __init__(self, context, finder=default_finder):
+ """Initialize a Sandbox ready for execution."""
+ self._builtins = self.BUILTINS
+ dict.__setitem__(self, "__builtins__", self._builtins)
+
+ assert isinstance(self._builtins, ReadOnlyDict)
+ assert isinstance(context, Context)
+
+ # Contexts are modeled as a stack because multiple context managers
+ # may be active.
+ self._active_contexts = [context]
+
+ # Seen sub-contexts. Will be populated with other Context instances
+ # that were related to execution of this instance.
+ self.subcontexts = []
+
+ # We need to record this because it gets swallowed as part of
+ # evaluation.
+ self._last_name_error = None
+
+ # Current literal source being executed.
+ self._current_source = None
+
+ self._finder = finder
+
+ @property
+ def _context(self):
+ return self._active_contexts[-1]
+
+ def exec_file(self, path):
+ """Execute code at a path in the sandbox.
+
+ The path must be absolute.
+ """
+ assert os.path.isabs(path)
+
+ try:
+ source = six.ensure_text(self._finder.get(path).read())
+ except Exception:
+ raise SandboxLoadError(
+ self._context.source_stack, sys.exc_info()[2], read_error=path
+ )
+
+ self.exec_source(source, path)
+
+ def exec_source(self, source, path=""):
+ """Execute Python code within a string.
+
+ The passed string should contain Python code to be executed. The string
+ will be compiled and executed.
+
+ You should almost always go through exec_file() because exec_source()
+ does not perform extra path normalization. This can cause relative
+ paths to behave weirdly.
+ """
+
+ def execute():
+ # compile() inherits the __future__ from the module by default. We
+ # do want Unicode literals.
+ code = compile(source, path, "exec")
+ # We use ourself as the global namespace for the execution. There
+ # is no need for a separate local namespace as moz.build execution
+ # is flat, namespace-wise.
+ old_source = self._current_source
+ self._current_source = source
+ try:
+ exec_(code, self)
+ finally:
+ self._current_source = old_source
+
+ self.exec_function(execute, path=path)
+
+ def exec_function(
+ self, func, args=(), kwargs={}, path="", becomes_current_path=True
+ ):
+ """Execute function with the given arguments in the sandbox."""
+ if path and becomes_current_path:
+ self._context.push_source(path)
+
+ old_sandbox = self._context._sandbox
+ self._context._sandbox = weakref.ref(self)
+
+ # We don't have to worry about bytecode generation here because we are
+ # too low-level for that. However, we could add bytecode generation via
+ # the marshal module if parsing performance were ever an issue.
+
+ old_source = self._current_source
+ self._current_source = None
+ try:
+ func(*args, **kwargs)
+ except SandboxError as e:
+ raise e
+ except NameError as e:
+ # A NameError is raised when a variable could not be found.
+ # The original KeyError has been dropped by the interpreter.
+ # However, we should have it cached in our instance!
+
+ # Unless a script is doing something wonky like catching NameError
+ # itself (that would be silly), if there is an exception on the
+ # global namespace, that's our error.
+ actual = e
+
+ if self._last_name_error is not None:
+ actual = self._last_name_error
+ source_stack = self._context.source_stack
+ if not becomes_current_path:
+ # Add current file to the stack because it wasn't added before
+ # sandbox execution.
+ source_stack.append(path)
+ raise SandboxExecutionError(
+ source_stack, type(actual), actual, sys.exc_info()[2]
+ )
+
+ except Exception:
+ # Need to copy the stack otherwise we get a reference and that is
+ # mutated during the finally.
+ exc = sys.exc_info()
+ source_stack = self._context.source_stack
+ if not becomes_current_path:
+ # Add current file to the stack because it wasn't added before
+ # sandbox execution.
+ source_stack.append(path)
+ raise SandboxExecutionError(source_stack, exc[0], exc[1], exc[2])
+ finally:
+ self._current_source = old_source
+ self._context._sandbox = old_sandbox
+ if path and becomes_current_path:
+ self._context.pop_source()
+
+ def push_subcontext(self, context):
+ """Push a SubContext onto the execution stack.
+
+ When called, the active context will be set to the specified context,
+ meaning all variable accesses will go through it. We also record this
+ SubContext as having been executed as part of this sandbox.
+ """
+ self._active_contexts.append(context)
+ if context not in self.subcontexts:
+ self.subcontexts.append(context)
+
+ def pop_subcontext(self, context):
+ """Pop a SubContext off the execution stack.
+
+ SubContexts must be pushed and popped in opposite order. This is
+ validated as part of the function call to ensure proper consumer API
+ use.
+ """
+ popped = self._active_contexts.pop()
+ assert popped == context
+
+ def __getitem__(self, key):
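+ # e.g. sandbox["DIRS"] reads from the active Context, while
+ # sandbox["_some_helper"] falls back to the plain dict storage.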
+ if key.isupper():
+ try:
+ return self._context[key]
+ except Exception as e:
+ self._last_name_error = e
+ raise
+
+ return dict.__getitem__(self, key)
+
+ def __setitem__(self, key, value):
+ if key in self._builtins or key == "__builtins__":
+ raise KeyError("Cannot reassign builtins")
+
+ if key.isupper():
+ # Forbid assigning over a previously set value. Interestingly, when
+ # doing FOO += ['bar'], Python actually does something like:
+ # foo = namespace.__getitem__('FOO')
+ # foo.__iadd__(['bar'])
+ # namespace.__setitem__('FOO', foo)
+ # This means __setitem__ is called with the value that is already
+ # in the dict, when doing +=, which is permitted.
+ if key in self._context and self._context[key] is not value:
+ raise KeyError("global_ns", "reassign", key)
+
+ if (
+ key not in self._context
+ and isinstance(value, (list, dict))
+ and not value
+ ):
+ raise KeyError("Variable %s assigned an empty value." % key)
+
+ self._context[key] = value
+ else:
+ dict.__setitem__(self, key, value)
+
+ def get(self, key, default=None):
+ raise NotImplementedError("Not supported")
+
+ def __iter__(self):
+ raise NotImplementedError("Not supported")
+
+ def __contains__(self, key):
+ if key.isupper():
+ return key in self._context
+ return dict.__contains__(self, key)