Diffstat (limited to 'python/mozbuild/mozbuild/action')
-rw-r--r--  python/mozbuild/mozbuild/action/__init__.py | 0
-rw-r--r--  python/mozbuild/mozbuild/action/buildlist.py | 49
-rw-r--r--  python/mozbuild/mozbuild/action/check_binary.py | 343
-rw-r--r--  python/mozbuild/mozbuild/action/download_wpt_manifest.py | 21
-rw-r--r--  python/mozbuild/mozbuild/action/dump_env.py | 30
-rw-r--r--  python/mozbuild/mozbuild/action/dumpsymbols.py | 109
-rw-r--r--  python/mozbuild/mozbuild/action/exe_7z_archive.py | 89
-rw-r--r--  python/mozbuild/mozbuild/action/fat_aar.py | 185
-rw-r--r--  python/mozbuild/mozbuild/action/file_generate.py | 155
-rw-r--r--  python/mozbuild/mozbuild/action/file_generate_wrapper.py | 38
-rw-r--r--  python/mozbuild/mozbuild/action/generate_symbols_file.py | 95
-rw-r--r--  python/mozbuild/mozbuild/action/html_fragment_preprocesor.py | 101
-rw-r--r--  python/mozbuild/mozbuild/action/install.py | 22
-rw-r--r--  python/mozbuild/mozbuild/action/jar_maker.py | 16
-rw-r--r--  python/mozbuild/mozbuild/action/l10n_merge.py | 42
-rw-r--r--  python/mozbuild/mozbuild/action/langpack_localeNames.json | 426
-rw-r--r--  python/mozbuild/mozbuild/action/langpack_manifest.py | 587
-rw-r--r--  python/mozbuild/mozbuild/action/make_dmg.py | 67
-rw-r--r--  python/mozbuild/mozbuild/action/make_unzip.py | 25
-rw-r--r--  python/mozbuild/mozbuild/action/node.py | 137
-rw-r--r--  python/mozbuild/mozbuild/action/package_generated_sources.py | 42
-rw-r--r--  python/mozbuild/mozbuild/action/preprocessor.py | 24
-rw-r--r--  python/mozbuild/mozbuild/action/process_define_files.py | 115
-rw-r--r--  python/mozbuild/mozbuild/action/process_install_manifest.py | 125
-rw-r--r--  python/mozbuild/mozbuild/action/symbols_archive.py | 89
-rw-r--r--  python/mozbuild/mozbuild/action/test_archive.py | 875
-rwxr-xr-x  python/mozbuild/mozbuild/action/tooltool.py | 1714
-rw-r--r--  python/mozbuild/mozbuild/action/unify_symbols.py | 49
-rw-r--r--  python/mozbuild/mozbuild/action/unify_tests.py | 65
-rw-r--r--  python/mozbuild/mozbuild/action/unpack_dmg.py | 52
-rw-r--r--  python/mozbuild/mozbuild/action/util.py | 24
-rw-r--r--  python/mozbuild/mozbuild/action/webidl.py | 19
-rw-r--r--  python/mozbuild/mozbuild/action/wrap_rustc.py | 79
-rw-r--r--  python/mozbuild/mozbuild/action/xpccheck.py | 109
-rwxr-xr-x  python/mozbuild/mozbuild/action/xpidl-process.py | 153
-rw-r--r--  python/mozbuild/mozbuild/action/zip.py | 52
36 files changed, 6123 insertions, 0 deletions
diff --git a/python/mozbuild/mozbuild/action/__init__.py b/python/mozbuild/mozbuild/action/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/__init__.py
diff --git a/python/mozbuild/mozbuild/action/buildlist.py b/python/mozbuild/mozbuild/action/buildlist.py
new file mode 100644
index 0000000000..ab32ad92cc
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/buildlist.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""A generic script to add entries to a file
+if the entry does not already exist.
+
+Usage: buildlist.py <filename> <entry> [<entry> ...]
+"""
+import io
+import os
+import sys
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import ensureParentDir, lock_file
+
+
+def addEntriesToListFile(listFile, entries):
+ """Given a file ``listFile`` containing one entry per line,
+ add each entry in ``entries`` to the file, unless it is already
+ present."""
+ ensureParentDir(listFile)
+ lock = lock_file(listFile + ".lck")
+ try:
+ if os.path.exists(listFile):
+ f = io.open(listFile)
+ existing = set(x.strip() for x in f.readlines())
+ f.close()
+ else:
+ existing = set()
+ for e in entries:
+ if e not in existing:
+ existing.add(e)
+ with io.open(listFile, "w", newline="\n") as f:
+ f.write("\n".join(sorted(existing)) + "\n")
+ finally:
+ del lock # Explicitly release the lock_file to free it
+
+
+def main(args):
+ if len(args) < 2:
+ print("Usage: buildlist.py <list file> <entry> [<entry> ...]", file=sys.stderr)
+ return 1
+
+ return addEntriesToListFile(args[0], args[1:])
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
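
A minimal usage sketch for buildlist.py (illustrative, not part of the patch; paths are hypothetical): entries are deduplicated and the file is rewritten sorted, so repeated invocations are idempotent.

    # Sketch: the dedup/sort contract of addEntriesToListFile.
    from mozbuild.action.buildlist import addEntriesToListFile

    addEntriesToListFile("objdir/components.list", ["b.xpt", "a.xpt"])
    addEntriesToListFile("objdir/components.list", ["a.xpt", "c.xpt"])
    # components.list now contains exactly: a.xpt, b.xpt, c.xpt (one per line).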
diff --git a/python/mozbuild/mozbuild/action/check_binary.py b/python/mozbuild/mozbuild/action/check_binary.py
new file mode 100644
index 0000000000..baf39860de
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/check_binary.py
@@ -0,0 +1,343 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+import buildconfig
+from mozpack.executables import ELF, UNKNOWN, get_type
+from packaging.version import Version
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import memoize
+
+STDCXX_MAX_VERSION = Version("3.4.19")
+CXXABI_MAX_VERSION = Version("1.3.7")
+GLIBC_MAX_VERSION = Version("2.17")
+LIBGCC_MAX_VERSION = Version("4.8")
+
+HOST = {"platform": buildconfig.substs["HOST_OS_ARCH"], "readelf": "readelf"}
+
+TARGET = {
+ "platform": buildconfig.substs["OS_TARGET"],
+ "readelf": buildconfig.substs.get("READELF", "readelf"),
+}
+
+ADDR_RE = re.compile(r"[0-9a-f]{8,16}")
+
+if buildconfig.substs.get("HAVE_64BIT_BUILD"):
+ GUESSED_NSMODULE_SIZE = 8
+else:
+ GUESSED_NSMODULE_SIZE = 4
+
+
+get_type = memoize(get_type)
+
+
+@memoize
+def get_output(*cmd):
+ env = dict(os.environ)
+ env[b"LC_ALL"] = b"C"
+ return subprocess.check_output(cmd, env=env, universal_newlines=True).splitlines()
+
+
+class Skip(RuntimeError):
+ pass
+
+
+class Empty(RuntimeError):
+ pass
+
+
+def at_least_one(iter):
+ saw_one = False
+ for item in iter:
+ saw_one = True
+ yield item
+ if not saw_one:
+ raise Empty()
+
+
+# Iterates the symbol table on ELF binaries.
+def iter_elf_symbols(target, binary, all=False):
+ ty = get_type(binary)
+ # Static libraries are ar archives. Assume they are ELF.
+ if ty == UNKNOWN and open(binary, "rb").read(8) == b"!<arch>\n":
+ ty = ELF
+ assert ty == ELF
+ for line in get_output(
+ target["readelf"], "--wide", "--syms" if all else "--dyn-syms", binary
+ ):
+ data = line.split()
+ if not (len(data) >= 8 and data[0].endswith(":") and data[0][:-1].isdigit()):
+ continue
+ n, addr, size, type, bind, vis, index, name = data[:8]
+
+ if "@" in name:
+ name, ver = name.rsplit("@", 1)
+ while name.endswith("@"):
+ name = name[:-1]
+ else:
+ ver = None
+ yield {
+ "addr": int(addr, 16),
+ # readelf output may contain decimal values or hexadecimal
+ # values prefixed with 0x for the size. Let python autodetect.
+ "size": int(size, 0),
+ "name": name,
+ "version": ver,
+ }
+
+
+def iter_readelf_dynamic(target, binary):
+ for line in get_output(target["readelf"], "-d", binary):
+ data = line.split(None, 2)
+ if data and len(data) == 3 and data[0].startswith("0x"):
+ yield data[1].rstrip(")").lstrip("("), data[2]
+
+
+def check_binary_compat(target, binary):
+ if get_type(binary) != ELF:
+ raise Skip()
+ checks = (
+ ("libstdc++", "GLIBCXX_", STDCXX_MAX_VERSION),
+ ("libstdc++", "CXXABI_", CXXABI_MAX_VERSION),
+ ("libgcc", "GCC_", LIBGCC_MAX_VERSION),
+ ("libc", "GLIBC_", GLIBC_MAX_VERSION),
+ )
+
+ unwanted = {}
+ try:
+ for sym in at_least_one(iter_elf_symbols(target, binary)):
+ # Only check versions on undefined symbols
+ if sym["addr"] != 0:
+ continue
+
+ # No version to check
+ if not sym["version"]:
+ continue
+
+ for _, prefix, max_version in checks:
+ if sym["version"].startswith(prefix):
+ version = Version(sym["version"][len(prefix) :])
+ if version > max_version:
+ unwanted.setdefault(prefix, []).append(sym)
+ except Empty:
+ raise RuntimeError("Could not parse llvm-objdump output?")
+ if unwanted:
+ error = []
+ for lib, prefix, _ in checks:
+ if prefix in unwanted:
+ error.append(
+ "We do not want these {} symbol versions to be used:".format(lib)
+ )
+ error.extend(
+ " {} ({})".format(s["name"], s["version"]) for s in unwanted[prefix]
+ )
+ raise RuntimeError("\n".join(error))
+
+
+def check_textrel(target, binary):
+ if target is HOST or get_type(binary) != ELF:
+ raise Skip()
+ try:
+ for tag, value in at_least_one(iter_readelf_dynamic(target, binary)):
+ if tag == "TEXTREL" or (tag == "FLAGS" and "TEXTREL" in value):
+ raise RuntimeError(
+ "We do not want text relocations in libraries and programs"
+ )
+ except Empty:
+ raise RuntimeError("Could not parse readelf output?")
+
+
+def ishex(s):
+ try:
+ int(s, 16)
+ return True
+ except ValueError:
+ return False
+
+
+def is_libxul(binary):
+ basename = os.path.basename(binary).lower()
+ return "xul" in basename
+
+
+def check_pt_load(target, binary):
+ if target is HOST or get_type(binary) != ELF or not is_libxul(binary):
+ raise Skip()
+ count = 0
+ for line in get_output(target["readelf"], "-l", binary):
+ data = line.split()
+ if data and data[0] == "LOAD":
+ count += 1
+ if count <= 1:
+ raise RuntimeError("Expected more than one PT_LOAD segment")
+
+
+def check_mozglue_order(target, binary):
+ if target is HOST or target["platform"] != "Android":
+ raise Skip()
+ # While this is very unlikely (libc being added by the compiler at the end
+ # of the linker command line), if libmozglue.so ends up after libc.so, all
+ # hell breaks loose, so better safe than sorry, and check it's actually the
+ # case.
+ try:
+ mozglue = libc = None
+ for n, (tag, value) in enumerate(
+ at_least_one(iter_readelf_dynamic(target, binary))
+ ):
+ if tag == "NEEDED":
+ if "[libmozglue.so]" in value:
+ mozglue = n
+ elif "[libc.so]" in value:
+ libc = n
+ if libc is None:
+ raise RuntimeError("libc.so is not linked?")
+ if mozglue is not None and libc < mozglue:
+ raise RuntimeError("libmozglue.so must be linked before libc.so")
+ except Empty:
+ raise RuntimeError("Could not parse readelf output?")
+
+
+def check_networking(target, binary):
+ retcode = 0
+ networking_functions = set(
+ [
+ # socketpair is not concerning; it is restricted to AF_UNIX
+ "connect",
+ "accept",
+ "listen",
+ "getsockname",
+ "getsockopt",
+ "recv",
+ "send",
+ # We would be concerned by recvmsg and sendmsg; but we believe
+ # they are okay as documented in 1376621#c23
+ "gethostbyname",
+ "gethostbyaddr",
+ "gethostent",
+ "sethostent",
+ "endhostent",
+ "gethostent_r",
+ "gethostbyname2",
+ "gethostbyaddr_r",
+ "gethostbyname_r",
+ "gethostbyname2_r",
+ "getservent",
+ "getservbyname",
+ "getservbyport",
+ "setservent",
+ "getprotoent",
+ "getprotobyname",
+ "getprotobynumber",
+ "setprotoent",
+ "endprotoent",
+ ]
+ )
+    bad_occurrences_names = set()
+
+ try:
+ for sym in at_least_one(iter_elf_symbols(target, binary, all=True)):
+ if sym["addr"] == 0 and sym["name"] in networking_functions:
+                bad_occurrences_names.add(sym["name"])
+ except Empty:
+ raise RuntimeError("Could not parse llvm-objdump output?")
+
+ basename = os.path.basename(binary)
+    if bad_occurrences_names:
+ s = (
+ "TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} "
+ + "networking function(s) being imported in the rust static library ({})"
+ )
+ print(
+ s.format(
+ basename,
+                len(bad_occurrences_names),
+                ",".join(sorted(bad_occurrences_names)),
+ ),
+ file=sys.stderr,
+ )
+ retcode = 1
+ elif buildconfig.substs.get("MOZ_AUTOMATION"):
+ print("TEST-PASS | check_networking | {}".format(basename))
+ return retcode
+
+
+def checks(target, binary):
+ # The clang-plugin is built as target but is really a host binary.
+ # Cheat and pretend we were passed the right argument.
+ if "clang-plugin" in binary:
+ target = HOST
+ checks = []
+ if buildconfig.substs.get("MOZ_STDCXX_COMPAT") and target["platform"] == "Linux":
+ checks.append(check_binary_compat)
+
+ # Disabled for local builds because of readelf performance: See bug 1472496
+ if not buildconfig.substs.get("DEVELOPER_OPTIONS"):
+ checks.append(check_textrel)
+ checks.append(check_pt_load)
+ checks.append(check_mozglue_order)
+
+ retcode = 0
+ basename = os.path.basename(binary)
+ for c in checks:
+ try:
+ name = c.__name__
+ c(target, binary)
+ if buildconfig.substs.get("MOZ_AUTOMATION"):
+ print("TEST-PASS | {} | {}".format(name, basename))
+ except Skip:
+ pass
+ except RuntimeError as e:
+ print(
+ "TEST-UNEXPECTED-FAIL | {} | {} | {}".format(name, basename, str(e)),
+ file=sys.stderr,
+ )
+ retcode = 1
+ return retcode
+
+
+def main(args):
+ parser = argparse.ArgumentParser(description="Check built binaries")
+
+ parser.add_argument(
+ "--host", action="store_true", help="Perform checks for a host binary"
+ )
+ parser.add_argument(
+ "--target", action="store_true", help="Perform checks for a target binary"
+ )
+ parser.add_argument(
+ "--networking",
+ action="store_true",
+ help="Perform checks for networking functions",
+ )
+
+ parser.add_argument(
+ "binary", metavar="PATH", help="Location of the binary to check"
+ )
+
+ options = parser.parse_args(args)
+
+ if options.host == options.target:
+ print("Exactly one of --host or --target must be given", file=sys.stderr)
+ return 1
+
+ if options.networking and options.host:
+ print("--networking is only valid with --target", file=sys.stderr)
+ return 1
+
+ if options.networking:
+ return check_networking(TARGET, options.binary)
+ elif options.host:
+ return checks(HOST, options.binary)
+ elif options.target:
+ return checks(TARGET, options.binary)
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
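
To illustrate the version gate in check_binary_compat (a standalone sketch, not part of the patch; the readelf line below is a fabricated sample): one --dyn-syms row is split exactly as in iter_elf_symbols, and the symbol version is compared against GLIBC_MAX_VERSION.

    # Sketch: how one readelf --dyn-syms line feeds the GLIBC_ version check.
    from packaging.version import Version

    GLIBC_MAX_VERSION = Version("2.17")
    line = "  12: 0000000000000000     0 FUNC    GLOBAL DEFAULT  UND memcpy@GLIBC_2.14 (2)"
    n, addr, size, type_, bind, vis, index, name = line.split()[:8]
    name, ver = name.rsplit("@", 1)  # "memcpy", "GLIBC_2.14"
    if int(addr, 16) == 0 and ver.startswith("GLIBC_"):
        # An undefined symbol importing GLIBC_2.14 passes the 2.17 ceiling.
        assert Version(ver[len("GLIBC_"):]) <= GLIBC_MAX_VERSION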
diff --git a/python/mozbuild/mozbuild/action/download_wpt_manifest.py b/python/mozbuild/mozbuild/action/download_wpt_manifest.py
new file mode 100644
index 0000000000..84f4a15d14
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/download_wpt_manifest.py
@@ -0,0 +1,21 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This action is used to generate the wpt manifest
+
+import sys
+
+import buildconfig
+
+
+def main():
+ print("Downloading wpt manifest")
+ sys.path.insert(0, buildconfig.topsrcdir)
+ import manifestupdate
+
+ return 0 if manifestupdate.run(buildconfig.topsrcdir, buildconfig.topobjdir) else 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/python/mozbuild/mozbuild/action/dump_env.py b/python/mozbuild/mozbuild/action/dump_env.py
new file mode 100644
index 0000000000..ec178700eb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/dump_env.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# We invoke a Python program to dump our environment in order to get
+# native paths printed on Windows so that these paths can be incorporated
+# into Python configure's environment.
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
+
+from shellutil import quote
+
+
+def environ():
+ # We would use six.ensure_text but the global Python isn't guaranteed to have
+ # the correct version of six installed.
+ def ensure_text(s):
+ if sys.version_info > (3, 0) or isinstance(s, unicode):
+ # os.environ always returns string keys and values in Python 3.
+ return s
+ else:
+ return s.decode("utf-8")
+
+ return [(ensure_text(k), ensure_text(v)) for (k, v) in os.environ.items()]
+
+
+for key, value in environ():
+ print("%s=%s" % (key, quote(value)))
diff --git a/python/mozbuild/mozbuild/action/dumpsymbols.py b/python/mozbuild/mozbuild/action/dumpsymbols.py
new file mode 100644
index 0000000000..0af2c1c4e5
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/dumpsymbols.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+import buildconfig
+
+
+def dump_symbols(target, tracking_file, count_ctors=False):
+ # Our tracking file, if present, will contain path(s) to the previously generated
+ # symbols. Remove them in this case so we don't simply accumulate old symbols
+ # during incremental builds.
+ if os.path.isfile(os.path.normpath(tracking_file)):
+ with open(tracking_file, "r") as fh:
+ files = fh.read().splitlines()
+ dirs = set(os.path.dirname(f) for f in files)
+ for d in dirs:
+ shutil.rmtree(
+ os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols", d),
+ ignore_errors=True,
+ )
+
+ # Build default args for symbolstore.py based on platform.
+ sym_store_args = []
+
+ dump_syms_bin = buildconfig.substs["DUMP_SYMS"]
+ os_arch = buildconfig.substs["OS_ARCH"]
+ if os_arch == "WINNT":
+ sym_store_args.extend(["-c", "--vcs-info"])
+ if "PDBSTR" in buildconfig.substs:
+ sym_store_args.append("-i")
+ elif os_arch == "Darwin":
+ cpu = {
+ "x86": "i386",
+ "aarch64": "arm64",
+ }.get(buildconfig.substs["TARGET_CPU"], buildconfig.substs["TARGET_CPU"])
+ sym_store_args.extend(["-c", "-a", cpu, "--vcs-info"])
+ elif os_arch == "Linux":
+ sym_store_args.extend(["-c", "--vcs-info"])
+
+ sym_store_args.append(
+ "--install-manifest=%s,%s"
+ % (
+ os.path.join(
+ buildconfig.topobjdir, "_build_manifests", "install", "dist_include"
+ ),
+ os.path.join(buildconfig.topobjdir, "dist", "include"),
+ )
+ )
+ objcopy = buildconfig.substs.get("OBJCOPY")
+ if objcopy:
+ os.environ["OBJCOPY"] = objcopy
+
+ if buildconfig.substs.get("MOZ_THUNDERBIRD"):
+ sym_store_args.extend(["-s", os.path.join(buildconfig.topsrcdir, "comm")])
+
+ args = (
+ [
+ sys.executable,
+ os.path.join(
+ buildconfig.topsrcdir,
+ "toolkit",
+ "crashreporter",
+ "tools",
+ "symbolstore.py",
+ ),
+ ]
+ + sym_store_args
+ + [
+ "-s",
+ buildconfig.topsrcdir,
+ dump_syms_bin,
+ os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols"),
+ os.path.abspath(target),
+ ]
+ )
+ if count_ctors:
+ args.append("--count-ctors")
+ print("Running: %s" % " ".join(args))
+ out_files = subprocess.check_output(args, universal_newlines=True)
+ with open(tracking_file, "w", encoding="utf-8", newline="\n") as fh:
+ fh.write(out_files)
+ fh.flush()
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ usage="Usage: dumpsymbols.py <library or program> <tracking file>"
+ )
+ parser.add_argument(
+ "--count-ctors",
+ action="store_true",
+ default=False,
+ help="Count static initializers",
+ )
+ parser.add_argument("library_or_program", help="Path to library or program")
+ parser.add_argument("tracking_file", help="Tracking file")
+    args = parser.parse_args(argv)
+
+ return dump_symbols(args.library_or_program, args.tracking_file, args.count_ctors)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/exe_7z_archive.py b/python/mozbuild/mozbuild/action/exe_7z_archive.py
new file mode 100644
index 0000000000..b0d35be2bf
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/exe_7z_archive.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import buildconfig
+import mozpack.path as mozpath
+
+from mozbuild.base import BuildEnvironmentNotFoundException
+
+
+def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
+ tmpdir = tempfile.mkdtemp(prefix="tmp")
+ try:
+ if pkg_dir:
+ shutil.move(pkg_dir, "core")
+
+ if use_upx:
+ final_sfx = mozpath.join(tmpdir, "7zSD.sfx")
+ upx = buildconfig.substs.get("UPX", "upx")
+ wine = buildconfig.substs.get("WINE")
+ if wine and upx.lower().endswith(".exe"):
+ cmd = [wine, upx]
+ else:
+ cmd = [upx]
+ subprocess.check_call(
+ cmd
+ + [
+ "--best",
+ "-o",
+ final_sfx,
+ sfx_package,
+ ]
+ )
+ else:
+ final_sfx = sfx_package
+
+ try:
+ sevenz = buildconfig.config.substs["7Z"]
+ except BuildEnvironmentNotFoundException:
+ # configure hasn't been run, just use the default
+ sevenz = "7z"
+ subprocess.check_call(
+ [
+ sevenz,
+ "a",
+ "-r",
+ "-t7z",
+ mozpath.join(tmpdir, "app.7z"),
+ "-mx",
+ "-m0=BCJ2",
+ "-m1=LZMA:d25",
+ "-m2=LZMA:d19",
+ "-m3=LZMA:d19",
+ "-mb0:1",
+ "-mb0s1:2",
+ "-mb0s2:3",
+ ]
+ )
+
+ with open(package, "wb") as o:
+ for i in [final_sfx, tagfile, mozpath.join(tmpdir, "app.7z")]:
+ shutil.copyfileobj(open(i, "rb"), o)
+ os.chmod(package, 0o0755)
+ finally:
+ if pkg_dir:
+ shutil.move("core", pkg_dir)
+ shutil.rmtree(tmpdir)
+
+
+def main(args):
+    if len(args) != 5:
+ print(
+ "Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>",
+ file=sys.stderr,
+ )
+ return 1
+ else:
+ archive_exe(args[0], args[1], args[2], args[3], args[4])
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
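
The final installer produced by archive_exe is plain byte concatenation: SFX stub, then tag file, then the 7z payload. A standalone sketch of that last step (hypothetical filenames):

    # Sketch: a self-extracting installer is stub + tag + archive, concatenated.
    import shutil

    with open("setup.exe", "wb") as out:
        for part in ("7zSD.sfx", "app.tag", "app.7z"):
            with open(part, "rb") as f:
                shutil.copyfileobj(f, out)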
diff --git a/python/mozbuild/mozbuild/action/fat_aar.py b/python/mozbuild/mozbuild/action/fat_aar.py
new file mode 100644
index 0000000000..d17d4696a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/fat_aar.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Fetch and unpack architecture-specific Maven zips, verify cross-architecture
+compatibility, and ready inputs to an Android multi-architecture fat AAR build.
+"""
+
+import argparse
+import sys
+from collections import OrderedDict, defaultdict
+from hashlib import sha1 # We don't need a strong hash to compare inputs.
+from io import BytesIO
+from zipfile import ZipFile
+
+import mozpack.path as mozpath
+import six
+from mozpack.copier import FileCopier
+from mozpack.files import JarFinder
+from mozpack.mozjar import JarReader
+from mozpack.packager.unpack import UnpackFinder
+
+
+def fat_aar(distdir, aars_paths, no_process=False, no_compatibility_check=False):
+ if no_process:
+ print("Not processing architecture-specific artifact Maven AARs.")
+ return 0
+
+ # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
+ diffs = defaultdict(lambda: defaultdict(list))
+ missing_arch_prefs = set()
+ # Collect multi-architecture inputs to the fat AAR.
+ copier = FileCopier()
+
+ for arch, aar_path in aars_paths.items():
+ # Map old non-architecture-specific path to new architecture-specific path.
+ old_rewrite_map = {
+ "greprefs.js": "{}/greprefs.js".format(arch),
+ "defaults/pref/geckoview-prefs.js": "defaults/pref/{}/geckoview-prefs.js".format(
+ arch
+ ),
+ }
+
+ # Architecture-specific preferences files.
+ arch_prefs = set(old_rewrite_map.values())
+ missing_arch_prefs |= set(arch_prefs)
+
+ jar_finder = JarFinder(aar_path, JarReader(aar_path))
+ for path, fileobj in UnpackFinder(jar_finder):
+ # Native libraries go straight through.
+ if mozpath.match(path, "jni/**"):
+ copier.add(path, fileobj)
+
+ elif path in arch_prefs:
+ copier.add(path, fileobj)
+
+ elif path in ("classes.jar", "annotations.zip"):
+ # annotations.zip differs due to timestamps, but the contents should not.
+
+ # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
+ # and it's not worth working around, so we use Python's zip functionality
+ # instead.
+ z = ZipFile(BytesIO(fileobj.open().read()))
+ for r in z.namelist():
+ fingerprint = sha1(z.open(r).read()).hexdigest()
+ diffs["{}!/{}".format(path, r)][fingerprint].append(arch)
+
+ else:
+ fingerprint = sha1(six.ensure_binary(fileobj.open().read())).hexdigest()
+ # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
+ # since in practice they will never overlap.
+ diffs[path][fingerprint].append(arch)
+
+ missing_arch_prefs.discard(path)
+
+ # Some differences are allowed across the architecture-specific AARs. We could allow-list
+ # the actual content, but it's not necessary right now.
+ allow_pattern_list = {
+ "AndroidManifest.xml", # Min SDK version is different for 32- and 64-bit builds.
+ "classes.jar!/org/mozilla/gecko/util/HardwareUtils.class", # Min SDK as well.
+ "classes.jar!/org/mozilla/geckoview/BuildConfig.class",
+ # Each input captures its CPU architecture.
+ "chrome/toolkit/content/global/buildconfig.html",
+ # Bug 1556162: localized resources are not deterministic across
+ # per-architecture builds triggered from the same push.
+ "**/*.ftl",
+ "**/*.dtd",
+ "**/*.properties",
+ }
+
+ not_allowed = OrderedDict()
+
+ def format_diffs(ds):
+ # Like ' armeabi-v7a, arm64-v8a -> XXX\n x86, x86_64 -> YYY'.
+ return "\n".join(
+ sorted(
+ " {archs} -> {fingerprint}".format(
+ archs=", ".join(sorted(archs)), fingerprint=fingerprint
+ )
+ for fingerprint, archs in ds.items()
+ )
+ )
+
+ for p, ds in sorted(diffs.items()):
+ if len(ds) <= 1:
+ # Only one hash across all inputs: roll on.
+ continue
+
+ if any(mozpath.match(p, pat) for pat in allow_pattern_list):
+ print(
+ 'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format(
+ path=p, ds_repr=format_diffs(ds)
+ )
+ )
+ continue
+
+ not_allowed[p] = ds
+
+ for p, ds in not_allowed.items():
+ print(
+ 'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format(
+ path=p, ds_repr=format_diffs(ds)
+ )
+ )
+
+ for missing in sorted(missing_arch_prefs):
+ print(
+ "Disallowed: Inputs missing expected architecture-specific input: {missing}".format(
+ missing=missing
+ )
+ )
+
+ if not no_compatibility_check and (missing_arch_prefs or not_allowed):
+ return 1
+
+ output_dir = mozpath.join(distdir, "output")
+ copier.copy(output_dir)
+
+ return 0
+
+
+_ALL_ARCHS = ("armeabi-v7a", "arm64-v8a", "x86_64", "x86")
+
+
+def main(argv):
+ description = """Unpack architecture-specific Maven AARs, verify cross-architecture
+compatibility, and ready inputs to an Android multi-architecture fat AAR build."""
+
+ parser = argparse.ArgumentParser(description=description)
+ parser.add_argument(
+ "--no-process", action="store_true", help="Do not process Maven AARs."
+ )
+ parser.add_argument(
+ "--no-compatibility-check",
+ action="store_true",
+ help="Do not fail if Maven AARs are not compatible.",
+ )
+ parser.add_argument("--distdir", required=True)
+
+ for arch in _ALL_ARCHS:
+ command_line_flag = arch.replace("_", "-")
+ parser.add_argument("--{}".format(command_line_flag), dest=arch)
+
+ args = parser.parse_args(argv)
+
+ args_dict = vars(args)
+
+ aars_paths = {
+ arch: args_dict.get(arch) for arch in _ALL_ARCHS if args_dict.get(arch)
+ }
+
+ if not aars_paths:
+ raise ValueError("You must provide at least one AAR file!")
+
+ return fat_aar(
+ args.distdir,
+ aars_paths,
+ no_process=args.no_process,
+ no_compatibility_check=args.no_compatibility_check,
+ )
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
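
The compatibility check in fat_aar pivots on the nested `diffs` map: per path, per content fingerprint, the list of architectures that produced it; more than one fingerprint for a path means the per-architecture AARs diverge. A toy sketch of that detection (fabricated hashes):

    # Sketch: diffs[path][fingerprint] -> [archs]; >1 fingerprint = divergence.
    from collections import defaultdict

    diffs = defaultdict(lambda: defaultdict(list))
    diffs["assets/omni.ja"]["aaaa"].extend(["armeabi-v7a", "arm64-v8a"])
    diffs["assets/omni.ja"]["bbbb"].extend(["x86", "x86_64"])

    for path, by_hash in diffs.items():
        if len(by_hash) > 1:  # architecture-specific content detected
            print(path, dict(by_hash))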
diff --git a/python/mozbuild/mozbuild/action/file_generate.py b/python/mozbuild/mozbuild/action/file_generate.py
new file mode 100644
index 0000000000..98dec4e359
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/file_generate.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Given a Python script and arguments describing the output file, and
+# the arguments that can be used to generate the output file, call the
+# script's |main| method with appropriate arguments.
+
+import argparse
+import importlib.util
+import os
+import sys
+import traceback
+
+import buildconfig
+import six
+
+from mozbuild.action.util import log_build_task
+from mozbuild.makeutil import Makefile
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.util import FileAvoidWrite
+
+
+def main(argv):
+    parser = argparse.ArgumentParser(
+        description="Generate a file from a Python script", add_help=False
+    )
+ parser.add_argument(
+ "--locale", metavar="locale", type=six.text_type, help="The locale in use."
+ )
+ parser.add_argument(
+ "python_script",
+ metavar="python-script",
+ type=six.text_type,
+ help="The Python script to run",
+ )
+ parser.add_argument(
+ "method_name",
+ metavar="method-name",
+ type=six.text_type,
+ help="The method of the script to invoke",
+ )
+ parser.add_argument(
+ "output_file",
+ metavar="output-file",
+ type=six.text_type,
+ help="The file to generate",
+ )
+ parser.add_argument(
+ "dep_file",
+ metavar="dep-file",
+ type=six.text_type,
+ help="File to write any additional make dependencies to",
+ )
+ parser.add_argument(
+ "dep_target",
+ metavar="dep-target",
+ type=six.text_type,
+ help="Make target to use in the dependencies file",
+ )
+ parser.add_argument(
+ "additional_arguments",
+ metavar="arg",
+ nargs=argparse.REMAINDER,
+ help="Additional arguments to the script's main() method",
+ )
+
+ args = parser.parse_args(argv)
+
+ kwargs = {}
+ if args.locale:
+ kwargs["locale"] = args.locale
+ script = args.python_script
+ # Permit the script to import modules from the same directory in which it
+ # resides. The justification for doing this is that if we were invoking
+ # the script as:
+ #
+ # python script arg1...
+ #
+ # then importing modules from the script's directory would come for free.
+ # Since we're invoking the script in a roundabout way, we provide this
+ # bit of convenience.
+ sys.path.append(os.path.dirname(script))
+ spec = importlib.util.spec_from_file_location("script", script)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ method = args.method_name
+ if not hasattr(module, method):
+ print(
+ 'Error: script "{0}" is missing a {1} method'.format(script, method),
+ file=sys.stderr,
+ )
+ return 1
+
+ ret = 1
+ try:
+ with FileAvoidWrite(args.output_file, readmode="rb") as output:
+ try:
+ ret = module.__dict__[method](
+ output, *args.additional_arguments, **kwargs
+ )
+ except Exception:
+ # Ensure that we don't overwrite the file if the script failed.
+ output.avoid_writing_to_file()
+ raise
+
+ # The following values indicate a statement of success:
+ # - a set() (see below)
+ # - 0
+ # - False
+ # - None
+ #
+ # Everything else is an error (so scripts can conveniently |return
+ # 1| or similar). If a set is returned, the elements of the set
+ # indicate additional dependencies that will be listed in the deps
+ # file. Python module imports are automatically included as
+ # dependencies.
+ if isinstance(ret, set):
+ deps = set(six.ensure_text(s) for s in ret)
+ # The script succeeded, so reset |ret| to indicate that.
+ ret = None
+ else:
+ deps = set()
+
+ # Only write out the dependencies if the script was successful
+ if not ret:
+ # Add dependencies on any python modules that were imported by
+ # the script.
+ deps |= set(
+ six.ensure_text(s)
+ for s in iter_modules_in_path(
+ buildconfig.topsrcdir, buildconfig.topobjdir
+ )
+ )
+ # Add dependencies on any buildconfig items that were accessed
+ # by the script.
+ deps |= set(six.ensure_text(s) for s in buildconfig.get_dependencies())
+
+ mk = Makefile()
+ mk.create_rule([args.dep_target]).add_dependencies(deps)
+ with FileAvoidWrite(args.dep_file) as dep_file:
+ mk.dump(dep_file)
+ else:
+ # Ensure that we don't overwrite the file if the script failed.
+ output.avoid_writing_to_file()
+
+ except IOError as e:
+ print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
+ traceback.print_exc()
+ return 1
+ return ret
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
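
For reference, a generator script consumable by file_generate.py could look like the following hypothetical example (gen_version.py and version.txt are invented names). The named method receives the opened output file first, followed by any extra command-line arguments; it returns None/0/False on success, or a set of extra dependencies.

    # gen_version.py -- hypothetical generator, invoked roughly as:
    #   file_generate.py gen_version.py main out.h out.h.pp out.h version.txt
    def main(output, version_file):
        with open(version_file) as f:
            output.write('#define FAKE_VERSION "%s"\n' % f.read().strip())
        # A returned set lists extra dependencies for the dep file.
        return {version_file}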
diff --git a/python/mozbuild/mozbuild/action/file_generate_wrapper.py b/python/mozbuild/mozbuild/action/file_generate_wrapper.py
new file mode 100644
index 0000000000..b6c030bbf6
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/file_generate_wrapper.py
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+import buildconfig
+
+
+def action(fh, script, target_dir, *args):
+ fh.close()
+ os.unlink(fh.name)
+
+ args = list(args)
+ objdir = Path.cwd()
+ topsrcdir = Path(buildconfig.topsrcdir)
+
+ def make_absolute(base_path, p):
+ return Path(base_path) / Path(p.lstrip("/"))
+
+ try:
+ abs_target_dir = str(make_absolute(objdir, target_dir))
+ abs_script = make_absolute(topsrcdir, script)
+ script = [str(abs_script)]
+ if abs_script.suffix == ".py":
+ script = [sys.executable] + script
+ subprocess.check_call(script + args, cwd=abs_target_dir)
+ except Exception:
+ relative = os.path.relpath(__file__, topsrcdir)
+ print(
+ "%s:action caught exception. params=%s\n"
+ % (relative, json.dumps([script, target_dir] + args, indent=2))
+ )
+ raise
diff --git a/python/mozbuild/mozbuild/action/generate_symbols_file.py b/python/mozbuild/mozbuild/action/generate_symbols_file.py
new file mode 100644
index 0000000000..955a676c08
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_symbols_file.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+from io import StringIO
+
+import buildconfig
+
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import DefinesAction
+
+
+def generate_symbols_file(output, *args):
+ """ """
+ parser = argparse.ArgumentParser()
+ parser.add_argument("input")
+ parser.add_argument("-D", action=DefinesAction)
+ parser.add_argument("-U", action="append", default=[])
+ args = parser.parse_args(args)
+ input = os.path.abspath(args.input)
+
+ pp = Preprocessor()
+ pp.context.update(buildconfig.defines["ALLDEFINES"])
+ if args.D:
+ pp.context.update(args.D)
+ for undefine in args.U:
+ if undefine in pp.context:
+ del pp.context[undefine]
+ # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
+ if buildconfig.substs.get("MOZ_DEBUG"):
+ pp.context["DEBUG"] = "1"
+ # Ensure @DATA@ works as expected (see the Windows section further below)
+ if buildconfig.substs["OS_TARGET"] == "WINNT":
+ pp.context["DATA"] = "DATA"
+ else:
+ pp.context["DATA"] = ""
+ pp.out = StringIO()
+ pp.do_filter("substitution")
+ pp.do_include(input)
+
+ symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]
+
+ libname, ext = os.path.splitext(os.path.basename(output.name))
+
+ if buildconfig.substs["OS_TARGET"] == "WINNT":
+ # A def file is generated for MSVC link.exe that looks like the
+ # following:
+ # LIBRARY library.dll
+ # EXPORTS
+ # symbol1
+ # symbol2
+ # ...
+ #
+ # link.exe however requires special markers for data symbols, so in
+ # that case the symbols look like:
+ # data_symbol1 DATA
+ # data_symbol2 DATA
+ # ...
+ #
+ # In the input file, this is just annotated with the following syntax:
+ # data_symbol1 @DATA@
+ # data_symbol2 @DATA@
+ # ...
+ # The DATA variable is "simply" expanded by the preprocessor, to
+ # nothing on non-Windows, such that we only get the symbol name on
+ # those platforms, and to DATA on Windows, so that the "DATA" part
+ # is, in fact, part of the symbol name as far as the symbols variable
+ # is concerned.
+ assert ext == ".def"
+ output.write("LIBRARY %s\nEXPORTS\n %s\n" % (libname, "\n ".join(symbols)))
+ elif (
+ buildconfig.substs.get("GCC_USE_GNU_LD")
+ or buildconfig.substs["OS_TARGET"] == "SunOS"
+ ):
+ # A linker version script is generated for GNU LD that looks like the
+ # following:
+ # liblibrary.so {
+ # global:
+ # symbol1;
+ # symbol2;
+ # ...
+ # local:
+ # *;
+ # };
+ output.write(
+ "%s {\nglobal:\n %s;\nlocal:\n *;\n};" % (libname, ";\n ".join(symbols))
+ )
+ elif buildconfig.substs["OS_TARGET"] == "Darwin":
+ # A list of symbols is generated for Apple ld that simply lists all
+ # symbols, with an underscore prefix.
+ output.write("".join("_%s\n" % s for s in symbols))
+
+ return set(pp.includes)
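
To illustrate the three output shapes (a worked example, not from the patch), take a hypothetical input containing the lines `symbol1` and `data_symbol2 @DATA@`. The script would emit roughly:

    library.def (Windows link.exe):
        LIBRARY library
        EXPORTS
         symbol1
         data_symbol2 DATA

    Version script (GNU ld / SunOS):
        library {
        global:
         symbol1;
         data_symbol2;
        local:
         *;
        };

    Symbol list (Darwin ld):
        _symbol1
        _data_symbol2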
diff --git a/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py
new file mode 100644
index 0000000000..f957318a7f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py
@@ -0,0 +1,101 @@
+import json
+import re
+import xml.etree.ElementTree as ET
+from pathlib import Path
+
+JS_FILE_TEMPLATE = """\
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const EXPORTED_SYMBOLS = ["getHTMLFragment"];
+
+const Fragments = {json_string};
+
+/*
+ * Loads HTML fragment strings pulled from fragment documents.
+ * @param key - key identifying HTML fragment
+ *
+ * @return raw HTML/XHTML string
+ */
+const getHTMLFragment = key => Fragments[key];
+"""
+
+RE_COLLAPSE_WHITESPACE = re.compile(r"\s+")
+
+
+def get_fragment_key(path, template_name=None):
+ key = Path(path).stem
+ if template_name:
+ key += "/" + template_name
+ return key
+
+
+def fill_html_fragments_map(fragment_map, path, template, doctype=None):
+    # collapse whitespace
+ for elm in template.iter():
+ if elm.text:
+ elm.text = RE_COLLAPSE_WHITESPACE.sub(" ", elm.text)
+ if elm.tail:
+ elm.tail = RE_COLLAPSE_WHITESPACE.sub(" ", elm.tail)
+ key = get_fragment_key(path, template.attrib.get("name"))
+ xml = "".join(ET.tostring(elm, encoding="unicode") for elm in template).strip()
+ if doctype:
+ xml = doctype + "\n" + xml
+ fragment_map[key] = xml
+
+
+def get_html_fragments_from_file(fragment_map, path):
+ for _, (name, value) in ET.iterparse(path, events=["start-ns"]):
+ ET.register_namespace(name, value)
+ tree = ET.parse(path)
+ root = tree.getroot()
+ sub_templates = root.findall("{http://www.w3.org/1999/xhtml}template")
+ # if all nested nodes are templates then treat as list of templates
+ if len(sub_templates) == len(root):
+ doctype = ""
+ for template in sub_templates:
+ if template.get("doctype") == "true":
+ doctype = template.text.strip()
+ break
+ for template in sub_templates:
+ if template.get("doctype") != "true":
+ fill_html_fragments_map(fragment_map, path, template, doctype)
+ else:
+ fill_html_fragments_map(fragment_map, path, root, None)
+
+
+def generate(output, *inputs):
+ """Builds an html fragments loader JS file from the input xml file(s)
+
+ The xml files are expected to be in the format of:
+ `<template>...xhtml markup...</template>`
+
+ or `<template><template name="fragment_name">...xhtml markup...</template>...</template>`
+ Where there are multiple templates. All markup is expected to be properly namespaced.
+
+ In the JS file, calling getHTMLFragment(key) will return the HTML string from the xml file
+ that matches the key.
+
+ The key format is `filename_without_extension/template_name` for files with
+ multiple templates, or just `filename_without_extension` for files with one template.
+ `filename_without_extension` is the xml filename without the .xml extension
+ and `template_name` is the name attribute of template node containing the xml fragment.
+
+ Arguments:
+ output -- File handle to JS file being generated
+ inputs -- list of xml filenames to include in loader
+
+ Returns:
+ The set of dependencies which should trigger this command to be re-run.
+ This is ultimately returned to the build system for use by the backend
+ to ensure that incremental rebuilds happen when any dependency changes.
+ """
+
+ fragment_map = {}
+ for file in inputs:
+ get_html_fragments_from_file(fragment_map, file)
+ json_string = json.dumps(fragment_map, separators=(",", ":"))
+ contents = JS_FILE_TEMPLATE.format(json_string=json_string)
+ output.write(contents)
+ return set(inputs)
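
A usage sketch for generate (illustrative; dialog.xml is an invented input). A file whose root wraps named sub-templates yields one fragment per template, keyed as `filename/template_name`:

    # Sketch: one hypothetical XML input through generate().
    import io

    with open("dialog.xml", "w", encoding="utf-8") as f:
        f.write(
            '<template xmlns="http://www.w3.org/1999/xhtml">'
            '<template name="header"><h1>Hi</h1></template>'
            "</template>"
        )

    out = io.StringIO()
    generate(out, "dialog.xml")  # assumes generate() from this module is in scope
    # The emitted JS maps "dialog/header" to the serialized <h1> fragment.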
diff --git a/python/mozbuild/mozbuild/action/install.py b/python/mozbuild/mozbuild/action/install.py
new file mode 100644
index 0000000000..02f0f2694a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/install.py
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# A simple script to invoke mozinstall from the command line without depending
+# on a build config.
+
+import sys
+
+import mozinstall
+
+
+def main(args):
+ if len(args) != 2:
+ print("Usage: install.py [src] [dest]")
+ return 1
+ src, dest = args
+ mozinstall.install(src, dest)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/jar_maker.py b/python/mozbuild/mozbuild/action/jar_maker.py
new file mode 100644
index 0000000000..a244b66a52
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/jar_maker.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+import mozbuild.jar
+from mozbuild.action.util import log_build_task
+
+
+def main(args):
+ return mozbuild.jar.main(args)
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/l10n_merge.py b/python/mozbuild/mozbuild/action/l10n_merge.py
new file mode 100644
index 0000000000..1a04d60107
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/l10n_merge.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import shutil
+import sys
+
+from mozbuild.util import ensureParentDir
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Merge l10n files.")
+ parser.add_argument("--output", help="Path to write merged output")
+ parser.add_argument("--ref-file", help="Path to reference file (en-US)")
+ parser.add_argument("--l10n-file", help="Path to locale file")
+
+ args = parser.parse_args(argv)
+
+ from compare_locales.compare import ContentComparer, Observer
+ from compare_locales.paths import File
+
+ cc = ContentComparer([Observer()])
+ cc.compare(
+ File(args.ref_file, args.ref_file, ""),
+ File(args.l10n_file, args.l10n_file, ""),
+ args.output,
+ )
+
+ ensureParentDir(args.output)
+ if not os.path.exists(args.output):
+ src = args.l10n_file
+ if not os.path.exists(args.l10n_file):
+ src = args.ref_file
+ shutil.copy(src, args.output)
+
+ return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/langpack_localeNames.json b/python/mozbuild/mozbuild/action/langpack_localeNames.json
new file mode 100644
index 0000000000..9014c7717a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/langpack_localeNames.json
@@ -0,0 +1,426 @@
+{
+ "ach": {
+ "english": "Acoli",
+ "native": "Acholi"
+ },
+ "af": {
+ "native": "Afrikaans"
+ },
+ "an": {
+ "english": "Aragonese",
+ "native": "Aragonés"
+ },
+ "ar": {
+ "english": "Arabic",
+ "native": "العربية"
+ },
+ "ast": {
+ "english": "Asturian",
+ "native": "Asturianu"
+ },
+ "az": {
+ "english": "Azerbaijani",
+ "native": "Azərbaycanca"
+ },
+ "be": {
+ "english": "Belarusian",
+ "native": "Беларуская"
+ },
+ "bg": {
+ "english": "Bulgarian",
+ "native": "Български"
+ },
+ "bn": {
+ "english": "Bangla",
+ "native": "বাংলা"
+ },
+ "bo": {
+ "english": "Tibetan",
+ "native": "བོད་སྐད"
+ },
+ "br": {
+ "english": "Breton",
+ "native": "Brezhoneg"
+ },
+ "brx": {
+ "english": "Bodo",
+ "native": "बड़ो"
+ },
+ "bs": {
+ "english": "Bosnian",
+ "native": "Bosanski"
+ },
+ "ca": {
+ "english": "Catalan",
+ "native": "Català"
+ },
+ "ca-valencia": {
+ "english": "Catalan, Valencian",
+ "native": "Català (Valencià)"
+ },
+ "cak": {
+ "native": "Kaqchikel"
+ },
+ "cs": {
+ "english": "Czech",
+ "native": "Čeština"
+ },
+ "cy": {
+ "english": "Welsh",
+ "native": "Cymraeg"
+ },
+ "da": {
+ "english": "Danish",
+ "native": "Dansk"
+ },
+ "de": {
+ "english": "German",
+ "native": "Deutsch"
+ },
+ "dsb": {
+ "english": "Lower Sorbian",
+ "native": "Dolnoserbšćina"
+ },
+ "el": {
+ "english": "Greek",
+ "native": "Ελληνικά"
+ },
+ "en-CA": {
+ "native": "English (CA)"
+ },
+ "en-GB": {
+ "native": "English (GB)"
+ },
+ "en-US": {
+ "native": "English (US)"
+ },
+ "eo": {
+ "native": "Esperanto"
+ },
+ "es-AR": {
+ "english": "Spanish, Argentina",
+ "native": "Español (AR)"
+ },
+ "es-CL": {
+ "english": "Spanish, Chile",
+ "native": "Español (CL)"
+ },
+ "es-ES": {
+ "english": "Spanish, Spain",
+ "native": "Español (ES)"
+ },
+ "es-MX": {
+ "english": "Spanish, Mexico",
+ "native": "Español (MX)"
+ },
+ "et": {
+ "english": "Estonian",
+ "native": "Eesti"
+ },
+ "eu": {
+ "english": "Basque",
+ "native": "Euskara"
+ },
+ "fa": {
+ "english": "Persian",
+ "native": "فارسی"
+ },
+ "ff": {
+ "english": "Fulah",
+ "native": "Pulaar"
+ },
+ "fi": {
+ "english": "Finnish",
+ "native": "Suomi"
+ },
+ "fr": {
+ "english": "French",
+ "native": "Français"
+ },
+ "fur": {
+ "english": "Friulian",
+ "native": "Furlan"
+ },
+ "fy-NL": {
+ "english": "Frisian",
+ "native": "Frysk"
+ },
+ "ga-IE": {
+ "english": "Irish",
+ "native": "Gaeilge"
+ },
+ "gd": {
+ "english": "Scottish Gaelic",
+ "native": "Gàidhlig"
+ },
+ "gl": {
+ "english": "Galician",
+ "native": "Galego"
+ },
+ "gn": {
+ "native": "Guarani"
+ },
+ "gu-IN": {
+ "english": "Gujarati",
+ "native": "ગુજરાતી"
+ },
+ "he": {
+ "english": "Hebrew",
+ "native": "עברית"
+ },
+ "hi-IN": {
+ "english": "Hindi",
+ "native": "हिन्दी"
+ },
+ "hr": {
+ "english": "Croatian",
+ "native": "Hrvatski"
+ },
+ "hsb": {
+ "english": "Upper Sorbian",
+ "native": "Hornjoserbšćina"
+ },
+ "hu": {
+ "english": "Hungarian",
+ "native": "Magyar"
+ },
+ "hy-AM": {
+ "english": "Armenian",
+ "native": "հայերեն"
+ },
+ "ia": {
+ "native": "Interlingua"
+ },
+ "id": {
+ "english": "Indonesian",
+ "native": "Indonesia"
+ },
+ "is": {
+ "english": "Icelandic",
+ "native": "Islenska"
+ },
+ "it": {
+ "english": "Italian",
+ "native": "Italiano"
+ },
+ "ja": {
+ "english": "Japanese",
+ "native": "日本語"
+ },
+ "ja-JP-mac": {
+ "english": "Japanese",
+ "native": "日本語"
+ },
+ "ka": {
+ "english": "Georgian",
+ "native": "ქართული"
+ },
+ "kab": {
+ "english": "Kabyle",
+ "native": "Taqbaylit"
+ },
+ "kk": {
+ "english": "Kazakh",
+ "native": "қазақ тілі"
+ },
+ "km": {
+ "english": "Khmer",
+ "native": "ខ្មែរ"
+ },
+ "kn": {
+ "english": "Kannada",
+ "native": "ಕನ್ನಡ"
+ },
+ "ko": {
+ "english": "Korean",
+ "native": "한국어"
+ },
+ "lij": {
+ "english": "Ligurian",
+ "native": "Ligure"
+ },
+ "lo": {
+ "english": "Lao",
+ "native": "ລາວ"
+ },
+ "lt": {
+ "english": "Lithuanian",
+ "native": "Lietuvių"
+ },
+ "ltg": {
+ "english": "Latgalian",
+ "native": "Latgalīšu"
+ },
+ "lv": {
+ "english": "Latvian",
+ "native": "Latviešu"
+ },
+ "mk": {
+ "english": "Macedonian",
+ "native": "македонски"
+ },
+ "ml": {
+ "english": "Malayalam",
+ "native": "മലയാളം"
+ },
+ "mr": {
+ "english": "Marathi",
+ "native": "मराठी"
+ },
+ "ms": {
+ "english": "Malay",
+ "native": "Melayu"
+ },
+ "my": {
+ "english": "Burmese",
+ "native": "မြန်မာ"
+ },
+ "nb-NO": {
+ "english": "Norwegian Bokmål",
+ "native": "Norsk Bokmål"
+ },
+ "ne-NP": {
+ "english": "Nepali",
+ "native": "नेपाली"
+ },
+ "nl": {
+ "english": "Dutch",
+ "native": "Nederlands"
+ },
+ "nn-NO": {
+ "english": "Norwegian Nynorsk",
+ "native": "Nynorsk"
+ },
+ "oc": {
+ "native": "Occitan"
+ },
+ "or": {
+ "english": "Odia",
+ "native": "ଓଡ଼ିଆ"
+ },
+ "pa-IN": {
+ "english": "Punjabi",
+ "native": "ਪੰਜਾਬੀ"
+ },
+ "pl": {
+ "english": "Polish",
+ "native": "Polski"
+ },
+ "pt-BR": {
+ "english": "Brazilian Portuguese",
+ "native": "Português (BR)"
+ },
+ "pt-PT": {
+ "english": "Portuguese",
+ "native": "Português (PT)"
+ },
+ "rm": {
+ "english": "Romansh",
+ "native": "Rumantsch"
+ },
+ "ro": {
+ "english": "Romanian",
+ "native": "Română"
+ },
+ "ru": {
+ "english": "Russian",
+ "native": "Русский"
+ },
+ "sc": {
+ "english": "Sardinian",
+ "native": "Sardu"
+ },
+ "sco": {
+ "native": "Scots"
+ },
+ "si": {
+ "english": "Sinhala",
+ "native": "සිංහල"
+ },
+ "sk": {
+ "english": "Slovak",
+ "native": "Slovenčina"
+ },
+ "sl": {
+ "english": "Slovenian",
+ "native": "Slovenščina"
+ },
+ "son": {
+ "english": "Songhai",
+ "native": "Soŋay"
+ },
+ "sq": {
+ "english": "Albanian",
+ "native": "Shqip"
+ },
+ "sr": {
+ "english": "Serbian",
+ "native": "Cрпски"
+ },
+ "sv-SE": {
+ "english": "Swedish",
+ "native": "Svenska"
+ },
+ "szl": {
+ "english": "Silesian",
+ "native": "Ślōnsko"
+ },
+ "ta": {
+ "english": "Tamil",
+ "native": "தமிழ்"
+ },
+ "te": {
+ "english": "Telugu",
+ "native": "తెలుగు"
+ },
+ "tg": {
+ "english": "Tajik",
+ "native": "Тоҷикӣ"
+ },
+ "th": {
+ "english": "Thai",
+ "native": "ไทย"
+ },
+ "tl": {
+ "english": "Filipino",
+ "native": "Tagalog"
+ },
+ "tr": {
+ "english": "Turkish",
+ "native": "Türkçe"
+ },
+ "trs": {
+ "native": "Triqui"
+ },
+ "uk": {
+ "english": "Ukrainian",
+ "native": "Українська"
+ },
+ "ur": {
+ "english": "Urdu",
+ "native": "اردو"
+ },
+ "uz": {
+ "english": "Uzbek",
+ "native": "O‘zbek"
+ },
+ "vi": {
+ "english": "Vietnamese",
+ "native": "Tiếng Việt"
+ },
+ "wo": {
+ "native": "Wolof"
+ },
+ "xh": {
+ "english": "Xhosa",
+ "native": "IsiXhosa"
+ },
+ "zh-CN": {
+ "english": "Simplified Chinese",
+ "native": "简体中文"
+ },
+ "zh-TW": {
+ "english": "Traditional Chinese",
+ "native": "正體中文"
+ }
+}
diff --git a/python/mozbuild/mozbuild/action/langpack_manifest.py b/python/mozbuild/mozbuild/action/langpack_manifest.py
new file mode 100644
index 0000000000..c79539cbce
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/langpack_manifest.py
@@ -0,0 +1,587 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+###
+# This script generates a web manifest JSON file based on the xpi-stage
+# directory structure. It extracts data necessary to produce the complete
+# manifest file for a language pack:
+# from the `langpack-manifest.ftl` file in the locale directory;
+# from chrome registry entries;
+# and from other information in the `xpi-stage` directory.
+###
+
+import argparse
+import datetime
+import io
+import json
+import logging
+import os
+import re
+import sys
+import time
+
+import fluent.syntax.ast as FTL
+import mozpack.path as mozpath
+import mozversioncontrol
+import requests
+from fluent.syntax.parser import FluentParser
+from mozpack.chrome.manifest import Manifest, ManifestLocale, parse_manifest
+
+from mozbuild.configure.util import Version
+
+
+def write_file(path, content):
+ with io.open(path, "w", encoding="utf-8") as out:
+ out.write(content + "\n")
+
+
+pushlog_api_url = "{0}/json-rev/{1}"
+
+
+def get_build_date():
+ """Return the current date or SOURCE_DATE_EPOCH, if set."""
+ return datetime.datetime.utcfromtimestamp(
+ int(os.environ.get("SOURCE_DATE_EPOCH", time.time()))
+ )
+
+
+###
+# Retrieves a UTC datetime of the push for the current commit from a
+# mercurial clone directory. The SOURCE_DATE_EPOCH environment
+# variable is honored, for reproducibility.
+#
+# Args:
+# path (str) - path to a directory
+#
+# Returns:
+# (datetime) - a datetime object
+#
+# Example:
+# dt = get_dt_from_hg("/var/vcs/l10n-central/pl")
+# dt == datetime(2017, 10, 11, 23, 31, 54, 0)
+###
+def get_dt_from_hg(path):
+ with mozversioncontrol.get_repository_object(path=path) as repo:
+        phase = repo._run("log", "-r", ".", "-T{phase}")
+ if phase.strip() != "public":
+ return get_build_date()
+ repo_url = repo._run("paths", "default")
+ repo_url = repo_url.strip().replace("ssh://", "https://")
+ repo_url = repo_url.replace("hg://", "https://")
+        cs = repo._run("log", "-r", ".", "-T{node}")
+
+ url = pushlog_api_url.format(repo_url, cs)
+ session = requests.Session()
+ try:
+ response = session.get(url)
+ except Exception as e:
+ msg = "Failed to retrieve push timestamp using {}\nError: {}".format(url, e)
+ raise Exception(msg)
+
+ data = response.json()
+
+ try:
+ date = data["pushdate"][0]
+ except KeyError as exc:
+ msg = "{}\ndata is: {}".format(
+ str(exc), json.dumps(data, indent=2, sort_keys=True)
+ )
+ raise KeyError(msg)
+
+ return datetime.datetime.utcfromtimestamp(date)
+
+
+###
+# Generates timestamp for a locale based on its path.
+# If possible, will use the commit timestamp from HG repository,
+# and if that fails, will generate the timestamp for `now`.
+#
+# The timestamp format is "{year}{month}{day}{hour}{minute}{second}" and
+# the datetime stored in it is using UTC timezone.
+#
+# Args:
+# path (str) - path to the locale directory
+#
+# Returns:
+# (str) - a timestamp string
+#
+# Example:
+# ts = get_timestamp_for_locale("/var/vcs/l10n-central/pl")
+# ts == "20170914215617"
+###
+def get_timestamp_for_locale(path):
+ dt = None
+ if os.path.isdir(os.path.join(path, ".hg")):
+ dt = get_dt_from_hg(path)
+
+ if dt is None:
+ dt = get_build_date()
+
+ dt = dt.replace(microsecond=0)
+ return dt.strftime("%Y%m%d%H%M%S")
+
+
+###
+# Parses an FTL file into a key-value pair object.
+# Does not support attributes, terms, variables, functions or selectors;
+# only messages with values consisting of text elements and literals.
+#
+# Args:
+# path (str) - a path to an FTL file
+#
+# Returns:
+# (dict) - A mapping of message keys to formatted string values.
+# Empty if the file at `path` was not found.
+#
+# Example:
+# res = parse_flat_ftl('./browser/langpack-metadata.ftl')
+# res == {
+# 'langpack-title': 'Polski',
+# 'langpack-creator': 'mozilla.org',
+# 'langpack-contributors': 'Joe Solon, Suzy Solon'
+# }
+###
+def parse_flat_ftl(path):
+ parser = FluentParser(with_spans=False)
+ try:
+ with open(path, encoding="utf-8") as file:
+ res = parser.parse(file.read())
+ except FileNotFoundError as err:
+ logging.warning(err)
+ return {}
+
+ result = {}
+ for entry in res.body:
+ if isinstance(entry, FTL.Message) and isinstance(entry.value, FTL.Pattern):
+ flat = ""
+ for elem in entry.value.elements:
+ if isinstance(elem, FTL.TextElement):
+ flat += elem.value
+ elif isinstance(elem.expression, FTL.Literal):
+ flat += elem.expression.parse()["value"]
+ else:
+ name = type(elem.expression).__name__
+ raise Exception(f"Unsupported {name} for {entry.id.name} in {path}")
+ result[entry.id.name] = flat.strip()
+ return result
+
+
+##
+# Generates the title and description for the langpack.
+#
+# Uses data stored in a JSON file next to this source,
+# which is expected to have the following format:
+# Record<string, { native: string, english?: string }>
+#
+# If an English name is given and is different from the native one,
+# it will be included in the description and, if within the character limits,
+# also in the name.
+#
+# Length limit for names is 45 characters, for descriptions is 132,
+# return values are truncated if needed.
+#
+# NOTE: If you're updating the native locale names,
+# you should also update the data in
+# toolkit/components/mozintl/mozIntl.sys.mjs.
+#
+# Args:
+# app (str) - Application name
+# locale (str) - Locale identifier
+#
+# Returns:
+# (str, str) - Tuple of title and description
+#
+###
+def get_title_and_description(app, locale):
+ dir = os.path.dirname(__file__)
+ with open(os.path.join(dir, "langpack_localeNames.json"), encoding="utf-8") as nf:
+ names = json.load(nf)
+
+ nameCharLimit = 45
+ descCharLimit = 132
+ nameTemplate = "Language: {}"
+ descTemplate = "{} Language Pack for {}"
+
+ if locale in names:
+ data = names[locale]
+ native = data["native"]
+ english = data["english"] if "english" in data else native
+
+ if english != native:
+ title = nameTemplate.format(f"{native} ({english})")
+ if len(title) > nameCharLimit:
+ title = nameTemplate.format(native)
+ description = descTemplate.format(app, f"{native} ({locale}) – {english}")
+ else:
+ title = nameTemplate.format(native)
+ description = descTemplate.format(app, f"{native} ({locale})")
+ else:
+ title = nameTemplate.format(locale)
+ description = descTemplate.format(app, locale)
+
+ return title[:nameCharLimit], description[:descCharLimit]
+
+
+###
+# Build the manifest author string based on the author string
+# and optionally adding the list of contributors, if provided.
+#
+# Args:
+# ftl (dict) - a key-value mapping of locale-specific strings
+#
+# Returns:
+# (str) - a string to be placed in the author field of the manifest.json
+#
+# Example:
+# s = get_author({
+# 'langpack-creator': 'mozilla.org',
+# 'langpack-contributors': 'Joe Solon, Suzy Solon'
+# })
+# s == 'mozilla.org (contributors: Joe Solon, Suzy Solon)'
+###
+def get_author(ftl):
+    author = ftl.get("langpack-creator", "mozilla.org")
+    contrib = ftl.get("langpack-contributors", "")
+ if contrib:
+ return f"{author} (contributors: {contrib})"
+ else:
+ return author
+
+
+##
+# Converts the list of chrome manifest entry flags to the list of platforms
+# for the langpack manifest.
+#
+# The list of result platforms is taken from AppConstants.platform.
+#
+# Args:
+# flags (FlagList) - a list of Chrome Manifest entry flags
+#
+# Returns:
+#   (list) - a list of platforms the entry applies to
+#
+# Example:
+# str(flags) == "os==MacOS os==Windows"
+# platforms = convert_entry_flags_to_platform_codes(flags)
+# platforms == ['mac', 'win']
+#
+# The function supports only the `os` flag name and the equality operator;
+# it raises an exception for any other flag or operator.
+###
+def convert_entry_flags_to_platform_codes(flags):
+ if not flags:
+ return None
+
+ ret = []
+ for key in flags:
+ if key != "os":
+ raise Exception("Unknown flag name")
+
+ for value in flags[key].values:
+ if value[0] != "==":
+ raise Exception("Inequality flag cannot be converted")
+
+ if value[1] == "Android":
+ ret.append("android")
+ elif value[1] == "LikeUnix":
+ ret.append("linux")
+ elif value[1] == "Darwin":
+ ret.append("macosx")
+ elif value[1] == "WINNT":
+ ret.append("win")
+ else:
+ raise Exception("Unknown flag value {0}".format(value[1]))
+
+ return ret
+
+
+###
+# Recursively parses a chrome manifest file, appending new entries
+# to the result list.
+#
+# The function can handle two entry types: 'locale' and 'manifest'
+#
+# Args:
+# path (str) - a path to a chrome manifest
+# base_path (str) - a path to the base directory all chrome registry
+# entries will be relative to
+#   chrome_entries (list) - a list to which entries will be appended
+#
+# Example:
+#
+# chrome_entries = []
+# parse_manifest('./chrome.manifest', './', chrome_entries)
+#
+# chrome_entries == [
+# {
+# 'type': 'locale',
+# 'alias': 'devtools',
+# 'locale': 'pl',
+#     'platforms': None,
+# 'path': 'chrome/pl/locale/pl/devtools/'
+# },
+# {
+# 'type': 'locale',
+# 'alias': 'autoconfig',
+# 'locale': 'pl',
+# 'platforms': ['win', 'mac'],
+# 'path': 'chrome/pl/locale/pl/autoconfig/'
+# },
+# ]
+###
+def parse_chrome_manifest(path, base_path, chrome_entries):
+ for entry in parse_manifest(None, path):
+ if isinstance(entry, Manifest):
+ parse_chrome_manifest(
+ os.path.join(os.path.dirname(path), entry.relpath),
+ base_path,
+ chrome_entries,
+ )
+ elif isinstance(entry, ManifestLocale):
+ entry_path = os.path.join(
+ os.path.relpath(os.path.dirname(path), base_path), entry.relpath
+ )
+ chrome_entries.append(
+ {
+ "type": "locale",
+ "alias": entry.name,
+ "locale": entry.id,
+ "platforms": convert_entry_flags_to_platform_codes(entry.flags),
+ "path": mozpath.normsep(entry_path),
+ }
+ )
+ else:
+ raise Exception("Unknown type {0}".format(entry.name))
+
+
+###
+# Gets the version to use in the langpack.
+#
+# This uses the env variable MOZ_BUILD_DATE if it exists to expand the version
+# to be unique in automation.
+#
+# Args:
+# app_version - Application version
+#
+# Returns:
+# str - Version to use
+#
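+# Example (values derived from the code below):
+#   With MOZ_BUILD_DATE=20230908054759 in the environment:
+#     get_version_maybe_buildid('119.0a1') == '119.0.20230908.54759'
+#   Without MOZ_BUILD_DATE:
+#     get_version_maybe_buildid('119.0a1') == '119.0'
+#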
+###
+def get_version_maybe_buildid(app_version):
+ def _extract_numeric_part(part):
+        matches = re.compile(r"[^\d]").search(part)
+ if matches:
+ part = part[0 : matches.start()]
+ if len(part) == 0:
+ return "0"
+ return part
+
+ parts = [_extract_numeric_part(part) for part in app_version.split(".")]
+
+ buildid = os.environ.get("MOZ_BUILD_DATE")
+ if buildid and len(buildid) != 14:
+ print("Ignoring invalid MOZ_BUILD_DATE: %s" % buildid, file=sys.stderr)
+ buildid = None
+
+ if buildid:
+ # Use simple versioning format, see: Bug 1793925 - The version string
+ # should start with: <firefox major>.<firefox minor>
+ version = ".".join(parts[0:2])
+ # We then break the buildid into two version parts so that the full
+ # version looks like: <firefox major>.<firefox minor>.YYYYMMDD.HHmmss
+ date, time = buildid[:8], buildid[8:]
+ # Leading zeros are not allowed.
+ time = time.lstrip("0")
+ if len(time) == 0:
+ time = "0"
+ version = f"{version}.{date}.{time}"
+ else:
+ version = ".".join(parts)
+
+ return version
+
+
+###
+# Generates a new web manifest dict with values specific for a language pack.
+#
+# Args:
+#    locstr        (str)  - A string with a comma separated list of locales
+#                           for which resources are embedded in the
+#                           language pack
+#    version       (str)  - The version of the language pack
+#    min_app_ver   (str)  - A minimum version of the application the language
+#                           resources are for
+#    max_app_ver   (str)  - A maximum version of the application the language
+#                           resources are for
+#    app_name      (str)  - The name of the application the language
+#                           resources are for
+#    l10n_basedir  (str)  - The base directory for the locale sources
+#    langpack_eid  (str)  - The extension id to use for the language pack
+#    ftl           (dict) - A dictionary of locale-specific strings
+#    chrome_entries (list) - A list of chrome registry entries
+#
+# Returns:
+#    (str) - a JSON-serialized web manifest
+#
+# Example:
+#   manifest = create_webmanifest(
+#     'pl',
+#     '57.0',
+#     '57.0',
+#     '57.0.*',
+#     'Firefox',
+#     '/var/vcs/l10n-central',
+#     'langpack-pl@mozilla.org',
+#     {'langpack-title': 'Polski'},
+#     chrome_entries
+#   )
+# manifest == {
+# 'languages': {
+# 'pl': {
+# 'version': '201709121481',
+# 'chrome_resources': {
+# 'alert': 'chrome/pl/locale/pl/alert/',
+# 'branding': 'browser/chrome/pl/locale/global/',
+# 'global-platform': {
+# 'macosx': 'chrome/pl/locale/pl/global-platform/mac/',
+# 'win': 'chrome/pl/locale/pl/global-platform/win/',
+# 'linux': 'chrome/pl/locale/pl/global-platform/unix/',
+# 'android': 'chrome/pl/locale/pl/global-platform/unix/',
+# },
+# 'forms': 'browser/chrome/pl/locale/forms/',
+# ...
+# }
+# }
+# },
+# 'sources': {
+# 'browser': {
+# 'base_path': 'browser/'
+# }
+# },
+# 'browser_specific_settings': {
+# 'gecko': {
+# 'strict_min_version': '57.0',
+# 'strict_max_version': '57.0.*',
+# 'id': 'langpack-pl@mozilla.org',
+# }
+# },
+# 'version': '57.0',
+# 'name': 'Polski Language Pack',
+# ...
+# }
+###
+def create_webmanifest(
+ locstr,
+ version,
+ min_app_ver,
+ max_app_ver,
+ app_name,
+ l10n_basedir,
+ langpack_eid,
+ ftl,
+ chrome_entries,
+):
+    locales = [loc.strip() for loc in locstr.split(",")]
+ main_locale = locales[0]
+ title, description = get_title_and_description(app_name, main_locale)
+ author = get_author(ftl)
+
+ manifest = {
+ "langpack_id": main_locale,
+ "manifest_version": 2,
+ "browser_specific_settings": {
+ "gecko": {
+ "id": langpack_eid,
+ "strict_min_version": min_app_ver,
+ "strict_max_version": max_app_ver,
+ }
+ },
+ "name": title,
+ "description": description,
+ "version": get_version_maybe_buildid(version),
+ "languages": {},
+ "sources": {"browser": {"base_path": "browser/"}},
+ "author": author,
+ }
+
+ cr = {}
+ for entry in chrome_entries:
+ if entry["type"] == "locale":
+ platforms = entry["platforms"]
+ if platforms:
+ if entry["alias"] not in cr:
+ cr[entry["alias"]] = {}
+ for platform in platforms:
+ cr[entry["alias"]][platform] = entry["path"]
+ else:
+ assert entry["alias"] not in cr
+ cr[entry["alias"]] = entry["path"]
+ else:
+ raise Exception("Unknown type {0}".format(entry["type"]))
+
+ for loc in locales:
+ manifest["languages"][loc] = {
+ "version": get_timestamp_for_locale(os.path.join(l10n_basedir, loc)),
+ "chrome_resources": cr,
+ }
+
+ return json.dumps(manifest, indent=2, ensure_ascii=False)
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--locales", help="List of language codes provided by the langpack"
+ )
+ parser.add_argument("--app-version", help="Version of the application")
+ parser.add_argument(
+ "--max-app-ver", help="Max version of the application the langpack is for"
+ )
+ parser.add_argument(
+ "--app-name", help="Name of the application the langpack is for"
+ )
+ parser.add_argument(
+ "--l10n-basedir", help="Base directory for locales used in the language pack"
+ )
+ parser.add_argument(
+ "--langpack-eid", help="Language pack id to use for this locale"
+ )
+ parser.add_argument(
+ "--metadata",
+ help="FTL file defining langpack metadata",
+ )
+ parser.add_argument("--input", help="Langpack directory.")
+
+ args = parser.parse_args(args)
+
+ chrome_entries = []
+ parse_chrome_manifest(
+ os.path.join(args.input, "chrome.manifest"), args.input, chrome_entries
+ )
+
+ ftl = parse_flat_ftl(args.metadata)
+
+ # Mangle the app version to set min version (remove patch level)
+ min_app_version = args.app_version
+ if "a" not in min_app_version: # Don't mangle alpha versions
+ v = Version(min_app_version)
+ if args.app_name == "SeaMonkey":
+ # SeaMonkey is odd in that <major> hasn't changed for many years.
+ # So min is <major>.<minor>.0
+ min_app_version = "{}.{}.0".format(v.major, v.minor)
+ else:
+ # Language packs should be minversion of {major}.0
+ min_app_version = "{}.0".format(v.major)
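+    # For example, an app version of 115.0.2 yields a min_app_version of
+    # 115.0, while SeaMonkey 2.53.18 yields 2.53.0; alpha versions such as
+    # 116.0a1 pass through unchanged.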
+
+ res = create_webmanifest(
+ args.locales,
+ args.app_version,
+ min_app_version,
+ args.max_app_ver,
+ args.app_name,
+ args.l10n_basedir,
+ args.langpack_eid,
+ ftl,
+ chrome_entries,
+ )
+ write_file(os.path.join(args.input, "manifest.json"), res)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/make_dmg.py b/python/mozbuild/mozbuild/action/make_dmg.py
new file mode 100644
index 0000000000..6dc19450fb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_dmg.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import platform
+import sys
+from pathlib import Path
+
+from mozpack import dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+from mozbuild.repackaging.application_ini import get_application_ini_value
+
+is_linux = platform.system() == "Linux"
+
+
+def main(args):
+    parser = argparse.ArgumentParser(
+        description="Pack a directory of files into a DMG"
+    )
+
+ parser.add_argument("--dsstore", help="DSStore file from")
+ parser.add_argument("--background", help="Background file from")
+ parser.add_argument("--icon", help="Icon file from")
+ parser.add_argument("--volume-name", help="Disk image volume name")
+
+ parser.add_argument("inpath", metavar="PATH_IN", help="Location of files to pack")
+ parser.add_argument("dmgfile", metavar="DMG_OUT", help="DMG File to create")
+
+ options = parser.parse_args(args)
+
+ extra_files = []
+ if options.dsstore:
+ extra_files.append((options.dsstore, ".DS_Store"))
+ if options.background:
+ extra_files.append((options.background, ".background/background.png"))
+ if options.icon:
+ extra_files.append((options.icon, ".VolumeIcon.icns"))
+
+ if options.volume_name:
+ volume_name = options.volume_name
+ else:
+ volume_name = get_application_ini_value(
+ options.inpath, "App", "CodeName", fallback="Name"
+ )
+
+ # Resolve required tools
+ dmg_tool = bootstrap_toolchain("dmg/dmg")
+ hfs_tool = bootstrap_toolchain("dmg/hfsplus")
+ mkfshfs_tool = bootstrap_toolchain("hfsplus/newfs_hfs")
+
+ dmg.create_dmg(
+ source_directory=Path(options.inpath),
+ output_dmg=Path(options.dmgfile),
+ volume_name=volume_name,
+ extra_files=extra_files,
+ dmg_tool=dmg_tool,
+ hfs_tool=hfs_tool,
+ mkfshfs_tool=mkfshfs_tool,
+ )
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/make_unzip.py b/python/mozbuild/mozbuild/action/make_unzip.py
new file mode 100644
index 0000000000..e4d2902f53
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_unzip.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+import sys
+
+import buildconfig
+
+
+def make_unzip(package):
+ subprocess.check_call([buildconfig.substs["UNZIP"], package])
+
+
+def main(args):
+ if len(args) != 1:
+ print("Usage: make_unzip.py <package>", file=sys.stderr)
+ return 1
+ else:
+ make_unzip(args[0])
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/node.py b/python/mozbuild/mozbuild/action/node.py
new file mode 100644
index 0000000000..fca0745b80
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/node.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import shlex
+import subprocess
+import sys
+
+import buildconfig
+import six
+
+SCRIPT_ALLOWLIST = [buildconfig.topsrcdir + "/devtools/client/shared/build/build.js"]
+
+ALLOWLIST_ERROR = """
+%s is not
+in SCRIPT_ALLOWLIST in python/mozbuild/mozbuild/action/node.py.
+Using NodeJS from moz.build is currently in beta, and node
+scripts to be executed need to be added to the allowlist and
+reviewed by a build peer so that we can get a better sense of
+how support should evolve. (To consult a build peer, raise a
+question in the #build channel at https://chat.mozilla.org.)
+"""
+
+
+def is_script_in_allowlist(script_path):
+    return script_path in SCRIPT_ALLOWLIST
+
+
+def execute_node_cmd(node_cmd_list):
+ """Execute the given node command list.
+
+ Arguments:
+ node_cmd_list -- a list of the command and arguments to be executed
+
+ Returns:
+ The set of dependencies which should trigger this command to be re-run.
+ This is ultimately returned to the build system for use by the backend
+ to ensure that incremental rebuilds happen when any dependency changes.
+
+ The node script is expected to output lines for all of the dependencies
+ to stdout, each prefixed by the string "dep:". These lines will make up
+ the returned set of dependencies. Any line not so-prefixed will simply be
+ printed to stderr instead.
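+
+    Example:
+        A script that prints "dep:src/a.js" and "Compiled 1 file" results
+        in {"src/a.js"} being returned, with "Compiled 1 file" forwarded
+        to stderr.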
+ """
+
+ try:
+        printable_cmd = " ".join(shlex.quote(arg) for arg in node_cmd_list)
+ print('Executing "{}"'.format(printable_cmd), file=sys.stderr)
+ sys.stderr.flush()
+
+ # We need to redirect stderr to a pipe because
+ # https://github.com/nodejs/node/issues/14752 causes issues with make.
+ proc = subprocess.Popen(
+ node_cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+
+ stdout, stderr = proc.communicate()
+ retcode = proc.wait()
+
+ if retcode != 0:
+ print(stderr, file=sys.stderr)
+ sys.stderr.flush()
+ sys.exit(retcode)
+
+ # Process the node script output
+ #
+ # XXX Starting with an empty list means that node scripts can
+ # (intentionally or inadvertently) remove deps. Do we want this?
+ deps = []
+ for line in stdout.splitlines():
+ line = six.ensure_text(line)
+ if "dep:" in line:
+ deps.append(line.replace("dep:", ""))
+ else:
+ print(line, file=sys.stderr)
+ sys.stderr.flush()
+
+ return set(deps)
+
+    except (subprocess.CalledProcessError, OSError) as err:
+ # XXX On Mac (and elsewhere?) "OSError: [Errno 13] Permission denied"
+ # (at least sometimes) means "node executable not found". Can we
+ # disambiguate this from real "Permission denied" errors so that we
+ # can log such problems more clearly?
+ print(
+ """Failed with %s. Be sure to check that your mozconfig doesn't
+ have --disable-nodejs in it. If it does, try removing that line and
+ building again."""
+ % str(err),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+
+def generate(output, node_script, *files):
+ """Call the given node_script to transform the given modules.
+
+ Arguments:
+ output -- a dummy file, used by the build system. Can be ignored.
+ node_script -- the script to be executed. Must be in the SCRIPT_ALLOWLIST
+ files -- files to be transformed, will be passed to the script as arguments
+
+ Returns:
+ The set of dependencies which should trigger this command to be re-run.
+ This is ultimately returned to the build system for use by the backend
+ to ensure that incremental rebuilds happen when any dependency changes.
+ """
+
+ node_interpreter = buildconfig.substs.get("NODEJS")
+ if not node_interpreter:
+ print(
+ """NODEJS not set. Be sure to check that your mozconfig doesn't
+ have --disable-nodejs in it. If it does, try removing that line
+ and building again.""",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+    if not isinstance(node_script, (six.text_type, six.binary_type)):
+        print(
+            "moz.build file didn't pass a valid node script name to execute",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+    node_script = six.ensure_text(node_script)
+
+ if not is_script_in_allowlist(node_script):
+ print(ALLOWLIST_ERROR % (node_script), file=sys.stderr)
+ sys.exit(1)
+
+ node_cmd_list = [node_interpreter, node_script]
+ node_cmd_list.extend(files)
+
+ return execute_node_cmd(node_cmd_list)
diff --git a/python/mozbuild/mozbuild/action/package_generated_sources.py b/python/mozbuild/mozbuild/action/package_generated_sources.py
new file mode 100644
index 0000000000..d87a75fc6f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/package_generated_sources.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import sys
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.files import BaseFile
+
+from mozbuild.generated_sources import get_generated_sources
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Produce archive of generated sources")
+ parser.add_argument("outputfile", help="File to write output to")
+ args = parser.parse_args(argv)
+
+ objdir_abspath = mozpath.abspath(buildconfig.topobjdir)
+
+ def is_valid_entry(entry):
+ if isinstance(entry[1], BaseFile):
+ entry_abspath = mozpath.abspath(entry[1].path)
+ else:
+ entry_abspath = mozpath.abspath(entry[1])
+ if not entry_abspath.startswith(objdir_abspath):
+ print(
+ "Warning: omitting generated source [%s] from archive" % entry_abspath,
+ file=sys.stderr,
+ )
+ return False
+ return True
+
+ files = dict(filter(is_valid_entry, get_generated_sources()))
+ with open(args.outputfile, "wb") as fh:
+ create_tar_gz_from_files(fh, files, compresslevel=5)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/preprocessor.py b/python/mozbuild/mozbuild/action/preprocessor.py
new file mode 100644
index 0000000000..c59a05a90b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/preprocessor.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozbuild.action.util import log_build_task
+from mozbuild.preprocessor import Preprocessor
+
+
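+# `generate` is the entry point used when this action runs from a moz.build
+# GENERATED_FILES rule: it preprocesses the inputs into `output` and returns
+# the set of files included by the preprocessor so the build backend can
+# re-run the step when any of them change.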
+def generate(output, *args):
+ pp = Preprocessor()
+ pp.out = output
+ pp.handleCommandLine(list(args), True)
+ return set(pp.includes)
+
+
+def main(args):
+ pp = Preprocessor()
+ pp.handleCommandLine(args, True)
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/process_define_files.py b/python/mozbuild/mozbuild/action/process_define_files.py
new file mode 100644
index 0000000000..d775b52b57
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_define_files.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import re
+import sys
+
+import mozpack.path as mozpath
+from buildconfig import topobjdir, topsrcdir
+
+from mozbuild.backend.configenvironment import PartialConfigEnvironment
+
+
+def process_define_file(output, input):
+ """Creates the given config header. A config header is generated by
+ taking the corresponding source file and replacing some *#define/#undef*
+    occurrences:
+
+ - "#undef NAME" is turned into "#define NAME VALUE"
+ - "#define NAME" is unchanged
+ - "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
+ - "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
+    - Whitespace is preserved.
+
+ As a special rule, "#undef ALLDEFINES" is turned into "#define NAME
+ VALUE" for all the defined variables.
+ """
+
+ path = os.path.abspath(input)
+
+ config = PartialConfigEnvironment(topobjdir)
+
+ if mozpath.basedir(
+ path, [mozpath.join(topsrcdir, "js/src")]
+ ) and not config.substs.get("JS_STANDALONE"):
+ config = PartialConfigEnvironment(mozpath.join(topobjdir, "js", "src"))
+
+ with open(path, "r") as input:
+ r = re.compile(
+ "^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?", re.U
+ )
+ for l in input:
+ m = r.match(l)
+ if m:
+ cmd = m.group("cmd")
+ name = m.group("name")
+ value = m.group("value")
+ if name:
+ if name == "ALLDEFINES":
+ if cmd == "define":
+ raise Exception(
+ "`#define ALLDEFINES` is not allowed in a "
+ "CONFIGURE_DEFINE_FILE"
+ )
+
+ def define_for_name(name, val):
+ """WebRTC files like to define WINVER and _WIN32_WINNT
+ via the command line, which raises a mass of macro
+ redefinition warnings. Just handle those macros
+ specially here."""
+ define = "#define {name} {val}".format(name=name, val=val)
+ if name in ("_WIN32_IE", "_WIN32_WINNT", "WIN32", "WINVER"):
+ return "#if !defined({name})\n{define}\n#endif".format(
+ name=name, define=define
+ )
+ return define
+
+ defines = "\n".join(
+ sorted(
+ define_for_name(name, val)
+ for name, val in config.defines["ALLDEFINES"].items()
+ )
+ )
+ l = l[: m.start("cmd") - 1] + defines + l[m.end("name") :]
+ elif cmd == "define":
+ if value and name in config.defines:
+ l = (
+ l[: m.start("value")]
+ + str(config.defines[name])
+ + l[m.end("value") :]
+ )
+ elif cmd == "undef":
+ if name in config.defines:
+ l = (
+ l[: m.start("cmd")]
+ + "define"
+ + l[m.end("cmd") : m.end("name")]
+ + " "
+ + str(config.defines[name])
+ + l[m.end("name") :]
+ )
+ else:
+ l = "/* " + l[: m.end("name")] + " */" + l[m.end("name") :]
+
+ output.write(l)
+
+ deps = {path}
+ deps.update(config.get_dependencies())
+ return deps
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Process define files.")
+
+ parser.add_argument("input", help="Input define file.")
+
+ args = parser.parse_args(argv)
+
+ return process_define_file(sys.stdout, args.input)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/process_install_manifest.py b/python/mozbuild/mozbuild/action/process_install_manifest.py
new file mode 100644
index 0000000000..faf1376dba
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_install_manifest.py
@@ -0,0 +1,125 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import sys
+import time
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.errors import errors
+from mozpack.files import BaseFile, FileFinder
+from mozpack.manifests import InstallManifest
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import DefinesAction
+
+COMPLETE = (
+ "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; "
+ "Added/updated {updated}; "
+ "Removed {rm_files} files and {rm_dirs} directories."
+)
+
+
+def process_manifest(destdir, paths, track, no_symlinks=False, defines=None):
+    # Avoid a shared mutable default argument.
+    if defines is None:
+        defines = {}
+
+ if os.path.exists(track):
+ # We use the same format as install manifests for the tracking
+ # data.
+ manifest = InstallManifest(path=track)
+ remove_unaccounted = FileRegistry()
+ dummy_file = BaseFile()
+
+ finder = FileFinder(destdir, find_dotfiles=True)
+ for dest in manifest._dests:
+ for p, f in finder.find(dest):
+ remove_unaccounted.add(p, dummy_file)
+
+ remove_empty_directories = True
+ remove_all_directory_symlinks = True
+
+ else:
+ # If tracking is enabled and there is no file, we don't want to
+ # be removing anything.
+ remove_unaccounted = False
+ remove_empty_directories = False
+ remove_all_directory_symlinks = False
+
+ manifest = InstallManifest()
+ for path in paths:
+ manifest |= InstallManifest(path=path)
+
+ copier = FileCopier()
+ link_policy = "copy" if no_symlinks else "symlink"
+ manifest.populate_registry(
+ copier, defines_override=defines, link_policy=link_policy
+ )
+ with errors.accumulate():
+ result = copier.copy(
+ destdir,
+ remove_unaccounted=remove_unaccounted,
+ remove_all_directory_symlinks=remove_all_directory_symlinks,
+ remove_empty_directories=remove_empty_directories,
+ )
+
+ if track:
+ # We should record files that we actually copied.
+ # It is too late to expand wildcards when the track file is read.
+ manifest.write(path=track, expand_pattern=True)
+
+ return result
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Process install manifest files.")
+
+ parser.add_argument("destdir", help="Destination directory.")
+ parser.add_argument("manifests", nargs="+", help="Path to manifest file(s).")
+ parser.add_argument(
+ "--no-symlinks",
+ action="store_true",
+ help="Do not install symbolic links. Always copy files",
+ )
+ parser.add_argument(
+ "--track",
+ metavar="PATH",
+ required=True,
+ help="Use installed files tracking information from the given path.",
+ )
+ parser.add_argument(
+ "-D",
+ action=DefinesAction,
+ dest="defines",
+ metavar="VAR[=VAL]",
+ help="Define a variable to override what is specified in the manifest",
+ )
+
+ args = parser.parse_args(argv)
+
+ start = time.monotonic()
+
+ result = process_manifest(
+ args.destdir,
+ args.manifests,
+ track=args.track,
+ no_symlinks=args.no_symlinks,
+ defines=args.defines,
+ )
+
+ elapsed = time.monotonic() - start
+
+ print(
+ COMPLETE.format(
+ elapsed=elapsed,
+ dest=args.destdir,
+ existing=result.existing_files_count,
+ updated=result.updated_files_count,
+ rm_files=result.removed_files_count,
+ rm_dirs=result.removed_directories_count,
+ )
+ )
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/symbols_archive.py b/python/mozbuild/mozbuild/action/symbols_archive.py
new file mode 100644
index 0000000000..75ecb71d17
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/symbols_archive.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import sys
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+
+
+def make_archive(archive_name, base, exclude, include):
+ compress = ["**/*.sym"]
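+    # Only *.sym files are compressed when writing a .zip archive; everything
+    # else is stored uncompressed (see add_file below). For .tar.zst output
+    # the whole stream is zstd-compressed instead.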
+ finder = FileFinder(base, ignore=exclude)
+ if not include:
+ include = ["*"]
+ archive_basename = os.path.basename(archive_name)
+
+ def fill_archive(add_file):
+ for pat in include:
+ for p, f in finder.find(pat):
+ print(' Adding to "%s":\n\t"%s"' % (archive_basename, p))
+ add_file(p, f)
+
+ with open(archive_name, "wb") as fh:
+ if archive_basename.endswith(".zip"):
+ from mozpack.mozjar import JarWriter
+
+ with JarWriter(fileobj=fh, compress_level=5) as writer:
+
+ def add_file(p, f):
+ should_compress = any(mozpath.match(p, pat) for pat in compress)
+ writer.add(
+ p.encode("utf-8"),
+ f,
+ mode=f.mode,
+ compress=should_compress,
+ skip_duplicates=True,
+ )
+
+ fill_archive(add_file)
+ elif archive_basename.endswith(".tar.zst"):
+ import tarfile
+
+ import zstandard
+
+ ctx = zstandard.ZstdCompressor(threads=-1)
+ with ctx.stream_writer(fh) as zstdwriter:
+ with tarfile.open(
+ mode="w|", fileobj=zstdwriter, bufsize=1024 * 1024
+ ) as tar:
+
+ def add_file(p, f):
+ info = tar.gettarinfo(os.path.join(base, p), p)
+ tar.addfile(info, f.open())
+
+ fill_archive(add_file)
+ else:
+ raise Exception(
+ "Unsupported archive format for {}".format(archive_basename)
+ )
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Produce a symbols archive")
+ parser.add_argument("archive", help="Which archive to generate")
+ parser.add_argument("base", help="Base directory to package")
+ parser.add_argument(
+ "--full-archive", action="store_true", help="Generate a full symbol archive"
+ )
+
+ args = parser.parse_args(argv)
+
+ excludes = []
+ includes = []
+
+ if args.full_archive:
+ # We allow symbols for tests to be included when building on try
+ if os.environ.get("MH_BRANCH", "unknown") != "try":
+ excludes = ["*test*", "*Test*"]
+ else:
+ includes = ["**/*.sym"]
+
+ make_archive(args.archive, args.base, excludes, includes)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/test_archive.py b/python/mozbuild/mozbuild/action/test_archive.py
new file mode 100644
index 0000000000..06fef60f8d
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/test_archive.py
@@ -0,0 +1,875 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This action is used to produce test archives.
+#
+# Ideally, the data in this file should be defined in moz.build files.
+# It is defined inline because that was the easiest way to make test
+# archive generation fast.
+
+import argparse
+import itertools
+import os
+import sys
+import time
+
+import buildconfig
+import mozpack.path as mozpath
+from manifestparser import TestManifest
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.copier import FileRegistry
+from mozpack.files import ExistingFile, FileFinder
+from mozpack.manifests import InstallManifest
+from mozpack.mozjar import JarWriter
+from reftest import ReftestManifest
+
+from mozbuild.util import ensureParentDir
+
+STAGE = mozpath.join(buildconfig.topobjdir, "dist", "test-stage")
+
+TEST_HARNESS_BINS = [
+ "BadCertAndPinningServer",
+ "DelegatedCredentialsServer",
+ "EncryptedClientHelloServer",
+ "FaultyServer",
+ "GenerateOCSPResponse",
+ "OCSPStaplingServer",
+ "SanctionsTestServer",
+ "SmokeDMD",
+ "certutil",
+ "crashinject",
+ "geckodriver",
+ "http3server",
+ "minidumpwriter",
+ "pk12util",
+ "screenshot",
+ "screentopng",
+ "ssltunnel",
+ "xpcshell",
+]
+
+TEST_HARNESS_DLLS = ["crashinjectdll", "mozglue"]
+
+GMP_TEST_PLUGIN_DIRS = ["gmp-fake/**", "gmp-fakeopenh264/**"]
+
+# These entries will be used by artifact builds to re-construct an
+# objdir with the appropriate generated support files.
+OBJDIR_TEST_FILES = {
+ "xpcshell": {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/xpcshell",
+ "pattern": "**",
+ "dest": "xpcshell/tests",
+ },
+ "mochitest": {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/testing",
+ "pattern": "mochitest/**",
+ },
+}
+
+
+ARCHIVE_FILES = {
+ "common": [
+ {
+ "source": STAGE,
+ "base": "",
+ "pattern": "**",
+ "ignore": [
+ "cppunittest/**",
+ "condprof/**",
+ "gtest/**",
+ "mochitest/**",
+ "reftest/**",
+ "talos/**",
+ "raptor/**",
+ "awsy/**",
+ "web-platform/**",
+ "xpcshell/**",
+ "updater-dep/**",
+ "jsreftest/**",
+ "jit-test/**",
+ "jittest/**", # To make the ignore checker happy
+ "perftests/**",
+ "fuzztest/**",
+ ],
+ },
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "modules/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/marionette",
+ "patterns": ["client/**", "harness/**", "mach_test_package_commands.py"],
+ "dest": "marionette",
+ "ignore": ["client/docs", "harness/marionette_harness/tests"],
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "manifests": [
+ "testing/marionette/harness/marionette_harness/tests/unit-tests.ini"
+ ],
+ # We also need the manifests and harness_unit tests
+ "pattern": "testing/marionette/harness/marionette_harness/tests/**",
+ "dest": "marionette/tests",
+ },
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "mozbase/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "firefox-ui/**",
+ "ignore": ["firefox-ui/tests"],
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "pattern": "testing/firefox-ui/tests",
+ "dest": "firefox-ui/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "toolkit/components/telemetry/tests/marionette",
+ "pattern": "/**",
+ "dest": "telemetry/marionette",
+ },
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "tps/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "services/sync/",
+ "pattern": "tps/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "services/sync/tests/tps",
+ "pattern": "**",
+ "dest": "tps/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/web-platform/tests/tools/wptserve",
+ "pattern": "**",
+ "dest": "tools/wptserve",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/web-platform/tests/tools/third_party",
+ "pattern": "**",
+ "dest": "tools/wpt_third_party",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozterm",
+ "pattern": "**",
+ "dest": "tools/mozterm",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "xpcom/geckoprocesstypes_generator",
+ "pattern": "**",
+ "dest": "tools/geckoprocesstypes_generator",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/six",
+ "pattern": "**",
+ "dest": "tools/six",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/distro",
+ "pattern": "**",
+ "dest": "tools/distro",
+ },
+ {"source": buildconfig.topobjdir, "base": "", "pattern": "mozinfo.json"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": [
+ "%s%s" % (f, buildconfig.substs["BIN_SUFFIX"])
+ for f in TEST_HARNESS_BINS
+ ]
+ + [
+ "%s%s%s"
+ % (
+ buildconfig.substs["DLL_PREFIX"],
+ f,
+ buildconfig.substs["DLL_SUFFIX"],
+ )
+ for f in TEST_HARNESS_DLLS
+ ],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": GMP_TEST_PLUGIN_DIRS,
+ "dest": "bin/plugins",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": ["dmd.py", "fix_stacks.py"],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin/components",
+ "patterns": ["httpd.js"],
+ "dest": "bin/components",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "build/pgo/certs",
+ "pattern": "**",
+ "dest": "certs",
+ },
+ ],
+ "cppunittest": [
+ {"source": STAGE, "base": "", "pattern": "cppunittest/**"},
+ # We don't ship these files if startup cache is disabled, which is
+ # rare. But it shouldn't matter for test archives.
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "startupcache/test",
+ "pattern": "TestStartupCacheTelemetry.*",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "runcppunittests.py",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "remotecppunittests.py",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "cppunittest.ini",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "cppunittest",
+ },
+ ],
+ "gtest": [{"source": STAGE, "base": "", "pattern": "gtest/**"}],
+ "mochitest": [
+ OBJDIR_TEST_FILES["mochitest"],
+ {"source": STAGE, "base": "", "pattern": "mochitest/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "mochitest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "mochijar/**",
+ "dest": "mochitest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "specialpowers/**",
+ "dest": "mochitest/extensions",
+ },
+ ],
+ "mozharness": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozharness",
+ "pattern": "**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "pattern": "third_party/python/_venv/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/manifestparser",
+ "pattern": "manifestparser/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozfile",
+ "pattern": "mozfile/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozinfo",
+ "pattern": "mozinfo/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozprocess",
+ "pattern": "mozprocess/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/six",
+ "pattern": "six.py",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/distro",
+ "pattern": "distro.py",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/packaging",
+ "pattern": "**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozbuild/mozbuild/action",
+ "pattern": "tooltool.py",
+ "dest": "external_tools",
+ },
+ ],
+ "reftest": [
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "reftest/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "reftest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "manifests": [
+ "layout/reftests/reftest.list",
+ "layout/reftests/reftest-qr.list",
+ "testing/crashtest/crashtests.list",
+ ],
+ "dest": "reftest/tests",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "reftest/**",
+ "dest": "reftest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "specialpowers/**",
+ "dest": "reftest",
+ },
+ ],
+ "talos": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "talos/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "talos/talos/profile_data",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/webkit/PerformanceTests",
+ "pattern": "**",
+ "dest": "talos/talos/tests/webkit/PerformanceTests/",
+ },
+ ],
+ "perftests": [
+ {"source": buildconfig.topsrcdir, "pattern": "testing/mozbase/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "testing/condprofile/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "testing/performance/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "third_party/python/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "tools/lint/eslint/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "**/perftest_*.js"},
+ {"source": buildconfig.topsrcdir, "pattern": "**/hooks_*py"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/autoconf/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/moz.configure/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "python/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/mach_initialize.py"},
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/build.txt",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/common.txt",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/mach.txt",
+ },
+ {"source": buildconfig.topsrcdir, "pattern": "mach/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "testing/web-platform/tests/tools/third_party/certifi/**",
+ },
+ {"source": buildconfig.topsrcdir, "pattern": "testing/mozharness/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "browser/config/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/modules",
+ "pattern": "**",
+ "dest": "bin/modules",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": [
+ "browser/**",
+ "chrome/**",
+ "chrome.manifest",
+ "components/**",
+ "http3server",
+ "*.ini",
+ "localization/**",
+ "modules/**",
+ "update.locale",
+ "greprefs.js",
+ ],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "netwerk/test/http3serverDB",
+ "pattern": "**",
+ "dest": "netwerk/test/http3serverDB",
+ },
+ ],
+ "condprof": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "condprofile/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozfile",
+ "pattern": "**",
+ "dest": "condprofile/mozfile",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozprofile",
+ "pattern": "**",
+ "dest": "condprofile/mozprofile",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozdevice",
+ "pattern": "**",
+ "dest": "condprofile/mozdevice",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozlog",
+ "pattern": "**",
+ "dest": "condprofile/mozlog",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/virtualenv",
+ "pattern": "**",
+ "dest": "condprofile/virtualenv",
+ },
+ ],
+ "raptor": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "raptor/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "raptor/raptor/profile_data",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/webkit/PerformanceTests",
+ "pattern": "**",
+ "dest": "raptor/raptor/tests/webkit/PerformanceTests/",
+ },
+ ],
+ "awsy": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "awsy/**"}
+ ],
+ "web-platform": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/meta/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/mozilla/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/tests/**",
+ "ignore": ["web-platform/tests/tools/wpt_third_party"],
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests",
+ "pattern": "web-platform/**",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "web-platform",
+ },
+ ],
+ "xpcshell": [
+ OBJDIR_TEST_FILES["xpcshell"],
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/xpcshell",
+ "patterns": [
+ "head.js",
+ "mach_test_package_commands.py",
+ "moz-http2/**",
+ "node-http2/**",
+ "node_ip/**",
+ "node-ws/**",
+ "dns-packet/**",
+ "remotexpcshelltests.py",
+ "runxpcshelltests.py",
+ "selftest.py",
+ "xpcshellcommandline.py",
+ ],
+ "dest": "xpcshell",
+ },
+ {"source": STAGE, "base": "", "pattern": "xpcshell/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "xpcshell",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "build",
+ "pattern": "automation.py",
+ "dest": "xpcshell",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "xpcshell/profile_data",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "pattern": "http3server%s" % buildconfig.substs["BIN_SUFFIX"],
+ "dest": "xpcshell/http3server",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "netwerk/test/http3serverDB",
+ "pattern": "**",
+ "dest": "xpcshell/http3server/http3serverDB",
+ },
+ ],
+ "updater-dep": [
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/updater-dep",
+ "pattern": "**",
+ "dest": "updater-dep",
+ },
+ # Required by the updater on Linux
+ {
+ "source": buildconfig.topobjdir,
+ "base": "config/external/sqlite",
+ "pattern": "libmozsqlite3.so",
+ "dest": "updater-dep",
+ },
+ ],
+ "jsreftest": [{"source": STAGE, "base": "", "pattern": "jsreftest/**"}],
+ "fuzztest": [
+ {"source": buildconfig.topsrcdir, "pattern": "tools/fuzzing/smoke/**"}
+ ],
+ "jittest": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src",
+ "pattern": "jit-test/**",
+ "dest": "jit-test",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/shell.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/Math/shell.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/reflect-parse/Match.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "lib/**",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src",
+ "pattern": "jsapi.h",
+ "dest": "jit-test",
+ },
+ ],
+}
+
+if buildconfig.substs.get("MOZ_CODE_COVERAGE"):
+ ARCHIVE_FILES["common"].append(
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozbuild/",
+ "patterns": ["mozpack/**", "mozbuild/codecoverage/**"],
+ }
+ )
+
+
+if buildconfig.substs.get("MOZ_ASAN") and buildconfig.substs.get("CLANG_CL"):
+ asan_dll = {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "pattern": os.path.basename(buildconfig.substs["MOZ_CLANG_RT_ASAN_LIB_PATH"]),
+ "dest": "bin",
+ }
+ ARCHIVE_FILES["common"].append(asan_dll)
+
+
+if buildconfig.substs.get("commtopsrcdir"):
+ commtopsrcdir = buildconfig.substs.get("commtopsrcdir")
+ mozharness_comm = {
+ "source": commtopsrcdir,
+ "base": "testing/mozharness",
+ "pattern": "**",
+ }
+ ARCHIVE_FILES["mozharness"].append(mozharness_comm)
+ marionette_comm = {
+ "source": commtopsrcdir,
+ "base": "",
+ "manifest": "testing/marionette/unit-tests.ini",
+ "dest": "marionette/tests/comm",
+ }
+ ARCHIVE_FILES["common"].append(marionette_comm)
+ thunderbirdinstance = {
+ "source": commtopsrcdir,
+ "base": "testing/marionette",
+ "pattern": "thunderbirdinstance.py",
+ "dest": "marionette/client/marionette_driver",
+ }
+ ARCHIVE_FILES["common"].append(thunderbirdinstance)
+
+
+# "common" is our catch all archive and it ignores things from other archives.
+# Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion
+# rule in the "common" archive.
+for k, v in ARCHIVE_FILES.items():
+ # Skip mozharness because it isn't staged.
+ if k in ("common", "mozharness"):
+ continue
+
+ ignores = set(
+ itertools.chain(*(e.get("ignore", []) for e in ARCHIVE_FILES["common"]))
+ )
+
+ if not any(p.startswith("%s/" % k) for p in ignores):
+ raise Exception('"common" ignore list probably should contain %s' % k)
+
+
+def find_generated_harness_files():
+ # TEST_HARNESS_FILES end up in an install manifest at
+ # $topsrcdir/_build_manifests/install/_tests.
+ manifest = InstallManifest(
+ mozpath.join(buildconfig.topobjdir, "_build_manifests", "install", "_tests")
+ )
+ registry = FileRegistry()
+ manifest.populate_registry(registry)
+ # Conveniently, the generated files we care about will already
+ # exist in the objdir, so we can identify relevant files if
+ # they're an `ExistingFile` instance.
+ return [
+ mozpath.join("_tests", p)
+ for p in registry.paths()
+ if isinstance(registry[p], ExistingFile)
+ ]
+
+
+def find_files(archive):
+ extra_entries = []
+ generated_harness_files = find_generated_harness_files()
+
+ if archive == "common":
+ # Construct entries ensuring all our generated harness files are
+ # packaged in the common tests archive.
+ packaged_paths = set()
+ for entry in OBJDIR_TEST_FILES.values():
+ pat = mozpath.join(entry["base"], entry["pattern"])
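+            # Mutating the shared OBJDIR_TEST_FILES entry is safe here: each
+            # invocation of this script produces a single archive, so the
+            # dict is not reused with its original "pattern" afterwards.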
+ del entry["pattern"]
+ patterns = []
+ for path in generated_harness_files:
+ if mozpath.match(path, pat):
+ patterns.append(path[len(entry["base"]) + 1 :])
+ packaged_paths.add(path)
+ if patterns:
+ entry["patterns"] = patterns
+ extra_entries.append(entry)
+ entry = {"source": buildconfig.topobjdir, "base": "_tests", "patterns": []}
+ for path in set(generated_harness_files) - packaged_paths:
+ entry["patterns"].append(path[len("_tests") + 1 :])
+ extra_entries.append(entry)
+
+ for entry in ARCHIVE_FILES[archive] + extra_entries:
+ source = entry["source"]
+ dest = entry.get("dest")
+ base = entry.get("base", "")
+
+ pattern = entry.get("pattern")
+ patterns = entry.get("patterns", [])
+ if pattern:
+ patterns.append(pattern)
+
+ manifest = entry.get("manifest")
+ manifests = entry.get("manifests", [])
+ if manifest:
+ manifests.append(manifest)
+ if manifests:
+ dirs = find_manifest_dirs(os.path.join(source, base), manifests)
+ patterns.extend({"{}/**".format(d) for d in dirs})
+
+ ignore = list(entry.get("ignore", []))
+ ignore.extend(["**/.flake8", "**/.mkdir.done", "**/*.pyc"])
+
+ if archive not in ("common", "updater-dep") and base.startswith("_tests"):
+ # We may have generated_harness_files to exclude from this entry.
+ for path in generated_harness_files:
+ if path.startswith(base):
+ ignore.append(path[len(base) + 1 :])
+
+ common_kwargs = {"find_dotfiles": True, "ignore": ignore}
+
+ finder = FileFinder(os.path.join(source, base), **common_kwargs)
+
+ for pattern in patterns:
+ for p, f in finder.find(pattern):
+ if dest:
+ p = mozpath.join(dest, p)
+ yield p, f
+
+
+def find_manifest_dirs(topsrcdir, manifests):
+ """Routine to retrieve directories specified in a manifest, relative to topsrcdir.
+
+ It does not recurse into manifests, as we currently have no need for that.
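+
+    For example, if the manifests reference tests under both "dom/tests"
+    and "dom/tests/unit", only {"dom/tests"} is returned, since the
+    parent directory already captures the child.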
+ """
+ dirs = set()
+
+ for p in manifests:
+ p = os.path.join(topsrcdir, p)
+
+ if p.endswith(".ini"):
+ test_manifest = TestManifest()
+ test_manifest.read(p)
+            dirs |= {os.path.dirname(m) for m in test_manifest.manifests()}
+
+ elif p.endswith(".list"):
+ m = ReftestManifest()
+ m.load(p)
+ dirs |= m.dirs
+
+ else:
+ raise Exception(
+ '"{}" is not a supported manifest format.'.format(
+ os.path.splitext(p)[1]
+ )
+ )
+
+ dirs = {mozpath.normpath(d[len(topsrcdir) :]).lstrip("/") for d in dirs}
+
+ # Filter out children captured by parent directories because duplicates
+ # will confuse things later on.
+ def parents(p):
+ while True:
+ p = mozpath.dirname(p)
+ if not p:
+ break
+ yield p
+
+ seen = set()
+ for d in sorted(dirs, key=len):
+ if not any(p in seen for p in parents(d)):
+ seen.add(d)
+
+ return sorted(seen)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Produce test archives")
+ parser.add_argument("archive", help="Which archive to generate")
+ parser.add_argument("outputfile", help="File to write output to")
+
+ args = parser.parse_args(argv)
+
+ out_file = args.outputfile
+ if not out_file.endswith((".tar.gz", ".zip")):
+ raise Exception("expected tar.gz or zip output file")
+
+ file_count = 0
+ t_start = time.monotonic()
+ ensureParentDir(out_file)
+ res = find_files(args.archive)
+ with open(out_file, "wb") as fh:
+ # Experimentation revealed that level 5 is significantly faster and has
+ # marginally larger sizes than higher values and is the sweet spot
+ # for optimal compression. Read the detailed commit message that
+ # introduced this for raw numbers.
+ if out_file.endswith(".tar.gz"):
+ files = dict(res)
+ create_tar_gz_from_files(fh, files, compresslevel=5)
+ file_count = len(files)
+ elif out_file.endswith(".zip"):
+ with JarWriter(fileobj=fh, compress_level=5) as writer:
+ for p, f in res:
+ writer.add(
+ p.encode("utf-8"), f.read(), mode=f.mode, skip_duplicates=True
+ )
+ file_count += 1
+ else:
+ raise Exception("unhandled file extension: %s" % out_file)
+
+ duration = time.monotonic() - t_start
+ zip_size = os.path.getsize(args.outputfile)
+ basename = os.path.basename(args.outputfile)
+ print(
+ "Wrote %d files in %d bytes to %s in %.2fs"
+ % (file_count, zip_size, basename, duration)
+ )
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/tooltool.py b/python/mozbuild/mozbuild/action/tooltool.py
new file mode 100755
index 0000000000..002360cd65
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/tooltool.py
@@ -0,0 +1,1714 @@
+#!/usr/bin/env python3
+
+# tooltool is a lookaside cache implemented in Python
+# Copyright (C) 2011 John H. Ford <john@johnford.info>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation version 2
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+# A manifest file specifies files in a directory that are stored
+# elsewhere. It should list only files in the same directory in which
+# the manifest file resides, and it should be called 'manifest.tt'.
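+#
+# An illustrative manifest entry (field names as consumed by this script;
+# the digest below is a placeholder):
+#
+# [
+#   {
+#     "filename": "clang.tar.xz",
+#     "size": 123456789,
+#     "algorithm": "sha512",
+#     "digest": "5d3e...",
+#     "unpack": true
+#   }
+# ]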
+
+import base64
+import calendar
+import hashlib
+import hmac
+import json
+import logging
+import math
+import optparse
+import os
+import pprint
+import re
+import shutil
+import ssl
+import stat
+import sys
+import tarfile
+import tempfile
+import threading
+import time
+import zipfile
+from contextlib import closing, contextmanager
+from functools import wraps
+from io import BytesIO, open
+from random import random
+from subprocess import PIPE, Popen
+
+if os.name == "nt":
+ import certifi
+
+__version__ = "1.4.0"
+
+# Allowed request header characters:
+# !#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9, \, "
+REQUEST_HEADER_ATTRIBUTE_CHARS = re.compile(
+ r"^[ a-zA-Z0-9_\!#\$%&'\(\)\*\+,\-\./\:;<\=>\?@\[\]\^`\{\|\}~]*$"
+)
+DEFAULT_MANIFEST_NAME = "manifest.tt"
+TOOLTOOL_PACKAGE_SUFFIX = ".TOOLTOOL-PACKAGE"
+HAWK_VER = 1
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ six_binary_type = bytes
+ unicode = (
+ str # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3.
+ )
+ import urllib.request as urllib2
+ from http.client import HTTPConnection, HTTPSConnection
+ from urllib.error import HTTPError, URLError
+ from urllib.parse import urljoin, urlparse
+ from urllib.request import Request
+else:
+ six_binary_type = str
+ import urllib2
+ from httplib import HTTPConnection, HTTPSConnection
+ from urllib2 import HTTPError, Request, URLError
+ from urlparse import urljoin, urlparse
+
+
+log = logging.getLogger(__name__)
+
+
+# Vendored code from `redo` module
+def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1):
+ """
+ This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
+ A generator function that sleeps between retries, handles exponential
+ backoff and jitter. The action you are retrying is meant to run after
+ retrier yields.
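+
+    Example (mirroring how retry() below consumes it):
+        for _ in retrier(attempts=3, sleeptime=1):
+            try:
+                result = action()
+                break
+            except IOError:
+                continue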
+ """
+ jitter = jitter or 0 # py35 barfs on the next line if jitter is None
+ if jitter > sleeptime:
+ # To prevent negative sleep times
+ raise Exception(
+ "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime)
+ )
+
+ sleeptime_real = sleeptime
+ for _ in range(attempts):
+ log.debug("attempt %i/%i", _ + 1, attempts)
+
+ yield sleeptime_real
+
+ if jitter:
+            # `random` here is the function imported via
+            # `from random import random`, so `random.uniform` would raise
+            # AttributeError; scale random() to [-jitter, jitter) instead.
+            sleeptime_real = sleeptime + (random() * 2 - 1) * jitter
+ # our jitter should scale along with the sleeptime
+ jitter = jitter * sleepscale
+ else:
+ sleeptime_real = sleeptime
+
+ sleeptime *= sleepscale
+
+ if sleeptime_real > max_sleeptime:
+ sleeptime_real = max_sleeptime
+
+ # Don't need to sleep the last time
+ if _ < attempts - 1:
+ log.debug(
+ "sleeping for %.2fs (attempt %i/%i)", sleeptime_real, _ + 1, attempts
+ )
+ time.sleep(sleeptime_real)
+
+
+def retry(
+ action,
+ attempts=5,
+ sleeptime=60,
+ max_sleeptime=5 * 60,
+ sleepscale=1.5,
+ jitter=1,
+ retry_exceptions=(Exception,),
+ cleanup=None,
+ args=(),
+ kwargs={},
+ log_args=True,
+):
+ """
+ This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
+ Calls an action function until it succeeds, or we give up.
+ """
+ assert callable(action)
+ assert not cleanup or callable(cleanup)
+
+ action_name = getattr(action, "__name__", action)
+ if log_args and (args or kwargs):
+ log_attempt_args = (
+ "retry: calling %s with args: %s," " kwargs: %s, attempt #%d",
+ action_name,
+ args,
+ kwargs,
+ )
+ else:
+ log_attempt_args = ("retry: calling %s, attempt #%d", action_name)
+
+ if max_sleeptime < sleeptime:
+ log.debug("max_sleeptime %d less than sleeptime %d", max_sleeptime, sleeptime)
+
+ n = 1
+ for _ in retrier(
+ attempts=attempts,
+ sleeptime=sleeptime,
+ max_sleeptime=max_sleeptime,
+ sleepscale=sleepscale,
+ jitter=jitter,
+ ):
+ try:
+ logfn = log.info if n != 1 else log.debug
+ logfn_args = log_attempt_args + (n,)
+ logfn(*logfn_args)
+ return action(*args, **kwargs)
+ except retry_exceptions:
+ log.debug("retry: Caught exception: ", exc_info=True)
+ if cleanup:
+ cleanup()
+ if n == attempts:
+ log.info("retry: Giving up on %s", action_name)
+ raise
+ continue
+ finally:
+ n += 1
+
+
+def retriable(*retry_args, **retry_kwargs):
+ """
+ This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
+ A decorator factory for retry(). Wrap your function in @retriable(...) to
+ give it retry powers!
+ """
+
+ def _retriable_factory(func):
+ @wraps(func)
+ def _retriable_wrapper(*args, **kwargs):
+ return retry(func, args=args, kwargs=kwargs, *retry_args, **retry_kwargs)
+
+ return _retriable_wrapper
+
+ return _retriable_factory
+
+
+# end of vendored code from redo module
+
+
+def request_has_data(req):
+ if PY3:
+ return req.data is not None
+ return req.has_data()
+
+
+def get_hexdigest(val):
+ return hashlib.sha512(val).hexdigest()
+
+
+class FileRecordJSONEncoderException(Exception):
+ pass
+
+
+class InvalidManifest(Exception):
+ pass
+
+
+class ExceptionWithFilename(Exception):
+ def __init__(self, filename):
+ Exception.__init__(self)
+ self.filename = filename
+
+
+class BadFilenameException(ExceptionWithFilename):
+ pass
+
+
+class DigestMismatchException(ExceptionWithFilename):
+ pass
+
+
+class MissingFileException(ExceptionWithFilename):
+ pass
+
+
+class InvalidCredentials(Exception):
+ pass
+
+
+class BadHeaderValue(Exception):
+ pass
+
+
+def parse_url(url):
+ url_parts = urlparse(url)
+ url_dict = {
+ "scheme": url_parts.scheme,
+ "hostname": url_parts.hostname,
+ "port": url_parts.port,
+ "path": url_parts.path,
+ "resource": url_parts.path,
+ "query": url_parts.query,
+ }
+ if len(url_dict["query"]) > 0:
+ url_dict["resource"] = "%s?%s" % (
+ url_dict["resource"], # pragma: no cover
+ url_dict["query"],
+ )
+
+ if url_parts.port is None:
+ if url_parts.scheme == "http":
+ url_dict["port"] = 80
+ elif url_parts.scheme == "https": # pragma: no cover
+ url_dict["port"] = 443
+ return url_dict
+
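+# For example, parse_url("https://example.com/file?x=1") returns
+# {"scheme": "https", "hostname": "example.com", "port": 443,
+#  "path": "/file", "resource": "/file?x=1", "query": "x=1"}.
+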
+
+def utc_now(offset_in_seconds=0.0):
+ return int(math.floor(calendar.timegm(time.gmtime()) + float(offset_in_seconds)))
+
+
+def random_string(length):
+ return base64.urlsafe_b64encode(os.urandom(length))[:length]
+
+
+def prepare_header_val(val):
+ if isinstance(val, six_binary_type):
+ val = val.decode("utf-8")
+
+ if not REQUEST_HEADER_ATTRIBUTE_CHARS.match(val):
+ raise BadHeaderValue( # pragma: no cover
+ "header value value={val} contained an illegal character".format(
+ val=repr(val)
+ )
+ )
+
+ return val
+
+
+def parse_content_type(content_type): # pragma: no cover
+ if content_type:
+ return content_type.split(";")[0].strip().lower()
+ else:
+ return ""
+
+
+def calculate_payload_hash(algorithm, payload, content_type): # pragma: no cover
+ parts = [
+ part if isinstance(part, six_binary_type) else part.encode("utf8")
+ for part in [
+ "hawk." + str(HAWK_VER) + ".payload\n",
+ parse_content_type(content_type) + "\n",
+ payload or "",
+ "\n",
+ ]
+ ]
+
+ p_hash = hashlib.new(algorithm)
+ for p in parts:
+ p_hash.update(p)
+
+ log.debug(
+ "calculating payload hash from:\n{parts}".format(parts=pprint.pformat(parts))
+ )
+
+ return base64.b64encode(p_hash.digest())
+
+
+def validate_taskcluster_credentials(credentials):
+ if not hasattr(credentials, "__getitem__"):
+ raise InvalidCredentials(
+ "credentials must be a dict-like object"
+ ) # pragma: no cover
+ try:
+ credentials["clientId"]
+ credentials["accessToken"]
+ except KeyError: # pragma: no cover
+ etype, val, tb = sys.exc_info()
+ raise InvalidCredentials("{etype}: {val}".format(etype=etype, val=val))
+
+
+def normalize_header_attr(val):
+ if isinstance(val, six_binary_type):
+ return val.decode("utf-8")
+ return val # pragma: no cover
+
+
+def normalize_string(
+ mac_type,
+ timestamp,
+ nonce,
+ method,
+ name,
+ host,
+ port,
+ content_hash,
+):
+ return "\n".join(
+ [
+ normalize_header_attr(header)
+ # The blank lines are important. They follow what the Node Hawk lib does.
+ for header in [
+ "hawk." + str(HAWK_VER) + "." + mac_type,
+ timestamp,
+ nonce,
+ method or "",
+ name or "",
+ host,
+ port,
+ content_hash or "",
+ "", # for ext which is empty in this case
+ "", # Add trailing new line.
+ ]
+ ]
+ )
+
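+# The normalized string is a newline-joined block; assuming HAWK_VER is 1
+# and a GET request, it looks like (values illustrative):
+#
+#     hawk.1.header
+#     <timestamp>
+#     <nonce>
+#     GET
+#     /path?query
+#     example.com
+#     443
+#     <content hash, or blank>
+#     <blank ext line, plus a trailing newline>
+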
+
+def calculate_mac(
+ mac_type,
+ access_token,
+ algorithm,
+ timestamp,
+ nonce,
+ method,
+ name,
+ host,
+ port,
+ content_hash,
+):
+ normalized = normalize_string(
+ mac_type, timestamp, nonce, method, name, host, port, content_hash
+ )
+ log.debug("normalized resource for mac calc: {norm}".format(norm=normalized))
+ digestmod = getattr(hashlib, algorithm)
+
+ if not isinstance(normalized, six_binary_type):
+ normalized = normalized.encode("utf8")
+
+ if not isinstance(access_token, six_binary_type):
+ access_token = access_token.encode("ascii")
+
+ result = hmac.new(access_token, normalized, digestmod)
+ return base64.b64encode(result.digest())
+
+
+def make_taskcluster_header(credentials, req):
+ validate_taskcluster_credentials(credentials)
+
+ url = req.get_full_url()
+ method = req.get_method()
+ algorithm = "sha256"
+ timestamp = str(utc_now())
+ nonce = random_string(6)
+ url_parts = parse_url(url)
+
+ content_hash = None
+ if request_has_data(req):
+ if PY3:
+ data = req.data
+ else:
+ data = req.get_data()
+ content_hash = calculate_payload_hash( # pragma: no cover
+ algorithm,
+ data,
+ # maybe we should detect this from req.headers but we anyway expect json
+ content_type="application/json",
+ )
+
+ mac = calculate_mac(
+ "header",
+ credentials["accessToken"],
+ algorithm,
+ timestamp,
+ nonce,
+ method,
+ url_parts["resource"],
+ url_parts["hostname"],
+ str(url_parts["port"]),
+ content_hash,
+ )
+
+ header = 'Hawk mac="{}"'.format(prepare_header_val(mac))
+
+ if content_hash: # pragma: no cover
+ header = '{}, hash="{}"'.format(header, prepare_header_val(content_hash))
+
+ header = '{header}, id="{id}", ts="{ts}", nonce="{nonce}"'.format(
+ header=header,
+ id=prepare_header_val(credentials["clientId"]),
+ ts=prepare_header_val(timestamp),
+ nonce=prepare_header_val(nonce),
+ )
+
+ log.debug("Hawk header for URL={} method={}: {}".format(url, method, header))
+
+ return header
+
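+# The resulting header has the following shape (values illustrative; the
+# hash field only appears for requests that carry a body):
+#
+#     Hawk mac="...", hash="...", id="<clientId>", ts="1700000000", nonce="abc123"
+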
+
+class FileRecord(object):
+ def __init__(
+ self,
+ filename,
+ size,
+ digest,
+ algorithm,
+ unpack=False,
+ version=None,
+ visibility=None,
+ ):
+ object.__init__(self)
+ if "/" in filename or "\\" in filename:
+ log.error(
+ "The filename provided contains path information and is, therefore, invalid."
+ )
+ raise BadFilenameException(filename=filename)
+ self.filename = filename
+ self.size = size
+ self.digest = digest
+ self.algorithm = algorithm
+ self.unpack = unpack
+ self.version = version
+ self.visibility = visibility
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if (
+ self.filename == other.filename
+ and self.size == other.size
+ and self.digest == other.digest
+ and self.algorithm == other.algorithm
+ and self.version == other.version
+ and self.visibility == other.visibility
+ ):
+ return True
+ else:
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __str__(self):
+ return repr(self)
+
+ def __repr__(self):
+ return (
+ "%s.%s(filename='%s', size=%s, digest='%s', algorithm='%s', visibility=%r)"
+ % (
+ __name__,
+ self.__class__.__name__,
+ self.filename,
+ self.size,
+ self.digest,
+ self.algorithm,
+ self.visibility,
+ )
+ )
+
+ def present(self):
+ # Doesn't check validity
+ return os.path.exists(self.filename)
+
+ def validate_size(self):
+ if self.present():
+ return self.size == os.path.getsize(self.filename)
+ else:
+ log.debug("trying to validate size on a missing file, %s", self.filename)
+ raise MissingFileException(filename=self.filename)
+
+ def validate_digest(self):
+ if self.present():
+ with open(self.filename, "rb") as f:
+ return self.digest == digest_file(f, self.algorithm)
+ else:
+ log.debug("trying to validate digest on a missing file, %s', self.filename")
+ raise MissingFileException(filename=self.filename)
+
+ def validate(self):
+ if self.size is None or self.validate_size():
+ if self.validate_digest():
+ return True
+ return False
+
+ def describe(self):
+ if self.present() and self.validate():
+ return "'%s' is present and valid" % self.filename
+ elif self.present():
+ return "'%s' is present and invalid" % self.filename
+ else:
+ return "'%s' is absent" % self.filename
+
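+# A minimal sketch of how a record is typically used (values hypothetical):
+#
+#     fr = FileRecord("clang.tar.zst", 12345, "deadbeef...", "sha512")
+#     if not (fr.present() and fr.validate()):
+#         print(fr.describe())
+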
+
+def create_file_record(filename, algorithm):
+ stored_filename = os.path.split(filename)[1]
+ with open(filename, "rb") as fo:
+ fr = FileRecord(
+ stored_filename,
+ os.path.getsize(filename),
+ digest_file(fo, algorithm),
+ algorithm,
+ )
+ return fr
+
+
+class FileRecordJSONEncoder(json.JSONEncoder):
+ def encode_file_record(self, obj):
+ if not issubclass(type(obj), FileRecord):
+ err = (
+ "FileRecordJSONEncoder is only for FileRecord and lists of FileRecords, "
+ "not %s" % obj.__class__.__name__
+ )
+ log.warning(err)
+ raise FileRecordJSONEncoderException(err)
+ else:
+ rv = {
+ "filename": obj.filename,
+ "size": obj.size,
+ "algorithm": obj.algorithm,
+ "digest": obj.digest,
+ }
+ if obj.unpack:
+ rv["unpack"] = True
+ if obj.version:
+ rv["version"] = obj.version
+ if obj.visibility is not None:
+ rv["visibility"] = obj.visibility
+ return rv
+
+ def default(self, f):
+ if issubclass(type(f), list):
+ record_list = []
+ for i in f:
+ record_list.append(self.encode_file_record(i))
+ return record_list
+ else:
+ return self.encode_file_record(f)
+
+
+class FileRecordJSONDecoder(json.JSONDecoder):
+
+ """I help the json module materialize a FileRecord from
+ a JSON file. I understand FileRecords and lists of
+ FileRecords. I ignore things that I don't expect for now"""
+
+ # TODO: make this more explicit in what it's looking for
+ # and error out on unexpected things
+
+ def process_file_records(self, obj):
+ if isinstance(obj, list):
+ record_list = []
+ for i in obj:
+ record = self.process_file_records(i)
+ if issubclass(type(record), FileRecord):
+ record_list.append(record)
+ return record_list
+ required_fields = [
+ "filename",
+ "size",
+ "algorithm",
+ "digest",
+ ]
+ if isinstance(obj, dict):
+ missing = False
+ for req in required_fields:
+ if req not in obj:
+ missing = True
+ break
+
+ if not missing:
+ unpack = obj.get("unpack", False)
+ version = obj.get("version", None)
+ visibility = obj.get("visibility", None)
+ rv = FileRecord(
+ obj["filename"],
+ obj["size"],
+ obj["digest"],
+ obj["algorithm"],
+ unpack,
+ version,
+ visibility,
+ )
+ log.debug("materialized %s" % rv)
+ return rv
+ return obj
+
+ def decode(self, s):
+ decoded = json.JSONDecoder.decode(self, s)
+ rv = self.process_file_records(decoded)
+ return rv
+
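+# For example, records can be materialized straight from manifest text
+# (digest shortened for illustration):
+#
+#     records = json.loads(
+#         '[{"filename": "f.txt", "size": 1, "algorithm": "sha512", '
+#         '"digest": "abc..."}]',
+#         cls=FileRecordJSONDecoder,
+#     )
+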
+
+class Manifest(object):
+
+ valid_formats = ("json",)
+
+ def __init__(self, file_records=None):
+ self.file_records = file_records or []
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ if len(self.file_records) != len(other.file_records):
+ log.debug("Manifests differ in number of files")
+ return False
+ # sort the file records by filename before comparing
+ mine = sorted((fr.filename, fr) for fr in self.file_records)
+ theirs = sorted((fr.filename, fr) for fr in other.file_records)
+ return mine == theirs
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __deepcopy__(self, memo):
+ # This is required for a deep copy
+ return Manifest(self.file_records[:])
+
+ def __copy__(self):
+ return Manifest(self.file_records)
+
+ def copy(self):
+ return Manifest(self.file_records[:])
+
+ def present(self):
+ return all(i.present() for i in self.file_records)
+
+ def validate_sizes(self):
+ return all(i.validate_size() for i in self.file_records)
+
+ def validate_digests(self):
+ return all(i.validate_digest() for i in self.file_records)
+
+ def validate(self):
+ return all(i.validate() for i in self.file_records)
+
+ def load(self, data_file, fmt="json"):
+ assert fmt in self.valid_formats
+ if fmt == "json":
+ try:
+ self.file_records.extend(
+ json.load(data_file, cls=FileRecordJSONDecoder)
+ )
+ except ValueError:
+ raise InvalidManifest("trying to read invalid manifest file")
+
+ def loads(self, data_string, fmt="json"):
+ assert fmt in self.valid_formats
+ if fmt == "json":
+ try:
+ self.file_records.extend(
+ json.loads(data_string, cls=FileRecordJSONDecoder)
+ )
+ except ValueError:
+ raise InvalidManifest("trying to read invalid manifest file")
+
+ def dump(self, output_file, fmt="json"):
+ assert fmt in self.valid_formats
+ if fmt == "json":
+ return json.dump(
+ self.file_records,
+ output_file,
+ indent=2,
+ separators=(",", ": "),
+ cls=FileRecordJSONEncoder,
+ )
+
+ def dumps(self, fmt="json"):
+ assert fmt in self.valid_formats
+ if fmt == "json":
+ return json.dumps(
+ self.file_records,
+ indent=2,
+ separators=(",", ": "),
+ cls=FileRecordJSONEncoder,
+ )
+
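+# dumps() produces the usual tooltool manifest format, e.g. (values
+# illustrative):
+#
+#     [
+#       {
+#         "filename": "clang.tar.zst",
+#         "size": 123,
+#         "algorithm": "sha512",
+#         "digest": "abc...",
+#         "unpack": true
+#       }
+#     ]
+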
+
+def digest_file(f, a):
+ """I take a file like object 'f' and return a hex-string containing
+ of the result of the algorithm 'a' applied to 'f'."""
+ h = hashlib.new(a)
+ chunk_size = 1024 * 10
+ data = f.read(chunk_size)
+ while data:
+ h.update(data)
+ data = f.read(chunk_size)
+ name = repr(f.name) if hasattr(f, "name") else "a file"
+ log.debug("hashed %s with %s to be %s", name, a, h.hexdigest())
+ return h.hexdigest()
+
+
+def execute(cmd):
+ """Execute CMD, logging its stdout at the info level"""
+ process = Popen(cmd, shell=True, stdout=PIPE)
+ while True:
+ line = process.stdout.readline()
+ if not line:
+ break
+ if not isinstance(line, str):
+ # stdout is a byte stream under Python 3
+ line = line.decode("utf-8", "replace")
+ log.info(line.replace("\n", " "))
+ return process.wait() == 0
+
+
+def open_manifest(manifest_file):
+ """I know how to take a filename and load it into a Manifest object"""
+ if os.path.exists(manifest_file):
+ manifest = Manifest()
+ with open(manifest_file, "r" if PY3 else "rb") as f:
+ manifest.load(f)
+ log.debug("loaded manifest from file '%s'" % manifest_file)
+ return manifest
+ else:
+ log.debug("tried to load absent file '%s' as manifest" % manifest_file)
+ raise InvalidManifest("manifest file '%s' does not exist" % manifest_file)
+
+
+def list_manifest(manifest_file):
+ """I know how print all the files in a location"""
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error(
+ "failed to load manifest file at '%s': %s"
+ % (
+ manifest_file,
+ str(e),
+ )
+ )
+ return False
+ for f in manifest.file_records:
+ print(
+ "{}\t{}\t{}".format(
+ "P" if f.present() else "-",
+ "V" if f.present() and f.validate() else "-",
+ f.filename,
+ )
+ )
+ return True
+
+
+def validate_manifest(manifest_file):
+ """I validate that all files in a manifest are present and valid but
+ don't fetch or delete them if they aren't"""
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error(
+ "failed to load manifest file at '%s': %s"
+ % (
+ manifest_file,
+ str(e),
+ )
+ )
+ return False
+ invalid_files = []
+ absent_files = []
+ for f in manifest.file_records:
+ if not f.present():
+ absent_files.append(f)
+ elif not f.validate():
+ invalid_files.append(f)
+ if len(invalid_files + absent_files) == 0:
+ return True
+ else:
+ return False
+
+
+def add_files(manifest_file, algorithm, filenames, version, visibility, unpack):
+ # Returns True if all files were successfully added, False if not.
+ # Library exceptions are not caught. If any file is already tracked
+ # in the manifest, the return value will be False because that file
+ # was not added.
+ all_files_added = True
+ # Create an old_manifest object to add to
+ if os.path.exists(manifest_file):
+ old_manifest = open_manifest(manifest_file)
+ else:
+ old_manifest = Manifest()
+ log.debug("creating a new manifest file")
+ new_manifest = Manifest() # use a different manifest for the output
+ for filename in filenames:
+ log.debug("adding %s" % filename)
+ path, name = os.path.split(filename)
+ new_fr = create_file_record(filename, algorithm)
+ new_fr.version = version
+ new_fr.visibility = visibility
+ new_fr.unpack = unpack
+ log.debug("appending a new file record to manifest file")
+ add = True
+ for fr in old_manifest.file_records:
+ log.debug(
+ "manifest file has '%s'"
+ % "', ".join([x.filename for x in old_manifest.file_records])
+ )
+ if new_fr == fr:
+ log.info("file already in old_manifest")
+ add = False
+ elif filename == fr.filename:
+ log.error(
+ "manifest already contains a different file named %s" % filename
+ )
+ add = False
+ if add:
+ new_manifest.file_records.append(new_fr)
+ log.debug("added '%s' to manifest" % filename)
+ else:
+ all_files_added = False
+ # copy any files in the old manifest that aren't in the new one
+ new_filenames = set(fr.filename for fr in new_manifest.file_records)
+ for old_fr in old_manifest.file_records:
+ if old_fr.filename not in new_filenames:
+ new_manifest.file_records.append(old_fr)
+ if PY3:
+ with open(manifest_file, mode="w") as output:
+ new_manifest.dump(output, fmt="json")
+ else:
+ with open(manifest_file, mode="wb") as output:
+ new_manifest.dump(output, fmt="json")
+ return all_files_added
+
+
+def touch(f):
+ """Used to modify mtime in cached files;
+ mtime is used by the purge command"""
+ try:
+ os.utime(f, None)
+ except OSError:
+ log.warn("impossible to update utime of file %s" % f)
+
+
+def _urlopen(req):
+ ssl_context = None
+ if os.name == "nt":
+ ssl_context = ssl.create_default_context(cafile=certifi.where())
+ return urllib2.urlopen(req, context=ssl_context)
+
+
+@contextmanager
+@retriable(sleeptime=2)
+def request(url, auth_file=None):
+ req = Request(url)
+ _authorize(req, auth_file)
+ with closing(_urlopen(req)) as f:
+ log.debug("opened %s for reading" % url)
+ yield f
+
+
+def fetch_file(base_urls, file_record, grabchunk=1024 * 4, auth_file=None, region=None):
+ # A file which is requested to be fetched that exists locally will be
+ # overwritten by this function
+ fd, temp_path = tempfile.mkstemp(dir=os.getcwd())
+ os.close(fd)
+ fetched_path = None
+ for base_url in base_urls:
+ # Generate the URL for the file on the server side
+ url = urljoin(base_url, "%s/%s" % (file_record.algorithm, file_record.digest))
+ if region is not None:
+ url += "?region=" + region
+
+ log.info("Attempting to fetch from '%s'..." % base_url)
+
+ # Well, the file doesn't exist locally. Let's fetch it.
+ try:
+ with request(url, auth_file) as f, open(temp_path, mode="wb") as out:
+ k = True
+ size = 0
+ while k:
+ # TODO: print statistics as file transfers happen both for info and to stop
+ # buildbot timeouts
+ indata = f.read(grabchunk)
+ out.write(indata)
+ size += len(indata)
+ if len(indata) == 0:
+ k = False
+ log.info(
+ "File %s fetched from %s as %s"
+ % (file_record.filename, base_url, temp_path)
+ )
+ fetched_path = temp_path
+ break
+ except (URLError, HTTPError, ValueError):
+ log.info(
+ "...failed to fetch '%s' from %s" % (file_record.filename, base_url),
+ exc_info=True,
+ )
+ except IOError: # pragma: no cover
+ log.info(
+ "failed to write to temporary file for '%s'" % file_record.filename,
+ exc_info=True,
+ )
+
+ # cleanup temp file in case of issues
+ if fetched_path:
+ return os.path.split(fetched_path)[1]
+ else:
+ try:
+ os.remove(temp_path)
+ except OSError: # pragma: no cover
+ pass
+ return None
+
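+# The fetch URL is derived purely from the file record: a sha512 record
+# with digest "abc..." against base URL "https://tooltool.mozilla-releng.net/"
+# is fetched from "https://tooltool.mozilla-releng.net/sha512/abc...",
+# plus an optional "?region=..." suffix.
+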
+
+def clean_path(dirname):
+ """Remove a subtree if is exists. Helper for unpack_file()."""
+ if os.path.exists(dirname):
+ log.info("rm tree: %s" % dirname)
+ shutil.rmtree(dirname)
+
+
+CHECKSUM_SUFFIX = ".checksum"
+
+
+def validate_tar_member(member, path):
+ def _is_within_directory(directory, target):
+ real_directory = os.path.realpath(directory)
+ real_target = os.path.realpath(target)
+ prefix = os.path.commonprefix([real_directory, real_target])
+ return prefix == real_directory
+
+ member_path = os.path.join(path, member.name)
+ if not _is_within_directory(path, member_path):
+ raise Exception("Attempted path traversal in tar file: " + member.name)
+ if member.issym():
+ link_path = os.path.join(os.path.dirname(member_path), member.linkname)
+ if not _is_within_directory(path, link_path):
+ raise Exception("Attempted link path traversal in tar file: " + member.name)
+ if member.mode & (stat.S_ISUID | stat.S_ISGID):
+ raise Exception("Attempted setuid or setgid in tar file: " + member.name)
+
+
+def safe_extract(tar, path=".", *, numeric_owner=False):
+ def _files(tar, path):
+ for member in tar:
+ validate_tar_member(member, path)
+ yield member
+
+ tar.extractall(path, members=_files(tar, path), numeric_owner=numeric_owner)
+
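+# For instance, an archive member named "../../etc/passwd", a symlink
+# pointing outside the destination, or a file with a setuid/setgid mode
+# bit all raise before that member is extracted.
+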
+
+def unpack_file(filename):
+ """Untar `filename`, assuming it is uncompressed or compressed with bzip2,
+ xz, gzip, zst, or unzip a zip file. The file is assumed to contain a single
+ directory with a name matching the base of the given filename.
+ Xz support is handled by shelling out to 'tar'."""
+ if os.path.isfile(filename) and tarfile.is_tarfile(filename):
+ tar_file, zip_ext = os.path.splitext(filename)
+ base_file, tar_ext = os.path.splitext(tar_file)
+ clean_path(base_file)
+ log.info('untarring "%s"' % filename)
+ with tarfile.open(filename) as tar:
+ safe_extract(tar)
+ elif os.path.isfile(filename) and filename.endswith(".tar.xz"):
+ base_file = filename.replace(".tar.xz", "")
+ clean_path(base_file)
+ log.info('untarring "%s"' % filename)
+ # Not using tar -Jxf because it fails on Windows for some reason.
+ process = Popen(["xz", "-d", "-c", filename], stdout=PIPE)
+ stdout, stderr = process.communicate()
+ if process.returncode != 0:
+ return False
+ fileobj = BytesIO()
+ fileobj.write(stdout)
+ fileobj.seek(0)
+ with tarfile.open(fileobj=fileobj, mode="r|") as tar:
+ safe_extract(tar)
+ elif os.path.isfile(filename) and filename.endswith(".tar.zst"):
+ import zstandard
+
+ base_file = filename.replace(".tar.zst", "")
+ clean_path(base_file)
+ log.info('untarring "%s"' % filename)
+ dctx = zstandard.ZstdDecompressor()
+ with dctx.stream_reader(open(filename, "rb")) as fileobj:
+ with tarfile.open(fileobj=fileobj, mode="r|") as tar:
+ safe_extract(tar)
+ elif os.path.isfile(filename) and zipfile.is_zipfile(filename):
+ base_file = filename.replace(".zip", "")
+ clean_path(base_file)
+ log.info('unzipping "%s"' % filename)
+ with zipfile.ZipFile(filename) as z:
+ z.extractall()
+ else:
+ log.error("Unknown archive extension for filename '%s'" % filename)
+ return False
+ return True
+
+
+def fetch_files(
+ manifest_file,
+ base_urls,
+ filenames=[],
+ cache_folder=None,
+ auth_file=None,
+ region=None,
+):
+ # Lets load the manifest file
+ try:
+ manifest = open_manifest(manifest_file)
+ except InvalidManifest as e:
+ log.error(
+ "failed to load manifest file at '%s': %s"
+ % (
+ manifest_file,
+ str(e),
+ )
+ )
+ return False
+
+ # we want to track files that are already present in the current
+ # working directory AND valid; we will not need to fetch these
+ present_files = []
+
+ # We want to track files that fail to be fetched as well as
+ # files that are fetched
+ failed_files = []
+ fetched_files = []
+
+ # Files that we want to unpack.
+ unpack_files = []
+
+ # Lets go through the manifest and fetch the files that we want
+ for f in manifest.file_records:
+ # case 1: files are already present
+ if f.present():
+ if f.validate():
+ present_files.append(f.filename)
+ if f.unpack:
+ unpack_files.append(f.filename)
+ else:
+ # we have an invalid file here, better to clean up!
+ # this invalid file needs to be replaced with a good one
+ # from the local cache or fetched from a tooltool server
+ log.info(
+ "File %s is present locally but it is invalid, so I will remove it "
+ "and try to fetch it" % f.filename
+ )
+ os.remove(os.path.join(os.getcwd(), f.filename))
+
+ # check if file is already in cache
+ if cache_folder and f.filename not in present_files:
+ try:
+ shutil.copy(
+ os.path.join(cache_folder, f.digest),
+ os.path.join(os.getcwd(), f.filename),
+ )
+ log.info(
+ "File %s retrieved from local cache %s" % (f.filename, cache_folder)
+ )
+ touch(os.path.join(cache_folder, f.digest))
+
+ filerecord_for_validation = FileRecord(
+ f.filename, f.size, f.digest, f.algorithm
+ )
+ if filerecord_for_validation.validate():
+ present_files.append(f.filename)
+ if f.unpack:
+ unpack_files.append(f.filename)
+ else:
+ # the file copied from the cache is invalid, better to
+ # clean up the cache version itself as well
+ log.warning(
+ "File %s retrieved from cache is invalid! I am deleting it from the "
+ "cache as well" % f.filename
+ )
+ os.remove(os.path.join(os.getcwd(), f.filename))
+ os.remove(os.path.join(cache_folder, f.digest))
+ except IOError:
+ log.info(
+ "File %s not present in local cache folder %s"
+ % (f.filename, cache_folder)
+ )
+
+ # now I will try to fetch all files which are not already present and
+ # valid, appending a suffix to avoid race conditions
+ temp_file_name = None
+ # 'filenames' is the list of filenames to be managed; if this variable
+ # is a non-empty list it can be used as a filter. If a filename is in
+ # present_files, it means that I have it already, because it was
+ # either in the working dir or in the cache.
+ if (
+ f.filename in filenames or len(filenames) == 0
+ ) and f.filename not in present_files:
+ log.debug("fetching %s" % f.filename)
+ temp_file_name = fetch_file(
+ base_urls, f, auth_file=auth_file, region=region
+ )
+ if temp_file_name:
+ fetched_files.append((f, temp_file_name))
+ else:
+ failed_files.append(f.filename)
+ else:
+ log.debug("skipping %s" % f.filename)
+
+ # lets ensure that fetched files match what the manifest specified
+ for localfile, temp_file_name in fetched_files:
+ # since I downloaded to a temp file, I need to perform all validations on the temp file
+ # this is why filerecord_for_validation is created
+
+ filerecord_for_validation = FileRecord(
+ temp_file_name, localfile.size, localfile.digest, localfile.algorithm
+ )
+
+ if filerecord_for_validation.validate():
+ # great!
+ # I can rename the temp file
+ log.info(
+ "File integrity verified, renaming %s to %s"
+ % (temp_file_name, localfile.filename)
+ )
+ os.rename(
+ os.path.join(os.getcwd(), temp_file_name),
+ os.path.join(os.getcwd(), localfile.filename),
+ )
+
+ if localfile.unpack:
+ unpack_files.append(localfile.filename)
+
+ # if I am using a cache and a new file has just been retrieved from a
+ # remote location, I need to update the cache as well
+ if cache_folder:
+ log.info("Updating local cache %s..." % cache_folder)
+ try:
+ if not os.path.exists(cache_folder):
+ log.info("Creating cache in %s..." % cache_folder)
+ os.makedirs(cache_folder, 0o0700)
+ shutil.copy(
+ os.path.join(os.getcwd(), localfile.filename),
+ os.path.join(cache_folder, localfile.digest),
+ )
+ log.info(
+ "Local cache %s updated with %s"
+ % (cache_folder, localfile.filename)
+ )
+ touch(os.path.join(cache_folder, localfile.digest))
+ except (OSError, IOError):
+ log.warning(
+ "Impossible to add file %s to cache folder %s"
+ % (localfile.filename, cache_folder),
+ exc_info=True,
+ )
+ else:
+ failed_files.append(localfile.filename)
+ log.error("'%s'" % filerecord_for_validation.describe())
+ os.remove(temp_file_name)
+
+ # Unpack files that need to be unpacked.
+ for filename in unpack_files:
+ if not unpack_file(filename):
+ failed_files.append(filename)
+
+ # If we failed to fetch or validate a file, we need to fail
+ if len(failed_files) > 0:
+ log.error("The following files failed: '%s'" % "', ".join(failed_files))
+ return False
+ return True
+
+
+def freespace(p):
+ "Returns the number of bytes free under directory `p`"
+ if sys.platform == "win32": # pragma: no cover
+ # os.statvfs doesn't work on Windows
+ import win32file
+
+ secsPerClus, bytesPerSec, nFreeClus, totClus = win32file.GetDiskFreeSpace(p)
+ return secsPerClus * bytesPerSec * nFreeClus
+ else:
+ r = os.statvfs(p)
+ return r.f_frsize * r.f_bavail
+
+
+def purge(folder, gigs):
+ """If gigs is non 0, it deletes files in `folder` until `gigs` GB are free,
+ starting from older files. If gigs is 0, a full purge will be performed.
+ No recursive deletion of files in subfolder is performed."""
+
+ full_purge = bool(gigs == 0)
+ gigs *= 1024 * 1024 * 1024
+
+ if not full_purge and freespace(folder) >= gigs:
+ log.info("No need to cleanup")
+ return
+
+ files = []
+ for f in os.listdir(folder):
+ p = os.path.join(folder, f)
+ # it deletes files in folder without going into subfolders,
+ # assuming the cache has a flat structure
+ if not os.path.isfile(p):
+ continue
+ mtime = os.path.getmtime(p)
+ files.append((mtime, p))
+
+ # iterate files sorted by mtime
+ for _, f in sorted(files):
+ log.info("removing %s to free up space" % f)
+ try:
+ os.remove(f)
+ except OSError:
+ log.info("Impossible to remove %s" % f, exc_info=True)
+ if not full_purge and freespace(folder) >= gigs:
+ break
+
+
+def _log_api_error(e):
+ if hasattr(e, "hdrs") and e.hdrs["content-type"] == "application/json":
+ json_resp = json.load(e.fp)
+ log.error(
+ "%s: %s" % (json_resp["error"]["name"], json_resp["error"]["description"])
+ )
+ else:
+ log.exception("Error making RelengAPI request:")
+
+
+def _authorize(req, auth_file):
+ is_taskcluster_auth = False
+
+ if not auth_file:
+ try:
+ taskcluster_env_keys = {
+ "clientId": "TASKCLUSTER_CLIENT_ID",
+ "accessToken": "TASKCLUSTER_ACCESS_TOKEN",
+ }
+ auth_content = {k: os.environ[v] for k, v in taskcluster_env_keys.items()}
+ is_taskcluster_auth = True
+ except KeyError:
+ return
+ else:
+ with open(auth_file) as f:
+ auth_content = f.read().strip()
+ try:
+ auth_content = json.loads(auth_content)
+ is_taskcluster_auth = True
+ except Exception:
+ pass
+
+ if is_taskcluster_auth:
+ taskcluster_header = make_taskcluster_header(auth_content, req)
+ log.debug("Using taskcluster credentials in %s" % auth_file)
+ req.add_unredirected_header("Authorization", taskcluster_header)
+ else:
+ log.debug("Using Bearer token in %s" % auth_file)
+ req.add_unredirected_header("Authorization", "Bearer %s" % auth_content)
+
+
+def _send_batch(base_url, auth_file, batch, region):
+ url = urljoin(base_url, "upload")
+ if region is not None:
+ url += "?region=" + region
+ data = json.dumps(batch)
+ if PY3:
+ data = data.encode("utf-8")
+ req = Request(url, data, {"Content-Type": "application/json"})
+ _authorize(req, auth_file)
+ try:
+ resp = _urlopen(req)
+ except (URLError, HTTPError) as e:
+ _log_api_error(e)
+ return None
+ return json.load(resp)["result"]
+
+
+def _s3_upload(filename, file):
+ # urllib2 does not support streaming, so we fall back to good old httplib
+ url = urlparse(file["put_url"])
+ cls = HTTPSConnection if url.scheme == "https" else HTTPConnection
+ default_port = 443 if url.scheme == "https" else 80
+ host, port = (
+ url.netloc.split(":") if ":" in url.netloc else (url.netloc, default_port)
+ )
+ port = int(port)
+ conn = cls(host, port)
+ try:
+ req_path = "%s?%s" % (url.path, url.query) if url.query else url.path
+ with open(filename, "rb") as f:
+ content = f.read()
+ content_length = len(content)
+ f.seek(0)
+ conn.request(
+ "PUT",
+ req_path,
+ f,
+ {
+ "Content-Type": "application/octet-stream",
+ "Content-Length": str(content_length),
+ },
+ )
+ resp = conn.getresponse()
+ resp_body = resp.read()
+ conn.close()
+ if resp.status != 200:
+ raise RuntimeError(
+ "Non-200 return from AWS: %s %s\n%s"
+ % (resp.status, resp.reason, resp_body)
+ )
+ except Exception:
+ file["upload_exception"] = sys.exc_info()
+ file["upload_ok"] = False
+ else:
+ file["upload_ok"] = True
+
+
+def _notify_upload_complete(base_url, auth_file, file):
+ req = Request(urljoin(base_url, "upload/complete/%(algorithm)s/%(digest)s" % file))
+ _authorize(req, auth_file)
+ try:
+ _urlopen(req)
+ except HTTPError as e:
+ if e.code != 409:
+ _log_api_error(e)
+ return
+ # 409 indicates that the upload URL hasn't expired yet and we
+ # should retry after a delay
+ to_wait = int(e.headers.get("X-Retry-After", 60))
+ log.warning("Waiting %d seconds for upload URLs to expire" % to_wait)
+ time.sleep(to_wait)
+ _notify_upload_complete(base_url, auth_file, file)
+ except Exception:
+ log.exception("While notifying server of upload completion:")
+
+
+def upload(manifest, message, base_urls, auth_file, region):
+ try:
+ manifest = open_manifest(manifest)
+ except InvalidManifest:
+ log.exception("failed to load manifest file at '%s'")
+ return False
+
+ # verify the manifest, since we'll need the files present to upload
+ if not manifest.validate():
+ log.error("manifest is invalid")
+ return False
+
+ if any(fr.visibility is None for fr in manifest.file_records):
+ log.error("All files in a manifest for upload must have a visibility set")
+
+ # convert the manifest to an upload batch
+ batch = {
+ "message": message,
+ "files": {},
+ }
+ for fr in manifest.file_records:
+ batch["files"][fr.filename] = {
+ "size": fr.size,
+ "digest": fr.digest,
+ "algorithm": fr.algorithm,
+ "visibility": fr.visibility,
+ }
+
+ # make the upload request
+ resp = _send_batch(base_urls[0], auth_file, batch, region)
+ if not resp:
+ return None
+ files = resp["files"]
+
+ # Upload the files, each in a thread. This allows us to start all of the
+ # uploads before any of the URLs expire.
+ threads = {}
+ for filename, file in files.items():
+ if "put_url" in file:
+ log.info("%s: starting upload" % (filename,))
+ thd = threading.Thread(target=_s3_upload, args=(filename, file))
+ thd.daemon = True
+ thd.start()
+ threads[filename] = thd
+ else:
+ log.info("%s: already exists on server" % (filename,))
+
+ # re-join all of those threads as they exit
+ success = True
+ while threads:
+ for filename, thread in list(threads.items()):
+ if not thread.is_alive():
+ # _s3_upload has annotated file with result information
+ file = files[filename]
+ thread.join()
+ if file["upload_ok"]:
+ log.info("%s: uploaded" % filename)
+ else:
+ log.error(
+ "%s: failed" % filename, exc_info=file["upload_exception"]
+ )
+ success = False
+ del threads[filename]
+
+ # notify the server that the uploads are completed. If the notification
+ # fails, we don't consider that an error (the server will notice
+ # eventually)
+ for filename, file in files.items():
+ if "put_url" in file and file["upload_ok"]:
+ log.info("notifying server of upload completion for %s" % (filename,))
+ _notify_upload_complete(base_urls[0], auth_file, file)
+
+ return success
+
+
+def send_operation_on_file(data, base_urls, digest, auth_file):
+ url = base_urls[0]
+ url = urljoin(url, "file/sha512/" + digest)
+
+ data = json.dumps(data)
+
+ req = Request(url, data, {"Content-Type": "application/json"})
+ req.get_method = lambda: "PATCH"
+
+ _authorize(req, auth_file)
+
+ try:
+ _urlopen(req)
+ except (URLError, HTTPError) as e:
+ _log_api_error(e)
+ return False
+ return True
+
+
+def change_visibility(base_urls, digest, visibility, auth_file):
+ data = [
+ {
+ "op": "set_visibility",
+ "visibility": visibility,
+ }
+ ]
+ return send_operation_on_file(data, base_urls, digest, auth_file)
+
+
+def delete_instances(base_urls, digest, auth_file):
+ data = [
+ {
+ "op": "delete_instances",
+ }
+ ]
+ return send_operation_on_file(data, base_urls, digest, auth_file)
+
+
+def process_command(options, args):
+ """I know how to take a list of program arguments and
+ start doing the right thing with them"""
+ cmd = args[0]
+ cmd_args = args[1:]
+ log.debug("processing '%s' command with args '%s'" % (cmd, '", "'.join(cmd_args)))
+ log.debug("using options: %s" % options)
+
+ if cmd == "list":
+ return list_manifest(options["manifest"])
+ if cmd == "validate":
+ return validate_manifest(options["manifest"])
+ elif cmd == "add":
+ return add_files(
+ options["manifest"],
+ options["algorithm"],
+ cmd_args,
+ options["version"],
+ options["visibility"],
+ options["unpack"],
+ )
+ elif cmd == "purge":
+ if options["cache_folder"]:
+ purge(folder=options["cache_folder"], gigs=options["size"])
+ else:
+ log.critical("please specify the cache folder to be purged")
+ return False
+ elif cmd == "fetch":
+ return fetch_files(
+ options["manifest"],
+ options["base_url"],
+ cmd_args,
+ cache_folder=options["cache_folder"],
+ auth_file=options.get("auth_file"),
+ region=options.get("region"),
+ )
+ elif cmd == "upload":
+ if not options.get("message"):
+ log.critical("upload command requires a message")
+ return False
+ return upload(
+ options.get("manifest"),
+ options.get("message"),
+ options.get("base_url"),
+ options.get("auth_file"),
+ options.get("region"),
+ )
+ elif cmd == "change-visibility":
+ if not options.get("digest"):
+ log.critical("change-visibility command requires a digest option")
+ return False
+ if not options.get("visibility"):
+ log.critical("change-visibility command requires a visibility option")
+ return False
+ return change_visibility(
+ options.get("base_url"),
+ options.get("digest"),
+ options.get("visibility"),
+ options.get("auth_file"),
+ )
+ elif cmd == "delete":
+ if not options.get("digest"):
+ log.critical("change-visibility command requires a digest option")
+ return False
+ return delete_instances(
+ options.get("base_url"),
+ options.get("digest"),
+ options.get("auth_file"),
+ )
+ else:
+ log.critical('command "%s" is not implemented' % cmd)
+ return False
+
+
+def main(argv, _skip_logging=False):
+ # Set up option parsing
+ parser = optparse.OptionParser()
+ parser.add_option(
+ "-q",
+ "--quiet",
+ default=logging.INFO,
+ dest="loglevel",
+ action="store_const",
+ const=logging.ERROR,
+ )
+ parser.add_option(
+ "-v", "--verbose", dest="loglevel", action="store_const", const=logging.DEBUG
+ )
+ parser.add_option(
+ "-m",
+ "--manifest",
+ default=DEFAULT_MANIFEST_NAME,
+ dest="manifest",
+ action="store",
+ help="specify the manifest file to be operated on",
+ )
+ parser.add_option(
+ "-d",
+ "--algorithm",
+ default="sha512",
+ dest="algorithm",
+ action="store",
+ help="hashing algorithm to use (only sha512 is allowed)",
+ )
+ parser.add_option(
+ "--digest",
+ default=None,
+ dest="digest",
+ action="store",
+ help="digest hash to change visibility for",
+ )
+ parser.add_option(
+ "--visibility",
+ default=None,
+ dest="visibility",
+ choices=["internal", "public"],
+ help='Visibility level of this file; "internal" is for '
+ "files that cannot be distributed out of the company "
+ 'but not for secrets; "public" files are available to '
+ "anyone without restriction",
+ )
+ parser.add_option(
+ "--unpack",
+ default=False,
+ dest="unpack",
+ action="store_true",
+ help="Request unpacking this file after fetch."
+ " This is helpful with tarballs.",
+ )
+ parser.add_option(
+ "--version",
+ default=None,
+ dest="version",
+ action="store",
+ help="Version string for this file. This annotates the "
+ "manifest entry with a version string to help "
+ "identify the contents.",
+ )
+ parser.add_option(
+ "-o",
+ "--overwrite",
+ default=False,
+ dest="overwrite",
+ action="store_true",
+ help="UNUSED; present for backward compatibility",
+ )
+ parser.add_option(
+ "--url",
+ dest="base_url",
+ action="append",
+ help="RelengAPI URL ending with /tooltool/; default "
+ "is appropriate for Mozilla",
+ )
+ parser.add_option(
+ "-c", "--cache-folder", dest="cache_folder", help="Local cache folder"
+ )
+ parser.add_option(
+ "-s",
+ "--size",
+ help="free space required (in GB)",
+ dest="size",
+ type="float",
+ default=0.0,
+ )
+ parser.add_option(
+ "-r",
+ "--region",
+ help="Preferred AWS region for upload or fetch; " "example: --region=us-west-2",
+ )
+ parser.add_option(
+ "--message",
+ help='The "commit message" for an upload; format with a bug number '
+ "and brief comment",
+ dest="message",
+ )
+ parser.add_option(
+ "--authentication-file",
+ help="Use the RelengAPI token found in the given file to "
+ "authenticate to the RelengAPI server.",
+ dest="auth_file",
+ )
+
+ (options_obj, args) = parser.parse_args(argv[1:])
+
+ if not options_obj.base_url:
+ tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net")
+ taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
+ if taskcluster_proxy_url:
+ tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host)
+ else:
+ tooltool_url = "https://{}".format(tooltool_host)
+
+ options_obj.base_url = [tooltool_url]
+
+ # ensure all URLs have a trailing slash
+ def add_slash(url):
+ return url if url.endswith("/") else (url + "/")
+
+ options_obj.base_url = [add_slash(u) for u in options_obj.base_url]
+
+ # expand ~ in --authentication-file
+ if options_obj.auth_file:
+ options_obj.auth_file = os.path.expanduser(options_obj.auth_file)
+
+ # Dictionaries are easier to work with
+ options = vars(options_obj)
+
+ log.setLevel(options["loglevel"])
+
+ # Set up logging, for now just to the console
+ if not _skip_logging: # pragma: no cover
+ ch = logging.StreamHandler()
+ cf = logging.Formatter("%(levelname)s - %(message)s")
+ ch.setFormatter(cf)
+ log.addHandler(ch)
+
+ if options["algorithm"] != "sha512":
+ parser.error("only --algorithm sha512 is supported")
+
+ if len(args) < 1:
+ parser.error("You must specify a command")
+
+ return 0 if process_command(options, args) else 1
+
+
+if __name__ == "__main__": # pragma: no cover
+ sys.exit(main(sys.argv))
diff --git a/python/mozbuild/mozbuild/action/unify_symbols.py b/python/mozbuild/mozbuild/action/unify_symbols.py
new file mode 100644
index 0000000000..4e96a010b2
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unify_symbols.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+
+from mozpack.copier import FileCopier
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.unify import UnifiedFinder
+
+
+class UnifiedSymbolsFinder(UnifiedFinder):
+ def unify_file(self, path, file1, file2):
+ # We expect none of the files to overlap.
+ if not file2:
+ return file1
+ if not file1:
+ return file2
+ errors.error(
+ "{} is in both {} and {}".format(
+ path, self._finder1.base, self._finder2.base
+ )
+ )
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Merge two crashreporter symbols directories."
+ )
+ parser.add_argument("dir1", help="Directory")
+ parser.add_argument("dir2", help="Directory to merge")
+
+ options = parser.parse_args()
+
+ dir1_finder = FileFinder(options.dir1)
+ dir2_finder = FileFinder(options.dir2)
+ finder = UnifiedSymbolsFinder(dir1_finder, dir2_finder)
+
+ copier = FileCopier()
+ with errors.accumulate():
+ for p, f in finder:
+ copier.add(p, f)
+
+ copier.copy(options.dir1, skip_if_older=False)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/action/unify_tests.py b/python/mozbuild/mozbuild/action/unify_tests.py
new file mode 100644
index 0000000000..d94ebade1b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unify_tests.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.unify import UnifiedFinder
+
+
+class UnifiedTestFinder(UnifiedFinder):
+ def unify_file(self, path, file1, file2):
+ unified = super(UnifiedTestFinder, self).unify_file(path, file1, file2)
+ basename = mozpath.basename(path)
+ if basename == "mozinfo.json":
+ # The mozinfo.json files contain processor info, which differs
+ # between the two ends.
+ # Remove the block when this assert is hit.
+ assert not unified
+ errors.ignore_errors()
+ self._report_difference(path, file1, file2)
+ errors.ignore_errors(False)
+ return file1
+ elif basename == "dump_syms_mac":
+ # At the moment, the dump_syms_mac executable is a x86_64 binary
+ # on both ends. We can't create a universal executable from twice
+ # the same executable.
+ # When this assert hits, remove this block.
+ assert file1.open().read() == file2.open().read()
+ return file1
+ return unified
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Merge two directories, creating Universal binaries for "
+ "executables and libraries they contain."
+ )
+ parser.add_argument("dir1", help="Directory")
+ parser.add_argument("dir2", help="Directory to merge")
+
+ options = parser.parse_args()
+
+ buildconfig.substs["OS_ARCH"] = "Darwin"
+ buildconfig.substs["LIPO"] = os.environ.get("LIPO")
+
+ dir1_finder = FileFinder(options.dir1, find_executables=True, find_dotfiles=True)
+ dir2_finder = FileFinder(options.dir2, find_executables=True, find_dotfiles=True)
+ finder = UnifiedTestFinder(dir1_finder, dir2_finder)
+
+ copier = FileCopier()
+ with errors.accumulate():
+ for p, f in finder:
+ copier.add(p, f)
+
+ copier.copy(options.dir1, skip_if_older=False)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/action/unpack_dmg.py b/python/mozbuild/mozbuild/action/unpack_dmg.py
new file mode 100644
index 0000000000..74e4091549
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unpack_dmg.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import sys
+from pathlib import Path
+
+from mozpack import dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+
+
+def _path_or_none(input: str):
+ if not input:
+ return None
+ return Path(input)
+
+
+def main(args):
+ parser = argparse.ArgumentParser(
+ description="Explode a DMG into its relevant files"
+ )
+
+ parser.add_argument("--dsstore", help="DSStore file from")
+ parser.add_argument("--background", help="Background file from")
+ parser.add_argument("--icon", help="Icon file from")
+
+ parser.add_argument("dmgfile", metavar="DMG_IN", help="DMG File to Unpack")
+ parser.add_argument(
+ "outpath", metavar="PATH_OUT", help="Location to put unpacked files"
+ )
+
+ options = parser.parse_args(args)
+
+ dmg_tool = bootstrap_toolchain("dmg/dmg")
+ hfs_tool = bootstrap_toolchain("dmg/hfsplus")
+
+ dmg.extract_dmg(
+ dmgfile=Path(options.dmgfile),
+ output=Path(options.outpath),
+ dmg_tool=Path(dmg_tool),
+ hfs_tool=Path(hfs_tool),
+ dsstore=_path_or_none(options.dsstore),
+ background=_path_or_none(options.background),
+ icon=_path_or_none(options.icon),
+ )
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/util.py b/python/mozbuild/mozbuild/action/util.py
new file mode 100644
index 0000000000..d4102629ff
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/util.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import sys
+import time
+
+
+def log_build_task(f, *args, **kwargs):
+ """Run the given function, representing an entire build task, and log the
+ BUILDTASK metadata row to stdout.
+ """
+ start = time.monotonic()
+ try:
+ return f(*args, **kwargs)
+ finally:
+ end = time.monotonic()
+ print(
+ "BUILDTASK %s"
+ % json.dumps(
+ {"argv": sys.argv, "start": start, "end": end, "context": None}
+ )
+ )
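+# A resulting row looks like (values illustrative):
+#
+#     BUILDTASK {"argv": ["action/webidl.py"], "start": 1.0, "end": 2.5, "context": null}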
diff --git a/python/mozbuild/mozbuild/action/webidl.py b/python/mozbuild/mozbuild/action/webidl.py
new file mode 100644
index 0000000000..81c2c2a507
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/webidl.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozwebidlcodegen import create_build_system_manager
+
+from mozbuild.action.util import log_build_task
+
+
+def main(argv):
+ """Perform WebIDL code generation required by the build system."""
+ manager = create_build_system_manager()
+ manager.generate_build_files()
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/wrap_rustc.py b/python/mozbuild/mozbuild/action/wrap_rustc.py
new file mode 100644
index 0000000000..d865438c47
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/wrap_rustc.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+def parse_outputs(crate_output, dep_outputs, pass_l_flag):
+ env = {}
+ args = []
+
+ def parse_line(line):
+ if line.startswith("cargo:"):
+ return line[len("cargo:") :].split("=", 1)
+
+ def parse_file(f):
+ with open(f) as fh:
+ return [parse_line(line.rstrip()) for line in fh.readlines()]
+
+ for f in dep_outputs:
+ for entry in parse_file(f):
+ if not entry:
+ continue
+ key, value = entry
+ if key == "rustc-link-search":
+ args += ["-L", value]
+ elif key == "rustc-flags":
+ flags = value.split()
+ for flag, val in zip(flags[0::2], flags[1::2]):
+ if flag == "-l" and f == crate_output:
+ args += ["-l", val]
+ elif flag == "-L":
+ args += ["-L", val]
+ else:
+ raise Exception(
+ "Unknown flag passed through "
+ '"cargo:rustc-flags": "%s"' % flag
+ )
+ elif key == "rustc-link-lib" and f == crate_output:
+ args += ["-l", value]
+ elif key == "rustc-cfg" and f == crate_output:
+ args += ["--cfg", value]
+ elif key == "rustc-env" and f == crate_output:
+ env_key, env_value = value.split("=", 1)
+ env[env_key] = env_value
+ elif key == "rerun-if-changed":
+ pass
+ elif key == "rerun-if-env-changed":
+ pass
+ elif key == "warning":
+ pass
+ elif key:
+ # Todo: Distinguish between direct and transitive
+ # dependencies so we can pass metadata environment
+ # variables correctly.
+ pass
+
+ return env, args
+
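+# For example, a build-script output file containing the lines
+#
+#     cargo:rustc-link-lib=ssl
+#     cargo:rustc-link-search=/opt/ssl/lib
+#
+# contributes ["-L", "/opt/ssl/lib"] unconditionally, and ["-l", "ssl"]
+# only when the file is the crate's own output.
+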
+
+def wrap_rustc(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--crate-out", nargs="?")
+ parser.add_argument("--deps-out", nargs="*")
+ parser.add_argument("--cwd")
+ parser.add_argument("--pass-l-flag", action="store_true")
+ parser.add_argument("--cmd", nargs=argparse.REMAINDER)
+ args = parser.parse_args(args)
+
+ new_env, new_args = parse_outputs(args.crate_out, args.deps_out, args.pass_l_flag)
+ os.environ.update(new_env)
+ return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait()
+
+
+if __name__ == "__main__":
+ sys.exit(wrap_rustc(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/xpccheck.py b/python/mozbuild/mozbuild/action/xpccheck.py
new file mode 100644
index 0000000000..4b59577cce
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpccheck.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""A generic script to verify all test files are in the
+corresponding .ini file.
+
+Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]
+"""
+
+import os
+import sys
+from glob import glob
+
+import manifestparser
+
+
+def getIniTests(testdir):
+ mp = manifestparser.ManifestParser(strict=False)
+ mp.read(os.path.join(testdir, "xpcshell.ini"))
+ return mp.tests
+
+
+def verifyDirectory(initests, directory):
+ files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+ for f in files:
+ if not os.path.isfile(f):
+ continue
+
+ name = os.path.basename(f)
+ if name.endswith(".in"):
+ name = name[:-3]
+
+ if not name.endswith(".js"):
+ continue
+
+ found = False
+ for test in initests:
+ if os.path.join(os.path.abspath(directory), name) == test["path"]:
+ found = True
+ break
+
+ if not found:
+ print(
+ (
+ "TEST-UNEXPECTED-FAIL | xpccheck | test "
+ "%s is missing from test manifest %s!"
+ )
+ % (
+ name,
+ os.path.join(directory, "xpcshell.ini"),
+ ),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+
+def verifyIniFile(initests, directory):
+ files = glob(os.path.join(os.path.abspath(directory), "test_*"))
+ for test in initests:
+ name = test["path"].split("/")[-1]
+
+ found = False
+ for f in files:
+
+ fname = f.split("/")[-1]
+ if fname.endswith(".in"):
+ fname = ".in".join(fname.split(".in")[:-1])
+
+ if os.path.join(os.path.abspath(directory), fname) == test["path"]:
+ found = True
+ break
+
+ if not found:
+ print(
+ (
+ "TEST-UNEXPECTED-FAIL | xpccheck | found "
+ "%s in xpcshell.ini and not in directory '%s'"
+ )
+ % (
+ name,
+ directory,
+ ),
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+
+def main(argv):
+ if len(argv) < 2:
+ print(
+ "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+ for d in argv[1:]:
+ # xpcshell-unpack is a copy of the sibling xpcshell directory; in the
+ # Makefile we copy all files (including xpcshell.ini) from the sibling
+ # directory.
+ if d.endswith("toolkit/mozapps/extensions/test/xpcshell-unpack"):
+ continue
+
+ initests = getIniTests(d)
+ verifyDirectory(initests, d)
+ verifyIniFile(initests, d)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/xpidl-process.py b/python/mozbuild/mozbuild/action/xpidl-process.py
new file mode 100755
index 0000000000..99f2a83f5e
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpidl-process.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script is used to generate an output header and xpt file for
+# input IDL file(s). Its purpose is to directly support the build
+# system. The API will change to meet the needs of the build system.
+
+import argparse
+import os
+import sys
+
+import six
+from buildconfig import topsrcdir
+from mozpack import path as mozpath
+from xpidl import jsonxpt
+from xpidl.header import print_header
+from xpidl.rust import print_rust_bindings
+from xpidl.rust_macros import print_rust_macros_bindings
+from xpidl.xpidl import IDLParser
+
+from mozbuild.action.util import log_build_task
+from mozbuild.makeutil import Makefile
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.util import FileAvoidWrite
+
+
+def process(
+ input_dirs,
+ inc_paths,
+ bindings_conf,
+ header_dir,
+ xpcrs_dir,
+ xpt_dir,
+ deps_dir,
+ module,
+ idl_files,
+):
+ p = IDLParser()
+
+ xpts = []
+ mk = Makefile()
+ rule = mk.create_rule()
+
+ glbl = {}
+ exec(open(bindings_conf, encoding="utf-8").read(), glbl)
+ webidlconfig = glbl["DOMInterfaces"]
+
+ # Write out dependencies for Python modules we import. If this list isn't
+ # up to date, we will not re-process XPIDL files if the processor changes.
+ rule.add_dependencies(six.ensure_text(s) for s in iter_modules_in_path(topsrcdir))
+
+ for path in idl_files:
+ basename = os.path.basename(path)
+ stem, _ = os.path.splitext(basename)
+ idl_data = open(path, encoding="utf-8").read()
+
+ idl = p.parse(idl_data, filename=path)
+ idl.resolve(inc_paths, p, webidlconfig)
+
+ header_path = os.path.join(header_dir, "%s.h" % stem)
+ rs_rt_path = os.path.join(xpcrs_dir, "rt", "%s.rs" % stem)
+ rs_bt_path = os.path.join(xpcrs_dir, "bt", "%s.rs" % stem)
+
+ xpts.append(jsonxpt.build_typelib(idl))
+
+ rule.add_dependencies(six.ensure_text(s) for s in idl.deps)
+
+ # The print_* functions don't actually do anything with the
+ # passed-in path other than writing it into the file to let people
+ # know where the original source was. This script receives
+ # absolute paths, which are not so great to embed in header files
+ # (they mess with deterministic generation of files on different
+ # machines, Searchfox logic, shared compilation caches, etc.), so
+ # we pass in fake paths that are the same across compilations, but
+ # should still enable people to figure out where to go.
+ relpath = mozpath.relpath(path, topsrcdir)
+
+ with FileAvoidWrite(header_path) as fh:
+ print_header(idl, fh, path, relpath)
+
+ with FileAvoidWrite(rs_rt_path) as fh:
+ print_rust_bindings(idl, fh, relpath)
+
+ with FileAvoidWrite(rs_bt_path) as fh:
+ print_rust_macros_bindings(idl, fh, relpath)
+
+ # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
+ # number of different changes in the code, which may not cause the .xpt
+ # files to be changed in any way. This means that make will re-run us every
+ # time a build is run whether or not anything changed. To fix this we
+ # unconditionally write out the file.
+ xpt_path = os.path.join(xpt_dir, "%s.xpt" % module)
+ with open(xpt_path, "w", encoding="utf-8", newline="\n") as fh:
+ jsonxpt.write(jsonxpt.link(xpts), fh)
+
+ rule.add_targets([six.ensure_text(xpt_path)])
+ if deps_dir:
+ deps_path = os.path.join(deps_dir, "%s.pp" % module)
+ with FileAvoidWrite(deps_path) as fh:
+ mk.dump(fh)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--depsdir", help="Directory in which to write dependency files."
+ )
+ parser.add_argument(
+ "--bindings-conf", help="Path to the WebIDL binding configuration file."
+ )
+ parser.add_argument(
+ "--input-dir",
+ dest="input_dirs",
+ action="append",
+ default=[],
+ help="Directory(ies) in which to find source .idl files.",
+ )
+ parser.add_argument("headerdir", help="Directory in which to write header files.")
+ parser.add_argument(
+ "xpcrsdir", help="Directory in which to write rust xpcom binding files."
+ )
+ parser.add_argument("xptdir", help="Directory in which to write xpt file.")
+ parser.add_argument(
+ "module", help="Final module name to use for linked output xpt file."
+ )
+ parser.add_argument("idls", nargs="+", help="Source .idl file(s).")
+ parser.add_argument(
+ "-I",
+ dest="incpath",
+ action="append",
+ default=[],
+ help="Extra directories where to look for included .idl files.",
+ )
+
+ args = parser.parse_args(argv)
+ incpath = [os.path.join(topsrcdir, p) for p in args.incpath]
+ process(
+ args.input_dirs,
+ incpath,
+ args.bindings_conf,
+ args.headerdir,
+ args.xpcrsdir,
+ args.xptdir,
+ args.depsdir,
+ args.module,
+ args.idls,
+ )
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/zip.py b/python/mozbuild/mozbuild/action/zip.py
new file mode 100644
index 0000000000..e0dcbe020f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/zip.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script creates a zip file, but will also strip any binaries
+# it finds before adding them to the zip.
+
+import argparse
+import sys
+
+import mozpack.path as mozpath
+from mozpack.copier import Jarrer
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.path import match
+
+from mozbuild.action.util import log_build_task
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-C",
+ metavar="DIR",
+ default=".",
+ help="Change to given directory before considering " "other paths",
+ )
+ parser.add_argument("--strip", action="store_true", help="Strip executables")
+ parser.add_argument(
+ "-x",
+ metavar="EXCLUDE",
+ default=[],
+ action="append",
+ help="Exclude files that match the pattern",
+ )
+ parser.add_argument("zip", help="Path to zip file to write")
+ parser.add_argument("input", nargs="+", help="Path to files to add to zip")
+ args = parser.parse_args(args)
+
+ jarrer = Jarrer()
+
+ with errors.accumulate():
+ finder = FileFinder(args.C, find_executables=args.strip)
+ for path in args.input:
+ for p, f in finder.find(path):
+ if not any([match(p, exclude) for exclude in args.x]):
+ jarrer.add(p, f)
+ jarrer.copy(mozpath.join(args.C, args.zip))
+
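+# Example invocation (paths and patterns illustrative):
+#
+#     python -m mozbuild.action.zip -C dist/bin --strip -x "**/*.txt" out.zip "**"
+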
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])