author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:44:51 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:44:51 +0000
commit    9e3c08db40b8916968b9f30096c7be3f00ce9647 (patch)
tree      a68f146d7fa01f0134297619fbe7e33db084e0aa /python/mozbuild/mozpack
parent    Initial commit. (diff)
download  thunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.tar.xz
          thunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.zip
Adding upstream version 1:115.7.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'python/mozbuild/mozpack')
-rw-r--r--  python/mozbuild/mozpack/__init__.py                         0
-rw-r--r--  python/mozbuild/mozpack/apple_pkg/Distribution.template    19
-rw-r--r--  python/mozbuild/mozpack/apple_pkg/PackageInfo.template     19
-rw-r--r--  python/mozbuild/mozpack/archive.py                        153
-rw-r--r--  python/mozbuild/mozpack/chrome/__init__.py                  0
-rw-r--r--  python/mozbuild/mozpack/chrome/flags.py                   278
-rw-r--r--  python/mozbuild/mozpack/chrome/manifest.py                400
-rw-r--r--  python/mozbuild/mozpack/copier.py                         605
-rw-r--r--  python/mozbuild/mozpack/dmg.py                            230
-rw-r--r--  python/mozbuild/mozpack/errors.py                         151
-rw-r--r--  python/mozbuild/mozpack/executables.py                    140
-rw-r--r--  python/mozbuild/mozpack/files.py                         1271
-rw-r--r--  python/mozbuild/mozpack/macpkg.py                         217
-rw-r--r--  python/mozbuild/mozpack/manifests.py                      483
-rw-r--r--  python/mozbuild/mozpack/mozjar.py                         842
-rw-r--r--  python/mozbuild/mozpack/packager/__init__.py              445
-rw-r--r--  python/mozbuild/mozpack/packager/formats.py               354
-rw-r--r--  python/mozbuild/mozpack/packager/l10n.py                  304
-rw-r--r--  python/mozbuild/mozpack/packager/unpack.py                200
-rw-r--r--  python/mozbuild/mozpack/path.py                           246
-rw-r--r--  python/mozbuild/mozpack/pkg.py                            299
-rw-r--r--  python/mozbuild/mozpack/test/__init__.py                    0
-rw-r--r--  python/mozbuild/mozpack/test/data/test_data                 1
-rw-r--r--  python/mozbuild/mozpack/test/python.ini                    18
-rw-r--r--  python/mozbuild/mozpack/test/support/minify_js_verify.py   15
-rw-r--r--  python/mozbuild/mozpack/test/test_archive.py              197
-rw-r--r--  python/mozbuild/mozpack/test/test_chrome_flags.py         150
-rw-r--r--  python/mozbuild/mozpack/test/test_chrome_manifest.py      176
-rw-r--r--  python/mozbuild/mozpack/test/test_copier.py               548
-rw-r--r--  python/mozbuild/mozpack/test/test_errors.py                95
-rw-r--r--  python/mozbuild/mozpack/test/test_files.py               1362
-rw-r--r--  python/mozbuild/mozpack/test/test_manifests.py            465
-rw-r--r--  python/mozbuild/mozpack/test/test_mozjar.py               350
-rw-r--r--  python/mozbuild/mozpack/test/test_packager.py             630
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_formats.py     537
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_l10n.py        153
-rw-r--r--  python/mozbuild/mozpack/test/test_packager_unpack.py       67
-rw-r--r--  python/mozbuild/mozpack/test/test_path.py                 152
-rw-r--r--  python/mozbuild/mozpack/test/test_pkg.py                  138
-rw-r--r--  python/mozbuild/mozpack/test/test_unify.py                250
-rw-r--r--  python/mozbuild/mozpack/unify.py                          265
41 files changed, 12225 insertions, 0 deletions
diff --git a/python/mozbuild/mozpack/__init__.py b/python/mozbuild/mozpack/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/__init__.py
diff --git a/python/mozbuild/mozpack/apple_pkg/Distribution.template b/python/mozbuild/mozpack/apple_pkg/Distribution.template
new file mode 100644
index 0000000000..2f4b9484d9
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/Distribution.template
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<installer-gui-script minSpecVersion="1">
+ <pkg-ref id="${CFBundleIdentifier}">
+ <bundle-version>
+ <bundle CFBundleShortVersionString="${CFBundleShortVersionString}" CFBundleVersion="${CFBundleVersion}" id="${CFBundleIdentifier}" path="${app_name}.app"/>
+ </bundle-version>
+ </pkg-ref>
+ <options customize="never" require-scripts="false" hostArchitectures="x86_64,arm64"/>
+ <choices-outline>
+ <line choice="default">
+ <line choice="${CFBundleIdentifier}"/>
+ </line>
+ </choices-outline>
+ <choice id="default"/>
+ <choice id="${CFBundleIdentifier}" visible="false">
+ <pkg-ref id="${CFBundleIdentifier}"/>
+ </choice>
+ <pkg-ref id="${CFBundleIdentifier}" version="${simple_version}" installKBytes="${installKBytes}">#${app_name_url_encoded}.pkg</pkg-ref>
+</installer-gui-script>
\ No newline at end of file
diff --git a/python/mozbuild/mozpack/apple_pkg/PackageInfo.template b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
new file mode 100644
index 0000000000..74d47e396c
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<pkg-info overwrite-permissions="true" relocatable="false" identifier="${CFBundleIdentifier}" postinstall-action="none" version="${simple_version}" format-version="2" generator-version="InstallCmds-681 (18F132)" install-location="/Applications" auth="root">
+ <payload numberOfFiles="${numberOfFiles}" installKBytes="${installKBytes}"/>
+ <bundle path="./${app_name}.app" id="${CFBundleIdentifier}" CFBundleShortVersionString="${CFBundleShortVersionString}" CFBundleVersion="${CFBundleVersion}"/>
+ <bundle-version>
+ <bundle id="${CFBundleIdentifier}"/>
+ </bundle-version>
+ <upgrade-bundle>
+ <bundle id="${CFBundleIdentifier}"/>
+ </upgrade-bundle>
+ <update-bundle/>
+ <atomic-update-bundle/>
+ <strict-identifier>
+ <bundle id="${CFBundleIdentifier}"/>
+ </strict-identifier>
+ <relocate>
+ <bundle id="${CFBundleIdentifier}"/>
+ </relocate>
+</pkg-info>
\ No newline at end of file
diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py
new file mode 100644
index 0000000000..89bf14b179
--- /dev/null
+++ b/python/mozbuild/mozpack/archive.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bz2
+import gzip
+import stat
+import tarfile
+
+from .files import BaseFile, File
+
+# 2016-01-01T00:00:00+0000
+DEFAULT_MTIME = 1451606400
+
+
+# Python 3.9 contains this change:
+# https://github.com/python/cpython/commit/674935b8caf33e47c78f1b8e197b1b77a04992d2
+# which changes the output of tar creation compared to earlier versions.
+# This code is used to generate tar files that are meant to be deterministic
+# across versions of python: specifically, it's used as part of computing the
+# hash of docker images, which needs to be identical between CI (which uses
+# python 3.8) and developer environments (using arbitrary versions of python,
+# at this point most probably more recent than 3.9).
+# So we subclass TarInfo so that, when used on python >= 3.9, it reproduces
+# the behavior from python < 3.9.
+# Here's how it goes:
+# - the behavior in python >= 3.9 is the same as python < 3.9 when the type
+#   encoded in the tarinfo is CHRTYPE or BLKTYPE.
+# - the value of the type is only compared in the context of choosing which
+#   behavior to take.
+# - we replace the type with the same value (so that other uses of the value
+#   are unchanged), wrapped in a class that compares equal to CHRTYPE, so
+#   that the condition enabling the old behavior is taken.
+class HackedType(bytes):
+    def __eq__(self, other):
+        if other == tarfile.CHRTYPE:
+            return True
+        # Compare as plain bytes; writing `self == other` here would recurse
+        # back into this method.
+        return bytes.__eq__(self, other)
+
+
+class TarInfo(tarfile.TarInfo):
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ info["type"] = HackedType(info["type"])
+ return tarfile.TarInfo._create_header(info, format, encoding, errors)
+
+
+def create_tar_from_files(fp, files):
+ """Create a tar file deterministically.
+
+ Receives a dict mapping names of files in the archive to local filesystem
+ paths or ``mozpack.files.BaseFile`` instances.
+
+ The files will be archived and written to the passed file handle opened
+ for writing.
+
+ Only regular files can be written.
+
+ FUTURE accept a filename argument (or create APIs to write files)
+ """
+ # The format is explicitly set to tarfile.GNU_FORMAT, because this default format
+ # has been changed in Python 3.8.
+ with tarfile.open(
+ name="", mode="w", fileobj=fp, dereference=True, format=tarfile.GNU_FORMAT
+ ) as tf:
+ for archive_path, f in sorted(files.items()):
+ if not isinstance(f, BaseFile):
+ f = File(f)
+
+ ti = TarInfo(archive_path)
+ ti.mode = f.mode or 0o0644
+ ti.type = tarfile.REGTYPE
+
+ if not ti.isreg():
+ raise ValueError("not a regular file: %s" % f)
+
+ # Disallow setuid and setgid bits. This is an arbitrary restriction.
+ # However, since we set uid/gid to root:root, setuid and setgid
+ # would be a glaring security hole if the archive were
+ # uncompressed as root.
+ if ti.mode & (stat.S_ISUID | stat.S_ISGID):
+ raise ValueError("cannot add file with setuid or setgid set: " "%s" % f)
+
+ # Set uid, gid, username, and group as deterministic values.
+ ti.uid = 0
+ ti.gid = 0
+ ti.uname = ""
+ ti.gname = ""
+
+ # Set mtime to a constant value.
+ ti.mtime = DEFAULT_MTIME
+
+ ti.size = f.size()
+ # tarfile wants to pass a size argument to read(). So just
+ # wrap/buffer in a proper file object interface.
+ tf.addfile(ti, f.open())
+
+
+def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
+ """Create a tar.gz file deterministically from files.
+
+ This is a glorified wrapper around ``create_tar_from_files`` that
+ adds gzip compression.
+
+ The passed file handle should be opened for writing in binary mode.
+ When the function returns, all data has been written to the handle.
+ """
+ # Offset 3-7 in the gzip header contains an mtime. Pin it to a known
+ # value so output is deterministic.
+ gf = gzip.GzipFile(
+ filename=filename or "",
+ mode="wb",
+ fileobj=fp,
+ compresslevel=compresslevel,
+ mtime=DEFAULT_MTIME,
+ )
+ with gf:
+ create_tar_from_files(gf, files)
+
+
+class _BZ2Proxy(object):
+ """File object that proxies writes to a bz2 compressor."""
+
+ def __init__(self, fp, compresslevel=9):
+ self.fp = fp
+ self.compressor = bz2.BZ2Compressor(compresslevel)
+ self.pos = 0
+
+ def tell(self):
+ return self.pos
+
+ def write(self, data):
+ data = self.compressor.compress(data)
+ self.pos += len(data)
+ self.fp.write(data)
+
+ def close(self):
+ data = self.compressor.flush()
+ self.pos += len(data)
+ self.fp.write(data)
+
+
+def create_tar_bz2_from_files(fp, files, compresslevel=9):
+ """Create a tar.bz2 file deterministically from files.
+
+ This is a glorified wrapper around ``create_tar_from_files`` that
+ adds bzip2 compression.
+
+    This function is similar to ``create_tar_gz_from_files()``.
+ """
+ proxy = _BZ2Proxy(fp, compresslevel=compresslevel)
+ create_tar_from_files(proxy, files)
+ proxy.close()
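
As a usage sketch (not part of the patch; it assumes the in-tree
``mozpack.files.GeneratedFile`` helper, which this module's tests also use):

    from mozpack.archive import create_tar_gz_from_files
    from mozpack.files import GeneratedFile

    # Archive paths map to BaseFile instances (or local filesystem paths).
    files = {"docs/README": GeneratedFile(b"hello\n")}
    with open("out.tar.gz", "wb") as fp:
        # Deterministic output: fixed mtime, root:root ownership, and
        # entries sorted by archive path.
        create_tar_gz_from_files(fp, files)

Running this twice produces byte-identical archives, which is the point of
pinning the mtime and ownership above.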
diff --git a/python/mozbuild/mozpack/chrome/__init__.py b/python/mozbuild/mozpack/chrome/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/__init__.py
diff --git a/python/mozbuild/mozpack/chrome/flags.py b/python/mozbuild/mozpack/chrome/flags.py
new file mode 100644
index 0000000000..6b096c862a
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/flags.py
@@ -0,0 +1,278 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+from collections import OrderedDict
+
+import six
+from packaging.version import Version
+
+from mozpack.errors import errors
+
+
+class Flag(object):
+ """
+ Class for flags in manifest entries in the form:
+ "flag" (same as "flag=true")
+ "flag=yes|true|1"
+ "flag=no|false|0"
+ """
+
+ def __init__(self, name):
+ """
+ Initialize a Flag with the given name.
+ """
+ self.name = name
+ self.value = None
+
+ def add_definition(self, definition):
+ """
+ Add a flag value definition. Replaces any previously set value.
+ """
+ if definition == self.name:
+ self.value = True
+ return
+ assert definition.startswith(self.name)
+ if definition[len(self.name)] != "=":
+ return errors.fatal("Malformed flag: %s" % definition)
+ value = definition[len(self.name) + 1 :]
+ if value in ("yes", "true", "1", "no", "false", "0"):
+ self.value = value
+ else:
+ return errors.fatal("Unknown value in: %s" % definition)
+
+ def matches(self, value):
+ """
+ Return whether the flag value matches the given value. The values
+ are canonicalized for comparison.
+ """
+ if value in ("yes", "true", "1", True):
+ return self.value in ("yes", "true", "1", True)
+ if value in ("no", "false", "0", False):
+ return self.value in ("no", "false", "0", False, None)
+ raise RuntimeError("Invalid value: %s" % value)
+
+ def __str__(self):
+ """
+ Serialize the flag value in the same form given to the last
+ add_definition() call.
+ """
+ if self.value is None:
+ return ""
+ if self.value is True:
+ return self.name
+ return "%s=%s" % (self.name, self.value)
+
+ def __eq__(self, other):
+ return str(self) == other
+
+
+class StringFlag(object):
+ """
+ Class for string flags in manifest entries in the form:
+ "flag=string"
+ "flag!=string"
+ """
+
+ def __init__(self, name):
+ """
+ Initialize a StringFlag with the given name.
+ """
+ self.name = name
+ self.values = []
+
+ def add_definition(self, definition):
+ """
+ Add a string flag definition.
+ """
+ assert definition.startswith(self.name)
+ value = definition[len(self.name) :]
+ if value.startswith("="):
+ self.values.append(("==", value[1:]))
+ elif value.startswith("!="):
+ self.values.append(("!=", value[2:]))
+ else:
+ return errors.fatal("Malformed flag: %s" % definition)
+
+ def matches(self, value):
+ """
+ Return whether one of the string flag definitions matches the given
+ value.
+ For example,
+
+ flag = StringFlag('foo')
+ flag.add_definition('foo!=bar')
+ flag.matches('bar') returns False
+ flag.matches('qux') returns True
+ flag = StringFlag('foo')
+ flag.add_definition('foo=bar')
+ flag.add_definition('foo=baz')
+ flag.matches('bar') returns True
+ flag.matches('baz') returns True
+ flag.matches('qux') returns False
+ """
+ if not self.values:
+ return True
+ for comparison, val in self.values:
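+            # `comparison` is restricted to "==" or "!=" by add_definition(),
+            # so this eval only ever builds one of those two comparisons.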
+ if eval("value %s val" % comparison):
+ return True
+ return False
+
+ def __str__(self):
+ """
+ Serialize the flag definitions in the same form given to each
+ add_definition() call.
+ """
+ res = []
+ for comparison, val in self.values:
+ if comparison == "==":
+ res.append("%s=%s" % (self.name, val))
+ else:
+ res.append("%s!=%s" % (self.name, val))
+ return " ".join(res)
+
+ def __eq__(self, other):
+ return str(self) == other
+
+
+class VersionFlag(object):
+ """
+ Class for version flags in manifest entries in the form:
+ "flag=version"
+ "flag<=version"
+ "flag<version"
+ "flag>=version"
+ "flag>version"
+ """
+
+ def __init__(self, name):
+ """
+ Initialize a VersionFlag with the given name.
+ """
+ self.name = name
+ self.values = []
+
+ def add_definition(self, definition):
+ """
+ Add a version flag definition.
+ """
+ assert definition.startswith(self.name)
+ value = definition[len(self.name) :]
+ if value.startswith("="):
+ self.values.append(("==", Version(value[1:])))
+ elif len(value) > 1 and value[0] in ["<", ">"]:
+ if value[1] == "=":
+ if len(value) < 3:
+ return errors.fatal("Malformed flag: %s" % definition)
+ self.values.append((value[0:2], Version(value[2:])))
+ else:
+ self.values.append((value[0], Version(value[1:])))
+ else:
+ return errors.fatal("Malformed flag: %s" % definition)
+
+ def matches(self, value):
+ """
+ Return whether one of the version flag definitions matches the given
+ value.
+ For example,
+
+ flag = VersionFlag('foo')
+ flag.add_definition('foo>=1.0')
+ flag.matches('1.0') returns True
+ flag.matches('1.1') returns True
+ flag.matches('0.9') returns False
+ flag = VersionFlag('foo')
+ flag.add_definition('foo>=1.0')
+ flag.add_definition('foo<0.5')
+ flag.matches('0.4') returns True
+ flag.matches('1.0') returns True
+ flag.matches('0.6') returns False
+ """
+ value = Version(value)
+ if not self.values:
+ return True
+ for comparison, val in self.values:
+ if eval("value %s val" % comparison):
+ return True
+ return False
+
+ def __str__(self):
+ """
+ Serialize the flag definitions in the same form given to each
+ add_definition() call.
+ """
+ res = []
+ for comparison, val in self.values:
+ if comparison == "==":
+ res.append("%s=%s" % (self.name, val))
+ else:
+ res.append("%s%s%s" % (self.name, comparison, val))
+ return " ".join(res)
+
+ def __eq__(self, other):
+ return str(self) == other
+
+
+class Flags(OrderedDict):
+    """
+    Class to handle a set of flag definitions given on a single manifest
+    entry.
+    """
+
+ FLAGS = {
+ "application": StringFlag,
+ "appversion": VersionFlag,
+ "platformversion": VersionFlag,
+ "contentaccessible": Flag,
+ "os": StringFlag,
+ "osversion": VersionFlag,
+ "abi": StringFlag,
+ "platform": Flag,
+ "xpcnativewrappers": Flag,
+ "tablet": Flag,
+ "process": StringFlag,
+ "backgroundtask": StringFlag,
+ }
+ RE = re.compile(r"([!<>=]+)")
+
+ def __init__(self, *flags):
+ """
+ Initialize a set of flags given in string form.
+ flags = Flags('contentaccessible=yes', 'appversion>=3.5')
+ """
+ OrderedDict.__init__(self)
+ for f in flags:
+ name = self.RE.split(f)
+ name = name[0]
+ if name not in self.FLAGS:
+ errors.fatal("Unknown flag: %s" % name)
+ continue
+ if name not in self:
+ self[name] = self.FLAGS[name](name)
+ self[name].add_definition(f)
+
+ def __str__(self):
+ """
+ Serialize the set of flags.
+ """
+ return " ".join(str(self[k]) for k in self)
+
+ def match(self, **filter):
+ """
+ Return whether the set of flags match the set of given filters.
+ flags = Flags('contentaccessible=yes', 'appversion>=3.5',
+ 'application=foo')
+
+ flags.match(application='foo') returns True
+ flags.match(application='foo', appversion='3.5') returns True
+ flags.match(application='foo', appversion='3.0') returns False
+
+ """
+ for name, value in six.iteritems(filter):
+ if name not in self:
+ continue
+ if not self[name].matches(value):
+ return False
+ return True
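
A short sketch of how this class is used (not part of the patch; the values
are made up and mirror the docstring examples above):

    from mozpack.chrome.flags import Flags

    flags = Flags("contentaccessible=yes", "appversion>=3.5", "application=foo")
    assert flags.match(application="foo", appversion="3.5")
    assert not flags.match(application="foo", appversion="3.0")
    # Flags serialize back to the definitions they were given, in order:
    assert str(flags) == "contentaccessible=yes appversion>=3.5 application=foo"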
diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py
new file mode 100644
index 0000000000..14c11d4c1d
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/manifest.py
@@ -0,0 +1,400 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+
+import six
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.flags import Flags
+from mozpack.errors import errors
+
+
+class ManifestEntry(object):
+ """
+ Base class for all manifest entry types.
+ Subclasses may define the following class or member variables:
+
+ - localized: indicates whether the manifest entry is used for localized
+ data.
+ - type: the manifest entry type (e.g. 'content' in
+ 'content global content/global/')
+ - allowed_flags: a set of flags allowed to be defined for the given
+ manifest entry type.
+
+ A manifest entry is attached to a base path, defining where the manifest
+ entry is bound to, and that is used to find relative paths defined in
+ entries.
+ """
+
+ localized = False
+ type = None
+ allowed_flags = [
+ "application",
+ "platformversion",
+ "os",
+ "osversion",
+ "abi",
+ "xpcnativewrappers",
+ "tablet",
+ "process",
+ "contentaccessible",
+ "backgroundtask",
+ ]
+
+ def __init__(self, base, *flags):
+ """
+ Initialize a manifest entry with the given base path and flags.
+ """
+ self.base = base
+ self.flags = Flags(*flags)
+ if not all(f in self.allowed_flags for f in self.flags):
+ errors.fatal(
+ "%s unsupported for %s manifest entries"
+ % (
+ ",".join(f for f in self.flags if f not in self.allowed_flags),
+ self.type,
+ )
+ )
+
+ def serialize(self, *args):
+ """
+ Serialize the manifest entry.
+ """
+ entry = [self.type] + list(args)
+ flags = str(self.flags)
+ if flags:
+ entry.append(flags)
+ return " ".join(entry)
+
+ def __eq__(self, other):
+ return self.base == other.base and str(self) == str(other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return "<%s@%s>" % (str(self), self.base)
+
+ def move(self, base):
+ """
+ Return a new manifest entry with a different base path.
+ """
+ return parse_manifest_line(base, str(self))
+
+ def rebase(self, base):
+ """
+ Return a new manifest entry with all relative paths defined in the
+ entry relative to a new base directory.
+ The base class doesn't define relative paths, so it is equivalent to
+ move().
+ """
+ return self.move(base)
+
+
+class ManifestEntryWithRelPath(ManifestEntry):
+ """
+ Abstract manifest entry type with a relative path definition.
+ """
+
+ def __init__(self, base, relpath, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.relpath = relpath
+
+ def __str__(self):
+ return self.serialize(self.relpath)
+
+ def rebase(self, base):
+ """
+ Return a new manifest entry with all relative paths defined in the
+ entry relative to a new base directory.
+ """
+ clone = ManifestEntry.rebase(self, base)
+ clone.relpath = mozpath.rebase(self.base, base, self.relpath)
+ return clone
+
+ @property
+ def path(self):
+ return mozpath.normpath(mozpath.join(self.base, self.relpath))
+
+
+class Manifest(ManifestEntryWithRelPath):
+ """
+ Class for 'manifest' entries.
+ manifest some/path/to/another.manifest
+ """
+
+ type = "manifest"
+
+
+class ManifestChrome(ManifestEntryWithRelPath):
+ """
+ Abstract class for chrome entries.
+ """
+
+ def __init__(self, base, name, relpath, *flags):
+ ManifestEntryWithRelPath.__init__(self, base, relpath, *flags)
+ self.name = name
+
+ @property
+ def location(self):
+ return mozpath.join(self.base, self.relpath)
+
+
+class ManifestContent(ManifestChrome):
+ """
+ Class for 'content' entries.
+ content global content/global/
+ """
+
+ type = "content"
+ allowed_flags = ManifestChrome.allowed_flags + [
+ "contentaccessible",
+ "platform",
+ ]
+
+ def __str__(self):
+ return self.serialize(self.name, self.relpath)
+
+
+class ManifestMultiContent(ManifestChrome):
+ """
+ Abstract class for chrome entries with multiple definitions.
+ Used for locale and skin entries.
+ """
+
+ type = None
+
+ def __init__(self, base, name, id, relpath, *flags):
+ ManifestChrome.__init__(self, base, name, relpath, *flags)
+ self.id = id
+
+ def __str__(self):
+ return self.serialize(self.name, self.id, self.relpath)
+
+
+class ManifestLocale(ManifestMultiContent):
+ """
+ Class for 'locale' entries.
+ locale global en-US content/en-US/
+ locale global fr content/fr/
+ """
+
+ localized = True
+ type = "locale"
+
+
+class ManifestSkin(ManifestMultiContent):
+ """
+ Class for 'skin' entries.
+ skin global classic/1.0 content/skin/classic/
+ """
+
+ type = "skin"
+
+
+class ManifestOverload(ManifestEntry):
+ """
+ Abstract class for chrome entries defining some kind of overloading.
+ Used for overlay, override or style entries.
+ """
+
+ type = None
+
+ def __init__(self, base, overloaded, overload, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.overloaded = overloaded
+ self.overload = overload
+
+ def __str__(self):
+ return self.serialize(self.overloaded, self.overload)
+
+
+class ManifestOverlay(ManifestOverload):
+ """
+ Class for 'overlay' entries.
+ overlay chrome://global/content/viewSource.xul \
+ chrome://browser/content/viewSourceOverlay.xul
+ """
+
+ type = "overlay"
+
+
+class ManifestStyle(ManifestOverload):
+ """
+ Class for 'style' entries.
+ style chrome://global/content/viewSource.xul \
+ chrome://browser/skin/
+ """
+
+ type = "style"
+
+
+class ManifestOverride(ManifestOverload):
+ """
+ Class for 'override' entries.
+ override chrome://global/locale/netError.dtd \
+ chrome://browser/locale/netError.dtd
+ """
+
+ type = "override"
+
+
+class ManifestResource(ManifestEntry):
+ """
+ Class for 'resource' entries.
+ resource gre-resources toolkit/res/
+ resource services-sync resource://gre/modules/services-sync/
+
+ The target may be a relative path or a resource or chrome url.
+ """
+
+ type = "resource"
+
+ def __init__(self, base, name, target, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.name = name
+ self.target = target
+
+ def __str__(self):
+ return self.serialize(self.name, self.target)
+
+ def rebase(self, base):
+ u = urlparse(self.target)
+ if u.scheme and u.scheme != "jar":
+ return ManifestEntry.rebase(self, base)
+ clone = ManifestEntry.rebase(self, base)
+ clone.target = mozpath.rebase(self.base, base, self.target)
+ return clone
+
+
+class ManifestBinaryComponent(ManifestEntryWithRelPath):
+ """
+ Class for 'binary-component' entries.
+ binary-component some/path/to/a/component.dll
+ """
+
+ type = "binary-component"
+
+
+class ManifestComponent(ManifestEntryWithRelPath):
+ """
+ Class for 'component' entries.
+ component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js
+ """
+
+ type = "component"
+
+ def __init__(self, base, cid, file, *flags):
+ ManifestEntryWithRelPath.__init__(self, base, file, *flags)
+ self.cid = cid
+
+ def __str__(self):
+ return self.serialize(self.cid, self.relpath)
+
+
+class ManifestInterfaces(ManifestEntryWithRelPath):
+ """
+ Class for 'interfaces' entries.
+ interfaces foo.xpt
+ """
+
+ type = "interfaces"
+
+
+class ManifestCategory(ManifestEntry):
+ """
+ Class for 'category' entries.
+    category command-line-handler m-browser @mozilla.org/browser/clh;1
+ """
+
+ type = "category"
+
+ def __init__(self, base, category, name, value, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.category = category
+ self.name = name
+ self.value = value
+
+ def __str__(self):
+ return self.serialize(self.category, self.name, self.value)
+
+
+class ManifestContract(ManifestEntry):
+ """
+ Class for 'contract' entries.
+ contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68}
+ """
+
+ type = "contract"
+
+ def __init__(self, base, contractID, cid, *flags):
+ ManifestEntry.__init__(self, base, *flags)
+ self.contractID = contractID
+ self.cid = cid
+
+ def __str__(self):
+ return self.serialize(self.contractID, self.cid)
+
+
+# All manifest classes by their type name.
+MANIFESTS_TYPES = dict(
+ [
+ (c.type, c)
+ for c in globals().values()
+ if type(c) == type
+ and issubclass(c, ManifestEntry)
+ and hasattr(c, "type")
+ and c.type
+ ]
+)
+
+MANIFEST_RE = re.compile(r"^#.*$")
+
+
+def parse_manifest_line(base, line):
+ """
+ Parse a line from a manifest file with the given base directory and
+ return the corresponding ManifestEntry instance.
+ """
+ # Remove comments
+ cmd = MANIFEST_RE.sub("", line).strip().split()
+ if not cmd:
+ return None
+    if cmd[0] not in MANIFESTS_TYPES:
+ return errors.fatal("Unknown manifest directive: %s" % cmd[0])
+ return MANIFESTS_TYPES[cmd[0]](base, *cmd[1:])
+
+
+def parse_manifest(root, path, fileobj=None):
+ """
+ Parse a manifest file.
+ """
+ base = mozpath.dirname(path)
+ if root:
+ path = os.path.normpath(os.path.abspath(os.path.join(root, path)))
+ if not fileobj:
+ fileobj = open(path)
+ linenum = 0
+ for line in fileobj:
+ line = six.ensure_text(line)
+ linenum += 1
+ with errors.context(path, linenum):
+ e = parse_manifest_line(base, line)
+ if e:
+ yield e
+
+
+def is_manifest(path):
+ """
+ Return whether the given path is that of a manifest file.
+ """
+ return (
+ path.endswith(".manifest")
+ and not path.endswith(".CRT.manifest")
+ and not path.endswith(".exe.manifest")
+ and os.path.basename(path) != "cose.manifest"
+ )
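
A parsing sketch (not part of the patch; the entry values are made up):

    from mozpack.chrome.manifest import ManifestLocale, parse_manifest_line

    entry = parse_manifest_line("chrome", "locale global en-US content/en-US/")
    assert isinstance(entry, ManifestLocale)
    assert entry.name == "global" and entry.id == "en-US"
    # Relative paths resolve against the base directory the entry is bound to.
    assert entry.path == "chrome/content/en-US"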
diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py
new file mode 100644
index 0000000000..c042e5432f
--- /dev/null
+++ b/python/mozbuild/mozpack/copier.py
@@ -0,0 +1,605 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import concurrent.futures as futures
+import errno
+import os
+import stat
+import sys
+from collections import Counter, OrderedDict, defaultdict
+
+import six
+
+import mozpack.path as mozpath
+from mozpack.errors import errors
+from mozpack.files import BaseFile, Dest
+
+
+class FileRegistry(object):
+ """
+ Generic container to keep track of a set of BaseFile instances. It
+ preserves the order under which the files are added, but doesn't keep
+ track of empty directories (directories are not stored at all).
+ The paths associated with the BaseFile instances are relative to an
+ unspecified (virtual) root directory.
+
+ registry = FileRegistry()
+ registry.add('foo/bar', file_instance)
+ """
+
+ def __init__(self):
+ self._files = OrderedDict()
+ self._required_directories = Counter()
+ self._partial_paths_cache = {}
+
+ def _partial_paths(self, path):
+ """
+ Turn "foo/bar/baz/zot" into ["foo/bar/baz", "foo/bar", "foo"].
+ """
+ dir_name = path.rpartition("/")[0]
+ if not dir_name:
+ return []
+
+ partial_paths = self._partial_paths_cache.get(dir_name)
+ if partial_paths:
+ return partial_paths
+
+ partial_paths = [dir_name] + self._partial_paths(dir_name)
+
+ self._partial_paths_cache[dir_name] = partial_paths
+ return partial_paths
+
+ def add(self, path, content):
+ """
+ Add a BaseFile instance to the container, under the given path.
+ """
+ assert isinstance(content, BaseFile)
+ if path in self._files:
+ return errors.error("%s already added" % path)
+ if self._required_directories[path] > 0:
+ return errors.error("Can't add %s: it is a required directory" % path)
+ # Check whether any parent of the given path is already stored
+ partial_paths = self._partial_paths(path)
+ for partial_path in partial_paths:
+ if partial_path in self._files:
+ return errors.error("Can't add %s: %s is a file" % (path, partial_path))
+ self._files[path] = content
+ self._required_directories.update(partial_paths)
+
+ def match(self, pattern):
+ """
+ Return the list of paths, stored in the container, matching the
+ given pattern. See the mozpack.path.match documentation for a
+ description of the handled patterns.
+ """
+ if "*" in pattern:
+ return [p for p in self.paths() if mozpath.match(p, pattern)]
+ if pattern == "":
+ return self.paths()
+ if pattern in self._files:
+ return [pattern]
+ return [p for p in self.paths() if mozpath.basedir(p, [pattern]) == pattern]
+
+ def remove(self, pattern):
+ """
+ Remove paths matching the given pattern from the container. See the
+ mozpack.path.match documentation for a description of the handled
+ patterns.
+ """
+ items = self.match(pattern)
+ if not items:
+ return errors.error(
+ "Can't remove %s: %s"
+ % (pattern, "not matching anything previously added")
+ )
+ for i in items:
+ del self._files[i]
+ self._required_directories.subtract(self._partial_paths(i))
+
+ def paths(self):
+ """
+ Return all paths stored in the container, in the order they were added.
+ """
+ return list(self._files)
+
+ def __len__(self):
+ """
+ Return number of paths stored in the container.
+ """
+ return len(self._files)
+
+ def __contains__(self, pattern):
+ raise RuntimeError(
+ "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__
+ )
+
+ def contains(self, pattern):
+ """
+ Return whether the container contains paths matching the given
+ pattern. See the mozpack.path.match documentation for a description of
+ the handled patterns.
+ """
+ return len(self.match(pattern)) > 0
+
+ def __getitem__(self, path):
+ """
+ Return the BaseFile instance stored in the container for the given
+ path.
+ """
+ return self._files[path]
+
+ def __iter__(self):
+ """
+ Iterate over all (path, BaseFile instance) pairs from the container.
+ for path, file in registry:
+ (...)
+ """
+ return six.iteritems(self._files)
+
+ def required_directories(self):
+ """
+ Return the set of directories required by the paths in the container,
+ in no particular order. The returned directories are relative to an
+ unspecified (virtual) root directory (and do not include said root
+ directory).
+ """
+ return set(k for k, v in self._required_directories.items() if v > 0)
+
+ def output_to_inputs_tree(self):
+ """
+ Return a dictionary mapping each output path to the set of its
+ required input paths.
+
+ All paths are normalized.
+ """
+ tree = {}
+ for output, file in self:
+ output = mozpath.normpath(output)
+ tree[output] = set(mozpath.normpath(f) for f in file.inputs())
+ return tree
+
+ def input_to_outputs_tree(self):
+ """
+ Return a dictionary mapping each input path to the set of
+ impacted output paths.
+
+ All paths are normalized.
+ """
+ tree = defaultdict(set)
+ for output, file in self:
+ output = mozpath.normpath(output)
+ for input in file.inputs():
+ input = mozpath.normpath(input)
+ tree[input].add(output)
+ return dict(tree)
+
+
+class FileRegistrySubtree(object):
+ """A proxy class to give access to a subtree of an existing FileRegistry.
+
+ Note this doesn't implement the whole FileRegistry interface."""
+
+ def __new__(cls, base, registry):
+ if not base:
+ return registry
+ return object.__new__(cls)
+
+ def __init__(self, base, registry):
+ self._base = base
+ self._registry = registry
+
+ def _get_path(self, path):
+ # mozpath.join will return a trailing slash if path is empty, and we
+ # don't want that.
+ return mozpath.join(self._base, path) if path else self._base
+
+ def add(self, path, content):
+ return self._registry.add(self._get_path(path), content)
+
+ def match(self, pattern):
+ return [
+ mozpath.relpath(p, self._base)
+ for p in self._registry.match(self._get_path(pattern))
+ ]
+
+ def remove(self, pattern):
+ return self._registry.remove(self._get_path(pattern))
+
+ def paths(self):
+ return [p for p, f in self]
+
+ def __len__(self):
+ return len(self.paths())
+
+ def contains(self, pattern):
+ return self._registry.contains(self._get_path(pattern))
+
+ def __getitem__(self, path):
+ return self._registry[self._get_path(path)]
+
+ def __iter__(self):
+ for p, f in self._registry:
+ if mozpath.basedir(p, [self._base]):
+ yield mozpath.relpath(p, self._base), f
+
+
+class FileCopyResult(object):
+ """Represents results of a FileCopier.copy operation."""
+
+ def __init__(self):
+ self.updated_files = set()
+ self.existing_files = set()
+ self.removed_files = set()
+ self.removed_directories = set()
+
+ @property
+ def updated_files_count(self):
+ return len(self.updated_files)
+
+ @property
+ def existing_files_count(self):
+ return len(self.existing_files)
+
+ @property
+ def removed_files_count(self):
+ return len(self.removed_files)
+
+ @property
+ def removed_directories_count(self):
+ return len(self.removed_directories)
+
+
+class FileCopier(FileRegistry):
+ """
+ FileRegistry with the ability to copy the registered files to a separate
+ directory.
+ """
+
+ def copy(
+ self,
+ destination,
+ skip_if_older=True,
+ remove_unaccounted=True,
+ remove_all_directory_symlinks=True,
+ remove_empty_directories=True,
+ ):
+ """
+ Copy all registered files to the given destination path. The given
+ destination can be an existing directory, or not exist at all. It
+ can't be e.g. a file.
+ The copy process acts a bit like rsync: files are not copied when they
+ don't need to (see mozpack.files for details on file.copy).
+
+ By default, files in the destination directory that aren't
+ registered are removed and empty directories are deleted. In
+ addition, all directory symlinks in the destination directory
+ are deleted: this is a conservative approach to ensure that we
+        never accidentally write files into a directory that is not the
+ destination directory. In the worst case, we might have a
+ directory symlink in the object directory to the source
+ directory.
+
+ To disable removing of unregistered files, pass
+ remove_unaccounted=False. To disable removing empty
+ directories, pass remove_empty_directories=False. In rare
+ cases, you might want to maintain directory symlinks in the
+ destination directory (at least those that are not required to
+ be regular directories): pass
+ remove_all_directory_symlinks=False. Exercise caution with
+ this flag: you almost certainly do not want to preserve
+ directory symlinks.
+
+ Returns a FileCopyResult that details what changed.
+ """
+ assert isinstance(destination, six.string_types)
+ assert not os.path.exists(destination) or os.path.isdir(destination)
+
+ result = FileCopyResult()
+ have_symlinks = hasattr(os, "symlink")
+ destination = os.path.normpath(destination)
+
+ # We create the destination directory specially. We can't do this as
+ # part of the loop doing mkdir() below because that loop munges
+ # symlinks and permissions and parent directories of the destination
+ # directory may have their own weird schema. The contract is we only
+ # manage children of destination, not its parents.
+ try:
+ os.makedirs(destination)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ # Because we could be handling thousands of files, code in this
+ # function is optimized to minimize system calls. We prefer CPU time
+ # in Python over possibly I/O bound filesystem calls to stat() and
+ # friends.
+
+ required_dirs = set([destination])
+ required_dirs |= set(
+ os.path.normpath(os.path.join(destination, d))
+ for d in self.required_directories()
+ )
+
+ # Ensure destination directories are in place and proper.
+ #
+ # The "proper" bit is important. We need to ensure that directories
+ # have appropriate permissions or we will be unable to discover
+ # and write files. Furthermore, we need to verify directories aren't
+ # symlinks.
+ #
+ # Symlinked directories (a symlink whose target is a directory) are
+ # incompatible with us because our manifest talks in terms of files,
+ # not directories. If we leave symlinked directories unchecked, we
+ # would blindly follow symlinks and this might confuse file
+ # installation. For example, if an existing directory is a symlink
+ # to directory X and we attempt to install a symlink in this directory
+ # to a file in directory X, we may create a recursive symlink!
+ for d in sorted(required_dirs, key=len):
+ try:
+ os.mkdir(d)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+ # We allow the destination to be a symlink because the caller
+ # is responsible for managing the destination and we assume
+ # they know what they are doing.
+ if have_symlinks and d != destination:
+ st = os.lstat(d)
+ if stat.S_ISLNK(st.st_mode):
+ # While we have remove_unaccounted, it doesn't apply
+ # to directory symlinks because if it did, our behavior
+ # could be very wrong.
+ os.remove(d)
+ os.mkdir(d)
+
+ if not os.access(d, os.W_OK):
+ umask = os.umask(0o077)
+ os.umask(umask)
+ os.chmod(d, 0o777 & ~umask)
+
+ if isinstance(remove_unaccounted, FileRegistry):
+ existing_files = set(
+ os.path.normpath(os.path.join(destination, p))
+ for p in remove_unaccounted.paths()
+ )
+ existing_dirs = set(
+ os.path.normpath(os.path.join(destination, p))
+ for p in remove_unaccounted.required_directories()
+ )
+ existing_dirs |= {os.path.normpath(destination)}
+ else:
+ # While we have remove_unaccounted, it doesn't apply to empty
+ # directories because it wouldn't make sense: an empty directory
+ # is empty, so removing it should have no effect.
+ existing_dirs = set()
+ existing_files = set()
+ for root, dirs, files in os.walk(destination):
+ # We need to perform the same symlink detection as above.
+ # os.walk() doesn't follow symlinks into directories by
+ # default, so we need to check dirs (we can't wait for root).
+ if have_symlinks:
+ filtered = []
+ for d in dirs:
+ full = os.path.join(root, d)
+ st = os.lstat(full)
+ if stat.S_ISLNK(st.st_mode):
+ # This directory symlink is not a required
+ # directory: any such symlink would have been
+ # removed and a directory created above.
+ if remove_all_directory_symlinks:
+ os.remove(full)
+ result.removed_files.add(os.path.normpath(full))
+ else:
+ existing_files.add(os.path.normpath(full))
+ else:
+ filtered.append(d)
+
+ dirs[:] = filtered
+
+ existing_dirs.add(os.path.normpath(root))
+
+ for d in dirs:
+ existing_dirs.add(os.path.normpath(os.path.join(root, d)))
+
+ for f in files:
+ existing_files.add(os.path.normpath(os.path.join(root, f)))
+
+ # Now we reconcile the state of the world against what we want.
+ dest_files = set()
+
+ # Install files.
+ #
+ # Creating/appending new files on Windows/NTFS is slow. So we use a
+ # thread pool to speed it up significantly. The performance of this
+ # loop is so critical to common build operations on Linux that the
+ # overhead of the thread pool is worth avoiding, so we have 2 code
+ # paths. We also employ a low water mark to prevent thread pool
+ # creation if number of files is too small to benefit.
+ copy_results = []
+ if sys.platform == "win32" and len(self) > 100:
+ with futures.ThreadPoolExecutor(4) as e:
+ fs = []
+ for p, f in self:
+ destfile = os.path.normpath(os.path.join(destination, p))
+ fs.append((destfile, e.submit(f.copy, destfile, skip_if_older)))
+
+            copy_results = [(path, f.result()) for path, f in fs]
+ else:
+ for p, f in self:
+ destfile = os.path.normpath(os.path.join(destination, p))
+ copy_results.append((destfile, f.copy(destfile, skip_if_older)))
+
+ for destfile, copy_result in copy_results:
+ dest_files.add(destfile)
+ if copy_result:
+ result.updated_files.add(destfile)
+ else:
+ result.existing_files.add(destfile)
+
+ # Remove files no longer accounted for.
+ if remove_unaccounted:
+ for f in existing_files - dest_files:
+ # Windows requires write access to remove files.
+ if os.name == "nt" and not os.access(f, os.W_OK):
+ # It doesn't matter what we set permissions to since we
+ # will remove this file shortly.
+ os.chmod(f, 0o600)
+
+ os.remove(f)
+ result.removed_files.add(f)
+
+ if not remove_empty_directories:
+ return result
+
+ # Figure out which directories can be removed. This is complicated
+ # by the fact we optionally remove existing files. This would be easy
+ # if we walked the directory tree after installing files. But, we're
+ # trying to minimize system calls.
+
+ # Start with the ideal set.
+ remove_dirs = existing_dirs - required_dirs
+
+ # Then don't remove directories if we didn't remove unaccounted files
+ # and one of those files exists.
+ if not remove_unaccounted:
+ parents = set()
+ pathsep = os.path.sep
+ for f in existing_files:
+ path = f
+ while True:
+ # All the paths are normalized and relative by this point,
+ # so os.path.dirname would only do extra work.
+ dirname = path.rpartition(pathsep)[0]
+ if dirname in parents:
+ break
+ parents.add(dirname)
+ path = dirname
+ remove_dirs -= parents
+
+ # Remove empty directories that aren't required.
+ for d in sorted(remove_dirs, key=len, reverse=True):
+ try:
+ try:
+ os.rmdir(d)
+ except OSError as e:
+ if e.errno in (errno.EPERM, errno.EACCES):
+ # Permissions may not allow deletion. So ensure write
+ # access is in place before attempting to rmdir again.
+ os.chmod(d, 0o700)
+ os.rmdir(d)
+ else:
+ raise
+ except OSError as e:
+            # If remove_unaccounted is a FileRegistry, then we have a
+ # list of directories that may not be empty, so ignore rmdir
+ # ENOTEMPTY errors for them.
+ if (
+ isinstance(remove_unaccounted, FileRegistry)
+ and e.errno == errno.ENOTEMPTY
+ ):
+ continue
+ raise
+ result.removed_directories.add(d)
+
+ return result
+
+
+class Jarrer(FileRegistry, BaseFile):
+ """
+ FileRegistry with the ability to copy and pack the registered files as a
+ jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
+ """
+
+ def __init__(self, compress=True):
+ """
+ Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
+ for details on the compress argument.
+ """
+ self.compress = compress
+ self._preload = []
+ self._compress_options = {} # Map path to compress boolean option.
+ FileRegistry.__init__(self)
+
+ def add(self, path, content, compress=None):
+ FileRegistry.add(self, path, content)
+ if compress is not None:
+ self._compress_options[path] = compress
+
+ def copy(self, dest, skip_if_older=True):
+ """
+ Pack all registered files in the given destination jar. The given
+ destination jar may be a path to jar file, or a Dest instance for
+ a jar file.
+ If the destination jar file exists, its (compressed) contents are used
+ instead of the registered BaseFile instances when appropriate.
+ """
+
+ class DeflaterDest(Dest):
+ """
+ Dest-like class, reading from a file-like object initially, but
+ switching to a Deflater object if written to.
+
+ dest = DeflaterDest(original_file)
+ dest.read() # Reads original_file
+ dest.write(data) # Creates a Deflater and write data there
+ dest.read() # Re-opens the Deflater and reads from it
+ """
+
+ def __init__(self, orig=None, compress=True):
+ self.mode = None
+ self.deflater = orig
+ self.compress = compress
+
+ def read(self, length=-1):
+ if self.mode != "r":
+ assert self.mode is None
+ self.mode = "r"
+ return self.deflater.read(length)
+
+ def write(self, data):
+ if self.mode != "w":
+ from mozpack.mozjar import Deflater
+
+ self.deflater = Deflater(self.compress)
+ self.mode = "w"
+ self.deflater.write(data)
+
+ def exists(self):
+ return self.deflater is not None
+
+ if isinstance(dest, six.string_types):
+ dest = Dest(dest)
+ assert isinstance(dest, Dest)
+
+ from mozpack.mozjar import JarReader, JarWriter
+
+ try:
+ old_jar = JarReader(fileobj=dest)
+ except Exception:
+ old_jar = []
+
+ old_contents = dict([(f.filename, f) for f in old_jar])
+
+ with JarWriter(fileobj=dest, compress=self.compress) as jar:
+ for path, file in self:
+ compress = self._compress_options.get(path, self.compress)
+ if path in old_contents:
+ deflater = DeflaterDest(old_contents[path], compress)
+ else:
+ deflater = DeflaterDest(compress=compress)
+ file.copy(deflater, skip_if_older)
+ jar.add(path, deflater.deflater, mode=file.mode, compress=compress)
+ if self._preload:
+ jar.preload(self._preload)
+
+ def open(self):
+ raise RuntimeError("unsupported")
+
+ def preload(self, paths):
+ """
+ Add the given set of paths to the list of preloaded files. See
+ mozpack.mozjar.JarWriter documentation for details on jar preloading.
+ """
+ self._preload.extend(paths)
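
A minimal end-to-end sketch (not part of the patch; ``GeneratedFile`` comes
from mozpack.files, and the destination path is made up):

    from mozpack.copier import FileCopier
    from mozpack.files import GeneratedFile

    copier = FileCopier()
    copier.add("app/chrome.manifest", GeneratedFile(b"manifest chrome.manifest\n"))
    copier.add("app/chrome/foo.txt", GeneratedFile(b"foo\n"))

    # Copies into ./staging, creating directories as needed and removing
    # anything there that isn't registered (rsync-like behavior).
    result = copier.copy("staging")
    print(result.updated_files_count, result.removed_files_count)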
diff --git a/python/mozbuild/mozpack/dmg.py b/python/mozbuild/mozpack/dmg.py
new file mode 100644
index 0000000000..334f3a69cc
--- /dev/null
+++ b/python/mozbuild/mozpack/dmg.py
@@ -0,0 +1,230 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import shutil
+import subprocess
+from pathlib import Path
+from typing import List
+
+import mozfile
+
+from mozbuild.util import ensureParentDir
+
+is_linux = platform.system() == "Linux"
+is_osx = platform.system() == "Darwin"
+
+
+def chmod(dir):
+ "Set permissions of DMG contents correctly"
+ subprocess.check_call(["chmod", "-R", "a+rX,a-st,u+w,go-w", dir])
+
+
+def rsync(source: Path, dest: Path):
+ "rsync the contents of directory source into directory dest"
+ # Ensure a trailing slash on directories so rsync copies the *contents* of source.
+ raw_source = str(source)
+ if source.is_dir():
+ raw_source = str(source) + "/"
+ subprocess.check_call(["rsync", "-a", "--copy-unsafe-links", raw_source, dest])
+
+
+def set_folder_icon(dir: Path, tmpdir: Path, hfs_tool: Path = None):
+ "Set HFS attributes of dir to use a custom icon"
+ if is_linux:
+ hfs = tmpdir / "staged.hfs"
+ subprocess.check_call([hfs_tool, hfs, "attr", "/", "C"])
+ elif is_osx:
+ subprocess.check_call(["SetFile", "-a", "C", dir])
+
+
+def generate_hfs_file(
+ stagedir: Path, tmpdir: Path, volume_name: str, mkfshfs_tool: Path
+):
+ """
+    When cross compiling, we zero-fill an HFS file that we will turn into
+    a DMG. To size it, we measure the staged directory and add some slight
+    padding to that.
+ """
+ hfs = tmpdir / "staged.hfs"
+ output = subprocess.check_output(["du", "-s", stagedir])
+ size = int(output.split()[0]) / 1000 # Get in MB
+ size = int(size * 1.02) # Bump the used size slightly larger.
+    # Set up a file of the computed size, filled with zeros
+ subprocess.check_call(
+ [
+ "dd",
+ "if=/dev/zero",
+ "of={}".format(hfs),
+ "bs=1M",
+ "count={}".format(size),
+ ]
+ )
+ subprocess.check_call([mkfshfs_tool, "-v", volume_name, hfs])
+
+
+def create_app_symlink(stagedir: Path, tmpdir: Path, hfs_tool: Path = None):
+ """
+ Make a symlink to /Applications. The symlink name is a space
+ so we don't have to localize it. The Applications folder icon
+ will be shown in Finder, which should be clear enough for users.
+ """
+ if is_linux:
+ hfs = os.path.join(tmpdir, "staged.hfs")
+ subprocess.check_call([hfs_tool, hfs, "symlink", "/ ", "/Applications"])
+ elif is_osx:
+ os.symlink("/Applications", stagedir / " ")
+
+
+def create_dmg_from_staged(
+ stagedir: Path,
+ output_dmg: Path,
+ tmpdir: Path,
+ volume_name: str,
+ hfs_tool: Path = None,
+ dmg_tool: Path = None,
+):
+ "Given a prepared directory stagedir, produce a DMG at output_dmg."
+ if is_linux:
+ # The dmg tool doesn't create the destination directories, and silently
+ # returns success if the parent directory doesn't exist.
+ ensureParentDir(output_dmg)
+
+ hfs = os.path.join(tmpdir, "staged.hfs")
+ subprocess.check_call([hfs_tool, hfs, "addall", stagedir])
+ subprocess.check_call(
+ [dmg_tool, "build", hfs, output_dmg],
+ # dmg is seriously chatty
+ stdout=subprocess.DEVNULL,
+ )
+ elif is_osx:
+ hybrid = tmpdir / "hybrid.dmg"
+ subprocess.check_call(
+ [
+ "hdiutil",
+ "makehybrid",
+ "-hfs",
+ "-hfs-volume-name",
+ volume_name,
+ "-hfs-openfolder",
+ stagedir,
+ "-ov",
+ stagedir,
+ "-o",
+ hybrid,
+ ]
+ )
+ subprocess.check_call(
+ [
+ "hdiutil",
+ "convert",
+ "-format",
+ "UDBZ",
+ "-imagekey",
+ "bzip2-level=9",
+ "-ov",
+ hybrid,
+ "-o",
+ output_dmg,
+ ]
+ )
+
+
+def create_dmg(
+ source_directory: Path,
+ output_dmg: Path,
+ volume_name: str,
+ extra_files: List[tuple],
+ dmg_tool: Path,
+ hfs_tool: Path,
+ mkfshfs_tool: Path,
+):
+ """
+ Create a DMG disk image at the path output_dmg from source_directory.
+
+ Use volume_name as the disk image volume name, and
+ use extra_files as a list of tuples of (filename, relative path) to copy
+ into the disk image.
+ """
+ if platform.system() not in ("Darwin", "Linux"):
+ raise Exception("Don't know how to build a DMG on '%s'" % platform.system())
+
+ with mozfile.TemporaryDirectory() as tmp:
+ tmpdir = Path(tmp)
+ stagedir = tmpdir / "stage"
+ stagedir.mkdir()
+
+ # Copy the app bundle over using rsync
+ rsync(source_directory, stagedir)
+ # Copy extra files
+ for source, target in extra_files:
+ full_target = stagedir / target
+ full_target.parent.mkdir(parents=True, exist_ok=True)
+ shutil.copyfile(source, full_target)
+ if is_linux:
+ # Not needed in osx
+ generate_hfs_file(stagedir, tmpdir, volume_name, mkfshfs_tool)
+ create_app_symlink(stagedir, tmpdir, hfs_tool)
+ # Set the folder attributes to use a custom icon
+ set_folder_icon(stagedir, tmpdir, hfs_tool)
+ chmod(stagedir)
+ create_dmg_from_staged(
+ stagedir, output_dmg, tmpdir, volume_name, hfs_tool, dmg_tool
+ )
+
+
+def extract_dmg_contents(
+ dmgfile: Path,
+ destdir: Path,
+ dmg_tool: Path = None,
+ hfs_tool: Path = None,
+):
+ if is_linux:
+ with mozfile.TemporaryDirectory() as tmpdir:
+ hfs_file = os.path.join(tmpdir, "firefox.hfs")
+ subprocess.check_call(
+ [dmg_tool, "extract", dmgfile, hfs_file],
+ # dmg is seriously chatty
+ stdout=subprocess.DEVNULL,
+ )
+ subprocess.check_call([hfs_tool, hfs_file, "extractall", "/", destdir])
+ else:
+ # TODO: find better way to resolve topsrcdir (checkout directory)
+ topsrcdir = Path(__file__).parent.parent.parent.parent.resolve()
+ unpack_diskimage = topsrcdir / "build/package/mac_osx/unpack-diskimage"
+ unpack_mountpoint = Path("/tmp/app-unpack")
+ subprocess.check_call([unpack_diskimage, dmgfile, unpack_mountpoint, destdir])
+
+
+def extract_dmg(
+ dmgfile: Path,
+ output: Path,
+ dmg_tool: Path = None,
+ hfs_tool: Path = None,
+ dsstore: Path = None,
+ icon: Path = None,
+ background: Path = None,
+):
+ if platform.system() not in ("Darwin", "Linux"):
+ raise Exception("Don't know how to extract a DMG on '%s'" % platform.system())
+
+ with mozfile.TemporaryDirectory() as tmp:
+ tmpdir = Path(tmp)
+ extract_dmg_contents(dmgfile, tmpdir, dmg_tool, hfs_tool)
+ applications_symlink = tmpdir / " "
+ if applications_symlink.is_symlink():
+ # Rsync will fail on the presence of this symlink
+ applications_symlink.unlink()
+ rsync(tmpdir, output)
+
+ if dsstore:
+ dsstore.parent.mkdir(parents=True, exist_ok=True)
+ rsync(tmpdir / ".DS_Store", dsstore)
+ if background:
+ background.parent.mkdir(parents=True, exist_ok=True)
+ rsync(tmpdir / ".background" / background.name, background)
+ if icon:
+ icon.parent.mkdir(parents=True, exist_ok=True)
+ rsync(tmpdir / ".VolumeIcon.icns", icon)
diff --git a/python/mozbuild/mozpack/errors.py b/python/mozbuild/mozpack/errors.py
new file mode 100644
index 0000000000..25c0e8549c
--- /dev/null
+++ b/python/mozbuild/mozpack/errors.py
@@ -0,0 +1,151 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from contextlib import contextmanager
+
+
+class ErrorMessage(Exception):
+ """Exception type raised from errors.error() and errors.fatal()"""
+
+
+class AccumulatedErrors(Exception):
+ """Exception type raised from errors.accumulate()"""
+
+
+class ErrorCollector(object):
+ """
+ Error handling/logging class. A global instance, errors, is provided for
+ convenience.
+
+ Warnings, errors and fatal errors may be logged by calls to the following
+ functions:
+ - errors.warn(message)
+ - errors.error(message)
+ - errors.fatal(message)
+
+ Warnings only send the message on the logging output, while errors and
+ fatal errors send the message and throw an ErrorMessage exception. The
+ exception, however, may be deferred. See further below.
+
+ Errors may be ignored by calling:
+ - errors.ignore_errors()
+
+ After calling that function, only fatal errors throw an exception.
+
+ The warnings, errors or fatal errors messages may be augmented with context
+ information when a context is provided. Context is defined by a pair
+ (filename, linenumber), and may be set with errors.context() used as a
+
+ context manager:
+
+ .. code-block:: python
+
+ with errors.context(filename, linenumber):
+ errors.warn(message)
+
+ Arbitrary nesting is supported, both for errors.context calls:
+
+ .. code-block:: python
+
+ with errors.context(filename1, linenumber1):
+ errors.warn(message)
+ with errors.context(filename2, linenumber2):
+ errors.warn(message)
+
+ as well as for function calls:
+
+ .. code-block:: python
+
+ def func():
+ errors.warn(message)
+ with errors.context(filename, linenumber):
+ func()
+
+ Errors and fatal errors can have their exception thrown at a later time,
+ allowing for several different errors to be reported at once before
+ throwing. This is achieved with errors.accumulate() as a context manager:
+
+ .. code-block:: python
+
+ with errors.accumulate():
+ if test1:
+ errors.error(message1)
+ if test2:
+ errors.error(message2)
+
+    In such cases, a single AccumulatedErrors exception is thrown; it doesn't
+    carry information about the individual errors, but the logged messages do.
+ """
+
+ out = sys.stderr
+ WARN = 1
+ ERROR = 2
+ FATAL = 3
+ _level = ERROR
+ _context = []
+ _count = None
+
+ def ignore_errors(self, ignore=True):
+ if ignore:
+ self._level = self.FATAL
+ else:
+ self._level = self.ERROR
+
+ def _full_message(self, level, msg):
+ if level >= self._level:
+ level = "error"
+ else:
+ level = "warning"
+ if self._context:
+ file, line = self._context[-1]
+ return "%s: %s:%d: %s" % (level, file, line, msg)
+ return "%s: %s" % (level, msg)
+
+ def _handle(self, level, msg):
+ msg = self._full_message(level, msg)
+ if level >= self._level:
+ if self._count is None:
+ raise ErrorMessage(msg)
+ self._count += 1
+ print(msg, file=self.out)
+
+ def fatal(self, msg):
+ self._handle(self.FATAL, msg)
+
+ def error(self, msg):
+ self._handle(self.ERROR, msg)
+
+ def warn(self, msg):
+ self._handle(self.WARN, msg)
+
+ def get_context(self):
+ if self._context:
+ return self._context[-1]
+
+ @contextmanager
+ def context(self, file, line):
+ if file and line:
+ self._context.append((file, line))
+ yield
+ if file and line:
+ self._context.pop()
+
+ @contextmanager
+ def accumulate(self):
+ assert self._count is None
+ self._count = 0
+ yield
+ count = self._count
+ self._count = None
+ if count:
+ raise AccumulatedErrors()
+
+ @property
+ def count(self):
+ # _count can be None.
+ return self._count if self._count else 0
+
+
+errors = ErrorCollector()
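
A sketch of the deferred-error pattern described in the docstring (not part
of the patch; the file name and line numbers are made up):

    from mozpack.errors import AccumulatedErrors, errors

    try:
        with errors.accumulate():
            with errors.context("jar.mn", 3):
                errors.error("malformed entry")
            with errors.context("jar.mn", 7):
                errors.error("duplicate entry")
    except AccumulatedErrors:
        # Both messages were already printed to errors.out, with their
        # file/line context, before the single exception was raised.
        pass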
diff --git a/python/mozbuild/mozpack/executables.py b/python/mozbuild/mozpack/executables.py
new file mode 100644
index 0000000000..dd6849cabe
--- /dev/null
+++ b/python/mozbuild/mozpack/executables.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import struct
+import subprocess
+from io import BytesIO
+
+from mozpack.errors import errors
+
+MACHO_SIGNATURES = [
+ 0xFEEDFACE, # mach-o 32-bits big endian
+ 0xCEFAEDFE, # mach-o 32-bits little endian
+ 0xFEEDFACF, # mach-o 64-bits big endian
+ 0xCFFAEDFE, # mach-o 64-bits little endian
+]
+
+FAT_SIGNATURE = 0xCAFEBABE # mach-o FAT binary
+
+ELF_SIGNATURE = 0x7F454C46 # Elf binary
+
+UNKNOWN = 0
+MACHO = 1
+ELF = 2
+
+
+def get_type(path_or_fileobj):
+ """
+    Check the signature of the given file and return what kind of executable
+    it matches.
+ """
+ if hasattr(path_or_fileobj, "peek"):
+ f = BytesIO(path_or_fileobj.peek(8))
+ elif hasattr(path_or_fileobj, "read"):
+ f = path_or_fileobj
+ else:
+ f = open(path_or_fileobj, "rb")
+ signature = f.read(4)
+ if len(signature) < 4:
+ return UNKNOWN
+ signature = struct.unpack(">L", signature)[0]
+ if signature == ELF_SIGNATURE:
+ return ELF
+ if signature in MACHO_SIGNATURES:
+ return MACHO
+ if signature != FAT_SIGNATURE:
+ return UNKNOWN
+ # We have to sanity check the second four bytes, because Java class
+ # files use the same magic number as Mach-O fat binaries.
+ # This logic is adapted from file(1), which says that Mach-O uses
+ # these bytes to count the number of architectures within, while
+ # Java uses it for a version number. Conveniently, there are only
+ # 18 labelled Mach-O architectures, and Java's first released
+ # class format used the version 43.0.
+ num = f.read(4)
+ if len(num) < 4:
+ return UNKNOWN
+ num = struct.unpack(">L", num)[0]
+ if num < 20:
+ return MACHO
+ return UNKNOWN
+
+
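+# Worked example (illustrative, not part of the original module): the FAT
+# disambiguation above in action. Both headers start with 0xCAFEBABE; the
+# second 32-bit word tells a two-architecture Mach-O fat binary apart from
+# a Java class file (here with class format version 50):
+#
+#     get_type(BytesIO(struct.pack(">LL", 0xCAFEBABE, 2)))   # -> MACHO
+#     get_type(BytesIO(struct.pack(">LL", 0xCAFEBABE, 50)))  # -> UNKNOWN
+
+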
+def is_executable(path):
+ """
+ Return whether a given file path points to an executable or a library,
+ where an executable or library is identified by:
+ - the file extension on OS/2 and WINNT
+ - the file signature on OS/X and ELF systems (GNU/Linux, Android, BSD, Solaris)
+
+    As this function is intended for choosing between the ExecutableFile and
+    File classes in FileFinder, and choosing ExecutableFile only matters on
+    OS/2, OS/X, ELF and WINNT (in GCC build) systems, we don't bother
+    detecting other kinds of executables.
+ """
+ from buildconfig import substs
+
+ if not os.path.exists(path):
+ return False
+
+ if substs["OS_ARCH"] == "WINNT":
+ return path.lower().endswith((substs["DLL_SUFFIX"], substs["BIN_SUFFIX"]))
+
+ return get_type(path) != UNKNOWN
+
+
+def may_strip(path):
+ """
+ Return whether strip() should be called
+ """
+ from buildconfig import substs
+
+ # Bug 1658632: clang-11-based strip complains about d3dcompiler_47.dll.
+ # It's not clear why this happens, but as a quick fix just avoid stripping
+ # this DLL. It's not from our build anyway.
+ if "d3dcompiler" in path:
+ return False
+ return bool(substs.get("PKG_STRIP"))
+
+
+def strip(path):
+ """
+ Execute the STRIP command with STRIP_FLAGS on the given path.
+ """
+ from buildconfig import substs
+
+ strip = substs["STRIP"]
+ flags = substs.get("STRIP_FLAGS", [])
+ cmd = [strip] + flags + [path]
+ if subprocess.call(cmd) != 0:
+ errors.fatal("Error executing " + " ".join(cmd))
+
+
+def may_elfhack(path):
+ """
+ Return whether elfhack() should be called
+ """
+ # elfhack only supports libraries. We should check the ELF header for
+ # the right flag, but checking the file extension works too.
+ from buildconfig import substs
+
+ return (
+ "USE_ELF_HACK" in substs
+ and substs["USE_ELF_HACK"]
+ and path.endswith(substs["DLL_SUFFIX"])
+ and "COMPILE_ENVIRONMENT" in substs
+ and substs["COMPILE_ENVIRONMENT"]
+ )
+
+
+def elfhack(path):
+ """
+ Execute the elfhack command on the given path.
+ """
+ from buildconfig import topobjdir
+
+ cmd = [os.path.join(topobjdir, "build/unix/elfhack/elfhack"), path]
+ if subprocess.call(cmd) != 0:
+ errors.fatal("Error executing " + " ".join(cmd))
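+
+
+# Typical call pattern (illustration only; this mirrors what
+# mozpack.files.ExecutableFile.copy() does after copying a binary):
+#
+#     if may_strip(path):
+#         strip(path)
+#     if may_elfhack(path):
+#         elfhack(path)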
diff --git a/python/mozbuild/mozpack/files.py b/python/mozbuild/mozpack/files.py
new file mode 100644
index 0000000000..691c248b02
--- /dev/null
+++ b/python/mozbuild/mozpack/files.py
@@ -0,0 +1,1271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bisect
+import codecs
+import errno
+import inspect
+import os
+import platform
+import shutil
+import stat
+import subprocess
+import uuid
+from collections import OrderedDict
+from io import BytesIO
+from itertools import chain, takewhile
+from tarfile import TarFile, TarInfo
+from tempfile import NamedTemporaryFile, mkstemp
+
+import six
+from jsmin import JavascriptMinify
+
+import mozbuild.makeutil as makeutil
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import FileAvoidWrite, ensure_unicode, memoize
+from mozpack.chrome.manifest import ManifestEntry, ManifestInterfaces
+from mozpack.errors import ErrorMessage, errors
+from mozpack.executables import elfhack, is_executable, may_elfhack, may_strip, strip
+from mozpack.mozjar import JarReader
+
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+
+# For clean builds, copying files on win32 using CopyFile through ctypes is
+# ~2x as fast as using shutil.copyfile.
+if platform.system() != "Windows":
+ _copyfile = shutil.copyfile
+else:
+ import ctypes
+
+ _kernel32 = ctypes.windll.kernel32
+ _CopyFileA = _kernel32.CopyFileA
+ _CopyFileW = _kernel32.CopyFileW
+
+ def _copyfile(src, dest):
+ # False indicates `dest` should be overwritten if it exists already.
+ if isinstance(src, six.text_type) and isinstance(dest, six.text_type):
+ _CopyFileW(src, dest, False)
+ elif isinstance(src, str) and isinstance(dest, str):
+ _CopyFileA(src, dest, False)
+ else:
+ raise TypeError("mismatched path types!")
+
+
+# Helper function; ensures we always open files with the correct encoding when
+# opening them in text mode.
+def _open(path, mode="r"):
+ if six.PY3 and "b" not in mode:
+ return open(path, mode, encoding="utf-8")
+ return open(path, mode)
+
+
+class Dest(object):
+ """
+ Helper interface for BaseFile.copy. The interface works as follows:
+ - read() and write() can be used to sequentially read/write from the underlying file.
+ - a call to read() after a write() will re-open the underlying file and read from it.
+ - a call to write() after a read() will re-open the underlying file, emptying it, and write to it.
+ """
+
+ def __init__(self, path):
+ self.file = None
+ self.mode = None
+ self.path = ensure_unicode(path)
+
+ @property
+ def name(self):
+ return self.path
+
+ def read(self, length=-1):
+ if self.mode != "r":
+ self.file = _open(self.path, mode="rb")
+ self.mode = "r"
+ return self.file.read(length)
+
+ def write(self, data):
+ if self.mode != "w":
+ self.file = _open(self.path, mode="wb")
+ self.mode = "w"
+ to_write = six.ensure_binary(data)
+ return self.file.write(to_write)
+
+ def exists(self):
+ return os.path.exists(self.path)
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+ self.file.close()
+ self.file = None
+
+
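+# A small behavioral sketch (editor's illustration; the path is
+# hypothetical): switching between read() and write() re-opens the
+# underlying file, so a write() after a read() truncates it.
+#
+#     d = Dest("/tmp/example")
+#     d.write(b"hello")  # opens the file for writing
+#     d.read()           # re-opens the file for reading
+#     d.write(b"bye")    # re-opens for writing, emptying the file first
+#     d.close()
+
+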
+class BaseFile(object):
+ """
+    Base interface and helper for file copying. Derived classes may implement
+ their own copy function, or rely on BaseFile.copy using the open() member
+ function and/or the path property.
+ """
+
+ @staticmethod
+ def is_older(first, second):
+ """
+        Compares the modification time of two files, and returns whether the
+        ``first`` file is older than (or as old as) the ``second`` file.
+ """
+        # os.path.getmtime returns a result in seconds with precision up to
+        # the microsecond. But microsecond precision is more than
+        # shutil.copystat preserves (it only copies milliseconds), while whole
+        # seconds are not precise enough, so compare at the millisecond.
+ return int(os.path.getmtime(first) * 1000) <= int(
+ os.path.getmtime(second) * 1000
+ )
+
+ @staticmethod
+ def any_newer(dest, inputs):
+ """
+ Compares the modification time of ``dest`` to multiple input files, and
+ returns whether any of the ``inputs`` is newer (has a later mtime) than
+ ``dest``.
+ """
+        # os.path.getmtime returns a result in seconds with precision up to
+        # the microsecond. But microsecond precision is more than
+        # shutil.copystat preserves (it only copies milliseconds), while whole
+        # seconds are not precise enough, so compare at the millisecond.
+ dest_mtime = int(os.path.getmtime(dest) * 1000)
+ for input in inputs:
+ try:
+ src_mtime = int(os.path.getmtime(input) * 1000)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ # If an input file was removed, we should update.
+ return True
+ raise
+ if dest_mtime < src_mtime:
+ return True
+ return False
+
+ @staticmethod
+ def normalize_mode(mode):
+ # Normalize file mode:
+ # - keep file type (e.g. S_IFREG)
+ ret = stat.S_IFMT(mode)
+ # - expand user read and execute permissions to everyone
+ if mode & 0o0400:
+ ret |= 0o0444
+ if mode & 0o0100:
+ ret |= 0o0111
+ # - keep user write permissions
+ if mode & 0o0200:
+ ret |= 0o0200
+ # - leave away sticky bit, setuid, setgid
+ return ret
+
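+    # Worked example (editor's note): normalize_mode(0o100640) returns
+    # 0o100644 -- the regular-file type bit is kept, user read expands to
+    # read-for-all, user write is preserved, and no execute bits are added
+    # because the owner had none.
+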
+ def copy(self, dest, skip_if_older=True):
+ """
+ Copy the BaseFile content to the destination given as a string or a
+ Dest instance. Avoids replacing existing files if the BaseFile content
+ matches that of the destination, or in case of plain files, if the
+ destination is newer than the original file. This latter behaviour is
+ disabled when skip_if_older is False.
+ Returns whether a copy was actually performed (True) or not (False).
+ """
+ if isinstance(dest, six.string_types):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ can_skip_content_check = False
+ if not dest.exists():
+ can_skip_content_check = True
+ elif getattr(self, "path", None) and getattr(dest, "path", None):
+ if skip_if_older and BaseFile.is_older(self.path, dest.path):
+ return False
+ elif os.path.getsize(self.path) != os.path.getsize(dest.path):
+ can_skip_content_check = True
+
+ if can_skip_content_check:
+ if getattr(self, "path", None) and getattr(dest, "path", None):
+ # The destination directory must exist, or CopyFile will fail.
+ destdir = os.path.dirname(dest.path)
+ try:
+ os.makedirs(destdir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ _copyfile(self.path, dest.path)
+ shutil.copystat(self.path, dest.path)
+ else:
+ # Ensure the file is always created
+ if not dest.exists():
+ dest.write(b"")
+ shutil.copyfileobj(self.open(), dest)
+ return True
+
+ src = self.open()
+ accumulated_src_content = []
+ while True:
+ dest_content = dest.read(32768)
+ src_content = src.read(32768)
+ accumulated_src_content.append(src_content)
+ if len(dest_content) == len(src_content) == 0:
+ break
+ # If the read content differs between origin and destination,
+ # write what was read up to now, and copy the remainder.
+ if six.ensure_binary(dest_content) != six.ensure_binary(src_content):
+ dest.write(b"".join(accumulated_src_content))
+ shutil.copyfileobj(src, dest)
+ break
+ if hasattr(self, "path") and hasattr(dest, "path"):
+ shutil.copystat(self.path, dest.path)
+ return True
+
+ def open(self):
+ """
+        Return a file-like object from which the content of the associated
+        file can be read(). This is meant to be overridden in subclasses to
+        return a custom file-like object.
+ """
+ assert self.path is not None
+ return open(self.path, "rb")
+
+ def read(self):
+ raise NotImplementedError("BaseFile.read() not implemented. Bug 1170329.")
+
+ def size(self):
+ """Returns size of the entry.
+
+ Derived classes are highly encouraged to override this with a more
+ optimal implementation.
+ """
+ return len(self.read())
+
+ @property
+ def mode(self):
+ """
+ Return the file's unix mode, or None if it has no meaning.
+ """
+ return None
+
+ def inputs(self):
+ """
+ Return an iterable of the input file paths that impact this output file.
+ """
+ raise NotImplementedError("BaseFile.inputs() not implemented.")
+
+
+class File(BaseFile):
+ """
+ File class for plain files.
+ """
+
+ def __init__(self, path):
+ self.path = ensure_unicode(path)
+
+ @property
+ def mode(self):
+ """
+ Return the file's unix mode, as returned by os.stat().st_mode.
+ """
+ if platform.system() == "Windows":
+ return None
+ assert self.path is not None
+ mode = os.stat(self.path).st_mode
+ return self.normalize_mode(mode)
+
+ def read(self):
+ """Return the contents of the file."""
+ with open(self.path, "rb") as fh:
+ return fh.read()
+
+ def size(self):
+ return os.stat(self.path).st_size
+
+ def inputs(self):
+ return (self.path,)
+
+
+class ExecutableFile(File):
+ """
+ File class for executable and library files on OS/2, OS/X and ELF systems.
+ (see mozpack.executables.is_executable documentation).
+ """
+
+ def __init__(self, path):
+ File.__init__(self, path)
+
+ def copy(self, dest, skip_if_older=True):
+ real_dest = dest
+ if not isinstance(dest, six.string_types):
+ fd, dest = mkstemp()
+ os.close(fd)
+ os.remove(dest)
+ assert isinstance(dest, six.string_types)
+ # If File.copy didn't actually copy because dest is newer, check the
+ # file sizes. If dest is smaller, it means it is already stripped and
+ # elfhacked, so we can skip.
+ if not File.copy(self, dest, skip_if_older) and os.path.getsize(
+ self.path
+ ) > os.path.getsize(dest):
+ return False
+ try:
+ if may_strip(dest):
+ strip(dest)
+ if may_elfhack(dest):
+ elfhack(dest)
+ except ErrorMessage:
+ os.remove(dest)
+ raise
+
+ if real_dest != dest:
+ f = File(dest)
+ ret = f.copy(real_dest, skip_if_older)
+ os.remove(dest)
+ return ret
+ return True
+
+
+class AbsoluteSymlinkFile(File):
+ """File class that is copied by symlinking (if available).
+
+ This class only works if the target path is absolute.
+ """
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise ValueError("Symlink target not absolute: %s" % path)
+
+ File.__init__(self, path)
+
+ def copy(self, dest, skip_if_older=True):
+ assert isinstance(dest, six.string_types)
+
+ # The logic in this function is complicated by the fact that symlinks
+ # aren't universally supported. So, where symlinks aren't supported, we
+ # fall back to file copying. Keep in mind that symlink support is
+ # per-filesystem, not per-OS.
+
+ # Handle the simple case where symlinks are definitely not supported by
+ # falling back to file copy.
+ if not hasattr(os, "symlink"):
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # Always verify the symlink target path exists.
+ if not os.path.exists(self.path):
+ errors.fatal("Symlink target path does not exist: %s" % self.path)
+
+ st = None
+
+ try:
+ st = os.lstat(dest)
+ except OSError as ose:
+ if ose.errno != errno.ENOENT:
+ raise
+
+ # If the dest is a symlink pointing to us, we have nothing to do.
+ # If it's the wrong symlink, the filesystem must support symlinks,
+ # so we replace with a proper symlink.
+ if st and stat.S_ISLNK(st.st_mode):
+ link = os.readlink(dest)
+ if link == self.path:
+ return False
+
+ os.remove(dest)
+ os.symlink(self.path, dest)
+ return True
+
+ # If the destination doesn't exist, we try to create a symlink. If that
+ # fails, we fall back to copy code.
+ if not st:
+ try:
+ os.symlink(self.path, dest)
+ return True
+ except OSError:
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # Now the complicated part. If the destination exists, we could be
+ # replacing a file with a symlink. Or, the filesystem may not support
+ # symlinks. We want to minimize I/O overhead for performance reasons,
+ # so we keep the existing destination file around as long as possible.
+ # A lot of the system calls would be eliminated if we cached whether
+ # symlinks are supported. However, even if we performed a single
+ # up-front test of whether the root of the destination directory
+ # supports symlinks, there's no guarantee that all operations for that
+ # dest (or source) would be on the same filesystem and would support
+ # symlinks.
+ #
+ # Our strategy is to attempt to create a new symlink with a random
+ # name. If that fails, we fall back to copy mode. If that works, we
+ # remove the old destination and move the newly-created symlink into
+ # its place.
+
+ temp_dest = os.path.join(os.path.dirname(dest), str(uuid.uuid4()))
+ try:
+ os.symlink(self.path, temp_dest)
+ # TODO Figure out exactly how symlink creation fails and only trap
+ # that.
+ except EnvironmentError:
+ return File.copy(self, dest, skip_if_older=skip_if_older)
+
+ # If removing the original file fails, don't forget to clean up the
+ # temporary symlink.
+ try:
+ os.remove(dest)
+ except EnvironmentError:
+ os.remove(temp_dest)
+ raise
+
+ os.rename(temp_dest, dest)
+ return True
+
+
+class HardlinkFile(File):
+ """File class that is copied by hard linking (if available)
+
+ This is similar to the AbsoluteSymlinkFile, but with hard links. The symlink
+ implementation requires paths to be absolute, because they are resolved at
+ read time, which makes relative paths messy. Hard links resolve paths at
+ link-creation time, so relative paths are fine.
+ """
+
+ def copy(self, dest, skip_if_older=True):
+ assert isinstance(dest, six.string_types)
+
+ if not hasattr(os, "link"):
+ return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
+
+ try:
+ path_st = os.stat(self.path)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ errors.fatal("Hard link target path does not exist: %s" % self.path)
+ else:
+ raise
+
+ st = None
+ try:
+ st = os.lstat(dest)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if st:
+ # The dest already points to the right place.
+ if st.st_dev == path_st.st_dev and st.st_ino == path_st.st_ino:
+ return False
+ # The dest exists and it points to the wrong place
+ os.remove(dest)
+
+ # At this point, either the dest used to exist and we just deleted it,
+ # or it never existed. We can now safely create the hard link.
+ try:
+ os.link(self.path, dest)
+ except OSError:
+ # If we can't hard link, fall back to copying
+ return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
+ return True
+
+
+class ExistingFile(BaseFile):
+ """
+ File class that represents a file that may exist but whose content comes
+ from elsewhere.
+
+    The purpose of this class is to account for files that are installed via
+ external means. It is typically only used in manifests or in registries to
+ account for files.
+
+ When asked to copy, this class does nothing because nothing is known about
+ the source file/data.
+
+ Instances of this class come in two flavors: required and optional. If an
+ existing file is required, it must exist during copy() or an error is
+ raised.
+ """
+
+ def __init__(self, required):
+ self.required = required
+
+ def copy(self, dest, skip_if_older=True):
+ if isinstance(dest, six.string_types):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ if not self.required:
+ return
+
+ if not dest.exists():
+ errors.fatal("Required existing file doesn't exist: %s" % dest.path)
+
+ def inputs(self):
+ return ()
+
+
+class PreprocessedFile(BaseFile):
+ """
+ File class for a file that is preprocessed. PreprocessedFile.copy() runs
+ the preprocessor on the file to create the output.
+ """
+
+ def __init__(
+ self,
+ path,
+ depfile_path,
+ marker,
+ defines,
+ extra_depends=None,
+ silence_missing_directive_warnings=False,
+ ):
+ self.path = ensure_unicode(path)
+ self.depfile = ensure_unicode(depfile_path)
+ self.marker = marker
+ self.defines = defines
+ self.extra_depends = list(extra_depends or [])
+ self.silence_missing_directive_warnings = silence_missing_directive_warnings
+
+ def inputs(self):
+ pp = Preprocessor(defines=self.defines, marker=self.marker)
+ pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)
+
+ with _open(self.path, "r") as input:
+ with _open(os.devnull, "w") as output:
+ pp.processFile(input=input, output=output)
+
+ # This always yields at least self.path.
+ return pp.includes
+
+ def copy(self, dest, skip_if_older=True):
+ """
+ Invokes the preprocessor to create the destination file.
+ """
+ if isinstance(dest, six.string_types):
+ dest = Dest(dest)
+ else:
+ assert isinstance(dest, Dest)
+
+ # We have to account for the case where the destination exists and is a
+ # symlink to something. Since we know the preprocessor is certainly not
+ # going to create a symlink, we can just remove the existing one. If the
+ # destination is not a symlink, we leave it alone, since we're going to
+ # overwrite its contents anyway.
+ # If symlinks aren't supported at all, we can skip this step.
+ # See comment in AbsoluteSymlinkFile about Windows.
+ if hasattr(os, "symlink") and platform.system() != "Windows":
+ if os.path.islink(dest.path):
+ os.remove(dest.path)
+
+ pp_deps = set(self.extra_depends)
+
+ # If a dependency file was specified, and it exists, add any
+ # dependencies from that file to our list.
+ if self.depfile and os.path.exists(self.depfile):
+ target = mozpath.normpath(dest.name)
+ with _open(self.depfile, "rt") as fileobj:
+ for rule in makeutil.read_dep_makefile(fileobj):
+ if target in rule.targets():
+ pp_deps.update(rule.dependencies())
+
+ skip = False
+ if dest.exists() and skip_if_older:
+ # If a dependency file was specified, and it doesn't exist,
+ # assume that the preprocessor needs to be rerun. That will
+ # regenerate the dependency file.
+ if self.depfile and not os.path.exists(self.depfile):
+ skip = False
+ else:
+ skip = not BaseFile.any_newer(dest.path, pp_deps)
+
+ if skip:
+ return False
+
+ deps_out = None
+ if self.depfile:
+ deps_out = FileAvoidWrite(self.depfile)
+ pp = Preprocessor(defines=self.defines, marker=self.marker)
+ pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)
+
+ with _open(self.path, "r") as input:
+ pp.processFile(input=input, output=dest, depfile=deps_out)
+
+ dest.close()
+ if self.depfile:
+ deps_out.close()
+
+ return True
+
+
+class GeneratedFile(BaseFile):
+ """
+ File class for content with no previous existence on the filesystem.
+ """
+
+ def __init__(self, content):
+ self._content = content
+
+ @property
+ def content(self):
+ if inspect.isfunction(self._content):
+ self._content = self._content()
+ return six.ensure_binary(self._content)
+
+ @content.setter
+ def content(self, content):
+ self._content = content
+
+ def open(self):
+ return BytesIO(self.content)
+
+ def read(self):
+ return self.content
+
+ def size(self):
+ return len(self.content)
+
+ def inputs(self):
+ return ()
+
+
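+# A minimal sketch (editor's illustration): content may be given directly
+# or as a zero-argument function, which is invoked lazily on first access
+# and whose result is then cached.
+#
+#     f = GeneratedFile(b"hello")
+#     g = GeneratedFile(lambda: b"computed on first use")
+#     g.read()  # calls the lambda once; later reads reuse the result
+
+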
+class DeflatedFile(BaseFile):
+ """
+ File class for members of a jar archive. DeflatedFile.copy() effectively
+ extracts the file from the jar archive.
+ """
+
+ def __init__(self, file):
+ from mozpack.mozjar import JarFileReader
+
+ assert isinstance(file, JarFileReader)
+ self.file = file
+
+ def open(self):
+ self.file.seek(0)
+ return self.file
+
+
+class ExtractedTarFile(GeneratedFile):
+ """
+ File class for members of a tar archive. Contents of the underlying file
+ are extracted immediately and stored in memory.
+ """
+
+ def __init__(self, tar, info):
+ assert isinstance(info, TarInfo)
+ assert isinstance(tar, TarFile)
+ GeneratedFile.__init__(self, tar.extractfile(info).read())
+ self._unix_mode = self.normalize_mode(info.mode)
+
+ @property
+ def mode(self):
+ return self._unix_mode
+
+ def read(self):
+ return self.content
+
+
+class ManifestFile(BaseFile):
+ """
+ File class for a manifest file. It takes individual manifest entries (using
+ the add() and remove() member functions), and adjusts them to be relative
+ to the base path for the manifest, given at creation.
+ Example:
+ There is a manifest entry "content foobar foobar/content/" relative
+ to "foobar/chrome". When packaging, the entry will be stored in
+ jar:foobar/omni.ja!/chrome/chrome.manifest, which means the entry
+ will have to be relative to "chrome" instead of "foobar/chrome". This
+ doesn't really matter when serializing the entry, since this base path
+ is not written out, but it matters when moving the entry at the same
+ time, e.g. to jar:foobar/omni.ja!/chrome.manifest, which we don't do
+ currently but could in the future.
+ """
+
+ def __init__(self, base, entries=None):
+ self._base = base
+ self._entries = []
+ self._interfaces = []
+ for e in entries or []:
+ self.add(e)
+
+ def add(self, entry):
+ """
+ Add the given entry to the manifest. Entries are rebased at open() time
+ instead of add() time so that they can be more easily remove()d.
+ """
+ assert isinstance(entry, ManifestEntry)
+ if isinstance(entry, ManifestInterfaces):
+ self._interfaces.append(entry)
+ else:
+ self._entries.append(entry)
+
+ def remove(self, entry):
+ """
+ Remove the given entry from the manifest.
+ """
+ assert isinstance(entry, ManifestEntry)
+ if isinstance(entry, ManifestInterfaces):
+ self._interfaces.remove(entry)
+ else:
+ self._entries.remove(entry)
+
+ def open(self):
+ """
+        Return a file-like object from which the serialized content of the
+        manifest can be read().
+ """
+ content = "".join(
+ "%s\n" % e.rebase(self._base)
+ for e in chain(self._entries, self._interfaces)
+ )
+ return BytesIO(six.ensure_binary(content))
+
+ def __iter__(self):
+ """
+ Iterate over entries in the manifest file.
+ """
+ return chain(self._entries, self._interfaces)
+
+ def isempty(self):
+ """
+ Return whether there are manifest entries to write
+ """
+ return len(self._entries) + len(self._interfaces) == 0
+
+
+class MinifiedCommentStripped(BaseFile):
+ """
+ File class for content minified by stripping comments. This wraps around a
+ BaseFile instance, and removes lines starting with a # from its content.
+ """
+
+ def __init__(self, file):
+ assert isinstance(file, BaseFile)
+ self._file = file
+
+ def open(self):
+ """
+        Return a file-like object from which the minified content of the
+        underlying file can be read().
+ """
+ content = "".join(
+ l
+ for l in [six.ensure_text(s) for s in self._file.open().readlines()]
+ if not l.startswith("#")
+ )
+ return BytesIO(six.ensure_binary(content))
+
+
+class MinifiedJavaScript(BaseFile):
+ """
+ File class for minifying JavaScript files.
+ """
+
+ def __init__(self, file, verify_command=None):
+ assert isinstance(file, BaseFile)
+ self._file = file
+ self._verify_command = verify_command
+
+ def open(self):
+ output = six.StringIO()
+ minify = JavascriptMinify(
+ codecs.getreader("utf-8")(self._file.open()), output, quote_chars="'\"`"
+ )
+ minify.minify()
+ output.seek(0)
+ output_source = six.ensure_binary(output.getvalue())
+ output = BytesIO(output_source)
+
+ if not self._verify_command:
+ return output
+
+ input_source = self._file.open().read()
+
+ with NamedTemporaryFile("wb+") as fh1, NamedTemporaryFile("wb+") as fh2:
+ fh1.write(input_source)
+ fh2.write(output_source)
+ fh1.flush()
+ fh2.flush()
+
+ try:
+ args = list(self._verify_command)
+ args.extend([fh1.name, fh2.name])
+ subprocess.check_output(
+ args, stderr=subprocess.STDOUT, universal_newlines=True
+ )
+ except subprocess.CalledProcessError as e:
+ errors.warn(
+ "JS minification verification failed for %s:"
+ % (getattr(self._file, "path", "<unknown>"))
+ )
+ # Prefix each line with "Warning:" so mozharness doesn't
+ # think these error messages are real errors.
+ for line in e.output.splitlines():
+ errors.warn(line)
+
+ return self._file.open()
+
+ return output
+
+
+class BaseFinder(object):
+ def __init__(
+ self, base, minify=False, minify_js=False, minify_js_verify_command=None
+ ):
+ """
+ Initializes the instance with a reference base directory.
+
+ The optional minify argument specifies whether minification of code
+ should occur. minify_js is an additional option to control minification
+ of JavaScript. It requires minify to be True.
+
+ minify_js_verify_command can be used to optionally verify the results
+ of JavaScript minification. If defined, it is expected to be an iterable
+ that will constitute the first arguments to a called process which will
+ receive the filenames of the original and minified JavaScript files.
+ The invoked process can then verify the results. If minification is
+ rejected, the process exits with a non-0 exit code and the original
+ JavaScript source is used. An example value for this argument is
+ ('/path/to/js', '/path/to/verify/script.js').
+ """
+ if minify_js and not minify:
+ raise ValueError("minify_js requires minify.")
+
+ self.base = base
+ self._minify = minify
+ self._minify_js = minify_js
+ self._minify_js_verify_command = minify_js_verify_command
+
+ def find(self, pattern):
+ """
+ Yield path, BaseFile_instance pairs for all files under the base
+ directory and its subdirectories that match the given pattern. See the
+ mozpack.path.match documentation for a description of the handled
+ patterns.
+ """
+ while pattern.startswith("/"):
+ pattern = pattern[1:]
+ for p, f in self._find(pattern):
+ yield p, self._minify_file(p, f)
+
+ def get(self, path):
+ """Obtain a single file.
+
+ Where ``find`` is tailored towards matching multiple files, this method
+ is used for retrieving a single file. Use this method when performance
+ is critical.
+
+        Returns a ``BaseFile`` if exactly one file matches, or ``None``
+        otherwise.
+ """
+ files = list(self.find(path))
+ if len(files) != 1:
+ return None
+ return files[0][1]
+
+ def __iter__(self):
+ """
+ Iterates over all files under the base directory (excluding files
+ starting with a '.' and files at any level under a directory starting
+ with a '.').
+ for path, file in finder:
+ ...
+ """
+ return self.find("")
+
+ def __contains__(self, pattern):
+ raise RuntimeError(
+ "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__
+ )
+
+ def contains(self, pattern):
+ """
+ Return whether some files under the base directory match the given
+ pattern. See the mozpack.path.match documentation for a description of
+ the handled patterns.
+ """
+ return any(self.find(pattern))
+
+ def _minify_file(self, path, file):
+ """
+ Return an appropriate MinifiedSomething wrapper for the given BaseFile
+ instance (file), according to the file type (determined by the given
+ path), if the FileFinder was created with minification enabled.
+ Otherwise, just return the given BaseFile instance.
+ """
+ if not self._minify or isinstance(file, ExecutableFile):
+ return file
+
+ if path.endswith((".ftl", ".properties")):
+ return MinifiedCommentStripped(file)
+
+ if self._minify_js and path.endswith((".js", ".jsm")):
+ return MinifiedJavaScript(file, self._minify_js_verify_command)
+
+ return file
+
+ def _find_helper(self, pattern, files, file_getter):
+ """Generic implementation of _find.
+
+ A few *Finder implementations share logic for returning results.
+ This function implements the custom logic.
+
+ The ``file_getter`` argument is a callable that receives a path
+ that is known to exist. The callable should return a ``BaseFile``
+ instance.
+ """
+ if "*" in pattern:
+ for p in files:
+ if mozpath.match(p, pattern):
+ yield p, file_getter(p)
+ elif pattern == "":
+ for p in files:
+ yield p, file_getter(p)
+ elif pattern in files:
+ yield pattern, file_getter(pattern)
+ else:
+ for p in files:
+ if mozpath.basedir(p, [pattern]) == pattern:
+ yield p, file_getter(p)
+
+
+class FileFinder(BaseFinder):
+ """
+ Helper to get appropriate BaseFile instances from the file system.
+ """
+
+ def __init__(
+ self,
+ base,
+ find_executables=False,
+ ignore=(),
+ ignore_broken_symlinks=False,
+ find_dotfiles=False,
+ **kargs
+ ):
+ """
+ Create a FileFinder for files under the given base directory.
+
+ The find_executables argument determines whether the finder needs to
+ try to guess whether files are executables. Disabling this guessing
+ when not necessary can speed up the finder significantly.
+
+ ``ignore`` accepts an iterable of patterns to ignore. Entries are
+ strings that match paths relative to ``base`` using
+ ``mozpath.match()``. This means if an entry corresponds
+ to a directory, all files under that directory will be ignored. If
+ an entry corresponds to a file, that particular file will be ignored.
+ ``ignore_broken_symlinks`` is passed by the packager to work around an
+ issue with the build system not cleaning up stale files in some common
+ cases. See bug 1297381.
+ """
+ BaseFinder.__init__(self, base, **kargs)
+ self.find_dotfiles = find_dotfiles
+ self.find_executables = find_executables
+ self.ignore = ignore
+ self.ignore_broken_symlinks = ignore_broken_symlinks
+
+ def _find(self, pattern):
+ """
+ Actual implementation of FileFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitly requested.
+ """
+ if "*" in pattern:
+ return self._find_glob("", mozpath.split(pattern))
+ elif os.path.isdir(os.path.join(self.base, pattern)):
+ return self._find_dir(pattern)
+ else:
+ f = self.get(pattern)
+ return ((pattern, f),) if f else ()
+
+ def _find_dir(self, path):
+ """
+ Actual implementation of FileFinder.find() when the given pattern
+ corresponds to an existing directory under the base directory.
+        Ignores file names starting with a '.' under the given path. If the
+        path itself has components starting with a '.', they are not ignored.
+ """
+ for p in self.ignore:
+ if mozpath.match(path, p):
+ return
+
+        # The sorted() makes the output deterministic. Otherwise, we would
+        # likely depend on filesystem implementation details, such as
+        # inode ordering.
+ for p in sorted(os.listdir(os.path.join(self.base, path))):
+ if p.startswith("."):
+ if p in (".", ".."):
+ continue
+ if not self.find_dotfiles:
+ continue
+ for p_, f in self._find(mozpath.join(path, p)):
+ yield p_, f
+
+ def get(self, path):
+ srcpath = os.path.join(self.base, path)
+ if not os.path.lexists(srcpath):
+ return None
+
+ if self.ignore_broken_symlinks and not os.path.exists(srcpath):
+ return None
+
+ for p in self.ignore:
+ if mozpath.match(path, p):
+ return None
+
+ if self.find_executables and is_executable(srcpath):
+ return ExecutableFile(srcpath)
+ else:
+ return File(srcpath)
+
+ def _find_glob(self, base, pattern):
+ """
+ Actual implementation of FileFinder.find() when the given pattern
+ contains globbing patterns ('*' or '**'). This is meant to be an
+ equivalent of:
+ for p, f in self:
+ if mozpath.match(p, pattern):
+ yield p, f
+ but avoids scanning the entire tree.
+ """
+ if not pattern:
+ for p, f in self._find(base):
+ yield p, f
+ elif pattern[0] == "**":
+ for p, f in self._find(base):
+ if mozpath.match(p, mozpath.join(*pattern)):
+ yield p, f
+ elif "*" in pattern[0]:
+ if not os.path.exists(os.path.join(self.base, base)):
+ return
+
+ for p in self.ignore:
+ if mozpath.match(base, p):
+ return
+
+            # See above comment w.r.t. sorted() and deterministic behavior.
+ for p in sorted(os.listdir(os.path.join(self.base, base))):
+ if p.startswith(".") and not pattern[0].startswith("."):
+ continue
+ if mozpath.match(p, pattern[0]):
+ for p_, f in self._find_glob(mozpath.join(base, p), pattern[1:]):
+ yield p_, f
+ else:
+ for p, f in self._find_glob(mozpath.join(base, pattern[0]), pattern[1:]):
+ yield p, f
+
+
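+# A usage sketch (editor's illustration; the base directory and patterns
+# are hypothetical):
+#
+#     finder = FileFinder("/path/to/dist", ignore=["cache/*"])
+#     for path, f in finder.find("**/*.js"):
+#         ...  # path is relative to the base, f is a BaseFile instance
+
+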
+class JarFinder(BaseFinder):
+ """
+ Helper to get appropriate DeflatedFile instances from a JarReader.
+ """
+
+ def __init__(self, base, reader, **kargs):
+ """
+ Create a JarFinder for files in the given JarReader. The base argument
+ is used as an indication of the Jar file location.
+ """
+ assert isinstance(reader, JarReader)
+ BaseFinder.__init__(self, base, **kargs)
+ self._files = OrderedDict((f.filename, f) for f in reader)
+
+ def _find(self, pattern):
+ """
+ Actual implementation of JarFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ """
+ return self._find_helper(
+ pattern, self._files, lambda x: DeflatedFile(self._files[x])
+ )
+
+
+class TarFinder(BaseFinder):
+ """
+ Helper to get files from a TarFile.
+ """
+
+ def __init__(self, base, tar, **kargs):
+ """
+ Create a TarFinder for files in the given TarFile. The base argument
+ is used as an indication of the Tar file location.
+ """
+ assert isinstance(tar, TarFile)
+ self._tar = tar
+ BaseFinder.__init__(self, base, **kargs)
+ self._files = OrderedDict((f.name, f) for f in tar if f.isfile())
+
+ def _find(self, pattern):
+ """
+ Actual implementation of TarFinder.find(), dispatching to specialized
+ member functions depending on what kind of pattern was given.
+ """
+ return self._find_helper(
+ pattern, self._files, lambda x: ExtractedTarFile(self._tar, self._files[x])
+ )
+
+
+class ComposedFinder(BaseFinder):
+ """
+    Composes multiple file finders into a kind of virtual file system.
+
+    A ComposedFinder is initialized from a dictionary associating paths
+    to ``*Finder`` instances.
+
+ Note this could be optimized to be smarter than getting all the files
+ in advance.
+ """
+
+ def __init__(self, finders):
+ # Can't import globally, because of the dependency of mozpack.copier
+ # on this module.
+ from mozpack.copier import FileRegistry
+
+ self.files = FileRegistry()
+
+ for base, finder in sorted(six.iteritems(finders)):
+ if self.files.contains(base):
+ self.files.remove(base)
+ for p, f in finder.find(""):
+ self.files.add(mozpath.join(base, p), f)
+
+ def find(self, pattern):
+ for p in self.files.match(pattern):
+ yield p, self.files[p]
+
+
+class MercurialFile(BaseFile):
+ """File class for holding data from Mercurial."""
+
+ def __init__(self, client, rev, path):
+ self._content = client.cat(
+ [six.ensure_binary(path)], rev=six.ensure_binary(rev)
+ )
+
+ def open(self):
+ return BytesIO(six.ensure_binary(self._content))
+
+ def read(self):
+ return self._content
+
+
+class MercurialRevisionFinder(BaseFinder):
+ """A finder that operates on a specific Mercurial revision."""
+
+ def __init__(self, repo, rev=".", recognize_repo_paths=False, **kwargs):
+ """Create a finder attached to a specific revision in a repository.
+
+ If no revision is given, open the parent of the working directory.
+
+ ``recognize_repo_paths`` will enable a mode where ``.get()`` will
+ recognize full paths that include the repo's path. Typically Finder
+ instances are "bound" to a base directory and paths are relative to
+ that directory. This mode changes that. When this mode is activated,
+ ``.find()`` will not work! This mode exists to support the moz.build
+ reader, which uses absolute paths instead of relative paths. The reader
+ should eventually be rewritten to use relative paths and this hack
+ should be removed (TODO bug 1171069).
+ """
+ if not hglib:
+ raise Exception("hglib package not found")
+
+ super(MercurialRevisionFinder, self).__init__(base=repo, **kwargs)
+
+ self._root = mozpath.normpath(repo).rstrip("/")
+ self._recognize_repo_paths = recognize_repo_paths
+
+        # We change directories here, otherwise we would have to deal with
+        # relative paths.
+ oldcwd = os.getcwd()
+ os.chdir(self._root)
+ try:
+ self._client = hglib.open(path=repo, encoding=b"utf-8")
+ finally:
+ os.chdir(oldcwd)
+ self._rev = rev if rev is not None else "."
+ self._files = OrderedDict()
+
+ # Immediately populate the list of files in the repo since nearly every
+ # operation requires this list.
+ out = self._client.rawcommand(
+ [
+ b"files",
+ b"--rev",
+ six.ensure_binary(self._rev),
+ ]
+ )
+ for relpath in out.splitlines():
+ # Mercurial may use \ as path separator on Windows. So use
+ # normpath().
+ self._files[six.ensure_text(mozpath.normpath(relpath))] = None
+
+ def _find(self, pattern):
+ if self._recognize_repo_paths:
+ raise NotImplementedError("cannot use find with recognize_repo_path")
+
+ return self._find_helper(pattern, self._files, self._get)
+
+ def get(self, path):
+ path = mozpath.normpath(path)
+ if self._recognize_repo_paths:
+ if not path.startswith(self._root):
+ raise ValueError(
+ "lookups in recognize_repo_paths mode must be "
+ "prefixed with repo path: %s" % path
+ )
+ path = path[len(self._root) + 1 :]
+
+ try:
+ return self._get(path)
+ except KeyError:
+ return None
+
+ def _get(self, path):
+ # We lazy populate self._files because potentially creating tens of
+ # thousands of MercurialFile instances for every file in the repo is
+ # inefficient.
+ f = self._files[path]
+ if not f:
+ f = MercurialFile(self._client, self._rev, path)
+ self._files[path] = f
+
+ return f
+
+
+class FileListFinder(BaseFinder):
+ """Finder for a literal list of file names."""
+
+ def __init__(self, files):
+ """files must be a sorted list."""
+ self._files = files
+
+ @memoize
+ def _match(self, pattern):
+ """Return a sorted list of all files matching the given pattern."""
+ # We don't use the utility _find_helper method because it's not tuned
+ # for performance in the way that we would like this class to be. That's
+ # a possible avenue for refactoring here.
+ ret = []
+ # We do this as an optimization to figure out where in the sorted list
+ # to search and where to stop searching.
+ components = pattern.split("/")
+ prefix = "/".join(takewhile(lambda s: "*" not in s, components))
+ start = bisect.bisect_left(self._files, prefix)
+ for i in six.moves.range(start, len(self._files)):
+ f = self._files[i]
+ if not f.startswith(prefix):
+ break
+ # Skip hidden files while scanning.
+ if "/." in f[len(prefix) :]:
+ continue
+ if mozpath.match(f, pattern):
+ ret.append(f)
+ return ret
+
+ def find(self, pattern):
+ pattern = pattern.strip("/")
+ for path in self._match(pattern):
+ yield path, File(path)
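+
+
+# A minimal sketch (editor's illustration): FileListFinder matches patterns
+# against a pre-sorted list of paths instead of the filesystem.
+#
+#     finder = FileListFinder(["a/b.txt", "a/c.txt", "d.txt"])
+#     [p for p, _ in finder.find("a/*")]  # -> ["a/b.txt", "a/c.txt"]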
diff --git a/python/mozbuild/mozpack/macpkg.py b/python/mozbuild/mozpack/macpkg.py
new file mode 100644
index 0000000000..cbeacbb388
--- /dev/null
+++ b/python/mozbuild/mozpack/macpkg.py
@@ -0,0 +1,217 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# TODO: Eventually consolidate with mozpack.pkg module. This is kept separate
+# for now because of the vast difference in API, and to avoid churn for the
+# users of this module (docker images, macos SDK artifacts) when changes are
+# necessary in mozpack.pkg
+import bz2
+import concurrent.futures
+import io
+import lzma
+import os
+import struct
+import zlib
+from xml.etree.ElementTree import XML
+
+from mozbuild.util import ReadOnlyNamespace
+
+
+class ZlibFile(object):
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+ self.decompressor = zlib.decompressobj()
+ self.buf = b""
+
+ def read(self, length):
+ cutoff = min(length, len(self.buf))
+ result = self.buf[:cutoff]
+ self.buf = self.buf[cutoff:]
+ while len(result) < length:
+ buf = self.fileobj.read(io.DEFAULT_BUFFER_SIZE)
+ if not buf:
+ break
+ buf = self.decompressor.decompress(buf)
+ cutoff = min(length - len(result), len(buf))
+ result += buf[:cutoff]
+ self.buf += buf[cutoff:]
+ return result
+
+
+def unxar(fileobj):
+ magic = fileobj.read(4)
+ if magic != b"xar!":
+ raise Exception("Not a XAR?")
+
+ header_size = fileobj.read(2)
+ header_size = struct.unpack(">H", header_size)[0]
+ if header_size > 64:
+ raise Exception(
+ f"Don't know how to handle a {header_size} bytes XAR header size"
+ )
+ header_size -= 6 # what we've read so far.
+ header = fileobj.read(header_size)
+ if len(header) != header_size:
+ raise Exception("Failed to read XAR header")
+ (
+ version,
+ compressed_toc_len,
+ uncompressed_toc_len,
+ checksum_type,
+ ) = struct.unpack(">HQQL", header[:22])
+ if version != 1:
+ raise Exception(f"XAR version {version} not supported")
+ toc = fileobj.read(compressed_toc_len)
+ base = fileobj.tell()
+ if len(toc) != compressed_toc_len:
+ raise Exception("Failed to read XAR TOC")
+ toc = zlib.decompress(toc)
+ if len(toc) != uncompressed_toc_len:
+ raise Exception("Corrupted XAR?")
+ toc = XML(toc).find("toc")
+ for f in toc.findall("file"):
+ if f.find("type").text != "file":
+ continue
+ filename = f.find("name").text
+ data = f.find("data")
+ length = int(data.find("length").text)
+ size = int(data.find("size").text)
+ offset = int(data.find("offset").text)
+ encoding = data.find("encoding").get("style")
+ fileobj.seek(base + offset, os.SEEK_SET)
+ content = Take(fileobj, length)
+ if encoding == "application/octet-stream":
+ if length != size:
+ raise Exception(f"{length} != {size}")
+ elif encoding == "application/x-bzip2":
+ content = bz2.BZ2File(content)
+ elif encoding == "application/x-gzip":
+ # Despite the encoding saying gzip, it is in fact, a raw zlib stream.
+ content = ZlibFile(content)
+ else:
+ raise Exception(f"XAR encoding {encoding} not supported")
+
+ yield filename, content
+
+
+class Pbzx(object):
+ def __init__(self, fileobj):
+ magic = fileobj.read(4)
+ if magic != b"pbzx":
+ raise Exception("Not a PBZX payload?")
+ # The first thing in the file looks like the size of each
+ # decompressed chunk except the last one. It should match
+ # decompressed_size in all cases except last, but we don't
+ # check.
+ chunk_size = fileobj.read(8)
+ chunk_size = struct.unpack(">Q", chunk_size)[0]
+ executor = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
+ self.chunk_getter = executor.map(self._uncompress_chunk, self._chunker(fileobj))
+ self._init_one_chunk()
+
+ @staticmethod
+ def _chunker(fileobj):
+ while True:
+ header = fileobj.read(16)
+ if header == b"":
+ break
+ if len(header) != 16:
+ raise Exception("Corrupted PBZX payload?")
+ decompressed_size, compressed_size = struct.unpack(">QQ", header)
+ chunk = fileobj.read(compressed_size)
+ yield decompressed_size, compressed_size, chunk
+
+ @staticmethod
+ def _uncompress_chunk(data):
+ decompressed_size, compressed_size, chunk = data
+ if compressed_size != decompressed_size:
+ chunk = lzma.decompress(chunk)
+ if len(chunk) != decompressed_size:
+ raise Exception("Corrupted PBZX payload?")
+ return chunk
+
+ def _init_one_chunk(self):
+ self.offset = 0
+        self.chunk = next(self.chunk_getter, b"")
+
+ def read(self, length=None):
+ if length == 0:
+ return b""
+ if length and len(self.chunk) >= self.offset + length:
+ start = self.offset
+ self.offset += length
+ return self.chunk[start : self.offset]
+ else:
+ result = self.chunk[self.offset :]
+ self._init_one_chunk()
+ if self.chunk:
+ # XXX: suboptimal if length is larger than the chunk size
+ result += self.read(None if length is None else length - len(result))
+ return result
+
+
+class Take(object):
+ """
+    File object wrapper that allows reading at most a given number of bytes.
+ """
+
+ def __init__(self, fileobj, limit):
+ self.fileobj = fileobj
+ self.limit = limit
+
+ def read(self, length=None):
+ if length is None:
+ length = self.limit
+ else:
+ length = min(length, self.limit)
+ result = self.fileobj.read(length)
+ self.limit -= len(result)
+ return result
+
+
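+# A small sketch (editor's illustration): Take caps how much can be read
+# from the wrapped file object.
+#
+#     t = Take(io.BytesIO(b"abcdef"), 4)
+#     t.read(10)  # -> b"abcd" (capped at the 4-byte limit)
+#     t.read()    # -> b"" (limit exhausted)
+
+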
+def uncpio(fileobj):
+ while True:
+ magic = fileobj.read(6)
+        # CPIO payloads in mac pkg files use the portable ASCII format.
+ if magic != b"070707":
+ if magic.startswith(b"0707"):
+ raise Exception("Unsupported CPIO format")
+ raise Exception("Not a CPIO header")
+ header = fileobj.read(70)
+ (
+ dev,
+ ino,
+ mode,
+ uid,
+ gid,
+ nlink,
+ rdev,
+ mtime,
+ namesize,
+ filesize,
+ ) = struct.unpack(">6s6s6s6s6s6s6s11s6s11s", header)
+ dev = int(dev, 8)
+ ino = int(ino, 8)
+ mode = int(mode, 8)
+ nlink = int(nlink, 8)
+ namesize = int(namesize, 8)
+ filesize = int(filesize, 8)
+ name = fileobj.read(namesize)
+ if name[-1] != 0:
+ raise Exception("File name is not NUL terminated")
+ name = name[:-1]
+ if name == b"TRAILER!!!":
+ break
+
+ if b"/../" in name or name.startswith(b"../") or name == b"..":
+ raise Exception(".. is forbidden in file name")
+ if name.startswith(b"."):
+ name = name[1:]
+ if name.startswith(b"/"):
+ name = name[1:]
+ content = Take(fileobj, filesize)
+ yield name, ReadOnlyNamespace(mode=mode, nlink=nlink, dev=dev, ino=ino), content
+ # Ensure the content is totally consumed
+ while content.read(4096):
+ pass
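+
+
+# How the pieces compose (editor's illustration; file names are
+# hypothetical): a macOS .pkg is a XAR archive whose "Payload" member is a
+# PBZX-compressed CPIO stream, so extraction chains the helpers above:
+#
+#     with open("some.pkg", "rb") as fh:
+#         for name, content in unxar(fh):
+#             if name.endswith("Payload"):
+#                 for path, st, data in uncpio(Pbzx(content)):
+#                     ...  # write data.read() out under `path`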
diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py
new file mode 100644
index 0000000000..2df6c729ea
--- /dev/null
+++ b/python/mozbuild/mozpack/manifests.py
@@ -0,0 +1,483 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+from contextlib import contextmanager
+
+import six
+
+import mozpack.path as mozpath
+
+from .files import (
+ AbsoluteSymlinkFile,
+ ExistingFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ HardlinkFile,
+ PreprocessedFile,
+)
+
+
+# This probably belongs in a more generic module. Where?
+@contextmanager
+def _auto_fileobj(path, fileobj, mode="r"):
+ if path and fileobj:
+        raise AssertionError("Only one of path or fileobj may be defined.")
+
+    if not path and not fileobj:
+        raise AssertionError("Must specify one of path or fileobj.")
+
+ if path:
+ fileobj = open(path, mode)
+
+ try:
+ yield fileobj
+ finally:
+ if path:
+ fileobj.close()
+
+
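+# A small sketch (editor's illustration; the file name is hypothetical):
+# exactly one of path and fileobj must be given. A path is opened and
+# closed automatically, while a caller-supplied file object is left open.
+#
+#     with _auto_fileobj("install.manifest", None, "rt") as fh:
+#         data = fh.read()
+
+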
+class UnreadableInstallManifest(Exception):
+ """Raised when an invalid install manifest is parsed."""
+
+
+class InstallManifest(object):
+ """Describes actions to be used with a copier.FileCopier instance.
+
+ This class facilitates serialization and deserialization of data used to
+ construct a copier.FileCopier and to perform copy operations.
+
+ The manifest defines source paths, destination paths, and a mechanism by
+ which the destination file should come into existence.
+
+ Entries in the manifest correspond to the following types:
+
+ copy -- The file specified as the source path will be copied to the
+ destination path.
+
+ link -- The destination path will be a symlink or hardlink to the source
+ path. If symlinks are not supported, a copy will be performed.
+
+ exists -- The destination path is accounted for and won't be deleted by
+ the FileCopier. If the destination path doesn't exist, an error is
+ raised.
+
+ optional -- The destination path is accounted for and won't be deleted by
+ the FileCopier. No error is raised if the destination path does not
+ exist.
+
+ patternlink -- Paths matched by the expression in the source path
+ will be symlinked or hardlinked to the destination directory.
+
+ patterncopy -- Similar to patternlink except files are copied, not
+ symlinked/hardlinked.
+
+ preprocess -- The file specified at the source path will be run through
+ the preprocessor, and the output will be written to the destination
+ path.
+
+ content -- The destination file will be created with the given content.
+
+ Version 1 of the manifest was the initial version.
+    Version 2 added optional path support.
+ Version 3 added support for pattern entries.
+ Version 4 added preprocessed file support.
+ Version 5 added content support.
+ """
+
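+    # Illustrative serialized form (editor's note; the paths are made up):
+    # a version line, then one record per line with fields joined by
+    # FIELD_SEPARATOR (shown as \x1f):
+    #
+    #   5
+    #   2\x1fdist/bin/foo.txt\x1fsrc/foo.txt
+    #
+    # i.e. record type (2 = COPY), destination, then type-specific fields.
+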
+ CURRENT_VERSION = 5
+
+ FIELD_SEPARATOR = "\x1f"
+
+ # Negative values are reserved for non-actionable items, that is, metadata
+ # that doesn't describe files in the destination.
+ LINK = 1
+ COPY = 2
+ REQUIRED_EXISTS = 3
+ OPTIONAL_EXISTS = 4
+ PATTERN_LINK = 5
+ PATTERN_COPY = 6
+ PREPROCESS = 7
+ CONTENT = 8
+
+ def __init__(self, path=None, fileobj=None):
+        """Create a new InstallManifest.
+
+ If path is defined, the manifest will be populated with data from the
+ file path.
+
+ If fileobj is defined, the manifest will be populated with data read
+ from the specified file object.
+
+        path and fileobj cannot both be defined.
+ """
+ self._dests = {}
+ self._source_files = set()
+
+ if path or fileobj:
+ with _auto_fileobj(path, fileobj, "r") as fh:
+ self._source_files.add(fh.name)
+ self._load_from_fileobj(fh)
+
+ def _load_from_fileobj(self, fileobj):
+ version = fileobj.readline().rstrip()
+ if version not in ("1", "2", "3", "4", "5"):
+ raise UnreadableInstallManifest("Unknown manifest version: %s" % version)
+
+ for line in fileobj:
+ # Explicitly strip on \n so we don't strip out the FIELD_SEPARATOR
+ # as well.
+ line = line.rstrip("\n")
+
+ fields = line.split(self.FIELD_SEPARATOR)
+
+ record_type = int(fields[0])
+
+ if record_type == self.LINK:
+ dest, source = fields[1:]
+ self.add_link(source, dest)
+ continue
+
+ if record_type == self.COPY:
+ dest, source = fields[1:]
+ self.add_copy(source, dest)
+ continue
+
+ if record_type == self.REQUIRED_EXISTS:
+ _, path = fields
+ self.add_required_exists(path)
+ continue
+
+ if record_type == self.OPTIONAL_EXISTS:
+ _, path = fields
+ self.add_optional_exists(path)
+ continue
+
+ if record_type == self.PATTERN_LINK:
+ _, base, pattern, dest = fields[1:]
+ self.add_pattern_link(base, pattern, dest)
+ continue
+
+ if record_type == self.PATTERN_COPY:
+ _, base, pattern, dest = fields[1:]
+ self.add_pattern_copy(base, pattern, dest)
+ continue
+
+ if record_type == self.PREPROCESS:
+ dest, source, deps, marker, defines, warnings = fields[1:]
+
+ self.add_preprocess(
+ source,
+ dest,
+ deps,
+ marker,
+ self._decode_field_entry(defines),
+ silence_missing_directive_warnings=bool(int(warnings)),
+ )
+ continue
+
+ if record_type == self.CONTENT:
+ dest, content = fields[1:]
+
+ self.add_content(
+ six.ensure_text(self._decode_field_entry(content)), dest
+ )
+ continue
+
+ # Don't fail for non-actionable items, allowing
+ # forward-compatibility with those we will add in the future.
+ if record_type >= 0:
+ raise UnreadableInstallManifest("Unknown record type: %d" % record_type)
+
+ def __len__(self):
+ return len(self._dests)
+
+ def __contains__(self, item):
+ return item in self._dests
+
+ def __eq__(self, other):
+ return isinstance(other, InstallManifest) and self._dests == other._dests
+
+ def __neq__(self, other):
+ return not self.__eq__(other)
+
+ def __ior__(self, other):
+ if not isinstance(other, InstallManifest):
+ raise ValueError("Can only | with another instance of InstallManifest.")
+
+ self.add_entries_from(other)
+
+ return self
+
+ def _encode_field_entry(self, data):
+ """Converts an object into a format that can be stored in the manifest file.
+
+ Complex data types, such as ``dict``, need to be converted into a text
+ representation before they can be written to a file.
+ """
+ return json.dumps(data, sort_keys=True)
+
+ def _decode_field_entry(self, data):
+ """Restores an object from a format that can be stored in the manifest file.
+
+ Complex data types, such as ``dict``, need to be converted into a text
+ representation before they can be written to a file.
+ """
+ return json.loads(data)
+
+ def write(self, path=None, fileobj=None, expand_pattern=False):
+ """Serialize this manifest to a file or file object.
+
+ If path is specified, that file will be written to. If fileobj is specified,
+ the serialized content will be written to that file object.
+
+ It is an error if both are specified.
+ """
+ with _auto_fileobj(path, fileobj, "wt") as fh:
+ fh.write("%d\n" % self.CURRENT_VERSION)
+
+ for dest in sorted(self._dests):
+ entry = self._dests[dest]
+
+ if expand_pattern and entry[0] in (
+ self.PATTERN_LINK,
+ self.PATTERN_COPY,
+ ):
+ type, base, pattern, dest = entry
+ type = self.LINK if type == self.PATTERN_LINK else self.COPY
+ finder = FileFinder(base)
+ paths = [f[0] for f in finder.find(pattern)]
+ for path in paths:
+ source = mozpath.join(base, path)
+ parts = ["%d" % type, mozpath.join(dest, path), source]
+ fh.write(
+ "%s\n"
+ % self.FIELD_SEPARATOR.join(
+ six.ensure_text(p) for p in parts
+ )
+ )
+ else:
+ parts = ["%d" % entry[0], dest]
+ parts.extend(entry[1:])
+ fh.write(
+ "%s\n"
+ % self.FIELD_SEPARATOR.join(six.ensure_text(p) for p in parts)
+ )
+
+ def add_link(self, source, dest):
+ """Add a link to this manifest.
+
+ dest will be either a symlink or hardlink to source.
+ """
+ self._add_entry(dest, (self.LINK, source))
+
+ def add_copy(self, source, dest):
+ """Add a copy to this manifest.
+
+ source will be copied to dest.
+ """
+ self._add_entry(dest, (self.COPY, source))
+
+ def add_required_exists(self, dest):
+ """Record that a destination file must exist.
+
+ This effectively prevents the listed file from being deleted.
+ """
+ self._add_entry(dest, (self.REQUIRED_EXISTS,))
+
+ def add_optional_exists(self, dest):
+ """Record that a destination file may exist.
+
+ This effectively prevents the listed file from being deleted. Unlike a
+ "required exists" file, files of this type do not raise errors if the
+ destination file does not exist.
+ """
+ self._add_entry(dest, (self.OPTIONAL_EXISTS,))
+
+ def add_pattern_link(self, base, pattern, dest):
+ """Add a pattern match that results in links being created.
+
+ A ``FileFinder`` will be created with its base set to ``base``
+ and ``FileFinder.find()`` will be called with ``pattern`` to discover
+ source files. Each source file will be either symlinked or hardlinked
+ under ``dest``.
+
+ Filenames under ``dest`` are constructed by taking the path fragment
+ after ``base`` and concatenating it with ``dest``. e.g.
+
+ <base>/foo/bar.h -> <dest>/foo/bar.h
+ """
+ self._add_entry(
+ mozpath.join(dest, pattern), (self.PATTERN_LINK, base, pattern, dest)
+ )
+
+ def add_pattern_copy(self, base, pattern, dest):
+ """Add a pattern match that results in copies.
+
+ See ``add_pattern_link()`` for usage.
+ """
+ self._add_entry(
+ mozpath.join(dest, pattern), (self.PATTERN_COPY, base, pattern, dest)
+ )
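+
+    # A minimal sketch of the pattern APIs above (all paths are
+    # hypothetical, for illustration only):
+    #
+    #     m = InstallManifest()
+    #     m.add_pattern_link("/src/include", "**/*.h", "dist/include")
+    #     # /src/include/foo/bar.h would be linked as dist/include/foo/bar.h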
+
+ def add_preprocess(
+ self,
+ source,
+ dest,
+ deps,
+ marker="#",
+ defines={},
+ silence_missing_directive_warnings=False,
+ ):
+ """Add a preprocessed file to this manifest.
+
+ ``source`` will be passed through preprocessor.py, and the output will be
+ written to ``dest``.
+ """
+ self._add_entry(
+ dest,
+ (
+ self.PREPROCESS,
+ source,
+ deps,
+ marker,
+ self._encode_field_entry(defines),
+ "1" if silence_missing_directive_warnings else "0",
+ ),
+ )
+
+ def add_content(self, content, dest):
+ """Add a file with the given content."""
+ self._add_entry(
+ dest,
+ (
+ self.CONTENT,
+ self._encode_field_entry(content),
+ ),
+ )
+
+ def _add_entry(self, dest, entry):
+ if dest in self._dests:
+ raise ValueError("Item already in manifest: %s" % dest)
+
+ self._dests[dest] = entry
+
+ def add_entries_from(self, other, base=""):
+ """
+ Copy data from another mozpack.copier.InstallManifest
+ instance, adding an optional base prefix to the destination.
+
+        This allows merging two manifests into a single manifest, or
+        taking the tagged union of two manifests.
+ """
+ # We must copy source files to ourselves so extra dependencies from
+ # the preprocessor are taken into account. Ideally, we would track
+ # which source file each entry came from. However, this is more
+        # complicated and not yet implemented. The current implementation
+        # will result in over-invalidation, possibly leading to performance
+        # loss.
+ self._source_files |= other._source_files
+
+ for dest in sorted(other._dests):
+ new_dest = mozpath.join(base, dest) if base else dest
+ entry = other._dests[dest]
+ if entry[0] in (self.PATTERN_LINK, self.PATTERN_COPY):
+ entry_type, entry_base, entry_pattern, entry_dest = entry
+ new_entry_dest = mozpath.join(base, entry_dest) if base else entry_dest
+ new_entry = (entry_type, entry_base, entry_pattern, new_entry_dest)
+ else:
+ new_entry = tuple(entry)
+
+ self._add_entry(new_dest, new_entry)
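+
+    # A short merging sketch (the two source manifests are hypothetical):
+    #
+    #     merged = InstallManifest()
+    #     merged |= app_manifest                 # same as add_entries_from()
+    #     merged.add_entries_from(locale_manifest, base="l10n")
+    #     # locale entries now land under the "l10n/" destination prefix.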
+
+ def populate_registry(self, registry, defines_override={}, link_policy="symlink"):
+ """Populate a mozpack.copier.FileRegistry instance with data from us.
+
+        The caller supplies a FileRegistry instance (or at least something
+        that conforms to its interface), and that instance is populated with
+        data from this manifest.
+
+ Defines can be given to override the ones in the manifest for
+ preprocessing.
+
+ The caller can set a link policy. This determines whether symlinks,
+ hardlinks, or copies are used for LINK and PATTERN_LINK.
+ """
+ assert link_policy in ("symlink", "hardlink", "copy")
+ for dest in sorted(self._dests):
+ entry = self._dests[dest]
+ install_type = entry[0]
+
+ if install_type == self.LINK:
+ if link_policy == "symlink":
+ cls = AbsoluteSymlinkFile
+ elif link_policy == "hardlink":
+ cls = HardlinkFile
+ else:
+ cls = File
+ registry.add(dest, cls(entry[1]))
+ continue
+
+ if install_type == self.COPY:
+ registry.add(dest, File(entry[1]))
+ continue
+
+ if install_type == self.REQUIRED_EXISTS:
+ registry.add(dest, ExistingFile(required=True))
+ continue
+
+ if install_type == self.OPTIONAL_EXISTS:
+ registry.add(dest, ExistingFile(required=False))
+ continue
+
+ if install_type in (self.PATTERN_LINK, self.PATTERN_COPY):
+ _, base, pattern, dest = entry
+ finder = FileFinder(base)
+ paths = [f[0] for f in finder.find(pattern)]
+
+ if install_type == self.PATTERN_LINK:
+ if link_policy == "symlink":
+ cls = AbsoluteSymlinkFile
+ elif link_policy == "hardlink":
+ cls = HardlinkFile
+ else:
+ cls = File
+ else:
+ cls = File
+
+ for path in paths:
+ source = mozpath.join(base, path)
+ registry.add(mozpath.join(dest, path), cls(source))
+
+ continue
+
+ if install_type == self.PREPROCESS:
+ defines = self._decode_field_entry(entry[4])
+ if defines_override:
+ defines.update(defines_override)
+ registry.add(
+ dest,
+ PreprocessedFile(
+ entry[1],
+ depfile_path=entry[2],
+ marker=entry[3],
+ defines=defines,
+ extra_depends=self._source_files,
+ silence_missing_directive_warnings=bool(int(entry[5])),
+ ),
+ )
+
+ continue
+
+ if install_type == self.CONTENT:
+                # GeneratedFile expects bytes, and the decoded JSON value is
+                # text, so encode it to UTF-8.
+ content = self._decode_field_entry(entry[1]).encode("utf-8")
+ registry.add(dest, GeneratedFile(content))
+ continue
+
+ raise Exception(
+ "Unknown install type defined in manifest: %d" % install_type
+ )
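+
+
+# A minimal end-to-end sketch of populate_registry(); the paths and the
+# destination directory are illustrative only:
+#
+#     from mozpack.copier import FileCopier
+#
+#     manifest = InstallManifest()
+#     manifest.add_link("/src/browser/app.ini", "app.ini")
+#     copier = FileCopier()
+#     manifest.populate_registry(copier, link_policy="hardlink")
+#     copier.copy("dist/bin")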
diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py
new file mode 100644
index 0000000000..6500ebfcec
--- /dev/null
+++ b/python/mozbuild/mozpack/mozjar.py
@@ -0,0 +1,842 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import struct
+import zlib
+from collections import OrderedDict
+from io import BytesIO, UnsupportedOperation
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import six
+
+import mozpack.path as mozpath
+from mozbuild.util import ensure_bytes
+
+JAR_STORED = ZIP_STORED
+JAR_DEFLATED = ZIP_DEFLATED
+MAX_WBITS = 15
+
+
+class JarReaderError(Exception):
+ """Error type for Jar reader errors."""
+
+
+class JarWriterError(Exception):
+ """Error type for Jar writer errors."""
+
+
+class JarStruct(object):
+ """
+    Helper used to define ZIP archive raw data structures. Data structures
+    handled by this helper all start with a magic number, defined in the
+    subclass's MAGIC field as a 32-bit unsigned integer, followed by data
+    structured as described in the subclass's STRUCT field.
+
+ The STRUCT field contains a list of (name, type) pairs where name is a
+ field name, and the type can be one of 'uint32', 'uint16' or one of the
+ field names. In the latter case, the field is considered to be a string
+ buffer with a length given in that field.
+ For example,
+
+ .. code-block:: python
+
+ STRUCT = [
+ ('version', 'uint32'),
+ ('filename_size', 'uint16'),
+ ('filename', 'filename_size')
+ ]
+
+    describes a structure with a 'version' 32-bit unsigned integer field,
+    followed by a 'filename_size' 16-bit unsigned integer field, followed by a
+ filename_size-long string buffer 'filename'.
+
+    Fields that hold the size of other fields are not stored in objects. In the
+    above example, an instance of such a subclass would only have two attributes:
+ - obj['version']
+ - obj['filename']
+
+ filename_size would be obtained with len(obj['filename']).
+
+    Instances of JarStruct subclasses can be initialized either from existing
+    data (deserialized), or with empty fields.
+ """
+
+ TYPE_MAPPING = {"uint32": (b"I", 4), "uint16": (b"H", 2)}
+
+ def __init__(self, data=None):
+ """
+ Create an instance from the given data. Data may be omitted to create
+ an instance with empty fields.
+ """
+ assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
+ self.size_fields = set(
+ t for t in six.itervalues(self.STRUCT) if t not in JarStruct.TYPE_MAPPING
+ )
+ self._values = {}
+ if data:
+ self._init_data(data)
+ else:
+ self._init_empty()
+
+ def _init_data(self, data):
+ """
+ Initialize an instance from data, following the data structure
+ described in self.STRUCT. The self.MAGIC signature is expected at
+ data[:4].
+ """
+ assert data is not None
+ self.signature, size = JarStruct.get_data("uint32", data)
+ if self.signature != self.MAGIC:
+ raise JarReaderError("Bad magic")
+ offset = size
+        # For all fields used as other fields' sizes, keep track of their
+        # values separately.
+ sizes = dict((t, 0) for t in self.size_fields)
+ for name, t in six.iteritems(self.STRUCT):
+ if t in JarStruct.TYPE_MAPPING:
+ value, size = JarStruct.get_data(t, data[offset:])
+ else:
+ size = sizes[t]
+ value = data[offset : offset + size]
+ if isinstance(value, memoryview):
+ value = value.tobytes()
+ if name not in sizes:
+ self._values[name] = value
+ else:
+ sizes[name] = value
+ offset += size
+
+ def _init_empty(self):
+ """
+ Initialize an instance with empty fields.
+ """
+ self.signature = self.MAGIC
+ for name, t in six.iteritems(self.STRUCT):
+ if name in self.size_fields:
+ continue
+ self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ""
+
+ @staticmethod
+ def get_data(type, data):
+ """
+        Deserialize a single field of the given type (must be one of
+        JarStruct.TYPE_MAPPING) from the beginning of the given data. Return
+        a (value, size) pair, where size is the number of bytes consumed.
+ """
+ assert type in JarStruct.TYPE_MAPPING
+ assert data is not None
+ format, size = JarStruct.TYPE_MAPPING[type]
+ data = data[:size]
+ if isinstance(data, memoryview):
+ data = data.tobytes()
+ return struct.unpack(b"<" + format, data)[0], size
+
+ def serialize(self):
+ """
+ Serialize the data structure according to the data structure definition
+ from self.STRUCT.
+ """
+ serialized = struct.pack(b"<I", self.signature)
+ sizes = dict(
+ (t, name)
+ for name, t in six.iteritems(self.STRUCT)
+ if t not in JarStruct.TYPE_MAPPING
+ )
+ for name, t in six.iteritems(self.STRUCT):
+ if t in JarStruct.TYPE_MAPPING:
+ format, size = JarStruct.TYPE_MAPPING[t]
+ if name in sizes:
+ value = len(self[sizes[name]])
+ else:
+ value = self[name]
+ serialized += struct.pack(b"<" + format, value)
+ else:
+ serialized += ensure_bytes(self[name])
+ return serialized
+
+ @property
+ def size(self):
+ """
+ Return the size of the data structure, given the current values of all
+ variable length fields.
+ """
+ size = JarStruct.TYPE_MAPPING["uint32"][1]
+ for name, type in six.iteritems(self.STRUCT):
+ if type in JarStruct.TYPE_MAPPING:
+ size += JarStruct.TYPE_MAPPING[type][1]
+ else:
+ size += len(self[name])
+ return size
+
+ def __getitem__(self, key):
+ return self._values[key]
+
+ def __setitem__(self, key, value):
+ if key not in self.STRUCT:
+ raise KeyError(key)
+ if key in self.size_fields:
+ raise AttributeError("can't set attribute")
+ self._values[key] = value
+
+ def __contains__(self, key):
+ return key in self._values
+
+ def __iter__(self):
+ return six.iteritems(self._values)
+
+ def __repr__(self):
+ return "<%s %s>" % (
+ self.__class__.__name__,
+ " ".join("%s=%s" % (n, v) for n, v in self),
+ )
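+
+# For illustration: parsing one of the concrete structures defined below
+# from a bytes buffer and re-serializing it round-trips the raw data,
+# and size fields are derived automatically on assignment:
+#
+#     end = JarCdirEnd(data)          # data: bytes starting with the magic
+#     assert end.serialize() == data[: end.size]
+#     end["comment"] = b"hello"       # comment_size is computed on serialize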
+
+
+class JarCdirEnd(JarStruct):
+ """
+ End of central directory record.
+ """
+
+ MAGIC = 0x06054B50
+ STRUCT = OrderedDict(
+ [
+ ("disk_num", "uint16"),
+ ("cdir_disk", "uint16"),
+ ("disk_entries", "uint16"),
+ ("cdir_entries", "uint16"),
+ ("cdir_size", "uint32"),
+ ("cdir_offset", "uint32"),
+ ("comment_size", "uint16"),
+ ("comment", "comment_size"),
+ ]
+ )
+
+
+CDIR_END_SIZE = JarCdirEnd().size
+
+
+class JarCdirEntry(JarStruct):
+ """
+ Central directory file header
+ """
+
+ MAGIC = 0x02014B50
+ STRUCT = OrderedDict(
+ [
+ ("creator_version", "uint16"),
+ ("min_version", "uint16"),
+ ("general_flag", "uint16"),
+ ("compression", "uint16"),
+ ("lastmod_time", "uint16"),
+ ("lastmod_date", "uint16"),
+ ("crc32", "uint32"),
+ ("compressed_size", "uint32"),
+ ("uncompressed_size", "uint32"),
+ ("filename_size", "uint16"),
+ ("extrafield_size", "uint16"),
+ ("filecomment_size", "uint16"),
+ ("disknum", "uint16"),
+ ("internal_attr", "uint16"),
+ ("external_attr", "uint32"),
+ ("offset", "uint32"),
+ ("filename", "filename_size"),
+ ("extrafield", "extrafield_size"),
+ ("filecomment", "filecomment_size"),
+ ]
+ )
+
+
+class JarLocalFileHeader(JarStruct):
+ """
+ Local file header
+ """
+
+ MAGIC = 0x04034B50
+ STRUCT = OrderedDict(
+ [
+ ("min_version", "uint16"),
+ ("general_flag", "uint16"),
+ ("compression", "uint16"),
+ ("lastmod_time", "uint16"),
+ ("lastmod_date", "uint16"),
+ ("crc32", "uint32"),
+ ("compressed_size", "uint32"),
+ ("uncompressed_size", "uint32"),
+ ("filename_size", "uint16"),
+ ("extra_field_size", "uint16"),
+ ("filename", "filename_size"),
+ ("extra_field", "extra_field_size"),
+ ]
+ )
+
+
+class JarFileReader(object):
+ """
+ File-like class for use by JarReader to give access to individual files
+ within a Jar archive.
+ """
+
+ def __init__(self, header, data):
+ """
+        Initialize a JarFileReader. header is the local file header
+        corresponding to the file in the jar archive, and data is a buffer
+        containing the file data.
+ """
+ assert header["compression"] in [JAR_DEFLATED, JAR_STORED]
+ self._data = data
+ # Copy some local file header fields.
+ for name in ["compressed_size", "uncompressed_size", "crc32"]:
+ setattr(self, name, header[name])
+ self.filename = six.ensure_text(header["filename"])
+ self.compressed = header["compression"] != JAR_STORED
+ self.compress = header["compression"]
+
+ def readable(self):
+ return True
+
+ def read(self, length=-1):
+ """
+ Read some amount of uncompressed data.
+ """
+ return self.uncompressed_data.read(length)
+
+ def readinto(self, b):
+ """
+ Read bytes into a pre-allocated, writable bytes-like object `b` and return
+ the number of bytes read.
+ """
+ return self.uncompressed_data.readinto(b)
+
+ def readlines(self):
+ """
+ Return a list containing all the lines of data in the uncompressed
+ data.
+ """
+ return self.read().splitlines(True)
+
+ def __iter__(self):
+ """
+ Iterator, to support the "for line in fileobj" constructs.
+ """
+ return iter(self.readlines())
+
+ def seek(self, pos, whence=os.SEEK_SET):
+ """
+ Change the current position in the uncompressed data. Subsequent reads
+ will start from there.
+ """
+ return self.uncompressed_data.seek(pos, whence)
+
+ def close(self):
+ """
+ Free the uncompressed data buffer.
+ """
+ self.uncompressed_data.close()
+
+ @property
+ def closed(self):
+ return self.uncompressed_data.closed
+
+ @property
+ def compressed_data(self):
+ """
+ Return the raw compressed data.
+ """
+ return self._data[: self.compressed_size]
+
+ @property
+ def uncompressed_data(self):
+ """
+ Return the uncompressed data.
+ """
+ if hasattr(self, "_uncompressed_data"):
+ return self._uncompressed_data
+ data = self.compressed_data
+ if self.compress == JAR_STORED:
+ data = data.tobytes()
+ elif self.compress == JAR_DEFLATED:
+ data = zlib.decompress(data.tobytes(), -MAX_WBITS)
+ else:
+ assert False # Can't be another value per __init__
+ if len(data) != self.uncompressed_size:
+ raise JarReaderError("Corrupted file? %s" % self.filename)
+ self._uncompressed_data = BytesIO(data)
+ return self._uncompressed_data
+
+
+class JarReader(object):
+ """
+ Class with methods to read Jar files. Can open standard jar files as well
+ as Mozilla jar files (see further details in the JarWriter documentation).
+ """
+
+ def __init__(self, file=None, fileobj=None, data=None):
+ """
+ Opens the given file as a Jar archive. Use the given file-like object
+ if one is given instead of opening the given file name.
+ """
+ if fileobj:
+ data = fileobj.read()
+        elif file:
+            with open(file, "rb") as fh:
+                data = fh.read()
+ self._data = memoryview(data)
+ # The End of Central Directory Record has a variable size because of
+ # comments it may contain, so scan for it from the end of the file.
+ offset = -CDIR_END_SIZE
+ while True:
+ signature = JarStruct.get_data("uint32", self._data[offset:])[0]
+ if signature == JarCdirEnd.MAGIC:
+ break
+ if offset == -len(self._data):
+ raise JarReaderError("Not a jar?")
+ offset -= 1
+ self._cdir_end = JarCdirEnd(self._data[offset:])
+
+ def close(self):
+ """
+ Free some resources associated with the Jar.
+ """
+ del self._data
+
+ @property
+ def compression(self):
+ entries = self.entries
+ if not entries:
+ return JAR_STORED
+ return max(f["compression"] for f in six.itervalues(entries))
+
+ @property
+ def entries(self):
+ """
+ Return an ordered dict of central directory entries, indexed by
+ filename, in the order they appear in the Jar archive central
+ directory. Directory entries are skipped.
+ """
+ if hasattr(self, "_entries"):
+ return self._entries
+ preload = 0
+ if self.is_optimized:
+ preload = JarStruct.get_data("uint32", self._data)[0]
+ entries = OrderedDict()
+ offset = self._cdir_end["cdir_offset"]
+ for e in six.moves.xrange(self._cdir_end["cdir_entries"]):
+ entry = JarCdirEntry(self._data[offset:])
+ offset += entry.size
+ # Creator host system. 0 is MSDOS, 3 is Unix
+ host = entry["creator_version"] >> 8
+ # External attributes values depend on host above. On Unix the
+ # higher bits are the stat.st_mode value. On MSDOS, the lower bits
+ # are the FAT attributes.
+ xattr = entry["external_attr"]
+ # Skip directories
+ if (host == 0 and xattr & 0x10) or (host == 3 and xattr & (0o040000 << 16)):
+ continue
+ entries[six.ensure_text(entry["filename"])] = entry
+ if entry["offset"] < preload:
+ self._last_preloaded = six.ensure_text(entry["filename"])
+ self._entries = entries
+ return entries
+
+ @property
+ def is_optimized(self):
+ """
+ Return whether the jar archive is optimized.
+ """
+        # In optimized jars, the central directory is at the beginning of the
+        # file, after a single 32-bit value, which is the length of the
+        # preloaded data.
+ return self._cdir_end["cdir_offset"] == JarStruct.TYPE_MAPPING["uint32"][1]
+
+ @property
+ def last_preloaded(self):
+ """
+ Return the name of the last file that is set to be preloaded.
+ See JarWriter documentation for more details on preloading.
+ """
+ if hasattr(self, "_last_preloaded"):
+ return self._last_preloaded
+        self._last_preloaded = None
+        # Accessing the entries property populates _last_preloaded as a side
+        # effect.
+        self.entries
+ return self._last_preloaded
+
+ def _getreader(self, entry):
+ """
+ Helper to create a JarFileReader corresponding to the given central
+ directory entry.
+ """
+ header = JarLocalFileHeader(self._data[entry["offset"] :])
+ for key, value in entry:
+ if key in header and header[key] != value:
+ raise JarReaderError(
+ "Central directory and file header "
+ + "mismatch. Corrupted archive?"
+ )
+ return JarFileReader(header, self._data[entry["offset"] + header.size :])
+
+ def __iter__(self):
+ """
+        Iterate over all files in the Jar archive, in the form of
+        JarFileReaders::
+
+            for file in jarReader:
+                ...
+        """
+ for entry in six.itervalues(self.entries):
+ yield self._getreader(entry)
+
+ def __getitem__(self, name):
+ """
+ Get a JarFileReader for the given file name.
+ """
+ return self._getreader(self.entries[name])
+
+ def __contains__(self, name):
+ """
+ Return whether the given file name appears in the Jar archive.
+ """
+ return name in self.entries
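+
+
+# A brief reading sketch, assuming "omni.ja" exists and contains a
+# "chrome.manifest" member:
+#
+#     reader = JarReader("omni.ja")
+#     for entry in reader:
+#         print(entry.filename, entry.uncompressed_size)
+#     data = reader["chrome.manifest"].read()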
+
+
+class JarWriter(object):
+ """
+    Class with methods to write Jar files. Can write more-or-less standard jar
+    archives as well as jar archives optimized for Gecko. See the documentation
+    for the finish() member function for a description of both layouts.
+ """
+
+ def __init__(self, file=None, fileobj=None, compress=True, compress_level=9):
+ """
+ Initialize a Jar archive in the given file. Use the given file-like
+ object if one is given instead of opening the given file name.
+        The compress option determines the default behavior for storing data
+        in the jar archive. ``compress_level`` defines the zlib compression
+        level. It must be a value between 0 and 9 and defaults to 9, the
+        highest and slowest level of compression. Optimizing the archive
+        layout for Gecko is controlled through the preload() method.
+ """
+ if fileobj:
+ self._data = fileobj
+ else:
+ self._data = open(file, "wb")
+ if compress is True:
+ compress = JAR_DEFLATED
+ self._compress = compress
+ self._compress_level = compress_level
+ self._contents = OrderedDict()
+ self._last_preloaded = None
+
+ def __enter__(self):
+ """
+ Context manager __enter__ method for JarWriter.
+ """
+ return self
+
+ def __exit__(self, type, value, tb):
+ """
+ Context manager __exit__ method for JarWriter.
+ """
+ self.finish()
+
+ def finish(self):
+ """
+ Flush and close the Jar archive.
+
+ Standard jar archives are laid out like the following:
+ - Local file header 1
+ - File data 1
+ - Local file header 2
+ - File data 2
+ - (...)
+ - Central directory entry pointing at Local file header 1
+ - Central directory entry pointing at Local file header 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+
+ Jar archives optimized for Gecko are laid out like the following:
+ - 32-bits unsigned integer giving the amount of data to preload.
+ - Central directory entry pointing at Local file header 1
+ - Central directory entry pointing at Local file header 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+ - Local file header 1
+ - File data 1
+ - Local file header 2
+ - File data 2
+ - (...)
+ - End of central directory, pointing at first central directory
+ entry.
+
+        The duplication of the End of central directory is to accommodate some
+ Zip reading tools that want an end of central directory structure to
+ follow the central directory entries.
+ """
+ offset = 0
+ headers = {}
+ preload_size = 0
+ # Prepare central directory entries
+ for entry, content in six.itervalues(self._contents):
+ header = JarLocalFileHeader()
+ for name in entry.STRUCT:
+ if name in header:
+ header[name] = entry[name]
+ entry["offset"] = offset
+ offset += len(content) + header.size
+ if six.ensure_text(entry["filename"]) == self._last_preloaded:
+ preload_size = offset
+ headers[entry] = header
+ # Prepare end of central directory
+ end = JarCdirEnd()
+ end["disk_entries"] = len(self._contents)
+ end["cdir_entries"] = end["disk_entries"]
+ end["cdir_size"] = six.moves.reduce(
+ lambda x, y: x + y[0].size, self._contents.values(), 0
+ )
+ # On optimized archives, store the preloaded size and the central
+ # directory entries, followed by the first end of central directory.
+ if preload_size:
+ end["cdir_offset"] = 4
+ offset = end["cdir_size"] + end["cdir_offset"] + end.size
+ preload_size += offset
+ self._data.write(struct.pack("<I", preload_size))
+ for entry, _ in six.itervalues(self._contents):
+ entry["offset"] += offset
+ self._data.write(entry.serialize())
+ self._data.write(end.serialize())
+ # Store local file entries followed by compressed data
+ for entry, content in six.itervalues(self._contents):
+ self._data.write(headers[entry].serialize())
+ if isinstance(content, memoryview):
+ self._data.write(content.tobytes())
+ else:
+ self._data.write(content)
+        # On non-optimized archives, store the central directory entries.
+ if not preload_size:
+ end["cdir_offset"] = offset
+ for entry, _ in six.itervalues(self._contents):
+ self._data.write(entry.serialize())
+ # Store the end of central directory.
+ self._data.write(end.serialize())
+ self._data.close()
+
+ def add(self, name, data, compress=None, mode=None, skip_duplicates=False):
+ """
+ Add a new member to the jar archive, with the given name and the given
+ data.
+        The compress option indicates how the given data should be compressed
+        (one of JAR_STORED or JAR_DEFLATED), or None to compress according
+        to the default defined when creating the JarWriter. True and
+        False are allowed values for backwards compatibility, mapping,
+        respectively, to JAR_DEFLATED and JAR_STORED.
+ When the data should be compressed, it is only really compressed if
+ the compressed size is smaller than the uncompressed size.
+ The mode option gives the unix permissions that should be stored for the
+ jar entry, which defaults to 0o100644 (regular file, u+rw, g+r, o+r) if
+ not specified.
+        If skip_duplicates is set to True, duplicate members are silently
+        skipped instead of raising an exception.
+        The given data may be a buffer, a file-like instance, a Deflater or a
+        JarFileReader instance. The latter two make it possible to avoid
+        decompressing data only to recompress it.
+ """
+ name = mozpath.normsep(six.ensure_text(name))
+
+ if name in self._contents and not skip_duplicates:
+ raise JarWriterError("File %s already in JarWriter" % name)
+ if compress is None:
+ compress = self._compress
+ if compress is True:
+ compress = JAR_DEFLATED
+ if compress is False:
+ compress = JAR_STORED
+ if isinstance(data, (JarFileReader, Deflater)) and data.compress == compress:
+ deflater = data
+ else:
+ deflater = Deflater(compress, compress_level=self._compress_level)
+ if isinstance(data, (six.binary_type, six.string_types)):
+ deflater.write(data)
+ elif hasattr(data, "read"):
+ try:
+ data.seek(0)
+ except (UnsupportedOperation, AttributeError):
+ pass
+ deflater.write(data.read())
+ else:
+ raise JarWriterError("Don't know how to handle %s" % type(data))
+ # Fill a central directory entry for this new member.
+ entry = JarCdirEntry()
+ entry["creator_version"] = 20
+ if mode is None:
+ # If no mode is given, default to u+rw, g+r, o+r.
+ mode = 0o000644
+ if not mode & 0o777000:
+ # If no file type is given, default to regular file.
+ mode |= 0o100000
+ # Set creator host system (upper byte of creator_version) to 3 (Unix) so
+ # mode is honored when there is one.
+ entry["creator_version"] |= 3 << 8
+ entry["external_attr"] = (mode & 0xFFFF) << 16
+ if deflater.compressed:
+ entry["min_version"] = 20 # Version 2.0 supports deflated streams
+ entry["general_flag"] = 2 # Max compression
+ entry["compression"] = deflater.compress
+ else:
+ entry["min_version"] = 10 # Version 1.0 for stored streams
+ entry["general_flag"] = 0
+ entry["compression"] = JAR_STORED
+ # January 1st, 2010. See bug 592369.
+ entry["lastmod_date"] = ((2010 - 1980) << 9) | (1 << 5) | 1
+ entry["lastmod_time"] = 0
+ entry["crc32"] = deflater.crc32
+ entry["compressed_size"] = deflater.compressed_size
+ entry["uncompressed_size"] = deflater.uncompressed_size
+ entry["filename"] = six.ensure_binary(name)
+ self._contents[name] = entry, deflater.compressed_data
+
+ def preload(self, files):
+ """
+ Set which members of the jar archive should be preloaded when opening
+ the archive in Gecko. This reorders the members according to the order
+ of given list.
+ """
+ new_contents = OrderedDict()
+ for f in files:
+ if f not in self._contents:
+ continue
+ new_contents[f] = self._contents[f]
+ self._last_preloaded = f
+ for f in self._contents:
+ if f not in new_contents:
+ new_contents[f] = self._contents[f]
+ self._contents = new_contents
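+
+
+# A writing sketch showing preloading (file names are illustrative):
+#
+#     with JarWriter("example.jar") as jar:
+#         jar.add("chrome.manifest", b"manifest chrome/chrome.manifest\n")
+#         jar.add("chrome/data.bin", b"\x00" * 4096, compress=JAR_STORED)
+#         jar.preload(["chrome.manifest"])
+#     # Because preload() was called, finish() emits the Gecko-optimized
+#     # layout described in its docstring.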
+
+
+class Deflater(object):
+ """
+ File-like interface to zlib compression. The data is actually not
+ compressed unless the compressed form is smaller than the uncompressed
+ data.
+ """
+
+ def __init__(self, compress=True, compress_level=9):
+ """
+ Initialize a Deflater. The compress argument determines how to
+ compress.
+ """
+ self._data = BytesIO()
+ if compress is True:
+ compress = JAR_DEFLATED
+ elif compress is False:
+ compress = JAR_STORED
+ self.compress = compress
+ if compress == JAR_DEFLATED:
+ self._deflater = zlib.compressobj(compress_level, zlib.DEFLATED, -MAX_WBITS)
+ self._deflated = BytesIO()
+ else:
+ assert compress == JAR_STORED
+ self._deflater = None
+ self.crc32 = 0
+
+ def write(self, data):
+ """
+ Append a buffer to the Deflater.
+ """
+ if isinstance(data, memoryview):
+ data = data.tobytes()
+ data = six.ensure_binary(data)
+ self._data.write(data)
+
+ if self.compress:
+ if self._deflater:
+ self._deflated.write(self._deflater.compress(data))
+ else:
+ raise JarWriterError("Can't write after flush")
+
+ self.crc32 = zlib.crc32(data, self.crc32) & 0xFFFFFFFF
+
+ def close(self):
+ """
+ Close the Deflater.
+ """
+ self._data.close()
+ if self.compress:
+ self._deflated.close()
+
+ def _flush(self):
+ """
+ Flush the underlying zlib compression object.
+ """
+ if self.compress and self._deflater:
+ self._deflated.write(self._deflater.flush())
+ self._deflater = None
+
+ @property
+ def compressed(self):
+ """
+ Return whether the data should be compressed.
+ """
+ return self._compressed_size < self.uncompressed_size
+
+ @property
+ def _compressed_size(self):
+ """
+ Return the real compressed size of the data written to the Deflater. If
+ the Deflater is set not to compress, the uncompressed size is returned.
+ Otherwise, the actual compressed size is returned, whether or not it is
+ a win over the uncompressed size.
+ """
+ if self.compress:
+ self._flush()
+ return self._deflated.tell()
+ return self.uncompressed_size
+
+ @property
+ def compressed_size(self):
+ """
+ Return the compressed size of the data written to the Deflater. If the
+ Deflater is set not to compress, the uncompressed size is returned.
+ Otherwise, if the data should not be compressed (the real compressed
+ size is bigger than the uncompressed size), return the uncompressed
+ size.
+ """
+ if self.compressed:
+ return self._compressed_size
+ return self.uncompressed_size
+
+ @property
+ def uncompressed_size(self):
+ """
+ Return the size of the data written to the Deflater.
+ """
+ return self._data.tell()
+
+ @property
+ def compressed_data(self):
+ """
+ Return the compressed data, if the data should be compressed (real
+ compressed size smaller than the uncompressed size), or the
+ uncompressed data otherwise.
+ """
+ if self.compressed:
+ return self._deflated.getvalue()
+ return self._data.getvalue()
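+
+
+# A small sketch of the Deflater's "only compress when it wins" behavior:
+#
+#     d = Deflater(compress=True)
+#     d.write(b"a" * 1000)           # highly compressible
+#     assert d.compressed            # the deflated form is smaller
+#
+#     d = Deflater(compress=True)
+#     d.write(b"x")                  # too small for deflate to win
+#     assert not d.compressed        # will be stored uncompressed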
+
+
+class JarLog(dict):
+ """
+ Helper to read the file Gecko generates when setting MOZ_JAR_LOG_FILE.
+ The jar log is then available as a dict with the jar path as key, and
+ the corresponding access log as a list value. Only the first access to
+ a given member of a jar is stored.
+ """
+
+ def __init__(self, file=None, fileobj=None):
+ if not fileobj:
+ fileobj = open(file, "r")
+        for line in fileobj:
+            parts = line.strip().split(None, 1)
+            if len(parts) != 2:
+                continue
+            jar, path = parts
+ entry = self.setdefault(jar, [])
+ if path not in entry:
+ entry.append(path)
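+
+
+# A usage sketch, assuming Gecko wrote "jarlog.txt" (via MOZ_JAR_LOG_FILE)
+# with lines of the form "<jar path> <member path>":
+#
+#     log = JarLog("jarlog.txt")
+#     for jar, members in log.items():
+#         print(jar, len(members))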
diff --git a/python/mozbuild/mozpack/packager/__init__.py b/python/mozbuild/mozpack/packager/__init__.py
new file mode 100644
index 0000000000..83b12e4696
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/__init__.py
@@ -0,0 +1,445 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+import json
+import os
+import re
+from collections import deque
+
+import six
+
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestChrome,
+ ManifestInterfaces,
+ is_manifest,
+ parse_manifest,
+)
+from mozpack.errors import errors
+
+
+class Component(object):
+ """
+ Class that represents a component in a package manifest.
+ """
+
+ def __init__(self, name, destdir=""):
+        if " " in name:
+ errors.fatal('Malformed manifest: space in component name "%s"' % name)
+ self._name = name
+ self._destdir = destdir
+
+ def __repr__(self):
+ s = self.name
+ if self.destdir:
+ s += ' destdir="%s"' % self.destdir
+ return s
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def destdir(self):
+ return self._destdir
+
+ @staticmethod
+ def _triples(lst):
+ """
+ Split [1, 2, 3, 4, 5, 6, 7] into [(1, 2, 3), (4, 5, 6)].
+ """
+ return zip(*[iter(lst)] * 3)
+
+ KEY_VALUE_RE = re.compile(
+ r"""
+ \s* # optional whitespace.
+ ([a-zA-Z0-9_]+) # key.
+ \s*=\s* # optional space around =.
+ "([^"]*)" # value without surrounding quotes.
+ (?:\s+|$)
+ """,
+ re.VERBOSE,
+ )
+
+ @staticmethod
+ def _split_options(string):
+ """
+ Split 'key1="value1" key2="value2"' into
+ {'key1':'value1', 'key2':'value2'}.
+
+ Returned keys and values are all strings.
+
+ Throws ValueError if the input is malformed.
+ """
+ options = {}
+ splits = Component.KEY_VALUE_RE.split(string)
+ if len(splits) % 3 != 1:
+ # This should never happen -- we expect to always split
+ # into ['', ('key', 'val', '')*].
+ raise ValueError("Bad input")
+ if splits[0]:
+ raise ValueError("Unrecognized input " + splits[0])
+ for key, val, no_match in Component._triples(splits[1:]):
+ if no_match:
+ raise ValueError("Unrecognized input " + no_match)
+ options[key] = val
+ return options
+
+ @staticmethod
+ def _split_component_and_options(string):
+ """
+ Split 'name key1="value1" key2="value2"' into
+ ('name', {'key1':'value1', 'key2':'value2'}).
+
+ Returned name, keys and values are all strings.
+
+ Raises ValueError if the input is malformed.
+ """
+ splits = string.strip().split(None, 1)
+ if not splits:
+ raise ValueError("No component found")
+ component = splits[0].strip()
+ if not component:
+ raise ValueError("No component found")
+        if not re.match(r"[a-zA-Z0-9_\-]+$", component):
+ raise ValueError("Bad component name " + component)
+ options = Component._split_options(splits[1]) if len(splits) > 1 else {}
+ return component, options
+
+ @staticmethod
+ def from_string(string):
+ """
+ Create a component from a string.
+ """
+ try:
+ name, options = Component._split_component_and_options(string)
+ except ValueError as e:
+ errors.fatal("Malformed manifest: %s" % e)
+ return
+ destdir = options.pop("destdir", "")
+ if options:
+ errors.fatal(
+ "Malformed manifest: options %s not recognized" % options.keys()
+ )
+ return Component(name, destdir=destdir)
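+
+    # For illustration, from_string() parses component headers such as:
+    #
+    #     Component.from_string('browser destdir="browser"')
+    #     # -> Component("browser", destdir="browser")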
+
+
+class PackageManifestParser(object):
+ """
+ Class for parsing of a package manifest, after preprocessing.
+
+    A package manifest is a list of file paths, with some syntactic sugar:
+ [] designates a toplevel component. Example: [xpcom]
+ - in front of a file specifies it to be removed
+ * wildcard support
+ ** expands to all files and zero or more directories
+ ; file comment
+
+ The parser takes input from the preprocessor line by line, and pushes
+ parsed information to a sink object.
+
+ The add and remove methods of the sink object are called with the
+ current Component instance and a path.
+ """
+
+ def __init__(self, sink):
+ """
+ Initialize the package manifest parser with the given sink.
+ """
+ self._component = Component("")
+ self._sink = sink
+
+    def handle_line(self, line):
+        """
+        Handle a line of input and push the parsed information to the sink
+        object.
+        """
+        # Strip surrounding whitespace and skip empty lines and comments.
+        line = line.strip()
+        if not line or line.startswith(";"):
+            return
+        if line.startswith("[") and line.endswith("]"):
+            self._component = Component.from_string(line[1:-1])
+        elif line.startswith("-"):
+            line = line[1:]
+            self._sink.remove(self._component, line)
+        elif "," in line:
+            errors.fatal("Incompatible syntax")
+        else:
+            self._sink.add(self._component, line)
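+
+    # An illustrative manifest fragment; the preprocessor feeds each line
+    # to handle_line() (see the class docstring for the syntax):
+    #
+    #     ; core XPCOM files
+    #     [xpcom]
+    #     bin/libxpcom.so
+    #     bin/components/**
+    #     -bin/removed-file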
+
+
+class PreprocessorOutputWrapper(object):
+ """
+ File-like helper to handle the preprocessor output and send it to a parser.
+ The parser's handle_line method is called in the relevant errors.context.
+ """
+
+ def __init__(self, preprocessor, parser):
+ self._parser = parser
+ self._pp = preprocessor
+
+    def write(self, line):
+        with errors.context(self._pp.context["FILE"], self._pp.context["LINE"]):
+            self._parser.handle_line(line)
+
+
+def preprocess(input, parser, defines={}):
+ """
+ Preprocess the file-like input with the given defines, and send the
+ preprocessed output line by line to the given parser.
+ """
+ pp = Preprocessor()
+ pp.context.update(defines)
+ pp.do_filter("substitution")
+ pp.out = PreprocessorOutputWrapper(pp, parser)
+ pp.do_include(input)
+
+
+def preprocess_manifest(sink, manifest, defines={}):
+ """
+ Preprocess the given file-like manifest with the given defines, and push
+ the parsed information to a sink. See PackageManifestParser documentation
+ for more details on the sink.
+ """
+ preprocess(manifest, PackageManifestParser(sink), defines)
+
+
+class CallDeque(deque):
+ """
+ Queue of function calls to make.
+ """
+
+ def append(self, function, *args):
+ deque.append(self, (errors.get_context(), function, args))
+
+ def execute(self):
+ while True:
+ try:
+ context, function, args = self.popleft()
+ except IndexError:
+ return
+ if context:
+ with errors.context(context[0], context[1]):
+ function(*args)
+ else:
+ function(*args)
+
+
+class SimplePackager(object):
+ """
+ Helper used to translate and buffer instructions from the
+    SimpleManifestSink to a formatter. Formatters expect some information
+    (such as base directories) to be given first, but the simple manifest
+    contents can't guarantee it before the end of the input, so instructions
+    are buffered and replayed when close() is called.
+ """
+
+ def __init__(self, formatter):
+ self.formatter = formatter
+ # Queue for formatter.add_interfaces()/add_manifest() calls.
+ self._queue = CallDeque()
+ # Queue for formatter.add_manifest() calls for ManifestChrome.
+ self._chrome_queue = CallDeque()
+ # Queue for formatter.add() calls.
+ self._file_queue = CallDeque()
+ # All paths containing addons. (key is path, value is whether it
+ # should be packed or unpacked)
+ self._addons = {}
+ # All manifest paths imported.
+ self._manifests = set()
+ # All manifest paths included from some other manifest.
+ self._included_manifests = {}
+ self._closed = False
+
+    # Parsing RDF is complex and would require an external library to do
+    # properly. Just go with a hackish but probably sufficient regexp.
+ UNPACK_ADDON_RE = re.compile(
+ r"""(?:
+ <em:unpack>true</em:unpack>
+ |em:unpack=(?P<quote>["']?)true(?P=quote)
+ )""",
+ re.VERBOSE,
+ )
+
+ def add(self, path, file):
+ """
+ Add the given BaseFile instance with the given path.
+ """
+ assert not self._closed
+ if is_manifest(path):
+ self._add_manifest_file(path, file)
+ elif path.endswith(".xpt"):
+ self._queue.append(self.formatter.add_interfaces, path, file)
+ else:
+ self._file_queue.append(self.formatter.add, path, file)
+ if mozpath.basename(path) == "install.rdf":
+ addon = True
+ install_rdf = six.ensure_text(file.open().read())
+ if self.UNPACK_ADDON_RE.search(install_rdf):
+ addon = "unpacked"
+ self._add_addon(mozpath.dirname(path), addon)
+ elif mozpath.basename(path) == "manifest.json":
+ manifest = six.ensure_text(file.open().read())
+ try:
+ parsed = json.loads(manifest)
+            except ValueError:
+                parsed = None
+ if isinstance(parsed, dict) and "manifest_version" in parsed:
+ self._add_addon(mozpath.dirname(path), True)
+
+ def _add_addon(self, path, addon_type):
+ """
+        Add the given path to the collection of addons if a parent
+        directory is not already in the collection.
+ """
+ if mozpath.basedir(path, self._addons) is not None:
+ return
+
+ for dir in self._addons:
+ if mozpath.basedir(dir, [path]) is not None:
+ del self._addons[dir]
+ break
+
+ self._addons[path] = addon_type
+
+ def _add_manifest_file(self, path, file):
+ """
+ Add the given BaseFile with manifest file contents with the given path.
+ """
+ self._manifests.add(path)
+ base = ""
+ if hasattr(file, "path"):
+ # Find the directory the given path is relative to.
+ b = mozpath.normsep(file.path)
+ if b.endswith("/" + path) or b == path:
+ base = os.path.normpath(b[: -len(path)])
+ for e in parse_manifest(base, path, codecs.getreader("utf-8")(file.open())):
+ # ManifestResources need to be given after ManifestChrome, so just
+ # put all ManifestChrome in a separate queue to make them first.
+ if isinstance(e, ManifestChrome):
+ # e.move(e.base) just returns a clone of the entry.
+ self._chrome_queue.append(self.formatter.add_manifest, e.move(e.base))
+ elif not isinstance(e, (Manifest, ManifestInterfaces)):
+ self._queue.append(self.formatter.add_manifest, e.move(e.base))
+ # If a binary component is added to an addon, prevent the addon
+ # from being packed.
+ if isinstance(e, ManifestBinaryComponent):
+ addon = mozpath.basedir(e.base, self._addons)
+ if addon:
+ self._addons[addon] = "unpacked"
+ if isinstance(e, Manifest):
+ if e.flags:
+ errors.fatal("Flags are not supported on " + '"manifest" entries')
+ self._included_manifests[e.path] = path
+
+ def get_bases(self, addons=True):
+ """
+ Return all paths under which root manifests have been found. Root
+ manifests are manifests that are included in no other manifest.
+ `addons` indicates whether to include addon bases as well.
+ """
+ all_bases = set(
+ mozpath.dirname(m) for m in self._manifests - set(self._included_manifests)
+ )
+ if not addons:
+ all_bases -= set(self._addons)
+ else:
+            # Keep addon bases in case, for some reason, a detected addon
+            # doesn't have a non-included manifest of its own.
+ all_bases |= set(self._addons)
+ return all_bases
+
+ def close(self):
+ """
+ Push all instructions to the formatter.
+ """
+ self._closed = True
+
+ bases = self.get_bases()
+ broken_bases = sorted(
+ m
+ for m, includer in six.iteritems(self._included_manifests)
+ if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases)
+ )
+ for m in broken_bases:
+ errors.fatal(
+ '"%s" is included from "%s", which is outside "%s"'
+ % (m, self._included_manifests[m], mozpath.basedir(m, bases))
+ )
+ for base in sorted(bases):
+ self.formatter.add_base(base, self._addons.get(base, False))
+ self._chrome_queue.execute()
+ self._queue.execute()
+ self._file_queue.execute()
+
+
+class SimpleManifestSink(object):
+ """
+ Parser sink for "simple" package manifests. Simple package manifests use
+ the format described in the PackageManifestParser documentation, but don't
+ support file removals, and require manifests, interfaces and chrome data to
+    be explicitly listed.
+ Entries starting with bin/ are searched under bin/ in the FileFinder, but
+ are packaged without the bin/ prefix.
+ """
+
+ def __init__(self, finder, formatter):
+ """
+ Initialize the SimpleManifestSink. The given FileFinder is used to
+ get files matching the patterns given in the manifest. The given
+ formatter does the packaging job.
+ """
+ self._finder = finder
+ self.packager = SimplePackager(formatter)
+ self._closed = False
+ self._manifests = set()
+
+ @staticmethod
+ def normalize_path(path):
+ """
+ Remove any bin/ prefix.
+ """
+ if mozpath.basedir(path, ["bin"]) == "bin":
+ return mozpath.relpath(path, "bin")
+ return path
+
+ def add(self, component, pattern):
+ """
+ Add files with the given pattern in the given component.
+ """
+ assert not self._closed
+ added = False
+ for p, f in self._finder.find(pattern):
+ added = True
+ if is_manifest(p):
+ self._manifests.add(p)
+ dest = mozpath.join(component.destdir, SimpleManifestSink.normalize_path(p))
+ self.packager.add(dest, f)
+ if not added:
+ errors.error("Missing file(s): %s" % pattern)
+
+ def remove(self, component, pattern):
+ """
+ Remove files with the given pattern in the given component.
+ """
+ assert not self._closed
+ errors.fatal("Removal is unsupported")
+
+ def close(self, auto_root_manifest=True):
+ """
+ Add possibly missing bits and push all instructions to the formatter.
+ """
+ if auto_root_manifest:
+ # Simple package manifests don't contain the root manifests, so
+ # find and add them.
+ paths = [mozpath.dirname(m) for m in self._manifests]
+ path = mozpath.dirname(mozpath.commonprefix(paths))
+ for p, f in self._finder.find(mozpath.join(path, "chrome.manifest")):
+ if p not in self._manifests:
+ self.packager.add(SimpleManifestSink.normalize_path(p), f)
+ self.packager.close()
diff --git a/python/mozbuild/mozpack/packager/formats.py b/python/mozbuild/mozpack/packager/formats.py
new file mode 100644
index 0000000000..95a6dee2f6
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/formats.py
@@ -0,0 +1,354 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestChrome,
+ ManifestInterfaces,
+ ManifestMultiContent,
+ ManifestResource,
+)
+from mozpack.copier import FileRegistry, FileRegistrySubtree, Jarrer
+from mozpack.errors import errors
+from mozpack.files import ManifestFile
+
+"""
+Formatters are classes receiving packaging instructions and creating the
+appropriate package layout.
+
+There are three distinct formatters, each handling one of the different chrome
+formats:
+ - flat: essentially, copies files from the source with the same file system
+   layout. Manifest entries are grouped in a single manifest per directory,
+ as well as XPT interfaces.
+ - jar: chrome content is packaged in jar files.
+ - omni: chrome content, modules, non-binary components, and many other
+ elements are packaged in an omnijar file for each base directory.
+
+The base interface provides the following methods:
+ - add_base(path [, addon])
+ Register a base directory for an application or GRE, or an addon.
+ Base directories usually contain a root manifest (manifests not
+ included in any other manifest) named chrome.manifest.
+ The optional addon argument tells whether the base directory
+ is that of a packed addon (True), unpacked addon ('unpacked') or
+ otherwise (False).
+ The method may only be called in sorted order of `path` (alphanumeric
+ order, parents before children).
+ - add(path, content)
+ Add the given content (BaseFile instance) at the given virtual path
+ - add_interfaces(path, content)
+     Add the given content (BaseFile instance) as an interface. Equivalent
+     to add(path, content) plus registering the matching interfaces
+     manifest entry through add_manifest().
+ - add_manifest(entry)
+ Add a ManifestEntry.
+ - contains(path)
+ Returns whether the given virtual path is known of the formatter.
+
+The virtual paths mentioned above are paths as they would be with a flat
+chrome.
+
+Formatters all take a FileCopier instance they will fill with the packaged
+data.
+"""
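+
+# A condensed sketch of driving a formatter directly (names are
+# illustrative; real callers usually go through mozpack.packager):
+#
+#     from mozpack.copier import FileCopier
+#     from mozpack.files import GeneratedFile
+#
+#     copier = FileCopier()
+#     formatter = FlatFormatter(copier)
+#     formatter.add_base("")
+#     formatter.add("modules/Example.jsm", GeneratedFile(b"// code"))
+#     copier.copy("staging")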
+
+
+class PiecemealFormatter(object):
+ """
+ Generic formatter that dispatches across different sub-formatters
+ according to paths.
+ """
+
+ def __init__(self, copier):
+ assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
+ self.copier = copier
+ self._sub_formatter = {}
+ self._frozen_bases = False
+
+ def add_base(self, base, addon=False):
+        # Only allow adding a base directory before calls to _get_base().
+ assert not self._frozen_bases
+ assert base not in self._sub_formatter
+ assert all(base > b for b in self._sub_formatter)
+ self._add_base(base, addon)
+
+ def _get_base(self, path):
+ """
+ Return the deepest base directory containing the given path.
+ """
+ self._frozen_bases = True
+ base = mozpath.basedir(path, self._sub_formatter.keys())
+ relpath = mozpath.relpath(path, base) if base else path
+ return base, relpath
+
+ def add(self, path, content):
+ base, relpath = self._get_base(path)
+ if base is None:
+ return self.copier.add(relpath, content)
+ return self._sub_formatter[base].add(relpath, content)
+
+ def add_manifest(self, entry):
+ base, relpath = self._get_base(entry.base)
+ assert base is not None
+ return self._sub_formatter[base].add_manifest(entry.move(relpath))
+
+ def add_interfaces(self, path, content):
+ base, relpath = self._get_base(path)
+ assert base is not None
+ return self._sub_formatter[base].add_interfaces(relpath, content)
+
+ def contains(self, path):
+ assert "*" not in path
+ base, relpath = self._get_base(path)
+ if base is None:
+ return self.copier.contains(relpath)
+ return self._sub_formatter[base].contains(relpath)
+
+
+class FlatFormatter(PiecemealFormatter):
+ """
+ Formatter for the flat package format.
+ """
+
+ def _add_base(self, base, addon=False):
+ self._sub_formatter[base] = FlatSubFormatter(
+ FileRegistrySubtree(base, self.copier)
+ )
+
+
+class FlatSubFormatter(object):
+ """
+ Sub-formatter for the flat package format.
+ """
+
+ def __init__(self, copier):
+ assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
+ self.copier = copier
+ self._chrome_db = {}
+
+ def add(self, path, content):
+ self.copier.add(path, content)
+
+ def add_manifest(self, entry):
+ # Store manifest entries in a single manifest per directory, named
+ # after their parent directory, except for root manifests, all named
+ # chrome.manifest.
+ if entry.base:
+ name = mozpath.basename(entry.base)
+ else:
+ name = "chrome"
+ path = mozpath.normpath(mozpath.join(entry.base, "%s.manifest" % name))
+ if not self.copier.contains(path):
+ # Add a reference to the manifest file in the parent manifest, if
+ # the manifest file is not a root manifest.
+ if entry.base:
+ parent = mozpath.dirname(entry.base)
+ relbase = mozpath.basename(entry.base)
+ relpath = mozpath.join(relbase, mozpath.basename(path))
+ self.add_manifest(Manifest(parent, relpath))
+ self.copier.add(path, ManifestFile(entry.base))
+
+ if isinstance(entry, ManifestChrome):
+ data = self._chrome_db.setdefault(entry.name, {})
+ if isinstance(entry, ManifestMultiContent):
+ entries = data.setdefault(entry.type, {}).setdefault(entry.id, [])
+ else:
+ entries = data.setdefault(entry.type, [])
+ for e in entries:
+ # Ideally, we'd actually check whether entry.flags are more
+ # specific than e.flags, but in practice the following test
+ # is enough for now.
+ if entry == e:
+ errors.warn('"%s" is duplicated. Skipping.' % entry)
+ return
+ if not entry.flags or e.flags and entry.flags == e.flags:
+ errors.fatal('"%s" overrides "%s"' % (entry, e))
+ entries.append(entry)
+
+ self.copier[path].add(entry)
+
+ def add_interfaces(self, path, content):
+ self.copier.add(path, content)
+ self.add_manifest(
+ ManifestInterfaces(mozpath.dirname(path), mozpath.basename(path))
+ )
+
+ def contains(self, path):
+ assert "*" not in path
+ return self.copier.contains(path)
+
+
+class JarFormatter(PiecemealFormatter):
+ """
+ Formatter for the jar package format. Assumes manifest entries related to
+ chrome are registered before the chrome data files are added. Also assumes
+ manifest entries for resources are registered after chrome manifest
+ entries.
+ """
+
+ def __init__(self, copier, compress=True):
+ PiecemealFormatter.__init__(self, copier)
+ self._compress = compress
+
+ def _add_base(self, base, addon=False):
+ if addon is True:
+ jarrer = Jarrer(self._compress)
+ self.copier.add(base + ".xpi", jarrer)
+ self._sub_formatter[base] = FlatSubFormatter(jarrer)
+ else:
+ self._sub_formatter[base] = JarSubFormatter(
+ FileRegistrySubtree(base, self.copier), self._compress
+ )
+
+
+class JarSubFormatter(PiecemealFormatter):
+ """
+ Sub-formatter for the jar package format. It is a PiecemealFormatter that
+ dispatches between further sub-formatter for each of the jar files it
+ dispatches the chrome data to, and a FlatSubFormatter for the non-chrome
+ files.
+ """
+
+ def __init__(self, copier, compress=True):
+ PiecemealFormatter.__init__(self, copier)
+ self._frozen_chrome = False
+ self._compress = compress
+ self._sub_formatter[""] = FlatSubFormatter(copier)
+
+ def _jarize(self, entry, relpath):
+ """
+        Transform a manifest entry into one pointing to chrome data in a jar.
+ Return the corresponding chrome path and the new entry.
+ """
+ base = entry.base
+ basepath = mozpath.split(relpath)[0]
+ chromepath = mozpath.join(base, basepath)
+ entry = (
+ entry.rebase(chromepath)
+ .move(mozpath.join(base, "jar:%s.jar!" % basepath))
+ .rebase(base)
+ )
+ return chromepath, entry
+
+ def add_manifest(self, entry):
+ if isinstance(entry, ManifestChrome) and not urlparse(entry.relpath).scheme:
+ chromepath, entry = self._jarize(entry, entry.relpath)
+ assert not self._frozen_chrome
+ if chromepath not in self._sub_formatter:
+ jarrer = Jarrer(self._compress)
+ self.copier.add(chromepath + ".jar", jarrer)
+ self._sub_formatter[chromepath] = FlatSubFormatter(jarrer)
+ elif isinstance(entry, ManifestResource) and not urlparse(entry.target).scheme:
+ chromepath, new_entry = self._jarize(entry, entry.target)
+ if chromepath in self._sub_formatter:
+ entry = new_entry
+ PiecemealFormatter.add_manifest(self, entry)
+
+
+class OmniJarFormatter(JarFormatter):
+ """
+ Formatter for the omnijar package format.
+ """
+
+ def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
+ JarFormatter.__init__(self, copier, compress)
+ self._omnijar_name = omnijar_name
+ self._non_resources = non_resources
+
+ def _add_base(self, base, addon=False):
+ if addon:
+ # Because add_base is always called with parents before children,
+ # all the possible ancestry of `base` is already present in
+ # `_sub_formatter`.
+ parent_base = mozpath.basedir(base, self._sub_formatter.keys())
+ rel_base = mozpath.relpath(base, parent_base)
+ # If the addon is under a resource directory, package it in the
+ # omnijar.
+ parent_sub_formatter = self._sub_formatter[parent_base]
+ if parent_sub_formatter.is_resource(rel_base):
+ omnijar_sub_formatter = parent_sub_formatter._sub_formatter[
+ self._omnijar_name
+ ]
+ self._sub_formatter[base] = FlatSubFormatter(
+ FileRegistrySubtree(rel_base, omnijar_sub_formatter.copier)
+ )
+ return
+ JarFormatter._add_base(self, base, addon)
+ else:
+ self._sub_formatter[base] = OmniJarSubFormatter(
+ FileRegistrySubtree(base, self.copier),
+ self._omnijar_name,
+ self._compress,
+ self._non_resources,
+ )
+
+
+class OmniJarSubFormatter(PiecemealFormatter):
+ """
+ Sub-formatter for the omnijar package format. It is a PiecemealFormatter
+ that dispatches between a FlatSubFormatter for the resources data and
+ another FlatSubFormatter for the other files.
+ """
+
+ def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
+ PiecemealFormatter.__init__(self, copier)
+ self._omnijar_name = omnijar_name
+ self._compress = compress
+ self._non_resources = non_resources
+ self._sub_formatter[""] = FlatSubFormatter(copier)
+ jarrer = Jarrer(self._compress)
+ self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer)
+
+ def _get_base(self, path):
+ base = self._omnijar_name if self.is_resource(path) else ""
+ # Only add the omnijar file if something ends up in it.
+ if base and not self.copier.contains(base):
+ self.copier.add(base, self._sub_formatter[base].copier)
+ return base, path
+
+ def add_manifest(self, entry):
+ base = ""
+ if not isinstance(entry, ManifestBinaryComponent):
+ base = self._omnijar_name
+ formatter = self._sub_formatter[base]
+ return formatter.add_manifest(entry)
+
+ def is_resource(self, path):
+ """
+ Return whether the given path corresponds to a resource to be put in an
+ omnijar archive.
+ """
+ if any(mozpath.match(path, p.replace("*", "**")) for p in self._non_resources):
+ return False
+ path = mozpath.split(path)
+ if path[0] == "chrome":
+ return len(path) == 1 or path[1] != "icons"
+ if path[0] == "components":
+ return path[-1].endswith((".js", ".xpt"))
+ if path[0] == "res":
+ return len(path) == 1 or (
+ path[1] != "cursors"
+ and path[1] != "touchbar"
+ and path[1] != "MainMenu.nib"
+ )
+ if path[0] == "defaults":
+ return len(path) != 3 or not (
+ path[2] == "channel-prefs.js" and path[1] in ["pref", "preferences"]
+ )
+ if len(path) <= 2 and path[-1] == "greprefs.js":
+ # Accommodate `greprefs.js` and `$ANDROID_CPU_ARCH/greprefs.js`.
+ return True
+ return path[0] in [
+ "modules",
+ "actors",
+ "dictionaries",
+ "hyphenation",
+ "localization",
+ "update.locale",
+ "contentaccessible",
+ ]
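+
+    # Illustrative classification by is_resource() with the default
+    # (empty) non_resources:
+    #
+    #     chrome/toolkit/foo.css          -> resource (goes in the omnijar)
+    #     chrome/icons/default/main.png   -> not a resource (stays on disk)
+    #     components/foo.js               -> resource
+    #     components/foo.so               -> not a resource
+    #     defaults/pref/channel-prefs.js  -> not a resource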
diff --git a/python/mozbuild/mozpack/packager/l10n.py b/python/mozbuild/mozpack/packager/l10n.py
new file mode 100644
index 0000000000..76871e15cd
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/l10n.py
@@ -0,0 +1,304 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Replace localized parts of a packaged directory with data from a langpack
+directory.
+"""
+
+import json
+import os
+
+import six
+from createprecomplete import generate_precomplete
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestChrome,
+ ManifestEntryWithRelPath,
+ ManifestLocale,
+ is_manifest,
+)
+from mozpack.copier import FileCopier, Jarrer
+from mozpack.errors import errors
+from mozpack.files import ComposedFinder, GeneratedFile, ManifestFile
+from mozpack.mozjar import JAR_DEFLATED
+from mozpack.packager import Component, SimpleManifestSink, SimplePackager
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.packager.unpack import UnpackFinder
+
+
+class LocaleManifestFinder(object):
+ def __init__(self, finder):
+ entries = self.entries = []
+ bases = self.bases = []
+
+ class MockFormatter(object):
+ def add_interfaces(self, path, content):
+ pass
+
+ def add(self, path, content):
+ pass
+
+ def add_manifest(self, entry):
+ if entry.localized:
+ entries.append(entry)
+
+ def add_base(self, base, addon=False):
+ bases.append(base)
+
+ # SimplePackager rejects "manifest foo.manifest" entries with
+ # additional flags (such as "manifest foo.manifest application=bar").
+        # Those types of entries are used by language packs to work as addons,
+ # but are not necessary for the purpose of l10n repacking. So we wrap
+ # the finder in order to remove those entries.
+ class WrapFinder(object):
+ def __init__(self, finder):
+ self._finder = finder
+
+ def find(self, pattern):
+ for p, f in self._finder.find(pattern):
+ if isinstance(f, ManifestFile):
+ unwanted = [
+ e for e in f._entries if isinstance(e, Manifest) and e.flags
+ ]
+ if unwanted:
+ f = ManifestFile(
+ f._base, [e for e in f._entries if e not in unwanted]
+ )
+ yield p, f
+
+ sink = SimpleManifestSink(WrapFinder(finder), MockFormatter())
+ sink.add(Component(""), "*")
+ sink.close(False)
+
+ # Find unique locales used in these manifest entries.
+ self.locales = list(
+ set(e.id for e in self.entries if isinstance(e, ManifestLocale))
+ )
+
+
+class L10NRepackFormatterMixin(object):
+ def __init__(self, *args, **kwargs):
+ super(L10NRepackFormatterMixin, self).__init__(*args, **kwargs)
+ self._dictionaries = {}
+
+ def add(self, path, file):
+ base, relpath = self._get_base(path)
+ if path.endswith(".dic"):
+ if relpath.startswith("dictionaries/"):
+ root, ext = mozpath.splitext(mozpath.basename(path))
+ self._dictionaries[root] = path
+ elif path.endswith("/built_in_addons.json"):
+ data = json.loads(six.ensure_text(file.open().read()))
+ data["dictionaries"] = self._dictionaries
+ # The GeneratedFile content is only really generated after
+ # all calls to formatter.add.
+ file = GeneratedFile(lambda: json.dumps(data))
+ elif relpath.startswith("META-INF/"):
+ # Ignore signatures inside omnijars. We drop these items: if we
+ # don't treat them as omnijar resources, they will be included in
+ # the top-level package, and that's not how omnijars are signed (Bug
+ # 1750676). If we treat them as omnijar resources, they will stay
+ # in the omnijar, as expected -- but the signatures won't be valid
+ # after repacking. Therefore, drop them.
+ return
+ super(L10NRepackFormatterMixin, self).add(path, file)
+
+
+def L10NRepackFormatter(klass):
+ class L10NRepackFormatter(L10NRepackFormatterMixin, klass):
+ pass
+
+ return L10NRepackFormatter
+
+
+FlatFormatter = L10NRepackFormatter(FlatFormatter)
+JarFormatter = L10NRepackFormatter(JarFormatter)
+OmniJarFormatter = L10NRepackFormatter(OmniJarFormatter)
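+# These rebindings shadow the formatter classes imported from
+# mozpack.packager.formats, so the formatter lookups in repack() below pick
+# up the L10N-aware variants without further changes.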
+
+
+def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()):
+ app = LocaleManifestFinder(app_finder)
+ l10n = LocaleManifestFinder(l10n_finder)
+
+ # The code further below assumes there's only one locale replaced with
+ # another one.
+ if len(app.locales) > 1:
+ errors.fatal("Multiple app locales aren't supported: " + ",".join(app.locales))
+ if len(l10n.locales) > 1:
+ errors.fatal(
+ "Multiple l10n locales aren't supported: " + ",".join(l10n.locales)
+ )
+ locale = app.locales[0]
+ l10n_locale = l10n.locales[0]
+
+ # For each base directory, store what path a locale chrome package name
+ # corresponds to.
+ # e.g., for the following entry under app/chrome:
+ # locale foo en-US path/to/files
+ # keep track that the locale path for foo in app is
+ # app/chrome/path/to/files.
+    # As there may be multiple locale entries with the same base but with
+    # different flags, the tracking takes the flags into account when they
+    # are present. Example:
+ # locale foo en-US path/to/files/win os=Win
+ # locale foo en-US path/to/files/mac os=Darwin
+ def key(entry):
+ if entry.flags:
+ return "%s %s" % (entry.name, entry.flags)
+ return entry.name
+
+ l10n_paths = {}
+ for e in l10n.entries:
+ if isinstance(e, ManifestChrome):
+ base = mozpath.basedir(e.path, app.bases)
+ l10n_paths.setdefault(base, {})
+ l10n_paths[base][key(e)] = e.path
+
+    # For chrome and non-chrome files or directories, store what langpack
+    # path corresponds to a package path.
+ paths = {}
+ for e in app.entries:
+ if isinstance(e, ManifestEntryWithRelPath):
+ base = mozpath.basedir(e.path, app.bases)
+ if base not in l10n_paths:
+ errors.fatal("Locale doesn't contain %s/" % base)
+ # Allow errors to accumulate
+ continue
+ if key(e) not in l10n_paths[base]:
+ errors.fatal("Locale doesn't have a manifest entry for '%s'" % e.name)
+ # Allow errors to accumulate
+ continue
+ paths[e.path] = l10n_paths[base][key(e)]
+
+ for pattern in non_chrome:
+ for base in app.bases:
+ path = mozpath.join(base, pattern)
+ left = set(p for p, f in app_finder.find(path))
+ right = set(p for p, f in l10n_finder.find(path))
+ for p in right:
+ paths[p] = p
+ for p in left - right:
+ paths[p] = None
+
+    # Create a new package, with non-localized bits coming from the original
+ # package, and localized bits coming from the langpack.
+ packager = SimplePackager(formatter)
+ for p, f in app_finder:
+ if is_manifest(p):
+ # Remove localized manifest entries.
+ for e in [e for e in f if e.localized]:
+ f.remove(e)
+ # If the path is one that needs a locale replacement, use the
+ # corresponding file from the langpack.
+ path = None
+ if p in paths:
+ path = paths[p]
+ if not path:
+ continue
+ else:
+ base = mozpath.basedir(p, paths.keys())
+ if base:
+ subpath = mozpath.relpath(p, base)
+ path = mozpath.normpath(mozpath.join(paths[base], subpath))
+
+ if path:
+ files = [f for p, f in l10n_finder.find(path)]
+            if not files:
+ if base not in non_chrome:
+ finderBase = ""
+ if hasattr(l10n_finder, "base"):
+ finderBase = l10n_finder.base
+ errors.error("Missing file: %s" % os.path.join(finderBase, path))
+ else:
+ packager.add(path, files[0])
+ else:
+ packager.add(p, f)
+
+ # Add localized manifest entries from the langpack.
+ l10n_manifests = []
+ for base in set(e.base for e in l10n.entries):
+ m = ManifestFile(base, [e for e in l10n.entries if e.base == base])
+ path = mozpath.join(base, "chrome.%s.manifest" % l10n_locale)
+ l10n_manifests.append((path, m))
+ bases = packager.get_bases()
+ for path, m in l10n_manifests:
+ base = mozpath.basedir(path, bases)
+ packager.add(path, m)
+ # Add a "manifest $path" entry in the top manifest under that base.
+ m = ManifestFile(base)
+ m.add(Manifest(base, mozpath.relpath(path, base)))
+ packager.add(mozpath.join(base, "chrome.manifest"), m)
+
+ packager.close()
+
+    # Add any remaining non-chrome files.
+ for pattern in non_chrome:
+ for base in bases:
+ for p, f in l10n_finder.find(mozpath.join(base, pattern)):
+ if not formatter.contains(p):
+ formatter.add(p, f)
+
+    # Resources in `localization` directories are packaged from the source,
+    # and if localized versions are present in the l10n dir, we package them
+    # as well, keeping the source dir resources as a runtime fallback.
+ for p, f in l10n_finder.find("**/localization"):
+ if not formatter.contains(p):
+ formatter.add(p, f)
+
+ # Transplant jar preloading information.
+ for path, log in six.iteritems(app_finder.jarlogs):
+ assert isinstance(copier[path], Jarrer)
+ copier[path].preload([l.replace(locale, l10n_locale) for l in log])
+
+
+def repack(
+ source, l10n, extra_l10n={}, non_resources=[], non_chrome=set(), minify=False
+):
+ """
+ Replace localized data from the `source` directory with localized data
+ from `l10n` and `extra_l10n`.
+
+ The `source` argument points to a directory containing a packaged
+ application (in omnijar, jar or flat form).
+ The `l10n` argument points to a directory containing the main localized
+ data (usually in the form of a language pack addon) to use to replace
+ in the packaged application.
+ The `extra_l10n` argument contains a dict associating relative paths in
+ the source to separate directories containing localized data for them.
+    This can be used to point at different language pack addons for different
+    parts of the packaged application.
+    The `non_resources` argument gives a list of relative paths in the source
+    that should not be added to an omnijar when the packaged application
+    is in that format.
+ The `non_chrome` argument gives a list of file/directory patterns for
+ localized files that are not listed in a chrome.manifest.
+ If `minify`, `.properties` files are minified.
+ """
+ app_finder = UnpackFinder(source, minify=minify)
+ l10n_finder = UnpackFinder(l10n, minify=minify)
+ if extra_l10n:
+ finders = {
+ "": l10n_finder,
+ }
+ for base, path in six.iteritems(extra_l10n):
+ finders[base] = UnpackFinder(path, minify=minify)
+ l10n_finder = ComposedFinder(finders)
+ copier = FileCopier()
+ compress = min(app_finder.compressed, JAR_DEFLATED)
+ if app_finder.kind == "flat":
+ formatter = FlatFormatter(copier)
+ elif app_finder.kind == "jar":
+ formatter = JarFormatter(copier, compress=compress)
+ elif app_finder.kind == "omni":
+ formatter = OmniJarFormatter(
+ copier, app_finder.omnijar, compress=compress, non_resources=non_resources
+ )
+
+ with errors.accumulate():
+ _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
+ copier.copy(source, skip_if_older=False)
+ generate_precomplete(source)
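+
+
+# Example invocation (hypothetical paths; a minimal sketch of a typical
+# l10n repack, not a recipe from this module):
+#
+#   repack(
+#       "dist/thunderbird",             # packaged application directory
+#       "dist/xpi-stage/locale-fr",     # unpacked language pack
+#       minify=True,
+#   )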
diff --git a/python/mozbuild/mozpack/packager/unpack.py b/python/mozbuild/mozpack/packager/unpack.py
new file mode 100644
index 0000000000..dff295eb9b
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/unpack.py
@@ -0,0 +1,200 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestEntryWithRelPath,
+ ManifestResource,
+ is_manifest,
+ parse_manifest,
+)
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.files import BaseFinder, DeflatedFile, FileFinder, ManifestFile
+from mozpack.mozjar import JarReader
+from mozpack.packager import SimplePackager
+from mozpack.packager.formats import FlatFormatter
+
+
+class UnpackFinder(BaseFinder):
+ """
+ Special Finder object that treats the source package directory as if it
+ were in the flat chrome format, whatever chrome format it actually is in.
+
+ This means that for example, paths like chrome/browser/content/... match
+ files under jar:chrome/browser.jar!/content/... in case of jar chrome
+ format.
+
+    The first argument to the constructor is a Finder instance or a path.
+ The UnpackFinder is populated with files from this Finder instance,
+ or with files from a FileFinder using the given path as its root.
+ """
+
+ def __init__(self, source, omnijar_name=None, unpack_xpi=True, **kwargs):
+ if isinstance(source, BaseFinder):
+ assert not kwargs
+ self._finder = source
+ else:
+ self._finder = FileFinder(source, **kwargs)
+ self.base = self._finder.base
+ self.files = FileRegistry()
+ self.kind = "flat"
+ if omnijar_name:
+ self.omnijar = omnijar_name
+ else:
+ # Can't include globally because of bootstrapping issues.
+ from buildconfig import substs
+
+ self.omnijar = substs.get("OMNIJAR_NAME", "omni.ja")
+ self.jarlogs = {}
+ self.compressed = False
+ self._unpack_xpi = unpack_xpi
+
+ jars = set()
+
+ for p, f in self._finder.find("*"):
+ # Skip the precomplete file, which is generated at packaging time.
+ if p == "precomplete":
+ continue
+ base = mozpath.dirname(p)
+ # If the file matches the omnijar pattern, it is an omnijar.
+ # All the files it contains go in the directory containing the full
+ # pattern. Manifests are merged if there is a corresponding manifest
+ # in the directory.
+ if self._maybe_zip(f) and mozpath.match(p, "**/%s" % self.omnijar):
+ jar = self._open_jar(p, f)
+ if "chrome.manifest" in jar:
+ self.kind = "omni"
+ self._fill_with_jar(p[: -len(self.omnijar) - 1], jar)
+ continue
+            # If the file is a manifest, scan its entries for jar: urls. If
+            # any are found, the files contained in the jars they point to
+            # go under a directory named after the jar.
+ if is_manifest(p):
+ m = self.files[p] if self.files.contains(p) else ManifestFile(base)
+ for e in parse_manifest(
+ self.base, p, codecs.getreader("utf-8")(f.open())
+ ):
+ m.add(self._handle_manifest_entry(e, jars))
+ if self.files.contains(p):
+ continue
+ f = m
+ # If we're unpacking packed addons and the file is a packed addon,
+ # unpack it under a directory named after the xpi.
+ if self._unpack_xpi and p.endswith(".xpi") and self._maybe_zip(f):
+ self._fill_with_jar(p[:-4], self._open_jar(p, f))
+ continue
+ if p not in jars:
+ self.files.add(p, f)
+
+ def _fill_with_jar(self, base, jar):
+ for j in jar:
+ path = mozpath.join(base, j.filename)
+ if is_manifest(j.filename):
+ m = (
+ self.files[path]
+ if self.files.contains(path)
+ else ManifestFile(mozpath.dirname(path))
+ )
+ for e in parse_manifest(None, path, j):
+ m.add(e)
+                if not self.files.contains(path):
+                    self.files.add(path, m)
+            else:
+                self.files.add(path, DeflatedFile(j))
+
+ def _handle_manifest_entry(self, entry, jars):
+ jarpath = None
+ if (
+ isinstance(entry, ManifestEntryWithRelPath)
+ and urlparse(entry.relpath).scheme == "jar"
+ ):
+ jarpath, entry = self._unjarize(entry, entry.relpath)
+ elif (
+ isinstance(entry, ManifestResource)
+ and urlparse(entry.target).scheme == "jar"
+ ):
+ jarpath, entry = self._unjarize(entry, entry.target)
+ if jarpath:
+ # Don't defer unpacking the jar file. If we already saw
+ # it, take (and remove) it from the registry. If we
+ # haven't, try to find it now.
+ if self.files.contains(jarpath):
+ jar = self.files[jarpath]
+ self.files.remove(jarpath)
+ else:
+ jar = [f for p, f in self._finder.find(jarpath)]
+ assert len(jar) == 1
+ jar = jar[0]
+ if jarpath not in jars:
+ base = mozpath.splitext(jarpath)[0]
+ for j in self._open_jar(jarpath, jar):
+ self.files.add(mozpath.join(base, j.filename), DeflatedFile(j))
+ jars.add(jarpath)
+ self.kind = "jar"
+ return entry
+
+ def _open_jar(self, path, file):
+ """
+ Return a JarReader for the given BaseFile instance, keeping a log of
+ the preloaded entries it has.
+ """
+ jar = JarReader(fileobj=file.open())
+ self.compressed = max(self.compressed, jar.compression)
+ if jar.last_preloaded:
+ jarlog = list(jar.entries.keys())
+ self.jarlogs[path] = jarlog[: jarlog.index(jar.last_preloaded) + 1]
+ return jar
+
+ def find(self, path):
+ for p in self.files.match(path):
+ yield p, self.files[p]
+
+ def _maybe_zip(self, file):
+ """
+ Return whether the given BaseFile looks like a ZIP/Jar.
+ """
+ header = file.open().read(8)
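+        # A plain ZIP/Jar starts with "PK"; optimized jars produced by
+        # mozjar prepend a 4-byte preload header, so "PK" may also appear
+        # at offset 4 (hence the two checks below).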
+ return len(header) == 8 and (header[0:2] == b"PK" or header[4:6] == b"PK")
+
+ def _unjarize(self, entry, relpath):
+ """
+ Transform a manifest entry pointing to chrome data in a jar in one
+ pointing to the corresponding unpacked path. Return the jar path and
+ the new entry.
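+
+        For example (hypothetical entry), an entry under base "chrome" such
+        as ``content browser jar:browser.jar!/content/browser/`` becomes
+        ``content browser browser/content/browser/``, and the returned jar
+        path is ``chrome/browser.jar``.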
+ """
+ base = entry.base
+ jar, relpath = urlparse(relpath).path.split("!", 1)
+ entry = (
+ entry.rebase(mozpath.join(base, "jar:%s!" % jar))
+ .move(mozpath.join(base, mozpath.splitext(jar)[0]))
+ .rebase(base)
+ )
+ return mozpath.join(base, jar), entry
+
+
+def unpack_to_registry(source, registry, omnijar_name=None):
+ """
+ Transform a jar chrome or omnijar packaged directory into a flat package.
+
+ The given registry is filled with the flat package.
+ """
+ finder = UnpackFinder(source, omnijar_name)
+ packager = SimplePackager(FlatFormatter(registry))
+ for p, f in finder.find("*"):
+ packager.add(p, f)
+ packager.close()
+
+
+def unpack(source, omnijar_name=None):
+ """
+ Transform a jar chrome or omnijar packaged directory into a flat package.
+ """
+ copier = FileCopier()
+ unpack_to_registry(source, copier, omnijar_name)
+ copier.copy(source, skip_if_older=False)
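+
+
+# Example (hypothetical path; a minimal sketch): rewrite a packaged
+# application directory in place as a flat package.
+#
+#   unpack("dist/thunderbird")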
diff --git a/python/mozbuild/mozpack/path.py b/python/mozbuild/mozpack/path.py
new file mode 100644
index 0000000000..3e5af0a06b
--- /dev/null
+++ b/python/mozbuild/mozpack/path.py
@@ -0,0 +1,246 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Like :py:mod:`os.path`, with a reduced set of functions, and with normalized path
+separators (always use forward slashes).
+Also contains a few additional utilities not found in :py:mod:`os.path`.
+"""
+
+import ctypes
+import os
+import posixpath
+import re
+import sys
+
+
+def normsep(path):
+ """
+ Normalize path separators, by using forward slashes instead of whatever
+ :py:const:`os.sep` is.
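+
+    On Windows, for example, a path like ``foo\\bar`` becomes ``foo/bar``.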
+ """
+ if os.sep != "/":
+ # Python 2 is happy to do things like byte_string.replace(u'foo',
+ # u'bar'), but not Python 3.
+ if isinstance(path, bytes):
+ path = path.replace(os.sep.encode("ascii"), b"/")
+ else:
+ path = path.replace(os.sep, "/")
+ if os.altsep and os.altsep != "/":
+ if isinstance(path, bytes):
+ path = path.replace(os.altsep.encode("ascii"), b"/")
+ else:
+ path = path.replace(os.altsep, "/")
+ return path
+
+
+def cargo_workaround(path):
+ unc = "//?/"
+ if path.startswith(unc):
+ return path[len(unc) :]
+ return path
+
+
+def relpath(path, start):
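+    """
+    Like :py:func:`os.path.relpath`, with normalized separators, returning
+    an empty string (rather than ``'.'``) when `path` equals `start`.
+    For example, ``relpath('foo/bar', 'foo')`` returns ``'bar'``.
+    """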
+ path = normsep(path)
+ start = normsep(start)
+ if sys.platform == "win32":
+ # os.path.relpath can't handle relative paths between UNC and non-UNC
+ # paths, so strip a //?/ prefix if present (bug 1581248)
+ path = cargo_workaround(path)
+ start = cargo_workaround(start)
+ try:
+ rel = os.path.relpath(path, start)
+ except ValueError:
+ # On Windows this can throw a ValueError if the two paths are on
+ # different drives. In that case, just return the path.
+ return abspath(path)
+ rel = normsep(rel)
+ return "" if rel == "." else rel
+
+
+def realpath(path):
+ return normsep(os.path.realpath(path))
+
+
+def abspath(path):
+ return normsep(os.path.abspath(path))
+
+
+def join(*paths):
+ return normsep(os.path.join(*paths))
+
+
+def normpath(path):
+ return posixpath.normpath(normsep(path))
+
+
+def dirname(path):
+ return posixpath.dirname(normsep(path))
+
+
+def commonprefix(paths):
+ return posixpath.commonprefix([normsep(path) for path in paths])
+
+
+def basename(path):
+ return os.path.basename(path)
+
+
+def splitext(path):
+ return posixpath.splitext(normsep(path))
+
+
+def split(path):
+ """
+ Return the normalized path as a list of its components.
+
+ ``split('foo/bar/baz')`` returns ``['foo', 'bar', 'baz']``
+ """
+ return normsep(path).split("/")
+
+
+def basedir(path, bases):
+ """
+ Given a list of directories (`bases`), return which one contains the given
+ path. If several matches are found, the deepest base directory is returned.
+
+ ``basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar'])`` returns ``'foo/bar'``
+ (`'foo'` and `'foo/bar'` both match, but `'foo/bar'` is the deepest match)
+ """
+ path = normsep(path)
+ bases = [normsep(b) for b in bases]
+ if path in bases:
+ return path
+ for b in sorted(bases, reverse=True):
+ if b == "" or path.startswith(b + "/"):
+ return b
+
+
+re_cache = {}
+# Python versions < 3.7 return r'\/' for re.escape('/').
+if re.escape("/") == "/":
+ MATCH_STAR_STAR_RE = re.compile(r"(^|/)\\\*\\\*/")
+ MATCH_STAR_STAR_END_RE = re.compile(r"(^|/)\\\*\\\*$")
+else:
+ MATCH_STAR_STAR_RE = re.compile(r"(^|\\\/)\\\*\\\*\\\/")
+ MATCH_STAR_STAR_END_RE = re.compile(r"(^|\\\/)\\\*\\\*$")
+
+
+def match(path, pattern):
+ """
+ Return whether the given path matches the given pattern.
+ An asterisk can be used to match any string, including the null string, in
+ one part of the path:
+
+ ``foo`` matches ``*``, ``f*`` or ``fo*o``
+
+    However, an asterisk matching a subdirectory cannot match the null string:
+
+ ``foo/bar`` does *not* match ``foo/*/bar``
+
+    If the pattern matches one of the ancestor directories of the path, the
+    path is considered matching:
+
+ ``foo/bar`` matches ``foo``
+
+ Two adjacent asterisks can be used to match files and zero or more
+ directories and subdirectories.
+
+ ``foo/bar`` matches ``foo/**/bar``, or ``**/bar``
+ """
+ if not pattern:
+ return True
+ if pattern not in re_cache:
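+        # Translate the escaped glob into a regular expression: "**/"
+        # matches any number of intermediate directories (including none),
+        # a trailing "**" matches anything below, a single "*" matches
+        # within one path component, and the final "(?:/.*)?$" makes a
+        # pattern also match everything under a matching directory.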
+ p = re.escape(pattern)
+ p = MATCH_STAR_STAR_RE.sub(r"\1(?:.+/)?", p)
+ p = MATCH_STAR_STAR_END_RE.sub(r"(?:\1.+)?", p)
+ p = p.replace(r"\*", "[^/]*") + "(?:/.*)?$"
+ re_cache[pattern] = re.compile(p)
+ return re_cache[pattern].match(path) is not None
+
+
+def rebase(oldbase, base, relativepath):
+ """
+ Return `relativepath` relative to `base` instead of `oldbase`.
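+
+    For example, ``rebase('chrome', '', 'content/global/')`` returns
+    ``'chrome/content/global/'``, and ``rebase('', 'chrome',
+    'chrome/content/global/')`` returns ``'content/global/'``.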
+ """
+ if base == oldbase:
+ return relativepath
+ if len(base) < len(oldbase):
+ assert basedir(oldbase, [base]) == base
+ relbase = relpath(oldbase, base)
+ result = join(relbase, relativepath)
+ else:
+ assert basedir(base, [oldbase]) == oldbase
+ relbase = relpath(base, oldbase)
+ result = relpath(relativepath, relbase)
+ result = normpath(result)
+ if relativepath.endswith("/") and not result.endswith("/"):
+ result += "/"
+ return result
+
+
+def readlink(path):
+ if hasattr(os, "readlink"):
+ return normsep(os.readlink(path))
+
+ # Unfortunately os.path.realpath doesn't support symlinks on Windows, and os.readlink
+ # is only available on Windows with Python 3.2+. We have to resort to ctypes...
+
+    assert sys.platform == "win32"
+
+    # A bare "import ctypes" does not make ctypes.wintypes available;
+    # import it here, since this fallback only runs on Windows.
+    import ctypes.wintypes
+
+    CreateFileW = ctypes.windll.kernel32.CreateFileW
+ CreateFileW.argtypes = [
+ ctypes.wintypes.LPCWSTR,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.LPVOID,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.HANDLE,
+ ]
+ CreateFileW.restype = ctypes.wintypes.HANDLE
+
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ OPEN_EXISTING = 3
+ FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+
+ handle = CreateFileW(
+ path,
+ GENERIC_READ,
+ FILE_SHARE_READ,
+ 0,
+ OPEN_EXISTING,
+ FILE_FLAG_BACKUP_SEMANTICS,
+ 0,
+ )
+    # CreateFileW returns INVALID_HANDLE_VALUE (-1) on failure, not 1.
+    INVALID_HANDLE_VALUE = ctypes.wintypes.HANDLE(-1).value
+    assert handle != INVALID_HANDLE_VALUE, "Failed getting a handle to: {}".format(path)
+
+ MAX_PATH = 260
+
+ buf = ctypes.create_unicode_buffer(MAX_PATH)
+ GetFinalPathNameByHandleW = ctypes.windll.kernel32.GetFinalPathNameByHandleW
+ GetFinalPathNameByHandleW.argtypes = [
+ ctypes.wintypes.HANDLE,
+ ctypes.wintypes.LPWSTR,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ]
+ GetFinalPathNameByHandleW.restype = ctypes.wintypes.DWORD
+
+ FILE_NAME_NORMALIZED = 0x0
+
+ rv = GetFinalPathNameByHandleW(handle, buf, MAX_PATH, FILE_NAME_NORMALIZED)
+ assert rv != 0 and rv <= MAX_PATH, "Failed getting final path for: {}".format(path)
+
+ CloseHandle = ctypes.windll.kernel32.CloseHandle
+ CloseHandle.argtypes = [ctypes.wintypes.HANDLE]
+ CloseHandle.restype = ctypes.wintypes.BOOL
+
+ rv = CloseHandle(handle)
+ assert rv != 0, "Failed closing handle"
+
+ # Remove leading '\\?\' from the result.
+ return normsep(buf.value[4:])
diff --git a/python/mozbuild/mozpack/pkg.py b/python/mozbuild/mozpack/pkg.py
new file mode 100644
index 0000000000..75a63b9746
--- /dev/null
+++ b/python/mozbuild/mozpack/pkg.py
@@ -0,0 +1,299 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import concurrent.futures
+import lzma
+import os
+import plistlib
+import struct
+import subprocess
+from pathlib import Path
+from string import Template
+from typing import List
+from urllib.parse import quote
+
+import mozfile
+
+TEMPLATE_DIRECTORY = Path(__file__).parent / "apple_pkg"
+PBZX_CHUNK_SIZE = 16 * 1024 * 1024 # 16MB chunks
+
+
+def get_apple_template(name: str) -> Template:
+ """
+    Given <name>, open the file at <TEMPLATE_DIRECTORY>/<name>, read its
+    contents and return them as a Template.
+
+ Args:
+ name: str, Filename for the template
+
+ Returns:
+ Template, loaded from file
+ """
+ tmpl_path = TEMPLATE_DIRECTORY / name
+ if not tmpl_path.is_file():
+ raise Exception(f"Could not find template: {tmpl_path}")
+ with tmpl_path.open("r") as tmpl:
+ contents = tmpl.read()
+ return Template(contents)
+
+
+def save_text_file(content: str, destination: Path):
+ """
+    Save a text file to <destination> with the provided <content>.
+    Note: overwrites any existing contents.
+
+ Args:
+ content: str, The desired contents of the file
+ destination: Path, The file path
+ """
+ with destination.open("w") as out_fd:
+ out_fd.write(content)
+ print(f"Created text file at {destination}")
+ print(f"Created text file size: {destination.stat().st_size} bytes")
+
+
+def get_app_info_plist(app_path: Path) -> dict:
+ """
+    Retrieve the properties from the Info.plist file of an app.
+    The Info.plist file is expected at <app_path>/Contents/Info.plist.
+
+ Note: Ignores properties that are not <string> type
+
+ Args:
+ app_path: Path, the .app file/directory path
+
+ Returns:
+ dict, the dictionary of properties found in Info.plist
+ """
+ info_plist = app_path / "Contents/Info.plist"
+ if not info_plist.is_file():
+ raise Exception(f"Could not find Info.plist in {info_plist}")
+
+ print(f"Reading app Info.plist from: {info_plist}")
+
+ with info_plist.open("rb") as plist_fd:
+ data = plistlib.load(plist_fd)
+
+ return data
+
+
+def create_payload(destination: Path, root_path: Path, cpio_tool: str):
+ """
+ Creates a payload at <destination> based on <root_path>
+
+ Args:
+ destination: Path, the destination Path
+ root_path: Path, the root directory Path
+        cpio_tool: str, the cpio tool path or executable name
+ """
+ # Files to be cpio'd are root folder + contents
+ file_list = ["./"] + get_relative_glob_list(root_path, "**/*")
+
+ with mozfile.TemporaryDirectory() as tmp_dir:
+ tmp_payload_path = Path(tmp_dir) / "Payload"
+ print(f"Creating Payload with cpio from {root_path} to {tmp_payload_path}")
+ print(f"Found {len(file_list)} files")
+ with tmp_payload_path.open("wb") as tmp_payload:
+ process = subprocess.run(
+ [
+ cpio_tool,
+ "-o", # copy-out mode
+ "--format",
+ "odc", # old POSIX .1 portable format
+ "--owner",
+ "0:80", # clean ownership
+ ],
+ stdout=tmp_payload,
+ stderr=subprocess.PIPE,
+ input="\n".join(file_list) + "\n",
+ encoding="ascii",
+ cwd=root_path,
+ )
+ # cpio outputs number of blocks to stderr
+ print(f"[CPIO]: {process.stderr}")
+ if process.returncode:
+ raise Exception(f"CPIO error {process.returncode}")
+
+ tmp_payload_size = tmp_payload_path.stat().st_size
+ print(f"Uncompressed Payload size: {tmp_payload_size // 1024}kb")
+
+ def compress_chunk(chunk):
+ compressed_chunk = lzma.compress(chunk)
+ return len(chunk), compressed_chunk
+
+ def chunker(fileobj, chunk_size):
+ while True:
+ chunk = fileobj.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ with tmp_payload_path.open("rb") as f_in, destination.open(
+ "wb"
+ ) as f_out, concurrent.futures.ThreadPoolExecutor(
+ max_workers=os.cpu_count()
+ ) as executor:
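+            # pbzx stream layout, as written here: the 4-byte magic "pbzx",
+            # a big-endian u64 chunk-size field, then per chunk a big-endian
+            # u64 uncompressed size, a u64 stored size, and the chunk bytes
+            # (XZ data when compression helped, raw bytes otherwise).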
+ f_out.write(b"pbzx")
+ f_out.write(struct.pack(">Q", PBZX_CHUNK_SIZE))
+ chunks = chunker(f_in, PBZX_CHUNK_SIZE)
+ for uncompressed_size, compressed_chunk in executor.map(
+ compress_chunk, chunks
+ ):
+ f_out.write(struct.pack(">Q", uncompressed_size))
+ if len(compressed_chunk) < uncompressed_size:
+ f_out.write(struct.pack(">Q", len(compressed_chunk)))
+ f_out.write(compressed_chunk)
+ else:
+                    # Considering how unlikely this is, we prefer to just
+                    # decompress here rather than keep the original
+                    # uncompressed chunk around.
+ f_out.write(struct.pack(">Q", uncompressed_size))
+ f_out.write(lzma.decompress(compressed_chunk))
+
+ print(f"Compressed Payload file to {destination}")
+ print(f"Compressed Payload size: {destination.stat().st_size // 1024}kb")
+
+
+def create_bom(bom_path: Path, root_path: Path, mkbom_tool: Path):
+ """
+ Creates a Bill Of Materials file at <bom_path> based on <root_path>
+
+ Args:
+ bom_path: Path, destination Path for the BOM file
+ root_path: Path, root directory Path
+ mkbom_tool: Path, mkbom tool Path
+ """
+ print(f"Creating BOM file from {root_path} to {bom_path}")
+ subprocess.check_call(
+ [
+ mkbom_tool,
+ "-u",
+ "0",
+ "-g",
+ "80",
+ str(root_path),
+ str(bom_path),
+ ]
+ )
+ print(f"Created BOM File size: {bom_path.stat().st_size // 1024}kb")
+
+
+def get_relative_glob_list(source: Path, glob: str) -> List[str]:
+ """
+ Given a source path, return a list of relative path based on glob
+
+ Args:
+ source: Path, source directory Path
+ glob: str, unix style glob
+
+ Returns:
+ list[str], paths found in source directory
+ """
+ return [f"./{c.relative_to(source)}" for c in source.glob(glob)]
+
+
+def xar_package_folder(source_path: Path, destination: Path, xar_tool: Path):
+ """
+ Create a pkg from <source_path> to <destination>
+ The command is issued with <source_path> as cwd
+
+ Args:
+ source_path: Path, source absolute Path
+ destination: Path, destination absolute Path
+ xar_tool: Path, xar tool Path
+ """
+ if not source_path.is_absolute() or not destination.is_absolute():
+ raise Exception("Source and destination should be absolute.")
+
+ print(f"Creating pkg from {source_path} to {destination}")
+ # Create a list of ./<file> - noting xar takes care of <file>/**
+ file_list = get_relative_glob_list(source_path, "*")
+
+ subprocess.check_call(
+ [
+ xar_tool,
+ "--compression",
+ "none",
+ "-vcf",
+ destination,
+ *file_list,
+ ],
+ cwd=source_path,
+ )
+ print(f"Created PKG file to {destination}")
+ print(f"Created PKG size: {destination.stat().st_size // 1024}kb")
+
+
+def create_pkg(
+ source_app: Path,
+ output_pkg: Path,
+ mkbom_tool: Path,
+ xar_tool: Path,
+ cpio_tool: Path,
+):
+ """
+ Create a mac PKG installer from <source_app> to <output_pkg>
+
+ Args:
+ source_app: Path, source .app file/directory Path
+ output_pkg: Path, destination .pkg file
+ mkbom_tool: Path, mkbom tool Path
+ xar_tool: Path, xar tool Path
+        cpio_tool: Path, cpio tool Path
+ """
+
+ app_name = source_app.name.rsplit(".", maxsplit=1)[0]
+
+ with mozfile.TemporaryDirectory() as tmpdir:
+ root_path = Path(tmpdir) / "darwin/root"
+ flat_path = Path(tmpdir) / "darwin/flat"
+
+ # Create required directories
+ # TODO: Investigate Resources folder contents for other lproj?
+ (flat_path / "Resources/en.lproj").mkdir(parents=True, exist_ok=True)
+ (flat_path / f"{app_name}.pkg").mkdir(parents=True, exist_ok=True)
+ root_path.mkdir(parents=True, exist_ok=True)
+
+ # Copy files over
+ subprocess.check_call(
+ [
+ "cp",
+ "-R",
+ str(source_app),
+ str(root_path),
+ ]
+ )
+
+ # Count all files (innards + itself)
+ file_count = len(list(source_app.glob("**/*"))) + 1
+ print(f"Calculated source files count: {file_count}")
+ # Get package contents size
+ package_size = sum(f.stat().st_size for f in source_app.glob("**/*")) // 1024
+ print(f"Calculated source package size: {package_size}kb")
+
+ app_info = get_app_info_plist(source_app)
+ app_info["numberOfFiles"] = file_count
+ app_info["installKBytes"] = package_size
+ app_info["app_name"] = app_name
+ app_info["app_name_url_encoded"] = quote(app_name)
+
+        # This seems arbitrary; there might be another way of doing it,
+        # but Info.plist doesn't provide the simple version we need.
+ major_version = app_info["CFBundleShortVersionString"].split(".")[0]
+ app_info["simple_version"] = f"{major_version}.0.0"
+
+ pkg_info_tmpl = get_apple_template("PackageInfo.template")
+ pkg_info = pkg_info_tmpl.substitute(app_info)
+ save_text_file(pkg_info, flat_path / f"{app_name}.pkg/PackageInfo")
+
+        distribution_tmpl = get_apple_template("Distribution.template")
+        distribution = distribution_tmpl.substitute(app_info)
+ save_text_file(distribution, flat_path / "Distribution")
+
+ payload_path = flat_path / f"{app_name}.pkg/Payload"
+ create_payload(payload_path, root_path, cpio_tool)
+
+ bom_path = flat_path / f"{app_name}.pkg/Bom"
+ create_bom(bom_path, root_path, mkbom_tool)
+
+ xar_package_folder(flat_path, output_pkg, xar_tool)
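+
+
+# Example invocation (hypothetical paths; mkbom, xar and cpio are external
+# tools that must exist on the host, and xar requires absolute paths):
+#
+#   create_pkg(
+#       Path("dist/Thunderbird.app").resolve(),
+#       Path("dist/Thunderbird.pkg").resolve(),
+#       mkbom_tool=Path("/usr/bin/mkbom"),
+#       xar_tool=Path("/opt/local/bin/xar"),
+#       cpio_tool=Path("/usr/bin/cpio"),
+#   )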
diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/__init__.py
diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data
new file mode 100644
index 0000000000..fb7f0c4fc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/data/test_data
@@ -0,0 +1 @@
+test_data \ No newline at end of file
diff --git a/python/mozbuild/mozpack/test/python.ini b/python/mozbuild/mozpack/test/python.ini
new file mode 100644
index 0000000000..2b229de945
--- /dev/null
+++ b/python/mozbuild/mozpack/test/python.ini
@@ -0,0 +1,18 @@
+[DEFAULT]
+subsuite = mozbuild
+
+[test_archive.py]
+[test_chrome_flags.py]
+[test_chrome_manifest.py]
+[test_copier.py]
+[test_errors.py]
+[test_files.py]
+[test_manifests.py]
+[test_mozjar.py]
+[test_packager.py]
+[test_packager_formats.py]
+[test_packager_l10n.py]
+[test_packager_unpack.py]
+[test_path.py]
+[test_pkg.py]
+[test_unify.py]
diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py
new file mode 100644
index 0000000000..88cc0ece0c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+if len(sys.argv) != 4:
+ raise Exception("Usage: minify_js_verify <exitcode> <orig> <minified>")
+
+retcode = int(sys.argv[1])
+
+if retcode:
+ print("Error message", file=sys.stderr)
+
+sys.exit(retcode)
diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py
new file mode 100644
index 0000000000..3417f279df
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_archive.py
@@ -0,0 +1,197 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+import pytest
+from mozunit import main
+
+from mozpack.archive import (
+ DEFAULT_MTIME,
+ create_tar_bz2_from_files,
+ create_tar_from_files,
+ create_tar_gz_from_files,
+)
+from mozpack.files import GeneratedFile
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
+def file_hash(path):
+ h = hashlib.sha1()
+ with open(path, "rb") as fh:
+ while True:
+ data = fh.read(8192)
+ if not data:
+ break
+ h.update(data)
+
+ return h.hexdigest()
+
+
+class TestArchive(unittest.TestCase):
+ def _create_files(self, root):
+ files = {}
+ for i in range(10):
+ p = os.path.join(root, "file%02d" % i)
+ with open(p, "wb") as fh:
+ fh.write(b"file%02d" % i)
+ # Need to set permissions or umask may influence testing.
+ os.chmod(p, MODE_STANDARD)
+ files["file%02d" % i] = p
+
+ for i in range(10):
+ files["file%02d" % (i + 10)] = GeneratedFile(b"file%02d" % (i + 10))
+
+ return files
+
+ def _verify_basic_tarfile(self, tf):
+ self.assertEqual(len(tf.getmembers()), 20)
+
+ names = ["file%02d" % i for i in range(20)]
+ self.assertEqual(tf.getnames(), names)
+
+ for ti in tf.getmembers():
+ self.assertEqual(ti.uid, 0)
+ self.assertEqual(ti.gid, 0)
+ self.assertEqual(ti.uname, "")
+ self.assertEqual(ti.gname, "")
+ self.assertEqual(ti.mode, MODE_STANDARD)
+ self.assertEqual(ti.mtime, DEFAULT_MTIME)
+
+ @pytest.mark.xfail(
+ reason="ValueError is not thrown despite being provided directory."
+ )
+ def test_dirs_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ tp = os.path.join(d, "test.tar")
+ with open(tp, "wb") as fh:
+                with self.assertRaisesRegex(ValueError, "not a regular"):
+ create_tar_from_files(fh, {"test": d})
+ finally:
+ shutil.rmtree(d)
+
+ @pytest.mark.xfail(reason="ValueError is not thrown despite uid/gid being set.")
+ def test_setuid_setgid_refused(self):
+ d = tempfile.mkdtemp()
+ try:
+ uid = os.path.join(d, "setuid")
+ gid = os.path.join(d, "setgid")
+ with open(uid, "a"):
+ pass
+ with open(gid, "a"):
+ pass
+
+ os.chmod(uid, MODE_STANDARD | stat.S_ISUID)
+ os.chmod(gid, MODE_STANDARD | stat.S_ISGID)
+
+ tp = os.path.join(d, "test.tar")
+ with open(tp, "wb") as fh:
+                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
+                    create_tar_from_files(fh, {"test": uid})
+                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
+ create_tar_from_files(fh, {"test": gid})
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ tp = os.path.join(d, "test.tar")
+ with open(tp, "wb") as fh:
+ create_tar_from_files(fh, files)
+
+ # Output should be deterministic.
+ self.assertEqual(file_hash(tp), "01cd314e277f060e98c7de6c8ea57f96b3a2065c")
+
+ with tarfile.open(tp, "r") as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ @pytest.mark.xfail(reason="hash mismatch")
+ def test_executable_preserved(self):
+ d = tempfile.mkdtemp()
+ try:
+ p = os.path.join(d, "exec")
+ with open(p, "wb") as fh:
+                fh.write(b"#!/bin/bash\n")
+ os.chmod(p, MODE_STANDARD | stat.S_IXUSR)
+
+ tp = os.path.join(d, "test.tar")
+ with open(tp, "wb") as fh:
+ create_tar_from_files(fh, {"exec": p})
+
+ self.assertEqual(file_hash(tp), "357e1b81c0b6cfdfa5d2d118d420025c3c76ee93")
+
+ with tarfile.open(tp, "r") as tf:
+ m = tf.getmember("exec")
+ self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_gz_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, "test.tar.gz")
+ with open(gp, "wb") as fh:
+ create_tar_gz_from_files(fh, files)
+
+ self.assertEqual(file_hash(gp), "7c4da5adc5088cdf00911d5daf9a67b15de714b7")
+
+ with tarfile.open(gp, "r:gz") as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_tar_gz_name(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ gp = os.path.join(d, "test.tar.gz")
+ with open(gp, "wb") as fh:
+ create_tar_gz_from_files(fh, files, filename="foobar")
+
+ self.assertEqual(file_hash(gp), "721e00083c17d16df2edbddf40136298c06d0c49")
+
+ with tarfile.open(gp, "r:gz") as tf:
+ self._verify_basic_tarfile(tf)
+
+ finally:
+ shutil.rmtree(d)
+
+ def test_create_tar_bz2_basic(self):
+ d = tempfile.mkdtemp()
+ try:
+ files = self._create_files(d)
+
+ bp = os.path.join(d, "test.tar.bz2")
+ with open(bp, "wb") as fh:
+ create_tar_bz2_from_files(fh, files)
+
+ self.assertEqual(file_hash(bp), "eb5096d2fbb71df7b3d690001a6f2e82a5aad6a7")
+
+ with tarfile.open(bp, "r:bz2") as tf:
+ self._verify_basic_tarfile(tf)
+ finally:
+ shutil.rmtree(d)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py
new file mode 100644
index 0000000000..4f1a968dc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_flags.py
@@ -0,0 +1,150 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+
+from mozpack.chrome.flags import Flag, Flags, StringFlag, VersionFlag
+from mozpack.errors import ErrorMessage
+
+
+class TestFlag(unittest.TestCase):
+ def test_flag(self):
+ flag = Flag("flag")
+ self.assertEqual(str(flag), "")
+ self.assertTrue(flag.matches(False))
+ self.assertTrue(flag.matches("false"))
+ self.assertFalse(flag.matches("true"))
+ self.assertRaises(ErrorMessage, flag.add_definition, "flag=")
+ self.assertRaises(ErrorMessage, flag.add_definition, "flag=42")
+ self.assertRaises(ErrorMessage, flag.add_definition, "flag!=false")
+
+ flag.add_definition("flag=1")
+ self.assertEqual(str(flag), "flag=1")
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches("1"))
+ self.assertFalse(flag.matches("no"))
+
+ flag.add_definition("flag=true")
+ self.assertEqual(str(flag), "flag=true")
+ self.assertTrue(flag.matches(True))
+ self.assertTrue(flag.matches("true"))
+ self.assertFalse(flag.matches("0"))
+
+ flag.add_definition("flag=no")
+ self.assertEqual(str(flag), "flag=no")
+ self.assertTrue(flag.matches("false"))
+ self.assertFalse(flag.matches("1"))
+
+ flag.add_definition("flag")
+ self.assertEqual(str(flag), "flag")
+ self.assertFalse(flag.matches("false"))
+ self.assertTrue(flag.matches("true"))
+ self.assertFalse(flag.matches(False))
+
+ def test_string_flag(self):
+ flag = StringFlag("flag")
+ self.assertEqual(str(flag), "")
+ self.assertTrue(flag.matches("foo"))
+ self.assertRaises(ErrorMessage, flag.add_definition, "flag>=2")
+
+ flag.add_definition("flag=foo")
+ self.assertEqual(str(flag), "flag=foo")
+ self.assertTrue(flag.matches("foo"))
+ self.assertFalse(flag.matches("bar"))
+
+ flag.add_definition("flag=bar")
+ self.assertEqual(str(flag), "flag=foo flag=bar")
+ self.assertTrue(flag.matches("foo"))
+ self.assertTrue(flag.matches("bar"))
+ self.assertFalse(flag.matches("baz"))
+
+ flag = StringFlag("flag")
+ flag.add_definition("flag!=bar")
+ self.assertEqual(str(flag), "flag!=bar")
+ self.assertTrue(flag.matches("foo"))
+ self.assertFalse(flag.matches("bar"))
+
+ def test_version_flag(self):
+ flag = VersionFlag("flag")
+ self.assertEqual(str(flag), "")
+ self.assertTrue(flag.matches("1.0"))
+ self.assertRaises(ErrorMessage, flag.add_definition, "flag!=2")
+
+ flag.add_definition("flag=1.0")
+ self.assertEqual(str(flag), "flag=1.0")
+ self.assertTrue(flag.matches("1.0"))
+ self.assertFalse(flag.matches("2.0"))
+
+ flag.add_definition("flag=2.0")
+ self.assertEqual(str(flag), "flag=1.0 flag=2.0")
+ self.assertTrue(flag.matches("1.0"))
+ self.assertTrue(flag.matches("2.0"))
+ self.assertFalse(flag.matches("3.0"))
+
+ flag = VersionFlag("flag")
+ flag.add_definition("flag>=2.0")
+ self.assertEqual(str(flag), "flag>=2.0")
+ self.assertFalse(flag.matches("1.0"))
+ self.assertTrue(flag.matches("2.0"))
+ self.assertTrue(flag.matches("3.0"))
+
+ flag.add_definition("flag<1.10")
+ self.assertEqual(str(flag), "flag>=2.0 flag<1.10")
+ self.assertTrue(flag.matches("1.0"))
+ self.assertTrue(flag.matches("1.9"))
+ self.assertFalse(flag.matches("1.10"))
+ self.assertFalse(flag.matches("1.20"))
+ self.assertTrue(flag.matches("2.0"))
+ self.assertTrue(flag.matches("3.0"))
+ self.assertRaises(Exception, flag.add_definition, "flag<")
+ self.assertRaises(Exception, flag.add_definition, "flag>")
+ self.assertRaises(Exception, flag.add_definition, "flag>=")
+ self.assertRaises(Exception, flag.add_definition, "flag<=")
+ self.assertRaises(Exception, flag.add_definition, "flag!=1.0")
+
+
+class TestFlags(unittest.TestCase):
+ def setUp(self):
+ self.flags = Flags(
+ "contentaccessible=yes",
+ "appversion>=3.5",
+ "application=foo",
+ "application=bar",
+ "appversion<2.0",
+ "platform",
+ "abi!=Linux_x86-gcc3",
+ )
+
+ def test_flags_str(self):
+ self.assertEqual(
+ str(self.flags),
+ "contentaccessible=yes "
+ + "appversion>=3.5 appversion<2.0 application=foo "
+ + "application=bar platform abi!=Linux_x86-gcc3",
+ )
+
+ def test_flags_match_unset(self):
+ self.assertTrue(self.flags.match(os="WINNT"))
+
+ def test_flags_match(self):
+ self.assertTrue(self.flags.match(application="foo"))
+ self.assertFalse(self.flags.match(application="qux"))
+
+ def test_flags_match_different(self):
+ self.assertTrue(self.flags.match(abi="WINNT_x86-MSVC"))
+ self.assertFalse(self.flags.match(abi="Linux_x86-gcc3"))
+
+ def test_flags_match_version(self):
+ self.assertTrue(self.flags.match(appversion="1.0"))
+ self.assertTrue(self.flags.match(appversion="1.5"))
+ self.assertFalse(self.flags.match(appversion="2.0"))
+ self.assertFalse(self.flags.match(appversion="3.0"))
+ self.assertTrue(self.flags.match(appversion="3.5"))
+ self.assertTrue(self.flags.match(appversion="3.10"))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py
new file mode 100644
index 0000000000..c1d5826bbc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py
@@ -0,0 +1,176 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.chrome.manifest import (
+ MANIFESTS_TYPES,
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestCategory,
+ ManifestComponent,
+ ManifestContent,
+ ManifestContract,
+ ManifestInterfaces,
+ ManifestLocale,
+ ManifestOverlay,
+ ManifestOverride,
+ ManifestResource,
+ ManifestSkin,
+ ManifestStyle,
+ parse_manifest,
+ parse_manifest_line,
+)
+from mozpack.errors import AccumulatedErrors, errors
+from test_errors import TestErrors
+
+
+class TestManifest(unittest.TestCase):
+ def test_parse_manifest(self):
+ manifest = [
+ "content global content/global/",
+ "content global content/global/ application=foo application=bar"
+ + " platform",
+ "locale global en-US content/en-US/",
+ "locale global en-US content/en-US/ application=foo",
+ "skin global classic/1.0 content/skin/classic/",
+ "skin global classic/1.0 content/skin/classic/ application=foo"
+ + " os=WINNT",
+ "",
+ "manifest pdfjs/chrome.manifest",
+ "resource gre-resources toolkit/res/",
+ "override chrome://global/locale/netError.dtd"
+ + " chrome://browser/locale/netError.dtd",
+ "# Comment",
+ "component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js",
+ "contract @mozilla.org/foo;1" + " {b2bba4df-057d-41ea-b6b1-94a10a8ede68}",
+ "interfaces foo.xpt",
+ "binary-component bar.so",
+ "category command-line-handler m-browser"
+ + " @mozilla.org/browser/clh;1"
+ + " application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+ "style chrome://global/content/viewSource.xul" + " chrome://browser/skin/",
+ "overlay chrome://global/content/viewSource.xul"
+ + " chrome://browser/content/viewSourceOverlay.xul",
+ ]
+ other_manifest = ["content global content/global/"]
+ expected_result = [
+ ManifestContent("", "global", "content/global/"),
+ ManifestContent(
+ "",
+ "global",
+ "content/global/",
+ "application=foo",
+ "application=bar",
+ "platform",
+ ),
+ ManifestLocale("", "global", "en-US", "content/en-US/"),
+ ManifestLocale("", "global", "en-US", "content/en-US/", "application=foo"),
+ ManifestSkin("", "global", "classic/1.0", "content/skin/classic/"),
+ ManifestSkin(
+ "",
+ "global",
+ "classic/1.0",
+ "content/skin/classic/",
+ "application=foo",
+ "os=WINNT",
+ ),
+ Manifest("", "pdfjs/chrome.manifest"),
+ ManifestResource("", "gre-resources", "toolkit/res/"),
+ ManifestOverride(
+ "",
+ "chrome://global/locale/netError.dtd",
+ "chrome://browser/locale/netError.dtd",
+ ),
+ ManifestComponent("", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}", "foo.js"),
+ ManifestContract(
+ "", "@mozilla.org/foo;1", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}"
+ ),
+ ManifestInterfaces("", "foo.xpt"),
+ ManifestBinaryComponent("", "bar.so"),
+ ManifestCategory(
+ "",
+ "command-line-handler",
+ "m-browser",
+ "@mozilla.org/browser/clh;1",
+ "application=" + "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+ ),
+ ManifestStyle(
+ "", "chrome://global/content/viewSource.xul", "chrome://browser/skin/"
+ ),
+ ManifestOverlay(
+ "",
+ "chrome://global/content/viewSource.xul",
+ "chrome://browser/content/viewSourceOverlay.xul",
+ ),
+ ]
+ with mozunit.MockedOpen(
+ {
+ "manifest": "\n".join(manifest),
+ "other/manifest": "\n".join(other_manifest),
+ }
+ ):
+ # Ensure we have tests for all types of manifests.
+ self.assertEqual(
+ set(type(e) for e in expected_result), set(MANIFESTS_TYPES.values())
+ )
+ self.assertEqual(
+ list(parse_manifest(os.curdir, "manifest")), expected_result
+ )
+ self.assertEqual(
+ list(parse_manifest(os.curdir, "other/manifest")),
+ [ManifestContent("other", "global", "content/global/")],
+ )
+
+ def test_manifest_rebase(self):
+ m = parse_manifest_line("chrome", "content global content/global/")
+ m = m.rebase("")
+ self.assertEqual(str(m), "content global chrome/content/global/")
+ m = m.rebase("chrome")
+ self.assertEqual(str(m), "content global content/global/")
+
+ m = parse_manifest_line("chrome/foo", "content global content/global/")
+ m = m.rebase("chrome")
+ self.assertEqual(str(m), "content global foo/content/global/")
+ m = m.rebase("chrome/foo")
+ self.assertEqual(str(m), "content global content/global/")
+
+ m = parse_manifest_line("modules/foo", "resource foo ./")
+ m = m.rebase("modules")
+ self.assertEqual(str(m), "resource foo foo/")
+ m = m.rebase("modules/foo")
+ self.assertEqual(str(m), "resource foo ./")
+
+ m = parse_manifest_line("chrome", "content browser browser/content/")
+ m = m.rebase("chrome/browser").move("jar:browser.jar!").rebase("")
+ self.assertEqual(str(m), "content browser jar:browser.jar!/content/")
+
+
+class TestManifestErrors(TestErrors, unittest.TestCase):
+ def test_parse_manifest_errors(self):
+ manifest = [
+ "skin global classic/1.0 content/skin/classic/ platform",
+ "",
+ "binary-component bar.so",
+ "unsupported foo",
+ ]
+ with mozunit.MockedOpen({"manifest": "\n".join(manifest)}):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ list(parse_manifest(os.curdir, "manifest"))
+ out = self.get_output()
+ # Expecting 2 errors
+ self.assertEqual(len(out), 2)
+ path = os.path.abspath("manifest")
+ # First on line 1
+ self.assertTrue(out[0].startswith("error: %s:1: " % path))
+ # Second on line 4
+ self.assertTrue(out[1].startswith("error: %s:4: " % path))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py
new file mode 100644
index 0000000000..60ebd2c1e9
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_copier.py
@@ -0,0 +1,548 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import stat
+import unittest
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier, FileRegistry, FileRegistrySubtree, Jarrer
+from mozpack.errors import ErrorMessage
+from mozpack.files import ExistingFile, GeneratedFile
+from mozpack.mozjar import JarReader
+from mozpack.test.test_files import MatchTestTemplate, MockDest, TestWithTmpDir
+
+
+class BaseTestFileRegistry(MatchTestTemplate):
+ def add(self, path):
+ self.registry.add(path, GeneratedFile(path))
+
+ def do_check(self, pattern, result):
+ self.checked = True
+ if result:
+ self.assertTrue(self.registry.contains(pattern))
+ else:
+ self.assertFalse(self.registry.contains(pattern))
+ self.assertEqual(self.registry.match(pattern), result)
+
+ def do_test_file_registry(self, registry):
+ self.registry = registry
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ bar = GeneratedFile(b"bar")
+ self.registry.add("bar", bar)
+ self.assertEqual(self.registry.paths(), ["foo", "bar"])
+ self.assertEqual(self.registry["bar"], bar)
+
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo", GeneratedFile(b"foo2")
+ )
+
+ self.assertRaises(ErrorMessage, self.registry.remove, "qux")
+
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
+ )
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar/baz", GeneratedFile(b"foobar")
+ )
+
+ self.assertEqual(self.registry.paths(), ["foo", "bar"])
+
+ self.registry.remove("foo")
+ self.assertEqual(self.registry.paths(), ["bar"])
+ self.registry.remove("bar")
+ self.assertEqual(self.registry.paths(), [])
+
+ self.prepare_match_test()
+ self.do_match_test()
+ self.assertTrue(self.checked)
+ self.assertEqual(
+ self.registry.paths(),
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+ self.registry.remove("foo/qux")
+ self.assertEqual(self.registry.paths(), ["bar", "foo/bar", "foo/baz"])
+
+ self.registry.add("foo/qux", GeneratedFile(b"fooqux"))
+ self.assertEqual(
+ self.registry.paths(), ["bar", "foo/bar", "foo/baz", "foo/qux"]
+ )
+ self.registry.remove("foo/b*")
+ self.assertEqual(self.registry.paths(), ["bar", "foo/qux"])
+
+ self.assertEqual([f for f, c in self.registry], ["bar", "foo/qux"])
+ self.assertEqual(len(self.registry), 2)
+
+ self.add("foo/.foo")
+ self.assertTrue(self.registry.contains("foo/.foo"))
+
+ def do_test_registry_paths(self, registry):
+ self.registry = registry
+
+ # Can't add a file if it requires a directory in place of a
+ # file we also require.
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
+ )
+
+ # Can't add a file if we already have a directory there.
+ self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Bump the count of things that require bar/ to 2.
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Drop the count of things that require bar/ to 1.
+ self.registry.remove("bar/baz")
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Drop the count of things that require bar/ to 0.
+ self.registry.remove("bar/zot")
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+
+
+class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
+ def test_partial_paths(self):
+ cases = {
+ "foo/bar/baz/zot": ["foo/bar/baz", "foo/bar", "foo"],
+ "foo/bar": ["foo"],
+ "bar": [],
+ }
+ reg = FileRegistry()
+ for path, parts in six.iteritems(cases):
+ self.assertEqual(reg._partial_paths(path), parts)
+
+ def test_file_registry(self):
+ self.do_test_file_registry(FileRegistry())
+
+ def test_registry_paths(self):
+ self.do_test_registry_paths(FileRegistry())
+
+ def test_required_directories(self):
+ self.registry = FileRegistry()
+
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.add("bar/zap/zot", GeneratedFile(b"barzapzot"))
+ self.assertEqual(self.registry.required_directories(), {"bar", "bar/zap"})
+
+ self.registry.remove("bar/zap/zot")
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.remove("bar/baz")
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.remove("bar/zot")
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add("x/y/z", GeneratedFile(b"xyz"))
+ self.assertEqual(self.registry.required_directories(), {"x", "x/y"})
+
+
+class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase):
+ def test_file_registry_subtree_base(self):
+ registry = FileRegistry()
+ self.assertEqual(registry, FileRegistrySubtree("", registry))
+ self.assertNotEqual(registry, FileRegistrySubtree("base", registry))
+
+ def create_registry(self):
+ registry = FileRegistry()
+ registry.add("foo/bar", GeneratedFile(b"foo/bar"))
+ registry.add("baz/qux", GeneratedFile(b"baz/qux"))
+ return FileRegistrySubtree("base/root", registry)
+
+ def test_file_registry_subtree(self):
+ self.do_test_file_registry(self.create_registry())
+
+ def test_registry_paths_subtree(self):
+ self.do_test_registry_paths(self.create_registry())
+
+
+class TestFileCopier(TestWithTmpDir):
+ def all_dirs(self, base):
+ all_dirs = set()
+ for root, dirs, files in os.walk(base):
+ if not dirs:
+ all_dirs.add(mozpath.relpath(root, base))
+ return all_dirs
+
+ def all_files(self, base):
+ all_files = set()
+ for root, dirs, files in os.walk(base):
+ for f in files:
+ all_files.add(mozpath.join(mozpath.relpath(root, base), f))
+ return all_files
+
+ def test_file_copier(self):
+ copier = FileCopier()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(
+ self.all_dirs(self.tmpdir), set(["foo/deep/nested/directory", "qux"])
+ )
+
+ self.assertEqual(
+ result.updated_files,
+ set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
+ )
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
+ self.assertEqual(
+ result.removed_files,
+ set(
+ self.tmppath(p)
+ for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file")
+ ),
+ )
+
+ def test_symlink_directory_replaced(self):
+ """Directory symlinks in destination are replaced if they need to be
+ real directories."""
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+ link = self.tmppath("dest/foo/bar")
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest)
+
+ st = os.lstat(link)
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ def test_remove_unaccounted_directory_symlinks(self):
+ """Directory symlinks in destination that are not in the way are
+ deleted according to remove_unaccounted and
+ remove_all_directory_symlinks.
+ """
+        if not self.symlink_supported:
+            raise unittest.SkipTest("symlink not supported")
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+
+ os.mkdir(self.tmppath("dest/zot"))
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ # If not remove_unaccounted but remove_empty_directories, then
+ # the symlinked directory remains (as does its containing
+ # directory).
+ result = copier.copy(
+ dest,
+ remove_unaccounted=False,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(link)
+ self.assertTrue(stat.S_ISLNK(st.st_mode))
+ self.assertFalse(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ # If remove_unaccounted but not remove_empty_directories, then
+ # only the symlinked directory is removed.
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=False,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(self.tmppath("dest/zot"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set())
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar", "zot"]))
+
+ # If remove_unaccounted and remove_empty_directories, then
+ # both the symlink and its containing directory are removed.
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set([self.tmppath("dest/zot")]))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ def test_permissions(self):
+ """Ensure files without write permission can be deleted."""
+ with open(self.tmppath("dummy"), "a"):
+ pass
+
+ p = self.tmppath("no_perms")
+ with open(p, "a"):
+ pass
+
+ # Make file and directory unwritable. Reminder: making a directory
+ # unwritable prevents modifications (including deletes) from the list
+ # of files in that directory.
+ os.chmod(p, 0o400)
+ os.chmod(self.tmpdir, 0o400)
+
+ copier = FileCopier()
+ copier.add("dummy", GeneratedFile(b"content"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(result.removed_files_count, 1)
+ self.assertFalse(os.path.exists(p))
+
+ def test_no_remove(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set([self.tmppath("emptydir")]))
+
+ def test_no_remove_empty_directories(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(
+ self.tmpdir, remove_unaccounted=False, remove_empty_directories=False
+ )
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["emptydir", "populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ def test_optional_exists_creates_unneeded_directory(self):
+ """Demonstrate that a directory not strictly required, but specified
+ as the path to an optional file, will be unnecessarily created.
+
+        This behaviour is wrong; fixing it is tracked by Bug 972432,
+ and this test exists to guard against unexpected changes in
+ behaviour.
+ """
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar", ExistingFile(required=False))
+
+ result = copier.copy(dest)
+
+ st = os.lstat(self.tmppath("dest/foo"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ # What's worse, we have no record that dest was created.
+ self.assertEqual(len(result.updated_files), 0)
+
+ # But we do have an erroneous record of an optional file
+ # existing when it does not.
+ self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files)
+
+ def test_remove_unaccounted_file_registry(self):
+ """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+ copier.add("foo/bar/qux", GeneratedFile(b"foobarqux"))
+ copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+ copier.add("foo/toto/tata", GeneratedFile(b"footototata"))
+
+ os.makedirs(os.path.join(dest, "bar"))
+ with open(os.path.join(dest, "bar", "bar"), "w") as fh:
+ fh.write("barbar")
+ os.makedirs(os.path.join(dest, "foo", "toto"))
+ with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
+ fh.write("foototototo")
+
+ result = copier.copy(dest, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(
+ self.all_dirs(dest), {"foo/bar", "foo/hoge", "foo/toto", "bar"}
+ )
+
+ copier2 = FileCopier()
+ copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+
+ # We expect only files copied from the first copier to be removed,
+ # not the extra file that was there beforehand.
+ result = copier2.copy(dest, remove_unaccounted=copier)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier2.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
+ self.assertEqual(result.updated_files, {self.tmppath("dest/foo/hoge/fuga")})
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(
+ result.removed_files,
+ {
+ self.tmppath(p)
+ for p in ("dest/foo/bar/baz", "dest/foo/bar/qux", "dest/foo/toto/tata")
+ },
+ )
+ self.assertEqual(result.removed_directories, {self.tmppath("dest/foo/bar")})
+
+
+class TestJarrer(unittest.TestCase):
+ def check_jar(self, dest, copier):
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], copier.paths())
+ for f in jar:
+ self.assertEqual(f.uncompressed_data.read(), copier[f.filename].content)
+
+ def test_jarrer(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("test")
+ copier.add("test", GeneratedFile(b"replaced-content"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
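+        # Preloaded entries are moved to the front of the archive, and the
+        # reader records the last of them; Gecko historically uses this so
+        # the whole preloaded region can be read in one pass at startup.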
+ preloaded = ["qux/bar", "bar"]
+ copier.preload(preloaded)
+ copier.copy(dest)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertEqual(
+ [f.filename for f in jar],
+ preloaded + [p for p in copier.paths() if p not in preloaded],
+ )
+ self.assertEqual(jar.last_preloaded, preloaded[-1])
+
+ def test_jarrer_compress(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"ffffff"))
+ copier.add("foo/qux", GeneratedFile(b"ffffff"), compress=False)
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertTrue(jar["foo/bar"].compressed)
+ self.assertFalse(jar["foo/qux"].compressed)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py
new file mode 100644
index 0000000000..411b1b54c3
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_errors.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import unittest
+
+import mozunit
+import six
+
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+
+
+class TestErrors(object):
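+    """Fixture that redirects mozpack.errors output into an in-memory
+    buffer so tests can assert on the emitted warnings and errors."""
+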
+ def setUp(self):
+ errors.out = six.moves.cStringIO()
+ errors.ignore_errors(False)
+
+ def tearDown(self):
+ errors.out = sys.stderr
+
+ def get_output(self):
+        return [line.strip() for line in errors.out.getvalue().splitlines()]
+
+
+class TestErrorsImpl(TestErrors, unittest.TestCase):
+ def test_plain_error(self):
+ errors.warn("foo")
+ self.assertRaises(ErrorMessage, errors.error, "foo")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo"])
+
+ def test_ignore_errors(self):
+ errors.ignore_errors()
+ errors.warn("foo")
+ errors.error("bar")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo", "warning: bar"])
+
+ def test_no_error(self):
+ with errors.accumulate():
+ errors.warn("1")
+
+ def test_simple_error(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("1")
+ self.assertEqual(self.get_output(), ["error: 1"])
+
+ def test_error_loop(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ for i in range(3):
+ errors.error("%d" % i)
+ self.assertEqual(self.get_output(), ["error: 0", "error: 1", "error: 2"])
+
+ def test_multiple_errors(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("foo")
+ for i in range(3):
+ if i == 2:
+ errors.warn("%d" % i)
+ else:
+ errors.error("%d" % i)
+ errors.error("bar")
+ self.assertEqual(
+ self.get_output(),
+ ["error: foo", "error: 0", "error: 1", "warning: 2", "error: bar"],
+ )
+
+ def test_errors_context(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ self.assertEqual(errors.get_context(), None)
+ with errors.context("foo", 1):
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("a")
+ with errors.context("bar", 2):
+ self.assertEqual(errors.get_context(), ("bar", 2))
+ errors.error("b")
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("c")
+ self.assertEqual(
+ self.get_output(),
+ [
+ "error: foo:1: a",
+ "error: bar:2: b",
+ "error: foo:1: c",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py
new file mode 100644
index 0000000000..1c86f2e0cc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_files.py
@@ -0,0 +1,1362 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensure_bytes, ensureParentDir
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import (
+ AbsoluteSymlinkFile,
+ ComposedFinder,
+ DeflatedFile,
+ Dest,
+ ExistingFile,
+ ExtractedTarFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ HardlinkFile,
+ JarFinder,
+ ManifestFile,
+ MercurialFile,
+ MercurialRevisionFinder,
+ MinifiedCommentStripped,
+ MinifiedJavaScript,
+ PreprocessedFile,
+ TarFinder,
+)
+
+# We don't have hglib installed everywhere.
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+import os
+import platform
+import random
+import sys
+import tarfile
+import unittest
+from io import BytesIO
+from tempfile import mkdtemp
+
+import mozfile
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestLocale,
+ ManifestOverride,
+ ManifestResource,
+)
+from mozpack.mozjar import JarReader, JarWriter
+
+
+class TestWithTmpDir(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ self.symlink_supported = False
+ self.hardlink_supported = False
+
+ # See comment in mozpack.files.AbsoluteSymlinkFile
+ if hasattr(os, "symlink") and platform.system() != "Windows":
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+            try:
+                os.symlink(dummy_path, self.tmppath("dummy_symlink"))
+                os.remove(self.tmppath("dummy_symlink"))
+                # Only claim support if creating the link actually succeeded.
+                self.symlink_supported = True
+            except EnvironmentError:
+                pass
+            finally:
+                os.remove(dummy_path)
+
+ if hasattr(os, "link"):
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+            try:
+                os.link(dummy_path, self.tmppath("dummy_hardlink"))
+                os.remove(self.tmppath("dummy_hardlink"))
+                # Only claim support if creating the link actually succeeded.
+                self.hardlink_supported = True
+            except EnvironmentError:
+                pass
+            finally:
+                os.remove(dummy_path)
+
+ def tearDown(self):
+ mozfile.rmtree(self.tmpdir)
+
+ def tmppath(self, relpath):
+ return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
+class MockDest(BytesIO, Dest):
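+    """In-memory Dest implementation for tests.
+
+    It mirrors the mode-switching behaviour of mozpack.files.Dest: the
+    first read after writing rewinds to the start, and the first write
+    after reading truncates the buffer.
+    """
+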
+ def __init__(self):
+ BytesIO.__init__(self)
+ self.mode = None
+
+ def read(self, length=-1):
+ if self.mode != "r":
+ self.seek(0)
+ self.mode = "r"
+ return BytesIO.read(self, length)
+
+ def write(self, data):
+ if self.mode != "w":
+ self.seek(0)
+ self.truncate(0)
+ self.mode = "w"
+ return BytesIO.write(self, data)
+
+ def exists(self):
+ return True
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+
+
+class DestNoWrite(Dest):
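+    """Dest that raises on any write attempt, letting tests assert that a
+    copy was skipped entirely."""
+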
+ def write(self, data):
+ raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+ def test_dest(self):
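+        # Dest switches modes transparently: consecutive writes append,
+        # a read after writing starts from the beginning, and a write
+        # after reading (or after close) starts a fresh file.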
+ dest = Dest(self.tmppath("dest"))
+ self.assertFalse(dest.exists())
+ dest.write(b"foo")
+ self.assertTrue(dest.exists())
+ dest.write(b"foo")
+ self.assertEqual(dest.read(4), b"foof")
+ self.assertEqual(dest.read(), b"oo")
+ self.assertEqual(dest.read(), b"")
+ dest.write(b"bar")
+ self.assertEqual(dest.read(4), b"bar")
+ dest.close()
+ self.assertEqual(dest.read(), b"bar")
+ dest.write(b"foo")
+ dest.close()
+ dest.write(b"qux")
+ self.assertEqual(dest.read(), b"qux")
+
+
+rand = bytes(
+ random.choice(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
+ for i in six.moves.xrange(131597)
+)
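+# Contents written in sequence to a single destination by the tests below,
+# chosen to exercise the interesting copy cases: different content,
+# same-sized different content, identical content, and long content.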
+samples = [
+ b"",
+ b"test",
+ b"fooo",
+ b"same",
+ b"same",
+ b"Different and longer",
+ rand,
+ rand,
+ rand[:-1] + b"_",
+ b"test",
+]
+
+
+class TestFile(TestWithTmpDir):
+ def test_file(self):
+ """
+ Check that File.copy yields the proper content in the destination file
+ in all situations that trigger different code paths:
+ - different content
+ - different content of the same size
+ - same content
+ - long content
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ # Ensure the destination file, when it exists, is older than the
+ # source
+ if os.path.exists(dest):
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+ self.assertEqual(content, f.open().read())
+ self.assertEqual(content, f.open().read())
+
+ def test_file_dest(self):
+ """
+ Similar to test_file, but for a destination object instead of
+ a destination file. This ensures the destination object is being
+ used properly by File.copy, ensuring that other subclasses of Dest
+ will work.
+ """
+ src = self.tmppath("src")
+ dest = MockDest()
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, dest.getvalue())
+
+ def test_file_open(self):
+ """
+ Test whether File.open returns an appropriately reset file object.
+ """
+ src = self.tmppath("src")
+ content = b"".join(samples)
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+
+ f = File(src)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_file_no_write(self):
+ """
+ Test various conditions where File.copy is expected not to write
+ in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"test")
+
+ # Initial copy
+ f = File(src)
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is newer, but with the same content, no copy
+ # should occur
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"fooo")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ f.copy(dest, skip_if_older=False)
+ self.assertEqual(b"fooo", open(dest, "rb").read())
+
+
+class TestAbsoluteSymlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ AbsoluteSymlinkFile("/foo")
+
+        with self.assertRaisesRegex(ValueError, "Symlink target not absolute"):
+ AbsoluteSymlinkFile("./foo")
+
+ def test_symlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = AbsoluteSymlinkFile(source)
+ dest = self.tmppath("symlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_symlink(self):
+ # If symlinks are supported, an existing file should be replaced by a
+ # symlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = AbsoluteSymlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "source")
+
+ def test_replace_symlink(self):
+        if not self.symlink_supported:
+            raise unittest.SkipTest("symlink not supported")
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.symlink(self.tmppath("bad"), dest)
+ self.assertTrue(os.path.islink(dest))
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertTrue(s.copy(dest))
+
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ def test_noop(self):
+        if not hasattr(os, "symlink") or sys.platform == "win32":
+            raise unittest.SkipTest("symlink not supported")
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+
+class TestHardlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ HardlinkFile("/foo")
+ HardlinkFile("./foo")
+
+ def test_hardlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = HardlinkFile(source)
+ dest = self.tmppath("hardlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_hardlink(self):
+        # If hardlinks are supported, an existing file should be replaced by
+        # a hardlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = HardlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "source")
+
+ def test_replace_hardlink(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+ def test_noop(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+
+class TestPreprocessedFile(TestWithTmpDir):
+ def test_preprocess(self):
+ """
+ Test that copying the file invokes the preprocessor
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
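+        # With marker="#", directives such as "#ifdef FOO" / "#endif" are
+        # recognized, and defines={"FOO": True} lets the guarded block
+        # through to the output.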
+ f = PreprocessedFile(src, depfile_path=None, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ def test_preprocess_file_no_write(self):
+ """
+ Test various conditions where PreprocessedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ depfile = self.tmppath("depfile")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ # Initial copy
+ f = PreprocessedFile(
+ src, depfile_path=depfile, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ # Ensure subsequent copies won't trigger writes
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\nfooo\n#endif")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ self.assertTrue(f.copy(dest, skip_if_older=False))
+ self.assertEqual(b"fooo\n", open(dest, "rb").read())
+
+ def test_preprocess_file_dependencies(self):
+ """
+ Test that the preprocess runs if the dependencies of the source change
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ incl = self.tmppath("incl")
+ deps = self.tmppath("src.pp")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ with open(incl, "wb") as tmp:
+ tmp.write(b"foo bar")
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=deps, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ # Update the source so it #includes the include file.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#include incl\n")
+ time = os.path.getmtime(dest) + 1
+ os.utime(src, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"foo bar", open(dest, "rb").read())
+
+ # If one of the dependencies changes, the file should be updated. The
+ # mtime of the dependency is set after the destination file, to avoid
+ # both files having the same time.
+ with open(incl, "wb") as tmp:
+ tmp.write(b"quux")
+ time = os.path.getmtime(dest) + 1
+ os.utime(incl, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"quux", open(dest, "rb").read())
+
+ # Perform one final copy to confirm that we don't run the preprocessor
+ # again. We update the mtime of the destination so it's newer than the
+        # input files. This would "just work" if we weren't adjusting the
+        # mtimes by hand throughout this test.
+ time = os.path.getmtime(incl) + 1
+ os.utime(dest, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+
+ def test_replace_symlink(self):
+ """
+ Test that if the destination exists, and is a symlink, the target of
+ the symlink is not overwritten by the preprocessor output.
+ """
+        if not self.symlink_supported:
+            raise unittest.SkipTest("symlink not supported")
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+ pp_source = self.tmppath("pp_in")
+ deps = self.tmppath("deps")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ self.assertTrue(os.path.islink(dest))
+
+ with open(pp_source, "wb") as tmp:
+ tmp.write(b"#define FOO\nPREPROCESSED")
+
+ f = PreprocessedFile(
+ pp_source, depfile_path=deps, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"PREPROCESSED", open(dest, "rb").read())
+ self.assertFalse(os.path.islink(dest))
+ self.assertEqual(b"", open(source, "rb").read())
+
+
+class TestExistingFile(TestWithTmpDir):
+ def test_required_missing_dest(self):
+        with self.assertRaisesRegex(ErrorMessage, "Required existing file"):
+ f = ExistingFile(required=True)
+ f.copy(self.tmppath("dest"))
+
+ def test_required_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=True)
+ f.copy(p)
+
+ def test_optional_missing_dest(self):
+ f = ExistingFile(required=False)
+ f.copy(self.tmppath("dest"))
+
+ def test_optional_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=False)
+ f.copy(p)
+
+
+class TestGeneratedFile(TestWithTmpDir):
+ def test_generated_file(self):
+ """
+ Check that GeneratedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ f = GeneratedFile(content)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+
+ def test_generated_file_open(self):
+ """
+ Test whether GeneratedFile.open returns an appropriately reset file
+ object.
+ """
+ content = b"".join(samples)
+ f = GeneratedFile(content)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_generated_file_no_write(self):
+ """
+ Test various conditions where GeneratedFile.copy is expected not to
+ write in the destination file.
+ """
+ dest = self.tmppath("dest")
+
+ # Initial copy
+ f = GeneratedFile(b"test")
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a new instance with the same content, no copy should occur
+ f = GeneratedFile(b"test")
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = GeneratedFile(b"fooo")
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ def test_generated_file_function(self):
+ """
+ Test GeneratedFile behavior with functions.
+ """
+ dest = self.tmppath("dest")
+ data = {
+ "num_calls": 0,
+ }
+
+ def content():
+ data["num_calls"] += 1
+ return b"content"
+
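+        # A callable content is invoked lazily on first use and its result
+        # cached; assigning to .content replaces the cached value.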
+ f = GeneratedFile(content)
+ self.assertEqual(data["num_calls"], 0)
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", open(dest, "rb").read())
+ self.assertEqual(b"content", f.open().read())
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(len(b"content"), f.size())
+ self.assertEqual(data["num_calls"], 1)
+
+ f.content = b"modified"
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"modified", open(dest, "rb").read())
+ self.assertEqual(b"modified", f.open().read())
+ self.assertEqual(b"modified", f.read())
+ self.assertEqual(len(b"modified"), f.size())
+
+ f.content = content
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(data["num_calls"], 2)
+
+
+class TestDeflatedFile(TestWithTmpDir):
+ def test_deflated_file(self):
+ """
+ Check that DeflatedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ contents = {}
+ with JarWriter(src) as jar:
+ for content in samples:
+ name = "".join(
+ random.choice(
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ )
+ for i in range(8)
+ )
+ jar.add(name, content, compress=True)
+ contents[name] = content
+
+ for j in JarReader(src):
+ f = DeflatedFile(j)
+ f.copy(dest)
+ self.assertEqual(contents[j.filename], open(dest, "rb").read())
+
+ def test_deflated_file_open(self):
+ """
+ Test whether DeflatedFile.open returns an appropriately reset file
+ object.
+ """
+ src = self.tmppath("src.jar")
+ content = b"".join(samples)
+ with JarWriter(src) as jar:
+ jar.add("content", content)
+
+ f = DeflatedFile(JarReader(src)["content"])
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_deflated_file_no_write(self):
+ """
+ Test various conditions where DeflatedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ with JarWriter(src) as jar:
+ jar.add("test", b"test")
+ jar.add("test2", b"test")
+ jar.add("fooo", b"fooo")
+
+ jar = JarReader(src)
+ # Initial copy
+ f = DeflatedFile(jar["test"])
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a different file with the same content, no copy should
+ # occur
+ f = DeflatedFile(jar["test2"])
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = DeflatedFile(jar["fooo"])
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestManifestFile(TestWithTmpDir):
+ def test_manifest_file(self):
+ f = ManifestFile("chrome")
+ f.add(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ f.add(ManifestResource("chrome", "gre-resources", "toolkit/res/"))
+ f.add(ManifestResource("chrome/pdfjs", "pdfjs", "./"))
+ f.add(ManifestContent("chrome/pdfjs", "pdfjs", "pdfjs"))
+ f.add(ManifestLocale("chrome", "browser", "en-US", "en-US/locale/browser/"))
+
+ f.copy(self.tmppath("chrome.manifest"))
+ self.assertEqual(
+ open(self.tmppath("chrome.manifest")).readlines(),
+ [
+ "content global toolkit/content/global/\n",
+ "resource gre-resources toolkit/res/\n",
+ "resource pdfjs pdfjs/\n",
+ "content pdfjs pdfjs/pdfjs\n",
+ "locale browser en-US en-US/locale/browser/\n",
+ ],
+ )
+
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("", "global", "toolkit/content/global/"),
+ )
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestOverride(
+ "chrome",
+ "chrome://global/locale/netError.dtd",
+ "chrome://browser/locale/netError.dtd",
+ ),
+ )
+
+ f.remove(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("chrome", "global", "toolkit/content/global/"),
+ )
+
+ f.copy(self.tmppath("chrome.manifest"))
+ content = open(self.tmppath("chrome.manifest"), "rb").read()
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+
+# Compiled typelib for the following IDL:
+# interface foo;
+# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
+# interface bar {
+# void bar(in foo f);
+# };
+# We need to make this [scriptable] so it doesn't get deleted from the
+# typelib. We don't need to make the foo interfaces below [scriptable],
+# because they will be automatically included by virtue of being an
+# argument to a method of |bar|.
+bar_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C"
+ + b"\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+ + b"\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F"
+ + b"\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00"
+ + b"\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00"
+ + b"\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
+# interface foo {
+# void foo();
+# };
+foo_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA"
+ + b"\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
+# interface foo {
+# void foo();
+# };
+foo2_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39"
+ + b"\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+
+class TestMinifiedCommentStripped(TestWithTmpDir):
+ def test_minified_comment_stripped(self):
+ propLines = [
+ "# Comments are removed",
+ "foo = bar",
+ "",
+ "# Another comment",
+ ]
+ prop = GeneratedFile("\n".join(propLines))
+ self.assertEqual(
+ MinifiedCommentStripped(prop).open().readlines(), [b"foo = bar\n", b"\n"]
+ )
+ open(self.tmppath("prop"), "w").write("\n".join(propLines))
+ MinifiedCommentStripped(File(self.tmppath("prop"))).copy(self.tmppath("prop2"))
+ self.assertEqual(open(self.tmppath("prop2")).readlines(), ["foo = bar\n", "\n"])
+
+
+class TestMinifiedJavaScript(TestWithTmpDir):
+ orig_lines = [
+ "// Comment line",
+ 'let foo = "bar";',
+ "var bar = true;",
+ "",
+ "// Another comment",
+ ]
+
+ def test_minified_javascript(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f)
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def _verify_command(self, code):
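+        # Build a verify_command for MinifiedJavaScript: the helper script
+        # exits with the given code, letting tests simulate verification
+        # success ("0") or failure ("1").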
+ our_dir = os.path.abspath(os.path.dirname(__file__))
+ return [
+ sys.executable,
+ os.path.join(our_dir, "support", "minify_js_verify.py"),
+ code,
+ ]
+
+ def test_minified_verify_success(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("0"))
+
+ mini_lines = [six.ensure_text(s) for s in min_f.open().readlines()]
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def test_minified_verify_failure(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ errors.out = six.StringIO()
+ min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("1"))
+
+ mini_lines = min_f.open().readlines()
+ output = errors.out.getvalue()
+ errors.out = sys.stderr
+ self.assertEqual(
+ output,
+ "warning: JS minification verification failed for <unknown>:\n"
+ "warning: Error message\n",
+ )
+ self.assertEqual(mini_lines, orig_f.open().readlines())
+
+
+class MatchTestTemplate(object):
+ def prepare_match_test(self, with_dotfiles=False):
+ self.add("bar")
+ self.add("foo/bar")
+ self.add("foo/baz")
+ self.add("foo/qux/1")
+ self.add("foo/qux/bar")
+ self.add("foo/qux/2/test")
+ self.add("foo/qux/2/test2")
+ if with_dotfiles:
+ self.add("foo/.foo")
+ self.add("foo/.bar/foo")
+
+ def do_match_test(self):
+ self.do_check(
+ "",
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "*",
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "foo/qux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
+ )
+ self.do_check("foo/b*", ["foo/bar", "foo/baz"])
+ self.do_check("baz", [])
+ self.do_check("foo/foo", [])
+ self.do_check("foo/*ar", ["foo/bar"])
+ self.do_check("*ar", ["bar"])
+ self.do_check("*/bar", ["foo/bar"])
+ self.do_check(
+ "foo/*ux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
+ )
+ self.do_check(
+ "foo/q*ux",
+ ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"],
+ )
+ self.do_check("foo/*/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
+ self.do_check("**/bar", ["bar", "foo/bar", "foo/qux/bar"])
+ self.do_check("foo/**/test", ["foo/qux/2/test"])
+ self.do_check(
+ "foo",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check("**/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
+ self.do_check(
+ "**/foo",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check("**/barbaz", [])
+ self.do_check("f**/bar", ["foo/bar"])
+
+ def do_finder_test(self, finder):
+ self.assertTrue(finder.contains("foo/.foo"))
+ self.assertTrue(finder.contains("foo/.bar"))
+ self.assertTrue("foo/.foo" in [f for f, c in finder.find("foo/.foo")])
+ self.assertTrue("foo/.bar/foo" in [f for f, c in finder.find("foo/.bar")])
+ self.assertEqual(
+ sorted([f for f, c in finder.find("foo/.*")]), ["foo/.bar/foo", "foo/.foo"]
+ )
+ for pattern in ["foo", "**", "**/*", "**/foo", "foo/*"]:
+ self.assertFalse("foo/.foo" in [f for f, c in finder.find(pattern)])
+ self.assertFalse("foo/.bar/foo" in [f for f, c in finder.find(pattern)])
+ self.assertEqual(
+ sorted([f for f, c in finder.find(pattern)]),
+ sorted([f for f, c in finder if mozpath.match(f, pattern)]),
+ )
+
+
+def do_check(test, finder, pattern, result):
+ if result:
+ test.assertTrue(finder.contains(pattern))
+ else:
+ test.assertFalse(finder.contains(pattern))
+ test.assertEqual(sorted(list(f for f, c in finder.find(pattern))), sorted(result))
+
+
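+# Illustrative sketch, not part of the upstream suite: the glob semantics
+# exercised by do_match_test, stated directly against mozpack.path.match
+# (the helper name below is ours). "*" stays within one path segment,
+# "**" crosses segments, and a pattern naming a directory matches
+# everything beneath it.
+def _match_semantics_examples():
+    assert mozpath.match("bar", "*ar")
+    assert not mozpath.match("foo/bar", "*ar")
+    assert mozpath.match("foo/qux/bar", "**/bar")
+    assert mozpath.match("foo/qux/1", "foo/qux")
+
+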
+class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ ensureParentDir(self.tmppath(path))
+ open(self.tmppath(path), "wb").write(six.ensure_binary(path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_file_finder(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir)
+ self.do_match_test()
+ self.do_finder_test(self.finder)
+
+ def test_get(self):
+ self.prepare_match_test()
+ finder = FileFinder(self.tmpdir)
+
+ self.assertIsNone(finder.get("does-not-exist"))
+ res = finder.get("bar")
+ self.assertIsInstance(res, File)
+ self.assertEqual(mozpath.normpath(res.path), mozpath.join(self.tmpdir, "bar"))
+
+ def test_ignored_dirs(self):
+ """Ignored directories should not have results returned."""
+ self.prepare_match_test()
+ self.add("fooz")
+
+        # "foo/quxz" shares a prefix with the ignored "foo/qux" directory;
+        # it must not be excluded by mere prefix matching.
+ self.add("foo/quxz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/qux"])
+
+ self.do_check("**", ["bar", "foo/bar", "foo/baz", "foo/quxz", "fooz"])
+ self.do_check("foo/*", ["foo/bar", "foo/baz", "foo/quxz"])
+ self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
+ self.do_check("foo/qux/**", [])
+ self.do_check("foo/qux/*", [])
+ self.do_check("foo/qux/bar", [])
+ self.do_check("foo/quxz", ["foo/quxz"])
+ self.do_check("fooz", ["fooz"])
+
+ def test_ignored_files(self):
+ """Ignored files should not have results returned."""
+ self.prepare_match_test()
+
+        # Ensure a file merely sharing a prefix with an ignored path ("bar")
+        # is not itself ignored.
+ self.add("barz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/bar", "bar"])
+ self.do_check(
+ "**",
+ [
+ "barz",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ "foo/qux/bar",
+ ],
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ "foo/qux/bar",
+ ],
+ )
+
+ def test_ignored_patterns(self):
+ """Ignore entries with patterns should be honored."""
+ self.prepare_match_test()
+
+ self.add("foo/quxz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/qux/*"])
+ self.do_check("**", ["foo/bar", "foo/baz", "foo/quxz", "bar"])
+ self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
+
+ def test_dotfiles(self):
+        """Finder can find files beginning with . when configured to."""
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True)
+ self.do_check(
+ "**",
+ [
+ "bar",
+ "foo/.foo",
+ "foo/.bar/foo",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+ def test_dotfiles_plus_ignore(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(
+ self.tmpdir, find_dotfiles=True, ignore=["foo/.bar/**"]
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/.foo",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+
+class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.jar.add(path, ensure_bytes(path), compress=True)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_jar_finder(self):
+ self.jar = JarWriter(file=self.tmppath("test.jar"))
+ self.prepare_match_test()
+ self.jar.finish()
+ reader = JarReader(file=self.tmppath("test.jar"))
+ self.finder = JarFinder(self.tmppath("test.jar"), reader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), DeflatedFile)
+
+
+class TestTarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.tar.addfile(tarfile.TarInfo(name=path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_tar_finder(self):
+ self.tar = tarfile.open(name=self.tmppath("test.tar.bz2"), mode="w:bz2")
+ self.prepare_match_test()
+ self.tar.close()
+ with tarfile.open(name=self.tmppath("test.tar.bz2"), mode="r:bz2") as tarreader:
+ self.finder = TarFinder(self.tmppath("test.tar.bz2"), tarreader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), ExtractedTarFile)
+
+
+class TestComposedFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path, content=None):
+ # Put foo/qux files under $tmp/b.
+ if path.startswith("foo/qux/"):
+ real_path = mozpath.join("b", path[8:])
+ else:
+ real_path = mozpath.join("a", path)
+ ensureParentDir(self.tmppath(real_path))
+ if not content:
+ content = six.ensure_binary(path)
+ open(self.tmppath(real_path), "wb").write(content)
+
+ def do_check(self, pattern, result):
+ if "*" in pattern:
+ return
+ do_check(self, self.finder, pattern, result)
+
+ def test_composed_finder(self):
+ self.prepare_match_test()
+ # Also add files in $tmp/a/foo/qux because ComposedFinder is
+ # expected to mask foo/qux entirely with content from $tmp/b.
+ ensureParentDir(self.tmppath("a/foo/qux/hoge"))
+ open(self.tmppath("a/foo/qux/hoge"), "wb").write(b"hoge")
+ open(self.tmppath("a/foo/qux/bar"), "wb").write(b"not the right content")
+ self.finder = ComposedFinder(
+ {
+ "": FileFinder(self.tmppath("a")),
+ "foo/qux": FileFinder(self.tmppath("b")),
+ }
+ )
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), File)
+
+
+@unittest.skipUnless(hglib, "hglib not available")
+@unittest.skipIf(
+ six.PY3 and os.name == "nt", "Does not currently work in Python3 on Windows"
+)
+class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir):
+ def setUp(self):
+ super(TestMercurialRevisionFinder, self).setUp()
+ hglib.init(self.tmpdir)
+ self._clients = []
+
+ def tearDown(self):
+ # Ensure the hg client process is closed. Otherwise, Windows
+ # may have trouble removing the repo directory because the process
+ # has an open handle on it.
+ for client in getattr(self, "_clients", []):
+ if client.server:
+ client.close()
+
+ self._clients[:] = []
+
+ super(TestMercurialRevisionFinder, self).tearDown()
+
+ def _client(self):
+ configs = (
+            # bytes, not unicode: Python 2 requires a non-unicode value here
+ b'ui.username="Dummy User <dummy@example.com>"',
+ )
+ client = hglib.open(
+ six.ensure_binary(self.tmpdir),
+            encoding=b"UTF-8",  # bytes, not unicode, for the same Python 2 reason
+ configs=configs,
+ )
+ self._clients.append(client)
+ return client
+
+ def add(self, path):
+ with self._client() as c:
+ ensureParentDir(self.tmppath(path))
+ with open(self.tmppath(path), "wb") as fh:
+ fh.write(six.ensure_binary(path))
+ c.add(six.ensure_binary(self.tmppath(path)))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def _get_finder(self, *args, **kwargs):
+ f = MercurialRevisionFinder(*args, **kwargs)
+ self._clients.append(f._client)
+ return f
+
+ def test_default_revision(self):
+ self.prepare_match_test()
+ with self._client() as c:
+ c.commit("initial commit")
+
+ self.finder = self._get_finder(self.tmpdir)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), MercurialFile)
+
+ def test_old_revision(self):
+ with self._client() as c:
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"foo initial")
+ c.add(six.ensure_binary(self.tmppath("foo")))
+ c.commit("initial")
+
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"foo second")
+ with open(self.tmppath("bar"), "wb") as fh:
+ fh.write(b"bar second")
+ c.add(six.ensure_binary(self.tmppath("bar")))
+ c.commit("second")
+ # This wipes out the working directory, ensuring the finder isn't
+ # finding anything from the filesystem.
+ c.rawcommand([b"update", b"null"])
+
+ finder = self._get_finder(self.tmpdir, "0")
+ f = finder.get("foo")
+ self.assertEqual(f.read(), b"foo initial")
+ self.assertEqual(f.read(), b"foo initial", "read again for good measure")
+ self.assertIsNone(finder.get("bar"))
+
+ finder = self._get_finder(self.tmpdir, rev="1")
+ f = finder.get("foo")
+ self.assertEqual(f.read(), b"foo second")
+ f = finder.get("bar")
+ self.assertEqual(f.read(), b"bar second")
+ f = None
+
+ def test_recognize_repo_paths(self):
+ with self._client() as c:
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"initial")
+ c.add(six.ensure_binary(self.tmppath("foo")))
+ c.commit("initial")
+ c.rawcommand([b"update", b"null"])
+
+ finder = self._get_finder(self.tmpdir, "0", recognize_repo_paths=True)
+ with self.assertRaises(NotImplementedError):
+ list(finder.find(""))
+
+ with self.assertRaises(ValueError):
+ finder.get("foo")
+ with self.assertRaises(ValueError):
+ finder.get("")
+
+ f = finder.get(self.tmppath("foo"))
+ self.assertIsInstance(f, MercurialFile)
+ self.assertEqual(f.read(), b"initial")
+ f = None
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py
new file mode 100644
index 0000000000..a5db53b58c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_manifests.py
@@ -0,0 +1,465 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.manifests import InstallManifest, UnreadableInstallManifest
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestInstallManifest(TestWithTmpDir):
+ def test_construct(self):
+ m = InstallManifest()
+ self.assertEqual(len(m), 0)
+
+ def test_malformed(self):
+ f = self.tmppath("manifest")
+ open(f, "wt").write("junk\n")
+ with self.assertRaises(UnreadableInstallManifest):
+ InstallManifest(f)
+
+ def test_adds(self):
+ m = InstallManifest()
+ m.add_link("s_source", "s_dest")
+ m.add_copy("c_source", "c_dest")
+ m.add_required_exists("e_dest")
+ m.add_optional_exists("o_dest")
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+ m.add_preprocess("p_source", "p_dest", "p_source.pp")
+ m.add_content("content", "content")
+
+ self.assertEqual(len(m), 8)
+ self.assertIn("s_dest", m)
+ self.assertIn("c_dest", m)
+ self.assertIn("p_dest", m)
+ self.assertIn("e_dest", m)
+ self.assertIn("o_dest", m)
+ self.assertIn("content", m)
+
+ with self.assertRaises(ValueError):
+ m.add_link("s_other", "s_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_copy("c_other", "c_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_preprocess("p_other", "p_dest", "p_other.pp")
+
+ with self.assertRaises(ValueError):
+ m.add_required_exists("e_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_optional_exists("o_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_content("content", "content")
+
+ def _get_test_manifest(self):
+ m = InstallManifest()
+ m.add_link(self.tmppath("s_source"), "s_dest")
+ m.add_copy(self.tmppath("c_source"), "c_dest")
+ m.add_preprocess(
+ self.tmppath("p_source"),
+ "p_dest",
+ self.tmppath("p_source.pp"),
+ "#",
+ {"FOO": "BAR", "BAZ": "QUX"},
+ )
+ m.add_required_exists("e_dest")
+ m.add_optional_exists("o_dest")
+ m.add_pattern_link("ps_base", "*", "ps_dest")
+ m.add_pattern_copy("pc_base", "**", "pc_dest")
+ m.add_content("the content\non\nmultiple lines", "content")
+
+ return m
+
+ def test_serialization(self):
+ m = self._get_test_manifest()
+
+ p = self.tmppath("m")
+ m.write(path=p)
+ self.assertTrue(os.path.isfile(p))
+
+ with open(p, "r") as fh:
+ c = fh.read()
+
+ self.assertEqual(c.count("\n"), 9)
+
+ lines = c.splitlines()
+ self.assertEqual(len(lines), 9)
+
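+        # The first line of a serialized manifest is its format version;
+        # this revision of mozpack writes version 5.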
+ self.assertEqual(lines[0], "5")
+
+ m2 = InstallManifest(path=p)
+ self.assertEqual(m, m2)
+ p2 = self.tmppath("m2")
+ m2.write(path=p2)
+
+ with open(p2, "r") as fh:
+ c2 = fh.read()
+
+ self.assertEqual(c, c2)
+
+ def test_populate_registry(self):
+ m = self._get_test_manifest()
+ r = FileRegistry()
+ m.populate_registry(r)
+
+ self.assertEqual(len(r), 6)
+ self.assertEqual(
+ r.paths(), ["c_dest", "content", "e_dest", "o_dest", "p_dest", "s_dest"]
+ )
+
+ def test_pattern_expansion(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ def test_write_expand_pattern(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ track = self.tmppath("track")
+ m.write(path=track, expand_pattern=True)
+
+ m = InstallManifest(path=track)
+ self.assertEqual(
+ sorted(dest for dest in m._dests), ["dest/foo/file1", "dest/foo/file2"]
+ )
+
+ def test_or(self):
+ m1 = self._get_test_manifest()
+ orig_length = len(m1)
+ m2 = InstallManifest()
+ m2.add_link("s_source2", "s_dest2")
+ m2.add_copy("c_source2", "c_dest2")
+
+ m1 |= m2
+
+ self.assertEqual(len(m2), 2)
+ self.assertEqual(len(m1), orig_length + 2)
+
+ self.assertIn("s_dest2", m1)
+ self.assertIn("c_dest2", m1)
+
+ def test_copier_application(self):
+ dest = self.tmppath("dest")
+ os.mkdir(dest)
+
+ to_delete = self.tmppath("dest/to_delete")
+ with open(to_delete, "a"):
+ pass
+
+ with open(self.tmppath("s_source"), "wt") as fh:
+ fh.write("symlink!")
+
+ with open(self.tmppath("c_source"), "wt") as fh:
+ fh.write("copy!")
+
+ with open(self.tmppath("p_source"), "wt") as fh:
+ fh.write("#define FOO 1\npreprocess!")
+
+ with open(self.tmppath("dest/e_dest"), "a"):
+ pass
+
+ with open(self.tmppath("dest/o_dest"), "a"):
+ pass
+
+ m = self._get_test_manifest()
+ c = FileCopier()
+ m.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath("dest/s_dest")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/c_dest")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/e_dest")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/o_dest")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/content")))
+ self.assertFalse(os.path.exists(to_delete))
+
+ with open(self.tmppath("dest/s_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "symlink!")
+
+ with open(self.tmppath("dest/c_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "copy!")
+
+ with open(self.tmppath("dest/p_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "preprocess!")
+
+ self.assertEqual(
+ result.updated_files,
+ set(
+ self.tmppath(p)
+ for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest", "dest/content")
+ ),
+ )
+ self.assertEqual(
+ result.existing_files,
+ set([self.tmppath("dest/e_dest"), self.tmppath("dest/o_dest")]),
+ )
+ self.assertEqual(result.removed_files, {to_delete})
+ self.assertEqual(result.removed_directories, set())
+
+ def test_preprocessor(self):
+ manifest = self.tmppath("m")
+ deps = self.tmppath("m.pp")
+ dest = self.tmppath("dest")
+ include = self.tmppath("p_incl")
+
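+        # The os.utime calls throughout this test skew mtimes on purpose so
+        # that "is the output newer than its inputs?" checks behave
+        # deterministically regardless of timestamp granularity.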
+ with open(include, "wt") as fh:
+ fh.write("#define INCL\n")
+ time = os.path.getmtime(include) - 3
+ os.utime(include, (time, time))
+
+ with open(self.tmppath("p_source"), "wt") as fh:
+ fh.write("#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n")
+ fh.write("#ifdef DEPTEST\nPASS2\n#endif\n")
+ fh.write("#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n")
+ time = os.path.getmtime(self.tmppath("p_source")) - 3
+ os.utime(self.tmppath("p_source"), (time, time))
+
+ # Create and write a manifest with the preprocessed file, then apply it.
+ # This should write out our preprocessed file.
+ m = InstallManifest()
+ m.add_preprocess(
+ self.tmppath("p_source"), "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"}
+ )
+ m.write(path=manifest)
+
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
+
+ with open(self.tmppath("dest/p_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "PASS1\n")
+
+ # Create a second manifest with the preprocessed file, then apply it.
+        # Since this manifest does not exist on disk, there should be no
+        # dependency on it, and the preprocessed file should not be modified.
+ m2 = InstallManifest()
+ m2.add_preprocess(
+ self.tmppath("p_source"), "p_dest", deps, "#", {"DEPTEST": True}
+ )
+ c = FileCopier()
+ m2.populate_registry(c)
+ result = c.copy(dest)
+
+ self.assertFalse(self.tmppath("dest/p_dest") in result.updated_files)
+ self.assertTrue(self.tmppath("dest/p_dest") in result.existing_files)
+
+ # Write out the second manifest, then load it back in from the disk.
+ # This should add the dependency on the manifest file, so our
+ # preprocessed file should be regenerated with the new defines.
+ # We also set the mtime on the destination file back, so it will be
+ # older than the manifest file.
+ m2.write(path=manifest)
+ time = os.path.getmtime(manifest) - 1
+ os.utime(self.tmppath("dest/p_dest"), (time, time))
+ m2 = InstallManifest(path=manifest)
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath("dest/p_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "PASS2\n")
+
+ # Set the time on the manifest back, so it won't be picked up as
+        # modified in the next test.
+ time = os.path.getmtime(manifest) - 1
+ os.utime(manifest, (time, time))
+
+ # Update the contents of a file included by the source file. This should
+ # cause the destination to be regenerated.
+ with open(include, "wt") as fh:
+ fh.write("#define INCLTEST\n")
+
+ time = os.path.getmtime(include) - 1
+ os.utime(self.tmppath("dest/p_dest"), (time, time))
+ c = FileCopier()
+ m2.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(self.tmppath("dest/p_dest"), "rt") as fh:
+ self.assertEqual(fh.read(), "PASS2\nPASS3\n")
+
+ def test_preprocessor_dependencies(self):
+ manifest = self.tmppath("m")
+ deps = self.tmppath("m.pp")
+ dest = self.tmppath("dest")
+ source = self.tmppath("p_source")
+ destfile = self.tmppath("dest/p_dest")
+ include = self.tmppath("p_incl")
+ os.mkdir(dest)
+
+ with open(source, "wt") as fh:
+ fh.write("#define SRC\nSOURCE\n")
+ time = os.path.getmtime(source) - 3
+ os.utime(source, (time, time))
+
+ with open(include, "wt") as fh:
+ fh.write("INCLUDE\n")
+ time = os.path.getmtime(source) - 3
+ os.utime(include, (time, time))
+
+ # Create and write a manifest with the preprocessed file.
+ m = InstallManifest()
+ m.add_preprocess(source, "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"})
+ m.write(path=manifest)
+
+ time = os.path.getmtime(source) - 5
+ os.utime(manifest, (time, time))
+
+ # Now read the manifest back in, and apply it. This should write out
+ # our preprocessed file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertTrue(c.copy(dest))
+
+ with open(destfile, "rt") as fh:
+ self.assertEqual(fh.read(), "SOURCE\n")
+
+ # Next, modify the source to #INCLUDE another file.
+ with open(source, "wt") as fh:
+ fh.write("SOURCE\n#include p_incl\n")
+ time = os.path.getmtime(source) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that it also reads the newly included
+ # file.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, "rt") as fh:
+ self.assertEqual(fh.read(), "SOURCE\nINCLUDE\n")
+
+ # Set the time on the source file back, so it won't be picked up as
+ # modified in the next test.
+ time = os.path.getmtime(source) - 1
+ os.utime(source, (time, time))
+
+ # Now, modify the include file (but not the original source).
+ with open(include, "wt") as fh:
+ fh.write("INCLUDE MODIFIED\n")
+ time = os.path.getmtime(include) - 1
+ os.utime(destfile, (time, time))
+
+ # Apply the manifest, and confirm that the change to the include file
+ # is detected. That should cause the preprocessor to run again.
+ m = InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ c.copy(dest)
+
+ with open(destfile, "rt") as fh:
+ self.assertEqual(fh.read(), "SOURCE\nINCLUDE MODIFIED\n")
+
+ # ORing an InstallManifest should copy file dependencies
+ m = InstallManifest()
+ m |= InstallManifest(path=manifest)
+ c = FileCopier()
+ m.populate_registry(c)
+ e = c._files["p_dest"]
+ self.assertEqual(e.extra_depends, [manifest])
+
+ def test_add_entries_from(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ p = InstallManifest()
+ p.add_entries_from(m)
+ self.assertEqual(len(p), 1)
+
+ c = FileCopier()
+ p.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ q = InstallManifest()
+ q.add_entries_from(m, base="target")
+ self.assertEqual(len(q), 1)
+
+ d = FileCopier()
+ q.populate_registry(d)
+ self.assertEqual(d.paths(), ["target/dest/foo/file1", "target/dest/foo/file2"])
+
+ # Some of the values in an InstallManifest include destination
+ # information that is present in the keys. Verify that we can
+ # round-trip serialization.
+ r = InstallManifest()
+ r.add_entries_from(m)
+ r.add_entries_from(m, base="target")
+ self.assertEqual(len(r), 2)
+
+ temp_path = self.tmppath("temp_path")
+ r.write(path=temp_path)
+
+ s = InstallManifest(path=temp_path)
+ e = FileCopier()
+ s.populate_registry(e)
+
+ self.assertEqual(
+ e.paths(),
+ [
+ "dest/foo/file1",
+ "dest/foo/file2",
+ "target/dest/foo/file1",
+ "target/dest/foo/file2",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py
new file mode 100644
index 0000000000..e96c59238f
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -0,0 +1,350 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from collections import OrderedDict
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+from mozpack.mozjar import (
+ Deflater,
+ JarLog,
+ JarReader,
+ JarReaderError,
+ JarStruct,
+ JarWriter,
+ JarWriterError,
+)
+from mozpack.test.test_files import MockDest
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
+class TestJarStruct(unittest.TestCase):
+ class Foo(JarStruct):
+ MAGIC = 0x01020304
+ STRUCT = OrderedDict(
+ [
+ ("foo", "uint32"),
+ ("bar", "uint16"),
+ ("qux", "uint16"),
+ ("length", "uint16"),
+ ("length2", "uint16"),
+ ("string", "length"),
+ ("string2", "length2"),
+ ]
+ )
+
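+    # "string" and "string2" are variable-length fields whose sizes come from
+    # the "length"/"length2" fields; JarStruct manages those length fields
+    # automatically, which is why they are not directly accessible below.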
+ def test_jar_struct(self):
+ foo = TestJarStruct.Foo()
+ self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
+ self.assertEqual(foo["foo"], 0)
+ self.assertEqual(foo["bar"], 0)
+ self.assertEqual(foo["qux"], 0)
+ self.assertFalse("length" in foo)
+ self.assertFalse("length2" in foo)
+ self.assertEqual(foo["string"], "")
+ self.assertEqual(foo["string2"], "")
+
+ self.assertEqual(foo.size, 16)
+
+ foo["foo"] = 0x42434445
+ foo["bar"] = 0xABCD
+ foo["qux"] = 0xEF01
+ foo["string"] = "abcde"
+ foo["string2"] = "Arbitrarily long string"
+
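+        # Expected little-endian layout: 4-byte magic, the three fixed-width
+        # fields, the two length fields, then the raw string payloads.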
+ serialized = (
+ b"\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef"
+ + b"\x05\x00\x17\x00abcdeArbitrarily long string"
+ )
+ self.assertEqual(foo.size, len(serialized))
+ foo_serialized = foo.serialize()
+ self.assertEqual(foo_serialized, serialized)
+
+ def do_test_read_jar_struct(self, data):
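+        # The buffer deliberately starts one byte before the magic signature:
+        # parsing from offset 0 or offset 2 fails the signature check, while
+        # offset 1 succeeds.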
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
+ self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])
+
+ foo = TestJarStruct.Foo(data[1:])
+ self.assertEqual(foo["foo"], 0x45444342)
+ self.assertEqual(foo["bar"], 0xCDAB)
+ self.assertEqual(foo["qux"], 0x01EF)
+ self.assertFalse("length" in foo)
+ self.assertFalse("length2" in foo)
+ self.assertEqual(foo["string"], b"012345")
+ self.assertEqual(foo["string2"], b"67")
+
+ def test_read_jar_struct(self):
+ data = (
+ b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
+ + b"\x01\x06\x00\x02\x0001234567890"
+ )
+ self.do_test_read_jar_struct(data)
+
+ def test_read_jar_struct_memoryview(self):
+ data = (
+ b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
+ + b"\x01\x06\x00\x02\x0001234567890"
+ )
+ self.do_test_read_jar_struct(memoryview(data))
+
+
+class TestDeflater(unittest.TestCase):
+ def wrap(self, data):
+ return data
+
+ def test_deflater_no_compress(self):
+ deflater = Deflater(False)
+ deflater.write(self.wrap(b"abc"))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, b"abc")
+ self.assertEqual(deflater.crc32, 0x352441C2)
+
+ def test_deflater_compress_no_gain(self):
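+        # "abc" is too short for deflate to help, so the deflater stores the
+        # data uncompressed even though compression was requested.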
+ deflater = Deflater(True)
+ deflater.write(self.wrap(b"abc"))
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 3)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, b"abc")
+ self.assertEqual(deflater.crc32, 0x352441C2)
+
+ def test_deflater_compress(self):
+ deflater = Deflater(True)
+ deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
+ self.assertTrue(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 26)
+ self.assertNotEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.crc32, 0xD46B97ED)
+ # The CRC is the same as when not compressed
+ deflater = Deflater(False)
+ self.assertFalse(deflater.compressed)
+ deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
+ self.assertEqual(deflater.crc32, 0xD46B97ED)
+
+ def test_deflater_empty(self):
+ deflater = Deflater(False)
+ self.assertFalse(deflater.compressed)
+ self.assertEqual(deflater.uncompressed_size, 0)
+ self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+ self.assertEqual(deflater.compressed_data, b"")
+ self.assertEqual(deflater.crc32, 0)
+
+
+class TestDeflaterMemoryView(TestDeflater):
+ def wrap(self, data):
+ return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
+ def test_jar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ self.assertRaises(JarWriterError, jar.add, "foo", b"bar")
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+ jar.add("baz\\backslash", b"aaaaaaaaaaaaaaa")
+
+ files = [j for j in JarReader(fileobj=s)]
+
+ self.assertEqual(files[0].filename, "foo")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"foo")
+
+ self.assertEqual(files[1].filename, "bar")
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertFalse(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ if os.sep == "\\":
+ self.assertEqual(
+ files[3].filename,
+ "baz/backslash",
+ "backslashes in filenames on Windows should get normalized",
+ )
+ else:
+ self.assertEqual(
+ files[3].filename,
+ "baz\\backslash",
+ "backslashes in filenames on POSIX platform are untouched",
+ )
+
+ s = MockDest()
+ with JarWriter(fileobj=s, compress=False) as jar:
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("foo", b"foo")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", True)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "bar")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[1].filename, "foo")
+ self.assertFalse(files[1].compressed)
+ self.assertEqual(files[1].read(), b"foo")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertTrue("bar" in jar)
+ self.assertTrue("foo" in jar)
+ self.assertFalse("baz" in jar)
+ self.assertTrue("baz/qux" in jar)
+ self.assertTrue(jar["bar"], files[1])
+ self.assertTrue(jar["foo"], files[0])
+ self.assertTrue(jar["baz/qux"], files[2])
+
+ s.seek(0)
+ jar = JarReader(fileobj=s)
+ self.assertTrue("bar" in jar)
+ self.assertTrue("foo" in jar)
+ self.assertFalse("baz" in jar)
+ self.assertTrue("baz/qux" in jar)
+
+ files[0].seek(0)
+ self.assertEqual(jar["bar"].filename, files[0].filename)
+ self.assertEqual(jar["bar"].compressed, files[0].compressed)
+ self.assertEqual(jar["bar"].read(), files[0].read())
+
+ files[1].seek(0)
+ self.assertEqual(jar["foo"].filename, files[1].filename)
+ self.assertEqual(jar["foo"].compressed, files[1].compressed)
+ self.assertEqual(jar["foo"].read(), files[1].read())
+
+ files[2].seek(0)
+ self.assertEqual(jar["baz/qux"].filename, files[2].filename)
+ self.assertEqual(jar["baz/qux"].compressed, files[2].compressed)
+ self.assertEqual(jar["baz/qux"].read(), files[2].read())
+
+ def test_rejar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+
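+        # Re-adding entries from a JarReader to a default JarWriter
+        # recompresses them, so "baz/qux" (stored uncompressed above) comes
+        # back deflated.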
+ new = MockDest()
+ with JarWriter(fileobj=new) as jar:
+ for j in JarReader(fileobj=s):
+ jar.add(j.filename, j)
+
+ jar = JarReader(fileobj=new)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "foo")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"foo")
+
+ self.assertEqual(files[1].filename, "bar")
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ def test_add_from_finder(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ finder = FileFinder(test_data_path)
+            for p, f in finder.find("test_data"):
+                jar.add(p, f)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "test_data")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"test_data")
+
+
+class TestPreload(unittest.TestCase):
+ def test_preload(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, None)
+
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.preload(["baz/qux", "bar"])
+
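+        # Preloaded entries are moved to the front of the archive in the
+        # order given; last_preloaded reports the final preloaded entry.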
+ jar = JarReader(fileobj=s)
+ self.assertEqual(jar.last_preloaded, "bar")
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "baz/qux")
+ self.assertEqual(files[1].filename, "bar")
+ self.assertEqual(files[2].filename, "foo")
+
+
+class TestJarLog(unittest.TestCase):
+ def test_jarlog(self):
+ s = six.moves.cStringIO(
+ "\n".join(
+ [
+ "bar/baz.jar first",
+ "bar/baz.jar second",
+ "bar/baz.jar third",
+ "bar/baz.jar second",
+ "bar/baz.jar second",
+ "omni.ja stuff",
+ "bar/baz.jar first",
+ "omni.ja other/stuff",
+ "omni.ja stuff",
+ "bar/baz.jar third",
+ ]
+ )
+ )
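+        # JarLog groups accessed paths by jar and deduplicates them while
+        # preserving first-seen order.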
+ log = JarLog(fileobj=s)
+ self.assertEqual(
+ set(log.keys()),
+ set(
+ [
+ "bar/baz.jar",
+ "omni.ja",
+ ]
+ ),
+ )
+ self.assertEqual(
+ log["bar/baz.jar"],
+ [
+ "first",
+ "second",
+ "third",
+ ],
+ )
+ self.assertEqual(
+ log["omni.ja"],
+ [
+ "stuff",
+ "other/stuff",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 0000000000..266902ebb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,630 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+from buildconfig import topobjdir
+from mozunit import MockedOpen
+
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestContent,
+ ManifestResource,
+)
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import GeneratedFile
+from mozpack.packager import (
+ CallDeque,
+ Component,
+ SimpleManifestSink,
+ SimplePackager,
+ preprocess_manifest,
+)
+
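+# This manifest exercises globs, component sections ([foo]), removals
+# ("-foo/bar"), component options (destdir), comments, and preprocessor
+# conditionals.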
+MANIFEST = """
+bar/*
+[foo]
+foo/*
+-foo/bar
+chrome.manifest
+[zot destdir="destdir"]
+foo/zot
+; comment
+#ifdef baz
+[baz]
+baz@SUFFIX@
+#endif
+"""
+
+
+class TestPreprocessManifest(unittest.TestCase):
+ MANIFEST_PATH = mozpath.join("$OBJDIR", "manifest")
+
+ EXPECTED_LOG = [
+ ((MANIFEST_PATH, 2), "add", "", "bar/*"),
+ ((MANIFEST_PATH, 4), "add", "foo", "foo/*"),
+ ((MANIFEST_PATH, 5), "remove", "foo", "foo/bar"),
+ ((MANIFEST_PATH, 6), "add", "foo", "chrome.manifest"),
+ ((MANIFEST_PATH, 8), "add", 'zot destdir="destdir"', "foo/zot"),
+ ]
+
+ def setUp(self):
+ class MockSink(object):
+ def __init__(self):
+ self.log = []
+
+ def add(self, component, path):
+ self._log(errors.get_context(), "add", repr(component), path)
+
+ def remove(self, component, path):
+ self._log(errors.get_context(), "remove", repr(component), path)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+ self.sink = MockSink()
+ self.cwd = os.getcwd()
+ os.chdir(topobjdir)
+
+ def tearDown(self):
+ os.chdir(self.cwd)
+
+ def test_preprocess_manifest(self):
+ with MockedOpen({"manifest": MANIFEST}):
+ preprocess_manifest(self.sink, "manifest")
+ self.assertEqual(self.sink.log, self.EXPECTED_LOG)
+
+ def test_preprocess_manifest_missing_define(self):
+ with MockedOpen({"manifest": MANIFEST}):
+ self.assertRaises(
+ Preprocessor.Error,
+ preprocess_manifest,
+ self.sink,
+ "manifest",
+ {"baz": 1},
+ )
+
+ def test_preprocess_manifest_defines(self):
+ with MockedOpen({"manifest": MANIFEST}):
+ preprocess_manifest(self.sink, "manifest", {"baz": 1, "SUFFIX": ".exe"})
+ self.assertEqual(
+ self.sink.log,
+ self.EXPECTED_LOG + [((self.MANIFEST_PATH, 12), "add", "baz", "baz.exe")],
+ )
+
+
+class MockFinder(object):
+ def __init__(self, files):
+ self.files = files
+ self.log = []
+
+ def find(self, path):
+ self.log.append(path)
+ for f in sorted(self.files):
+ if mozpath.match(f, path):
+ yield f, self.files[f]
+
+ def __iter__(self):
+ return self.find("")
+
+
+class MockFormatter(object):
+ def __init__(self):
+ self.log = []
+
+ def add_base(self, *args):
+ self._log(errors.get_context(), "add_base", *args)
+
+ def add_manifest(self, *args):
+ self._log(errors.get_context(), "add_manifest", *args)
+
+ def add_interfaces(self, *args):
+ self._log(errors.get_context(), "add_interfaces", *args)
+
+ def add(self, *args):
+ self._log(errors.get_context(), "add", *args)
+
+ def _log(self, *args):
+ self.log.append(args)
+
+
+class TestSimplePackager(unittest.TestCase):
+ def test_simple_packager(self):
+ class GeneratedFileWithPath(GeneratedFile):
+ def __init__(self, path, content):
+ GeneratedFile.__init__(self, content)
+ self.path = path
+
+ formatter = MockFormatter()
+ packager = SimplePackager(formatter)
+ curdir = os.path.abspath(os.curdir)
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo", "bar.manifest"),
+ b"resource bar bar/\ncontent bar bar/",
+ )
+ with errors.context("manifest", 1):
+ packager.add("foo/bar.manifest", file)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo", "baz.manifest"), b"resource baz baz/"
+ )
+ with errors.context("manifest", 2):
+ packager.add("bar/baz.manifest", file)
+
+ with errors.context("manifest", 3):
+ packager.add(
+ "qux/qux.manifest",
+ GeneratedFile(
+ b"".join(
+ [
+ b"resource qux qux/\n",
+ b"binary-component qux.so\n",
+ ]
+ )
+ ),
+ )
+ bar_xpt = GeneratedFile(b"bar.xpt")
+ qux_xpt = GeneratedFile(b"qux.xpt")
+ foo_html = GeneratedFile(b"foo_html")
+ bar_html = GeneratedFile(b"bar_html")
+ with errors.context("manifest", 4):
+ packager.add("foo/bar.xpt", bar_xpt)
+ with errors.context("manifest", 5):
+ packager.add("foo/bar/foo.html", foo_html)
+ packager.add("foo/bar/bar.html", bar_html)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo.manifest"),
+ b"".join(
+ [
+ b"manifest foo/bar.manifest\n",
+ b"manifest bar/baz.manifest\n",
+ ]
+ ),
+ )
+ with errors.context("manifest", 6):
+ packager.add("foo.manifest", file)
+ with errors.context("manifest", 7):
+ packager.add("foo/qux.xpt", qux_xpt)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "addon", "chrome.manifest"), b"resource hoge hoge/"
+ )
+ with errors.context("manifest", 8):
+ packager.add("addon/chrome.manifest", file)
+
+ install_rdf = GeneratedFile(b"<RDF></RDF>")
+ with errors.context("manifest", 9):
+ packager.add("addon/install.rdf", install_rdf)
+
+ with errors.context("manifest", 10):
+ packager.add("addon2/install.rdf", install_rdf)
+ packager.add(
+ "addon2/chrome.manifest", GeneratedFile(b"binary-component addon2.so")
+ )
+
+ with errors.context("manifest", 11):
+ packager.add("addon3/install.rdf", install_rdf)
+ packager.add(
+ "addon3/chrome.manifest",
+ GeneratedFile(b"manifest components/components.manifest"),
+ )
+ packager.add(
+ "addon3/components/components.manifest",
+ GeneratedFile(b"binary-component addon3.so"),
+ )
+
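+        # addon4 through addon11 cover detection of em:unpack in install.rdf
+        # across element/attribute and quoting variations; a true value keeps
+        # the addon unpacked.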
+ with errors.context("manifest", 12):
+ install_rdf_addon4 = GeneratedFile(
+ b"<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>"
+ )
+ packager.add("addon4/install.rdf", install_rdf_addon4)
+
+ with errors.context("manifest", 13):
+ install_rdf_addon5 = GeneratedFile(
+ b"<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>"
+ )
+ packager.add("addon5/install.rdf", install_rdf_addon5)
+
+ with errors.context("manifest", 14):
+ install_rdf_addon6 = GeneratedFile(
+ b"<RDF>\n<... em:unpack=true>\n<...>\n</RDF>"
+ )
+ packager.add("addon6/install.rdf", install_rdf_addon6)
+
+ with errors.context("manifest", 15):
+ install_rdf_addon7 = GeneratedFile(
+ b"<RDF>\n<... em:unpack=false>\n<...>\n</RDF>"
+ )
+ packager.add("addon7/install.rdf", install_rdf_addon7)
+
+ with errors.context("manifest", 16):
+ install_rdf_addon8 = GeneratedFile(
+ b'<RDF>\n<... em:unpack="true">\n<...>\n</RDF>'
+ )
+ packager.add("addon8/install.rdf", install_rdf_addon8)
+
+ with errors.context("manifest", 17):
+ install_rdf_addon9 = GeneratedFile(
+ b'<RDF>\n<... em:unpack="false">\n<...>\n</RDF>'
+ )
+ packager.add("addon9/install.rdf", install_rdf_addon9)
+
+ with errors.context("manifest", 18):
+ install_rdf_addon10 = GeneratedFile(
+ b"<RDF>\n<... em:unpack='true'>\n<...>\n</RDF>"
+ )
+ packager.add("addon10/install.rdf", install_rdf_addon10)
+
+ with errors.context("manifest", 19):
+ install_rdf_addon11 = GeneratedFile(
+ b"<RDF>\n<... em:unpack='false'>\n<...>\n</RDF>"
+ )
+ packager.add("addon11/install.rdf", install_rdf_addon11)
+
+ we_manifest = GeneratedFile(
+ b'{"manifest_version": 2, "name": "Test WebExtension", "version": "1.0"}'
+ )
+        # hybrid and hybrid2 are both bootstrapped extensions with embedded
+        # webextensions; they differ only in the order in which their
+        # manifests are added to the packager.
+ with errors.context("manifest", 20):
+ packager.add("hybrid/install.rdf", install_rdf)
+
+ with errors.context("manifest", 21):
+ packager.add("hybrid/webextension/manifest.json", we_manifest)
+
+ with errors.context("manifest", 22):
+ packager.add("hybrid2/webextension/manifest.json", we_manifest)
+
+ with errors.context("manifest", 23):
+ packager.add("hybrid2/install.rdf", install_rdf)
+
+ with errors.context("manifest", 24):
+ packager.add("webextension/manifest.json", we_manifest)
+
+ non_we_manifest = GeneratedFile(b'{"not a webextension": true}')
+ with errors.context("manifest", 25):
+ packager.add("nonwebextension/manifest.json", non_we_manifest)
+
+ self.assertEqual(formatter.log, [])
+
+ with errors.context("dummy", 1):
+ packager.close()
+ self.maxDiff = None
+ # The formatter is expected to reorder the manifest entries so that
+ # chrome entries appear before the others.
+ self.assertEqual(
+ formatter.log,
+ [
+ (("dummy", 1), "add_base", "", False),
+ (("dummy", 1), "add_base", "addon", True),
+ (("dummy", 1), "add_base", "addon10", "unpacked"),
+ (("dummy", 1), "add_base", "addon11", True),
+ (("dummy", 1), "add_base", "addon2", "unpacked"),
+ (("dummy", 1), "add_base", "addon3", "unpacked"),
+ (("dummy", 1), "add_base", "addon4", "unpacked"),
+ (("dummy", 1), "add_base", "addon5", True),
+ (("dummy", 1), "add_base", "addon6", "unpacked"),
+ (("dummy", 1), "add_base", "addon7", True),
+ (("dummy", 1), "add_base", "addon8", "unpacked"),
+ (("dummy", 1), "add_base", "addon9", True),
+ (("dummy", 1), "add_base", "hybrid", True),
+ (("dummy", 1), "add_base", "hybrid2", True),
+ (("dummy", 1), "add_base", "qux", False),
+ (("dummy", 1), "add_base", "webextension", True),
+ (
+ (os.path.join(curdir, "foo", "bar.manifest"), 2),
+ "add_manifest",
+ ManifestContent("foo", "bar", "bar/"),
+ ),
+ (
+ (os.path.join(curdir, "foo", "bar.manifest"), 1),
+ "add_manifest",
+ ManifestResource("foo", "bar", "bar/"),
+ ),
+ (
+ ("bar/baz.manifest", 1),
+ "add_manifest",
+ ManifestResource("bar", "baz", "baz/"),
+ ),
+ (
+ ("qux/qux.manifest", 1),
+ "add_manifest",
+ ManifestResource("qux", "qux", "qux/"),
+ ),
+ (
+ ("qux/qux.manifest", 2),
+ "add_manifest",
+ ManifestBinaryComponent("qux", "qux.so"),
+ ),
+ (("manifest", 4), "add_interfaces", "foo/bar.xpt", bar_xpt),
+ (("manifest", 7), "add_interfaces", "foo/qux.xpt", qux_xpt),
+ (
+ (os.path.join(curdir, "addon", "chrome.manifest"), 1),
+ "add_manifest",
+ ManifestResource("addon", "hoge", "hoge/"),
+ ),
+ (
+ ("addon2/chrome.manifest", 1),
+ "add_manifest",
+ ManifestBinaryComponent("addon2", "addon2.so"),
+ ),
+ (
+ ("addon3/components/components.manifest", 1),
+ "add_manifest",
+ ManifestBinaryComponent("addon3/components", "addon3.so"),
+ ),
+ (("manifest", 5), "add", "foo/bar/foo.html", foo_html),
+ (("manifest", 5), "add", "foo/bar/bar.html", bar_html),
+ (("manifest", 9), "add", "addon/install.rdf", install_rdf),
+ (("manifest", 10), "add", "addon2/install.rdf", install_rdf),
+ (("manifest", 11), "add", "addon3/install.rdf", install_rdf),
+ (("manifest", 12), "add", "addon4/install.rdf", install_rdf_addon4),
+ (("manifest", 13), "add", "addon5/install.rdf", install_rdf_addon5),
+ (("manifest", 14), "add", "addon6/install.rdf", install_rdf_addon6),
+ (("manifest", 15), "add", "addon7/install.rdf", install_rdf_addon7),
+ (("manifest", 16), "add", "addon8/install.rdf", install_rdf_addon8),
+ (("manifest", 17), "add", "addon9/install.rdf", install_rdf_addon9),
+ (("manifest", 18), "add", "addon10/install.rdf", install_rdf_addon10),
+ (("manifest", 19), "add", "addon11/install.rdf", install_rdf_addon11),
+ (("manifest", 20), "add", "hybrid/install.rdf", install_rdf),
+ (
+ ("manifest", 21),
+ "add",
+ "hybrid/webextension/manifest.json",
+ we_manifest,
+ ),
+ (
+ ("manifest", 22),
+ "add",
+ "hybrid2/webextension/manifest.json",
+ we_manifest,
+ ),
+ (("manifest", 23), "add", "hybrid2/install.rdf", install_rdf),
+ (("manifest", 24), "add", "webextension/manifest.json", we_manifest),
+ (
+ ("manifest", 25),
+ "add",
+ "nonwebextension/manifest.json",
+ non_we_manifest,
+ ),
+ ],
+ )
+
+ self.assertEqual(
+ packager.get_bases(),
+ set(
+ [
+ "",
+ "addon",
+ "addon2",
+ "addon3",
+ "addon4",
+ "addon5",
+ "addon6",
+ "addon7",
+ "addon8",
+ "addon9",
+ "addon10",
+ "addon11",
+ "qux",
+ "hybrid",
+ "hybrid2",
+ "webextension",
+ ]
+ ),
+ )
+ self.assertEqual(packager.get_bases(addons=False), set(["", "qux"]))
+
+ def test_simple_packager_manifest_consistency(self):
+ formatter = MockFormatter()
+        # bar/ is detected as an addon because of its install.rdf, but the
+        # top-level manifest includes a manifest inside bar/.
+ packager = SimplePackager(formatter)
+ packager.add(
+ "base.manifest",
+ GeneratedFile(
+ b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+ ),
+ )
+ packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+ packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+ packager.add("bar/install.rdf", GeneratedFile(b""))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"',
+ )
+
+        # bar/ is detected as a separate base because of a chrome.manifest
+        # that is included nowhere, but the top-level manifest includes
+        # another manifest inside bar/.
+ packager = SimplePackager(formatter)
+ packager.add(
+ "base.manifest",
+ GeneratedFile(
+ b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+ ),
+ )
+ packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+ packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+ packager.add("bar/chrome.manifest", GeneratedFile(b"resource baz baz"))
+
+ with self.assertRaises(ErrorMessage) as e:
+ packager.close()
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "bar/baz.manifest" is included from "base.manifest", '
+ 'which is outside "bar"',
+ )
+
+        # bar/ is detected as a separate base because of a chrome.manifest
+        # that is included nowhere, but that chrome.manifest includes
+        # baz.manifest from the same directory. This shouldn't error out.
+ packager = SimplePackager(formatter)
+ packager.add("base.manifest", GeneratedFile(b"manifest foo/bar.manifest\n"))
+ packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+ packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+ packager.add("bar/chrome.manifest", GeneratedFile(b"manifest baz.manifest"))
+ packager.close()
+
+
+class TestSimpleManifestSink(unittest.TestCase):
+ def test_simple_manifest_parser(self):
+ formatter = MockFormatter()
+ foobar = GeneratedFile(b"foobar")
+ foobaz = GeneratedFile(b"foobaz")
+ fooqux = GeneratedFile(b"fooqux")
+ foozot = GeneratedFile(b"foozot")
+ finder = MockFinder(
+ {
+ "bin/foo/bar": foobar,
+ "bin/foo/baz": foobaz,
+ "bin/foo/qux": fooqux,
+ "bin/foo/zot": foozot,
+ "bin/foo/chrome.manifest": GeneratedFile(b"resource foo foo/"),
+ "bin/chrome.manifest": GeneratedFile(b"manifest foo/chrome.manifest"),
+ }
+ )
+ parser = SimpleManifestSink(finder, formatter)
+ component0 = Component("component0")
+ component1 = Component("component1")
+ component2 = Component("component2", destdir="destdir")
+ parser.add(component0, "bin/foo/b*")
+ parser.add(component1, "bin/foo/qux")
+ parser.add(component1, "bin/foo/chrome.manifest")
+ parser.add(component2, "bin/foo/zot")
+ self.assertRaises(ErrorMessage, parser.add, "component1", "bin/bar")
+
+ self.assertEqual(formatter.log, [])
+ parser.close()
+ self.assertEqual(
+ formatter.log,
+ [
+ (None, "add_base", "", False),
+ (
+ ("foo/chrome.manifest", 1),
+ "add_manifest",
+ ManifestResource("foo", "foo", "foo/"),
+ ),
+ (None, "add", "foo/bar", foobar),
+ (None, "add", "foo/baz", foobaz),
+ (None, "add", "foo/qux", fooqux),
+ (None, "add", "destdir/foo/zot", foozot),
+ ],
+ )
+
+ self.assertEqual(
+ finder.log,
+ [
+ "bin/foo/b*",
+ "bin/foo/qux",
+ "bin/foo/chrome.manifest",
+ "bin/foo/zot",
+ "bin/bar",
+ "bin/chrome.manifest",
+ ],
+ )
+
+
+class TestCallDeque(unittest.TestCase):
+ def test_call_deque(self):
+ class Logger(object):
+ def __init__(self):
+ self._log = []
+
+ def log(self, str):
+ self._log.append(str)
+
+ @staticmethod
+ def staticlog(logger, str):
+ logger.log(str)
+
+ def do_log(logger, str):
+ logger.log(str)
+
+ logger = Logger()
+ d = CallDeque()
+ d.append(logger.log, "foo")
+ d.append(logger.log, "bar")
+ d.append(logger.staticlog, logger, "baz")
+ d.append(do_log, logger, "qux")
+ self.assertEqual(logger._log, [])
+ d.execute()
+ self.assertEqual(logger._log, ["foo", "bar", "baz", "qux"])
+
+
+class TestComponent(unittest.TestCase):
+ def do_split(self, string, name, options):
+ n, o = Component._split_component_and_options(string)
+ self.assertEqual(name, n)
+ self.assertEqual(options, o)
+
+ def test_component_split_component_and_options(self):
+ self.do_split("component", "component", {})
+ self.do_split("trailingspace ", "trailingspace", {})
+ self.do_split(" leadingspace", "leadingspace", {})
+ self.do_split(" trim ", "trim", {})
+ self.do_split(' trim key="value"', "trim", {"key": "value"})
+ self.do_split(' trim empty=""', "trim", {"empty": ""})
+ self.do_split(' trim space=" "', "trim", {"space": " "})
+ self.do_split(
+ 'component key="value" key2="second" ',
+ "component",
+ {"key": "value", "key2": "second"},
+ )
+ self.do_split(
+ 'trim key=" value with spaces " key2="spaces again"',
+ "trim",
+ {"key": " value with spaces ", "key2": "spaces again"},
+ )
+
+ def do_split_error(self, string):
+ self.assertRaises(ValueError, Component._split_component_and_options, string)
+
+ def test_component_split_component_and_options_errors(self):
+ self.do_split_error('"component')
+ self.do_split_error('comp"onent')
+ self.do_split_error('component"')
+ self.do_split_error('"component"')
+ self.do_split_error("=component")
+ self.do_split_error("comp=onent")
+ self.do_split_error("component=")
+ self.do_split_error('key="val"')
+ self.do_split_error("component key=")
+ self.do_split_error('component key="val')
+ self.do_split_error('component key=val"')
+ self.do_split_error('component key="val" x')
+ self.do_split_error('component x key="val"')
+ self.do_split_error('component key1="val" x key2="val"')
+
+ def do_from_string(self, string, name, destdir=""):
+ component = Component.from_string(string)
+ self.assertEqual(name, component.name)
+ self.assertEqual(destdir, component.destdir)
+
+ def test_component_from_string(self):
+ self.do_from_string("component", "component")
+ self.do_from_string("component-with-hyphen", "component-with-hyphen")
+ self.do_from_string('component destdir="foo/bar"', "component", "foo/bar")
+ self.do_from_string('component destdir="bar spc"', "component", "bar spc")
+ self.assertRaises(ErrorMessage, Component.from_string, "")
+ self.assertRaises(ErrorMessage, Component.from_string, "component novalue=")
+ self.assertRaises(
+ ErrorMessage, Component.from_string, "component badoption=badvalue"
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py
new file mode 100644
index 0000000000..b09971a102
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_formats.py
@@ -0,0 +1,537 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from itertools import chain
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestComponent,
+ ManifestContent,
+ ManifestLocale,
+ ManifestResource,
+ ManifestSkin,
+)
+from mozpack.copier import FileRegistry
+from mozpack.errors import ErrorMessage
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.test.test_files import bar_xpt, foo2_xpt, foo_xpt
+from test_errors import TestErrors
+
+CONTENTS = {
+ "bases": {
+ # base_path: is_addon?
+ "": False,
+ "app": False,
+ "addon0": "unpacked",
+ "addon1": True,
+ "app/chrome/addons/addon2": True,
+ },
+ "manifests": [
+ ManifestContent("chrome/f", "oo", "oo/"),
+ ManifestContent("chrome/f", "bar", "oo/bar/"),
+ ManifestResource("chrome/f", "foo", "resource://bar/"),
+ ManifestBinaryComponent("components", "foo.so"),
+ ManifestContent("app/chrome", "content", "foo/"),
+ ManifestComponent("app/components", "{foo-id}", "foo.js"),
+ ManifestContent("addon0/chrome", "addon0", "foo/bar/"),
+ ManifestContent("addon1/chrome", "addon1", "foo/bar/"),
+ ManifestContent("app/chrome/addons/addon2/chrome", "addon2", "foo/bar/"),
+ ],
+ "files": {
+ "chrome/f/oo/bar/baz": GeneratedFile(b"foobarbaz"),
+ "chrome/f/oo/baz": GeneratedFile(b"foobaz"),
+ "chrome/f/oo/qux": GeneratedFile(b"fooqux"),
+ "components/foo.so": GeneratedFile(b"foo.so"),
+ "components/foo.xpt": foo_xpt,
+ "components/bar.xpt": bar_xpt,
+ "foo": GeneratedFile(b"foo"),
+ "app/chrome/foo/foo": GeneratedFile(b"appfoo"),
+ "app/components/foo.js": GeneratedFile(b"foo.js"),
+ "addon0/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+ "addon0/components/foo.xpt": foo2_xpt,
+ "addon0/components/bar.xpt": bar_xpt,
+ "addon1/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+ "addon1/components/foo.xpt": foo2_xpt,
+ "addon1/components/bar.xpt": bar_xpt,
+ "app/chrome/addons/addon2/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+ "app/chrome/addons/addon2/components/foo.xpt": foo2_xpt,
+ "app/chrome/addons/addon2/components/bar.xpt": bar_xpt,
+ },
+}
+
+FILES = CONTENTS["files"]
+
+RESULT_FLAT = {
+ "chrome.manifest": [
+ "manifest chrome/chrome.manifest",
+ "manifest components/components.manifest",
+ ],
+ "chrome/chrome.manifest": [
+ "manifest f/f.manifest",
+ ],
+ "chrome/f/f.manifest": [
+ "content oo oo/",
+ "content bar oo/bar/",
+ "resource foo resource://bar/",
+ ],
+ "chrome/f/oo/bar/baz": FILES["chrome/f/oo/bar/baz"],
+ "chrome/f/oo/baz": FILES["chrome/f/oo/baz"],
+ "chrome/f/oo/qux": FILES["chrome/f/oo/qux"],
+ "components/components.manifest": [
+ "binary-component foo.so",
+ "interfaces bar.xpt",
+ "interfaces foo.xpt",
+ ],
+ "components/foo.so": FILES["components/foo.so"],
+ "components/foo.xpt": foo_xpt,
+ "components/bar.xpt": bar_xpt,
+ "foo": FILES["foo"],
+ "app/chrome.manifest": [
+ "manifest chrome/chrome.manifest",
+ "manifest components/components.manifest",
+ ],
+ "app/chrome/chrome.manifest": [
+ "content content foo/",
+ ],
+ "app/chrome/foo/foo": FILES["app/chrome/foo/foo"],
+ "app/components/components.manifest": [
+ "component {foo-id} foo.js",
+ ],
+ "app/components/foo.js": FILES["app/components/foo.js"],
+}
+
+for addon in ("addon0", "addon1", "app/chrome/addons/addon2"):
+ RESULT_FLAT.update(
+ {
+ mozpath.join(addon, p): f
+ for p, f in six.iteritems(
+ {
+ "chrome.manifest": [
+ "manifest chrome/chrome.manifest",
+ "manifest components/components.manifest",
+ ],
+ "chrome/chrome.manifest": [
+ "content %s foo/bar/" % mozpath.basename(addon),
+ ],
+ "chrome/foo/bar/baz": FILES[
+ mozpath.join(addon, "chrome/foo/bar/baz")
+ ],
+ "components/components.manifest": [
+ "interfaces bar.xpt",
+ "interfaces foo.xpt",
+ ],
+ "components/bar.xpt": bar_xpt,
+ "components/foo.xpt": foo2_xpt,
+ }
+ )
+ }
+ )
+
+RESULT_JAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ "chrome.manifest",
+ "chrome/chrome.manifest",
+ "components/components.manifest",
+ "components/foo.so",
+ "components/foo.xpt",
+ "components/bar.xpt",
+ "foo",
+ "app/chrome.manifest",
+ "app/components/components.manifest",
+ "app/components/foo.js",
+ "addon0/chrome.manifest",
+ "addon0/components/components.manifest",
+ "addon0/components/foo.xpt",
+ "addon0/components/bar.xpt",
+ )
+}
+
+RESULT_JAR.update(
+ {
+ "chrome/f/f.manifest": [
+ "content oo jar:oo.jar!/",
+ "content bar jar:oo.jar!/bar/",
+ "resource foo resource://bar/",
+ ],
+ "chrome/f/oo.jar": {
+ "bar/baz": FILES["chrome/f/oo/bar/baz"],
+ "baz": FILES["chrome/f/oo/baz"],
+ "qux": FILES["chrome/f/oo/qux"],
+ },
+ "app/chrome/chrome.manifest": [
+ "content content jar:foo.jar!/",
+ ],
+ "app/chrome/foo.jar": {
+ "foo": FILES["app/chrome/foo/foo"],
+ },
+ "addon0/chrome/chrome.manifest": [
+ "content addon0 jar:foo.jar!/bar/",
+ ],
+ "addon0/chrome/foo.jar": {
+ "bar/baz": FILES["addon0/chrome/foo/bar/baz"],
+ },
+ "addon1.xpi": {
+ mozpath.relpath(p, "addon1"): f
+ for p, f in six.iteritems(RESULT_FLAT)
+ if p.startswith("addon1/")
+ },
+ "app/chrome/addons/addon2.xpi": {
+ mozpath.relpath(p, "app/chrome/addons/addon2"): f
+ for p, f in six.iteritems(RESULT_FLAT)
+ if p.startswith("app/chrome/addons/addon2/")
+ },
+ }
+)
+
+RESULT_OMNIJAR = {
+ p: RESULT_FLAT[p]
+ for p in (
+ "components/foo.so",
+ "foo",
+ )
+}
+
+RESULT_OMNIJAR.update({p: RESULT_JAR[p] for p in RESULT_JAR if p.startswith("addon")})
+
+RESULT_OMNIJAR.update(
+ {
+ "omni.foo": {
+ "components/components.manifest": [
+ "interfaces bar.xpt",
+ "interfaces foo.xpt",
+ ],
+ },
+ "chrome.manifest": [
+ "manifest components/components.manifest",
+ ],
+ "components/components.manifest": [
+ "binary-component foo.so",
+ ],
+ "app/omni.foo": {
+ p: RESULT_FLAT["app/" + p]
+ for p in chain(
+ (
+ "chrome.manifest",
+ "chrome/chrome.manifest",
+ "chrome/foo/foo",
+ "components/components.manifest",
+ "components/foo.js",
+ ),
+ (
+ mozpath.relpath(p, "app")
+ for p in six.iterkeys(RESULT_FLAT)
+ if p.startswith("app/chrome/addons/addon2/")
+ ),
+ )
+ },
+ }
+)
+
+RESULT_OMNIJAR["omni.foo"].update(
+ {
+ p: RESULT_FLAT[p]
+ for p in (
+ "chrome.manifest",
+ "chrome/chrome.manifest",
+ "chrome/f/f.manifest",
+ "chrome/f/oo/bar/baz",
+ "chrome/f/oo/baz",
+ "chrome/f/oo/qux",
+ "components/foo.xpt",
+ "components/bar.xpt",
+ )
+ }
+)
+
+RESULT_OMNIJAR_WITH_SUBPATH = {
+ k.replace("omni.foo", "bar/omni.foo"): v for k, v in RESULT_OMNIJAR.items()
+}
+
+CONTENTS_WITH_BASE = {
+ "bases": {
+ mozpath.join("base/root", b) if b else "base/root": a
+ for b, a in six.iteritems(CONTENTS["bases"])
+ },
+ "manifests": [
+ m.move(mozpath.join("base/root", m.base)) for m in CONTENTS["manifests"]
+ ],
+ "files": {
+ mozpath.join("base/root", p): f for p, f in six.iteritems(CONTENTS["files"])
+ },
+}
+
+EXTRA_CONTENTS = {
+ "extra/file": GeneratedFile(b"extra file"),
+}
+
+CONTENTS_WITH_BASE["files"].update(EXTRA_CONTENTS)
+
+
+def result_with_base(results):
+ result = {mozpath.join("base/root", p): v for p, v in six.iteritems(results)}
+ result.update(EXTRA_CONTENTS)
+ return result
+
+
+RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT)
+RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR)
+RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR)
+
+
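+# Feed bases, manifests, and files to a formatter in deterministic order;
+# .xpt files are routed through add_interfaces so they get merged.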
+def fill_formatter(formatter, contents):
+ for base, is_addon in sorted(contents["bases"].items()):
+ formatter.add_base(base, is_addon)
+
+ for manifest in contents["manifests"]:
+ formatter.add_manifest(manifest)
+
+ for k, v in sorted(six.iteritems(contents["files"])):
+ if k.endswith(".xpt"):
+ formatter.add_interfaces(k, v)
+ else:
+ formatter.add(k, v)
+
+
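+# Flatten a registry into {path: contents}, recursing into nested registries
+# (jars) and splitting manifest text into lines for easy comparison.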
+def get_contents(registry, read_all=False, mode="rt"):
+ result = {}
+ for k, v in registry:
+ if isinstance(v, FileRegistry):
+ result[k] = get_contents(v)
+ elif isinstance(v, ManifestFile) or read_all:
+ if "b" in mode:
+ result[k] = v.open().read()
+ else:
+ result[k] = six.ensure_text(v.open().read()).splitlines()
+ else:
+ result[k] = v
+ return result
+
+
+class TestFormatters(TestErrors, unittest.TestCase):
+ maxDiff = None
+
+ def test_bases(self):
+ formatter = FlatFormatter(FileRegistry())
+ formatter.add_base("")
+ formatter.add_base("addon0", addon=True)
+ formatter.add_base("browser")
+ self.assertEqual(formatter._get_base("platform.ini"), ("", "platform.ini"))
+ self.assertEqual(
+ formatter._get_base("browser/application.ini"),
+ ("browser", "application.ini"),
+ )
+ self.assertEqual(
+ formatter._get_base("addon0/install.rdf"), ("addon0", "install.rdf")
+ )
+
+ def do_test_contents(self, formatter, contents):
+ for f in contents["files"]:
+ # .xpt files are merged, so skip them.
+ if not f.endswith(".xpt"):
+ self.assertTrue(formatter.contains(f))
+
+ def test_flat_formatter(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_FLAT)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_jar_formatter(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_JAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_omnijar_formatter(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, "omni.foo")
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_flat_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = FlatFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_jar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = JarFormatter(registry)
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_formatter_with_base(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, "omni.foo")
+
+ fill_formatter(formatter, CONTENTS_WITH_BASE)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE)
+ self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+ def test_omnijar_formatter_with_subpath(self):
+ registry = FileRegistry()
+ formatter = OmniJarFormatter(registry, "bar/omni.foo")
+
+ fill_formatter(formatter, CONTENTS)
+ self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_SUBPATH)
+ self.do_test_contents(formatter, CONTENTS)
+
+ def test_omnijar_is_resource(self):
+ def is_resource(base, path):
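+            # A path counts as a resource when the formatter routes it into
+            # omni.foo; non-resources are left as plain files in the registry.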
+ registry = FileRegistry()
+ f = OmniJarFormatter(
+ registry,
+ "omni.foo",
+ non_resources=[
+ "defaults/messenger/mailViews.dat",
+ "defaults/foo/*",
+ "*/dummy",
+ ],
+ )
+ f.add_base("")
+ f.add_base("app")
+ f.add(mozpath.join(base, path), GeneratedFile(b""))
+ if f.copier.contains(mozpath.join(base, path)):
+ return False
+ self.assertTrue(f.copier.contains(mozpath.join(base, "omni.foo")))
+ self.assertTrue(f.copier[mozpath.join(base, "omni.foo")].contains(path))
+ return True
+
+ for base in ["", "app/"]:
+ self.assertTrue(is_resource(base, "chrome"))
+ self.assertTrue(is_resource(base, "chrome/foo/bar/baz.properties"))
+ self.assertFalse(is_resource(base, "chrome/icons/foo.png"))
+ self.assertTrue(is_resource(base, "components/foo.js"))
+ self.assertFalse(is_resource(base, "components/foo.so"))
+ self.assertTrue(is_resource(base, "res/foo.css"))
+ self.assertFalse(is_resource(base, "res/cursors/foo.png"))
+ self.assertFalse(is_resource(base, "res/MainMenu.nib/foo"))
+ self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
+ self.assertFalse(is_resource(base, "defaults/pref/channel-prefs.js"))
+ self.assertTrue(is_resource(base, "defaults/preferences/foo.js"))
+ self.assertFalse(is_resource(base, "defaults/preferences/channel-prefs.js"))
+ self.assertTrue(is_resource(base, "modules/foo.jsm"))
+ self.assertTrue(is_resource(base, "greprefs.js"))
+ self.assertTrue(is_resource(base, "hyphenation/foo"))
+ self.assertTrue(is_resource(base, "update.locale"))
+ self.assertFalse(is_resource(base, "foo"))
+ self.assertFalse(is_resource(base, "foo/bar/greprefs.js"))
+ self.assertTrue(is_resource(base, "defaults/messenger/foo.dat"))
+ self.assertFalse(is_resource(base, "defaults/messenger/mailViews.dat"))
+ self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
+ self.assertFalse(is_resource(base, "defaults/foo/bar.dat"))
+ self.assertFalse(is_resource(base, "defaults/foo/bar/baz.dat"))
+ self.assertTrue(is_resource(base, "chrome/foo/bar/baz/dummy_"))
+ self.assertFalse(is_resource(base, "chrome/foo/bar/baz/dummy"))
+ self.assertTrue(is_resource(base, "chrome/foo/bar/dummy_"))
+ self.assertFalse(is_resource(base, "chrome/foo/bar/dummy"))
+
+ def test_chrome_override(self):
+ registry = FileRegistry()
+ f = FlatFormatter(registry)
+ f.add_base("")
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))
+ # A more specific entry for a given chrome name can override a more
+ # generic one.
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/win", "os=WINNT"))
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/osx", "os=Darwin"))
+
+ # Chrome with the same name overrides the previous registration.
+ with self.assertRaises(ErrorMessage) as e:
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/"))
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "content foo foo/" overrides ' '"content foo foo/unix"',
+ )
+
+ # Chrome with the same name and same flags overrides the previous
+ # registration.
+ with self.assertRaises(ErrorMessage) as e:
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/", "os=WINNT"))
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "content foo foo/ os=WINNT" overrides '
+ '"content foo foo/win os=WINNT"',
+ )
+
+        # We may start with the more specific entry first.
+ f.add_manifest(ManifestContent("chrome", "bar", "bar/win", "os=WINNT"))
+ # Then adding a more generic one overrides it.
+ with self.assertRaises(ErrorMessage) as e:
+ f.add_manifest(ManifestContent("chrome", "bar", "bar/unix"))
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "content bar bar/unix" overrides ' '"content bar bar/win os=WINNT"',
+ )
+
+ # Adding something more specific still works.
+ f.add_manifest(
+ ManifestContent("chrome", "bar", "bar/win", "os=WINNT osversion>=7.0")
+ )
+
+ # Variations of skin/locales are allowed.
+ f.add_manifest(
+ ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/")
+ )
+ f.add_manifest(ManifestSkin("chrome", "foo", "modern/1.0", "foo/skin/modern/"))
+
+ f.add_manifest(ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/"))
+ f.add_manifest(ManifestLocale("chrome", "foo", "ja-JP", "foo/locale/ja-JP/"))
+
+        # But same-skin/locale entries still error out.
+ with self.assertRaises(ErrorMessage) as e:
+ f.add_manifest(
+ ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/foo")
+ )
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "skin foo classic/1.0 foo/skin/classic/foo" overrides '
+ '"skin foo classic/1.0 foo/skin/classic/"',
+ )
+
+ with self.assertRaises(ErrorMessage) as e:
+ f.add_manifest(
+ ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/foo")
+ )
+
+ self.assertEqual(
+ str(e.exception),
+ 'error: "locale foo en-US foo/locale/en-US/foo" overrides '
+ '"locale foo en-US foo/locale/en-US/"',
+ )
+
+ # Duplicating existing manifest entries is not an error.
+ f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))
+
+ self.assertEqual(
+ self.get_output(),
+ [
+ 'warning: "content foo foo/unix" is duplicated. Skipping.',
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py
new file mode 100644
index 0000000000..0714ae3252
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_l10n.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+import six
+
+from mozpack.chrome.manifest import Manifest, ManifestContent, ManifestLocale
+from mozpack.copier import FileRegistry
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager import l10n
+from test_packager import MockFinder
+
+
+class TestL10NRepack(unittest.TestCase):
+ def test_l10n_repack(self):
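+        # Repacking swaps en-US chrome entries for the x-test locale,
+        # substitutes files under the allowed l10n paths (dict,
+        # chrome/**/search/*.xml), and strips top-level META-INF directories.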
+ foo = GeneratedFile(b"foo")
+ foobar = GeneratedFile(b"foobar")
+ qux = GeneratedFile(b"qux")
+ bar = GeneratedFile(b"bar")
+ baz = GeneratedFile(b"baz")
+ dict_aa = GeneratedFile(b"dict_aa")
+ dict_bb = GeneratedFile(b"dict_bb")
+ dict_cc = GeneratedFile(b"dict_cc")
+ barbaz = GeneratedFile(b"barbaz")
+ lst = GeneratedFile(b"foo\nbar")
+ app_finder = MockFinder(
+ {
+ "bar/foo": foo,
+ "chrome/foo/foobar": foobar,
+ "chrome/qux/qux.properties": qux,
+ "chrome/qux/baz/baz.properties": baz,
+ "chrome/chrome.manifest": ManifestFile(
+ "chrome",
+ [
+ ManifestContent("chrome", "foo", "foo/"),
+ ManifestLocale("chrome", "qux", "en-US", "qux/"),
+ ],
+ ),
+ "chrome.manifest": ManifestFile(
+ "", [Manifest("", "chrome/chrome.manifest")]
+ ),
+ "dict/aa": dict_aa,
+ "app/chrome/bar/barbaz.dtd": barbaz,
+ "app/chrome/chrome.manifest": ManifestFile(
+ "app/chrome", [ManifestLocale("app/chrome", "bar", "en-US", "bar/")]
+ ),
+ "app/chrome.manifest": ManifestFile(
+ "app", [Manifest("app", "chrome/chrome.manifest")]
+ ),
+ "app/dict/bb": dict_bb,
+ "app/dict/cc": dict_cc,
+ "app/chrome/bar/search/foo.xml": foo,
+ "app/chrome/bar/search/bar.xml": bar,
+ "app/chrome/bar/search/lst.txt": lst,
+ "META-INF/foo": foo, # Stripped.
+ "inner/META-INF/foo": foo, # Not stripped.
+ "app/META-INF/foo": foo, # Stripped.
+ "app/inner/META-INF/foo": foo, # Not stripped.
+ }
+ )
+ app_finder.jarlogs = {}
+ app_finder.base = "app"
+ foo_l10n = GeneratedFile(b"foo_l10n")
+ qux_l10n = GeneratedFile(b"qux_l10n")
+ baz_l10n = GeneratedFile(b"baz_l10n")
+ barbaz_l10n = GeneratedFile(b"barbaz_l10n")
+ lst_l10n = GeneratedFile(b"foo\nqux")
+ l10n_finder = MockFinder(
+ {
+ "chrome/qux-l10n/qux.properties": qux_l10n,
+ "chrome/qux-l10n/baz/baz.properties": baz_l10n,
+ "chrome/chrome.manifest": ManifestFile(
+ "chrome",
+ [
+ ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
+ ],
+ ),
+ "chrome.manifest": ManifestFile(
+ "", [Manifest("", "chrome/chrome.manifest")]
+ ),
+ "dict/bb": dict_bb,
+ "dict/cc": dict_cc,
+ "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
+ "app/chrome/chrome.manifest": ManifestFile(
+ "app/chrome",
+ [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
+ ),
+ "app/chrome.manifest": ManifestFile(
+ "app", [Manifest("app", "chrome/chrome.manifest")]
+ ),
+ "app/dict/aa": dict_aa,
+ "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
+ "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
+ "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
+ }
+ )
+ l10n_finder.base = "l10n"
+ copier = FileRegistry()
+ formatter = l10n.FlatFormatter(copier)
+
+ l10n._repack(
+ app_finder,
+ l10n_finder,
+ copier,
+ formatter,
+ ["dict", "chrome/**/search/*.xml"],
+ )
+ self.maxDiff = None
+
+ repacked = {
+ "bar/foo": foo,
+ "chrome/foo/foobar": foobar,
+ "chrome/qux-l10n/qux.properties": qux_l10n,
+ "chrome/qux-l10n/baz/baz.properties": baz_l10n,
+ "chrome/chrome.manifest": ManifestFile(
+ "chrome",
+ [
+ ManifestContent("chrome", "foo", "foo/"),
+ ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
+ ],
+ ),
+ "chrome.manifest": ManifestFile(
+ "", [Manifest("", "chrome/chrome.manifest")]
+ ),
+ "dict/bb": dict_bb,
+ "dict/cc": dict_cc,
+ "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
+ "app/chrome/chrome.manifest": ManifestFile(
+ "app/chrome",
+ [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
+ ),
+ "app/chrome.manifest": ManifestFile(
+ "app", [Manifest("app", "chrome/chrome.manifest")]
+ ),
+ "app/dict/aa": dict_aa,
+ "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
+ "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
+ "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
+ "inner/META-INF/foo": foo,
+ "app/inner/META-INF/foo": foo,
+ }
+
+ self.assertEqual(
+ dict((p, f.open().read()) for p, f in copier),
+ dict((p, f.open().read()) for p, f in six.iteritems(repacked)),
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py
new file mode 100644
index 0000000000..57a2d71eda
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_unpack.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.packager.unpack import unpack_to_registry
+from mozpack.test.test_files import TestWithTmpDir
+from mozpack.test.test_packager_formats import CONTENTS, fill_formatter, get_contents
+
+
+class TestUnpack(TestWithTmpDir):
+ maxDiff = None
+
+ @staticmethod
+ def _get_copier(cls):
+ copier = FileCopier()
+ formatter = cls(copier)
+ fill_formatter(formatter, CONTENTS)
+ return copier
+
+ @classmethod
+ def setUpClass(cls):
+ cls.contents = get_contents(
+ cls._get_copier(FlatFormatter), read_all=True, mode="rb"
+ )
+
+ def _unpack_test(self, cls):
+ # Format a package with the given formatter class
+ copier = self._get_copier(cls)
+ copier.copy(self.tmpdir)
+
+ # Unpack that package. Its content is expected to match that of a Flat
+ # formatted package.
+ registry = FileRegistry()
+ unpack_to_registry(self.tmpdir, registry, getattr(cls, "OMNIJAR_NAME", None))
+ self.assertEqual(
+ get_contents(registry, read_all=True, mode="rb"), self.contents
+ )
+
+ def test_flat_unpack(self):
+ self._unpack_test(FlatFormatter)
+
+ def test_jar_unpack(self):
+ self._unpack_test(JarFormatter)
+
+ @staticmethod
+ def _omni_foo_formatter(name):
+ class OmniFooFormatter(OmniJarFormatter):
+ OMNIJAR_NAME = name
+
+ def __init__(self, registry):
+ super(OmniFooFormatter, self).__init__(registry, name)
+
+ return OmniFooFormatter
+
+ def test_omnijar_unpack(self):
+ self._unpack_test(self._omni_foo_formatter("omni.foo"))
+
+ def test_omnijar_subpath_unpack(self):
+ self._unpack_test(self._omni_foo_formatter("bar/omni.foo"))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py
new file mode 100644
index 0000000000..6c7aeb5400
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_path.py
@@ -0,0 +1,152 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.path import (
+ basedir,
+ basename,
+ commonprefix,
+ dirname,
+ join,
+ match,
+ normpath,
+ rebase,
+ relpath,
+ split,
+ splitext,
+)
+
+
+class TestPath(unittest.TestCase):
+ SEP = os.sep
+
+ def test_relpath(self):
+ self.assertEqual(relpath("foo", "foo"), "")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar"), "")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo"), "bar")
+ self.assertEqual(
+ relpath(self.SEP.join(("foo", "bar", "baz")), "foo"), "bar/baz"
+ )
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar/baz"), "..")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/baz"), "../bar")
+ self.assertEqual(relpath("foo/", "foo"), "")
+ self.assertEqual(relpath("foo/bar/", "foo"), "bar")
+
+ def test_join(self):
+ self.assertEqual(join("foo", "bar", "baz"), "foo/bar/baz")
+ self.assertEqual(join("foo", "", "bar"), "foo/bar")
+ self.assertEqual(join("", "foo", "bar"), "foo/bar")
+ self.assertEqual(join("", "foo", "/bar"), "/bar")
+
+ def test_normpath(self):
+ self.assertEqual(
+ normpath(self.SEP.join(("foo", "bar", "baz", "..", "qux"))), "foo/bar/qux"
+ )
+
+ def test_dirname(self):
+ self.assertEqual(dirname("foo/bar/baz"), "foo/bar")
+ self.assertEqual(dirname("foo/bar"), "foo")
+ self.assertEqual(dirname("foo"), "")
+ self.assertEqual(dirname("foo/bar/"), "foo/bar")
+
+ def test_commonprefix(self):
+ self.assertEqual(
+ commonprefix(
+ [self.SEP.join(("foo", "bar", "baz")), "foo/qux", "foo/baz/qux"]
+ ),
+ "foo/",
+ )
+ self.assertEqual(
+ commonprefix([self.SEP.join(("foo", "bar", "baz")), "foo/qux", "baz/qux"]),
+ "",
+ )
+
+ def test_basename(self):
+ self.assertEqual(basename("foo/bar/baz"), "baz")
+ self.assertEqual(basename("foo/bar"), "bar")
+ self.assertEqual(basename("foo"), "foo")
+ self.assertEqual(basename("foo/bar/"), "")
+
+ def test_split(self):
+ self.assertEqual(
+ split(self.SEP.join(("foo", "bar", "baz"))), ["foo", "bar", "baz"]
+ )
+
+ def test_splitext(self):
+ self.assertEqual(
+ splitext(self.SEP.join(("foo", "bar", "baz.qux"))), ("foo/bar/baz", ".qux")
+ )
+
+ def test_basedir(self):
+ foobarbaz = self.SEP.join(("foo", "bar", "baz"))
+ self.assertEqual(basedir(foobarbaz, ["foo", "bar", "baz"]), "foo")
+ self.assertEqual(basedir(foobarbaz, ["foo", "foo/bar", "baz"]), "foo/bar")
+ self.assertEqual(basedir(foobarbaz, ["foo/bar", "foo", "baz"]), "foo/bar")
+ self.assertEqual(basedir(foobarbaz, ["foo", "bar", ""]), "foo")
+ self.assertEqual(basedir(foobarbaz, ["bar", "baz", ""]), "")
+
+ def test_match(self):
+ self.assertTrue(match("foo", ""))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo"))
+ self.assertTrue(match("foo", "*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/*/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/*/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/b*/*z.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/b*r/ba*z.qux"))
+ self.assertFalse(match("foo/bar/baz.qux", "foo/b*z/ba*r.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/foo/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/*.qux"))
+ self.assertFalse(match("foo/bar/baz.qux", "**.qux"))
+ self.assertFalse(match("foo/bar", "foo/*/bar"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/**"))
+ self.assertFalse(match("foo/nobar/baz.qux", "foo/**/bar/**"))
+ self.assertTrue(match("foo/bar", "foo/**/bar/**"))
+
+ def test_rebase(self):
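+        # rebase(oldbase, base, relativepath) re-expresses relativepath,
+        # which is relative to oldbase, as a path relative to base (per the
+        # documented behavior of mozpack.path.rebase).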
+ self.assertEqual(rebase("foo", "foo/bar", "bar/baz"), "baz")
+ self.assertEqual(rebase("foo", "foo", "bar/baz"), "bar/baz")
+ self.assertEqual(rebase("foo/bar", "foo", "baz"), "bar/baz")
+
+
+if os.altsep:
+
+ class TestAltPath(TestPath):
+ SEP = os.altsep
+
+ class TestReverseAltPath(TestPath):
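+        # Swap os.sep and os.altsep so the inherited TestPath cases exercise
+        # the alternative separator as the primary one; tearDown restores the
+        # originals by performing the same exchange again.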
+ def setUp(self):
+ sep = os.sep
+ os.sep = os.altsep
+ os.altsep = sep
+
+ def tearDown(self):
+ self.setUp()
+
+ class TestAltReverseAltPath(TestReverseAltPath):
+ SEP = os.altsep
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_pkg.py b/python/mozbuild/mozpack/test/test_pkg.py
new file mode 100644
index 0000000000..f1febbbae0
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_pkg.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+from string import Template
+from unittest.mock import patch
+
+import mozunit
+
+import mozpack.pkg
+from mozpack.pkg import (
+ create_bom,
+ create_payload,
+ create_pkg,
+ get_app_info_plist,
+ get_apple_template,
+ get_relative_glob_list,
+ save_text_file,
+ xar_package_folder,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestPkg(TestWithTmpDir):
+ maxDiff = None
+
+ class MockSubprocessRun:
+ stderr = ""
+ stdout = ""
+ returncode = 0
+
+ def __init__(self, returncode=0):
+ self.returncode = returncode
+
+ def _mk_test_file(self, name, mode=0o777):
+        tool = Path(self.tmpdir) / name
+ tool.touch()
+ tool.chmod(mode)
+ return tool
+
+ def test_get_apple_template(self):
+ tmpl = get_apple_template("Distribution.template")
+        assert isinstance(tmpl, Template)
+
+ def test_get_apple_template_not_file(self):
+ with self.assertRaises(Exception):
+ get_apple_template("tmpl-should-not-exist")
+
+ def test_save_text_file(self):
+ content = "Hello"
+ destination = Path(self.tmpdir) / "test_save_text_file"
+ save_text_file(content, destination)
+ with destination.open("r") as file:
+ assert content == file.read()
+
+ def test_get_app_info_plist(self):
+ app_path = Path(self.tmpdir) / "app"
+ (app_path / "Contents").mkdir(parents=True)
+ (app_path / "Contents/Info.plist").touch()
+ data = {"foo": "bar"}
+ with patch.object(mozpack.pkg.plistlib, "load", lambda x: data):
+ assert data == get_app_info_plist(app_path)
+
+ def test_get_app_info_plist_not_file(self):
+ app_path = Path(self.tmpdir) / "app-does-not-exist"
+ with self.assertRaises(Exception):
+ get_app_info_plist(app_path)
+
+ def _mock_payload(self, returncode):
+ def _mock_run(*args, **kwargs):
+ return self.MockSubprocessRun(returncode)
+
+ return _mock_run
+
+ def test_create_payload(self):
+ destination = Path(self.tmpdir) / "mockPayload"
+ with patch.object(mozpack.pkg.subprocess, "run", self._mock_payload(0)):
+ create_payload(destination, Path(self.tmpdir), "cpio")
+
+ def test_create_bom(self):
+ bom_path = Path(self.tmpdir) / "Bom"
+ bom_path.touch()
+ root_path = Path(self.tmpdir)
+ tool_path = Path(self.tmpdir) / "not-really-used-during-test"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x: None):
+ create_bom(bom_path, root_path, tool_path)
+
+    def test_get_relative_glob_list(self):
+ source = Path(self.tmpdir)
+ (source / "testfile").touch()
+ glob = "*"
+ assert len(get_relative_glob_list(source, glob)) == 1
+
+ def test_xar_package_folder(self):
+ source = Path(self.tmpdir)
+ dest = source / "fakedestination"
+ dest.touch()
+ tool = source / "faketool"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x, **y: None):
+ xar_package_folder(source, dest, tool)
+
+ def test_xar_package_folder_not_absolute(self):
+ source = Path("./some/relative/path")
+ dest = Path("./some/other/relative/path")
+ tool = source / "faketool"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda: None):
+ with self.assertRaises(Exception):
+ xar_package_folder(source, dest, tool)
+
+ def test_create_pkg(self):
+ def noop(*x, **y):
+ pass
+
+ def mock_get_app_info_plist(*args):
+ return {"CFBundleShortVersionString": "1.0.0"}
+
+ def mock_get_apple_template(*args):
+ return Template("fake template")
+
+ source = Path(self.tmpdir) / "FakeApp.app"
+ source.mkdir()
+ output = Path(self.tmpdir) / "output.pkg"
+ fake_tool = Path(self.tmpdir) / "faketool"
+ with patch.multiple(
+ mozpack.pkg,
+ get_app_info_plist=mock_get_app_info_plist,
+ get_apple_template=mock_get_apple_template,
+ save_text_file=noop,
+ create_payload=noop,
+ create_bom=noop,
+ xar_package_folder=noop,
+ ):
+ create_pkg(source, output, fake_tool, fake_tool, fake_tool)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 0000000000..15de50dccc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,250 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from io import StringIO
+
+import mozunit
+
+from mozbuild.util import ensureParentDir
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest, TestWithTmpDir
+from mozpack.unify import UnifiedBuildFinder, UnifiedFinder
+
+
+class TestUnified(TestWithTmpDir):
+ def create_one(self, which, path, content):
+ file = self.tmppath(os.path.join(which, path))
+ ensureParentDir(file)
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+        with open(file, "wb") as fh:
+            fh.write(content)
+
+ def create_both(self, path, content):
+ for p in ["a", "b"]:
+ self.create_one(p, path, content)
+
+
+class TestUnifiedFinder(TestUnified):
+ def test_unified_finder(self):
+ self.create_both("foo/bar", "foobar")
+ self.create_both("foo/baz", "foobaz")
+ self.create_one("a", "bar", "bar")
+ self.create_one("b", "baz", "baz")
+ self.create_one("a", "qux", "foobar")
+ self.create_one("b", "qux", "baz")
+ self.create_one("a", "test/foo", "a\nb\nc\n")
+ self.create_one("b", "test/foo", "b\nc\na\n")
+ self.create_both("test/bar", "a\nb\nc\n")
+
+ finder = UnifiedFinder(
+ FileFinder(self.tmppath("a")),
+ FileFinder(self.tmppath("b")),
+ sorted=["test"],
+ )
+ self.assertEqual(
+ sorted(
+ [(f, c.open().read().decode("utf-8")) for f, c in finder.find("foo")]
+ ),
+ [("foo/bar", "foobar"), ("foo/baz", "foobaz")],
+ )
+ self.assertRaises(ErrorMessage, any, finder.find("bar"))
+ self.assertRaises(ErrorMessage, any, finder.find("baz"))
+ self.assertRaises(ErrorMessage, any, finder.find("qux"))
+ self.assertEqual(
+ sorted(
+ [(f, c.open().read().decode("utf-8")) for f, c in finder.find("test")]
+ ),
+ [("test/bar", "a\nb\nc\n"), ("test/foo", "a\nb\nc\n")],
+ )
+
+
+class TestUnifiedBuildFinder(TestUnified):
+ def test_unified_build_finder(self):
+ finder = UnifiedBuildFinder(
+ FileFinder(self.tmppath("a")), FileFinder(self.tmppath("b"))
+ )
+
+ # Test chrome.manifest unification
+ self.create_both("chrome.manifest", "a\nb\nc\n")
+ self.create_one("a", "chrome/chrome.manifest", "a\nb\nc\n")
+ self.create_one("b", "chrome/chrome.manifest", "b\nc\na\n")
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/chrome.manifest")
+ ]
+ ),
+ [("chrome.manifest", "a\nb\nc\n"), ("chrome/chrome.manifest", "a\nb\nc\n")],
+ )
+
+ # Test buildconfig.html unification
+ self.create_one(
+ "a",
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>foo</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ self.create_one(
+ "b",
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>bar</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/buildconfig.html")
+ ]
+ ),
+ [
+ (
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>foo</div>",
+ " <hr> </hr>",
+ " <div>bar</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ ],
+ )
+
+ # Test xpi file unification
+ xpi = MockDest()
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add("foo", "foo")
+ jar.add("bar", "bar")
+ foo_xpi = xpi.read()
+ self.create_both("foo.xpi", foo_xpi)
+
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add("foo", "bar")
+ self.create_one("a", "bar.xpi", foo_xpi)
+ self.create_one("b", "bar.xpi", xpi.read())
+
+ errors.out = StringIO()
+ with self.assertRaises(AccumulatedErrors), errors.accumulate():
+ self.assertEqual(
+ [(f, c.open().read()) for f, c in finder.find("*.xpi")],
+ [("foo.xpi", foo_xpi)],
+ )
+ errors.out = sys.stderr
+
+ # Test install.rdf unification
+ x86_64 = "Darwin_x86_64-gcc3"
+ x86 = "Darwin_x86-gcc3"
+ target_tag = "<{em}targetPlatform>{platform}</{em}targetPlatform>"
+ target_attr = '{em}targetPlatform="{platform}" '
+
+ rdf_tag = "".join(
+ [
+ '<{RDF}Description {em}bar="bar" {em}qux="qux">',
+ "<{em}foo>foo</{em}foo>",
+ "{targets}",
+ "<{em}baz>baz</{em}baz>",
+ "</{RDF}Description>",
+ ]
+ )
+ rdf_attr = "".join(
+ [
+ '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
+ "{targets}",
+ "<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>",
+ "</{RDF}Description>",
+ ]
+ )
+
+ for descr_ns, target_ns in (("RDF:", ""), ("", "em:"), ("RDF:", "em:")):
+ # First we need to infuse the above strings with our namespaces and
+ # platform values.
+ ns = {"RDF": descr_ns, "em": target_ns}
+ target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
+ target_tag_x86 = target_tag.format(platform=x86, **ns)
+ target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
+ target_attr_x86 = target_attr.format(platform=x86, **ns)
+
+ tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
+ tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
+ tag_merged = rdf_tag.format(
+ targets=target_tag_x86_64 + target_tag_x86, **ns
+ )
+ tag_empty = rdf_tag.format(targets="", **ns)
+
+ attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
+ attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
+ attr_merged = rdf_attr.format(
+ attr="", targets=target_tag_x86_64 + target_tag_x86, **ns
+ )
+
+ # This table defines the test cases, columns "a" and "b" being the
+ # contents of the install.rdf of the respective platform and
+ # "result" the exepected merged content after unification.
+ testcases = (
+ # _____a_____ _____b_____ ___result___#
+ (tag_x86_64, tag_x86, tag_merged),
+ (tag_x86_64, tag_empty, tag_empty),
+ (tag_empty, tag_x86, tag_empty),
+ (tag_empty, tag_empty, tag_empty),
+ (attr_x86_64, attr_x86, attr_merged),
+ (tag_x86_64, attr_x86, tag_merged),
+ (attr_x86_64, tag_x86, attr_merged),
+ (attr_x86_64, tag_empty, tag_empty),
+ (tag_empty, attr_x86, tag_empty),
+ )
+
+ # Now create the files from the above table and compare
+ results = []
+ for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
+ filename = "ext/id{0}/install.rdf".format(emid)
+ self.create_one("a", filename, rdf_a)
+ self.create_one("b", filename, rdf_b)
+ results.append((filename, result))
+
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/install.rdf")
+ ]
+ ),
+ results,
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/unify.py b/python/mozbuild/mozpack/unify.py
new file mode 100644
index 0000000000..ca4d0017a9
--- /dev/null
+++ b/python/mozbuild/mozpack/unify.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import struct
+import subprocess
+from collections import OrderedDict
+from tempfile import mkstemp
+
+import buildconfig
+
+import mozpack.path as mozpath
+from mozbuild.util import hexdump
+from mozpack.errors import errors
+from mozpack.executables import MACHO_SIGNATURES
+from mozpack.files import BaseFile, BaseFinder, ExecutableFile, GeneratedFile
+
+# Regular expressions for unifying install.rdf
+FIND_TARGET_PLATFORM = re.compile(
+ r"""
+ <(?P<ns>[-._0-9A-Za-z]+:)?targetPlatform> # The targetPlatform tag, with any namespace
+ (?P<platform>[^<]*) # The actual platform value
+ </(?P=ns)?targetPlatform> # The closing tag
+ """,
+ re.X,
+)
+FIND_TARGET_PLATFORM_ATTR = re.compile(
+ r"""
+ (?P<tag><(?:[-._0-9A-Za-z]+:)?Description) # The opening part of the <Description> tag
+ (?P<attrs>[^>]*?)\s+ # The initial attributes
+ (?P<ns>[-._0-9A-Za-z]+:)?targetPlatform= # The targetPlatform attribute, with any namespace
+ [\'"](?P<platform>[^\'"]+)[\'"] # The actual platform value
+ (?P<otherattrs>[^>]*?>) # The remaining attributes and closing angle bracket
+ """,
+ re.X,
+)
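+# As an illustration (sample values borrowed from the unit tests), the first
+# pattern matches the tag form:
+#   <em:targetPlatform>Darwin_x86_64-gcc3</em:targetPlatform>
+# and the second the attribute form:
+#   <RDF:Description em:bar="bar" em:targetPlatform="Darwin_x86-gcc3" em:qux="qux">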
+
+
+def may_unify_binary(file):
+ """
+ Return whether the given BaseFile instance is an ExecutableFile that
+ may be unified. Only non-fat Mach-O binaries are to be unified.
+ """
+ if isinstance(file, ExecutableFile):
+ signature = file.open().read(4)
+ if len(signature) < 4:
+ return False
+ signature = struct.unpack(">L", signature)[0]
+ if signature in MACHO_SIGNATURES:
+ return True
+ return False
+
+
+class UnifiedExecutableFile(BaseFile):
+ """
+    File class for executable and library files that are to be unified with 'lipo'.
+ """
+
+ def __init__(self, executable1, executable2):
+ """
+ Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to
+ be unified. They are expected to be non-fat Mach-O executables.
+ """
+ assert isinstance(executable1, ExecutableFile)
+ assert isinstance(executable2, ExecutableFile)
+ self._executables = (executable1, executable2)
+
+ def copy(self, dest, skip_if_older=True):
+ """
+        Create a fat executable from the two Mach-O executables given when
+ creating the instance.
+ skip_if_older is ignored.
+ """
+ assert isinstance(dest, str)
+ tmpfiles = []
+ try:
+ for e in self._executables:
+ fd, f = mkstemp()
+ os.close(fd)
+ tmpfiles.append(f)
+ e.copy(f, skip_if_older=False)
+ lipo = buildconfig.substs.get("LIPO") or "lipo"
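+            # The resulting invocation has the form:
+            #   lipo -create <tmpfile1> <tmpfile2> -output <dest>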
+ subprocess.check_call([lipo, "-create"] + tmpfiles + ["-output", dest])
+ except Exception as e:
+ errors.error(
+ "Failed to unify %s and %s: %s"
+ % (self._executables[0].path, self._executables[1].path, str(e))
+ )
+ finally:
+ for f in tmpfiles:
+ os.unlink(f)
+
+
+class UnifiedFinder(BaseFinder):
+ """
+ Helper to get unified BaseFile instances from two distinct trees on the
+ file system.
+ """
+
+    def __init__(self, finder1, finder2, sorted=(), **kargs):
+ """
+ Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder
+ instances from which files are picked. UnifiedFinder.find() will act as
+ FileFinder.find() but will error out when matches can only be found in
+ one of the two trees and not the other. It will also error out if
+ matches can be found on both ends but their contents are not identical.
+
+ The sorted argument gives a list of mozpath.match patterns. File
+ paths matching one of these patterns will have their contents compared
+ with their lines sorted.
+ """
+ assert isinstance(finder1, BaseFinder)
+ assert isinstance(finder2, BaseFinder)
+ self._finder1 = finder1
+ self._finder2 = finder2
+ self._sorted = sorted
+ BaseFinder.__init__(self, finder1.base, **kargs)
+
+ def _find(self, path):
+ """
+ UnifiedFinder.find() implementation.
+ """
+        # There is no `OrderedSet` in the standard library, and the dict
+        # union operator (`|`) only arrived in Python 3.9, so merge the two
+        # insertion-ordered dicts by hand.
+ all_paths = OrderedDict()
+
+ files1 = OrderedDict()
+ for p, f in self._finder1.find(path):
+ files1[p] = f
+ all_paths[p] = True
+ files2 = OrderedDict()
+ for p, f in self._finder2.find(path):
+ files2[p] = f
+ all_paths[p] = True
+
+ for p in all_paths:
+ err = errors.count
+ unified = self.unify_file(p, files1.get(p), files2.get(p))
+ if unified:
+ yield p, unified
+            elif err == errors.count:  # unify_file reported no error itself.
+ self._report_difference(p, files1.get(p), files2.get(p))
+
+ def _report_difference(self, path, file1, file2):
+ """
+ Report differences between files in both trees.
+ """
+ if not file1:
+ errors.error("File missing in %s: %s" % (self._finder1.base, path))
+ return
+ if not file2:
+ errors.error("File missing in %s: %s" % (self._finder2.base, path))
+ return
+
+ errors.error(
+ "Can't unify %s: file differs between %s and %s"
+ % (path, self._finder1.base, self._finder2.base)
+ )
+ if not isinstance(file1, ExecutableFile) and not isinstance(
+ file2, ExecutableFile
+ ):
+ from difflib import unified_diff
+
+ try:
+ lines1 = [l.decode("utf-8") for l in file1.open().readlines()]
+ lines2 = [l.decode("utf-8") for l in file2.open().readlines()]
+ except UnicodeDecodeError:
+ lines1 = hexdump(file1.open().read())
+ lines2 = hexdump(file2.open().read())
+
+ for line in unified_diff(
+ lines1,
+ lines2,
+ os.path.join(self._finder1.base, path),
+ os.path.join(self._finder2.base, path),
+ ):
+ errors.out.write(line)
+
+ def unify_file(self, path, file1, file2):
+ """
+ Given two BaseFiles and the path they were found at, return a
+ unified version of the files. If the files match, the first BaseFile
+ may be returned.
+ If the files don't match or one of them is `None`, the method returns
+ `None`.
+ Subclasses may decide to unify by using one of the files in that case.
+ """
+ if not file1 or not file2:
+ return None
+
+ if may_unify_binary(file1) and may_unify_binary(file2):
+ return UnifiedExecutableFile(file1, file2)
+
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ if content1 == content2:
+ return file1
+ for pattern in self._sorted:
+ if mozpath.match(path, pattern):
+ if sorted(content1) == sorted(content2):
+ return file1
+ break
+ return None
+
+
+class UnifiedBuildFinder(UnifiedFinder):
+ """
+ Specialized UnifiedFinder for Mozilla applications packaging. It allows
+ ``*.manifest`` files to differ in their order, and unifies ``buildconfig.html``
+ files by merging their content.
+ """
+
+ def __init__(self, finder1, finder2, **kargs):
+ UnifiedFinder.__init__(
+ self, finder1, finder2, sorted=["**/*.manifest"], **kargs
+ )
+
+ def unify_file(self, path, file1, file2):
+ """
+ Unify files taking Mozilla application special cases into account.
+ Otherwise defer to UnifiedFinder.unify_file.
+ """
+ basename = mozpath.basename(path)
+ if file1 and file2 and basename == "buildconfig.html":
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ # Copy everything from the first file up to the end of its <div>,
+ # insert a <hr> between the two files and copy the second file's
+ # content beginning after its leading <h1>.
+ return GeneratedFile(
+ b"".join(
+ content1[: content1.index(b" </div>\n")]
+ + [b" <hr> </hr>\n"]
+ + content2[
+ content2.index(b" <h1>Build Configuration</h1>\n") + 1 :
+ ]
+ )
+ )
+ elif file1 and file2 and basename == "install.rdf":
+ # install.rdf files often have em:targetPlatform (either as
+ # attribute or as tag) that will differ between platforms. The
+ # unified install.rdf should contain both em:targetPlatforms if
+ # they exist, or strip them if only one file has a target platform.
+ content1, content2 = (
+ FIND_TARGET_PLATFORM_ATTR.sub(
+ lambda m: m.group("tag")
+ + m.group("attrs")
+ + m.group("otherattrs")
+ + "<%stargetPlatform>%s</%stargetPlatform>"
+ % (m.group("ns") or "", m.group("platform"), m.group("ns") or ""),
+ f.open().read().decode("utf-8"),
+ )
+ for f in (file1, file2)
+ )
+
+ platform2 = FIND_TARGET_PLATFORM.search(content2)
+ return GeneratedFile(
+ FIND_TARGET_PLATFORM.sub(
+                lambda m: (m.group(0) + platform2.group(0)) if platform2 else "",
+ content1,
+ )
+ )
+ return UnifiedFinder.unify_file(self, path, file1, file2)