summaryrefslogtreecommitdiffstats
path: root/python/mozbuild/mozpack/test
diff options
context:
space:
mode:
Diffstat (limited to 'python/mozbuild/mozpack/test')
-rw-r--r--python/mozbuild/mozpack/test/__init__.py0
-rw-r--r--python/mozbuild/mozpack/test/data/test_data1
-rw-r--r--python/mozbuild/mozpack/test/python.toml32
-rw-r--r--python/mozbuild/mozpack/test/support/minify_js_verify.py15
-rw-r--r--python/mozbuild/mozpack/test/test_archive.py197
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_flags.py150
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_manifest.py176
-rw-r--r--python/mozbuild/mozpack/test/test_copier.py548
-rw-r--r--python/mozbuild/mozpack/test/test_errors.py95
-rw-r--r--python/mozbuild/mozpack/test/test_files.py1362
-rw-r--r--python/mozbuild/mozpack/test/test_manifests.py465
-rw-r--r--python/mozbuild/mozpack/test/test_mozjar.py350
-rw-r--r--python/mozbuild/mozpack/test/test_packager.py630
-rw-r--r--python/mozbuild/mozpack/test/test_packager_formats.py537
-rw-r--r--python/mozbuild/mozpack/test/test_packager_l10n.py153
-rw-r--r--python/mozbuild/mozpack/test/test_packager_unpack.py67
-rw-r--r--python/mozbuild/mozpack/test/test_path.py152
-rw-r--r--python/mozbuild/mozpack/test/test_pkg.py138
-rw-r--r--python/mozbuild/mozpack/test/test_unify.py250
19 files changed, 5318 insertions, 0 deletions
diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/__init__.py
diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data
new file mode 100644
index 0000000000..fb7f0c4fc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/data/test_data
@@ -0,0 +1 @@
+test_data \ No newline at end of file
diff --git a/python/mozbuild/mozpack/test/python.toml b/python/mozbuild/mozpack/test/python.toml
new file mode 100644
index 0000000000..c886e78cf1
--- /dev/null
+++ b/python/mozbuild/mozpack/test/python.toml
@@ -0,0 +1,32 @@
+[DEFAULT]
+subsuite = "mozbuild"
+
+["test_archive.py"]
+
+["test_chrome_flags.py"]
+
+["test_chrome_manifest.py"]
+
+["test_copier.py"]
+
+["test_errors.py"]
+
+["test_files.py"]
+
+["test_manifests.py"]
+
+["test_mozjar.py"]
+
+["test_packager.py"]
+
+["test_packager_formats.py"]
+
+["test_packager_l10n.py"]
+
+["test_packager_unpack.py"]
+
+["test_path.py"]
+
+["test_pkg.py"]
+
+["test_unify.py"]
diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py
new file mode 100644
index 0000000000..88cc0ece0c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
import sys

# Test helper invoked as a subprocess by the minify-JS tests:
#   minify_js_verify <exitcode> <orig> <minified>
if len(sys.argv) != 4:
    raise Exception("Usage: minify_js_verify <exitcode> <orig> <minified>")

# The first argument dictates this helper's exit status.
exit_code = int(sys.argv[1])

# A non-zero status also emits a diagnostic on stderr so callers can
# verify that error output is propagated.
if exit_code:
    print("Error message", file=sys.stderr)

sys.exit(exit_code)
diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py
new file mode 100644
index 0000000000..3417f279df
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_archive.py
@@ -0,0 +1,197 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+import pytest
+from mozunit import main
+
+from mozpack.archive import (
+ DEFAULT_MTIME,
+ create_tar_bz2_from_files,
+ create_tar_from_files,
+ create_tar_gz_from_files,
+)
+from mozpack.files import GeneratedFile
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
def file_hash(path):
    """Return the SHA-1 hex digest of the file at *path*.

    The file is streamed in 8 KiB chunks so arbitrarily large files can
    be hashed without loading them into memory.
    """
    digest = hashlib.sha1()
    with open(path, "rb") as fh:
        while chunk := fh.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
+
+
class TestArchive(unittest.TestCase):
    """Tests for the mozpack.archive tar-creation helpers.

    The helpers must produce deterministic archives (fixed mtime, zeroed
    uid/gid, normalized permissions), so several tests compare the output
    against hard-coded SHA-1 digests.
    """

    def _create_files(self, root):
        """Create 10 on-disk files under *root* plus 10 GeneratedFiles.

        Returns a dict mapping archive member names to their sources.
        """
        files = {}
        for i in range(10):
            p = os.path.join(root, "file%02d" % i)
            with open(p, "wb") as fh:
                fh.write(b"file%02d" % i)
            # Need to set permissions or umask may influence testing.
            os.chmod(p, MODE_STANDARD)
            files["file%02d" % i] = p

        for i in range(10):
            files["file%02d" % (i + 10)] = GeneratedFile(b"file%02d" % (i + 10))

        return files

    def _verify_basic_tarfile(self, tf):
        """Assert *tf* holds the 20 normalized members from _create_files."""
        self.assertEqual(len(tf.getmembers()), 20)

        names = ["file%02d" % i for i in range(20)]
        self.assertEqual(tf.getnames(), names)

        for ti in tf.getmembers():
            self.assertEqual(ti.uid, 0)
            self.assertEqual(ti.gid, 0)
            self.assertEqual(ti.uname, "")
            self.assertEqual(ti.gname, "")
            self.assertEqual(ti.mode, MODE_STANDARD)
            self.assertEqual(ti.mtime, DEFAULT_MTIME)

    @pytest.mark.xfail(
        reason="ValueError is not thrown despite being provided directory."
    )
    def test_dirs_refused(self):
        d = tempfile.mkdtemp()
        try:
            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                # assertRaisesRegexp is a deprecated alias removed in
                # Python 3.12; use assertRaisesRegex.
                with self.assertRaisesRegex(ValueError, "not a regular"):
                    create_tar_from_files(fh, {"test": d})
        finally:
            shutil.rmtree(d)

    @pytest.mark.xfail(reason="ValueError is not thrown despite uid/gid being set.")
    def test_setuid_setgid_refused(self):
        d = tempfile.mkdtemp()
        try:
            uid = os.path.join(d, "setuid")
            gid = os.path.join(d, "setgid")
            with open(uid, "a"):
                pass
            with open(gid, "a"):
                pass

            os.chmod(uid, MODE_STANDARD | stat.S_ISUID)
            os.chmod(gid, MODE_STANDARD | stat.S_ISGID)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
                    create_tar_from_files(fh, {"test": uid})
                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
                    create_tar_from_files(fh, {"test": gid})
        finally:
            shutil.rmtree(d)

    def test_create_tar_basic(self):
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                create_tar_from_files(fh, files)

            # Output should be deterministic.
            self.assertEqual(file_hash(tp), "01cd314e277f060e98c7de6c8ea57f96b3a2065c")

            with tarfile.open(tp, "r") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    @pytest.mark.xfail(reason="hash mismatch")
    def test_executable_preserved(self):
        d = tempfile.mkdtemp()
        try:
            p = os.path.join(d, "exec")
            with open(p, "wb") as fh:
                # The file is opened in binary mode, so bytes must be
                # written; a str here raised TypeError on Python 3 and
                # masked the intended "hash mismatch" xfail reason.
                fh.write(b"#!/bin/bash\n")
            os.chmod(p, MODE_STANDARD | stat.S_IXUSR)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                create_tar_from_files(fh, {"exec": p})

            self.assertEqual(file_hash(tp), "357e1b81c0b6cfdfa5d2d118d420025c3c76ee93")

            with tarfile.open(tp, "r") as tf:
                m = tf.getmember("exec")
                self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR)

        finally:
            shutil.rmtree(d)

    def test_create_tar_gz_basic(self):
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            gp = os.path.join(d, "test.tar.gz")
            with open(gp, "wb") as fh:
                create_tar_gz_from_files(fh, files)

            self.assertEqual(file_hash(gp), "7c4da5adc5088cdf00911d5daf9a67b15de714b7")

            with tarfile.open(gp, "r:gz") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    def test_tar_gz_name(self):
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            gp = os.path.join(d, "test.tar.gz")
            with open(gp, "wb") as fh:
                # The filename recorded in the gzip header changes the
                # output hash.
                create_tar_gz_from_files(fh, files, filename="foobar")

            self.assertEqual(file_hash(gp), "721e00083c17d16df2edbddf40136298c06d0c49")

            with tarfile.open(gp, "r:gz") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    def test_create_tar_bz2_basic(self):
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            bp = os.path.join(d, "test.tar.bz2")
            with open(bp, "wb") as fh:
                create_tar_bz2_from_files(fh, files)

            self.assertEqual(file_hash(bp), "eb5096d2fbb71df7b3d690001a6f2e82a5aad6a7")

            with tarfile.open(bp, "r:bz2") as tf:
                self._verify_basic_tarfile(tf)
        finally:
            shutil.rmtree(d)


if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py
new file mode 100644
index 0000000000..4f1a968dc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_flags.py
@@ -0,0 +1,150 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+
+from mozpack.chrome.flags import Flag, Flags, StringFlag, VersionFlag
+from mozpack.errors import ErrorMessage
+
+
class TestFlag(unittest.TestCase):
    """Unit tests for the chrome-manifest flag primitives in
    mozpack.chrome.flags (Flag, StringFlag, VersionFlag)."""

    def test_flag(self):
        # An undefined boolean Flag stringifies empty and matches only
        # false-ish values.
        flag = Flag("flag")
        self.assertEqual(str(flag), "")
        self.assertTrue(flag.matches(False))
        self.assertTrue(flag.matches("false"))
        self.assertFalse(flag.matches("true"))
        # Malformed definitions (empty value, non-boolean value, or the
        # != operator) are rejected.
        self.assertRaises(ErrorMessage, flag.add_definition, "flag=")
        self.assertRaises(ErrorMessage, flag.add_definition, "flag=42")
        self.assertRaises(ErrorMessage, flag.add_definition, "flag!=false")

        # "1" is accepted as a truthy definition.
        flag.add_definition("flag=1")
        self.assertEqual(str(flag), "flag=1")
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches("1"))
        self.assertFalse(flag.matches("no"))

        # A later definition replaces the earlier one.
        flag.add_definition("flag=true")
        self.assertEqual(str(flag), "flag=true")
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches("true"))
        self.assertFalse(flag.matches("0"))

        # "no" is accepted as a falsy definition.
        flag.add_definition("flag=no")
        self.assertEqual(str(flag), "flag=no")
        self.assertTrue(flag.matches("false"))
        self.assertFalse(flag.matches("1"))

        # A bare "flag" definition means true.
        flag.add_definition("flag")
        self.assertEqual(str(flag), "flag")
        self.assertFalse(flag.matches("false"))
        self.assertTrue(flag.matches("true"))
        self.assertFalse(flag.matches(False))

    def test_string_flag(self):
        # An undefined StringFlag matches any value; only = and != are
        # valid operators (ordered comparisons are rejected).
        flag = StringFlag("flag")
        self.assertEqual(str(flag), "")
        self.assertTrue(flag.matches("foo"))
        self.assertRaises(ErrorMessage, flag.add_definition, "flag>=2")

        flag.add_definition("flag=foo")
        self.assertEqual(str(flag), "flag=foo")
        self.assertTrue(flag.matches("foo"))
        self.assertFalse(flag.matches("bar"))

        # Multiple = definitions accumulate as alternatives (OR).
        flag.add_definition("flag=bar")
        self.assertEqual(str(flag), "flag=foo flag=bar")
        self.assertTrue(flag.matches("foo"))
        self.assertTrue(flag.matches("bar"))
        self.assertFalse(flag.matches("baz"))

        # != excludes a single value and matches everything else.
        flag = StringFlag("flag")
        flag.add_definition("flag!=bar")
        self.assertEqual(str(flag), "flag!=bar")
        self.assertTrue(flag.matches("foo"))
        self.assertFalse(flag.matches("bar"))

    def test_version_flag(self):
        # An undefined VersionFlag matches any version; != is not a
        # valid operator for versions.
        flag = VersionFlag("flag")
        self.assertEqual(str(flag), "")
        self.assertTrue(flag.matches("1.0"))
        self.assertRaises(ErrorMessage, flag.add_definition, "flag!=2")

        flag.add_definition("flag=1.0")
        self.assertEqual(str(flag), "flag=1.0")
        self.assertTrue(flag.matches("1.0"))
        self.assertFalse(flag.matches("2.0"))

        # Multiple exact versions accumulate as alternatives.
        flag.add_definition("flag=2.0")
        self.assertEqual(str(flag), "flag=1.0 flag=2.0")
        self.assertTrue(flag.matches("1.0"))
        self.assertTrue(flag.matches("2.0"))
        self.assertFalse(flag.matches("3.0"))

        flag = VersionFlag("flag")
        flag.add_definition("flag>=2.0")
        self.assertEqual(str(flag), "flag>=2.0")
        self.assertFalse(flag.matches("1.0"))
        self.assertTrue(flag.matches("2.0"))
        self.assertTrue(flag.matches("3.0"))

        # Range definitions OR together: >=2.0 or <1.10 both match.
        # Note version ordering: "1.9" < "1.10" < "1.20".
        flag.add_definition("flag<1.10")
        self.assertEqual(str(flag), "flag>=2.0 flag<1.10")
        self.assertTrue(flag.matches("1.0"))
        self.assertTrue(flag.matches("1.9"))
        self.assertFalse(flag.matches("1.10"))
        self.assertFalse(flag.matches("1.20"))
        self.assertTrue(flag.matches("2.0"))
        self.assertTrue(flag.matches("3.0"))
        # Operators with no value, and !=, are rejected.
        self.assertRaises(Exception, flag.add_definition, "flag<")
        self.assertRaises(Exception, flag.add_definition, "flag>")
        self.assertRaises(Exception, flag.add_definition, "flag>=")
        self.assertRaises(Exception, flag.add_definition, "flag<=")
        self.assertRaises(Exception, flag.add_definition, "flag!=1.0")
+
+
class TestFlags(unittest.TestCase):
    """Tests for the Flags collection, which aggregates several flag
    definitions and matches them against keyword values."""

    def setUp(self):
        # A representative mix: boolean, version range, multi-valued
        # string, bare flag, and a negated string flag.
        self.flags = Flags(
            "contentaccessible=yes",
            "appversion>=3.5",
            "application=foo",
            "application=bar",
            "appversion<2.0",
            "platform",
            "abi!=Linux_x86-gcc3",
        )

    def test_flags_str(self):
        # str() groups definitions by flag name, not input order.
        self.assertEqual(
            str(self.flags),
            "contentaccessible=yes "
            + "appversion>=3.5 appversion<2.0 application=foo "
            + "application=bar platform abi!=Linux_x86-gcc3",
        )

    def test_flags_match_unset(self):
        # Keywords with no corresponding flag definition always match.
        self.assertTrue(self.flags.match(os="WINNT"))

    def test_flags_match(self):
        self.assertTrue(self.flags.match(application="foo"))
        self.assertFalse(self.flags.match(application="qux"))

    def test_flags_match_different(self):
        # abi!=Linux_x86-gcc3 matches anything except that exact value.
        self.assertTrue(self.flags.match(abi="WINNT_x86-MSVC"))
        self.assertFalse(self.flags.match(abi="Linux_x86-gcc3"))

    def test_flags_match_version(self):
        # The two appversion ranges OR together: <2.0 or >=3.5.
        self.assertTrue(self.flags.match(appversion="1.0"))
        self.assertTrue(self.flags.match(appversion="1.5"))
        self.assertFalse(self.flags.match(appversion="2.0"))
        self.assertFalse(self.flags.match(appversion="3.0"))
        self.assertTrue(self.flags.match(appversion="3.5"))
        self.assertTrue(self.flags.match(appversion="3.10"))


if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py
new file mode 100644
index 0000000000..c1d5826bbc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py
@@ -0,0 +1,176 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.chrome.manifest import (
+ MANIFESTS_TYPES,
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestCategory,
+ ManifestComponent,
+ ManifestContent,
+ ManifestContract,
+ ManifestInterfaces,
+ ManifestLocale,
+ ManifestOverlay,
+ ManifestOverride,
+ ManifestResource,
+ ManifestSkin,
+ ManifestStyle,
+ parse_manifest,
+ parse_manifest_line,
+)
+from mozpack.errors import AccumulatedErrors, errors
+from test_errors import TestErrors
+
+
class TestManifest(unittest.TestCase):
    """Tests for parsing and rebasing chrome manifests
    (mozpack.chrome.manifest)."""

    def test_parse_manifest(self):
        # One input line per manifest entry type, plus a blank line and
        # a comment that the parser must skip.
        manifest = [
            "content global content/global/",
            "content global content/global/ application=foo application=bar"
            + " platform",
            "locale global en-US content/en-US/",
            "locale global en-US content/en-US/ application=foo",
            "skin global classic/1.0 content/skin/classic/",
            "skin global classic/1.0 content/skin/classic/ application=foo"
            + " os=WINNT",
            "",
            "manifest pdfjs/chrome.manifest",
            "resource gre-resources toolkit/res/",
            "override chrome://global/locale/netError.dtd"
            + " chrome://browser/locale/netError.dtd",
            "# Comment",
            "component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js",
            "contract @mozilla.org/foo;1" + " {b2bba4df-057d-41ea-b6b1-94a10a8ede68}",
            "interfaces foo.xpt",
            "binary-component bar.so",
            "category command-line-handler m-browser"
            + " @mozilla.org/browser/clh;1"
            + " application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
            "style chrome://global/content/viewSource.xul" + " chrome://browser/skin/",
            "overlay chrome://global/content/viewSource.xul"
            + " chrome://browser/content/viewSourceOverlay.xul",
        ]
        other_manifest = ["content global content/global/"]
        # Expected parse results, in input order; the first constructor
        # argument is the manifest's base directory.
        expected_result = [
            ManifestContent("", "global", "content/global/"),
            ManifestContent(
                "",
                "global",
                "content/global/",
                "application=foo",
                "application=bar",
                "platform",
            ),
            ManifestLocale("", "global", "en-US", "content/en-US/"),
            ManifestLocale("", "global", "en-US", "content/en-US/", "application=foo"),
            ManifestSkin("", "global", "classic/1.0", "content/skin/classic/"),
            ManifestSkin(
                "",
                "global",
                "classic/1.0",
                "content/skin/classic/",
                "application=foo",
                "os=WINNT",
            ),
            Manifest("", "pdfjs/chrome.manifest"),
            ManifestResource("", "gre-resources", "toolkit/res/"),
            ManifestOverride(
                "",
                "chrome://global/locale/netError.dtd",
                "chrome://browser/locale/netError.dtd",
            ),
            ManifestComponent("", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}", "foo.js"),
            ManifestContract(
                "", "@mozilla.org/foo;1", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}"
            ),
            ManifestInterfaces("", "foo.xpt"),
            ManifestBinaryComponent("", "bar.so"),
            ManifestCategory(
                "",
                "command-line-handler",
                "m-browser",
                "@mozilla.org/browser/clh;1",
                "application=" + "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
            ),
            ManifestStyle(
                "", "chrome://global/content/viewSource.xul", "chrome://browser/skin/"
            ),
            ManifestOverlay(
                "",
                "chrome://global/content/viewSource.xul",
                "chrome://browser/content/viewSourceOverlay.xul",
            ),
        ]
        with mozunit.MockedOpen(
            {
                "manifest": "\n".join(manifest),
                "other/manifest": "\n".join(other_manifest),
            }
        ):
            # Ensure we have tests for all types of manifests.
            self.assertEqual(
                set(type(e) for e in expected_result), set(MANIFESTS_TYPES.values())
            )
            self.assertEqual(
                list(parse_manifest(os.curdir, "manifest")), expected_result
            )
            # The base of each entry is the manifest's directory.
            self.assertEqual(
                list(parse_manifest(os.curdir, "other/manifest")),
                [ManifestContent("other", "global", "content/global/")],
            )

    def test_manifest_rebase(self):
        # rebase() re-expresses an entry's paths relative to a new base;
        # rebasing back to the original base round-trips.
        m = parse_manifest_line("chrome", "content global content/global/")
        m = m.rebase("")
        self.assertEqual(str(m), "content global chrome/content/global/")
        m = m.rebase("chrome")
        self.assertEqual(str(m), "content global content/global/")

        m = parse_manifest_line("chrome/foo", "content global content/global/")
        m = m.rebase("chrome")
        self.assertEqual(str(m), "content global foo/content/global/")
        m = m.rebase("chrome/foo")
        self.assertEqual(str(m), "content global content/global/")

        # "./" paths normalize when rebased and back.
        m = parse_manifest_line("modules/foo", "resource foo ./")
        m = m.rebase("modules")
        self.assertEqual(str(m), "resource foo foo/")
        m = m.rebase("modules/foo")
        self.assertEqual(str(m), "resource foo ./")

        # rebase/move compose, e.g. relocating into a jar.
        m = parse_manifest_line("chrome", "content browser browser/content/")
        m = m.rebase("chrome/browser").move("jar:browser.jar!").rebase("")
        self.assertEqual(str(m), "content browser jar:browser.jar!/content/")
+
+
class TestManifestErrors(TestErrors, unittest.TestCase):
    """Error accumulation when parsing invalid manifests.

    TestErrors (from test_errors) provides get_output() to capture
    errors emitted through mozpack.errors.
    """

    def test_parse_manifest_errors(self):
        # Two invalid lines: "platform" is not a valid skin flag
        # (line 1) and "unsupported" is an unknown entry type (line 4).
        manifest = [
            "skin global classic/1.0 content/skin/classic/ platform",
            "",
            "binary-component bar.so",
            "unsupported foo",
        ]
        with mozunit.MockedOpen({"manifest": "\n".join(manifest)}):
            # errors.accumulate() collects all errors, then raises
            # AccumulatedErrors at the end of the block.
            with self.assertRaises(AccumulatedErrors):
                with errors.accumulate():
                    list(parse_manifest(os.curdir, "manifest"))
            out = self.get_output()
            # Expecting 2 errors
            self.assertEqual(len(out), 2)
            path = os.path.abspath("manifest")
            # First on line 1
            self.assertTrue(out[0].startswith("error: %s:1: " % path))
            # Second on line 4
            self.assertTrue(out[1].startswith("error: %s:4: " % path))


if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py
new file mode 100644
index 0000000000..60ebd2c1e9
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_copier.py
@@ -0,0 +1,548 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import stat
+import unittest
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier, FileRegistry, FileRegistrySubtree, Jarrer
+from mozpack.errors import ErrorMessage
+from mozpack.files import ExistingFile, GeneratedFile
+from mozpack.mozjar import JarReader
+from mozpack.test.test_files import MatchTestTemplate, MockDest, TestWithTmpDir
+
+
class BaseTestFileRegistry(MatchTestTemplate):
    """Shared scenarios for FileRegistry-like objects.

    Subclasses mix this with unittest.TestCase and call
    do_test_file_registry / do_test_registry_paths with the registry
    implementation under test. MatchTestTemplate drives add()/do_check()
    through its pattern-matching scenarios.
    """

    def add(self, path):
        # MatchTestTemplate hook: register a file under *path*.
        self.registry.add(path, GeneratedFile(path))

    def do_check(self, pattern, result):
        # MatchTestTemplate hook: contains() must agree with whether
        # match() returns anything, and match() must return *result*.
        self.checked = True
        if result:
            self.assertTrue(self.registry.contains(pattern))
        else:
            self.assertFalse(self.registry.contains(pattern))
        self.assertEqual(self.registry.match(pattern), result)

    def do_test_file_registry(self, registry):
        """Exercise add/remove/match/iteration on *registry*."""
        self.registry = registry
        self.registry.add("foo", GeneratedFile(b"foo"))
        bar = GeneratedFile(b"bar")
        self.registry.add("bar", bar)
        # paths() preserves insertion order; lookup returns the object.
        self.assertEqual(self.registry.paths(), ["foo", "bar"])
        self.assertEqual(self.registry["bar"], bar)

        # Duplicate paths are refused.
        self.assertRaises(
            ErrorMessage, self.registry.add, "foo", GeneratedFile(b"foo2")
        )

        # Removing an unknown path is an error.
        self.assertRaises(ErrorMessage, self.registry.remove, "qux")

        # A path cannot be nested under an existing file.
        self.assertRaises(
            ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
        )
        self.assertRaises(
            ErrorMessage, self.registry.add, "foo/bar/baz", GeneratedFile(b"foobar")
        )

        # Failed adds must not have modified the registry.
        self.assertEqual(self.registry.paths(), ["foo", "bar"])

        self.registry.remove("foo")
        self.assertEqual(self.registry.paths(), ["bar"])
        self.registry.remove("bar")
        self.assertEqual(self.registry.paths(), [])

        # Run the shared pattern-match scenarios from MatchTestTemplate.
        self.prepare_match_test()
        self.do_match_test()
        self.assertTrue(self.checked)
        self.assertEqual(
            self.registry.paths(),
            [
                "bar",
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )

        # remove() accepts a directory prefix...
        self.registry.remove("foo/qux")
        self.assertEqual(self.registry.paths(), ["bar", "foo/bar", "foo/baz"])

        self.registry.add("foo/qux", GeneratedFile(b"fooqux"))
        self.assertEqual(
            self.registry.paths(), ["bar", "foo/bar", "foo/baz", "foo/qux"]
        )
        # ...and a glob pattern.
        self.registry.remove("foo/b*")
        self.assertEqual(self.registry.paths(), ["bar", "foo/qux"])

        # Iteration yields (path, file) pairs; len() counts entries.
        self.assertEqual([f for f, c in self.registry], ["bar", "foo/qux"])
        self.assertEqual(len(self.registry), 2)

        # Hidden (dot) files are stored and found like any other.
        self.add("foo/.foo")
        self.assertTrue(self.registry.contains("foo/.foo"))

    def do_test_registry_paths(self, registry):
        """Exercise the file-vs-directory path conflict rules."""
        self.registry = registry

        # Can't add a file if it requires a directory in place of a
        # file we also require.
        self.registry.add("foo", GeneratedFile(b"foo"))
        self.assertRaises(
            ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
        )

        # Can't add a file if we already have a directory there.
        self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
        self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))

        # Bump the count of things that require bar/ to 2.
        self.registry.add("bar/zot", GeneratedFile(b"barzot"))
        self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))

        # Drop the count of things that require bar/ to 1.
        self.registry.remove("bar/baz")
        self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))

        # Drop the count of things that require bar/ to 0.
        self.registry.remove("bar/zot")
        self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+
+
class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
    """Run the shared registry scenarios against a plain FileRegistry."""

    def test_partial_paths(self):
        """_partial_paths returns each ancestor directory, deepest first."""
        cases = {
            "foo/bar/baz/zot": ["foo/bar/baz", "foo/bar", "foo"],
            "foo/bar": ["foo"],
            "bar": [],
        }
        reg = FileRegistry()
        # Python 3 only: iterate dict.items() directly rather than via
        # the six.iteritems compatibility shim.
        for path, parts in cases.items():
            self.assertEqual(reg._partial_paths(path), parts)

    def test_file_registry(self):
        self.do_test_file_registry(FileRegistry())

    def test_registry_paths(self):
        self.do_test_registry_paths(FileRegistry())

    def test_required_directories(self):
        """required_directories() tracks parent dirs by reference count."""
        self.registry = FileRegistry()

        # A top-level file needs no directories.
        self.registry.add("foo", GeneratedFile(b"foo"))
        self.assertEqual(self.registry.required_directories(), set())

        self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
        self.assertEqual(self.registry.required_directories(), {"bar"})

        # A second file under bar/ doesn't add a new directory.
        self.registry.add("bar/zot", GeneratedFile(b"barzot"))
        self.assertEqual(self.registry.required_directories(), {"bar"})

        # Deeper nesting requires every intermediate directory.
        self.registry.add("bar/zap/zot", GeneratedFile(b"barzapzot"))
        self.assertEqual(self.registry.required_directories(), {"bar", "bar/zap"})

        self.registry.remove("bar/zap/zot")
        self.assertEqual(self.registry.required_directories(), {"bar"})

        self.registry.remove("bar/baz")
        self.assertEqual(self.registry.required_directories(), {"bar"})

        # Removing the last file under bar/ drops the requirement.
        self.registry.remove("bar/zot")
        self.assertEqual(self.registry.required_directories(), set())

        self.registry.add("x/y/z", GeneratedFile(b"xyz"))
        self.assertEqual(self.registry.required_directories(), {"x", "x/y"})
+
+
class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase):
    """Run the shared registry scenarios through a FileRegistrySubtree."""

    def test_file_registry_subtree_base(self):
        registry = FileRegistry()
        # A subtree rooted at "" is equivalent to the registry itself.
        self.assertEqual(registry, FileRegistrySubtree("", registry))
        self.assertNotEqual(registry, FileRegistrySubtree("base", registry))

    def create_registry(self):
        """Return a FileRegistrySubtree over a pre-populated registry.

        Pre-existing entries outside the subtree base must not be
        visible through it.
        """
        registry = FileRegistry()
        registry.add("foo/bar", GeneratedFile(b"foo/bar"))
        registry.add("baz/qux", GeneratedFile(b"baz/qux"))
        return FileRegistrySubtree("base/root", registry)

    def test_file_registry_subtree(self):
        self.do_test_file_registry(self.create_registry())

    def test_registry_paths_subtree(self):
        # A stray no-op FileRegistry() instantiation (leftover from a
        # refactor, its result was discarded) has been removed.
        self.do_test_registry_paths(self.create_registry())
+
+
+class TestFileCopier(TestWithTmpDir):
+ def all_dirs(self, base):
+ all_dirs = set()
+ for root, dirs, files in os.walk(base):
+ if not dirs:
+ all_dirs.add(mozpath.relpath(root, base))
+ return all_dirs
+
+ def all_files(self, base):
+ all_files = set()
+ for root, dirs, files in os.walk(base):
+ for f in files:
+ all_files.add(mozpath.join(mozpath.relpath(root, base), f))
+ return all_files
+
+ def test_file_copier(self):
+ copier = FileCopier()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(
+ self.all_dirs(self.tmpdir), set(["foo/deep/nested/directory", "qux"])
+ )
+
+ self.assertEqual(
+ result.updated_files,
+ set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
+ )
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
+ self.assertEqual(
+ result.removed_files,
+ set(
+ self.tmppath(p)
+ for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file")
+ ),
+ )
+
+ def test_symlink_directory_replaced(self):
+ """Directory symlinks in destination are replaced if they need to be
+ real directories."""
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+ link = self.tmppath("dest/foo/bar")
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest)
+
+ st = os.lstat(link)
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ def test_remove_unaccounted_directory_symlinks(self):
+ """Directory symlinks in destination that are not in the way are
+ deleted according to remove_unaccounted and
+ remove_all_directory_symlinks.
+ """
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+
+ os.mkdir(self.tmppath("dest/zot"))
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ # If not remove_unaccounted but remove_empty_directories, then
+ # the symlinked directory remains (as does its containing
+ # directory).
+ result = copier.copy(
+ dest,
+ remove_unaccounted=False,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(link)
+ self.assertTrue(stat.S_ISLNK(st.st_mode))
+ self.assertFalse(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ # If remove_unaccounted but not remove_empty_directories, then
+ # only the symlinked directory is removed.
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=False,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(self.tmppath("dest/zot"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set())
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar", "zot"]))
+
+ # If remove_unaccounted and remove_empty_directories, then
+ # both the symlink and its containing directory are removed.
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set([self.tmppath("dest/zot")]))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ def test_permissions(self):
+ """Ensure files without write permission can be deleted."""
+ with open(self.tmppath("dummy"), "a"):
+ pass
+
+ p = self.tmppath("no_perms")
+ with open(p, "a"):
+ pass
+
+ # Make file and directory unwritable. Reminder: making a directory
+ # unwritable prevents modifications (including deletes) from the list
+ # of files in that directory.
+ os.chmod(p, 0o400)
+ os.chmod(self.tmpdir, 0o400)
+
+ copier = FileCopier()
+ copier.add("dummy", GeneratedFile(b"content"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(result.removed_files_count, 1)
+ self.assertFalse(os.path.exists(p))
+
+ def test_no_remove(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set([self.tmppath("emptydir")]))
+
+ def test_no_remove_empty_directories(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(
+ self.tmpdir, remove_unaccounted=False, remove_empty_directories=False
+ )
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["emptydir", "populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ def test_optional_exists_creates_unneeded_directory(self):
+ """Demonstrate that a directory not strictly required, but specified
+ as the path to an optional file, will be unnecessarily created.
+
+ This behaviour is wrong; fixing it is tracked by Bug 972432;
+ and this test exists to guard against unexpected changes in
+ behaviour.
+ """
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar", ExistingFile(required=False))
+
+ result = copier.copy(dest)
+
+ st = os.lstat(self.tmppath("dest/foo"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ # What's worse, we have no record that dest was created.
+ self.assertEqual(len(result.updated_files), 0)
+
+ # But we do have an erroneous record of an optional file
+ # existing when it does not.
+ self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files)
+
+ def test_remove_unaccounted_file_registry(self):
+ """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+ copier.add("foo/bar/qux", GeneratedFile(b"foobarqux"))
+ copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+ copier.add("foo/toto/tata", GeneratedFile(b"footototata"))
+
+ os.makedirs(os.path.join(dest, "bar"))
+ with open(os.path.join(dest, "bar", "bar"), "w") as fh:
+ fh.write("barbar")
+ os.makedirs(os.path.join(dest, "foo", "toto"))
+ with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
+ fh.write("foototototo")
+
+ result = copier.copy(dest, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(
+ self.all_dirs(dest), {"foo/bar", "foo/hoge", "foo/toto", "bar"}
+ )
+
+ copier2 = FileCopier()
+ copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+
+ # We expect only files copied from the first copier to be removed,
+ # not the extra file that was there beforehand.
+ result = copier2.copy(dest, remove_unaccounted=copier)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier2.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
+ self.assertEqual(result.updated_files, {self.tmppath("dest/foo/hoge/fuga")})
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(
+ result.removed_files,
+ {
+ self.tmppath(p)
+ for p in ("dest/foo/bar/baz", "dest/foo/bar/qux", "dest/foo/toto/tata")
+ },
+ )
+ self.assertEqual(result.removed_directories, {self.tmppath("dest/foo/bar")})
+
+
+class TestJarrer(unittest.TestCase):
+ def check_jar(self, dest, copier):
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], copier.paths())
+ for f in jar:
+ self.assertEqual(f.uncompressed_data.read(), copier[f.filename].content)
+
+ def test_jarrer(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("test")
+ copier.add("test", GeneratedFile(b"replaced-content"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ preloaded = ["qux/bar", "bar"]
+ copier.preload(preloaded)
+ copier.copy(dest)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertEqual(
+ [f.filename for f in jar],
+ preloaded + [p for p in copier.paths() if p not in preloaded],
+ )
+ self.assertEqual(jar.last_preloaded, preloaded[-1])
+
+ def test_jarrer_compress(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"ffffff"))
+ copier.add("foo/qux", GeneratedFile(b"ffffff"), compress=False)
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertTrue(jar["foo/bar"].compressed)
+ self.assertFalse(jar["foo/qux"].compressed)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py
new file mode 100644
index 0000000000..411b1b54c3
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_errors.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import unittest
+
+import mozunit
+import six
+
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+
+
+class TestErrors(object):
+ def setUp(self):
+ errors.out = six.moves.cStringIO()
+ errors.ignore_errors(False)
+
+ def tearDown(self):
+ errors.out = sys.stderr
+
+ def get_output(self):
+ return [l.strip() for l in errors.out.getvalue().splitlines()]
+
+
+class TestErrorsImpl(TestErrors, unittest.TestCase):
+ def test_plain_error(self):
+ errors.warn("foo")
+ self.assertRaises(ErrorMessage, errors.error, "foo")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo"])
+
+ def test_ignore_errors(self):
+ errors.ignore_errors()
+ errors.warn("foo")
+ errors.error("bar")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo", "warning: bar"])
+
+ def test_no_error(self):
+ with errors.accumulate():
+ errors.warn("1")
+
+ def test_simple_error(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("1")
+ self.assertEqual(self.get_output(), ["error: 1"])
+
+ def test_error_loop(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ for i in range(3):
+ errors.error("%d" % i)
+ self.assertEqual(self.get_output(), ["error: 0", "error: 1", "error: 2"])
+
+ def test_multiple_errors(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("foo")
+ for i in range(3):
+ if i == 2:
+ errors.warn("%d" % i)
+ else:
+ errors.error("%d" % i)
+ errors.error("bar")
+ self.assertEqual(
+ self.get_output(),
+ ["error: foo", "error: 0", "error: 1", "warning: 2", "error: bar"],
+ )
+
+ def test_errors_context(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ self.assertEqual(errors.get_context(), None)
+ with errors.context("foo", 1):
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("a")
+ with errors.context("bar", 2):
+ self.assertEqual(errors.get_context(), ("bar", 2))
+ errors.error("b")
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("c")
+ self.assertEqual(
+ self.get_output(),
+ [
+ "error: foo:1: a",
+ "error: bar:2: b",
+ "error: foo:1: c",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py
new file mode 100644
index 0000000000..1c86f2e0cc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_files.py
@@ -0,0 +1,1362 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensure_bytes, ensureParentDir
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import (
+ AbsoluteSymlinkFile,
+ ComposedFinder,
+ DeflatedFile,
+ Dest,
+ ExistingFile,
+ ExtractedTarFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ HardlinkFile,
+ JarFinder,
+ ManifestFile,
+ MercurialFile,
+ MercurialRevisionFinder,
+ MinifiedCommentStripped,
+ MinifiedJavaScript,
+ PreprocessedFile,
+ TarFinder,
+)
+
+# We don't have hglib installed everywhere.
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+import os
+import platform
+import random
+import sys
+import tarfile
+import unittest
+from io import BytesIO
+from tempfile import mkdtemp
+
+import mozfile
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestLocale,
+ ManifestOverride,
+ ManifestResource,
+)
+from mozpack.mozjar import JarReader, JarWriter
+
+
+class TestWithTmpDir(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ self.symlink_supported = False
+ self.hardlink_supported = False
+
+ # See comment in mozpack.files.AbsoluteSymlinkFile
+ if hasattr(os, "symlink") and platform.system() != "Windows":
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+ try:
+ os.symlink(dummy_path, self.tmppath("dummy_symlink"))
+ os.remove(self.tmppath("dummy_symlink"))
+ except EnvironmentError:
+ pass
+ finally:
+ os.remove(dummy_path)
+
+ self.symlink_supported = True
+
+ if hasattr(os, "link"):
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+ try:
+ os.link(dummy_path, self.tmppath("dummy_hardlink"))
+ os.remove(self.tmppath("dummy_hardlink"))
+ except EnvironmentError:
+ pass
+ finally:
+ os.remove(dummy_path)
+
+ self.hardlink_supported = True
+
+ def tearDown(self):
+ mozfile.rmtree(self.tmpdir)
+
+ def tmppath(self, relpath):
+ return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
+class MockDest(BytesIO, Dest):
+ def __init__(self):
+ BytesIO.__init__(self)
+ self.mode = None
+
+ def read(self, length=-1):
+ if self.mode != "r":
+ self.seek(0)
+ self.mode = "r"
+ return BytesIO.read(self, length)
+
+ def write(self, data):
+ if self.mode != "w":
+ self.seek(0)
+ self.truncate(0)
+ self.mode = "w"
+ return BytesIO.write(self, data)
+
+ def exists(self):
+ return True
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+
+
+class DestNoWrite(Dest):
+ def write(self, data):
+ raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+ def test_dest(self):
+ dest = Dest(self.tmppath("dest"))
+ self.assertFalse(dest.exists())
+ dest.write(b"foo")
+ self.assertTrue(dest.exists())
+ dest.write(b"foo")
+ self.assertEqual(dest.read(4), b"foof")
+ self.assertEqual(dest.read(), b"oo")
+ self.assertEqual(dest.read(), b"")
+ dest.write(b"bar")
+ self.assertEqual(dest.read(4), b"bar")
+ dest.close()
+ self.assertEqual(dest.read(), b"bar")
+ dest.write(b"foo")
+ dest.close()
+ dest.write(b"qux")
+ self.assertEqual(dest.read(), b"qux")
+
+
+rand = bytes(
+ random.choice(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
+ for i in six.moves.xrange(131597)
+)
+samples = [
+ b"",
+ b"test",
+ b"fooo",
+ b"same",
+ b"same",
+ b"Different and longer",
+ rand,
+ rand,
+ rand[:-1] + b"_",
+ b"test",
+]
+
+
+class TestFile(TestWithTmpDir):
+ def test_file(self):
+ """
+ Check that File.copy yields the proper content in the destination file
+ in all situations that trigger different code paths:
+ - different content
+ - different content of the same size
+ - same content
+ - long content
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ # Ensure the destination file, when it exists, is older than the
+ # source
+ if os.path.exists(dest):
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+ self.assertEqual(content, f.open().read())
+ self.assertEqual(content, f.open().read())
+
+ def test_file_dest(self):
+ """
+ Similar to test_file, but for a destination object instead of
+ a destination file. This ensures the destination object is being
+ used properly by File.copy, ensuring that other subclasses of Dest
+ will work.
+ """
+ src = self.tmppath("src")
+ dest = MockDest()
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, dest.getvalue())
+
+ def test_file_open(self):
+ """
+ Test whether File.open returns an appropriately reset file object.
+ """
+ src = self.tmppath("src")
+ content = b"".join(samples)
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+
+ f = File(src)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_file_no_write(self):
+ """
+ Test various conditions where File.copy is expected not to write
+ in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"test")
+
+ # Initial copy
+ f = File(src)
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is newer, but with the same content, no copy
+ # should occur
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"fooo")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ f.copy(dest, skip_if_older=False)
+ self.assertEqual(b"fooo", open(dest, "rb").read())
+
+
+class TestAbsoluteSymlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ AbsoluteSymlinkFile("/foo")
+
+ with self.assertRaisesRegexp(ValueError, "Symlink target not absolute"):
+ AbsoluteSymlinkFile("./foo")
+
+ def test_symlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = AbsoluteSymlinkFile(source)
+ dest = self.tmppath("symlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_symlink(self):
+ # If symlinks are supported, an existing file should be replaced by a
+ # symlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = AbsoluteSymlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "source")
+
+ def test_replace_symlink(self):
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.symlink(self.tmppath("bad"), dest)
+ self.assertTrue(os.path.islink(dest))
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertTrue(s.copy(dest))
+
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ def test_noop(self):
+ if not hasattr(os, "symlink") or sys.platform == "win32":
+ return
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+
+class TestHardlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ HardlinkFile("/foo")
+ HardlinkFile("./foo")
+
+ def test_hardlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = HardlinkFile(source)
+ dest = self.tmppath("hardlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_hardlink(self):
+        # If hardlinks are supported, an existing file should be replaced by
+        # a hardlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = HardlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "source")
+
+ def test_replace_hardlink(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+ def test_noop(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+
+class TestPreprocessedFile(TestWithTmpDir):
+ def test_preprocess(self):
+ """
+ Test that copying the file invokes the preprocessor
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ f = PreprocessedFile(src, depfile_path=None, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ def test_preprocess_file_no_write(self):
+ """
+ Test various conditions where PreprocessedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ depfile = self.tmppath("depfile")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ # Initial copy
+ f = PreprocessedFile(
+ src, depfile_path=depfile, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ # Ensure subsequent copies won't trigger writes
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\nfooo\n#endif")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ self.assertTrue(f.copy(dest, skip_if_older=False))
+ self.assertEqual(b"fooo\n", open(dest, "rb").read())
+
+ def test_preprocess_file_dependencies(self):
+ """
+ Test that the preprocess runs if the dependencies of the source change
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ incl = self.tmppath("incl")
+ deps = self.tmppath("src.pp")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ with open(incl, "wb") as tmp:
+ tmp.write(b"foo bar")
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=deps, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ # Update the source so it #includes the include file.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#include incl\n")
+ time = os.path.getmtime(dest) + 1
+ os.utime(src, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"foo bar", open(dest, "rb").read())
+
+ # If one of the dependencies changes, the file should be updated. The
+ # mtime of the dependency is set after the destination file, to avoid
+ # both files having the same time.
+ with open(incl, "wb") as tmp:
+ tmp.write(b"quux")
+ time = os.path.getmtime(dest) + 1
+ os.utime(incl, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"quux", open(dest, "rb").read())
+
+        # Perform one final copy to confirm that we don't run the preprocessor
+        # again. We update the mtime of the destination so it's newer than the
+        # input files. This would "just work" if we weren't setting mtimes by hand.
+ time = os.path.getmtime(incl) + 1
+ os.utime(dest, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+
+ def test_replace_symlink(self):
+ """
+ Test that if the destination exists, and is a symlink, the target of
+ the symlink is not overwritten by the preprocessor output.
+ """
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+ pp_source = self.tmppath("pp_in")
+ deps = self.tmppath("deps")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ self.assertTrue(os.path.islink(dest))
+
+ with open(pp_source, "wb") as tmp:
+ tmp.write(b"#define FOO\nPREPROCESSED")
+
+ f = PreprocessedFile(
+ pp_source, depfile_path=deps, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"PREPROCESSED", open(dest, "rb").read())
+ self.assertFalse(os.path.islink(dest))
+ self.assertEqual(b"", open(source, "rb").read())
+
+
+class TestExistingFile(TestWithTmpDir):
+ def test_required_missing_dest(self):
+ with self.assertRaisesRegexp(ErrorMessage, "Required existing file"):
+ f = ExistingFile(required=True)
+ f.copy(self.tmppath("dest"))
+
+ def test_required_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=True)
+ f.copy(p)
+
+ def test_optional_missing_dest(self):
+ f = ExistingFile(required=False)
+ f.copy(self.tmppath("dest"))
+
+ def test_optional_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=False)
+ f.copy(p)
+
+
+class TestGeneratedFile(TestWithTmpDir):
+ def test_generated_file(self):
+ """
+ Check that GeneratedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ f = GeneratedFile(content)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+
+ def test_generated_file_open(self):
+ """
+ Test whether GeneratedFile.open returns an appropriately reset file
+ object.
+ """
+ content = b"".join(samples)
+ f = GeneratedFile(content)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_generated_file_no_write(self):
+ """
+ Test various conditions where GeneratedFile.copy is expected not to
+ write in the destination file.
+ """
+ dest = self.tmppath("dest")
+
+ # Initial copy
+ f = GeneratedFile(b"test")
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a new instance with the same content, no copy should occur
+ f = GeneratedFile(b"test")
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = GeneratedFile(b"fooo")
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ def test_generated_file_function(self):
+ """
+ Test GeneratedFile behavior with functions.
+ """
+ dest = self.tmppath("dest")
+ data = {
+ "num_calls": 0,
+ }
+
+ def content():
+ data["num_calls"] += 1
+ return b"content"
+
+ f = GeneratedFile(content)
+ self.assertEqual(data["num_calls"], 0)
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", open(dest, "rb").read())
+ self.assertEqual(b"content", f.open().read())
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(len(b"content"), f.size())
+ self.assertEqual(data["num_calls"], 1)
+
+ f.content = b"modified"
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"modified", open(dest, "rb").read())
+ self.assertEqual(b"modified", f.open().read())
+ self.assertEqual(b"modified", f.read())
+ self.assertEqual(len(b"modified"), f.size())
+
+ f.content = content
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(data["num_calls"], 2)
+
+
+class TestDeflatedFile(TestWithTmpDir):
+ def test_deflated_file(self):
+ """
+ Check that DeflatedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ contents = {}
+ with JarWriter(src) as jar:
+ for content in samples:
+ name = "".join(
+ random.choice(
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ )
+ for i in range(8)
+ )
+ jar.add(name, content, compress=True)
+ contents[name] = content
+
+ for j in JarReader(src):
+ f = DeflatedFile(j)
+ f.copy(dest)
+ self.assertEqual(contents[j.filename], open(dest, "rb").read())
+
+ def test_deflated_file_open(self):
+ """
+ Test whether DeflatedFile.open returns an appropriately reset file
+ object.
+ """
+ src = self.tmppath("src.jar")
+ content = b"".join(samples)
+ with JarWriter(src) as jar:
+ jar.add("content", content)
+
+ f = DeflatedFile(JarReader(src)["content"])
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_deflated_file_no_write(self):
+ """
+ Test various conditions where DeflatedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ with JarWriter(src) as jar:
+ jar.add("test", b"test")
+ jar.add("test2", b"test")
+ jar.add("fooo", b"fooo")
+
+ jar = JarReader(src)
+ # Initial copy
+ f = DeflatedFile(jar["test"])
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a different file with the same content, no copy should
+ # occur
+ f = DeflatedFile(jar["test2"])
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = DeflatedFile(jar["fooo"])
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestManifestFile(TestWithTmpDir):
+ def test_manifest_file(self):
+ f = ManifestFile("chrome")
+ f.add(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ f.add(ManifestResource("chrome", "gre-resources", "toolkit/res/"))
+ f.add(ManifestResource("chrome/pdfjs", "pdfjs", "./"))
+ f.add(ManifestContent("chrome/pdfjs", "pdfjs", "pdfjs"))
+ f.add(ManifestLocale("chrome", "browser", "en-US", "en-US/locale/browser/"))
+
+ f.copy(self.tmppath("chrome.manifest"))
+ self.assertEqual(
+ open(self.tmppath("chrome.manifest")).readlines(),
+ [
+ "content global toolkit/content/global/\n",
+ "resource gre-resources toolkit/res/\n",
+ "resource pdfjs pdfjs/\n",
+ "content pdfjs pdfjs/pdfjs\n",
+ "locale browser en-US en-US/locale/browser/\n",
+ ],
+ )
+
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("", "global", "toolkit/content/global/"),
+ )
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestOverride(
+ "chrome",
+ "chrome://global/locale/netError.dtd",
+ "chrome://browser/locale/netError.dtd",
+ ),
+ )
+
+ f.remove(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("chrome", "global", "toolkit/content/global/"),
+ )
+
+ f.copy(self.tmppath("chrome.manifest"))
+ content = open(self.tmppath("chrome.manifest"), "rb").read()
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+
+# Compiled typelib for the following IDL:
+# interface foo;
+# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
+# interface bar {
+# void bar(in foo f);
+# };
+# We need to make this [scriptable] so it doesn't get deleted from the
+# typelib. We don't need to make the foo interfaces below [scriptable],
+# because they will be automatically included by virtue of being an
+# argument to a method of |bar|.
+bar_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C"
+ + b"\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+ + b"\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F"
+ + b"\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00"
+ + b"\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00"
+ + b"\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
+# interface foo {
+# void foo();
+# };
+foo_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA"
+ + b"\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
+# interface foo {
+# void foo();
+# };
+foo2_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39"
+ + b"\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+
+class TestMinifiedCommentStripped(TestWithTmpDir):
+ def test_minified_comment_stripped(self):
+ propLines = [
+ "# Comments are removed",
+ "foo = bar",
+ "",
+ "# Another comment",
+ ]
+ prop = GeneratedFile("\n".join(propLines))
+ self.assertEqual(
+ MinifiedCommentStripped(prop).open().readlines(), [b"foo = bar\n", b"\n"]
+ )
+ open(self.tmppath("prop"), "w").write("\n".join(propLines))
+ MinifiedCommentStripped(File(self.tmppath("prop"))).copy(self.tmppath("prop2"))
+ self.assertEqual(open(self.tmppath("prop2")).readlines(), ["foo = bar\n", "\n"])
+
+
class TestMinifiedJavaScript(TestWithTmpDir):
    """Tests for MinifiedJavaScript, with and without a verification command."""

    orig_lines = [
        "// Comment line",
        'let foo = "bar";',
        "var bar = true;",
        "",
        "// Another comment",
    ]

    def test_minified_javascript(self):
        orig_f = GeneratedFile("\n".join(self.orig_lines))
        min_f = MinifiedJavaScript(orig_f)

        # Minification should produce some output, and strictly fewer lines.
        mini_lines = min_f.open().readlines()
        self.assertTrue(mini_lines)
        self.assertTrue(len(mini_lines) < len(self.orig_lines))

    def _verify_command(self, code):
        """Return a command running the helper script that exits with `code`."""
        our_dir = os.path.abspath(os.path.dirname(__file__))
        return [
            sys.executable,
            os.path.join(our_dir, "support", "minify_js_verify.py"),
            code,
        ]

    def test_minified_verify_success(self):
        orig_f = GeneratedFile("\n".join(self.orig_lines))
        min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("0"))

        mini_lines = [six.ensure_text(s) for s in min_f.open().readlines()]
        self.assertTrue(mini_lines)
        self.assertTrue(len(mini_lines) < len(self.orig_lines))

    def test_minified_verify_failure(self):
        orig_f = GeneratedFile("\n".join(self.orig_lines))
        # Capture warning output; restore the real stream even if opening or
        # reading raises, so a failure here cannot poison subsequent tests.
        errors.out = six.StringIO()
        try:
            min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("1"))
            mini_lines = min_f.open().readlines()
            output = errors.out.getvalue()
        finally:
            errors.out = sys.stderr
        self.assertEqual(
            output,
            "warning: JS minification verification failed for <unknown>:\n"
            "warning: Error message\n",
        )
        # Verification failed, so the original content is passed through.
        self.assertEqual(mini_lines, orig_f.open().readlines())
+
+
class MatchTestTemplate(object):
    """Mixin driving a shared suite of pattern-matching tests against a finder.

    Concrete subclasses provide add(path), which creates an entry at `path`
    in the finder's backing store, and do_check(pattern, result), which
    asserts that finding `pattern` yields exactly `result`.
    """

    def prepare_match_test(self, with_dotfiles=False):
        # Lay out the fixed tree that every match test below runs against.
        self.add("bar")
        self.add("foo/bar")
        self.add("foo/baz")
        self.add("foo/qux/1")
        self.add("foo/qux/bar")
        self.add("foo/qux/2/test")
        self.add("foo/qux/2/test2")
        if with_dotfiles:
            self.add("foo/.foo")
            self.add("foo/.bar/foo")

    def do_match_test(self):
        # Empty pattern and "*" both match everything (dotfiles excluded).
        self.do_check(
            "",
            [
                "bar",
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
        self.do_check(
            "*",
            [
                "bar",
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
        # A directory path matches everything under it.
        self.do_check(
            "foo/qux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
        )
        self.do_check("foo/b*", ["foo/bar", "foo/baz"])
        self.do_check("baz", [])
        self.do_check("foo/foo", [])
        self.do_check("foo/*ar", ["foo/bar"])
        self.do_check("*ar", ["bar"])
        self.do_check("*/bar", ["foo/bar"])
        self.do_check(
            "foo/*ux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
        )
        self.do_check(
            "foo/q*ux",
            ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"],
        )
        self.do_check("foo/*/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
        # "**" crosses directory boundaries; "*" does not.
        self.do_check("**/bar", ["bar", "foo/bar", "foo/qux/bar"])
        self.do_check("foo/**/test", ["foo/qux/2/test"])
        self.do_check(
            "foo",
            [
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
        self.do_check(
            "foo/**",
            [
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
        self.do_check("**/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
        self.do_check(
            "**/foo",
            [
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
        self.do_check("**/barbaz", [])
        self.do_check("f**/bar", ["foo/bar"])

    def do_finder_test(self, finder):
        """Check dotfile handling: found via explicit paths, never via patterns."""
        self.assertTrue(finder.contains("foo/.foo"))
        self.assertTrue(finder.contains("foo/.bar"))
        self.assertTrue("foo/.foo" in [f for f, c in finder.find("foo/.foo")])
        self.assertTrue("foo/.bar/foo" in [f for f, c in finder.find("foo/.bar")])
        self.assertEqual(
            sorted([f for f, c in finder.find("foo/.*")]), ["foo/.bar/foo", "foo/.foo"]
        )
        for pattern in ["foo", "**", "**/*", "**/foo", "foo/*"]:
            self.assertFalse("foo/.foo" in [f for f, c in finder.find(pattern)])
            self.assertFalse("foo/.bar/foo" in [f for f, c in finder.find(pattern)])
            # find() must agree with filtering the full iteration by match().
            self.assertEqual(
                sorted([f for f, c in finder.find(pattern)]),
                sorted([f for f, c in finder if mozpath.match(f, pattern)]),
            )
+
+
def do_check(test, finder, pattern, result):
    """Assert that finder.find(pattern) yields exactly the paths in result.

    Also verifies that finder.contains(pattern) agrees with whether any
    result is expected at all.
    """
    if result:
        test.assertTrue(finder.contains(pattern))
    else:
        test.assertFalse(finder.contains(pattern))
    found = sorted(path for path, _ in finder.find(pattern))
    test.assertEqual(found, sorted(result))
+
+
class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
    """Run the shared match tests against FileFinder over a real directory."""

    def add(self, path):
        ensureParentDir(self.tmppath(path))
        # Use a context manager so the handle is closed deterministically
        # (the original leaked the handle returned by bare open()).
        with open(self.tmppath(path), "wb") as fh:
            fh.write(six.ensure_binary(path))

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_file_finder(self):
        self.prepare_match_test(with_dotfiles=True)
        self.finder = FileFinder(self.tmpdir)
        self.do_match_test()
        self.do_finder_test(self.finder)

    def test_get(self):
        """get() returns None for misses and a File with the right path on hits."""
        self.prepare_match_test()
        finder = FileFinder(self.tmpdir)

        self.assertIsNone(finder.get("does-not-exist"))
        res = finder.get("bar")
        self.assertIsInstance(res, File)
        self.assertEqual(mozpath.normpath(res.path), mozpath.join(self.tmpdir, "bar"))

    def test_ignored_dirs(self):
        """Ignored directories should not have results returned."""
        self.prepare_match_test()
        self.add("fooz")

        # Present to ensure prefix matching doesn't exclude.
        self.add("foo/quxz")

        self.finder = FileFinder(self.tmpdir, ignore=["foo/qux"])

        self.do_check("**", ["bar", "foo/bar", "foo/baz", "foo/quxz", "fooz"])
        self.do_check("foo/*", ["foo/bar", "foo/baz", "foo/quxz"])
        self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
        self.do_check("foo/qux/**", [])
        self.do_check("foo/qux/*", [])
        self.do_check("foo/qux/bar", [])
        self.do_check("foo/quxz", ["foo/quxz"])
        self.do_check("fooz", ["fooz"])

    def test_ignored_files(self):
        """Ignored files should not have results returned."""
        self.prepare_match_test()

        # Be sure prefix match doesn't get ignored.
        self.add("barz")

        self.finder = FileFinder(self.tmpdir, ignore=["foo/bar", "bar"])
        self.do_check(
            "**",
            [
                "barz",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/2/test",
                "foo/qux/2/test2",
                "foo/qux/bar",
            ],
        )
        self.do_check(
            "foo/**",
            [
                "foo/baz",
                "foo/qux/1",
                "foo/qux/2/test",
                "foo/qux/2/test2",
                "foo/qux/bar",
            ],
        )

    def test_ignored_patterns(self):
        """Ignore entries with patterns should be honored."""
        self.prepare_match_test()

        self.add("foo/quxz")

        self.finder = FileFinder(self.tmpdir, ignore=["foo/qux/*"])
        self.do_check("**", ["foo/bar", "foo/baz", "foo/quxz", "bar"])
        self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])

    def test_dotfiles(self):
        """Finder can find files beginning with . is configured."""
        self.prepare_match_test(with_dotfiles=True)
        self.finder = FileFinder(self.tmpdir, find_dotfiles=True)
        self.do_check(
            "**",
            [
                "bar",
                "foo/.foo",
                "foo/.bar/foo",
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )

    def test_dotfiles_plus_ignore(self):
        self.prepare_match_test(with_dotfiles=True)
        self.finder = FileFinder(
            self.tmpdir, find_dotfiles=True, ignore=["foo/.bar/**"]
        )
        self.do_check(
            "foo/**",
            [
                "foo/.foo",
                "foo/bar",
                "foo/baz",
                "foo/qux/1",
                "foo/qux/bar",
                "foo/qux/2/test",
                "foo/qux/2/test2",
            ],
        )
+
+
class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
    """Run the shared match tests against a JarFinder over a freshly built jar."""

    def add(self, path):
        # Each entry is stored compressed, with its own path as content.
        self.jar.add(path, ensure_bytes(path), compress=True)

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_jar_finder(self):
        jar_path = self.tmppath("test.jar")
        self.jar = JarWriter(file=jar_path)
        self.prepare_match_test()
        self.jar.finish()
        self.finder = JarFinder(jar_path, JarReader(file=jar_path))
        self.do_match_test()

        # Misses return None; hits come back as DeflatedFile instances.
        self.assertIsNone(self.finder.get("does-not-exist"))
        self.assertIsInstance(self.finder.get("bar"), DeflatedFile)
+
+
class TestTarFinder(MatchTestTemplate, TestWithTmpDir):
    """Run the shared match tests against a TarFinder over a bz2 tarball."""

    def add(self, path):
        # Entries only need to exist; empty TarInfo members are enough.
        self.tar.addfile(tarfile.TarInfo(name=path))

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def test_tar_finder(self):
        tar_path = self.tmppath("test.tar.bz2")
        self.tar = tarfile.open(name=tar_path, mode="w:bz2")
        self.prepare_match_test()
        self.tar.close()
        with tarfile.open(name=tar_path, mode="r:bz2") as tarreader:
            self.finder = TarFinder(tar_path, tarreader)
            self.do_match_test()

        # Misses return None; hits come back as ExtractedTarFile instances.
        self.assertIsNone(self.finder.get("does-not-exist"))
        self.assertIsInstance(self.finder.get("bar"), ExtractedTarFile)
+
+
class TestComposedFinder(MatchTestTemplate, TestWithTmpDir):
    """Run the (non-pattern) match tests against a two-finder ComposedFinder."""

    def add(self, path, content=None):
        # Put foo/qux files under $tmp/b.
        if path.startswith("foo/qux/"):
            real_path = mozpath.join("b", path[8:])
        else:
            real_path = mozpath.join("a", path)
        ensureParentDir(self.tmppath(real_path))
        if not content:
            content = six.ensure_binary(path)
        # Use context managers so the handles are closed deterministically
        # (the original leaked the handles returned by bare open() calls).
        with open(self.tmppath(real_path), "wb") as fh:
            fh.write(content)

    def do_check(self, pattern, result):
        # Patterns are skipped here — presumably ComposedFinder does not
        # support wildcard find(); verify against its implementation.
        if "*" in pattern:
            return
        do_check(self, self.finder, pattern, result)

    def test_composed_finder(self):
        self.prepare_match_test()
        # Also add files in $tmp/a/foo/qux because ComposedFinder is
        # expected to mask foo/qux entirely with content from $tmp/b.
        ensureParentDir(self.tmppath("a/foo/qux/hoge"))
        with open(self.tmppath("a/foo/qux/hoge"), "wb") as fh:
            fh.write(b"hoge")
        with open(self.tmppath("a/foo/qux/bar"), "wb") as fh:
            fh.write(b"not the right content")
        self.finder = ComposedFinder(
            {
                "": FileFinder(self.tmppath("a")),
                "foo/qux": FileFinder(self.tmppath("b")),
            }
        )
        self.do_match_test()

        self.assertIsNone(self.finder.get("does-not-exist"))
        self.assertIsInstance(self.finder.get("bar"), File)
+
+
@unittest.skipUnless(hglib, "hglib not available")
@unittest.skipIf(
    six.PY3 and os.name == "nt", "Does not currently work in Python3 on Windows"
)
class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir):
    """Run the shared match tests against files stored in a Mercurial repo.

    Every hglib client opened during a test is tracked in self._clients so
    its server process can be shut down in tearDown.
    """

    def setUp(self):
        super(TestMercurialRevisionFinder, self).setUp()
        hglib.init(self.tmpdir)
        self._clients = []

    def tearDown(self):
        # Ensure the hg client process is closed. Otherwise, Windows
        # may have trouble removing the repo directory because the process
        # has an open handle on it.
        for client in getattr(self, "_clients", []):
            if client.server:
                client.close()

        self._clients[:] = []

        super(TestMercurialRevisionFinder, self).tearDown()

    def _client(self):
        """Open (and track) a fresh hglib client on the test repository."""
        configs = (
            # b'' because py2 needs !unicode
            b'ui.username="Dummy User <dummy@example.com>"',
        )
        client = hglib.open(
            six.ensure_binary(self.tmpdir),
            encoding=b"UTF-8",  # b'' because py2 needs !unicode
            configs=configs,
        )
        self._clients.append(client)
        return client

    def add(self, path):
        # Write the file into the working directory and stage it with hg add;
        # it only becomes visible to the finder once a commit happens.
        with self._client() as c:
            ensureParentDir(self.tmppath(path))
            with open(self.tmppath(path), "wb") as fh:
                fh.write(six.ensure_binary(path))
            c.add(six.ensure_binary(self.tmppath(path)))

    def do_check(self, pattern, result):
        do_check(self, self.finder, pattern, result)

    def _get_finder(self, *args, **kwargs):
        # Track the finder's internal client too, so tearDown closes it.
        f = MercurialRevisionFinder(*args, **kwargs)
        self._clients.append(f._client)
        return f

    def test_default_revision(self):
        self.prepare_match_test()
        with self._client() as c:
            c.commit("initial commit")

        self.finder = self._get_finder(self.tmpdir)
        self.do_match_test()

        self.assertIsNone(self.finder.get("does-not-exist"))
        self.assertIsInstance(self.finder.get("bar"), MercurialFile)

    def test_old_revision(self):
        """A finder pinned to a revision sees that revision's content only."""
        with self._client() as c:
            with open(self.tmppath("foo"), "wb") as fh:
                fh.write(b"foo initial")
            c.add(six.ensure_binary(self.tmppath("foo")))
            c.commit("initial")

            with open(self.tmppath("foo"), "wb") as fh:
                fh.write(b"foo second")
            with open(self.tmppath("bar"), "wb") as fh:
                fh.write(b"bar second")
            c.add(six.ensure_binary(self.tmppath("bar")))
            c.commit("second")
            # This wipes out the working directory, ensuring the finder isn't
            # finding anything from the filesystem.
            c.rawcommand([b"update", b"null"])

        finder = self._get_finder(self.tmpdir, "0")
        f = finder.get("foo")
        self.assertEqual(f.read(), b"foo initial")
        self.assertEqual(f.read(), b"foo initial", "read again for good measure")
        # "bar" did not exist at revision 0.
        self.assertIsNone(finder.get("bar"))

        finder = self._get_finder(self.tmpdir, rev="1")
        f = finder.get("foo")
        self.assertEqual(f.read(), b"foo second")
        f = finder.get("bar")
        self.assertEqual(f.read(), b"bar second")
        f = None

    def test_recognize_repo_paths(self):
        """With recognize_repo_paths, only absolute repo paths are accepted."""
        with self._client() as c:
            with open(self.tmppath("foo"), "wb") as fh:
                fh.write(b"initial")
            c.add(six.ensure_binary(self.tmppath("foo")))
            c.commit("initial")
            c.rawcommand([b"update", b"null"])

        finder = self._get_finder(self.tmpdir, "0", recognize_repo_paths=True)
        with self.assertRaises(NotImplementedError):
            list(finder.find(""))

        # Relative/empty paths are rejected in this mode.
        with self.assertRaises(ValueError):
            finder.get("foo")
        with self.assertRaises(ValueError):
            finder.get("")

        f = finder.get(self.tmppath("foo"))
        self.assertIsInstance(f, MercurialFile)
        self.assertEqual(f.read(), b"initial")
        f = None
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py
new file mode 100644
index 0000000000..a5db53b58c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_manifests.py
@@ -0,0 +1,465 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.manifests import InstallManifest, UnreadableInstallManifest
+from mozpack.test.test_files import TestWithTmpDir
+
+
class TestInstallManifest(TestWithTmpDir):
    def test_construct(self):
        """A freshly constructed manifest has no entries."""
        m = InstallManifest()
        self.assertEqual(len(m), 0)

    def test_malformed(self):
        """Reading a non-manifest file raises UnreadableInstallManifest."""
        f = self.tmppath("manifest")
        # Close the handle deterministically instead of leaking it.
        with open(f, "wt") as fh:
            fh.write("junk\n")
        with self.assertRaises(UnreadableInstallManifest):
            InstallManifest(f)
+
+ def test_adds(self):
+ m = InstallManifest()
+ m.add_link("s_source", "s_dest")
+ m.add_copy("c_source", "c_dest")
+ m.add_required_exists("e_dest")
+ m.add_optional_exists("o_dest")
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+ m.add_preprocess("p_source", "p_dest", "p_source.pp")
+ m.add_content("content", "content")
+
+ self.assertEqual(len(m), 8)
+ self.assertIn("s_dest", m)
+ self.assertIn("c_dest", m)
+ self.assertIn("p_dest", m)
+ self.assertIn("e_dest", m)
+ self.assertIn("o_dest", m)
+ self.assertIn("content", m)
+
+ with self.assertRaises(ValueError):
+ m.add_link("s_other", "s_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_copy("c_other", "c_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_preprocess("p_other", "p_dest", "p_other.pp")
+
+ with self.assertRaises(ValueError):
+ m.add_required_exists("e_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_optional_exists("o_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_content("content", "content")
+
    def _get_test_manifest(self):
        """Build a manifest containing one entry of every supported type."""
        m = InstallManifest()
        m.add_link(self.tmppath("s_source"), "s_dest")
        m.add_copy(self.tmppath("c_source"), "c_dest")
        m.add_preprocess(
            self.tmppath("p_source"),
            "p_dest",
            self.tmppath("p_source.pp"),
            "#",
            {"FOO": "BAR", "BAZ": "QUX"},
        )
        m.add_required_exists("e_dest")
        m.add_optional_exists("o_dest")
        m.add_pattern_link("ps_base", "*", "ps_dest")
        m.add_pattern_copy("pc_base", "**", "pc_dest")
        m.add_content("the content\non\nmultiple lines", "content")

        return m
+
    def test_serialization(self):
        """A manifest written to disk reads back equal and re-writes identically."""
        m = self._get_test_manifest()

        p = self.tmppath("m")
        m.write(path=p)
        self.assertTrue(os.path.isfile(p))

        with open(p, "r") as fh:
            c = fh.read()

        # 9 lines total: 8 entries plus the leading header line.
        self.assertEqual(c.count("\n"), 9)

        lines = c.splitlines()
        self.assertEqual(len(lines), 9)

        # First line is "5" — presumably the serialization format version.
        self.assertEqual(lines[0], "5")

        m2 = InstallManifest(path=p)
        self.assertEqual(m, m2)
        p2 = self.tmppath("m2")
        m2.write(path=p2)

        with open(p2, "r") as fh:
            c2 = fh.read()

        # Round-tripping must reproduce the file byte for byte.
        self.assertEqual(c, c2)
+
+ def test_populate_registry(self):
+ m = self._get_test_manifest()
+ r = FileRegistry()
+ m.populate_registry(r)
+
+ self.assertEqual(len(r), 6)
+ self.assertEqual(
+ r.paths(), ["c_dest", "content", "e_dest", "o_dest", "p_dest", "s_dest"]
+ )
+
+ def test_pattern_expansion(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ def test_write_expand_pattern(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ track = self.tmppath("track")
+ m.write(path=track, expand_pattern=True)
+
+ m = InstallManifest(path=track)
+ self.assertEqual(
+ sorted(dest for dest in m._dests), ["dest/foo/file1", "dest/foo/file2"]
+ )
+
+ def test_or(self):
+ m1 = self._get_test_manifest()
+ orig_length = len(m1)
+ m2 = InstallManifest()
+ m2.add_link("s_source2", "s_dest2")
+ m2.add_copy("c_source2", "c_dest2")
+
+ m1 |= m2
+
+ self.assertEqual(len(m2), 2)
+ self.assertEqual(len(m1), orig_length + 2)
+
+ self.assertIn("s_dest2", m1)
+ self.assertIn("c_dest2", m1)
+
    def test_copier_application(self):
        """Applying a manifest installs every entry type and prunes strays."""
        dest = self.tmppath("dest")
        os.mkdir(dest)

        # A file not named by the manifest; the copy should remove it.
        to_delete = self.tmppath("dest/to_delete")
        with open(to_delete, "a"):
            pass

        with open(self.tmppath("s_source"), "wt") as fh:
            fh.write("symlink!")

        with open(self.tmppath("c_source"), "wt") as fh:
            fh.write("copy!")

        with open(self.tmppath("p_source"), "wt") as fh:
            fh.write("#define FOO 1\npreprocess!")

        # Pre-create the exists-type destinations.
        with open(self.tmppath("dest/e_dest"), "a"):
            pass

        with open(self.tmppath("dest/o_dest"), "a"):
            pass

        m = self._get_test_manifest()
        c = FileCopier()
        m.populate_registry(c)
        result = c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath("dest/s_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/c_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/e_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/o_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/content")))
        self.assertFalse(os.path.exists(to_delete))

        with open(self.tmppath("dest/s_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "symlink!")

        with open(self.tmppath("dest/c_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "copy!")

        # The preprocessor directive line is consumed; only content remains.
        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "preprocess!")

        self.assertEqual(
            result.updated_files,
            set(
                self.tmppath(p)
                for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest", "dest/content")
            ),
        )
        self.assertEqual(
            result.existing_files,
            set([self.tmppath("dest/e_dest"), self.tmppath("dest/o_dest")]),
        )
        self.assertEqual(result.removed_files, {to_delete})
        self.assertEqual(result.removed_directories, set())
+
    def test_preprocessor(self):
        """Preprocessed output is regenerated when defines, the manifest
        file, or an included file change — and only then.

        The mtime adjustments below deliberately backdate inputs so the
        copier's staleness checks drive regeneration.
        """
        manifest = self.tmppath("m")
        deps = self.tmppath("m.pp")
        dest = self.tmppath("dest")
        include = self.tmppath("p_incl")

        with open(include, "wt") as fh:
            fh.write("#define INCL\n")
        time = os.path.getmtime(include) - 3
        os.utime(include, (time, time))

        with open(self.tmppath("p_source"), "wt") as fh:
            fh.write("#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n")
            fh.write("#ifdef DEPTEST\nPASS2\n#endif\n")
            fh.write("#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n")
        time = os.path.getmtime(self.tmppath("p_source")) - 3
        os.utime(self.tmppath("p_source"), (time, time))

        # Create and write a manifest with the preprocessed file, then apply it.
        # This should write out our preprocessed file.
        m = InstallManifest()
        m.add_preprocess(
            self.tmppath("p_source"), "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"}
        )
        m.write(path=manifest)

        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS1\n")

        # Create a second manifest with the preprocessed file, then apply it.
        # Since this manifest does not exist on the disk, there should not be a
        # dependency on it, and the preprocessed file should not be modified.
        m2 = InstallManifest()
        m2.add_preprocess(
            self.tmppath("p_source"), "p_dest", deps, "#", {"DEPTEST": True}
        )
        c = FileCopier()
        m2.populate_registry(c)
        result = c.copy(dest)

        self.assertFalse(self.tmppath("dest/p_dest") in result.updated_files)
        self.assertTrue(self.tmppath("dest/p_dest") in result.existing_files)

        # Write out the second manifest, then load it back in from the disk.
        # This should add the dependency on the manifest file, so our
        # preprocessed file should be regenerated with the new defines.
        # We also set the mtime on the destination file back, so it will be
        # older than the manifest file.
        m2.write(path=manifest)
        time = os.path.getmtime(manifest) - 1
        os.utime(self.tmppath("dest/p_dest"), (time, time))
        m2 = InstallManifest(path=manifest)
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS2\n")

        # Set the time on the manifest back, so it won't be picked up as
        # modified in the next test
        time = os.path.getmtime(manifest) - 1
        os.utime(manifest, (time, time))

        # Update the contents of a file included by the source file. This should
        # cause the destination to be regenerated.
        with open(include, "wt") as fh:
            fh.write("#define INCLTEST\n")

        time = os.path.getmtime(include) - 1
        os.utime(self.tmppath("dest/p_dest"), (time, time))
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS2\nPASS3\n")
+
    def test_preprocessor_dependencies(self):
        """Changes to #include'd files are tracked through the deps file
        and trigger regeneration of the preprocessed destination."""
        manifest = self.tmppath("m")
        deps = self.tmppath("m.pp")
        dest = self.tmppath("dest")
        source = self.tmppath("p_source")
        destfile = self.tmppath("dest/p_dest")
        include = self.tmppath("p_incl")
        os.mkdir(dest)

        with open(source, "wt") as fh:
            fh.write("#define SRC\nSOURCE\n")
        time = os.path.getmtime(source) - 3
        os.utime(source, (time, time))

        with open(include, "wt") as fh:
            fh.write("INCLUDE\n")
        # NOTE(review): uses source's mtime rather than include's — likely
        # intentional (both just need to predate the manifest), but verify.
        time = os.path.getmtime(source) - 3
        os.utime(include, (time, time))

        # Create and write a manifest with the preprocessed file.
        m = InstallManifest()
        m.add_preprocess(source, "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"})
        m.write(path=manifest)

        time = os.path.getmtime(source) - 5
        os.utime(manifest, (time, time))

        # Now read the manifest back in, and apply it. This should write out
        # our preprocessed file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\n")

        # Next, modify the source to #INCLUDE another file.
        with open(source, "wt") as fh:
            fh.write("SOURCE\n#include p_incl\n")
        time = os.path.getmtime(source) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that it also reads the newly included
        # file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\nINCLUDE\n")

        # Set the time on the source file back, so it won't be picked up as
        # modified in the next test.
        time = os.path.getmtime(source) - 1
        os.utime(source, (time, time))

        # Now, modify the include file (but not the original source).
        with open(include, "wt") as fh:
            fh.write("INCLUDE MODIFIED\n")
        time = os.path.getmtime(include) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that the change to the include file
        # is detected. That should cause the preprocessor to run again.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\nINCLUDE MODIFIED\n")

        # ORing an InstallManifest should copy file dependencies
        m = InstallManifest()
        m |= InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        e = c._files["p_dest"]
        self.assertEqual(e.extra_depends, [manifest])
+
+ def test_add_entries_from(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ p = InstallManifest()
+ p.add_entries_from(m)
+ self.assertEqual(len(p), 1)
+
+ c = FileCopier()
+ p.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ q = InstallManifest()
+ q.add_entries_from(m, base="target")
+ self.assertEqual(len(q), 1)
+
+ d = FileCopier()
+ q.populate_registry(d)
+ self.assertEqual(d.paths(), ["target/dest/foo/file1", "target/dest/foo/file2"])
+
+ # Some of the values in an InstallManifest include destination
+ # information that is present in the keys. Verify that we can
+ # round-trip serialization.
+ r = InstallManifest()
+ r.add_entries_from(m)
+ r.add_entries_from(m, base="target")
+ self.assertEqual(len(r), 2)
+
+ temp_path = self.tmppath("temp_path")
+ r.write(path=temp_path)
+
+ s = InstallManifest(path=temp_path)
+ e = FileCopier()
+ s.populate_registry(e)
+
+ self.assertEqual(
+ e.paths(),
+ [
+ "dest/foo/file1",
+ "dest/foo/file2",
+ "target/dest/foo/file1",
+ "target/dest/foo/file2",
+ ],
+ )
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py
new file mode 100644
index 0000000000..e96c59238f
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -0,0 +1,350 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from collections import OrderedDict
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+from mozpack.mozjar import (
+ Deflater,
+ JarLog,
+ JarReader,
+ JarReaderError,
+ JarStruct,
+ JarWriter,
+ JarWriterError,
+)
+from mozpack.test.test_files import MockDest
+
# Absolute path to the "data" directory that sits next to this test file.
test_data_path = mozpath.join(mozpath.abspath(mozpath.dirname(__file__)), "data")
+
+
class TestJarStruct(unittest.TestCase):
    """Tests for JarStruct serialization and deserialization."""

    class Foo(JarStruct):
        # Test structure: a magic signature, fixed-width fields, and two
        # variable-length strings sized by the 'length'/'length2' fields.
        MAGIC = 0x01020304
        STRUCT = OrderedDict(
            [
                ("foo", "uint32"),
                ("bar", "uint16"),
                ("qux", "uint16"),
                ("length", "uint16"),
                ("length2", "uint16"),
                ("string", "length"),
                ("string2", "length2"),
            ]
        )

    def test_jar_struct(self):
        foo = TestJarStruct.Foo()
        self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
        self.assertEqual(foo["foo"], 0)
        self.assertEqual(foo["bar"], 0)
        self.assertEqual(foo["qux"], 0)
        # Length fields are implicit and not exposed as keys.
        self.assertFalse("length" in foo)
        self.assertFalse("length2" in foo)
        self.assertEqual(foo["string"], "")
        self.assertEqual(foo["string2"], "")

        self.assertEqual(foo.size, 16)

        foo["foo"] = 0x42434445
        foo["bar"] = 0xABCD
        foo["qux"] = 0xEF01
        foo["string"] = "abcde"
        foo["string2"] = "Arbitrarily long string"

        # Little-endian fixed fields followed by the two strings back to back.
        serialized = (
            b"\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef"
            + b"\x05\x00\x17\x00abcdeArbitrarily long string"
        )
        self.assertEqual(foo.size, len(serialized))
        foo_serialized = foo.serialize()
        self.assertEqual(foo_serialized, serialized)

    def do_test_read_jar_struct(self, data):
        # Data not starting with the magic (or truncated) raises.
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])

        # Skipping the leading pad byte yields a valid structure.
        foo = TestJarStruct.Foo(data[1:])
        self.assertEqual(foo["foo"], 0x45444342)
        self.assertEqual(foo["bar"], 0xCDAB)
        self.assertEqual(foo["qux"], 0x01EF)
        self.assertFalse("length" in foo)
        self.assertFalse("length2" in foo)
        self.assertEqual(foo["string"], b"012345")
        self.assertEqual(foo["string2"], b"67")

    def test_read_jar_struct(self):
        data = (
            b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
            + b"\x01\x06\x00\x02\x0001234567890"
        )
        self.do_test_read_jar_struct(data)

    def test_read_jar_struct_memoryview(self):
        # Parsing must also work on a zero-copy memoryview of the same data.
        data = (
            b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
            + b"\x01\x06\x00\x02\x0001234567890"
        )
        self.do_test_read_jar_struct(memoryview(data))
+
+
class TestDeflater(unittest.TestCase):
    """Tests for Deflater with compression enabled and disabled."""

    def wrap(self, data):
        # Subclasses override this to feed other buffer types to Deflater.
        return data

    def test_deflater_no_compress(self):
        deflater = Deflater(False)
        deflater.write(self.wrap(b"abc"))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, b"abc")
        self.assertEqual(deflater.crc32, 0x352441C2)

    def test_deflater_compress_no_gain(self):
        # Input too small to shrink: stored uncompressed despite compress=True.
        deflater = Deflater(True)
        deflater.write(self.wrap(b"abc"))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, b"abc")
        self.assertEqual(deflater.crc32, 0x352441C2)

    def test_deflater_compress(self):
        deflater = Deflater(True)
        deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
        self.assertTrue(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 26)
        self.assertNotEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.crc32, 0xD46B97ED)
        # The CRC is the same as when not compressed
        deflater = Deflater(False)
        self.assertFalse(deflater.compressed)
        deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
        self.assertEqual(deflater.crc32, 0xD46B97ED)

    def test_deflater_empty(self):
        # No writes at all: zero sizes, empty payload, zero CRC.
        deflater = Deflater(False)
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 0)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, b"")
        self.assertEqual(deflater.crc32, 0)
+
+
class TestDeflaterMemoryView(TestDeflater):
    # Re-run every TestDeflater test with memoryview inputs.
    def wrap(self, data):
        return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
    def test_jar(self):
        """Round-trip entries through JarWriter/JarReader.

        Covers per-entry compression overrides, duplicate-name rejection,
        backslash normalization on Windows, membership tests, and random
        access by entry name.
        """
        s = MockDest()
        with JarWriter(fileobj=s) as jar:
            jar.add("foo", b"foo")
            # Adding the same name twice is an error.
            self.assertRaises(JarWriterError, jar.add, "foo", b"bar")
            jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
            jar.add("baz\\backslash", b"aaaaaaaaaaaaaaa")

        files = [j for j in JarReader(fileobj=s)]

        self.assertEqual(files[0].filename, "foo")
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), b"foo")

        self.assertEqual(files[1].filename, "bar")
        self.assertTrue(files[1].compressed)
        self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

        self.assertEqual(files[2].filename, "baz/qux")
        self.assertFalse(files[2].compressed)
        self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

        if os.sep == "\\":
            self.assertEqual(
                files[3].filename,
                "baz/backslash",
                "backslashes in filenames on Windows should get normalized",
            )
        else:
            self.assertEqual(
                files[3].filename,
                "baz\\backslash",
                "backslashes in filenames on POSIX platform are untouched",
            )

        # Same again with compression off by default, overridden per entry.
        s = MockDest()
        with JarWriter(fileobj=s, compress=False) as jar:
            jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
            jar.add("foo", b"foo")
            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", True)

        jar = JarReader(fileobj=s)
        files = [j for j in jar]

        self.assertEqual(files[0].filename, "bar")
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

        self.assertEqual(files[1].filename, "foo")
        self.assertFalse(files[1].compressed)
        self.assertEqual(files[1].read(), b"foo")

        self.assertEqual(files[2].filename, "baz/qux")
        self.assertTrue(files[2].compressed)
        self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")

        self.assertTrue("bar" in jar)
        self.assertTrue("foo" in jar)
        self.assertFalse("baz" in jar)
        self.assertTrue("baz/qux" in jar)
        # NOTE(review): these three pass the second argument as assertTrue's
        # msg parameter; assertEqual was probably intended. As written they
        # only check truthiness of the lookup.
        self.assertTrue(jar["bar"], files[1])
        self.assertTrue(jar["foo"], files[0])
        self.assertTrue(jar["baz/qux"], files[2])

        # Re-reading from the start must give identical entries.
        s.seek(0)
        jar = JarReader(fileobj=s)
        self.assertTrue("bar" in jar)
        self.assertTrue("foo" in jar)
        self.assertFalse("baz" in jar)
        self.assertTrue("baz/qux" in jar)

        files[0].seek(0)
        self.assertEqual(jar["bar"].filename, files[0].filename)
        self.assertEqual(jar["bar"].compressed, files[0].compressed)
        self.assertEqual(jar["bar"].read(), files[0].read())

        files[1].seek(0)
        self.assertEqual(jar["foo"].filename, files[1].filename)
        self.assertEqual(jar["foo"].compressed, files[1].compressed)
        self.assertEqual(jar["foo"].read(), files[1].read())

        files[2].seek(0)
        self.assertEqual(jar["baz/qux"].filename, files[2].filename)
        self.assertEqual(jar["baz/qux"].compressed, files[2].compressed)
        self.assertEqual(jar["baz/qux"].read(), files[2].read())
+
+ def test_rejar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+
+ new = MockDest()
+ with JarWriter(fileobj=new) as jar:
+ for j in JarReader(fileobj=s):
+ jar.add(j.filename, j)
+
+ jar = JarReader(fileobj=new)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "foo")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"foo")
+
+ self.assertEqual(files[1].filename, "bar")
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ def test_add_from_finder(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ finder = FileFinder(test_data_path)
+ for p, f in finder.find("test_data"):
+ jar.add("test_data", f)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "test_data")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"test_data")
+
+
+class TestPreload(unittest.TestCase):
+    # JarWriter.preload() reorders entries so preloaded ones come first;
+    # JarReader.last_preloaded reports the last entry in the preload set.
+    def test_preload(self):
+        # Without preload(), nothing is reported as preloaded.
+        s = MockDest()
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        jar = JarReader(fileobj=s)
+        self.assertEqual(jar.last_preloaded, None)
+
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+            jar.preload(["baz/qux", "bar"])
+
+        jar = JarReader(fileobj=s)
+        self.assertEqual(jar.last_preloaded, "bar")
+        files = [j for j in jar]
+
+        # Preloaded entries come first, in the preload order given.
+        self.assertEqual(files[0].filename, "baz/qux")
+        self.assertEqual(files[1].filename, "bar")
+        self.assertEqual(files[2].filename, "foo")
+
+
+class TestJarLog(unittest.TestCase):
+    # JarLog parses a "<jar> <entry>" access log into a dict keyed by jar
+    # path; per-jar entry lists are deduplicated, keeping first-seen order.
+    def test_jarlog(self):
+        s = six.moves.cStringIO(
+            "\n".join(
+                [
+                    "bar/baz.jar first",
+                    "bar/baz.jar second",
+                    "bar/baz.jar third",
+                    "bar/baz.jar second",
+                    "bar/baz.jar second",
+                    "omni.ja stuff",
+                    "bar/baz.jar first",
+                    "omni.ja other/stuff",
+                    "omni.ja stuff",
+                    "bar/baz.jar third",
+                ]
+            )
+        )
+        log = JarLog(fileobj=s)
+        self.assertEqual(
+            set(log.keys()),
+            set(
+                [
+                    "bar/baz.jar",
+                    "omni.ja",
+                ]
+            ),
+        )
+        self.assertEqual(
+            log["bar/baz.jar"],
+            [
+                "first",
+                "second",
+                "third",
+            ],
+        )
+        self.assertEqual(
+            log["omni.ja"],
+            [
+                "stuff",
+                "other/stuff",
+            ],
+        )
+
+
+# Allow running this test file directly.
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 0000000000..266902ebb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,630 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+from buildconfig import topobjdir
+from mozunit import MockedOpen
+
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestContent,
+ ManifestResource,
+)
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import GeneratedFile
+from mozpack.packager import (
+ CallDeque,
+ Component,
+ SimpleManifestSink,
+ SimplePackager,
+ preprocess_manifest,
+)
+
+# Sample package manifest exercising: a bare (default-component) pattern,
+# named components, a removal ("-" prefix), a component option
+# (destdir="destdir"), a ";" comment, and a preprocessor conditional.
+MANIFEST = """
+bar/*
+[foo]
+foo/*
+-foo/bar
+chrome.manifest
+[zot destdir="destdir"]
+foo/zot
+; comment
+#ifdef baz
+[baz]
+baz@SUFFIX@
+#endif
+"""
+
+
+class TestPreprocessManifest(unittest.TestCase):
+    # preprocess_manifest() feeds a (preprocessed) package manifest into a
+    # sink; the mock sink below records each add/remove together with the
+    # error context (file, line) active at the time.
+    MANIFEST_PATH = mozpath.join("$OBJDIR", "manifest")
+
+    EXPECTED_LOG = [
+        ((MANIFEST_PATH, 2), "add", "", "bar/*"),
+        ((MANIFEST_PATH, 4), "add", "foo", "foo/*"),
+        ((MANIFEST_PATH, 5), "remove", "foo", "foo/bar"),
+        ((MANIFEST_PATH, 6), "add", "foo", "chrome.manifest"),
+        ((MANIFEST_PATH, 8), "add", 'zot destdir="destdir"', "foo/zot"),
+    ]
+
+    def setUp(self):
+        class MockSink(object):
+            def __init__(self):
+                self.log = []
+
+            def add(self, component, path):
+                self._log(errors.get_context(), "add", repr(component), path)
+
+            def remove(self, component, path):
+                self._log(errors.get_context(), "remove", repr(component), path)
+
+            def _log(self, *args):
+                self.log.append(args)
+
+        self.sink = MockSink()
+        # Run from the objdir so the manifest's relative path resolves
+        # against $OBJDIR; restored in tearDown.
+        self.cwd = os.getcwd()
+        os.chdir(topobjdir)
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+
+    def test_preprocess_manifest(self):
+        with MockedOpen({"manifest": MANIFEST}):
+            preprocess_manifest(self.sink, "manifest")
+        self.assertEqual(self.sink.log, self.EXPECTED_LOG)
+
+    def test_preprocess_manifest_missing_define(self):
+        # @SUFFIX@ is used inside #ifdef baz but never defined: defining
+        # baz without SUFFIX must raise a preprocessor error.
+        with MockedOpen({"manifest": MANIFEST}):
+            self.assertRaises(
+                Preprocessor.Error,
+                preprocess_manifest,
+                self.sink,
+                "manifest",
+                {"baz": 1},
+            )
+
+    def test_preprocess_manifest_defines(self):
+        # With both defines present, the conditional [baz] section is
+        # included and @SUFFIX@ substituted.
+        with MockedOpen({"manifest": MANIFEST}):
+            preprocess_manifest(self.sink, "manifest", {"baz": 1, "SUFFIX": ".exe"})
+        self.assertEqual(
+            self.sink.log,
+            self.EXPECTED_LOG + [((self.MANIFEST_PATH, 12), "add", "baz", "baz.exe")],
+        )
+
+
+class MockFinder(object):
+    # In-memory stand-in for a FileFinder: serves files from a dict and
+    # records every pattern it was asked to find, for later assertions.
+    def __init__(self, files):
+        self.files = files
+        self.log = []
+
+    def find(self, path):
+        self.log.append(path)
+        # Sorted for deterministic iteration order across runs.
+        for f in sorted(self.files):
+            if mozpath.match(f, path):
+                yield f, self.files[f]
+
+    def __iter__(self):
+        return self.find("")
+
+
+class MockFormatter(object):
+    # Records every formatter call, prefixed with the error context
+    # (file, line) active at call time, so tests can assert both *what*
+    # was added and *where* it came from.
+    def __init__(self):
+        self.log = []
+
+    def add_base(self, *args):
+        self._log(errors.get_context(), "add_base", *args)
+
+    def add_manifest(self, *args):
+        self._log(errors.get_context(), "add_manifest", *args)
+
+    def add_interfaces(self, *args):
+        self._log(errors.get_context(), "add_interfaces", *args)
+
+    def add(self, *args):
+        self._log(errors.get_context(), "add", *args)
+
+    def _log(self, *args):
+        self.log.append(args)
+
+
+class TestSimplePackager(unittest.TestCase):
+    # Exercises SimplePackager end to end: chrome.manifest discovery and
+    # chaining, addon/base detection (install.rdf, manifest.json,
+    # binary-component), em:unpack handling, and the ordering/contexts of
+    # the resulting formatter calls.
+    def test_simple_packager(self):
+        class GeneratedFileWithPath(GeneratedFile):
+            # GeneratedFile with an on-disk-looking path, so relative
+            # "manifest" entries inside it can be resolved.
+            def __init__(self, path, content):
+                GeneratedFile.__init__(self, content)
+                self.path = path
+
+        formatter = MockFormatter()
+        packager = SimplePackager(formatter)
+        curdir = os.path.abspath(os.curdir)
+        file = GeneratedFileWithPath(
+            os.path.join(curdir, "foo", "bar.manifest"),
+            b"resource bar bar/\ncontent bar bar/",
+        )
+        with errors.context("manifest", 1):
+            packager.add("foo/bar.manifest", file)
+
+        file = GeneratedFileWithPath(
+            os.path.join(curdir, "foo", "baz.manifest"), b"resource baz baz/"
+        )
+        with errors.context("manifest", 2):
+            packager.add("bar/baz.manifest", file)
+
+        # A binary-component entry makes "qux" a non-addon base.
+        with errors.context("manifest", 3):
+            packager.add(
+                "qux/qux.manifest",
+                GeneratedFile(
+                    b"".join(
+                        [
+                            b"resource qux qux/\n",
+                            b"binary-component qux.so\n",
+                        ]
+                    )
+                ),
+            )
+        bar_xpt = GeneratedFile(b"bar.xpt")
+        qux_xpt = GeneratedFile(b"qux.xpt")
+        foo_html = GeneratedFile(b"foo_html")
+        bar_html = GeneratedFile(b"bar_html")
+        with errors.context("manifest", 4):
+            packager.add("foo/bar.xpt", bar_xpt)
+        with errors.context("manifest", 5):
+            packager.add("foo/bar/foo.html", foo_html)
+            packager.add("foo/bar/bar.html", bar_html)
+
+        # Top-level manifest chaining the two sub-manifests above.
+        file = GeneratedFileWithPath(
+            os.path.join(curdir, "foo.manifest"),
+            b"".join(
+                [
+                    b"manifest foo/bar.manifest\n",
+                    b"manifest bar/baz.manifest\n",
+                ]
+            ),
+        )
+        with errors.context("manifest", 6):
+            packager.add("foo.manifest", file)
+        with errors.context("manifest", 7):
+            packager.add("foo/qux.xpt", qux_xpt)
+
+        # An unchained chrome.manifest makes "addon" its own base.
+        file = GeneratedFileWithPath(
+            os.path.join(curdir, "addon", "chrome.manifest"), b"resource hoge hoge/"
+        )
+        with errors.context("manifest", 8):
+            packager.add("addon/chrome.manifest", file)
+
+        install_rdf = GeneratedFile(b"<RDF></RDF>")
+        with errors.context("manifest", 9):
+            packager.add("addon/install.rdf", install_rdf)
+
+        # addon2: binary component => must be "unpacked".
+        with errors.context("manifest", 10):
+            packager.add("addon2/install.rdf", install_rdf)
+            packager.add(
+                "addon2/chrome.manifest", GeneratedFile(b"binary-component addon2.so")
+            )
+
+        # addon3: binary component in a *chained* sub-manifest => still
+        # "unpacked".
+        with errors.context("manifest", 11):
+            packager.add("addon3/install.rdf", install_rdf)
+            packager.add(
+                "addon3/chrome.manifest",
+                GeneratedFile(b"manifest components/components.manifest"),
+            )
+            packager.add(
+                "addon3/components/components.manifest",
+                GeneratedFile(b"binary-component addon3.so"),
+            )
+
+        # addon4..addon11: every syntactic variant of em:unpack in
+        # install.rdf (element/attribute, quoted/unquoted, true/false).
+        with errors.context("manifest", 12):
+            install_rdf_addon4 = GeneratedFile(
+                b"<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>"
+            )
+            packager.add("addon4/install.rdf", install_rdf_addon4)
+
+        with errors.context("manifest", 13):
+            install_rdf_addon5 = GeneratedFile(
+                b"<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>"
+            )
+            packager.add("addon5/install.rdf", install_rdf_addon5)
+
+        with errors.context("manifest", 14):
+            install_rdf_addon6 = GeneratedFile(
+                b"<RDF>\n<... em:unpack=true>\n<...>\n</RDF>"
+            )
+            packager.add("addon6/install.rdf", install_rdf_addon6)
+
+        with errors.context("manifest", 15):
+            install_rdf_addon7 = GeneratedFile(
+                b"<RDF>\n<... em:unpack=false>\n<...>\n</RDF>"
+            )
+            packager.add("addon7/install.rdf", install_rdf_addon7)
+
+        with errors.context("manifest", 16):
+            install_rdf_addon8 = GeneratedFile(
+                b'<RDF>\n<... em:unpack="true">\n<...>\n</RDF>'
+            )
+            packager.add("addon8/install.rdf", install_rdf_addon8)
+
+        with errors.context("manifest", 17):
+            install_rdf_addon9 = GeneratedFile(
+                b'<RDF>\n<... em:unpack="false">\n<...>\n</RDF>'
+            )
+            packager.add("addon9/install.rdf", install_rdf_addon9)
+
+        with errors.context("manifest", 18):
+            install_rdf_addon10 = GeneratedFile(
+                b"<RDF>\n<... em:unpack='true'>\n<...>\n</RDF>"
+            )
+            packager.add("addon10/install.rdf", install_rdf_addon10)
+
+        with errors.context("manifest", 19):
+            install_rdf_addon11 = GeneratedFile(
+                b"<RDF>\n<... em:unpack='false'>\n<...>\n</RDF>"
+            )
+            packager.add("addon11/install.rdf", install_rdf_addon11)
+
+        we_manifest = GeneratedFile(
+            b'{"manifest_version": 2, "name": "Test WebExtension", "version": "1.0"}'
+        )
+        # hybrid and hybrid2 are both bootstrapped extensions with
+        # embedded webextensions, they differ in the order in which
+        # the manifests are added to the packager.
+        with errors.context("manifest", 20):
+            packager.add("hybrid/install.rdf", install_rdf)
+
+        with errors.context("manifest", 21):
+            packager.add("hybrid/webextension/manifest.json", we_manifest)
+
+        with errors.context("manifest", 22):
+            packager.add("hybrid2/webextension/manifest.json", we_manifest)
+
+        with errors.context("manifest", 23):
+            packager.add("hybrid2/install.rdf", install_rdf)
+
+        with errors.context("manifest", 24):
+            packager.add("webextension/manifest.json", we_manifest)
+
+        # A manifest.json that isn't a webextension manifest must not
+        # create an addon base.
+        non_we_manifest = GeneratedFile(b'{"not a webextension": true}')
+        with errors.context("manifest", 25):
+            packager.add("nonwebextension/manifest.json", non_we_manifest)
+
+        # Everything is deferred until close().
+        self.assertEqual(formatter.log, [])
+
+        with errors.context("dummy", 1):
+            packager.close()
+        self.maxDiff = None
+        # The formatter is expected to reorder the manifest entries so that
+        # chrome entries appear before the others.
+        self.assertEqual(
+            formatter.log,
+            [
+                (("dummy", 1), "add_base", "", False),
+                (("dummy", 1), "add_base", "addon", True),
+                (("dummy", 1), "add_base", "addon10", "unpacked"),
+                (("dummy", 1), "add_base", "addon11", True),
+                (("dummy", 1), "add_base", "addon2", "unpacked"),
+                (("dummy", 1), "add_base", "addon3", "unpacked"),
+                (("dummy", 1), "add_base", "addon4", "unpacked"),
+                (("dummy", 1), "add_base", "addon5", True),
+                (("dummy", 1), "add_base", "addon6", "unpacked"),
+                (("dummy", 1), "add_base", "addon7", True),
+                (("dummy", 1), "add_base", "addon8", "unpacked"),
+                (("dummy", 1), "add_base", "addon9", True),
+                (("dummy", 1), "add_base", "hybrid", True),
+                (("dummy", 1), "add_base", "hybrid2", True),
+                (("dummy", 1), "add_base", "qux", False),
+                (("dummy", 1), "add_base", "webextension", True),
+                (
+                    (os.path.join(curdir, "foo", "bar.manifest"), 2),
+                    "add_manifest",
+                    ManifestContent("foo", "bar", "bar/"),
+                ),
+                (
+                    (os.path.join(curdir, "foo", "bar.manifest"), 1),
+                    "add_manifest",
+                    ManifestResource("foo", "bar", "bar/"),
+                ),
+                (
+                    ("bar/baz.manifest", 1),
+                    "add_manifest",
+                    ManifestResource("bar", "baz", "baz/"),
+                ),
+                (
+                    ("qux/qux.manifest", 1),
+                    "add_manifest",
+                    ManifestResource("qux", "qux", "qux/"),
+                ),
+                (
+                    ("qux/qux.manifest", 2),
+                    "add_manifest",
+                    ManifestBinaryComponent("qux", "qux.so"),
+                ),
+                (("manifest", 4), "add_interfaces", "foo/bar.xpt", bar_xpt),
+                (("manifest", 7), "add_interfaces", "foo/qux.xpt", qux_xpt),
+                (
+                    (os.path.join(curdir, "addon", "chrome.manifest"), 1),
+                    "add_manifest",
+                    ManifestResource("addon", "hoge", "hoge/"),
+                ),
+                (
+                    ("addon2/chrome.manifest", 1),
+                    "add_manifest",
+                    ManifestBinaryComponent("addon2", "addon2.so"),
+                ),
+                (
+                    ("addon3/components/components.manifest", 1),
+                    "add_manifest",
+                    ManifestBinaryComponent("addon3/components", "addon3.so"),
+                ),
+                (("manifest", 5), "add", "foo/bar/foo.html", foo_html),
+                (("manifest", 5), "add", "foo/bar/bar.html", bar_html),
+                (("manifest", 9), "add", "addon/install.rdf", install_rdf),
+                (("manifest", 10), "add", "addon2/install.rdf", install_rdf),
+                (("manifest", 11), "add", "addon3/install.rdf", install_rdf),
+                (("manifest", 12), "add", "addon4/install.rdf", install_rdf_addon4),
+                (("manifest", 13), "add", "addon5/install.rdf", install_rdf_addon5),
+                (("manifest", 14), "add", "addon6/install.rdf", install_rdf_addon6),
+                (("manifest", 15), "add", "addon7/install.rdf", install_rdf_addon7),
+                (("manifest", 16), "add", "addon8/install.rdf", install_rdf_addon8),
+                (("manifest", 17), "add", "addon9/install.rdf", install_rdf_addon9),
+                (("manifest", 18), "add", "addon10/install.rdf", install_rdf_addon10),
+                (("manifest", 19), "add", "addon11/install.rdf", install_rdf_addon11),
+                (("manifest", 20), "add", "hybrid/install.rdf", install_rdf),
+                (
+                    ("manifest", 21),
+                    "add",
+                    "hybrid/webextension/manifest.json",
+                    we_manifest,
+                ),
+                (
+                    ("manifest", 22),
+                    "add",
+                    "hybrid2/webextension/manifest.json",
+                    we_manifest,
+                ),
+                (("manifest", 23), "add", "hybrid2/install.rdf", install_rdf),
+                (("manifest", 24), "add", "webextension/manifest.json", we_manifest),
+                (
+                    ("manifest", 25),
+                    "add",
+                    "nonwebextension/manifest.json",
+                    non_we_manifest,
+                ),
+            ],
+        )
+
+        self.assertEqual(
+            packager.get_bases(),
+            set(
+                [
+                    "",
+                    "addon",
+                    "addon2",
+                    "addon3",
+                    "addon4",
+                    "addon5",
+                    "addon6",
+                    "addon7",
+                    "addon8",
+                    "addon9",
+                    "addon10",
+                    "addon11",
+                    "qux",
+                    "hybrid",
+                    "hybrid2",
+                    "webextension",
+                ]
+            ),
+        )
+        self.assertEqual(packager.get_bases(addons=False), set(["", "qux"]))
+
+    def test_simple_packager_manifest_consistency(self):
+        formatter = MockFormatter()
+        # bar/ is detected as an addon because of install.rdf, but top-level
+        # includes a manifest inside bar/.
+        packager = SimplePackager(formatter)
+        packager.add(
+            "base.manifest",
+            GeneratedFile(
+                b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+            ),
+        )
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/install.rdf", GeneratedFile(b""))
+
+        with self.assertRaises(ErrorMessage) as e:
+            packager.close()
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "bar/baz.manifest" is included from "base.manifest", '
+            'which is outside "bar"',
+        )
+
+        # bar/ is detected as a separate base because of chrome.manifest that
+        # is included nowhere, but top-level includes another manifest inside
+        # bar/.
+        packager = SimplePackager(formatter)
+        packager.add(
+            "base.manifest",
+            GeneratedFile(
+                b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+            ),
+        )
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/chrome.manifest", GeneratedFile(b"resource baz baz"))
+
+        with self.assertRaises(ErrorMessage) as e:
+            packager.close()
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "bar/baz.manifest" is included from "base.manifest", '
+            'which is outside "bar"',
+        )
+
+        # bar/ is detected as a separate base because of chrome.manifest that
+        # is included nowhere, but chrome.manifest includes baz.manifest from
+        # the same directory. This shouldn't error out.
+        packager = SimplePackager(formatter)
+        packager.add("base.manifest", GeneratedFile(b"manifest foo/bar.manifest\n"))
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/chrome.manifest", GeneratedFile(b"manifest baz.manifest"))
+        packager.close()
+
+
+class TestSimpleManifestSink(unittest.TestCase):
+    # SimpleManifestSink resolves component patterns against a finder,
+    # strips the "bin/" prefix, and honors a component's destdir option.
+    def test_simple_manifest_parser(self):
+        formatter = MockFormatter()
+        foobar = GeneratedFile(b"foobar")
+        foobaz = GeneratedFile(b"foobaz")
+        fooqux = GeneratedFile(b"fooqux")
+        foozot = GeneratedFile(b"foozot")
+        finder = MockFinder(
+            {
+                "bin/foo/bar": foobar,
+                "bin/foo/baz": foobaz,
+                "bin/foo/qux": fooqux,
+                "bin/foo/zot": foozot,
+                "bin/foo/chrome.manifest": GeneratedFile(b"resource foo foo/"),
+                "bin/chrome.manifest": GeneratedFile(b"manifest foo/chrome.manifest"),
+            }
+        )
+        parser = SimpleManifestSink(finder, formatter)
+        component0 = Component("component0")
+        component1 = Component("component1")
+        component2 = Component("component2", destdir="destdir")
+        parser.add(component0, "bin/foo/b*")
+        parser.add(component1, "bin/foo/qux")
+        parser.add(component1, "bin/foo/chrome.manifest")
+        parser.add(component2, "bin/foo/zot")
+        # "bin/bar" matches nothing in the finder, which errors out.
+        # NOTE(review): the component is passed as the plain string
+        # "component1" here rather than a Component instance — presumably
+        # irrelevant since the error fires first; confirm.
+        self.assertRaises(ErrorMessage, parser.add, "component1", "bin/bar")
+
+        self.assertEqual(formatter.log, [])
+        parser.close()
+        self.assertEqual(
+            formatter.log,
+            [
+                (None, "add_base", "", False),
+                (
+                    ("foo/chrome.manifest", 1),
+                    "add_manifest",
+                    ManifestResource("foo", "foo", "foo/"),
+                ),
+                (None, "add", "foo/bar", foobar),
+                (None, "add", "foo/baz", foobaz),
+                (None, "add", "foo/qux", fooqux),
+                # component2's destdir is prepended to the target path.
+                (None, "add", "destdir/foo/zot", foozot),
+            ],
+        )
+
+        # The finder was queried once per add(), plus bin/chrome.manifest
+        # at close() time.
+        self.assertEqual(
+            finder.log,
+            [
+                "bin/foo/b*",
+                "bin/foo/qux",
+                "bin/foo/chrome.manifest",
+                "bin/foo/zot",
+                "bin/bar",
+                "bin/chrome.manifest",
+            ],
+        )
+
+
+class TestCallDeque(unittest.TestCase):
+    # CallDeque queues (callable, args) pairs and runs them in FIFO order
+    # only when execute() is called; bound methods, static methods and
+    # plain functions are all accepted.
+    def test_call_deque(self):
+        class Logger(object):
+            def __init__(self):
+                self._log = []
+
+            def log(self, str):
+                self._log.append(str)
+
+            @staticmethod
+            def staticlog(logger, str):
+                logger.log(str)
+
+        def do_log(logger, str):
+            logger.log(str)
+
+        logger = Logger()
+        d = CallDeque()
+        d.append(logger.log, "foo")
+        d.append(logger.log, "bar")
+        d.append(logger.staticlog, logger, "baz")
+        d.append(do_log, logger, "qux")
+        # Nothing runs until execute().
+        self.assertEqual(logger._log, [])
+        d.execute()
+        self.assertEqual(logger._log, ["foo", "bar", "baz", "qux"])
+
+
+class TestComponent(unittest.TestCase):
+    # Tests parsing of manifest component headers of the form
+    # 'name key="value" ...' via Component._split_component_and_options
+    # and Component.from_string.
+    def do_split(self, string, name, options):
+        # Assert that `string` parses into the given name/options pair.
+        n, o = Component._split_component_and_options(string)
+        self.assertEqual(name, n)
+        self.assertEqual(options, o)
+
+    def test_component_split_component_and_options(self):
+        self.do_split("component", "component", {})
+        self.do_split("trailingspace ", "trailingspace", {})
+        self.do_split(" leadingspace", "leadingspace", {})
+        self.do_split(" trim ", "trim", {})
+        self.do_split(' trim key="value"', "trim", {"key": "value"})
+        self.do_split(' trim empty=""', "trim", {"empty": ""})
+        self.do_split(' trim space=" "', "trim", {"space": " "})
+        self.do_split(
+            'component key="value" key2="second" ',
+            "component",
+            {"key": "value", "key2": "second"},
+        )
+        self.do_split(
+            'trim key=" value with spaces " key2="spaces again"',
+            "trim",
+            {"key": " value with spaces ", "key2": "spaces again"},
+        )
+
+    def do_split_error(self, string):
+        # Assert that `string` is rejected as malformed.
+        self.assertRaises(ValueError, Component._split_component_and_options, string)
+
+    def test_component_split_component_and_options_errors(self):
+        self.do_split_error('"component')
+        self.do_split_error('comp"onent')
+        self.do_split_error('component"')
+        self.do_split_error('"component"')
+        self.do_split_error("=component")
+        self.do_split_error("comp=onent")
+        self.do_split_error("component=")
+        self.do_split_error('key="val"')
+        self.do_split_error("component key=")
+        self.do_split_error('component key="val')
+        self.do_split_error('component key=val"')
+        self.do_split_error('component key="val" x')
+        self.do_split_error('component x key="val"')
+        self.do_split_error('component key1="val" x key2="val"')
+
+    def do_from_string(self, string, name, destdir=""):
+        # Assert the parsed Component's name and destdir attributes.
+        component = Component.from_string(string)
+        self.assertEqual(name, component.name)
+        self.assertEqual(destdir, component.destdir)
+
+    def test_component_from_string(self):
+        self.do_from_string("component", "component")
+        self.do_from_string("component-with-hyphen", "component-with-hyphen")
+        self.do_from_string('component destdir="foo/bar"', "component", "foo/bar")
+        self.do_from_string('component destdir="bar spc"', "component", "bar spc")
+        self.assertRaises(ErrorMessage, Component.from_string, "")
+        self.assertRaises(ErrorMessage, Component.from_string, "component novalue=")
+        self.assertRaises(
+            ErrorMessage, Component.from_string, "component badoption=badvalue"
+        )
+
+
+# Allow running this test file directly.
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py
new file mode 100644
index 0000000000..b09971a102
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_formats.py
@@ -0,0 +1,537 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from itertools import chain
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestComponent,
+ ManifestContent,
+ ManifestLocale,
+ ManifestResource,
+ ManifestSkin,
+)
+from mozpack.copier import FileRegistry
+from mozpack.errors import ErrorMessage
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.test.test_files import bar_xpt, foo2_xpt, foo_xpt
+from test_errors import TestErrors
+
+# Shared fixture describing a package: its bases (top-level app, sub-app,
+# and addons — "unpacked"/True flavors), chrome manifest entries, and the
+# file set. The RESULT_* dicts below are the expected formatter output for
+# this fixture under each packaging format.
+CONTENTS = {
+    "bases": {
+        # base_path: is_addon?
+        "": False,
+        "app": False,
+        "addon0": "unpacked",
+        "addon1": True,
+        "app/chrome/addons/addon2": True,
+    },
+    "manifests": [
+        ManifestContent("chrome/f", "oo", "oo/"),
+        ManifestContent("chrome/f", "bar", "oo/bar/"),
+        ManifestResource("chrome/f", "foo", "resource://bar/"),
+        ManifestBinaryComponent("components", "foo.so"),
+        ManifestContent("app/chrome", "content", "foo/"),
+        ManifestComponent("app/components", "{foo-id}", "foo.js"),
+        ManifestContent("addon0/chrome", "addon0", "foo/bar/"),
+        ManifestContent("addon1/chrome", "addon1", "foo/bar/"),
+        ManifestContent("app/chrome/addons/addon2/chrome", "addon2", "foo/bar/"),
+    ],
+    "files": {
+        "chrome/f/oo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "chrome/f/oo/baz": GeneratedFile(b"foobaz"),
+        "chrome/f/oo/qux": GeneratedFile(b"fooqux"),
+        "components/foo.so": GeneratedFile(b"foo.so"),
+        "components/foo.xpt": foo_xpt,
+        "components/bar.xpt": bar_xpt,
+        "foo": GeneratedFile(b"foo"),
+        "app/chrome/foo/foo": GeneratedFile(b"appfoo"),
+        "app/components/foo.js": GeneratedFile(b"foo.js"),
+        "addon0/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "addon0/components/foo.xpt": foo2_xpt,
+        "addon0/components/bar.xpt": bar_xpt,
+        "addon1/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "addon1/components/foo.xpt": foo2_xpt,
+        "addon1/components/bar.xpt": bar_xpt,
+        "app/chrome/addons/addon2/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "app/chrome/addons/addon2/components/foo.xpt": foo2_xpt,
+        "app/chrome/addons/addon2/components/bar.xpt": bar_xpt,
+    },
+}
+
+# Shorthand for the fixture's file dict.
+FILES = CONTENTS["files"]
+
+# Expected registry contents for FlatFormatter: manifest-file entries map
+# to lists of manifest lines, regular entries map to the File objects.
+RESULT_FLAT = {
+    "chrome.manifest": [
+        "manifest chrome/chrome.manifest",
+        "manifest components/components.manifest",
+    ],
+    "chrome/chrome.manifest": [
+        "manifest f/f.manifest",
+    ],
+    "chrome/f/f.manifest": [
+        "content oo oo/",
+        "content bar oo/bar/",
+        "resource foo resource://bar/",
+    ],
+    "chrome/f/oo/bar/baz": FILES["chrome/f/oo/bar/baz"],
+    "chrome/f/oo/baz": FILES["chrome/f/oo/baz"],
+    "chrome/f/oo/qux": FILES["chrome/f/oo/qux"],
+    "components/components.manifest": [
+        "binary-component foo.so",
+        "interfaces bar.xpt",
+        "interfaces foo.xpt",
+    ],
+    "components/foo.so": FILES["components/foo.so"],
+    "components/foo.xpt": foo_xpt,
+    "components/bar.xpt": bar_xpt,
+    "foo": FILES["foo"],
+    "app/chrome.manifest": [
+        "manifest chrome/chrome.manifest",
+        "manifest components/components.manifest",
+    ],
+    "app/chrome/chrome.manifest": [
+        "content content foo/",
+    ],
+    "app/chrome/foo/foo": FILES["app/chrome/foo/foo"],
+    "app/components/components.manifest": [
+        "component {foo-id} foo.js",
+    ],
+    "app/components/foo.js": FILES["app/components/foo.js"],
+}
+
+# Each addon base gets the same flat layout, rooted at the addon path.
+for addon in ("addon0", "addon1", "app/chrome/addons/addon2"):
+    RESULT_FLAT.update(
+        {
+            mozpath.join(addon, p): f
+            for p, f in six.iteritems(
+                {
+                    "chrome.manifest": [
+                        "manifest chrome/chrome.manifest",
+                        "manifest components/components.manifest",
+                    ],
+                    "chrome/chrome.manifest": [
+                        "content %s foo/bar/" % mozpath.basename(addon),
+                    ],
+                    "chrome/foo/bar/baz": FILES[
+                        mozpath.join(addon, "chrome/foo/bar/baz")
+                    ],
+                    "components/components.manifest": [
+                        "interfaces bar.xpt",
+                        "interfaces foo.xpt",
+                    ],
+                    "components/bar.xpt": bar_xpt,
+                    "components/foo.xpt": foo2_xpt,
+                }
+            )
+        }
+    )
+
+# Expected JarFormatter output: starts from the flat layout for entries
+# that stay as plain files, then chrome content moves into .jar files
+# (jar: URLs in the manifests) and packed addons become .xpi registries.
+RESULT_JAR = {
+    p: RESULT_FLAT[p]
+    for p in (
+        "chrome.manifest",
+        "chrome/chrome.manifest",
+        "components/components.manifest",
+        "components/foo.so",
+        "components/foo.xpt",
+        "components/bar.xpt",
+        "foo",
+        "app/chrome.manifest",
+        "app/components/components.manifest",
+        "app/components/foo.js",
+        "addon0/chrome.manifest",
+        "addon0/components/components.manifest",
+        "addon0/components/foo.xpt",
+        "addon0/components/bar.xpt",
+    )
+}
+
+RESULT_JAR.update(
+    {
+        "chrome/f/f.manifest": [
+            "content oo jar:oo.jar!/",
+            "content bar jar:oo.jar!/bar/",
+            "resource foo resource://bar/",
+        ],
+        "chrome/f/oo.jar": {
+            "bar/baz": FILES["chrome/f/oo/bar/baz"],
+            "baz": FILES["chrome/f/oo/baz"],
+            "qux": FILES["chrome/f/oo/qux"],
+        },
+        "app/chrome/chrome.manifest": [
+            "content content jar:foo.jar!/",
+        ],
+        "app/chrome/foo.jar": {
+            "foo": FILES["app/chrome/foo/foo"],
+        },
+        "addon0/chrome/chrome.manifest": [
+            "content addon0 jar:foo.jar!/bar/",
+        ],
+        "addon0/chrome/foo.jar": {
+            "bar/baz": FILES["addon0/chrome/foo/bar/baz"],
+        },
+        # addon1 and addon2 are packed (is_addon=True), so their whole
+        # flat layout ends up inside an .xpi.
+        "addon1.xpi": {
+            mozpath.relpath(p, "addon1"): f
+            for p, f in six.iteritems(RESULT_FLAT)
+            if p.startswith("addon1/")
+        },
+        "app/chrome/addons/addon2.xpi": {
+            mozpath.relpath(p, "app/chrome/addons/addon2"): f
+            for p, f in six.iteritems(RESULT_FLAT)
+            if p.startswith("app/chrome/addons/addon2/")
+        },
+    }
+)
+
+# Expected OmniJarFormatter output: most non-addon content moves inside
+# the "omni.foo" archives (one per app base); binary components and
+# addons stay outside, as in the jar layout.
+RESULT_OMNIJAR = {
+    p: RESULT_FLAT[p]
+    for p in (
+        "components/foo.so",
+        "foo",
+    )
+}
+
+# Addons are formatted exactly as with JarFormatter.
+RESULT_OMNIJAR.update({p: RESULT_JAR[p] for p in RESULT_JAR if p.startswith("addon")})
+
+RESULT_OMNIJAR.update(
+    {
+        "omni.foo": {
+            "components/components.manifest": [
+                "interfaces bar.xpt",
+                "interfaces foo.xpt",
+            ],
+        },
+        # Outside the omnijar, only the binary component's manifest chain
+        # remains.
+        "chrome.manifest": [
+            "manifest components/components.manifest",
+        ],
+        "components/components.manifest": [
+            "binary-component foo.so",
+        ],
+        "app/omni.foo": {
+            p: RESULT_FLAT["app/" + p]
+            for p in chain(
+                (
+                    "chrome.manifest",
+                    "chrome/chrome.manifest",
+                    "chrome/foo/foo",
+                    "components/components.manifest",
+                    "components/foo.js",
+                ),
+                (
+                    mozpath.relpath(p, "app")
+                    for p in six.iterkeys(RESULT_FLAT)
+                    if p.startswith("app/chrome/addons/addon2/")
+                ),
+            )
+        },
+    }
+)
+
+# The top-level omnijar carries the rest of the flat top-level layout.
+RESULT_OMNIJAR["omni.foo"].update(
+    {
+        p: RESULT_FLAT[p]
+        for p in (
+            "chrome.manifest",
+            "chrome/chrome.manifest",
+            "chrome/f/f.manifest",
+            "chrome/f/oo/bar/baz",
+            "chrome/f/oo/baz",
+            "chrome/f/oo/qux",
+            "components/foo.xpt",
+            "components/bar.xpt",
+        )
+    }
+)
+
+# Variant where the omnijar lives at a subpath inside its base.
+RESULT_OMNIJAR_WITH_SUBPATH = {
+    k.replace("omni.foo", "bar/omni.foo"): v for k, v in RESULT_OMNIJAR.items()
+}
+
+# Same fixture, but with everything rooted under "base/root" and one
+# extra file outside any base, to test formatter base resolution.
+CONTENTS_WITH_BASE = {
+    "bases": {
+        mozpath.join("base/root", b) if b else "base/root": a
+        for b, a in six.iteritems(CONTENTS["bases"])
+    },
+    "manifests": [
+        m.move(mozpath.join("base/root", m.base)) for m in CONTENTS["manifests"]
+    ],
+    "files": {
+        mozpath.join("base/root", p): f for p, f in six.iteritems(CONTENTS["files"])
+    },
+}
+
+# A file that belongs to no declared base.
+EXTRA_CONTENTS = {
+    "extra/file": GeneratedFile(b"extra file"),
+}
+
+CONTENTS_WITH_BASE["files"].update(EXTRA_CONTENTS)
+
+
+def result_with_base(results):
+    # Rebase an expected-result dict under "base/root" and add the
+    # out-of-base extra file, which is expected to pass through untouched.
+    result = {mozpath.join("base/root", p): v for p, v in six.iteritems(results)}
+    result.update(EXTRA_CONTENTS)
+    return result
+
+
+RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT)
+RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR)
+RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR)
+
+
+def fill_formatter(formatter, contents):
+    # Feed a CONTENTS-style fixture into a formatter: bases first, then
+    # manifest entries, then files (sorted for determinism); .xpt files go
+    # through add_interfaces, everything else through add.
+    for base, is_addon in sorted(contents["bases"].items()):
+        formatter.add_base(base, is_addon)
+
+    for manifest in contents["manifests"]:
+        formatter.add_manifest(manifest)
+
+    for k, v in sorted(six.iteritems(contents["files"])):
+        if k.endswith(".xpt"):
+            formatter.add_interfaces(k, v)
+        else:
+            formatter.add(k, v)
+
+
+def get_contents(registry, read_all=False, mode="rt"):
+    # Flatten a FileRegistry into a comparable dict: nested registries
+    # (jars) recurse, manifest files (or everything, with read_all) are
+    # read into text lines (or raw bytes in binary mode), other entries
+    # stay as the File objects themselves.
+    # NOTE(review): the recursive call does not forward read_all/mode, so
+    # nested registries are always read with the defaults — confirm
+    # whether that is intended.
+    result = {}
+    for k, v in registry:
+        if isinstance(v, FileRegistry):
+            result[k] = get_contents(v)
+        elif isinstance(v, ManifestFile) or read_all:
+            if "b" in mode:
+                result[k] = v.open().read()
+            else:
+                result[k] = six.ensure_text(v.open().read()).splitlines()
+        else:
+            result[k] = v
+    return result
+
+
class TestFormatters(TestErrors, unittest.TestCase):
    """Tests for the Flat/Jar/OmniJar packager formatters."""

    maxDiff = None

    def test_bases(self):
        # _get_base maps a path to its most specific registered base.
        formatter = FlatFormatter(FileRegistry())
        formatter.add_base("")
        formatter.add_base("addon0", addon=True)
        formatter.add_base("browser")
        self.assertEqual(formatter._get_base("platform.ini"), ("", "platform.ini"))
        self.assertEqual(
            formatter._get_base("browser/application.ini"),
            ("browser", "application.ini"),
        )
        self.assertEqual(
            formatter._get_base("addon0/install.rdf"), ("addon0", "install.rdf")
        )

    def do_test_contents(self, formatter, contents):
        # Every file fed to the formatter must be reported as contained.
        for f in contents["files"]:
            # .xpt files are merged, so skip them.
            if not f.endswith(".xpt"):
                self.assertTrue(formatter.contains(f))

    def test_flat_formatter(self):
        registry = FileRegistry()
        formatter = FlatFormatter(registry)

        fill_formatter(formatter, CONTENTS)
        self.assertEqual(get_contents(registry), RESULT_FLAT)
        self.do_test_contents(formatter, CONTENTS)

    def test_jar_formatter(self):
        registry = FileRegistry()
        formatter = JarFormatter(registry)

        fill_formatter(formatter, CONTENTS)
        self.assertEqual(get_contents(registry), RESULT_JAR)
        self.do_test_contents(formatter, CONTENTS)

    def test_omnijar_formatter(self):
        registry = FileRegistry()
        formatter = OmniJarFormatter(registry, "omni.foo")

        fill_formatter(formatter, CONTENTS)
        self.assertEqual(get_contents(registry), RESULT_OMNIJAR)
        self.do_test_contents(formatter, CONTENTS)

    def test_flat_formatter_with_base(self):
        registry = FileRegistry()
        formatter = FlatFormatter(registry)

        fill_formatter(formatter, CONTENTS_WITH_BASE)
        self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE)
        self.do_test_contents(formatter, CONTENTS_WITH_BASE)

    def test_jar_formatter_with_base(self):
        registry = FileRegistry()
        formatter = JarFormatter(registry)

        fill_formatter(formatter, CONTENTS_WITH_BASE)
        self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE)
        self.do_test_contents(formatter, CONTENTS_WITH_BASE)

    def test_omnijar_formatter_with_base(self):
        registry = FileRegistry()
        formatter = OmniJarFormatter(registry, "omni.foo")

        fill_formatter(formatter, CONTENTS_WITH_BASE)
        self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE)
        self.do_test_contents(formatter, CONTENTS_WITH_BASE)

    def test_omnijar_formatter_with_subpath(self):
        registry = FileRegistry()
        formatter = OmniJarFormatter(registry, "bar/omni.foo")

        fill_formatter(formatter, CONTENTS)
        self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_SUBPATH)
        self.do_test_contents(formatter, CONTENTS)

    def test_omnijar_is_resource(self):
        # Helper: returns True when `path` ends up inside the omnijar,
        # False when it stays outside (i.e. is treated as a non-resource).
        def is_resource(base, path):
            registry = FileRegistry()
            f = OmniJarFormatter(
                registry,
                "omni.foo",
                non_resources=[
                    "defaults/messenger/mailViews.dat",
                    "defaults/foo/*",
                    "*/dummy",
                ],
            )
            f.add_base("")
            f.add_base("app")
            f.add(mozpath.join(base, path), GeneratedFile(b""))
            if f.copier.contains(mozpath.join(base, path)):
                return False
            self.assertTrue(f.copier.contains(mozpath.join(base, "omni.foo")))
            self.assertTrue(f.copier[mozpath.join(base, "omni.foo")].contains(path))
            return True

        for base in ["", "app/"]:
            self.assertTrue(is_resource(base, "chrome"))
            self.assertTrue(is_resource(base, "chrome/foo/bar/baz.properties"))
            self.assertFalse(is_resource(base, "chrome/icons/foo.png"))
            self.assertTrue(is_resource(base, "components/foo.js"))
            self.assertFalse(is_resource(base, "components/foo.so"))
            self.assertTrue(is_resource(base, "res/foo.css"))
            self.assertFalse(is_resource(base, "res/cursors/foo.png"))
            self.assertFalse(is_resource(base, "res/MainMenu.nib/foo"))
            self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
            self.assertFalse(is_resource(base, "defaults/pref/channel-prefs.js"))
            self.assertTrue(is_resource(base, "defaults/preferences/foo.js"))
            self.assertFalse(is_resource(base, "defaults/preferences/channel-prefs.js"))
            self.assertTrue(is_resource(base, "modules/foo.jsm"))
            self.assertTrue(is_resource(base, "greprefs.js"))
            self.assertTrue(is_resource(base, "hyphenation/foo"))
            self.assertTrue(is_resource(base, "update.locale"))
            self.assertFalse(is_resource(base, "foo"))
            self.assertFalse(is_resource(base, "foo/bar/greprefs.js"))
            self.assertTrue(is_resource(base, "defaults/messenger/foo.dat"))
            self.assertFalse(is_resource(base, "defaults/messenger/mailViews.dat"))
            self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
            self.assertFalse(is_resource(base, "defaults/foo/bar.dat"))
            self.assertFalse(is_resource(base, "defaults/foo/bar/baz.dat"))
            self.assertTrue(is_resource(base, "chrome/foo/bar/baz/dummy_"))
            self.assertFalse(is_resource(base, "chrome/foo/bar/baz/dummy"))
            self.assertTrue(is_resource(base, "chrome/foo/bar/dummy_"))
            self.assertFalse(is_resource(base, "chrome/foo/bar/dummy"))

    def test_chrome_override(self):
        registry = FileRegistry()
        f = FlatFormatter(registry)
        f.add_base("")
        f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))
        # A more specific entry for a given chrome name can override a more
        # generic one.
        f.add_manifest(ManifestContent("chrome", "foo", "foo/win", "os=WINNT"))
        f.add_manifest(ManifestContent("chrome", "foo", "foo/osx", "os=Darwin"))

        # Chrome with the same name overrides the previous registration.
        with self.assertRaises(ErrorMessage) as e:
            f.add_manifest(ManifestContent("chrome", "foo", "foo/"))

        self.assertEqual(
            str(e.exception),
            'error: "content foo foo/" overrides ' '"content foo foo/unix"',
        )

        # Chrome with the same name and same flags overrides the previous
        # registration.
        with self.assertRaises(ErrorMessage) as e:
            f.add_manifest(ManifestContent("chrome", "foo", "foo/", "os=WINNT"))

        self.assertEqual(
            str(e.exception),
            'error: "content foo foo/ os=WINNT" overrides '
            '"content foo foo/win os=WINNT"',
        )

        # We may start with the more specific entry first
        f.add_manifest(ManifestContent("chrome", "bar", "bar/win", "os=WINNT"))
        # Then adding a more generic one overrides it.
        with self.assertRaises(ErrorMessage) as e:
            f.add_manifest(ManifestContent("chrome", "bar", "bar/unix"))

        self.assertEqual(
            str(e.exception),
            'error: "content bar bar/unix" overrides ' '"content bar bar/win os=WINNT"',
        )

        # Adding something more specific still works.
        f.add_manifest(
            ManifestContent("chrome", "bar", "bar/win", "os=WINNT osversion>=7.0")
        )

        # Variations of skin/locales are allowed.
        f.add_manifest(
            ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/")
        )
        f.add_manifest(ManifestSkin("chrome", "foo", "modern/1.0", "foo/skin/modern/"))

        f.add_manifest(ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/"))
        f.add_manifest(ManifestLocale("chrome", "foo", "ja-JP", "foo/locale/ja-JP/"))

        # But same-skin/locale still error out.
        with self.assertRaises(ErrorMessage) as e:
            f.add_manifest(
                ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/foo")
            )

        self.assertEqual(
            str(e.exception),
            'error: "skin foo classic/1.0 foo/skin/classic/foo" overrides '
            '"skin foo classic/1.0 foo/skin/classic/"',
        )

        with self.assertRaises(ErrorMessage) as e:
            f.add_manifest(
                ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/foo")
            )

        self.assertEqual(
            str(e.exception),
            'error: "locale foo en-US foo/locale/en-US/foo" overrides '
            '"locale foo en-US foo/locale/en-US/"',
        )

        # Duplicating existing manifest entries is not an error.
        f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))

        self.assertEqual(
            self.get_output(),
            [
                'warning: "content foo foo/unix" is duplicated. Skipping.',
            ],
        )
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py
new file mode 100644
index 0000000000..0714ae3252
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_l10n.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+import six
+
+from mozpack.chrome.manifest import Manifest, ManifestContent, ManifestLocale
+from mozpack.copier import FileRegistry
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager import l10n
+from test_packager import MockFinder
+
+
class TestL10NRepack(unittest.TestCase):
    """Test l10n._repack: en-US files and locale manifest entries are
    replaced by their x-test counterparts from the l10n finder, and the
    extra "allowed" non-chrome paths (dictionaries, search plugins) come
    from the l10n side too."""

    def test_l10n_repack(self):
        foo = GeneratedFile(b"foo")
        foobar = GeneratedFile(b"foobar")
        qux = GeneratedFile(b"qux")
        bar = GeneratedFile(b"bar")
        baz = GeneratedFile(b"baz")
        dict_aa = GeneratedFile(b"dict_aa")
        dict_bb = GeneratedFile(b"dict_bb")
        dict_cc = GeneratedFile(b"dict_cc")
        barbaz = GeneratedFile(b"barbaz")
        lst = GeneratedFile(b"foo\nbar")
        # The original (en-US) package layout.
        app_finder = MockFinder(
            {
                "bar/foo": foo,
                "chrome/foo/foobar": foobar,
                "chrome/qux/qux.properties": qux,
                "chrome/qux/baz/baz.properties": baz,
                "chrome/chrome.manifest": ManifestFile(
                    "chrome",
                    [
                        ManifestContent("chrome", "foo", "foo/"),
                        ManifestLocale("chrome", "qux", "en-US", "qux/"),
                    ],
                ),
                "chrome.manifest": ManifestFile(
                    "", [Manifest("", "chrome/chrome.manifest")]
                ),
                "dict/aa": dict_aa,
                "app/chrome/bar/barbaz.dtd": barbaz,
                "app/chrome/chrome.manifest": ManifestFile(
                    "app/chrome", [ManifestLocale("app/chrome", "bar", "en-US", "bar/")]
                ),
                "app/chrome.manifest": ManifestFile(
                    "app", [Manifest("app", "chrome/chrome.manifest")]
                ),
                "app/dict/bb": dict_bb,
                "app/dict/cc": dict_cc,
                "app/chrome/bar/search/foo.xml": foo,
                "app/chrome/bar/search/bar.xml": bar,
                "app/chrome/bar/search/lst.txt": lst,
                "META-INF/foo": foo,  # Stripped.
                "inner/META-INF/foo": foo,  # Not stripped.
                "app/META-INF/foo": foo,  # Stripped.
                "app/inner/META-INF/foo": foo,  # Not stripped.
            }
        )
        app_finder.jarlogs = {}
        app_finder.base = "app"
        foo_l10n = GeneratedFile(b"foo_l10n")
        qux_l10n = GeneratedFile(b"qux_l10n")
        baz_l10n = GeneratedFile(b"baz_l10n")
        barbaz_l10n = GeneratedFile(b"barbaz_l10n")
        lst_l10n = GeneratedFile(b"foo\nqux")
        # The x-test localization to repack with.
        l10n_finder = MockFinder(
            {
                "chrome/qux-l10n/qux.properties": qux_l10n,
                "chrome/qux-l10n/baz/baz.properties": baz_l10n,
                "chrome/chrome.manifest": ManifestFile(
                    "chrome",
                    [
                        ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
                    ],
                ),
                "chrome.manifest": ManifestFile(
                    "", [Manifest("", "chrome/chrome.manifest")]
                ),
                "dict/bb": dict_bb,
                "dict/cc": dict_cc,
                "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
                "app/chrome/chrome.manifest": ManifestFile(
                    "app/chrome",
                    [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
                ),
                "app/chrome.manifest": ManifestFile(
                    "app", [Manifest("app", "chrome/chrome.manifest")]
                ),
                "app/dict/aa": dict_aa,
                "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
                "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
                "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
            }
        )
        l10n_finder.base = "l10n"
        copier = FileRegistry()
        formatter = l10n.FlatFormatter(copier)

        # "dict" and the search xml files are non-chrome paths allowed to
        # come from the localization.
        l10n._repack(
            app_finder,
            l10n_finder,
            copier,
            formatter,
            ["dict", "chrome/**/search/*.xml"],
        )
        self.maxDiff = None

        # Expected contents of the copier after repacking.
        repacked = {
            "bar/foo": foo,
            "chrome/foo/foobar": foobar,
            "chrome/qux-l10n/qux.properties": qux_l10n,
            "chrome/qux-l10n/baz/baz.properties": baz_l10n,
            "chrome/chrome.manifest": ManifestFile(
                "chrome",
                [
                    ManifestContent("chrome", "foo", "foo/"),
                    ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
                ],
            ),
            "chrome.manifest": ManifestFile(
                "", [Manifest("", "chrome/chrome.manifest")]
            ),
            "dict/bb": dict_bb,
            "dict/cc": dict_cc,
            "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
            "app/chrome/chrome.manifest": ManifestFile(
                "app/chrome",
                [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
            ),
            "app/chrome.manifest": ManifestFile(
                "app", [Manifest("app", "chrome/chrome.manifest")]
            ),
            "app/dict/aa": dict_aa,
            "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
            "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
            "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
            "inner/META-INF/foo": foo,
            "app/inner/META-INF/foo": foo,
        }

        self.assertEqual(
            dict((p, f.open().read()) for p, f in copier),
            dict((p, f.open().read()) for p, f in six.iteritems(repacked)),
        )
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py
new file mode 100644
index 0000000000..57a2d71eda
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_unpack.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.packager.unpack import unpack_to_registry
+from mozpack.test.test_files import TestWithTmpDir
+from mozpack.test.test_packager_formats import CONTENTS, fill_formatter, get_contents
+
+
class TestUnpack(TestWithTmpDir):
    """Check that unpacking a package produced by any formatter yields the
    same file set as a flat-formatted package."""

    maxDiff = None

    @staticmethod
    def _get_copier(cls):
        """Return a FileCopier filled through the given formatter class."""
        file_copier = FileCopier()
        fill_formatter(cls(file_copier), CONTENTS)
        return file_copier

    @classmethod
    def setUpClass(cls):
        # Reference data: the contents of a flat-formatted package.
        flat_copier = cls._get_copier(FlatFormatter)
        cls.contents = get_contents(flat_copier, read_all=True, mode="rb")

    def _unpack_test(self, cls):
        """Format a package with `cls`, unpack it, and compare the result
        against the flat reference contents."""
        self._get_copier(cls).copy(self.tmpdir)

        # Unpack that package. Its content is expected to match that of a
        # Flat formatted package.
        registry = FileRegistry()
        omnijar_name = getattr(cls, "OMNIJAR_NAME", None)
        unpack_to_registry(self.tmpdir, registry, omnijar_name)
        unpacked = get_contents(registry, read_all=True, mode="rb")
        self.assertEqual(unpacked, self.contents)

    def test_flat_unpack(self):
        self._unpack_test(FlatFormatter)

    def test_jar_unpack(self):
        self._unpack_test(JarFormatter)

    @staticmethod
    def _omni_foo_formatter(name):
        """Return an OmniJarFormatter subclass hard-wired to `name`."""

        class OmniFooFormatter(OmniJarFormatter):
            OMNIJAR_NAME = name

            def __init__(self, registry):
                super().__init__(registry, name)

        return OmniFooFormatter

    def test_omnijar_unpack(self):
        self._unpack_test(self._omni_foo_formatter("omni.foo"))

    def test_omnijar_subpath_unpack(self):
        self._unpack_test(self._omni_foo_formatter("bar/omni.foo"))
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py
new file mode 100644
index 0000000000..6c7aeb5400
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_path.py
@@ -0,0 +1,152 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.path import (
+ basedir,
+ basename,
+ commonprefix,
+ dirname,
+ join,
+ match,
+ normpath,
+ rebase,
+ relpath,
+ split,
+ splitext,
+)
+
+
class TestPath(unittest.TestCase):
    """Tests for mozpack.path helpers.

    Inputs are built with SEP as the directory separator; results are
    always expected to use forward slashes. Subclasses (below) rerun the
    whole suite with the platform's alternative separator.
    """

    # Separator used to build test input paths (overridden in subclasses).
    SEP = os.sep

    def test_relpath(self):
        self.assertEqual(relpath("foo", "foo"), "")
        self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar"), "")
        self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo"), "bar")
        self.assertEqual(
            relpath(self.SEP.join(("foo", "bar", "baz")), "foo"), "bar/baz"
        )
        self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar/baz"), "..")
        self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/baz"), "../bar")
        self.assertEqual(relpath("foo/", "foo"), "")
        self.assertEqual(relpath("foo/bar/", "foo"), "bar")

    def test_join(self):
        self.assertEqual(join("foo", "bar", "baz"), "foo/bar/baz")
        self.assertEqual(join("foo", "", "bar"), "foo/bar")
        self.assertEqual(join("", "foo", "bar"), "foo/bar")
        # An absolute component resets the joined path.
        self.assertEqual(join("", "foo", "/bar"), "/bar")

    def test_normpath(self):
        self.assertEqual(
            normpath(self.SEP.join(("foo", "bar", "baz", "..", "qux"))), "foo/bar/qux"
        )

    def test_dirname(self):
        self.assertEqual(dirname("foo/bar/baz"), "foo/bar")
        self.assertEqual(dirname("foo/bar"), "foo")
        self.assertEqual(dirname("foo"), "")
        self.assertEqual(dirname("foo/bar/"), "foo/bar")

    def test_commonprefix(self):
        self.assertEqual(
            commonprefix(
                [self.SEP.join(("foo", "bar", "baz")), "foo/qux", "foo/baz/qux"]
            ),
            "foo/",
        )
        self.assertEqual(
            commonprefix([self.SEP.join(("foo", "bar", "baz")), "foo/qux", "baz/qux"]),
            "",
        )

    def test_basename(self):
        self.assertEqual(basename("foo/bar/baz"), "baz")
        self.assertEqual(basename("foo/bar"), "bar")
        self.assertEqual(basename("foo"), "foo")
        self.assertEqual(basename("foo/bar/"), "")

    def test_split(self):
        self.assertEqual(
            split(self.SEP.join(("foo", "bar", "baz"))), ["foo", "bar", "baz"]
        )

    def test_splitext(self):
        self.assertEqual(
            splitext(self.SEP.join(("foo", "bar", "baz.qux"))), ("foo/bar/baz", ".qux")
        )

    def test_basedir(self):
        # basedir picks the longest matching base directory.
        foobarbaz = self.SEP.join(("foo", "bar", "baz"))
        self.assertEqual(basedir(foobarbaz, ["foo", "bar", "baz"]), "foo")
        self.assertEqual(basedir(foobarbaz, ["foo", "foo/bar", "baz"]), "foo/bar")
        self.assertEqual(basedir(foobarbaz, ["foo/bar", "foo", "baz"]), "foo/bar")
        self.assertEqual(basedir(foobarbaz, ["foo", "bar", ""]), "foo")
        self.assertEqual(basedir(foobarbaz, ["bar", "baz", ""]), "")

    def test_match(self):
        # "" and plain prefixes match; "*" spans one component, "**" any
        # number of components.
        self.assertTrue(match("foo", ""))
        self.assertTrue(match("foo/bar/baz.qux", "foo/bar"))
        self.assertTrue(match("foo/bar/baz.qux", "foo"))
        self.assertTrue(match("foo", "*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/*/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "*/bar/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "*/*/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "*/*/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/*/*"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/*/*.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/b*/*z.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/b*r/ba*z.qux"))
        self.assertFalse(match("foo/bar/baz.qux", "foo/b*z/ba*r.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "**"))
        self.assertTrue(match("foo/bar/baz.qux", "**/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "**/bar/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "**/foo/bar/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/baz.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/*.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
        self.assertTrue(match("foo/bar/baz.qux", "**/*.qux"))
        self.assertFalse(match("foo/bar/baz.qux", "**.qux"))
        self.assertFalse(match("foo/bar", "foo/*/bar"))
        self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/**"))
        self.assertFalse(match("foo/nobar/baz.qux", "foo/**/bar/**"))
        self.assertTrue(match("foo/bar", "foo/**/bar/**"))

    def test_rebase(self):
        self.assertEqual(rebase("foo", "foo/bar", "bar/baz"), "baz")
        self.assertEqual(rebase("foo", "foo", "bar/baz"), "bar/baz")
        self.assertEqual(rebase("foo/bar", "foo", "baz"), "bar/baz")
+
+
if os.altsep:
    # On platforms with an alternative separator (e.g. Windows, where both
    # "\\" and "/" work), rerun the whole TestPath suite in the other
    # separator combinations as well.

    class TestAltPath(TestPath):
        # Build test inputs with the alternative separator.
        SEP = os.altsep

    class TestReverseAltPath(TestPath):
        # Run with os.sep and os.altsep swapped for the duration of each
        # test; swapping again in tearDown restores the originals.
        def setUp(self):
            sep = os.sep
            os.sep = os.altsep
            os.altsep = sep

        def tearDown(self):
            self.setUp()

    class TestAltReverseAltPath(TestReverseAltPath):
        # NOTE(review): evaluated at class-creation (import) time, i.e.
        # before the setUp() swap, so this is the original os.altsep.
        SEP = os.altsep
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_pkg.py b/python/mozbuild/mozpack/test/test_pkg.py
new file mode 100644
index 0000000000..f1febbbae0
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_pkg.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+from string import Template
+from unittest.mock import patch
+
+import mozunit
+
+import mozpack.pkg
+from mozpack.pkg import (
+ create_bom,
+ create_payload,
+ create_pkg,
+ get_app_info_plist,
+ get_apple_template,
+ get_relative_glob_list,
+ save_text_file,
+ xar_package_folder,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
class TestPkg(TestWithTmpDir):
    """Tests for mozpack.pkg.

    External tool invocations (cpio, mkbom, xar) and plist parsing are
    mocked out, so no real packaging work happens.
    """

    maxDiff = None

    class MockSubprocessRun:
        """Minimal stand-in for subprocess.CompletedProcess."""

        stderr = ""
        stdout = ""
        returncode = 0

        def __init__(self, returncode=0):
            self.returncode = returncode

    def _mk_test_file(self, name, mode=0o777):
        """Create an empty file named `name` in tmpdir with `mode`."""
        tool = Path(self.tmpdir) / f"{name}"
        tool.touch()
        tool.chmod(mode)
        return tool

    def test_get_apple_template(self):
        tmpl = get_apple_template("Distribution.template")
        # isinstance is the idiomatic type check (was `type(...) == Template`).
        assert isinstance(tmpl, Template)

    def test_get_apple_template_not_file(self):
        with self.assertRaises(Exception):
            get_apple_template("tmpl-should-not-exist")

    def test_save_text_file(self):
        content = "Hello"
        destination = Path(self.tmpdir) / "test_save_text_file"
        save_text_file(content, destination)
        with destination.open("r") as file:
            assert content == file.read()

    def test_get_app_info_plist(self):
        app_path = Path(self.tmpdir) / "app"
        (app_path / "Contents").mkdir(parents=True)
        (app_path / "Contents/Info.plist").touch()
        data = {"foo": "bar"}
        # Patch plistlib.load so the empty Info.plist doesn't need to parse.
        with patch.object(mozpack.pkg.plistlib, "load", lambda x: data):
            assert data == get_app_info_plist(app_path)

    def test_get_app_info_plist_not_file(self):
        app_path = Path(self.tmpdir) / "app-does-not-exist"
        with self.assertRaises(Exception):
            get_app_info_plist(app_path)

    def _mock_payload(self, returncode):
        """Return a subprocess.run replacement producing `returncode`."""

        def _mock_run(*args, **kwargs):
            return self.MockSubprocessRun(returncode)

        return _mock_run

    def test_create_payload(self):
        destination = Path(self.tmpdir) / "mockPayload"
        with patch.object(mozpack.pkg.subprocess, "run", self._mock_payload(0)):
            create_payload(destination, Path(self.tmpdir), "cpio")

    def test_create_bom(self):
        bom_path = Path(self.tmpdir) / "Bom"
        bom_path.touch()
        root_path = Path(self.tmpdir)
        tool_path = Path(self.tmpdir) / "not-really-used-during-test"
        with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x: None):
            create_bom(bom_path, root_path, tool_path)

    def test_get_relative_glob_list(self):
        # Renamed from `get_relative_glob_list`: without the test_ prefix,
        # unittest never discovered or ran this test.
        source = Path(self.tmpdir)
        (source / "testfile").touch()
        glob = "*"
        assert len(get_relative_glob_list(source, glob)) == 1

    def test_xar_package_folder(self):
        source = Path(self.tmpdir)
        dest = source / "fakedestination"
        dest.touch()
        tool = source / "faketool"
        with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x, **y: None):
            xar_package_folder(source, dest, tool)

    def test_xar_package_folder_not_absolute(self):
        source = Path("./some/relative/path")
        dest = Path("./some/other/relative/path")
        tool = source / "faketool"
        # check_call is never reached: the relative paths raise first.
        with patch.object(mozpack.pkg.subprocess, "check_call", lambda: None):
            with self.assertRaises(Exception):
                xar_package_folder(source, dest, tool)

    def test_create_pkg(self):
        def noop(*x, **y):
            pass

        def mock_get_app_info_plist(*args):
            return {"CFBundleShortVersionString": "1.0.0"}

        def mock_get_apple_template(*args):
            return Template("fake template")

        source = Path(self.tmpdir) / "FakeApp.app"
        source.mkdir()
        output = Path(self.tmpdir) / "output.pkg"
        fake_tool = Path(self.tmpdir) / "faketool"
        # Stub out every helper so create_pkg only exercises its own flow.
        with patch.multiple(
            mozpack.pkg,
            get_app_info_plist=mock_get_app_info_plist,
            get_apple_template=mock_get_apple_template,
            save_text_file=noop,
            create_payload=noop,
            create_bom=noop,
            xar_package_folder=noop,
        ):
            create_pkg(source, output, fake_tool, fake_tool, fake_tool)
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 0000000000..15de50dccc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,250 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from io import StringIO
+
+import mozunit
+
+from mozbuild.util import ensureParentDir
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest, TestWithTmpDir
+from mozpack.unify import UnifiedBuildFinder, UnifiedFinder
+
+
class TestUnified(TestWithTmpDir):
    """Shared fixture helpers for the unified-finder tests: create files
    under the 'a' and/or 'b' side of the temporary directory."""

    def create_one(self, which, path, content):
        """Write `content` (str or bytes; str is utf-8 encoded) to
        tmpdir/<which>/<path>, creating parent directories as needed."""
        file = self.tmppath(os.path.join(which, path))
        ensureParentDir(file)
        if isinstance(content, str):
            content = content.encode("utf-8")
        # Context manager closes the handle deterministically (the
        # original left it to the garbage collector).
        with open(file, "wb") as fh:
            fh.write(content)

    def create_both(self, path, content):
        """Write identical content on both the 'a' and 'b' sides."""
        for p in ["a", "b"]:
            self.create_one(p, path, content)
+
+
class TestUnifiedFinder(TestUnified):
    """Tests for UnifiedFinder: identical files unify, mismatches error."""

    def test_unified_finder(self):
        # Identical on both sides: unify fine.
        self.create_both("foo/bar", "foobar")
        self.create_both("foo/baz", "foobaz")
        # Present on only one side, or differing: errors.
        self.create_one("a", "bar", "bar")
        self.create_one("b", "baz", "baz")
        self.create_one("a", "qux", "foobar")
        self.create_one("b", "qux", "baz")
        # Same lines in different order: allowed under "sorted" directories.
        self.create_one("a", "test/foo", "a\nb\nc\n")
        self.create_one("b", "test/foo", "b\nc\na\n")
        self.create_both("test/bar", "a\nb\nc\n")

        finder = UnifiedFinder(
            FileFinder(self.tmppath("a")),
            FileFinder(self.tmppath("b")),
            sorted=["test"],
        )
        self.assertEqual(
            sorted(
                [(f, c.open().read().decode("utf-8")) for f, c in finder.find("foo")]
            ),
            [("foo/bar", "foobar"), ("foo/baz", "foobaz")],
        )
        # any() forces iteration of the lazy find() results.
        self.assertRaises(ErrorMessage, any, finder.find("bar"))
        self.assertRaises(ErrorMessage, any, finder.find("baz"))
        self.assertRaises(ErrorMessage, any, finder.find("qux"))
        self.assertEqual(
            sorted(
                [(f, c.open().read().decode("utf-8")) for f, c in finder.find("test")]
            ),
            [("test/bar", "a\nb\nc\n"), ("test/foo", "a\nb\nc\n")],
        )
+
+
class TestUnifiedBuildFinder(TestUnified):
    """Tests for UnifiedBuildFinder's special-cased file unifications:
    chrome.manifest, buildconfig.html, xpi archives and install.rdf."""

    def test_unified_build_finder(self):
        finder = UnifiedBuildFinder(
            FileFinder(self.tmppath("a")), FileFinder(self.tmppath("b"))
        )

        # Test chrome.manifest unification: line order differences are
        # tolerated.
        self.create_both("chrome.manifest", "a\nb\nc\n")
        self.create_one("a", "chrome/chrome.manifest", "a\nb\nc\n")
        self.create_one("b", "chrome/chrome.manifest", "b\nc\na\n")
        self.assertEqual(
            sorted(
                [
                    (f, c.open().read().decode("utf-8"))
                    for f, c in finder.find("**/chrome.manifest")
                ]
            ),
            [("chrome.manifest", "a\nb\nc\n"), ("chrome/chrome.manifest", "a\nb\nc\n")],
        )

        # Test buildconfig.html unification
        self.create_one(
            "a",
            "chrome/browser/foo/buildconfig.html",
            "\n".join(
                [
                    "<html>",
                    " <body>",
                    " <div>",
                    " <h1>Build Configuration</h1>",
                    " <div>foo</div>",
                    " </div>",
                    " </body>",
                    "</html>",
                ]
            ),
        )
        self.create_one(
            "b",
            "chrome/browser/foo/buildconfig.html",
            "\n".join(
                [
                    "<html>",
                    " <body>",
                    " <div>",
                    " <h1>Build Configuration</h1>",
                    " <div>bar</div>",
                    " </div>",
                    " </body>",
                    "</html>",
                ]
            ),
        )
        # Both bodies are kept, separated by an <hr>.
        self.assertEqual(
            sorted(
                [
                    (f, c.open().read().decode("utf-8"))
                    for f, c in finder.find("**/buildconfig.html")
                ]
            ),
            [
                (
                    "chrome/browser/foo/buildconfig.html",
                    "\n".join(
                        [
                            "<html>",
                            " <body>",
                            " <div>",
                            " <h1>Build Configuration</h1>",
                            " <div>foo</div>",
                            " <hr> </hr>",
                            " <div>bar</div>",
                            " </div>",
                            " </body>",
                            "</html>",
                        ]
                    ),
                )
            ],
        )

        # Test xpi file unification: identical archives unify, differing
        # ones accumulate an error.
        xpi = MockDest()
        with JarWriter(fileobj=xpi, compress=True) as jar:
            jar.add("foo", "foo")
            jar.add("bar", "bar")
        foo_xpi = xpi.read()
        self.create_both("foo.xpi", foo_xpi)

        with JarWriter(fileobj=xpi, compress=True) as jar:
            jar.add("foo", "bar")
        self.create_one("a", "bar.xpi", foo_xpi)
        self.create_one("b", "bar.xpi", xpi.read())

        errors.out = StringIO()
        with self.assertRaises(AccumulatedErrors), errors.accumulate():
            self.assertEqual(
                [(f, c.open().read()) for f, c in finder.find("*.xpi")],
                [("foo.xpi", foo_xpi)],
            )
        errors.out = sys.stderr

        # Test install.rdf unification
        x86_64 = "Darwin_x86_64-gcc3"
        x86 = "Darwin_x86-gcc3"
        target_tag = "<{em}targetPlatform>{platform}</{em}targetPlatform>"
        target_attr = '{em}targetPlatform="{platform}" '

        rdf_tag = "".join(
            [
                '<{RDF}Description {em}bar="bar" {em}qux="qux">',
                "<{em}foo>foo</{em}foo>",
                "{targets}",
                "<{em}baz>baz</{em}baz>",
                "</{RDF}Description>",
            ]
        )
        rdf_attr = "".join(
            [
                '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
                "{targets}",
                "<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>",
                "</{RDF}Description>",
            ]
        )

        for descr_ns, target_ns in (("RDF:", ""), ("", "em:"), ("RDF:", "em:")):
            # First we need to infuse the above strings with our namespaces and
            # platform values.
            ns = {"RDF": descr_ns, "em": target_ns}
            target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
            target_tag_x86 = target_tag.format(platform=x86, **ns)
            target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
            target_attr_x86 = target_attr.format(platform=x86, **ns)

            tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
            tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
            tag_merged = rdf_tag.format(
                targets=target_tag_x86_64 + target_tag_x86, **ns
            )
            tag_empty = rdf_tag.format(targets="", **ns)

            attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
            attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
            attr_merged = rdf_attr.format(
                attr="", targets=target_tag_x86_64 + target_tag_x86, **ns
            )

            # This table defines the test cases, columns "a" and "b" being the
            # contents of the install.rdf of the respective platform and
            # "result" the expected merged content after unification.
            testcases = (
                # _____a_____ _____b_____ ___result___#
                (tag_x86_64, tag_x86, tag_merged),
                (tag_x86_64, tag_empty, tag_empty),
                (tag_empty, tag_x86, tag_empty),
                (tag_empty, tag_empty, tag_empty),
                (attr_x86_64, attr_x86, attr_merged),
                (tag_x86_64, attr_x86, tag_merged),
                (attr_x86_64, tag_x86, attr_merged),
                (attr_x86_64, tag_empty, tag_empty),
                (tag_empty, attr_x86, tag_empty),
            )

            # Now create the files from the above table and compare
            results = []
            for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
                filename = "ext/id{0}/install.rdf".format(emid)
                self.create_one("a", filename, rdf_a)
                self.create_one("b", filename, rdf_b)
                results.append((filename, result))

            self.assertEqual(
                sorted(
                    [
                        (f, c.open().read().decode("utf-8"))
                        for f, c in finder.find("**/install.rdf")
                    ]
                ),
                results,
            )
+
+
# Allow running this test module directly.
if __name__ == "__main__":
    mozunit.main()