summaryrefslogtreecommitdiffstats
path: root/dom/cache/test/marionette
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 00:47:55 +0000
commit26a029d407be480d791972afb5975cf62c9360a6 (patch)
treef435a8308119effd964b339f76abb83a57c29483 /dom/cache/test/marionette
parentInitial commit. (diff)
downloadfirefox-26a029d407be480d791972afb5975cf62c9360a6.tar.xz
firefox-26a029d407be480d791972afb5975cf62c9360a6.zip
Adding upstream version 124.0.1.upstream/124.0.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'dom/cache/test/marionette')
-rw-r--r--dom/cache/test/marionette/manifest.toml5
-rw-r--r--dom/cache/test/marionette/test_cacheapi_encryption_PBM.py187
-rw-r--r--dom/cache/test/marionette/test_caches_delete_cleanup_after_shutdown.py186
3 files changed, 378 insertions, 0 deletions
diff --git a/dom/cache/test/marionette/manifest.toml b/dom/cache/test/marionette/manifest.toml
new file mode 100644
index 0000000000..20bbad726c
--- /dev/null
+++ b/dom/cache/test/marionette/manifest.toml
@@ -0,0 +1,5 @@
+[DEFAULT]
+
+["test_cacheapi_encryption_PBM.py"]
+
+["test_caches_delete_cleanup_after_shutdown.py"]
diff --git a/dom/cache/test/marionette/test_cacheapi_encryption_PBM.py b/dom/cache/test/marionette/test_cacheapi_encryption_PBM.py
new file mode 100644
index 0000000000..68db17b2c6
--- /dev/null
+++ b/dom/cache/test/marionette/test_cacheapi_encryption_PBM.py
@@ -0,0 +1,187 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import sys
+from pathlib import Path
+
+sys.path.append(os.fspath(Path(__file__).parents[3] / "quota/test/marionette"))
+
+from quota_test_case import QuotaTestCase
+
+CACHEAPI_PBM_PREF = "dom.cache.privateBrowsing.enabled"
+QM_TESTING_PREF = "dom.quotaManager.testing"
+
+
class CacheAPIEncryptionPBM(QuotaTestCase):

    """
    Bug1856953: Ensure CacheAPI data gets encrypted in Private Browsing Mode.
    We need to ensure data inside both sqlite fields and request/response files
    gets encrypted
    """

    def setUp(self):
        """Enable the PBM Cache API and QM-testing prefs, saving defaults."""
        super(CacheAPIEncryptionPBM, self).setUp()

        self.testHTML = "dom/cache/basicCacheAPI_PBM.html"
        self.cacheName = "CachePBMTest"
        self.profilePath = self.marionette.instance.profile.profile
        # Resolved lazily by getCacheAPIStoragePath() once the origin and
        # persistence type are known (set inside the test bodies).
        self.cacheAPIStoragePath = None

        # Save current pref values so tearDown can restore them.
        self.defaultCacheAPIPBMPref = self.marionette.get_pref(CACHEAPI_PBM_PREF)
        self.marionette.set_pref(CACHEAPI_PBM_PREF, True)

        self.defaultQMPrefValue = self.marionette.get_pref(QM_TESTING_PREF)
        self.marionette.set_pref(QM_TESTING_PREF, True)

        self.cacheRequestStr = "https://example.com/"
        self.cacheResponseStr = "CacheAPIEncryptionPBM"

        self.cacheDBFileName = "caches.sqlite"
        self.cacheDBJournalFileName = "caches.sqlite-wal"

        # Matches kWalAutoCheckpointPages in dom/cache/DBSchema.cpp
        # (see validateSqlite below) — TODO confirm against current tree.
        self.dbCheckpointThresholdBytes = 512 * 1024

    def tearDown(self):
        """Restore the prefs saved in setUp."""
        # Bug fix: this previously called super(...).setUp(), which re-ran
        # the base-class *setup* during teardown instead of its teardown.
        super(CacheAPIEncryptionPBM, self).tearDown()

        self.marionette.set_pref(CACHEAPI_PBM_PREF, self.defaultCacheAPIPBMPref)
        self.marionette.set_pref(QM_TESTING_PREF, self.defaultQMPrefValue)

    def test_request_response_ondisk(self):
        """In a normal window, the request/response data must be found on disk."""
        with self.using_new_window(self.testHTML, private=False) as (
            self.origin,
            self.persistenceType,
        ):
            self.runAndValidate(
                lambda exists: self.assertTrue(
                    exists, "Failed to find expected data on disk"
                )
            )

    def test_encrypted_request_response_ondisk(self):
        """In a private window, the raw data must NOT be readable on disk."""
        with self.using_new_window(self.testHTML, private=True) as (
            self.origin,
            self.persistenceType,
        ):
            self.runAndValidate(
                lambda exists: self.assertFalse(exists, "Data on disk is not encrypted")
            )

    def runAndValidate(self, validator):
        """Add a cache entry in the page, then run on-disk validations.

        ``validator`` receives True when the plaintext marker was found in the
        on-disk artifact being checked, False otherwise.
        """
        self.marionette.execute_async_script(
            """
              const [name, requestStr, responseStr, resolve] = arguments;

              const request = new Request(requestStr);
              const response = new Response(responseStr);
              window.wrappedJSObject.addDataIntoCache(name, request, response)
                                    .then(resolve);
            """,
            script_args=(
                self.cacheName,
                self.cacheRequestStr,
                self.cacheResponseStr,
            ),
        )

        self.ensureInvariantHolds(
            lambda _: os.path.exists(self.getCacheAPIStoragePath())
        )

        self.validateSqlite(validator)
        self.validateBodyFile(validator)

    def validateBodyFile(self, validator):
        """Check the response body file under 'morgue' for plaintext content."""
        # Ensure response bodies have been flushed to the disk
        self.ensureInvariantHolds(
            lambda _: self.findDirObj(self.getCacheAPIStoragePath(), "morgue", False)
            is not None
        )

        cacheResponseDir = self.findDirObj(
            self.getCacheAPIStoragePath(), "morgue", False
        )

        self.ensureInvariantHolds(lambda _: any(os.listdir(cacheResponseDir)))

        # Get response bodies directory corresponding to the cache 'self.CacheName' since, there's
        # only one cache object in this origin, it must be the first one.
        cacheResponseBodiesPath = [
            d for d in Path(cacheResponseDir).iterdir() if d.is_dir()
        ][0]

        # Ensure bodies have been transferred to '.final' from '.tmp'
        self.ensureInvariantHolds(
            lambda _: self.findDirObj(cacheResponseBodiesPath, ".final", True)
            is not None
        )
        cacheResponseBodyPath = self.findDirObj(cacheResponseBodiesPath, ".final", True)

        # Since a cache response would get compressed using snappy; and an unencrypted response would
        # contain 'sNaPpY' as a compression header in the response body file. Check to ensure that
        # 'sNaPpY' does not exist if bodies are getting encrypted.
        with open(cacheResponseBodyPath, "rb") as f_binary:
            foundRawValue = re.search(b"sNaPpY", f_binary.read()) is not None

        validator(foundRawValue)

    def validateSqlite(self, validator):
        """Check the WAL journal (pre-checkpoint) and the main DB for plaintext."""
        self.ensureInvariantHolds(
            lambda _: self.findDirObj(
                self.getCacheAPIStoragePath(), self.cacheDBJournalFileName, True
            )
            is not None
        )
        dbJournalFile = self.findDirObj(
            self.getCacheAPIStoragePath(), self.cacheDBJournalFileName, True
        )

        self.ensureInvariantHolds(
            lambda _: self.findDirObj(
                self.getCacheAPIStoragePath(), self.cacheDBFileName, True
            )
            is not None
        )
        dbFile = self.findDirObj(
            self.getCacheAPIStoragePath(), self.cacheDBFileName, True
        )

        # Confirm journal file size is less than 512KB which ensures that checkpoint
        # has not happened yet (dom/cache/DBSchema.cpp::InitializeConnection, kWalAutoCheckpointPages)
        self.assertTrue(
            os.path.getsize(dbJournalFile) < self.dbCheckpointThresholdBytes
        )

        # Before checkpointing, journal file size should be greater than main sqlite db file.
        self.assertTrue(os.path.getsize(dbJournalFile) > os.path.getsize(dbFile))

        # Use context managers so the file handles are closed promptly
        # (the originals leaked them via open(...).read()).
        with open(dbJournalFile, "rb") as journal:
            validator(self.cacheRequestStr.encode("ascii") in journal.read())

        # Force a checkpoint by resetting the storage for this principal.
        self.assertTrue(
            self.resetStoragesForPrincipal(self.origin, self.persistenceType, "cache")
        )

        # After checkpointing, the journal must no longer exceed the main DB.
        self.assertFalse(os.path.getsize(dbJournalFile) > os.path.getsize(dbFile))

        with open(dbFile, "rb") as db:
            validator(self.cacheRequestStr.encode("ascii") in db.read())

    def getCacheAPIStoragePath(self):
        """Return (and cache) the origin's Cache API storage directory path.

        Requires self.origin and self.persistenceType to have been set by a
        test body before the first call.
        """
        if self.cacheAPIStoragePath is not None:
            return self.cacheAPIStoragePath

        assert self.origin is not None
        assert self.persistenceType is not None

        self.cacheAPIStoragePath = self.getStoragePath(
            self.profilePath, self.origin, self.persistenceType, "cache"
        )

        print("cacheAPI origin directory = " + self.cacheAPIStoragePath)
        return self.cacheAPIStoragePath
diff --git a/dom/cache/test/marionette/test_caches_delete_cleanup_after_shutdown.py b/dom/cache/test/marionette/test_caches_delete_cleanup_after_shutdown.py
new file mode 100644
index 0000000000..4db7606b08
--- /dev/null
+++ b/dom/cache/test/marionette/test_caches_delete_cleanup_after_shutdown.py
@@ -0,0 +1,186 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from marionette_driver import Wait
+from marionette_harness import MarionetteTestCase
+
+"""
+ Currently we expect our size after cleanup to be 98k for the current
+ database schema and page sizes and constants used in this test. We
+ set the threshold at 128k so the test doesn't start failing if these
+ control objects/structures increase somewhat in size, but should still
+ fail if we fail to delete all of the 5,000 1k files we create on disk
+ as part of the test.
+"""
+EXPECTED_CACHEDIR_SIZE_AFTER_CLEANUP = 128 * 1024 # 128KB
+CACHE_ID = "data"
+
+
class CachesDeleteCleanupAtShutdownTestCase(MarionetteTestCase):

    """
    Bug1784700: This test ensures that cache body files get cleaned up
    properly after cache has been deleted. Note that body files get
    cleaned up asynchronously, which means that these files might still
    be around even after the Caches.Delete promise gets resolved.
    Currently, we only clean up body files on origin initialization,
    which is why a firefox restart is necessary here.
    """

    def setUp(self):
        super(CachesDeleteCleanupAtShutdownTestCase, self).setUp()
        # Start from a pristine profile so usage numbers are comparable.
        self.marionette.restart(in_app=False, clean=True)

    def tearDown(self):
        # Leave a clean profile behind for the next test.
        self.marionette.restart(in_app=False, clean=True)
        super(CachesDeleteCleanupAtShutdownTestCase, self).tearDown()

    def getUsage(self):
        """Return the page's storage-usage estimate (bytes)."""
        return self.marionette.execute_script(
            """
                return window.wrappedJSObject.getStorageEstimate();
            """,
            new_sandbox=False,
        )

    def doCacheWork(self, n):
        """Create and delete n cache entries via the test page's helper."""
        # max timeout for this script to execute is 5 minutes
        maxTimeout = 5 * 60 * 1000

        return self.marionette.execute_script(
            """
                const [cacheId, n] = arguments;
                return window.wrappedJSObject.doCacheWork(cacheId, n);
            """,
            script_args=(
                CACHE_ID,
                n,
            ),
            new_sandbox=False,
            script_timeout=maxTimeout,
        )

    def openCache(self):
        """Open the test cache and wait until the open has completed."""
        return self.marionette.execute_async_script(
            """
                const [cacheId, resolve] = arguments;
                // Bug fix: resolve must fire after openCache settles; the
                // original `.then(resolve("success"))` invoked resolve
                // immediately and passed its (undefined) result to then().
                window.wrappedJSObject.openCache(cacheId).then(() => resolve("success"));
            """,
            new_sandbox=False,
            script_args=(CACHE_ID,),
        )

    def ensureCleanDirectory(self):
        """Return True when the origin's cache 'morgue' holds no body files."""
        with self.marionette.using_context("chrome"):
            return self.marionette.execute_script(
                """
                  let originDir = arguments[0];
                  const pathDelimiter = "/";

                  function getRelativeFile(relativePath) {
                    let file = Services.dirsvc
                      .get("ProfD", Ci.nsIFile)
                      .clone();

                    relativePath.split(pathDelimiter).forEach(function(component) {
                      if (component == "..") {
                        file = file.parent;
                      } else {
                        file.append(component);
                      }
                    });

                    return file;
                  }

                  function getCacheDir() {

                    const storageDirName = "storage";
                    const defaultPersistenceDirName = "default";

                    return getRelativeFile(
                      `${storageDirName}/${defaultPersistenceDirName}/${originDir}/cache`
                    );
                  }

                  const cacheDir = getCacheDir();
                  let morgueDir = cacheDir.clone();

                  // morgue directory should be empty
                  // or atleast directories under morgue should be empty
                  morgueDir.append("morgue");
                  for (let dir of morgueDir.directoryEntries) {
                    for (let file of dir.directoryEntries) {
                      return false;
                    }
                  }
                  return true;
                """,
                # Origin directory name: scheme/host/port with ':' and '/'
                # sanitized to '+', derived from the test server base URL.
                script_args=(
                    self.marionette.absolute_url("")[:-1]
                    .replace(":", "+")
                    .replace("/", "+"),
                ),
                new_sandbox=False,
            )

    def create_and_cleanup_cache(self, ensureCleanCallback, in_app):
        """Populate the cache, restart the browser, and run the cleanup check.

        ``in_app`` chooses a clean in-app restart (True) vs. a forced/unclean
        one (False). Returns the callback's verdict.
        """
        # create 640 cache entries
        self.doCacheWork(640)

        self.marionette.restart(in_app=in_app)
        print("restart successful")

        self.marionette.navigate(
            self.marionette.absolute_url("dom/cache/cacheUsage.html")
        )
        return ensureCleanCallback()

    def test_ensure_cache_cleanup_after_clean_restart(self):
        self.marionette.navigate(
            self.marionette.absolute_url("dom/cache/cacheUsage.html")
        )
        beforeUsage = self.getUsage()

        def ensureCleanCallback():
            # Cleanup is asynchronous: poll until usage drops back near the
            # pre-test baseline.
            Wait(self.marionette, timeout=60).until(
                lambda x: (self.getUsage() - beforeUsage)
                < EXPECTED_CACHEDIR_SIZE_AFTER_CLEANUP,
                message="Cache directory is not cleaned up properly",
            )

            return (
                abs(beforeUsage - self.getUsage())
                <= EXPECTED_CACHEDIR_SIZE_AFTER_CLEANUP
                and self.ensureCleanDirectory()
            )

        if not self.create_and_cleanup_cache(ensureCleanCallback, True):
            # self.fail (not a bare `assert False`, which -O strips) keeps the
            # diagnostic usage numbers in the failure message.
            self.fail(
                f"beforeUsage = {beforeUsage}, and afterUsage = {self.getUsage()}"
            )

    def test_ensure_cache_cleanup_after_unclean_restart(self):
        self.marionette.navigate(
            self.marionette.absolute_url("dom/cache/cacheUsage.html")
        )
        beforeUsage = self.getUsage()

        def ensureCleanCallback():
            # Re-opening the cache triggers origin initialization, which is
            # what performs the orphan-body cleanup after an unclean exit.
            self.openCache()

            Wait(self.marionette, timeout=60).until(
                lambda x: (self.getUsage() - beforeUsage)
                < EXPECTED_CACHEDIR_SIZE_AFTER_CLEANUP,
                message="Cache directory is not cleaned up properly",
            )

            return (
                abs(beforeUsage - self.getUsage())
                <= EXPECTED_CACHEDIR_SIZE_AFTER_CLEANUP
                and self.ensureCleanDirectory()
            )

        if not self.create_and_cleanup_cache(ensureCleanCallback, False):
            self.fail(
                f"beforeUsage = {beforeUsage}, and afterUsage = {self.getUsage()}"
            )