Diffstat (limited to 'taskcluster/docker/periodic-updates')
-rw-r--r--  taskcluster/docker/periodic-updates/.eslintrc.js                       |  70
-rw-r--r--  taskcluster/docker/periodic-updates/Dockerfile                         |  11
-rw-r--r--  taskcluster/docker/periodic-updates/README.md                          |  96
-rwxr-xr-x  taskcluster/docker/periodic-updates/runme.sh                           |  93
-rw-r--r--  taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js       | 674
-rw-r--r--  taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js      | 557
-rwxr-xr-x  taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh   | 618
-rwxr-xr-x  taskcluster/docker/periodic-updates/setup.sh                           |  25
8 files changed, 2144 insertions, 0 deletions
diff --git a/taskcluster/docker/periodic-updates/.eslintrc.js b/taskcluster/docker/periodic-updates/.eslintrc.js
new file mode 100644
index 0000000000..9828adaa40
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/.eslintrc.js
@@ -0,0 +1,70 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+module.exports = {
+ globals: {
+ // JS files in this folder are commonly xpcshell scripts where |arguments|
+ // is defined in the global scope.
+ arguments: false,
+ },
+ rules: {
+ // Enforce return statements in callbacks of array methods.
+ "array-callback-return": "error",
+
+ // Verify calls of super() in constructors.
+ "constructor-super": "error",
+
+ // Require default case in switch statements.
+ "default-case": "error",
+
+ // Disallow use of alert(), confirm(), and prompt().
+ "no-alert": "error",
+
+ // Disallow likely erroneous `switch` scoped lexical declarations in
+ // case/default clauses.
+ "no-case-declarations": "error",
+
+ // Disallow use of the console API.
+ "no-console": "error",
+
+ // Disallow constant expressions in conditions (except for loops).
+ "no-constant-condition": ["error", { checkLoops: false }],
+
+ // Disallow extending of native objects.
+ "no-extend-native": "error",
+
+ // Disallow case statement fallthrough without explicit `// falls through`
+ // annotation.
+ "no-fallthrough": "error",
+
+ // No reassigning native JS objects or read-only globals.
+ "no-global-assign": "error",
+
+ // Disallow use of assignment in return statement.
+ "no-return-assign": ["error", "always"],
+
+ // Disallow template literal placeholder syntax in regular strings.
+ "no-template-curly-in-string": "error",
+
+ // Disallow use of this/super before calling super() in constructors.
+ "no-this-before-super": "error",
+
+ // Disallow unmodified loop conditions.
+ "no-unmodified-loop-condition": "error",
+
+ // No expressions where a statement is expected
+ "no-unused-expressions": "error",
+
+ // Disallow unnecessary escape usage in strings and regular expressions.
+ "no-useless-escape": "error",
+
+ // Require "use strict" to be defined globally in the script.
+ strict: ["error", "global"],
+
+ // Disallow Yoda conditions.
+ yoda: ["error", "never"],
+ },
+};
diff --git a/taskcluster/docker/periodic-updates/Dockerfile b/taskcluster/docker/periodic-updates/Dockerfile
new file mode 100644
index 0000000000..24cabe02b5
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/Dockerfile
@@ -0,0 +1,11 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Julien Cristau <jcristau@mozilla.com>
+
+ADD setup.sh /setup/setup.sh
+
+RUN cd /setup && ./setup.sh
+
+COPY runme.sh /
+COPY scripts/* /home/worker/scripts/
+
+CMD ["/runme.sh"]
diff --git a/taskcluster/docker/periodic-updates/README.md b/taskcluster/docker/periodic-updates/README.md
new file mode 100644
index 0000000000..d21c0c3656
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/README.md
@@ -0,0 +1,96 @@
+
+==Periodic File Updates==
+
+This docker image examines the in-tree files for HSTS preload data, HPKP
+pinning, and blocklisting, and produces a diff for each file that needs
+updating.
+
+If given a Conduit API token, it will also use the Arcanist client to submit the commits for review.
+
+
+==Quick Start==
+
+```sh
+docker build -t hsts-local --no-cache --rm .
+
+docker run -e DO_HSTS=1 -e DO_HPKP=1 -e PRODUCT="firefox" -e BRANCH="mozilla-central" -e USE_MOZILLA_CENTRAL=1 hsts-local
+```
+
+HSTS checks will only be run if the `DO_HSTS` environment variable is set.
+Likewise for `DO_HPKP` and the HPKP checks. Environment variables are used
+rather than command line arguments to make constructing taskcluster tasks
+easier.
+
+To prevent a full build when landing with Phabricator, set the `DONTBUILD`
+environment variable.
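+
+For a local test run, `runme.sh` also honours `ARC_TOKEN`, so a Phabricator
+token can be supplied directly without a taskcluster secret. A minimal sketch,
+where the token value is a placeholder:
+
+```sh
+docker run -e DO_HSTS=1 -e PRODUCT="firefox" -e BRANCH="mozilla-central" \
+  -e DONTBUILD=1 -e ARC_TOKEN="api-XXXXXXXX" hsts-local
+```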
+
+==Background==
+
+These scripts have been moved from
+`https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and
+`security/manager/tools/` in the main repos.
+
+==HSTS Checks==
+
+`scripts/getHSTSPreloadList.js` examines the current contents of
+nsSTSPreloadList.inc from whichever `BRANCH` is specified, adds the mandatory
+hosts and those from the Chromium source, and checks each one to see whether
+its SSL configuration is valid and whether it sets the
+Strict-Transport-Security header with an appropriate `max-age`.
+
+This JavaScript has been modified to use async calls to improve performance.
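+
+The script's own header documents how to invoke it by hand with an xpcshell
+from a Firefox build (all paths are placeholders):
+
+```sh
+[path to]/run-mozilla.sh [path to]/xpcshell \
+  [path to]/getHSTSPreloadList.js \
+  [absolute path to]/nsSTSPreloadList.inc
+```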
+
+==HPKP Checks==
+
+`scripts/genHPKPStaticPins.js` ensures that the list of pinned public keys is
+up to date.
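+
+As with the HSTS script, its header documents the manual invocation (all
+paths are placeholders):
+
+```sh
+[path to]/run-mozilla.sh [path to]/xpcshell \
+  [path to]/genHPKPStaticPins.js \
+  [absolute path to]/PreloadedHPKPins.json \
+  [absolute path to]/StaticHPKPins.h
+```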
+
+==Example Taskcluster Task==
+
+https://firefox-ci-tc.services.mozilla.com/tasks/create/
+
+```yaml
+provisionerId: aws-provisioner-v1
+workerType: gecko-1-b-linux
+retries: 0
+created: '2018-02-07T14:45:57.347Z'
+deadline: '2018-02-07T17:45:57.348Z'
+expires: '2019-02-07T17:45:57.348Z'
+scopes: []
+payload:
+ image: srfraser/hsts1
+ maxRunTime: 1800
+ artifacts:
+ public/build/nsSTSPreloadList.diff:
+ path: /home/worker/artifacts/nsSTSPreloadList.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ public/build/StaticHPKPins.h.diff:
+ path: /home/worker/artifacts/StaticHPKPins.h.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ public/build/blocklist.diff:
+ path: /home/worker/artifacts/blocklist.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ env:
+ DO_HSTS: 1
+ DO_HPKP: 1
+ PRODUCT: firefox
+ BRANCH: mozilla-central
+ USE_MOZILLA_CENTRAL: 1
+ REVIEWERS: catlee
+metadata:
+ name: Periodic updates testing
+ description: Produce diffs for HSTS and HPKP in-tree files.
+ owner: sfraser@mozilla.com
+ source: 'https://firefox-ci-tc.services.mozilla.com/tasks/create'
+tags: {}
+extra:
+ treeherder:
+ jobKind: test
+ machine:
+ platform: linux64
+ tier: 1
+ symbol: 'hsts'
+
+```
diff --git a/taskcluster/docker/periodic-updates/runme.sh b/taskcluster/docker/periodic-updates/runme.sh
new file mode 100755
index 0000000000..368963aff6
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/runme.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+set -xe
+
+# Things to be set by task definition.
+# --pinset --hsts --hpkp
+# -b branch
+# --use-mozilla-central
+# -p firefox
+# Artifact directory
+# Artifact names.
+
+
+test "${BRANCH}"
+test "${PRODUCT}"
+
+PARAMS=""
+
+if [ -n "${USE_MOZILLA_CENTRAL}" ]
+then
+ PARAMS="${PARAMS} --use-mozilla-central"
+fi
+
+# TODO change these, so that they're run if the artifact location is specified?
+if [ -n "${DO_HSTS}" ]
+then
+ PARAMS="${PARAMS} --hsts"
+fi
+
+if [ -n "${DO_HPKP}" ]
+then
+ PARAMS="${PARAMS} --hpkp"
+fi
+
+if [ -n "${DO_REMOTE_SETTINGS}" ]
+then
+ PARAMS="${PARAMS} --remote-settings"
+fi
+
+if [ -n "${DO_SUFFIX_LIST}" ]
+then
+ PARAMS="${PARAMS} --suffix-list"
+fi
+
+if [ -n "${DONTBUILD}" ]
+then
+ PARAMS="${PARAMS} -d"
+fi
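+
+# At this point, PARAMS might be, e.g., " --use-mozilla-central --hsts --hpkp"
+# (illustrative; it depends on which DO_* variables were set in the task).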
+
+
+export ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+
+# Get Arcanist API token
+
+if [ -n "${TASK_ID}" ]
+then
+ curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+ ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
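+ # A matching scope looks like, e.g. (hypothetical value):
+ #   secrets:get:project/releng/gecko/build/level-3/arc-phabricator-token
+ # so the awk above extracts the secret path as the third colon-separated field.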
+fi
+if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
+then
+ set +x # Don't echo these
+ secrets_url="${TASKCLUSTER_PROXY_URL}/api/secrets/v1/secret/${ARC_SECRET}"
+ SECRET=$(curl "${secrets_url}")
+ TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
+elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
+then
+ TOKEN="${ARC_TOKEN}"
+fi
+
+if [ -n "${TOKEN}" ]
+then
+ cat >"${HOME}/.arcrc" <<END
+{
+ "hosts": {
+ "https://phabricator.services.mozilla.com/api/": {
+ "token": "${TOKEN}"
+ }
+ }
+}
+END
+ set -x
+ chmod 600 "${HOME}/.arcrc"
+fi
+
+export HGPLAIN=1
+
+# shellcheck disable=SC2086
+/home/worker/scripts/periodic_file_updates.sh -p "${PRODUCT}" -b "${BRANCH}" -a ${PARAMS}
diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
new file mode 100644
index 0000000000..af297374b1
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
@@ -0,0 +1,674 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// How to run this file:
+// 1. [obtain firefox source code]
+// 2. [build/obtain firefox binaries]
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \
+// [path to]/genHPKPStaticPins.js \
+// [absolute path to]/PreloadedHPKPins.json \
+// [absolute path to]/StaticHPKPins.h
+"use strict";
+
+if (arguments.length != 2) {
+ throw new Error(
+ "Usage: genHPKPStaticPins.js " +
+ "<absolute path to PreloadedHPKPins.json> " +
+ "<absolute path to StaticHPKPins.h>"
+ );
+}
+
+var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
+var { FileUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/FileUtils.sys.mjs"
+);
+
+var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService(
+ Ci.nsIX509CertDB
+);
+
+const SHA256_PREFIX = "sha256/";
+const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";
+
+// Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable)
+const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14;
+
+const FILE_HEADER =
+ "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
+ " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
+ " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
+ "\n" +
+ "/*****************************************************************************/\n" +
+ "/* This is an automatically generated file. If you're not */\n" +
+ "/* PublicKeyPinningService.cpp, you shouldn't be #including it. */\n" +
+ "/*****************************************************************************/\n" +
+ "#include <stdint.h>" +
+ "\n";
+
+const DOMAINHEADER =
+ "/* Domainlist */\n" +
+ "struct TransportSecurityPreload {\n" +
+ " // See bug 1338873 about making these fields const.\n" +
+ " const char* mHost;\n" +
+ " bool mIncludeSubdomains;\n" +
+ " bool mTestMode;\n" +
+ " bool mIsMoz;\n" +
+ " int32_t mId;\n" +
+ " const StaticFingerprints* pinset;\n" +
+ "};\n\n";
+
+const PINSETDEF =
+ "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" +
+ "struct StaticFingerprints {\n" +
+ " // See bug 1338873 about making these fields const.\n" +
+ " size_t size;\n" +
+ " const char* const* data;\n" +
+ "};\n\n";
+
+// Command-line arguments
+var gStaticPins = parseJson(arguments[0]);
+
+// Open the output file.
+var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+file.initWithPath(arguments[1]);
+var gFileOutputStream = FileUtils.openSafeFileOutputStream(file);
+
+function writeString(string) {
+ gFileOutputStream.write(string, string.length);
+}
+
+function readFileToString(filename) {
+ let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+ file.initWithPath(filename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ stream.init(file, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+function stripComments(buf) {
+ let lines = buf.split("\n");
+ let entryRegex = /^\s*\/\//;
+ let data = "";
+ for (let i = 0; i < lines.length; ++i) {
+ let match = entryRegex.exec(lines[i]);
+ if (!match) {
+ data = data + lines[i];
+ }
+ }
+ return data;
+}
+
+function download(filename) {
+ let req = new XMLHttpRequest();
+ req.open("GET", filename, false); // doing the request synchronously
+ try {
+ req.send();
+ } catch (e) {
+ throw new Error(`ERROR: problem downloading '${filename}': ${e}`);
+ }
+
+ if (req.status != 200) {
+ throw new Error(
+ "ERROR: problem downloading '" + filename + "': status " + req.status
+ );
+ }
+
+ let resultDecoded;
+ try {
+ resultDecoded = atob(req.responseText);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not decode data as base64 from '" + filename + "': " + e
+ );
+ }
+ return resultDecoded;
+}
+
+function downloadAsJson(filename) {
+ // we have to filter out '//' comments, while not mangling the json
+ let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
+ let data = null;
+ try {
+ data = JSON.parse(result);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not parse data from '" + filename + "': " + e
+ );
+ }
+ return data;
+}
+
+// Returns a Subject Public Key Digest from the given pem, if it exists.
+function getSKDFromPem(pem) {
+ let cert = gCertDB.constructX509FromBase64(pem, pem.length);
+ return cert.sha256SubjectPublicKeyInfoDigest;
+}
+
+/**
+ * Hashes |input| using the SHA-256 algorithm in the following manner:
+ * btoa(sha256(atob(input)))
+ *
+ * @param {string} input Base64 string to decode and return the hash of.
+ * @returns {string} Base64 encoded SHA-256 hash.
+ */
+function sha256Base64(input) {
+ let decodedValue;
+ try {
+ decodedValue = atob(input);
+ } catch (e) {
+ throw new Error(`ERROR: could not decode as base64: '${input}': ${e}`);
+ }
+
+ // Convert |decodedValue| to an array so that it can be hashed by the
+ // nsICryptoHash instance below.
+ // In most cases across the code base, convertToByteArray() of
+ // nsIScriptableUnicodeConverter is used to do this, but the method doesn't
+ // seem to work here.
+ let data = [];
+ for (let i = 0; i < decodedValue.length; i++) {
+ data[i] = decodedValue.charCodeAt(i);
+ }
+
+ let hasher = Cc["@mozilla.org/security/hash;1"].createInstance(
+ Ci.nsICryptoHash
+ );
+ hasher.init(hasher.SHA256);
+ hasher.update(data, data.length);
+
+ // true is passed so that the hasher returns a Base64 encoded string.
+ return hasher.finish(true);
+}
+
+// Downloads the static certs file and tries to map Google Chrome nicknames
+// to Mozilla nicknames, as well as storing any hashes for pins for which we
+// don't have root PEMs. Each entry consists of a line containing the name of
+// the pin followed either by a hash in the format "sha256/" + base64(hash),
+// a PEM encoded public key, or a PEM encoded certificate.
+// For certificates that we have in our database, we return a map of Google's
+// nickname to ours. For ones that aren't in our database, we return a map of
+// Google's nickname to SHA-256 values. This code is modeled after agl's
+// https://github.com/agl/transport-security-state-generate, which doesn't
+// live in the Chromium repo because Go is not an official language in
+// Chromium.
+// For all of the entries in this file:
+// - If the entry has a hash format, find the Mozilla pin name (cert nickname)
+// and stick the hash into certSKDToName
+// - If the entry has a PEM format, parse the PEM, find the Mozilla pin name
+// and stick the hash in certSKDToName
+// We MUST be able to find a corresponding cert nickname for the Chrome names,
+// otherwise we skip all pinsets referring to that Chrome name.
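+// For example, a hash-format entry in that file looks like (values are
+// illustrative):
+//   ExampleCertAuthority
+//   sha256/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=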
+function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) {
+ // Prefixes that we care about.
+ const BEGIN_CERT = "-----BEGIN CERTIFICATE-----";
+ const END_CERT = "-----END CERTIFICATE-----";
+ const BEGIN_PUB_KEY = "-----BEGIN PUBLIC KEY-----";
+ const END_PUB_KEY = "-----END PUBLIC KEY-----";
+
+ // Parsing states.
+ const PRE_NAME = 0;
+ const POST_NAME = 1;
+ const IN_CERT = 2;
+ const IN_PUB_KEY = 3;
+ let state = PRE_NAME;
+
+ let lines = download(filename).split("\n");
+ let pemCert = "";
+ let pemPubKey = "";
+ let hash = "";
+ let chromeNameToHash = {};
+ let chromeNameToMozName = {};
+ let chromeName;
+ for (let line of lines) {
+ // Skip comments and newlines.
+ if (!line.length || line[0] == "#") {
+ continue;
+ }
+ switch (state) {
+ case PRE_NAME:
+ chromeName = line;
+ state = POST_NAME;
+ break;
+ case POST_NAME:
+ if (line.startsWith(SHA256_PREFIX)) {
+ hash = line.substring(SHA256_PREFIX.length);
+ chromeNameToHash[chromeName] = hash;
+ certNameToSKD[chromeName] = hash;
+ certSKDToName[hash] = chromeName;
+ state = PRE_NAME;
+ } else if (line.startsWith(BEGIN_CERT)) {
+ state = IN_CERT;
+ } else if (line.startsWith(BEGIN_PUB_KEY)) {
+ state = IN_PUB_KEY;
+ } else if (
+ chromeName == "PinsListTimestamp" &&
+ line.match(/^[0-9]+$/)
+ ) {
+ // If the name of this entry is "PinsListTimestamp", this line should
+ // be the pins list timestamp. It should consist solely of digits.
+ // Ignore it and expect other entries to come.
+ state = PRE_NAME;
+ } else {
+ throw new Error(
+ "ERROR: couldn't parse Chrome certificate file line: " + line
+ );
+ }
+ break;
+ case IN_CERT:
+ if (line.startsWith(END_CERT)) {
+ state = PRE_NAME;
+ hash = getSKDFromPem(pemCert);
+ pemCert = "";
+ let mozName;
+ if (hash in certSKDToName) {
+ mozName = certSKDToName[hash];
+ } else {
+ // Not one of our built-in certs. Prefix the name with
+ // GOOGLE_PIN_.
+ mozName = GOOGLE_PIN_PREFIX + chromeName;
+ dump(
+ "Can't find hash in builtin certs for Chrome nickname " +
+ chromeName +
+ ", inserting " +
+ mozName +
+ "\n"
+ );
+ certSKDToName[hash] = mozName;
+ certNameToSKD[mozName] = hash;
+ }
+ chromeNameToMozName[chromeName] = mozName;
+ } else {
+ pemCert += line;
+ }
+ break;
+ case IN_PUB_KEY:
+ if (line.startsWith(END_PUB_KEY)) {
+ state = PRE_NAME;
+ hash = sha256Base64(pemPubKey);
+ pemPubKey = "";
+ chromeNameToHash[chromeName] = hash;
+ certNameToSKD[chromeName] = hash;
+ certSKDToName[hash] = chromeName;
+ } else {
+ pemPubKey += line;
+ }
+ break;
+ default:
+ throw new Error(
+ "ERROR: couldn't parse Chrome certificate file " + line
+ );
+ }
+ }
+ return [chromeNameToHash, chromeNameToMozName];
+}
+
+// We can only import pinsets from chrome if for every name in the pinset:
+// - We have a hash from Chrome's static certificate file
+// - We have a builtin cert
+// If the pinset meets these requirements, we store a map, keyed by pinset
+// name, of pinset objects:
+// {
+// pinset_name : {
+// // Array of names with entries in certNameToSKD
+// sha256_hashes: []
+// }
+// }
+// and an array of imported pinset entries:
+// { name: string, include_subdomains: boolean, test_mode: boolean,
+// pins: pinset_name }
+function downloadAndParseChromePins(
+ filename,
+ chromeNameToHash,
+ chromeNameToMozName,
+ certNameToSKD,
+ certSKDToName
+) {
+ let chromePreloads = downloadAsJson(filename);
+ let chromePins = chromePreloads.pinsets;
+ let chromeImportedPinsets = {};
+ let chromeImportedEntries = [];
+
+ chromePins.forEach(function (pin) {
+ let valid = true;
+ let pinset = { name: pin.name, sha256_hashes: [] };
+ // Translate the Chrome pinset format to ours
+ pin.static_spki_hashes.forEach(function (name) {
+ if (name in chromeNameToHash) {
+ let hash = chromeNameToHash[name];
+ pinset.sha256_hashes.push(certSKDToName[hash]);
+
+ // We should have already added hashes for all of these when we
+ // imported the certificate file.
+ if (!certNameToSKD[name]) {
+ throw new Error("ERROR: No hash for name: " + name);
+ }
+ } else if (name in chromeNameToMozName) {
+ pinset.sha256_hashes.push(chromeNameToMozName[name]);
+ } else {
+ dump(
+ "Skipping Chrome pinset " +
+ pinset.name +
+ ", couldn't find " +
+ "builtin " +
+ name +
+ " from cert file\n"
+ );
+ valid = false;
+ }
+ });
+ if (valid) {
+ chromeImportedPinsets[pinset.name] = pinset;
+ }
+ });
+
+ // Grab the domain entry lists. Chrome's entry format is similar to
+ // ours, except theirs includes an HSTS mode.
+ const cData = gStaticPins.chromium_data;
+ let entries = chromePreloads.entries;
+ entries.forEach(function (entry) {
+ // HSTS entry only
+ if (!entry.pins) {
+ return;
+ }
+ let pinsetName = cData.substitute_pinsets[entry.pins];
+ if (!pinsetName) {
+ pinsetName = entry.pins;
+ }
+
+ // We trim the entry name here to avoid breaking hostname comparisons in the
+ // HPKP implementation.
+ entry.name = entry.name.trim();
+
+ let isProductionDomain = cData.production_domains.includes(entry.name);
+ let isProductionPinset = cData.production_pinsets.includes(pinsetName);
+ let excludeDomain = cData.exclude_domains.includes(entry.name);
+ let isTestMode = !isProductionPinset && !isProductionDomain;
+ if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) {
+ chromeImportedEntries.push({
+ name: entry.name,
+ include_subdomains: entry.include_subdomains,
+ test_mode: isTestMode,
+ is_moz: false,
+ pins: pinsetName,
+ });
+ }
+ });
+ return [chromeImportedPinsets, chromeImportedEntries];
+}
+
+// Returns a pair of maps [certNameToSKD, certSKDToName] between cert
+// nicknames and SPKI digests for the Mozilla trust store.
+function loadNSSCertinfo(extraCertificates) {
+ let allCerts = gCertDB.getCerts();
+ let certNameToSKD = {};
+ let certSKDToName = {};
+ for (let cert of allCerts) {
+ if (!cert.isBuiltInRoot) {
+ continue;
+ }
+ let name = cert.displayName;
+ let SKD = cert.sha256SubjectPublicKeyInfoDigest;
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+
+ for (let cert of extraCertificates) {
+ let name = cert.commonName;
+ let SKD = cert.sha256SubjectPublicKeyInfoDigest;
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+
+ {
+ // This is the pinning test certificate. The key hash identifies the
+ // default RSA key from pykey.
+ let name = "End Entity Test Cert";
+ let SKD = "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8=";
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+ return [certNameToSKD, certSKDToName];
+}
+
+function parseJson(filename) {
+ let json = stripComments(readFileToString(filename));
+ return JSON.parse(json);
+}
+
+function nameToAlias(certName) {
+ // change the name to a string valid as a C identifier
+ // remove non-ASCII characters (JS regexes don't support POSIX classes
+ // like [:ascii:], so match the non-ASCII range explicitly)
+ certName = certName.replace(/[^\x00-\x7F]/g, "_");
+ // replace non word characters
+ certName = certName.replace(/[^A-Za-z0-9]/g, "_");
+
+ return "k" + certName + "Fingerprint";
+}
+
+function compareByName(a, b) {
+ return a.name.localeCompare(b.name);
+}
+
+function genExpirationTime() {
+ let now = new Date();
+ let nowMillis = now.getTime();
+ let expirationMillis = nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000;
+ let expirationMicros = expirationMillis * 1000;
+ return (
+ "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" +
+ expirationMicros +
+ ");\n"
+ );
+}
+
+function writeFullPinset(certNameToSKD, certSKDToName, pinset) {
+ if (!pinset.sha256_hashes || !pinset.sha256_hashes.length) {
+ throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`);
+ }
+ writeFingerprints(
+ certNameToSKD,
+ certSKDToName,
+ pinset.name,
+ pinset.sha256_hashes
+ );
+}
+
+function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) {
+ let varPrefix = "kPinset_" + name;
+ writeString("static const char* const " + varPrefix + "_Data[] = {\n");
+ let SKDList = [];
+ for (let certName of hashes) {
+ if (!(certName in certNameToSKD)) {
+ throw new Error(`ERROR: Can't find '${certName}' in certNameToSKD`);
+ }
+ SKDList.push(certNameToSKD[certName]);
+ }
+ for (let skd of SKDList.sort()) {
+ writeString(" " + nameToAlias(certSKDToName[skd]) + ",\n");
+ }
+ if (!hashes.length) {
+ // ANSI C requires that an initialiser list be non-empty.
+ writeString(" 0\n");
+ }
+ writeString("};\n");
+ writeString(
+ "static const StaticFingerprints " +
+ varPrefix +
+ " = {\n " +
+ "sizeof(" +
+ varPrefix +
+ "_Data) / sizeof(const char*),\n " +
+ varPrefix +
+ "_Data\n};\n\n"
+ );
+}
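+
+// For a pinset named "example" with one cert, the emitted C looks roughly
+// like the following (illustrative):
+//   static const char* const kPinset_example_Data[] = {
+//     kExampleCertFingerprint,
+//   };
+//   static const StaticFingerprints kPinset_example = {
+//     sizeof(kPinset_example_Data) / sizeof(const char*), kPinset_example_Data
+//   };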
+
+function writeEntry(entry) {
+ let printVal = ` { "${entry.name}", `;
+ if (entry.include_subdomains) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ // Default to test mode if not specified.
+ let testMode = true;
+ if (entry.hasOwnProperty("test_mode")) {
+ testMode = entry.test_mode;
+ }
+ if (testMode) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ if (
+ entry.is_moz ||
+ (entry.pins.includes("mozilla") && entry.pins != "mozilla_test")
+ ) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ if ("id" in entry) {
+ if (entry.id >= 256) {
+ throw new Error("ERROR: Not enough buckets in histogram");
+ }
+ if (entry.id >= 0) {
+ printVal += entry.id + ", ";
+ }
+ } else {
+ printVal += "-1, ";
+ }
+ printVal += "&kPinset_" + entry.pins;
+ printVal += " },\n";
+ writeString(printVal);
+}
+
+function writeDomainList(chromeImportedEntries) {
+ writeString("/* Sort hostnames for binary search. */\n");
+ writeString(
+ "static const TransportSecurityPreload " +
+ "kPublicKeyPinningPreloadList[] = {\n"
+ );
+ let count = 0;
+ let mozillaDomains = {};
+ gStaticPins.entries.forEach(function (entry) {
+ mozillaDomains[entry.name] = true;
+ });
+ // For any domain for which we have set pins, exclude them from
+ // chromeImportedEntries.
+ for (let i = chromeImportedEntries.length - 1; i >= 0; i--) {
+ if (mozillaDomains[chromeImportedEntries[i].name]) {
+ dump(
+ "Skipping duplicate pinset for domain " +
+ JSON.stringify(chromeImportedEntries[i], undefined, 2) +
+ "\n"
+ );
+ chromeImportedEntries.splice(i, 1);
+ }
+ }
+ let sortedEntries = gStaticPins.entries;
+ sortedEntries.push.apply(sortedEntries, chromeImportedEntries);
+ for (let entry of sortedEntries.sort(compareByName)) {
+ count++;
+ writeEntry(entry);
+ }
+ writeString("};\n");
+
+ writeString("\n// Pinning Preload List Length = " + count + ";\n");
+ writeString("\nstatic const int32_t kUnknownId = -1;\n");
+}
+
+function writeFile(
+ certNameToSKD,
+ certSKDToName,
+ chromeImportedPinsets,
+ chromeImportedEntries
+) {
+ // Compute used pins from both Chrome's and our pinsets, so we can output
+ // them later.
+ let usedFingerprints = {};
+ let mozillaPins = {};
+ gStaticPins.pinsets.forEach(function (pinset) {
+ mozillaPins[pinset.name] = true;
+ pinset.sha256_hashes.forEach(function (name) {
+ usedFingerprints[name] = true;
+ });
+ });
+ for (let key in chromeImportedPinsets) {
+ let pinset = chromeImportedPinsets[key];
+ pinset.sha256_hashes.forEach(function (name) {
+ usedFingerprints[name] = true;
+ });
+ }
+
+ writeString(FILE_HEADER);
+
+ // Write actual fingerprints.
+ Object.keys(usedFingerprints)
+ .sort()
+ .forEach(function (certName) {
+ if (certName) {
+ writeString("/* " + certName + " */\n");
+ writeString("static const char " + nameToAlias(certName) + "[] =\n");
+ writeString(' "' + certNameToSKD[certName] + '";\n');
+ writeString("\n");
+ }
+ });
+
+ // Write the pinsets
+ writeString(PINSETDEF);
+ writeString("/* PreloadedHPKPins.json pinsets */\n");
+ gStaticPins.pinsets.sort(compareByName).forEach(function (pinset) {
+ writeFullPinset(certNameToSKD, certSKDToName, pinset);
+ });
+ writeString("/* Chrome static pinsets */\n");
+ for (let key in chromeImportedPinsets) {
+ if (mozillaPins[key]) {
+ dump("Skipping duplicate pinset " + key + "\n");
+ } else {
+ dump("Writing pinset " + key + "\n");
+ writeFullPinset(certNameToSKD, certSKDToName, chromeImportedPinsets[key]);
+ }
+ }
+
+ // Write the domainlist entries.
+ writeString(DOMAINHEADER);
+ writeDomainList(chromeImportedEntries);
+ writeString("\n");
+ writeString(genExpirationTime());
+}
+
+function loadExtraCertificates(certStringList) {
+ let constructedCerts = [];
+ for (let certString of certStringList) {
+ constructedCerts.push(gCertDB.constructX509FromBase64(certString));
+ }
+ return constructedCerts;
+}
+
+var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates);
+var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates);
+var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts(
+ gStaticPins.chromium_data.cert_file_url,
+ certNameToSKD,
+ certSKDToName
+);
+var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins(
+ gStaticPins.chromium_data.json_file_url,
+ chromeNameToHash,
+ chromeNameToMozName,
+ certNameToSKD,
+ certSKDToName
+);
+
+writeFile(
+ certNameToSKD,
+ certSKDToName,
+ chromeImportedPinsets,
+ chromeImportedEntries
+);
+
+FileUtils.closeSafeFileOutputStream(gFileOutputStream);
diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
new file mode 100644
index 0000000000..aeaa29bc2d
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
@@ -0,0 +1,557 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+// How to run this file:
+// 1. [obtain firefox source code]
+// 2. [build/obtain firefox binaries]
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadList.js [absolute path to]/nsSTSPreloadList.inc`
+// Note: Running this file outputs a new nsSTSPreloadList.inc in the current
+// working directory.
+
+var gSSService = Cc["@mozilla.org/ssservice;1"].getService(
+ Ci.nsISiteSecurityService
+);
+
+const { FileUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/FileUtils.sys.mjs"
+);
+
+const SOURCE =
+ "https://chromium.googlesource.com/chromium/src/+/refs/heads/main/net/http/transport_security_state_static.json?format=TEXT";
+const TOOL_SOURCE =
+ "https://hg.mozilla.org/mozilla-central/file/default/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js";
+const OUTPUT = "nsSTSPreloadList.inc";
+const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18;
+const MAX_CONCURRENT_REQUESTS = 500;
+const MAX_RETRIES = 1;
+const REQUEST_TIMEOUT = 30 * 1000;
+const ERROR_NONE = "no error";
+const ERROR_CONNECTING_TO_HOST = "could not connect to host";
+const ERROR_NO_HSTS_HEADER = "did not receive HSTS header";
+const ERROR_MAX_AGE_TOO_LOW = "max-age too low: ";
+const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*****************************************************************************/
+/* This is an automatically generated file. If you're not */
+/* nsSiteSecurityService.cpp, you shouldn't be #including it. */
+/*****************************************************************************/
+
+#include <stdint.h>
+`;
+
+const GPERF_DELIM = "%%\n";
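+
+// Between the gperf delimiters, each emitted entry has the v3 form
+// "<host>, <0|1>", where 1 means includeSubdomains; e.g. "example.com, 1"
+// (illustrative).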
+
+function download() {
+ let req = new XMLHttpRequest();
+ req.open("GET", SOURCE, false); // doing the request synchronously
+ try {
+ req.send();
+ } catch (e) {
+ throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`);
+ }
+
+ if (req.status != 200) {
+ throw new Error(
+ "ERROR: problem downloading '" + SOURCE + "': status " + req.status
+ );
+ }
+
+ let resultDecoded;
+ try {
+ resultDecoded = atob(req.responseText);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not decode data as base64 from '" + SOURCE + "': " + e
+ );
+ }
+
+ // we have to filter out '//' comments, while not mangling the json
+ let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
+ let data = null;
+ try {
+ data = JSON.parse(result);
+ } catch (e) {
+ throw new Error(`ERROR: could not parse data from '${SOURCE}': ${e}`);
+ }
+ return data;
+}
+
+function getHosts(rawdata) {
+ let hosts = [];
+
+ if (!rawdata || !rawdata.entries) {
+ throw new Error(
+ "ERROR: source data not formatted correctly: 'entries' not found"
+ );
+ }
+
+ for (let entry of rawdata.entries) {
+ if (entry.mode && entry.mode == "force-https") {
+ if (entry.name) {
+ // We trim the entry name here to avoid malformed URI exceptions when we
+ // later try to connect to the domain.
+ entry.name = entry.name.trim();
+ entry.retries = MAX_RETRIES;
+ // We prefer the camelCase variable to the JSON's snake case version
+ entry.includeSubdomains = entry.include_subdomains;
+ hosts.push(entry);
+ } else {
+ throw new Error("ERROR: entry not formatted correctly: no name found");
+ }
+ }
+ }
+
+ return hosts;
+}
+
+function processStsHeader(host, header, status, securityInfo) {
+ let maxAge = {
+ value: 0,
+ };
+ let includeSubdomains = {
+ value: false,
+ };
+ let error = ERROR_NONE;
+ if (
+ header != null &&
+ securityInfo != null &&
+ securityInfo.overridableErrorCategory ==
+ Ci.nsITransportSecurityInfo.ERROR_UNSET
+ ) {
+ try {
+ let uri = Services.io.newURI("https://" + host.name);
+ gSSService.processHeader(uri, header, {}, maxAge, includeSubdomains);
+ } catch (e) {
+ dump(
+ "ERROR: could not process header '" +
+ header +
+ "' from " +
+ host.name +
+ ": " +
+ e +
+ "\n"
+ );
+ error = e;
+ }
+ } else if (status == 0) {
+ error = ERROR_CONNECTING_TO_HOST;
+ } else {
+ error = ERROR_NO_HSTS_HEADER;
+ }
+
+ if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) {
+ error = ERROR_MAX_AGE_TOO_LOW;
+ }
+
+ return {
+ name: host.name,
+ maxAge: maxAge.value,
+ includeSubdomains: includeSubdomains.value,
+ error,
+ retries: host.retries - 1,
+ forceInclude: host.forceInclude,
+ };
+}
+
+// RedirectAndAuthStopper prevents redirects and HTTP authentication
+function RedirectAndAuthStopper() {}
+
+RedirectAndAuthStopper.prototype = {
+ // nsIChannelEventSink
+ asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
+ throw Components.Exception("", Cr.NS_ERROR_ENTITY_CHANGED);
+ },
+
+ // nsIAuthPrompt2
+ promptAuth(channel, level, authInfo) {
+ return false;
+ },
+
+ asyncPromptAuth(channel, callback, context, level, authInfo) {
+ throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED);
+ },
+
+ getInterface(iid) {
+ return this.QueryInterface(iid);
+ },
+
+ QueryInterface: ChromeUtils.generateQI([
+ "nsIChannelEventSink",
+ "nsIAuthPrompt2",
+ ]),
+};
+
+function fetchstatus(host) {
+ return new Promise((resolve, reject) => {
+ let xhr = new XMLHttpRequest();
+ let uri = "https://" + host.name + "/";
+
+ xhr.open("head", uri, true);
+ xhr.setRequestHeader("X-Automated-Tool", TOOL_SOURCE);
+ xhr.timeout = REQUEST_TIMEOUT;
+
+ let errorHandler = () => {
+ dump("ERROR: exception making request to " + host.name + "\n");
+ resolve(
+ processStsHeader(
+ host,
+ null,
+ xhr.status,
+ xhr.channel && xhr.channel.securityInfo
+ )
+ );
+ };
+
+ xhr.onerror = errorHandler;
+ xhr.ontimeout = errorHandler;
+ xhr.onabort = errorHandler;
+
+ xhr.onload = () => {
+ let header = xhr.getResponseHeader("strict-transport-security");
+ resolve(
+ processStsHeader(host, header, xhr.status, xhr.channel.securityInfo)
+ );
+ };
+
+ xhr.channel.notificationCallbacks = new RedirectAndAuthStopper();
+ xhr.send();
+ });
+}
+
+async function getHSTSStatus(host) {
+ do {
+ host = await fetchstatus(host);
+ } while (shouldRetry(host));
+ return host;
+}
+
+function compareHSTSStatus(a, b) {
+ if (a.name > b.name) {
+ return 1;
+ }
+ if (a.name < b.name) {
+ return -1;
+ }
+ return 0;
+}
+
+function writeTo(string, fos) {
+ fos.write(string, string.length);
+}
+
+// Determines and returns a string representing a declaration of when this
+// preload list should no longer be used.
+// This is the current time plus MINIMUM_REQUIRED_MAX_AGE.
+function getExpirationTimeString() {
+ let now = new Date();
+ let nowMillis = now.getTime();
+ // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds
+ let expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000;
+ let expirationMicros = expirationMillis * 1000;
+ return (
+ "const PRTime gPreloadListExpirationTime = INT64_C(" +
+ expirationMicros +
+ ");\n"
+ );
+}
+
+function shouldRetry(response) {
+ return (
+ response.error != ERROR_NO_HSTS_HEADER &&
+ response.error != ERROR_MAX_AGE_TOO_LOW &&
+ response.error != ERROR_NONE &&
+ response.retries > 0
+ );
+}
+
+// Copied from browser/components/migration/MigrationUtils.sys.mjs
+function spinResolve(promise) {
+ if (!(promise instanceof Promise)) {
+ return promise;
+ }
+ let done = false;
+ let result = null;
+ let error = null;
+ promise
+ .catch(e => {
+ error = e;
+ })
+ .then(r => {
+ result = r;
+ done = true;
+ });
+
+ Services.tm.spinEventLoopUntil(
+ "getHSTSPreloadList.js:spinResolve",
+ () => done
+ );
+ if (error) {
+ throw error;
+ } else {
+ return result;
+ }
+}
+
+async function probeHSTSStatuses(inHosts) {
+ let totalLength = inHosts.length;
+ dump("Examining " + totalLength + " hosts.\n");
+
+ // Make requests in batches of MAX_CONCURRENT_REQUESTS. Otherwise, we have
+ // too many in-flight requests and the time it takes to process them causes
+ // them all to time out.
+ let allResults = [];
+ while (inHosts.length) {
+ let promises = [];
+ for (let i = 0; i < MAX_CONCURRENT_REQUESTS && inHosts.length; i++) {
+ let host = inHosts.shift();
+ promises.push(getHSTSStatus(host));
+ }
+ let results = await Promise.all(promises);
+ let progress = (
+ (100 * (totalLength - inHosts.length)) /
+ totalLength
+ ).toFixed(2);
+ dump(progress + "% done\n");
+ allResults = allResults.concat(results);
+ }
+
+ dump("HSTS Probe received " + allResults.length + " statuses.\n");
+ return allResults;
+}
+
+function readCurrentList(filename) {
+ var currentHosts = {};
+ var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+ file.initWithPath(filename);
+ var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsILineInputStream
+ );
+ fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF);
+ var line = {};
+
+ // While we generate entries matching the latest version format,
+ // we still need to be able to read entries in the previous version formats
+ // for bootstrapping a latest version preload list from a previous version
+ // preload list. Hence these regexes.
+ const entryRegexes = [
+ /([^,]+), (0|1)/, // v3
+ / {2}\/\* "([^"]*)", (true|false) \*\//, // v2
+ / {2}{ "([^"]*)", (true|false) },/, // v1
+ ];
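+ // Illustrative lines matched by each regex:
+ //   v3: example.com, 1
+ //   v2:   /* "example.com", true */
+ //   v1:   { "example.com", true },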
+
+ while (fis.readLine(line)) {
+ let match;
+ entryRegexes.find(r => {
+ match = r.exec(line.value);
+ return match;
+ });
+ if (match) {
+ currentHosts[match[1]] = match[2] == "1" || match[2] == "true";
+ }
+ }
+ return currentHosts;
+}
+
+function combineLists(newHosts, currentHosts) {
+ let newHostsSet = new Set();
+
+ for (let newHost of newHosts) {
+ newHostsSet.add(newHost.name);
+ }
+
+ for (let currentHost in currentHosts) {
+ if (!newHostsSet.has(currentHost)) {
+ newHosts.push({ name: currentHost, retries: MAX_RETRIES });
+ }
+ }
+}
+
+const TEST_ENTRIES = [
+ {
+ name: "includesubdomains.preloaded.test",
+ includeSubdomains: true,
+ },
+ {
+ name: "includesubdomains2.preloaded.test",
+ includeSubdomains: true,
+ },
+ {
+ name: "noincludesubdomains.preloaded.test",
+ includeSubdomains: false,
+ },
+];
+
+function deleteTestHosts(currentHosts) {
+ for (let testEntry of TEST_ENTRIES) {
+ delete currentHosts[testEntry.name];
+ }
+}
+
+function getTestHosts() {
+ let hosts = [];
+ for (let testEntry of TEST_ENTRIES) {
+ hosts.push({
+ name: testEntry.name,
+ maxAge: MINIMUM_REQUIRED_MAX_AGE,
+ includeSubdomains: testEntry.includeSubdomains,
+ error: ERROR_NONE,
+ // This deliberately doesn't have a value for `retries` (because we should
+ // never attempt to connect to this host).
+ forceInclude: true,
+ });
+ }
+ return hosts;
+}
+
+async function insertHosts(inoutHostList, inAddedHosts) {
+ for (let host of inAddedHosts) {
+ inoutHostList.push(host);
+ }
+}
+
+function filterForcedInclusions(inHosts, outNotForced, outForced) {
+ // Apply our filters (based on policy today) to determine which entries
+ // will be included without being checked (forced); the others will be
+ // checked using active probing.
+ for (let host of inHosts) {
+ if (
+ host.policy == "google" ||
+ host.policy == "public-suffix" ||
+ host.policy == "public-suffix-requested"
+ ) {
+ host.forceInclude = true;
+ host.error = ERROR_NONE;
+ outForced.push(host);
+ } else {
+ outNotForced.push(host);
+ }
+ }
+}
+
+function output(statuses) {
+ dump("INFO: Writing output to " + OUTPUT + "\n");
+ try {
+ let file = new FileUtils.File(
+ PathUtils.join(Services.dirsvc.get("CurWorkD", Ci.nsIFile).path, OUTPUT)
+ );
+ let fos = FileUtils.openSafeFileOutputStream(file);
+ writeTo(HEADER, fos);
+ writeTo(getExpirationTimeString(), fos);
+
+ writeTo(GPERF_DELIM, fos);
+
+ for (let status of statuses) {
+ let includeSubdomains = status.includeSubdomains ? 1 : 0;
+ writeTo(status.name + ", " + includeSubdomains + "\n", fos);
+ }
+
+ writeTo(GPERF_DELIM, fos);
+ FileUtils.closeSafeFileOutputStream(fos);
+ dump("finished writing output file\n");
+ } catch (e) {
+ dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n");
+ throw e;
+ }
+}
+
+function errorToString(status) {
+ return status.error == ERROR_MAX_AGE_TOO_LOW
+ ? status.error + status.maxAge
+ : status.error;
+}
+
+async function main(args) {
+ if (args.length != 1) {
+ throw new Error(
+ "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>"
+ );
+ }
+
+ // get the current preload list
+ let currentHosts = readCurrentList(args[0]);
+ // delete any hosts we use in tests so we don't actually connect to them
+ deleteTestHosts(currentHosts);
+ // disable the current preload list so it won't interfere with requests we make
+ Services.prefs.setBoolPref(
+ "network.stricttransportsecurity.preloadlist",
+ false
+ );
+ // download and parse the raw json file from the Chromium source
+ let rawdata = download();
+ // get just the hosts with mode: "force-https"
+ let hosts = getHosts(rawdata);
+ // add hosts in the current list to the new list (avoiding duplicates)
+ combineLists(hosts, currentHosts);
+
+ // Don't contact hosts that are forced to be included anyway
+ let hostsToContact = [];
+ let forcedHosts = [];
+ filterForcedInclusions(hosts, hostsToContact, forcedHosts);
+
+ // Initialize the final status list
+ let hstsStatuses = [];
+ // Add the hosts we use in tests
+ dump("Adding test hosts\n");
+ insertHosts(hstsStatuses, getTestHosts());
+ // Add in the hosts that are forced
+ dump("Adding forced hosts\n");
+ insertHosts(hstsStatuses, forcedHosts);
+
+ let total = await probeHSTSStatuses(hostsToContact)
+ .then(function (probedStatuses) {
+ return hstsStatuses.concat(probedStatuses);
+ })
+ .then(function (statuses) {
+ return statuses.sort(compareHSTSStatus);
+ })
+ .then(function (statuses) {
+ for (let status of statuses) {
+ // If we've encountered an error for this entry (other than the site not
+ // sending an HSTS header), be safe and don't remove it from the list
+ // (given that it was already on the list).
+ if (
+ !status.forceInclude &&
+ status.error != ERROR_NONE &&
+ status.error != ERROR_NO_HSTS_HEADER &&
+ status.error != ERROR_MAX_AGE_TOO_LOW &&
+ status.name in currentHosts
+ ) {
+ // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+ status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+ status.includeSubdomains = currentHosts[status.name];
+ }
+ }
+ return statuses;
+ })
+ .then(function (statuses) {
+ // Filter out entries we aren't including.
+ var includedStatuses = statuses.filter(function (status) {
+ if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+ // dump("INFO: " + status.name + " NOT ON the preload list\n");
+ return false;
+ }
+
+ // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+ if (status.forceInclude && status.error != ERROR_NONE) {
+ dump(
+ status.name +
+ ": " +
+ errorToString(status) +
+ " (error ignored - included regardless)\n"
+ );
+ }
+ return true;
+ });
+ return includedStatuses;
+ });
+
+ // Write the output file
+ output(total);
+
+ dump("HSTS probing all done\n");
+}
+
+// arguments is a global within xpcshell
+spinResolve(main(arguments));
diff --git a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
new file mode 100755
index 0000000000..b88ee476da
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
@@ -0,0 +1,618 @@
+#!/bin/bash
+
+set -ex
+
+function usage {
+ cat <<EOF
+
+Usage: $(basename "$0") -h # Displays this usage/help text
+Usage: $(basename "$0") -x # lists exit codes
+Usage: $(basename "$0") [-p product]
+ [-r existing_repo_dir]
+ # Use mozilla-central builds to check HSTS & HPKP
+ [--use-mozilla-central]
+ # Use archive.m.o instead of the taskcluster index to get xpcshell
+ [--use-ftp-builds]
+ # One (or more) of the following actions must be specified.
+ --hsts | --hpkp | --remote-settings | --suffix-list
+ -b branch
+
+EOF
+}
+
+PRODUCT="firefox"
+BRANCH=""
+PLATFORM_EXT="tar.bz2"
+UNPACK_CMD="tar jxf"
+CLOSED_TREE=false
+DONTBUILD=false
+APPROVAL=false
+COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>'
+REPODIR=''
+HGHOST="hg.mozilla.org"
+STAGEHOST="archive.mozilla.org"
+WGET="wget -nv"
+UNTAR="tar -zxf"
+DIFF="$(command -v diff) -u"
+BASEDIR="${HOME}"
+
+SCRIPTDIR="$(realpath "$(dirname "$0")")"
+HG="$(command -v hg)"
+DATADIR="${BASEDIR}/data"
+mkdir -p "${DATADIR}"
+
+USE_MC=false
+USE_TC=true
+JQ="$(command -v jq)"
+
+DO_HSTS=false
+HSTS_PRELOAD_SCRIPT="${SCRIPTDIR}/getHSTSPreloadList.js"
+HSTS_PRELOAD_ERRORS="nsSTSPreloadList.errors"
+HSTS_PRELOAD_INC_OLD="${DATADIR}/nsSTSPreloadList.inc"
+HSTS_PRELOAD_INC_NEW="${BASEDIR}/${PRODUCT}/nsSTSPreloadList.inc"
+HSTS_UPDATED=false
+
+DO_HPKP=false
+HPKP_PRELOAD_SCRIPT="${SCRIPTDIR}/genHPKPStaticPins.js"
+HPKP_PRELOAD_ERRORS="StaticHPKPins.errors"
+HPKP_PRELOAD_JSON="${DATADIR}/PreloadedHPKPins.json"
+HPKP_PRELOAD_INC="StaticHPKPins.h"
+HPKP_PRELOAD_INPUT="${DATADIR}/${HPKP_PRELOAD_INC}"
+HPKP_PRELOAD_OUTPUT="${DATADIR}/${HPKP_PRELOAD_INC}.out"
+HPKP_UPDATED=false
+
+DO_REMOTE_SETTINGS=false
+REMOTE_SETTINGS_SERVER=''
+REMOTE_SETTINGS_INPUT="${DATADIR}/remote-settings.in"
+REMOTE_SETTINGS_OUTPUT="${DATADIR}/remote-settings.out"
+REMOTE_SETTINGS_DIR="/services/settings/dumps"
+REMOTE_SETTINGS_UPDATED=false
+
+DO_SUFFIX_LIST=false
+GITHUB_SUFFIX_URL="https://raw.githubusercontent.com/publicsuffix/list/master/public_suffix_list.dat"
+GITHUB_SUFFIX_LOCAL="public_suffix_list.dat"
+HG_SUFFIX_LOCAL="effective_tld_names.dat"
+HG_SUFFIX_PATH="/netwerk/dns/${HG_SUFFIX_LOCAL}"
+SUFFIX_LIST_UPDATED=false
+
+ARTIFACTS_DIR="${ARTIFACTS_DIR:-.}"
+# Defaults
+HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}"
+HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}"
+REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}"
+SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}"
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+index_base="$TASKCLUSTER_ROOT_URL/api/index/v1"
+
+# Cleanup common artifacts.
+function preflight_cleanup {
+ cd "${BASEDIR}"
+ rm -rf "${PRODUCT}" tests "${BROWSER_ARCHIVE}" "${TESTS_ARCHIVE}"
+}
+
+function download_shared_artifacts_from_ftp {
+ cd "${BASEDIR}"
+
+ # Download everything we need to run js with xpcshell
+ echo "INFO: Downloading all the necessary pieces from ${STAGEHOST}..."
+ ARTIFACT_DIR="nightly/latest-${REPODIR}"
+ if [ "${USE_MC}" == "true" ]; then
+ ARTIFACT_DIR="nightly/latest-mozilla-central"
+ fi
+
+ BROWSER_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${BROWSER_ARCHIVE}"
+ TESTS_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${TESTS_ARCHIVE}"
+
+ echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+ ${WGET} "${BROWSER_ARCHIVE_URL}"
+ echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+ ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+function download_shared_artifacts_from_tc {
+ cd "${BASEDIR}"
+ TASKID_FILE="taskId.json"
+
+ # Download everything we need to run js with xpcshell
+ echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
+ TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.shippable.latest.${PRODUCT}.linux64-opt"
+ if [ "${USE_MC}" == "true" ]; then
+ TASKID_URL="$index_base/task/gecko.v2.mozilla-central.shippable.latest.${PRODUCT}.linux64-opt"
+ fi
+ ${WGET} -O ${TASKID_FILE} "${TASKID_URL}"
+ INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
+ if [ -z "${INDEX_TASK_ID}" ]; then
+ echo "Failed to look up taskId at ${TASKID_URL}"
+ exit 22
+ else
+ echo "INFO: Got taskId of $INDEX_TASK_ID"
+ fi
+
+ TASKSTATUS_FILE="taskstatus.json"
+ STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status"
+ ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
+ LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
+ echo "INFO: Examining run number ${LAST_RUN_INDEX}"
+
+ BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
+ echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+ ${WGET} "${BROWSER_ARCHIVE_URL}"
+
+ TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
+ echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+ ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+function unpack_artifacts {
+ cd "${BASEDIR}"
+ if [ ! -f "${BROWSER_ARCHIVE}" ]; then
+ echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2
+ exit 31
+ fi
+ if [ ! -f "${TESTS_ARCHIVE}" ]; then
+ echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2
+ exit 32
+ fi
+ # Unpack the browser and move xpcshell in place for updating the preload list.
+ echo "INFO: Unpacking resources..."
+ ${UNPACK_CMD} "${BROWSER_ARCHIVE}"
+ mkdir -p tests
+ cd tests
+ ${UNTAR} "../${TESTS_ARCHIVE}"
+ cd "${BASEDIR}"
+ cp tests/bin/xpcshell "${PRODUCT}"
+}
+
+# Downloads the current in-tree HSTS (HTTP Strict Transport Security) files.
+# Runs a simple xpcshell script to generate up-to-date HSTS information.
+# Compares the new HSTS output with the old to determine whether we need to update.
+function compare_hsts_files {
+ cd "${BASEDIR}"
+
+ HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC_OLD}")"
+
+ echo "INFO: Downloading existing include file..."
+ rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC_OLD}"
+ echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}"
+ ${WGET} -O "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_HG}"
+
+ if [ ! -f "${HSTS_PRELOAD_INC_OLD}" ]; then
+ echo "Downloaded file '${HSTS_PRELOAD_INC_OLD}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2
+ exit 41
+ fi
+
+ # Run the script to get an updated preload list.
+ echo "INFO: Generating new HSTS preload list..."
+ cd "${BASEDIR}/${PRODUCT}"
+ if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC_OLD}"; then
+ echo "HSTS preload list generation failed" >&2
+ exit 43
+ fi
+
+ # The created files should be non-empty.
+ echo "INFO: Checking whether new HSTS preload list is valid..."
+ if [ ! -s "${HSTS_PRELOAD_INC_NEW}" ]; then
+ echo "New HSTS preload list ${HSTS_PRELOAD_INC_NEW} is empty. That's less good." >&2
+ exit 42
+ fi
+ cd "${BASEDIR}"
+
+ # Check for differences
+ echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}"
+ ${DIFF} "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_NEW}" | tee "${HSTS_DIFF_ARTIFACT}"
+ if [ -s "${HSTS_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+# Downloads the current in-tree HPKP (HTTP public key pinning) files.
+# Runs a simple xpcshell script to generate up-to-date HPKP information.
+# Compares the new HPKP output with the old to determine whether we need to update.
+function compare_hpkp_files {
+ cd "${BASEDIR}"
+ HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")"
+
+ HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+ rm -f "${HPKP_PRELOAD_OUTPUT}"
+ ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}"
+ ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}"
+
+ # Run the script to get an updated preload list.
+ echo "INFO: Generating new HPKP preload list..."
+ cd "${BASEDIR}/${PRODUCT}"
+ if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}"; then
+ echo "HPKP preload list generation failed" >&2
+ exit 54
+ fi
+
+ # The created files should be non-empty.
+ echo "INFO: Checking whether new HPKP preload list is valid..."
+
+ if [ ! -s "${HPKP_PRELOAD_OUTPUT}" ]; then
+ echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2
+ exit 52
+ fi
+ if ! grep kPreloadPKPinsExpirationTime "${HPKP_PRELOAD_OUTPUT}"; then
+ echo "${HPKP_PRELOAD_OUTPUT} is missing an expiration time. Truncated?" >&2
+ exit 53
+ fi
+ cd "${BASEDIR}"
+
+ echo "INFO: diffing old/new HPKP preload lists..."
+ ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}"
+ if [ -s "${HPKP_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+function is_valid_xml {
+ xmlfile=$1
+ XMLLINT=$(command -v xmllint 2>/dev/null | head -n1)
+
+ if [ ! -x "${XMLLINT}" ]; then
+ echo "ERROR: xmllint not found in PATH"
+ exit 60
+ fi
+ ${XMLLINT} --nonet --noout "${xmlfile}"
+}
+
+# Downloads the public suffix list
+function compare_suffix_lists {
+ HG_SUFFIX_URL="${HGREPO}/raw-file/default/${HG_SUFFIX_PATH}"
+ cd "${BASEDIR}"
+
+ echo "INFO: ${WGET} -O ${GITHUB_SUFFIX_LOCAL} ${GITHUB_SUFFIX_URL}"
+ rm -f "${GITHUB_SUFFIX_LOCAL}"
+ ${WGET} -O "${GITHUB_SUFFIX_LOCAL}" "${GITHUB_SUFFIX_URL}"
+
+ echo "INFO: ${WGET} -O ${HG_SUFFIX_LOCAL} ${HG_SUFFIX_URL}"
+ rm -f "${HG_SUFFIX_LOCAL}"
+ ${WGET} -O "${HG_SUFFIX_LOCAL}" "${HG_SUFFIX_URL}"
+
+ echo "INFO: diffing in-tree suffix list against the suffix list from AMO..."
+ ${DIFF} ${GITHUB_SUFFIX_LOCAL} ${HG_SUFFIX_LOCAL} | tee "${SUFFIX_LIST_DIFF_ARTIFACT}"
+ if [ -s "${SUFFIX_LIST_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+function compare_remote_settings_files {
+ REMOTE_SETTINGS_SERVER="https://firefox.settings.services.mozilla.com/v1"
+
+ # 1. List remote settings collections from server.
+ echo "INFO: fetch remote settings list from server"
+ ${WGET} -qO- "${REMOTE_SETTINGS_SERVER}/buckets/monitor/collections/changes/records" |\
+ ${JQ} -r '.data[] | .bucket+"/"+.collection+"/"+(.last_modified|tostring)' |\
+ # 2. For each entry ${bucket, collection, last_modified}
+ while IFS="/" read -r bucket collection last_modified; do
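+ # Each input line looks like, e.g., "main/example-records/1700000000000",
+ # i.e. bucket/collection/last_modified (values illustrative).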
+
+ # 3. Download the dump from HG into REMOTE_SETTINGS_INPUT folder
+ hg_dump_url="${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${bucket}/${collection}.json"
+ local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+ mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}"
+ ${WGET} -qO "$local_location_input" "$hg_dump_url"
+ if [ $? -eq 8 ]; then
+ # We don't keep any dump for this collection, skip it.
+ # Try to clean up in case no collection in this bucket has dump.
+ rmdir "$REMOTE_SETTINGS_INPUT/${bucket}" --ignore-fail-on-non-empty
+ continue
+ fi
+
+ # 4. Download server version into REMOTE_SETTINGS_OUTPUT folder
+ remote_records_url="$REMOTE_SETTINGS_SERVER/buckets/${bucket}/collections/${collection}/changeset?_expected=${last_modified}"
+ local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+ mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}"
+ ${WGET} -qO- "$remote_records_url" | ${JQ} '{"data": .changes, "timestamp": .timestamp}' > "${local_location_output}"
+
+ # 5. Download attachments if needed.
+ if [ "${bucket}" = "blocklists" ] && [ "${collection}" = "addons-bloomfilters" ]; then
+ # Find the attachment with the most recent generation_time, like _updateMLBF in Blocklist.jsm.
+ # The server should return one "bloomfilter-base" record, but in case it returns multiple,
+ # return the most recent one. The server may send multiple entries if we ever decide to use
+ # the "filter_expression" feature of Remote Settings to send different records to specific
+ # channels. In that case this code should be updated to recognize the filter expression,
+ # but until we do, simply select the most recent record - can't go wrong with that.
+ # Note that "attachment_type" and "generation_time" are specific to addons-bloomfilters.
+ update_remote_settings_attachment "${bucket}" "${collection}" addons-mlbf.bin \
+ 'map(select(.attachment_type == "bloomfilter-base")) | sort_by(.generation_time) | last'
+ fi
+ # Here is an example to download an attachment with record identifier "ID":
+ # update_remote_settings_attachment "${bucket}" "${collection}" ID '.[] | select(.id == "ID")'
+ # NOTE: The downloaded data is not validated. xpcshell should be used for that.
+ done
+
+ echo "INFO: diffing old/new remote settings dumps..."
+ ${DIFF} -r "${REMOTE_SETTINGS_INPUT}" "${REMOTE_SETTINGS_OUTPUT}" > "${REMOTE_SETTINGS_DIFF_ARTIFACT}"
+ if [ -s "${REMOTE_SETTINGS_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+# Helper for compare_remote_settings_files to download attachments from remote settings.
+# The format and location is documented at:
+# https://firefox-source-docs.mozilla.org/services/common/services/RemoteSettings.html#packaging-attachments
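+# For reference, a dump's .meta.json is the raw record of its attachment,
+# roughly shaped like (illustrative, fields abbreviated):
+#   {"id": "...", "attachment": {"location": "...", "hash": "...", "size": 123}}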
+function update_remote_settings_attachment {
+ local bucket=$1
+ local collection=$2
+ local attachment_id=$3
+  # $4 is a jq filter over the array of records that should select exactly one
+  # record with an attachment.
+ local jq_attachment_selector=".data | map(select(.attachment)) | $4"
+
+ # These paths match _readAttachmentDump in services/settings/Attachments.jsm.
+ local path_to_attachment="${bucket}/${collection}/${attachment_id}"
+ local path_to_meta="${bucket}/${collection}/${attachment_id}.meta.json"
+ local old_meta="$REMOTE_SETTINGS_INPUT/${path_to_meta}"
+ local new_meta="$REMOTE_SETTINGS_OUTPUT/${path_to_meta}"
+
+  # These files should have been created by compare_remote_settings_files before
+  # this function is called.
+ local local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+ local local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+
+ # Compute the metadata based on already-downloaded records.
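+  # jq -c emits compact JSON and -j omits the trailing newline, so the generated
+  # files are byte-comparable with cmp below.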
+ mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}/${collection}"
+ ${JQ} -cj <"$local_location_input" "${jq_attachment_selector}" > "${old_meta}"
+ mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}"
+ ${JQ} -cj <"$local_location_output" "${jq_attachment_selector}" > "${new_meta}"
+
+ if cmp --silent "${old_meta}" "${new_meta}" ; then
+ # Metadata not changed, don't bother downloading the attachments themselves.
+ return
+ fi
+ # Metadata changed. Download attachments.
+
+ echo "INFO: Downloading updated remote settings dump: ${bucket}/${collection}/${attachment_id}"
+
+  # Overwrite old_meta with the actual file from the repo. The content should be
+  # equivalent, but may have minor differences (e.g. different line endings) if the
+  # checked-in file was not generated by this script (e.g. manually checked in).
+ ${WGET} -qO "${old_meta}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_meta}"
+
+ ${WGET} -qO "${REMOTE_SETTINGS_INPUT}/${path_to_attachment}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_attachment}"
+
+ if [ -z "${ATTACHMENT_BASE_URL}" ] ; then
+ ATTACHMENT_BASE_URL=$(${WGET} -qO- "${REMOTE_SETTINGS_SERVER}" | ${JQ} -r .capabilities.attachments.base_url)
+ fi
+ attachment_path_from_meta=$(${JQ} -r < "${new_meta}" .attachment.location)
+ ${WGET} -qO "${REMOTE_SETTINGS_OUTPUT}/${path_to_attachment}" "${ATTACHMENT_BASE_URL}${attachment_path_from_meta}"
+}
+
+# Clones the hg repo if needed, then pulls and updates to the default branch
+function clone_repo {
+ cd "${BASEDIR}"
+ if [ ! -d "${REPODIR}" ]; then
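+    # robustcheckout is Mozilla's hg extension for reliable clones; --sharebase
+    # points it at a shared local store so repeated runs avoid a full clone.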
+ ${HG} robustcheckout --sharebase /tmp/hg-store -b default "${HGREPO}" "${REPODIR}"
+ fi
+
+ ${HG} -R "${REPODIR}" pull
+ ${HG} -R "${REPODIR}" update -C default
+}
+
+# Copies the new HSTS file into place. The commit happens in the main flow below.
+function stage_hsts_files {
+ cd "${BASEDIR}"
+ cp -f "${HSTS_PRELOAD_INC_NEW}" "${REPODIR}/security/manager/ssl/"
+}
+
+function stage_hpkp_files {
+ cd "${BASEDIR}"
+ cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}"
+}
+
+function stage_remote_settings_files {
+ cd "${BASEDIR}"
+ cp -a "${REMOTE_SETTINGS_OUTPUT}"/* "${REPODIR}${REMOTE_SETTINGS_DIR}"
+}
+
+function stage_tld_suffix_files {
+ cd "${BASEDIR}"
+ cp -a "${GITHUB_SUFFIX_LOCAL}" "${REPODIR}/${HG_SUFFIX_PATH}"
+}
+
+# Submit the pending commit to Phabricator as a review request
+function push_repo {
+ cd "${REPODIR}"
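+  # Bail out unless Phabricator credentials (~/.arcrc), the arc CLI, and
+  # REVIEWERS are all available.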
+ if [ ! -r "${HOME}/.arcrc" ]
+ then
+ return 1
+ fi
+ if ! ARC=$(command -v arc)
+ then
+ return 1
+ fi
+ if [ -z "${REVIEWERS}" ]
+ then
+ return 1
+ fi
+  # Clean up older review requests: reduce a line like
+  # "Needs Review D624: No bug, Automated HSTS ..." to its revision ID, "D624".
+ for diff in $($ARC list | grep "Needs Review" | grep -E "${BRANCH} repo-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }')
+ do
+ echo "Removing old request $diff"
+ # There is no 'arc abandon', see bug 1452082
+ echo '{"transactions": [{"type":"abandon", "value": true}], "objectIdentifier": "'"${diff}"'"}' | $ARC call-conduit -- differential.revision.edit
+ done
+
+ $ARC diff --verbatim --reviewers "${REVIEWERS}"
+}
+
+
+
+# Main
+
+# Parse our command-line options.
+while [ $# -gt 0 ]; do
+ case "$1" in
+ -h) usage; exit 0 ;;
+ -p) PRODUCT="$2"; shift ;;
+ -b) BRANCH="$2"; shift ;;
+ -n) DRY_RUN=true ;;
+ -c) CLOSED_TREE=true ;;
+ -d) DONTBUILD=true ;;
+ -a) APPROVAL=true ;;
+ --pinset) DO_PRELOAD_PINSET=true ;;
+ --hsts) DO_HSTS=true ;;
+ --hpkp) DO_HPKP=true ;;
+ --remote-settings) DO_REMOTE_SETTINGS=true ;;
+ --suffix-list) DO_SUFFIX_LIST=true ;;
+ -r) REPODIR="$2"; shift ;;
+ --use-mozilla-central) USE_MC=true ;;
+ --use-ftp-builds) USE_TC=false ;;
+ -*) usage
+ exit 11 ;;
+ *) break ;; # terminate while loop
+ esac
+ shift
+done
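+
+# Example invocation (illustrative):
+#   ./periodic_file_updates.sh -b mozilla-central --hsts --hpkp -n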
+
+# Must supply a code branch to work with.
+if [ "${BRANCH}" == "" ]; then
+ echo "Error: You must specify a branch with -b branchname." >&2
+ usage
+ exit 12
+fi
+
+# Must choose at least one update action.
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_REMOTE_SETTINGS" == "false" ] && [ "$DO_SUFFIX_LIST" == "false" ]
+then
+ echo "Error: you must specify at least one action from: --hsts, --hpkp, --remote-settings, or --suffix-list" >&2
+ usage
+ exit 13
+fi
+
+# per-product constants
+case "${PRODUCT}" in
+ thunderbird)
+ COMMIT_AUTHOR="tbirdbld <tbirdbld@thunderbird.net>"
+ ;;
+ firefox)
+ ;;
+ *)
+ echo "Error: Invalid product specified"
+ usage
+ exit 14
+ ;;
+esac
+
+if [ "${REPODIR}" == "" ]; then
+ REPODIR="$(basename "${BRANCH}")"
+fi
+
+case "${BRANCH}" in
+ mozilla-central|comm-central|try )
+ HGREPO="https://${HGHOST}/${BRANCH}"
+ ;;
+ mozilla-*|comm-* )
+ HGREPO="https://${HGHOST}/releases/${BRANCH}"
+ ;;
+ * )
+ HGREPO="https://${HGHOST}/projects/${BRANCH}"
+ ;;
+esac
+
+BROWSER_ARCHIVE="target.${PLATFORM_EXT}"
+TESTS_ARCHIVE="target.common.tests.tar.gz"
+
+preflight_cleanup
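+# The HSTS/HPKP/pinset updates need the browser build artifacts; fetch them
+# from Taskcluster (default) or FTP (--use-ftp-builds).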
+if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ]
+then
+ if [ "${USE_TC}" == "true" ]; then
+ download_shared_artifacts_from_tc
+ else
+ download_shared_artifacts_from_ftp
+ fi
+ unpack_artifacts
+fi
+
+if [ "${DO_HSTS}" == "true" ]; then
+ if compare_hsts_files
+ then
+ HSTS_UPDATED=true
+ fi
+fi
+if [ "${DO_HPKP}" == "true" ]; then
+ if compare_hpkp_files
+ then
+ HPKP_UPDATED=true
+ fi
+fi
+if [ "${DO_REMOTE_SETTINGS}" == "true" ]; then
+ if compare_remote_settings_files
+ then
+ REMOTE_SETTINGS_UPDATED=true
+ fi
+fi
+if [ "${DO_SUFFIX_LIST}" == "true" ]; then
+ if compare_suffix_lists
+ then
+ SUFFIX_LIST_UPDATED=true
+ fi
+fi
+
+
+if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${REMOTE_SETTINGS_UPDATED}" == "false" ] && [ "${SUFFIX_LIST_UPDATED}" == "false" ]; then
+ echo "INFO: no updates required. Exiting."
+ exit 0
+else
+ if [ "${DRY_RUN}" == "true" ]; then
+ echo "INFO: Updates are available, not updating hg in dry-run mode."
+ exit 2
+ fi
+fi
+
+clone_repo
+
+COMMIT_MESSAGE="No Bug, ${BRANCH} repo-update"
+if [ "${HSTS_UPDATED}" == "true" ]
+then
+ stage_hsts_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} HSTS"
+fi
+
+if [ "${HPKP_UPDATED}" == "true" ]
+then
+ stage_hpkp_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} HPKP"
+fi
+
+if [ "${REMOTE_SETTINGS_UPDATED}" == "true" ]
+then
+ stage_remote_settings_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} remote-settings"
+fi
+
+if [ "${SUFFIX_LIST_UPDATED}" == "true" ]
+then
+ stage_tld_suffix_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} tld-suffixes"
+fi
+
+
+if [ "${DONTBUILD}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+fi
+if [ "${CLOSED_TREE}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+fi
+if [ "${APPROVAL}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=repo-update"
+fi
+
+
+if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}"
+then
+ push_repo
+fi
+
+echo "All done"
diff --git a/taskcluster/docker/periodic-updates/setup.sh b/taskcluster/docker/periodic-updates/setup.sh
new file mode 100755
index 0000000000..d05c60883c
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/setup.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -ve
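+# -v echoes each input line as it is read; -e aborts on the first failing command.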
+
+apt-get update -q
+apt-get install -y \
+    arcanist \
+    bzip2 \
+    curl \
+    jq \
+    libasound2 \
+    libdbus-glib-1-2 \
+    libgtk-3-0 \
+    libx11-xcb1 \
+    libxml2-utils \
+    libxt6 \
+    libxtst6 \
+    shellcheck \
+    unzip \
+    wget
+
+rm -rf /setup