summaryrefslogtreecommitdiffstats
path: root/taskcluster/docker/periodic-updates/scripts
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
commit43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree620249daf56c0258faa40cbdcf9cfba06de2a846 /taskcluster/docker/periodic-updates/scripts
parentInitial commit. (diff)
downloadfirefox-43a97878ce14b72f0981164f87f2e35e14151312.tar.xz
firefox-43a97878ce14b72f0981164f87f2e35e14151312.zip
Adding upstream version 110.0.1.upstream/110.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/docker/periodic-updates/scripts')
-rw-r--r--taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js674
-rw-r--r--taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js557
-rwxr-xr-xtaskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh618
3 files changed, 1849 insertions, 0 deletions
diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
new file mode 100644
index 0000000000..6984f54d26
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
@@ -0,0 +1,674 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// How to run this file:
+// 1. [obtain firefox source code]
+// 2. [build/obtain firefox binaries]
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \
+// [path to]/genHPKPStaticpins.js \
+// [absolute path to]/PreloadedHPKPins.json \
+// [absolute path to]/StaticHPKPins.h
"use strict";

// This script expects exactly two arguments (see the usage comment above):
// the input PreloadedHPKPins.json and the output StaticHPKPins.h.
if (arguments.length != 2) {
  throw new Error(
    "Usage: genHPKPStaticPins.js " +
      "<absolute path to PreloadedHPKPins.json> " +
      "<absolute path to StaticHPKPins.h>"
  );
}

var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
var { FileUtils } = ChromeUtils.importESModule(
  "resource://gre/modules/FileUtils.sys.mjs"
);

// Certificate database service, used to parse certificates and compute
// subjectPublicKeyInfo (SPKI) digests.
var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService(
  Ci.nsIX509CertDB
);

// Prefix used by Chrome's static file for raw sha256 pin entries.
const SHA256_PREFIX = "sha256/";
// Prefix applied to Chrome nicknames that have no matching built-in cert.
const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";

// Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable)
const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14;

// Boilerplate emitted at the top of the generated StaticHPKPins.h.
const FILE_HEADER =
  "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
  " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
  " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
  "\n" +
  "/*****************************************************************************/\n" +
  "/* This is an automatically generated file. If you're not */\n" +
  "/* PublicKeyPinningService.cpp, you shouldn't be #including it. */\n" +
  "/*****************************************************************************/\n" +
  "#include <stdint.h>" +
  "\n";

// C struct declaration for each generated domain-list entry.
const DOMAINHEADER =
  "/* Domainlist */\n" +
  "struct TransportSecurityPreload {\n" +
  " // See bug 1338873 about making these fields const.\n" +
  " const char* mHost;\n" +
  " bool mIncludeSubdomains;\n" +
  " bool mTestMode;\n" +
  " bool mIsMoz;\n" +
  " int32_t mId;\n" +
  " const StaticFingerprints* pinset;\n" +
  "};\n\n";

// C struct declaration for each generated pinset.
const PINSETDEF =
  "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" +
  "struct StaticFingerprints {\n" +
  " // See bug 1338873 about making these fields const.\n" +
  " size_t size;\n" +
  " const char* const* data;\n" +
  "};\n\n";

// Command-line arguments
// Parsed contents of PreloadedHPKPins.json (comments stripped).
var gStaticPins = parseJson(arguments[0]);

// Open the output file.
var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
file.initWithPath(arguments[1]);
// Safe output stream: the real file is only replaced on successful close.
var gFileOutputStream = FileUtils.openSafeFileOutputStream(file);
+
// Appends |string| to the global output stream for StaticHPKPins.h.
function writeString(string) {
  const byteCount = string.length;
  gFileOutputStream.write(string, byteCount);
}
+
// Reads the entire file at |filename| and returns its contents as a string.
function readFileToString(filename) {
  const localFile = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
  localFile.initWithPath(filename);
  const inputStream = Cc[
    "@mozilla.org/network/file-input-stream;1"
  ].createInstance(Ci.nsIFileInputStream);
  inputStream.init(localFile, -1, 0, 0);
  return NetUtil.readInputStreamToString(inputStream, inputStream.available());
}
+
// Removes lines that begin (after optional whitespace) with '//' and joins
// the remaining lines with no separator. Safe for JSON input, where no token
// may legally span a line break.
function stripComments(buf) {
  const commentLine = /^\s*\/\//;
  return buf
    .split("\n")
    .filter(line => !commentLine.test(line))
    .join("");
}
+
// Synchronously downloads |filename| (a URL) and returns its base64-decoded
// response body. Throws on network failure, non-200 status, or invalid
// base64 content.
function download(filename) {
  let req = new XMLHttpRequest();
  req.open("GET", filename, false); // doing the request synchronously
  try {
    req.send();
  } catch (e) {
    // Bug fix: the message previously contained the literal text
    // "$(unknown)" instead of interpolating the URL being fetched.
    throw new Error(`ERROR: problem downloading '${filename}': ${e}`);
  }

  if (req.status != 200) {
    throw new Error(
      "ERROR: problem downloading '" + filename + "': status " + req.status
    );
  }

  let resultDecoded;
  try {
    resultDecoded = atob(req.responseText);
  } catch (e) {
    throw new Error(
      "ERROR: could not decode data as base64 from '" + filename + "': " + e
    );
  }
  return resultDecoded;
}
+
// Downloads |filename| (base64-encoded), strips whole-line '//' comments,
// and parses the remainder as JSON. Throws if parsing fails.
function downloadAsJson(filename) {
  // we have to filter out '//' comments, while not mangling the json
  const stripped = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
  try {
    return JSON.parse(stripped);
  } catch (e) {
    throw new Error(
      "ERROR: could not parse data from '" + filename + "': " + e
    );
  }
}
+
// Returns the SHA-256 subjectPublicKeyInfo digest for a base64 DER
// certificate body.
function getSKDFromPem(pem) {
  const parsed = gCertDB.constructX509FromBase64(pem, pem.length);
  return parsed.sha256SubjectPublicKeyInfoDigest;
}
+
/**
 * Hashes |input| using the SHA-256 algorithm in the following manner:
 * btoa(sha256(atob(input)))
 *
 * @param {string} input Base64 string to decode and return the hash of.
 * @returns {string} Base64 encoded SHA-256 hash.
 */
function sha256Base64(input) {
  let raw;
  try {
    raw = atob(input);
  } catch (e) {
    throw new Error(`ERROR: could not decode as base64: '${input}': ${e}`);
  }

  // nsICryptoHash wants a plain array of byte values. convertToByteArray()
  // of nsIScriptableUnicodeConverter is the usual tool for this elsewhere in
  // the tree, but it doesn't seem to work here, so build the array by hand.
  const bytes = Array.from(raw, ch => ch.charCodeAt(0));

  const hasher = Cc["@mozilla.org/security/hash;1"].createInstance(
    Ci.nsICryptoHash
  );
  hasher.init(hasher.SHA256);
  hasher.update(bytes, bytes.length);

  // Passing true requests a Base64 encoded result.
  return hasher.finish(true);
}
+
// Downloads the static certs file and tries to map Google Chrome nicknames
// to Mozilla nicknames, as well as storing any hashes for pins for which we
// don't have root PEMs. Each entry consists of a line containing the name of
// the pin followed either by a hash in the format "sha256/" + base64(hash),
// a PEM encoded public key, or a PEM encoded certificate.
// For certificates that we have in our database,
// return a map of Google's nickname to ours. For ones that aren't return a
// map of Google's nickname to SHA-256 values. This code is modeled after agl's
// https://github.com/agl/transport-security-state-generate, which doesn't
// live in the Chromium repo because go is not an official language in
// Chromium.
// For all of the entries in this file:
// - If the entry has a hash format, find the Mozilla pin name (cert nickname)
// and stick the hash into certSKDToName
// - If the entry has a PEM format, parse the PEM, find the Mozilla pin name
// and stick the hash in certSKDToName
// We MUST be able to find a corresponding cert nickname for the Chrome names,
// otherwise we skip all pinsets referring to that Chrome name.
// NOTE: certNameToSKD and certSKDToName are also updated in place as a side
// effect. Returns [chromeNameToHash, chromeNameToMozName].
function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) {
  // Prefixes that we care about.
  const BEGIN_CERT = "-----BEGIN CERTIFICATE-----";
  const END_CERT = "-----END CERTIFICATE-----";
  const BEGIN_PUB_KEY = "-----BEGIN PUBLIC KEY-----";
  const END_PUB_KEY = "-----END PUBLIC KEY-----";

  // Parsing states: the file is a sequence of (name, payload) pairs, where
  // the payload is a hash line, a PEM certificate, or a PEM public key.
  const PRE_NAME = 0;
  const POST_NAME = 1;
  const IN_CERT = 2;
  const IN_PUB_KEY = 3;
  let state = PRE_NAME;

  let lines = download(filename).split("\n");
  let pemCert = "";
  let pemPubKey = "";
  let hash = "";
  let chromeNameToHash = {};
  let chromeNameToMozName = {};
  let chromeName;
  for (let line of lines) {
    // Skip comments and newlines.
    if (!line.length || line[0] == "#") {
      continue;
    }
    switch (state) {
      case PRE_NAME:
        // The first non-comment line of an entry is its Chrome nickname.
        chromeName = line;
        state = POST_NAME;
        break;
      case POST_NAME:
        if (line.startsWith(SHA256_PREFIX)) {
          // Raw hash entry: record it under the Chrome nickname directly.
          hash = line.substring(SHA256_PREFIX.length);
          chromeNameToHash[chromeName] = hash;
          certNameToSKD[chromeName] = hash;
          certSKDToName[hash] = chromeName;
          state = PRE_NAME;
        } else if (line.startsWith(BEGIN_CERT)) {
          state = IN_CERT;
        } else if (line.startsWith(BEGIN_PUB_KEY)) {
          state = IN_PUB_KEY;
        } else if (
          chromeName == "PinsListTimestamp" &&
          line.match(/^[0-9]+$/)
        ) {
          // If the name of this entry is "PinsListTimestamp", this line should
          // be the pins list timestamp. It should consist solely of digits.
          // Ignore it and expect other entries to come.
          state = PRE_NAME;
        } else {
          throw new Error(
            "ERROR: couldn't parse Chrome certificate file line: " + line
          );
        }
        break;
      case IN_CERT:
        if (line.startsWith(END_CERT)) {
          state = PRE_NAME;
          // Map the certificate's SPKI digest back to a Mozilla nickname if
          // we have this root; otherwise invent a GOOGLE_PIN_-prefixed name.
          hash = getSKDFromPem(pemCert);
          pemCert = "";
          let mozName;
          if (hash in certSKDToName) {
            mozName = certSKDToName[hash];
          } else {
            // Not one of our built-in certs. Prefix the name with
            // GOOGLE_PIN_.
            mozName = GOOGLE_PIN_PREFIX + chromeName;
            dump(
              "Can't find hash in builtin certs for Chrome nickname " +
                chromeName +
                ", inserting " +
                mozName +
                "\n"
            );
            certSKDToName[hash] = mozName;
            certNameToSKD[mozName] = hash;
          }
          chromeNameToMozName[chromeName] = mozName;
        } else {
          // Accumulate the PEM body (base64 lines) until END_CERT.
          pemCert += line;
        }
        break;
      case IN_PUB_KEY:
        if (line.startsWith(END_PUB_KEY)) {
          state = PRE_NAME;
          // Public keys are hashed directly; there is no cert to look up.
          hash = sha256Base64(pemPubKey);
          pemPubKey = "";
          chromeNameToHash[chromeName] = hash;
          certNameToSKD[chromeName] = hash;
          certSKDToName[hash] = chromeName;
        } else {
          pemPubKey += line;
        }
        break;
      default:
        // Unreachable unless a new state is added without a case.
        throw new Error(
          "ERROR: couldn't parse Chrome certificate file " + line
        );
    }
  }
  return [chromeNameToHash, chromeNameToMozName];
}
+
// We can only import pinsets from chrome if for every name in the pinset:
// - We have a hash from Chrome's static certificate file
// - We have a builtin cert
// If the pinset meets these requirements, we store a map array of pinset
// objects:
// {
// pinset_name : {
// // Array of names with entries in certNameToSKD
// sha256_hashes: []
// }
// }
// and an array of imported pinset entries:
// { name: string, include_subdomains: boolean, test_mode: boolean,
// pins: pinset_name }
function downloadAndParseChromePins(
  filename,
  chromeNameToHash,
  chromeNameToMozName,
  certNameToSKD,
  certSKDToName
) {
  let chromePreloads = downloadAsJson(filename);
  let chromePins = chromePreloads.pinsets;
  let chromeImportedPinsets = {};
  let chromeImportedEntries = [];

  chromePins.forEach(function(pin) {
    let valid = true;
    let pinset = { name: pin.name, sha256_hashes: [] };
    // Translate the Chrome pinset format to ours
    pin.static_spki_hashes.forEach(function(name) {
      if (name in chromeNameToHash) {
        // Hash-format entry: store our nickname for the hash.
        let hash = chromeNameToHash[name];
        pinset.sha256_hashes.push(certSKDToName[hash]);

        // We should have already added hashes for all of these when we
        // imported the certificate file.
        if (!certNameToSKD[name]) {
          throw new Error("ERROR: No hash for name: " + name);
        }
      } else if (name in chromeNameToMozName) {
        // PEM-format entry: use the Mozilla (or GOOGLE_PIN_) nickname.
        pinset.sha256_hashes.push(chromeNameToMozName[name]);
      } else {
        // A name we couldn't resolve poisons the whole pinset.
        dump(
          "Skipping Chrome pinset " +
            pinset.name +
            ", couldn't find " +
            "builtin " +
            name +
            " from cert file\n"
        );
        valid = false;
      }
    });
    if (valid) {
      chromeImportedPinsets[pinset.name] = pinset;
    }
  });

  // Grab the domain entry lists. Chrome's entry format is similar to
  // ours, except theirs includes a HSTS mode.
  const cData = gStaticPins.chromium_data;
  let entries = chromePreloads.entries;
  entries.forEach(function(entry) {
    // HSTS entry only
    if (!entry.pins) {
      return;
    }
    // Allow the pins file to substitute one of our pinsets for Chrome's.
    let pinsetName = cData.substitute_pinsets[entry.pins];
    if (!pinsetName) {
      pinsetName = entry.pins;
    }

    // We trim the entry name here to avoid breaking hostname comparisons in the
    // HPKP implementation.
    entry.name = entry.name.trim();

    let isProductionDomain = cData.production_domains.includes(entry.name);
    let isProductionPinset = cData.production_pinsets.includes(pinsetName);
    let excludeDomain = cData.exclude_domains.includes(entry.name);
    let isTestMode = !isProductionPinset && !isProductionDomain;
    // NOTE(review): this checks the original Chrome pinset name
    // (entry.pins), not the substituted pinsetName, against the imported
    // pinsets — presumably intentional since substitutes are our own
    // pinsets, but confirm.
    if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) {
      chromeImportedEntries.push({
        name: entry.name,
        include_subdomains: entry.include_subdomains,
        test_mode: isTestMode,
        is_moz: false,
        pins: pinsetName,
      });
    }
  });
  return [chromeImportedPinsets, chromeImportedEntries];
}
+
// Builds a bidirectional mapping between certificate nicknames and SHA-256
// digests of their subjectPublicKeyInfo, covering: built-in NSS roots, the
// extra certificates supplied by the pins file, and the static pinning test
// certificate. Returns [certNameToSKD, certSKDToName].
function loadNSSCertinfo(extraCertificates) {
  let certNameToSKD = {};
  let certSKDToName = {};
  const remember = (name, skd) => {
    certNameToSKD[name] = skd;
    certSKDToName[skd] = name;
  };

  for (let cert of gCertDB.getCerts()) {
    // Only built-in trust-store roots are eligible.
    if (cert.isBuiltInRoot) {
      remember(cert.displayName, cert.sha256SubjectPublicKeyInfoDigest);
    }
  }

  for (let cert of extraCertificates) {
    remember(cert.commonName, cert.sha256SubjectPublicKeyInfoDigest);
  }

  // This is the pinning test certificate. The key hash identifies the
  // default RSA key from pykey.
  remember(
    "End Entity Test Cert",
    "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8="
  );

  return [certNameToSKD, certSKDToName];
}
+
// Reads |filename|, drops '//' comment lines, and parses the rest as JSON.
function parseJson(filename) {
  return JSON.parse(stripComments(readFileToString(filename)));
}
+
// Converts a certificate nickname into a valid C identifier of the form
// k<Name>Fingerprint.
function nameToAlias(certName) {
  // Remove non-ASCII characters. Bug fix: the previous pattern used the
  // POSIX class [[:ascii:]], which JavaScript regexes do not support — it
  // parsed as "any char not in {[,:,a,s,c,i} followed by a literal ']'" and
  // could collapse two characters into one underscore.
  certName = certName.replace(/[^\x00-\x7F]/g, "_");
  // Replace remaining non-word characters.
  certName = certName.replace(/[^A-Za-z0-9]/g, "_");

  return "k" + certName + "Fingerprint";
}
+
// Sort comparator ordering objects by their `name` property using
// locale-aware string comparison.
function compareByName(first, second) {
  const leftName = first.name;
  const rightName = second.name;
  return leftName.localeCompare(rightName);
}
+
// Emits the C declaration of the moment (in PRTime microseconds) at which
// this generated pin list should no longer be used: now plus the minimum
// required max-age.
function genExpirationTime() {
  const nowMillis = Date.now();
  const expirationMillis =
    nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000;
  const expirationMicros = expirationMillis * 1000;
  return `static const PRTime kPreloadPKPinsExpirationTime = INT64_C(${expirationMicros});\n`;
}
+
// Emits the C declarations for one pinset after validating that it actually
// contains hashes.
function writeFullPinset(certNameToSKD, certSKDToName, pinset) {
  const hashes = pinset.sha256_hashes;
  if (!hashes || !hashes.length) {
    throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`);
  }
  writeFingerprints(certNameToSKD, certSKDToName, pinset.name, hashes);
}
+
// Emits the kPinset_<name>_Data array (fingerprint aliases sorted by digest
// value) and its kPinset_<name> StaticFingerprints wrapper.
function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) {
  const varPrefix = "kPinset_" + name;
  writeString("static const char* const " + varPrefix + "_Data[] = {\n");
  const digests = hashes.map(certName => {
    if (!(certName in certNameToSKD)) {
      throw new Error(`ERROR: Can't find '${certName}' in certNameToSKD`);
    }
    return certNameToSKD[certName];
  });
  digests.sort();
  for (const digest of digests) {
    writeString(" " + nameToAlias(certSKDToName[digest]) + ",\n");
  }
  if (!hashes.length) {
    // ANSI C requires that an initialiser list be non-empty.
    writeString(" 0\n");
  }
  writeString("};\n");
  writeString(
    "static const StaticFingerprints " +
      varPrefix +
      " = {\n " +
      "sizeof(" +
      varPrefix +
      "_Data) / sizeof(const char*),\n " +
      varPrefix +
      "_Data\n};\n\n"
  );
}
+
// Emits one TransportSecurityPreload initializer line for |entry|. Fields in
// order: host name, include_subdomains, test_mode, is_moz, histogram id,
// pointer to the entry's pinset.
function writeEntry(entry) {
  let printVal = ` { "${entry.name}", `;
  if (entry.include_subdomains) {
    printVal += "true, ";
  } else {
    printVal += "false, ";
  }
  // Default to test mode if not specified.
  let testMode = true;
  if (entry.hasOwnProperty("test_mode")) {
    testMode = entry.test_mode;
  }
  if (testMode) {
    printVal += "true, ";
  } else {
    printVal += "false, ";
  }
  // mIsMoz: explicitly flagged, or any "mozilla" pinset except mozilla_test.
  if (
    entry.is_moz ||
    (entry.pins.includes("mozilla") && entry.pins != "mozilla_test")
  ) {
    printVal += "true, ";
  } else {
    printVal += "false, ";
  }
  if ("id" in entry) {
    // Ids feed a telemetry histogram with 256 buckets.
    if (entry.id >= 256) {
      throw new Error("ERROR: Not enough buckets in histogram");
    }
    // NOTE(review): a present-but-negative id emits no field at all, which
    // would yield a malformed initializer — presumably ids are always >= 0;
    // confirm against the pins file.
    if (entry.id >= 0) {
      printVal += entry.id + ", ";
    }
  } else {
    printVal += "-1, ";
  }
  printVal += "&kPinset_" + entry.pins;
  printVal += " },\n";
  writeString(printVal);
}
+
// Writes the kPublicKeyPinningPreloadList array: Mozilla's own entries plus
// the imported Chrome entries, with Chrome entries that duplicate a
// Mozilla-pinned domain dropped. Entries are sorted by name so the C++ side
// can binary-search them.
// NOTE: mutates both chromeImportedEntries (splice) and gStaticPins.entries
// (push/sort) in place.
function writeDomainList(chromeImportedEntries) {
  writeString("/* Sort hostnames for binary search. */\n");
  writeString(
    "static const TransportSecurityPreload " +
      "kPublicKeyPinningPreloadList[] = {\n"
  );
  let count = 0;
  let mozillaDomains = {};
  gStaticPins.entries.forEach(function(entry) {
    mozillaDomains[entry.name] = true;
  });
  // For any domain for which we have set pins, exclude them from
  // chromeImportedEntries.
  // Iterate backwards so splicing doesn't disturb unvisited indices.
  for (let i = chromeImportedEntries.length - 1; i >= 0; i--) {
    if (mozillaDomains[chromeImportedEntries[i].name]) {
      dump(
        "Skipping duplicate pinset for domain " +
          JSON.stringify(chromeImportedEntries[i], undefined, 2) +
          "\n"
      );
      chromeImportedEntries.splice(i, 1);
    }
  }
  let sortedEntries = gStaticPins.entries;
  sortedEntries.push.apply(sortedEntries, chromeImportedEntries);
  for (let entry of sortedEntries.sort(compareByName)) {
    count++;
    writeEntry(entry);
  }
  writeString("};\n");

  writeString("\n// Pinning Preload List Length = " + count + ";\n");
  writeString("\nstatic const int32_t kUnknownId = -1;\n");
}
+
// Generates the complete StaticHPKPins.h: the license/header boilerplate,
// one fingerprint constant per used certificate, every pinset (ours first,
// then non-duplicate Chrome ones), the domain list, and the expiration time.
function writeFile(
  certNameToSKD,
  certSKDToName,
  chromeImportedPinsets,
  chromeImportedEntries
) {
  // Compute used pins from both Chrome's and our pinsets, so we can output
  // them later.
  let usedFingerprints = {};
  let mozillaPins = {};
  gStaticPins.pinsets.forEach(function(pinset) {
    mozillaPins[pinset.name] = true;
    pinset.sha256_hashes.forEach(function(name) {
      usedFingerprints[name] = true;
    });
  });
  for (let key in chromeImportedPinsets) {
    let pinset = chromeImportedPinsets[key];
    pinset.sha256_hashes.forEach(function(name) {
      usedFingerprints[name] = true;
    });
  }

  writeString(FILE_HEADER);

  // Write actual fingerprints, sorted by certificate nickname.
  Object.keys(usedFingerprints)
    .sort()
    .forEach(function(certName) {
      if (certName) {
        writeString("/* " + certName + " */\n");
        writeString("static const char " + nameToAlias(certName) + "[] =\n");
        writeString(' "' + certNameToSKD[certName] + '";\n');
        writeString("\n");
      }
    });

  // Write the pinsets
  writeString(PINSETDEF);
  writeString("/* PreloadedHPKPins.json pinsets */\n");
  gStaticPins.pinsets.sort(compareByName).forEach(function(pinset) {
    writeFullPinset(certNameToSKD, certSKDToName, pinset);
  });
  writeString("/* Chrome static pinsets */\n");
  // Mozilla's pinset definitions win over Chrome's on name collisions.
  for (let key in chromeImportedPinsets) {
    if (mozillaPins[key]) {
      dump("Skipping duplicate pinset " + key + "\n");
    } else {
      dump("Writing pinset " + key + "\n");
      writeFullPinset(certNameToSKD, certSKDToName, chromeImportedPinsets[key]);
    }
  }

  // Write the domainlist entries.
  writeString(DOMAINHEADER);
  writeDomainList(chromeImportedEntries);
  writeString("\n");
  writeString(genExpirationTime());
}
+
// Parses each base64 DER certificate string in |certStringList| into an
// nsIX509Cert and returns the resulting array.
function loadExtraCertificates(certStringList) {
  return certStringList.map(certString =>
    gCertDB.constructX509FromBase64(certString)
  );
}
+
// --- Main script body ---
// Build the nickname<->SPKI-digest maps from the built-in roots plus any
// extra certificates listed in the pins file.
var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates);
var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates);
// Import Chrome's certificate nicknames, pinsets and preload entries.
var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts(
  gStaticPins.chromium_data.cert_file_url,
  certNameToSKD,
  certSKDToName
);
var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins(
  gStaticPins.chromium_data.json_file_url,
  chromeNameToHash,
  chromeNameToMozName,
  certNameToSKD,
  certSKDToName
);

// Generate the header.
writeFile(
  certNameToSKD,
  certSKDToName,
  chromeImportedPinsets,
  chromeImportedEntries
);

// Flush and atomically replace the output file.
FileUtils.closeSafeFileOutputStream(gFileOutputStream);
diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
new file mode 100644
index 0000000000..37f5ab9221
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
@@ -0,0 +1,557 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";

// How to run this file:
// 1. [obtain firefox source code]
// 2. [build/obtain firefox binaries]
// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadlist.js [absolute path to]/nsSTSPreloadlist.inc'
// Note: Running this file outputs a new nsSTSPreloadlist.inc in the current
// working directory.

// Site security service, used to parse Strict-Transport-Security headers.
var gSSService = Cc["@mozilla.org/ssservice;1"].getService(
  Ci.nsISiteSecurityService
);

const { FileUtils } = ChromeUtils.importESModule(
  "resource://gre/modules/FileUtils.sys.mjs"
);

// Chrome's preload list; ?format=TEXT makes gitiles return it base64-encoded.
const SOURCE =
  "https://chromium.googlesource.com/chromium/src/+/refs/heads/main/net/http/transport_security_state_static.json?format=TEXT";
// Sent as X-Automated-Tool so probed sites can identify this crawler.
const TOOL_SOURCE =
  "https://hg.mozilla.org/mozilla-central/file/default/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js";
const OUTPUT = "nsSTSPreloadList.inc";
// Minimum max-age (18 weeks, in seconds) for a host to qualify for preload.
const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18;
// Cap on in-flight probe requests; more than this causes mass timeouts.
const MAX_CONCURRENT_REQUESTS = 500;
const MAX_RETRIES = 1;
const REQUEST_TIMEOUT = 30 * 1000;
const ERROR_NONE = "no error";
const ERROR_CONNECTING_TO_HOST = "could not connect to host";
const ERROR_NO_HSTS_HEADER = "did not receive HSTS header";
const ERROR_MAX_AGE_TOO_LOW = "max-age too low: ";
// Boilerplate emitted at the top of the generated nsSTSPreloadList.inc.
const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/*****************************************************************************/
/* This is an automatically generated file. If you're not */
/* nsSiteSecurityService.cpp, you shouldn't be #including it. */
/*****************************************************************************/

#include <stdint.h>
`;

// Section delimiter understood by gperf.
const GPERF_DELIM = "%%\n";
+
// Fetches Chrome's preload JSON from SOURCE (served base64-encoded by
// gitiles), decodes it, strips whole-line '//' comments, and returns the
// parsed object. Throws on any network, decode, or parse failure.
function download() {
  const request = new XMLHttpRequest();
  request.open("GET", SOURCE, false); // doing the request synchronously
  try {
    request.send();
  } catch (e) {
    throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`);
  }

  if (request.status != 200) {
    throw new Error(
      `ERROR: problem downloading '${SOURCE}': status ${request.status}`
    );
  }

  let decoded;
  try {
    decoded = atob(request.responseText);
  } catch (e) {
    throw new Error(
      `ERROR: could not decode data as base64 from '${SOURCE}': ${e}`
    );
  }

  // we have to filter out '//' comments, while not mangling the json
  const withoutComments = decoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
  try {
    return JSON.parse(withoutComments);
  } catch (e) {
    throw new Error(`ERROR: could not parse data from '${SOURCE}': ${e}`);
  }
}
+
// Extracts the force-https entries from Chrome's preload JSON, normalizing
// each one in place: trimmed name, a retry budget, and a camelCase
// includeSubdomains flag. Throws if the data is malformed.
function getHosts(rawdata) {
  if (!rawdata || !rawdata.entries) {
    throw new Error(
      "ERROR: source data not formatted correctly: 'entries' not found"
    );
  }

  const hosts = [];
  for (const entry of rawdata.entries) {
    if (!entry.mode || entry.mode != "force-https") {
      continue;
    }
    if (!entry.name) {
      throw new Error("ERROR: entry not formatted correctly: no name found");
    }
    // We trim the entry name here to avoid malformed URI exceptions when we
    // later try to connect to the domain.
    entry.name = entry.name.trim();
    entry.retries = MAX_RETRIES;
    // We prefer the camelCase variable to the JSON's snake case version
    entry.includeSubdomains = entry.include_subdomains;
    hosts.push(entry);
  }

  return hosts;
}
+
// Evaluates the STS header (if any) received from |host| and returns a
// status record { name, maxAge, includeSubdomains, error, retries,
// forceInclude }. A host qualifies only if its header parses and the
// max-age is at least MINIMUM_REQUIRED_MAX_AGE.
function processStsHeader(host, header, status, securityInfo) {
  // Out-params filled in by gSSService.processHeader below.
  let maxAge = {
    value: 0,
  };
  let includeSubdomains = {
    value: false,
  };
  let error = ERROR_NONE;
  // Only process the header if we got one over a connection without an
  // overridable certificate error.
  if (
    header != null &&
    securityInfo != null &&
    securityInfo.overridableErrorCategory ==
      Ci.nsITransportSecurityInfo.ERROR_UNSET
  ) {
    try {
      let uri = Services.io.newURI("https://" + host.name);
      gSSService.processHeader(uri, header, {}, maxAge, includeSubdomains);
    } catch (e) {
      dump(
        "ERROR: could not process header '" +
          header +
          "' from " +
          host.name +
          ": " +
          e +
          "\n"
      );
      // The caught exception itself becomes the error value.
      error = e;
    }
  } else if (status == 0) {
    // An HTTP status of 0 means the connection itself failed.
    error = ERROR_CONNECTING_TO_HOST;
  } else {
    error = ERROR_NO_HSTS_HEADER;
  }

  if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) {
    error = ERROR_MAX_AGE_TOO_LOW;
  }

  return {
    name: host.name,
    maxAge: maxAge.value,
    includeSubdomains: includeSubdomains.value,
    error,
    // One retry is consumed by every probe.
    retries: host.retries - 1,
    forceInclude: host.forceInclude,
  };
}
+
// RedirectAndAuthStopper prevents redirects and HTTP authentication: probes
// must observe the STS header of the exact host requested, not a redirect
// target, and must never answer auth prompts.
function RedirectAndAuthStopper() {}

RedirectAndAuthStopper.prototype = {
  // nsIChannelEventSink
  // Throwing here vetoes any redirect the channel attempts.
  asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
    throw Components.Exception("", Cr.NS_ERROR_ENTITY_CHANGED);
  },

  // nsIAuthPrompt2
  // Refuse to supply credentials.
  promptAuth(channel, level, authInfo) {
    return false;
  },

  asyncPromptAuth(channel, callback, context, level, authInfo) {
    throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED);
  },

  // nsIInterfaceRequestor: hand back ourselves for the interfaces above.
  getInterface(iid) {
    return this.QueryInterface(iid);
  },

  QueryInterface: ChromeUtils.generateQI([
    "nsIChannelEventSink",
    "nsIAuthPrompt2",
  ]),
};
+
// Issues one HEAD request to https://<host.name>/ and resolves (never
// rejects) with the processed STS status record for that host. Network
// failures, timeouts and aborts all funnel into processStsHeader with a
// null header.
function fetchstatus(host) {
  return new Promise((resolve, reject) => {
    let xhr = new XMLHttpRequest();
    let uri = "https://" + host.name + "/";

    // NOTE(review): the method is lowercase "head" — presumably relying on
    // XHR's case-insensitive normalization of known methods; confirm.
    xhr.open("head", uri, true);
    xhr.setRequestHeader("X-Automated-Tool", TOOL_SOURCE);
    xhr.timeout = REQUEST_TIMEOUT;

    let errorHandler = () => {
      dump("ERROR: exception making request to " + host.name + "\n");
      resolve(
        processStsHeader(
          host,
          null,
          xhr.status,
          xhr.channel && xhr.channel.securityInfo
        )
      );
    };

    xhr.onerror = errorHandler;
    xhr.ontimeout = errorHandler;
    xhr.onabort = errorHandler;

    xhr.onload = () => {
      let header = xhr.getResponseHeader("strict-transport-security");
      resolve(
        processStsHeader(host, header, xhr.status, xhr.channel.securityInfo)
      );
    };

    // Block redirects and auth prompts (see RedirectAndAuthStopper).
    xhr.channel.notificationCallbacks = new RedirectAndAuthStopper();
    xhr.send();
  });
}
+
// Probes |host| repeatedly until the result is final (see shouldRetry) and
// returns the last status record.
async function getHSTSStatus(host) {
  let latest = host;
  do {
    latest = await fetchstatus(latest);
  } while (shouldRetry(latest));
  return latest;
}
+
// Sort comparator ordering status records by host name (plain code-unit
// comparison, matching the generated list's binary-search order).
function compareHSTSStatus(a, b) {
  if (a.name == b.name) {
    return 0;
  }
  return a.name > b.name ? 1 : -1;
}
+
// Writes all of |string| to the output stream |fos|.
function writeTo(string, fos) {
  const byteCount = string.length;
  fos.write(string, byteCount);
}
+
// Determines and returns a string representing a declaration of when this
// preload list should no longer be used: the current time plus
// MINIMUM_REQUIRED_MAX_AGE, expressed in PRTime microseconds.
function getExpirationTimeString() {
  const nowMillis = Date.now();
  // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds,
  // then to microseconds for PRTime.
  const expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000;
  const expirationMicros = expirationMillis * 1000;
  return `const PRTime gPreloadListExpirationTime = INT64_C(${expirationMicros});\n`;
}
+
// A host is probed again only while it has retry budget left and its last
// result was a transient failure (i.e. not a definitive header verdict).
function shouldRetry(response) {
  return (
    response.retries > 0 &&
    response.error != ERROR_NO_HSTS_HEADER &&
    response.error != ERROR_MAX_AGE_TOO_LOW &&
    response.error != ERROR_NONE
  );
}
+
// Copied from browser/components/migration/MigrationUtils.sys.mjs
// Synchronously waits for |promise| by spinning the event loop, then returns
// its resolution value or throws its rejection reason. Non-promise inputs
// are returned unchanged.
function spinResolve(promise) {
  if (!(promise instanceof Promise)) {
    return promise;
  }
  let done = false;
  let result = null;
  let error = null;
  // The .then() after .catch() runs in both the fulfilled and rejected
  // cases, so `done` is always set eventually.
  promise
    .catch(e => {
      error = e;
    })
    .then(r => {
      result = r;
      done = true;
    });

  Services.tm.spinEventLoopUntil(
    "getHSTSPreloadList.js:spinResolve",
    () => done
  );
  if (error) {
    throw error;
  } else {
    return result;
  }
}
+
// Probes every host in |inHosts| and returns the array of resulting status
// records. NOTE: consumes |inHosts| destructively via shift().
async function probeHSTSStatuses(inHosts) {
  let totalLength = inHosts.length;
  dump("Examining " + totalLength + " hosts.\n");

  // Make requests in batches of MAX_CONCURRENT_REQUESTS. Otherwise, we have
  // too many in-flight requests and the time it takes to process them causes
  // them all to time out.
  let allResults = [];
  while (inHosts.length) {
    let promises = [];
    for (let i = 0; i < MAX_CONCURRENT_REQUESTS && inHosts.length; i++) {
      let host = inHosts.shift();
      promises.push(getHSTSStatus(host));
    }
    // Wait for the whole batch before starting the next one.
    let results = await Promise.all(promises);
    let progress = (
      (100 * (totalLength - inHosts.length)) /
      totalLength
    ).toFixed(2);
    dump(progress + "% done\n");
    allResults = allResults.concat(results);
  }

  dump("HSTS Probe received " + allResults.length + " statuses.\n");
  return allResults;
}
+
// Parses the existing nsSTSPreloadList.inc at |filename| and returns a map
// of host name -> includeSubdomains (boolean). Understands the current (v3)
// entry format as well as the two older formats, so a new-format list can be
// bootstrapped from an old-format one.
function readCurrentList(filename) {
  var currentHosts = {};
  var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
  file.initWithPath(filename);
  var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
    Ci.nsILineInputStream
  );
  fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF);
  var line = {};

  // While we generate entries matching the latest version format,
  // we still need to be able to read entries in the previous version formats
  // for bootstrapping a latest version preload list from a previous version
  // preload list. Hence these regexes.
  // Group 1 captures the host name, group 2 the includeSubdomains flag.
  const entryRegexes = [
    /([^,]+), (0|1)/, // v3
    / {2}\/\* "([^"]*)", (true|false) \*\//, // v2
    / {2}{ "([^"]*)", (true|false) },/, // v1
  ];

  while (fis.readLine(line)) {
    let match;
    // find() stops at the first regex that matches, leaving its result in
    // `match`.
    entryRegexes.find(r => {
      match = r.exec(line.value);
      return match;
    });
    if (match) {
      currentHosts[match[1]] = match[2] == "1" || match[2] == "true";
    }
  }
  return currentHosts;
}
+
// Appends to |newHosts| (in place) an entry for every host present in
// |currentHosts| but absent from |newHosts|, so previously-preloaded hosts
// are re-probed rather than silently dropped.
function combineLists(newHosts, currentHosts) {
  const knownNames = new Set(newHosts.map(host => host.name));

  for (const name in currentHosts) {
    if (!knownNames.has(name)) {
      newHosts.push({ name, retries: MAX_RETRIES });
    }
  }
}
+
// Reserved *.preloaded.test hosts that are always force-included in the
// generated list for automated tests and must never be probed.
const TEST_ENTRIES = [
  {
    name: "includesubdomains.preloaded.test",
    includeSubdomains: true,
  },
  {
    name: "includesubdomains2.preloaded.test",
    includeSubdomains: true,
  },
  {
    name: "noincludesubdomains.preloaded.test",
    includeSubdomains: false,
  },
];
+
// Removes the reserved test hosts from |currentHosts| (in place) so we never
// attempt to connect to them.
function deleteTestHosts(currentHosts) {
  TEST_ENTRIES.forEach(entry => {
    delete currentHosts[entry.name];
  });
}
+
// Returns synthetic, already-final status records for the reserved test
// hosts.
function getTestHosts() {
  return TEST_ENTRIES.map(entry => ({
    name: entry.name,
    maxAge: MINIMUM_REQUIRED_MAX_AGE,
    includeSubdomains: entry.includeSubdomains,
    error: ERROR_NONE,
    // Deliberately no `retries` property: we must never attempt to connect
    // to these hosts.
    forceInclude: true,
  }));
}
+
+// Append every entry of inAddedHosts to inoutHostList (mutates the list
+// in place).
+// NOTE(review): nothing here is asynchronous and the callers in main()
+// do not await the returned promise — the `async` qualifier looks
+// unnecessary; confirm before removing.
+async function insertHosts(inoutHostList, inAddedHosts) {
+  for (let host of inAddedHosts) {
+    inoutHostList.push(host);
+  }
+}
+
+// Split inHosts into two output arrays: hosts whose policy means they
+// are included without probing (outForced, marked forceInclude with
+// ERROR_NONE) and everything else (outNotForced), which will be checked
+// by active probing. Both output arrays are appended to in place.
+function filterForcedInclusions(inHosts, outNotForced, outForced) {
+  // Apply our filters (based on policy today) to determine which entries
+  // will be included without being checked (forced); the others will be
+  // checked using active probing.
+  for (let host of inHosts) {
+    if (
+      host.policy == "google" ||
+      host.policy == "public-suffix" ||
+      host.policy == "public-suffix-requested"
+    ) {
+      host.forceInclude = true;
+      host.error = ERROR_NONE;
+      outForced.push(host);
+    } else {
+      outNotForced.push(host);
+    }
+  }
+}
+
+// Write the generated preload list to the OUTPUT file in the current
+// working directory: header, expiration-time line, then one
+// "name, includeSubdomains" entry per status between GPERF_DELIM
+// markers. Logs and rethrows on any I/O failure.
+function output(statuses) {
+  dump("INFO: Writing output to " + OUTPUT + "\n");
+  try {
+    let file = new FileUtils.File(
+      PathUtils.join(Services.dirsvc.get("CurWorkD", Ci.nsIFile).path, OUTPUT)
+    );
+    // Safe output stream: the file is only replaced once the stream is
+    // closed successfully below.
+    let fos = FileUtils.openSafeFileOutputStream(file);
+    writeTo(HEADER, fos);
+    writeTo(getExpirationTimeString(), fos);
+
+    writeTo(GPERF_DELIM, fos);
+
+    for (let status of statuses) {
+      // Entries use the v3 format: "<name>, <0|1>".
+      let includeSubdomains = status.includeSubdomains ? 1 : 0;
+      writeTo(status.name + ", " + includeSubdomains + "\n", fos);
+    }
+
+    writeTo(GPERF_DELIM, fos);
+    FileUtils.closeSafeFileOutputStream(fos);
+    dump("finished writing output file\n");
+  } catch (e) {
+    dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n");
+    throw e;
+  }
+}
+
+// Render a status's error for logging; ERROR_MAX_AGE_TOO_LOW also gets
+// the offending max-age value appended.
+function errorToString(status) {
+  return status.error == ERROR_MAX_AGE_TOO_LOW
+    ? status.error + status.maxAge
+    : status.error;
+}
+
+// Entry point. args[0] is the absolute path to the current
+// nsSTSPreloadList.inc. Merges the latest Chromium HSTS data with the
+// existing list, probes the hosts that aren't force-included, and
+// writes the resulting list via output(). Throws on bad usage.
+async function main(args) {
+  if (args.length != 1) {
+    throw new Error(
+      "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>"
+    );
+  }
+
+  // get the current preload list
+  let currentHosts = readCurrentList(args[0]);
+  // delete any hosts we use in tests so we don't actually connect to them
+  deleteTestHosts(currentHosts);
+  // disable the current preload list so it won't interfere with requests we make
+  Services.prefs.setBoolPref(
+    "network.stricttransportsecurity.preloadlist",
+    false
+  );
+  // download and parse the raw json file from the Chromium source
+  let rawdata = download();
+  // get just the hosts with mode: "force-https"
+  let hosts = getHosts(rawdata);
+  // add hosts in the current list to the new list (avoiding duplicates)
+  combineLists(hosts, currentHosts);
+
+  // Don't contact hosts that are forced to be included anyway
+  let hostsToContact = [];
+  let forcedHosts = [];
+  filterForcedInclusions(hosts, hostsToContact, forcedHosts);
+
+  // Initialize the final status list
+  let hstsStatuses = [];
+  // Add the hosts we use in tests
+  dump("Adding test hosts\n");
+  // NOTE(review): insertHosts is async but isn't awaited here; its body
+  // is synchronous, so this appears harmless — confirm.
+  insertHosts(hstsStatuses, getTestHosts());
+  // Add in the hosts that are forced
+  dump("Adding forced hosts\n");
+  insertHosts(hstsStatuses, forcedHosts);
+
+  // Probe the remaining hosts, then sort and filter the combined results
+  // into the final set of entries to write out.
+  let total = await probeHSTSStatuses(hostsToContact)
+    .then(function(probedStatuses) {
+      return hstsStatuses.concat(probedStatuses);
+    })
+    .then(function(statuses) {
+      return statuses.sort(compareHSTSStatus);
+    })
+    .then(function(statuses) {
+      for (let status of statuses) {
+        // If we've encountered an error for this entry (other than the site not
+        // sending an HSTS header), be safe and don't remove it from the list
+        // (given that it was already on the list).
+        if (
+          !status.forceInclude &&
+          status.error != ERROR_NONE &&
+          status.error != ERROR_NO_HSTS_HEADER &&
+          status.error != ERROR_MAX_AGE_TOO_LOW &&
+          status.name in currentHosts
+        ) {
+          // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+          status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+          status.includeSubdomains = currentHosts[status.name];
+        }
+      }
+      return statuses;
+    })
+    .then(function(statuses) {
+      // Filter out entries we aren't including.
+      var includedStatuses = statuses.filter(function(status) {
+        if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+          // dump("INFO: " + status.name + " NOT ON the preload list\n");
+          return false;
+        }
+
+        // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+        if (status.forceInclude && status.error != ERROR_NONE) {
+          dump(
+            status.name +
+              ": " +
+              errorToString(status) +
+              " (error ignored - included regardless)\n"
+          );
+        }
+        return true;
+      });
+      return includedStatuses;
+    });
+
+  // Write the output file
+  output(total);
+
+  dump("HSTS probing all done\n");
+}
+
+// arguments is a global within xpcshell
+spinResolve(main(arguments));
diff --git a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
new file mode 100755
index 0000000000..b88ee476da
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
@@ -0,0 +1,618 @@
+#!/bin/bash
+
+set -ex
+
+# Print command-line usage/help to stdout. The text is a heredoc, so no
+# comments may be placed inside it without changing the output.
+function usage {
+  cat <<EOF
+
+Usage: $(basename "$0") -h # Displays this usage/help text
+Usage: $(basename "$0") -x # lists exit codes
+Usage: $(basename "$0") [-p product]
+           [-r existing_repo_dir]
+           # Use mozilla-central builds to check HSTS & HPKP
+           [--use-mozilla-central]
+           # Use archive.m.o instead of the taskcluster index to get xpcshell
+           [--use-ftp-builds]
+           # One (or more) of the following actions must be specified.
+           --hsts | --hpkp | --remote-settings | --suffix-list
+           -b branch
+
+EOF
+}
+
+# ---- Global configuration and defaults (overridable via CLI flags) ----
+# NOTE(review): DRY_RUN and DO_PRELOAD_PINSET are referenced later but
+# never initialized here; with `set -u` unset they would be errors —
+# confirm this is intentional.
+PRODUCT="firefox"
+BRANCH=""
+PLATFORM_EXT="tar.bz2"
+UNPACK_CMD="tar jxf"
+CLOSED_TREE=false
+DONTBUILD=false
+APPROVAL=false
+COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>'
+REPODIR=''
+HGHOST="hg.mozilla.org"
+STAGEHOST="archive.mozilla.org"
+WGET="wget -nv"
+UNTAR="tar -zxf"
+DIFF="$(command -v diff) -u"
+BASEDIR="${HOME}"
+
+SCRIPTDIR="$(realpath "$(dirname "$0")")"
+HG="$(command -v hg)"
+DATADIR="${BASEDIR}/data"
+mkdir -p "${DATADIR}"
+
+USE_MC=false
+USE_TC=true
+JQ="$(command -v jq)"
+
+# ---- HSTS preload list update ----
+DO_HSTS=false
+HSTS_PRELOAD_SCRIPT="${SCRIPTDIR}/getHSTSPreloadList.js"
+HSTS_PRELOAD_ERRORS="nsSTSPreloadList.errors"
+HSTS_PRELOAD_INC_OLD="${DATADIR}/nsSTSPreloadList.inc"
+# NOTE(review): expands PRODUCT at definition time, before -p is parsed;
+# with `-p thunderbird` this still points at the firefox directory —
+# confirm whether that is intended.
+HSTS_PRELOAD_INC_NEW="${BASEDIR}/${PRODUCT}/nsSTSPreloadList.inc"
+HSTS_UPDATED=false
+
+# ---- HPKP static pins update ----
+DO_HPKP=false
+HPKP_PRELOAD_SCRIPT="${SCRIPTDIR}/genHPKPStaticPins.js"
+HPKP_PRELOAD_ERRORS="StaticHPKPins.errors"
+HPKP_PRELOAD_JSON="${DATADIR}/PreloadedHPKPins.json"
+HPKP_PRELOAD_INC="StaticHPKPins.h"
+HPKP_PRELOAD_INPUT="${DATADIR}/${HPKP_PRELOAD_INC}"
+HPKP_PRELOAD_OUTPUT="${DATADIR}/${HPKP_PRELOAD_INC}.out"
+HPKP_UPDATED=false
+
+# ---- Remote settings dumps update ----
+DO_REMOTE_SETTINGS=false
+REMOTE_SETTINGS_SERVER=''
+REMOTE_SETTINGS_INPUT="${DATADIR}/remote-settings.in"
+REMOTE_SETTINGS_OUTPUT="${DATADIR}/remote-settings.out"
+REMOTE_SETTINGS_DIR="/services/settings/dumps"
+REMOTE_SETTINGS_UPDATED=false
+
+# ---- Public suffix list update ----
+DO_SUFFIX_LIST=false
+GITHUB_SUFFIX_URL="https://raw.githubusercontent.com/publicsuffix/list/master/public_suffix_list.dat"
+GITHUB_SUFFIX_LOCAL="public_suffix_list.dat"
+HG_SUFFIX_LOCAL="effective_tld_names.dat"
+HG_SUFFIX_PATH="/netwerk/dns/${HG_SUFFIX_LOCAL}"
+SUFFIX_LIST_UPDATED=false
+
+# ---- Artifact (diff) outputs for the taskcluster task ----
+ARTIFACTS_DIR="${ARTIFACTS_DIR:-.}"
+# Defaults
+HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}"
+HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}"
+REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}"
+SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}"
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+index_base="$TASKCLUSTER_ROOT_URL/api/index/v1"
+
+# Cleanup common artifacts.
+# Relies on BROWSER_ARCHIVE/TESTS_ARCHIVE, which are assigned in the
+# main section near the bottom of this script before this is called.
+function preflight_cleanup {
+  cd "${BASEDIR}"
+  rm -rf "${PRODUCT}" tests "${BROWSER_ARCHIVE}" "${TESTS_ARCHIVE}"
+}
+
+# Download the browser and tests archives for the latest nightly from
+# archive.mozilla.org (the "FTP" path). Uses latest-mozilla-central when
+# USE_MC is true, otherwise latest-${REPODIR}. Files land in BASEDIR.
+function download_shared_artifacts_from_ftp {
+  cd "${BASEDIR}"
+
+  # Download everything we need to run js with xpcshell
+  echo "INFO: Downloading all the necessary pieces from ${STAGEHOST}..."
+  ARTIFACT_DIR="nightly/latest-${REPODIR}"
+  if [ "${USE_MC}" == "true" ]; then
+    ARTIFACT_DIR="nightly/latest-mozilla-central"
+  fi
+
+  BROWSER_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${BROWSER_ARCHIVE}"
+  TESTS_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${TESTS_ARCHIVE}"
+
+  echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+  ${WGET} "${BROWSER_ARCHIVE_URL}"
+  echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+  ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+# Download the browser and tests archives via the taskcluster index:
+# resolve the latest shippable linux64-opt task for the branch (or
+# mozilla-central when USE_MC is true), pick its most recent run, and
+# fetch the build artifacts from the queue. Exits 22 if the index lookup
+# yields no taskId.
+function download_shared_artifacts_from_tc {
+  cd "${BASEDIR}"
+  TASKID_FILE="taskId.json"
+
+  # Download everything we need to run js with xpcshell
+  echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
+  TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.shippable.latest.${PRODUCT}.linux64-opt"
+  if [ "${USE_MC}" == "true" ]; then
+    TASKID_URL="$index_base/task/gecko.v2.mozilla-central.shippable.latest.${PRODUCT}.linux64-opt"
+  fi
+  ${WGET} -O ${TASKID_FILE} "${TASKID_URL}"
+  INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
+  if [ -z "${INDEX_TASK_ID}" ]; then
+    echo "Failed to look up taskId at ${TASKID_URL}"
+    exit 22
+  else
+    echo "INFO: Got taskId of $INDEX_TASK_ID"
+  fi
+
+  TASKSTATUS_FILE="taskstatus.json"
+  STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status"
+  ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
+  # Runs are zero-indexed, so the last run is (count - 1).
+  # NOTE(review): uses bare `jq` here but ${JQ} elsewhere — confirm both
+  # resolve to the same binary.
+  LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
+  echo "INFO: Examining run number ${LAST_RUN_INDEX}"
+
+  BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
+  echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+  ${WGET} "${BROWSER_ARCHIVE_URL}"
+
+  TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
+  echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+  ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+# Unpack the downloaded browser archive (UNPACK_CMD) and tests archive
+# (UNTAR, into ./tests), then copy the xpcshell binary into the product
+# directory so the update scripts can run against the build's libraries.
+# Exits 31/32 if the expected archives are missing.
+function unpack_artifacts {
+  cd "${BASEDIR}"
+  if [ ! -f "${BROWSER_ARCHIVE}" ]; then
+    echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2
+    exit 31
+  fi
+  if [ ! -f "${TESTS_ARCHIVE}" ]; then
+    echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2
+    exit 32
+  fi
+  # Unpack the browser and move xpcshell in place for updating the preload list.
+  echo "INFO: Unpacking resources..."
+  ${UNPACK_CMD} "${BROWSER_ARCHIVE}"
+  mkdir -p tests
+  cd tests
+  ${UNTAR} "../${TESTS_ARCHIVE}"
+  cd "${BASEDIR}"
+  cp tests/bin/xpcshell "${PRODUCT}"
+}
+
+# Downloads the current in-tree HSTS (HTTP Strict Transport Security) files.
+# Runs a simple xpcshell script to generate up-to-date HSTS information.
+# Compares the new HSTS output with the old to determine whether we need to update.
+# Returns 0 when the lists differ (an update is needed) and 1 when they
+# are identical — note this is the opposite of diff's own convention.
+function compare_hsts_files {
+  cd "${BASEDIR}"
+
+  HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC_OLD}")"
+
+  echo "INFO: Downloading existing include file..."
+  rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC_OLD}"
+  echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}"
+  ${WGET} -O "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_HG}"
+
+  if [ ! -f "${HSTS_PRELOAD_INC_OLD}" ]; then
+    echo "Downloaded file '${HSTS_PRELOAD_INC_OLD}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2
+    exit 41
+  fi
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HSTS preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  # xpcshell needs the build's shared libraries from the current dir.
+  if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC_OLD}"; then
+    echo "HSTS preload list generation failed" >&2
+    exit 43
+  fi
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HSTS preload list is valid..."
+  if [ ! -s "${HSTS_PRELOAD_INC_NEW}" ]; then
+    echo "New HSTS preload list ${HSTS_PRELOAD_INC_NEW} is empty. That's less good." >&2
+    exit 42
+  fi
+  cd "${BASEDIR}"
+
+  # Check for differences
+  echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}"
+  ${DIFF} "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_NEW}" | tee "${HSTS_DIFF_ARTIFACT}"
+  # A non-empty diff artifact means the lists differ.
+  if [ -s "${HSTS_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Downloads the current in-tree HPKP (HTTP public key pinning) files.
+# Runs a simple xpcshell script to generate up-to-date HPKP information.
+# Compares the new HPKP output with the old to determine whether we need to update.
+# Returns 0 when an update is needed, 1 otherwise (same convention as
+# compare_hsts_files).
+function compare_hpkp_files {
+  cd "${BASEDIR}"
+  HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")"
+
+  HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+  rm -f "${HPKP_PRELOAD_OUTPUT}"
+  ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}"
+  ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}"
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HPKP preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  # NOTE(review): HPKP_PRELOAD_ERRORS is a relative path, so the error
+  # log lands in ${BASEDIR}/${PRODUCT} — confirm that's intended.
+  if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}"; then
+    echo "HPKP preload list generation failed" >&2
+    exit 54
+  fi
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HPKP preload list is valid..."
+
+  if [ ! -s "${HPKP_PRELOAD_OUTPUT}" ]; then
+    echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2
+    exit 52
+  fi
+  # The expiration-time constant appears at the end of a complete file;
+  # its absence suggests truncated output.
+  if ! grep kPreloadPKPinsExpirationTime "${HPKP_PRELOAD_OUTPUT}"; then
+    echo "${HPKP_PRELOAD_OUTPUT} is missing an expiration time. Truncated?" >&2
+    exit 53
+  fi
+  cd "${BASEDIR}"
+
+  echo "INFO: diffing old/new HPKP preload lists..."
+  ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}"
+  if [ -s "${HPKP_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Validate $1 as well-formed XML with xmllint (no network, no output).
+# Exits 60 if xmllint is not installed.
+# NOTE(review): this helper is not called anywhere in this script —
+# possibly kept for other update types; confirm before removing.
+function is_valid_xml {
+  xmlfile=$1
+  XMLLINT=$(command -v xmllint 2>/dev/null | head -n1)
+
+  if [ ! -x "${XMLLINT}" ]; then
+    echo "ERROR: xmllint not found in PATH"
+    exit 60
+  fi
+  ${XMLLINT} --nonet --noout "${xmlfile}"
+}
+
+# Downloads the public suffix list
+# Fetches the upstream list from the publicsuffix GitHub repo and the
+# in-tree copy from hg, then diffs them. Returns 0 when they differ
+# (update needed), 1 otherwise.
+function compare_suffix_lists {
+  HG_SUFFIX_URL="${HGREPO}/raw-file/default/${HG_SUFFIX_PATH}"
+  cd "${BASEDIR}"
+
+  echo "INFO: ${WGET} -O ${GITHUB_SUFFIX_LOCAL} ${GITHUB_SUFFIX_URL}"
+  rm -f "${GITHUB_SUFFIX_LOCAL}"
+  ${WGET} -O "${GITHUB_SUFFIX_LOCAL}" "${GITHUB_SUFFIX_URL}"
+
+  echo "INFO: ${WGET} -O ${HG_SUFFIX_LOCAL} ${HG_SUFFIX_URL}"
+  rm -f "${HG_SUFFIX_LOCAL}"
+  ${WGET} -O "${HG_SUFFIX_LOCAL}" "${HG_SUFFIX_URL}"
+
+  # NOTE(review): the message says "from AMO" but the upstream source is
+  # the publicsuffix GitHub repository (GITHUB_SUFFIX_URL); the diff
+  # arguments are also unquoted, unlike elsewhere in this script.
+  echo "INFO: diffing in-tree suffix list against the suffix list from AMO..."
+  ${DIFF} ${GITHUB_SUFFIX_LOCAL} ${HG_SUFFIX_LOCAL} | tee "${SUFFIX_LIST_DIFF_ARTIFACT}"
+  if [ -s "${SUFFIX_LIST_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# For every collection published on the remote settings server, download
+# the in-tree dump (into REMOTE_SETTINGS_INPUT) and the server's current
+# records (into REMOTE_SETTINGS_OUTPUT), plus any attachments that need
+# refreshing, then recursively diff the two trees. Returns 0 when they
+# differ (update needed), 1 otherwise.
+function compare_remote_settings_files {
+  REMOTE_SETTINGS_SERVER="https://firefox.settings.services.mozilla.com/v1"
+
+  # 1. List remote settings collections from server.
+  echo "INFO: fetch remote settings list from server"
+  ${WGET} -qO- "${REMOTE_SETTINGS_SERVER}/buckets/monitor/collections/changes/records" |\
+  ${JQ} -r '.data[] | .bucket+"/"+.collection+"/"+(.last_modified|tostring)' |\
+  # 2. For each entry ${bucket, collection, last_modified}
+  while IFS="/" read -r bucket collection last_modified; do
+
+    # 3. Download the dump from HG into REMOTE_SETTINGS_INPUT folder
+    hg_dump_url="${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${bucket}/${collection}.json"
+    local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+    mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}"
+    ${WGET} -qO "$local_location_input" "$hg_dump_url"
+    # wget exit status 8 means the server issued an error response
+    # (e.g. 404: no in-tree dump exists for this collection).
+    if [ $? -eq 8 ]; then
+      # We don't keep any dump for this collection, skip it.
+      # Try to clean up in case no collection in this bucket has dump.
+      rmdir "$REMOTE_SETTINGS_INPUT/${bucket}" --ignore-fail-on-non-empty
+      continue
+    fi
+
+    # 4. Download server version into REMOTE_SETTINGS_OUTPUT folder
+    remote_records_url="$REMOTE_SETTINGS_SERVER/buckets/${bucket}/collections/${collection}/changeset?_expected=${last_modified}"
+    local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+    mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}"
+    ${WGET} -qO- "$remote_records_url" | ${JQ} '{"data": .changes, "timestamp": .timestamp}' > "${local_location_output}"
+
+    # 5. Download attachments if needed.
+    if [ "${bucket}" = "blocklists" ] && [ "${collection}" = "addons-bloomfilters" ]; then
+      # Find the attachment with the most recent generation_time, like _updateMLBF in Blocklist.jsm.
+      # The server should return one "bloomfilter-base" record, but in case it returns multiple,
+      # return the most recent one. The server may send multiple entries if we ever decide to use
+      # the "filter_expression" feature of Remote Settings to send different records to specific
+      # channels. In that case this code should be updated to recognize the filter expression,
+      # but until we do, simply select the most recent record - can't go wrong with that.
+      # Note that "attachment_type" and "generation_time" are specific to addons-bloomfilters.
+      update_remote_settings_attachment "${bucket}" "${collection}" addons-mlbf.bin \
+        'map(select(.attachment_type == "bloomfilter-base")) | sort_by(.generation_time) | last'
+    fi
+    # Here is an example to download an attachment with record identifier "ID":
+    # update_remote_settings_attachment "${bucket}" "${collection}" ID '.[] | select(.id == "ID")'
+    # NOTE: The downloaded data is not validated. xpcshell should be used for that.
+  done
+
+  echo "INFO: diffing old/new remote settings dumps..."
+  ${DIFF} -r "${REMOTE_SETTINGS_INPUT}" "${REMOTE_SETTINGS_OUTPUT}" > "${REMOTE_SETTINGS_DIFF_ARTIFACT}"
+  if [ -s "${REMOTE_SETTINGS_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Helper for compare_remote_settings_files to download attachments from remote settings.
+# The format and location is documented at:
+# https://firefox-source-docs.mozilla.org/services/common/services/RemoteSettings.html#packaging-attachments
+# Arguments: bucket, collection, attachment file name, and a jq filter
+# that selects the single record owning the attachment. Downloads the
+# attachment into both the input and output trees only when its metadata
+# changed, so unchanged attachments cost no extra transfers.
+function update_remote_settings_attachment() {
+  local bucket=$1
+  local collection=$2
+  local attachment_id=$3
+  # $4 is a jq filter on the arrays that should return one record with the attachment
+  local jq_attachment_selector=".data | map(select(.attachment)) | $4"
+
+  # These paths match _readAttachmentDump in services/settings/Attachments.jsm.
+  local path_to_attachment="${bucket}/${collection}/${attachment_id}"
+  local path_to_meta="${bucket}/${collection}/${attachment_id}.meta.json"
+  local old_meta="$REMOTE_SETTINGS_INPUT/${path_to_meta}"
+  local new_meta="$REMOTE_SETTINGS_OUTPUT/${path_to_meta}"
+
+  # Those files should have been created by compare_remote_settings_files before the function call.
+  local local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+  local local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+
+  # Compute the metadata based on already-downloaded records.
+  mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}/${collection}"
+  ${JQ} -cj <"$local_location_input" "${jq_attachment_selector}" > "${old_meta}"
+  mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}"
+  ${JQ} -cj <"$local_location_output" "${jq_attachment_selector}" > "${new_meta}"
+
+  if cmp --silent "${old_meta}" "${new_meta}" ; then
+    # Metadata not changed, don't bother downloading the attachments themselves.
+    return
+  fi
+  # Metadata changed. Download attachments.
+
+  echo "INFO: Downloading updated remote settings dump: ${bucket}/${collection}/${attachment_id}"
+
+  # Overwrite old_meta with the actual file from the repo. The content should be equivalent,
+  # but can have minor differences (e.g. different line endings) if the checked in file was not
+  # generated by this script (e.g. manually checked in).
+  ${WGET} -qO "${old_meta}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_meta}"
+
+  ${WGET} -qO "${REMOTE_SETTINGS_INPUT}/${path_to_attachment}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_attachment}"
+
+  # Resolve the attachment CDN base URL from the server root (cached in
+  # ATTACHMENT_BASE_URL across calls).
+  if [ -z "${ATTACHMENT_BASE_URL}" ] ; then
+    ATTACHMENT_BASE_URL=$(${WGET} -qO- "${REMOTE_SETTINGS_SERVER}" | ${JQ} -r .capabilities.attachments.base_url)
+  fi
+  attachment_path_from_meta=$(${JQ} -r < "${new_meta}" .attachment.location)
+  ${WGET} -qO "${REMOTE_SETTINGS_OUTPUT}/${path_to_attachment}" "${ATTACHMENT_BASE_URL}${attachment_path_from_meta}"
+}
+
+# Clones an hg repo
+# Checks out HGREPO into REPODIR (robustcheckout with a shared store) if
+# not already present, then pulls and does a clean update to default.
+function clone_repo {
+  cd "${BASEDIR}"
+  if [ ! -d "${REPODIR}" ]; then
+    ${HG} robustcheckout --sharebase /tmp/hg-store -b default "${HGREPO}" "${REPODIR}"
+  fi
+
+  ${HG} -R "${REPODIR}" pull
+  # -C discards any local modifications from a previous run.
+  ${HG} -R "${REPODIR}" update -C default
+}
+
+# Copies new HSTS files in place, and commits them.
+# (The commit itself happens later in the main section.)
+function stage_hsts_files {
+  cd "${BASEDIR}"
+  cp -f "${HSTS_PRELOAD_INC_NEW}" "${REPODIR}/security/manager/ssl/"
+}
+
+# Copy the regenerated StaticHPKPins.h into the checkout for committing.
+function stage_hpkp_files {
+  cd "${BASEDIR}"
+  cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}"
+}
+
+# Copy the freshly downloaded remote settings dumps (and attachments)
+# over the in-tree dumps directory for committing.
+function stage_remote_settings_files {
+  cd "${BASEDIR}"
+  cp -a "${REMOTE_SETTINGS_OUTPUT}"/* "${REPODIR}${REMOTE_SETTINGS_DIR}"
+}
+
+# Copy the upstream public suffix list over the in-tree copy for
+# committing.
+function stage_tld_suffix_files {
+  cd "${BASEDIR}"
+  cp -a "${GITHUB_SUFFIX_LOCAL}" "${REPODIR}/${HG_SUFFIX_PATH}"
+}
+
+# Push all pending commits to Phabricator
+# Requires ~/.arcrc credentials, the `arc` binary, and REVIEWERS to be
+# set; returns 1 (without error) when any prerequisite is missing.
+# Abandons stale auto-generated review requests before submitting a new
+# one.
+function push_repo {
+  cd "${REPODIR}"
+  if [ ! -r "${HOME}/.arcrc" ]
+  then
+    return 1
+  fi
+  if ! ARC=$(command -v arc)
+  then
+    return 1
+  fi
+  if [ -z "${REVIEWERS}" ]
+  then
+    return 1
+  fi
+  # Clean up older review requests
+  # Turn  Needs Review D624: No bug, Automated HSTS ...
+  # into D624
+  for diff in $($ARC list | grep "Needs Review" | grep -E "${BRANCH} repo-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }')
+  do
+    echo "Removing old request $diff"
+    # There is no 'arc abandon', see bug 1452082
+    echo '{"transactions": [{"type":"abandon", "value": true}], "objectIdentifier": "'"${diff}"'"}' | $ARC call-conduit -- differential.revision.edit
+  done
+
+  $ARC diff --verbatim --reviewers "${REVIEWERS}"
+}
+
+
+
+# Main
+# Flow: parse options -> validate -> derive per-branch constants ->
+# download/unpack build artifacts if needed -> run the requested
+# comparisons -> if anything changed (and not a dry run), clone the
+# repo, stage files, commit, and push to Phabricator.
+
+# Parse our command-line options.
+while [ $# -gt 0 ]; do
+  case "$1" in
+    -h) usage; exit 0 ;;
+    -p) PRODUCT="$2"; shift ;;
+    -b) BRANCH="$2"; shift ;;
+    -n) DRY_RUN=true ;;
+    -c) CLOSED_TREE=true ;;
+    -d) DONTBUILD=true ;;
+    -a) APPROVAL=true ;;
+    --pinset) DO_PRELOAD_PINSET=true ;;
+    --hsts) DO_HSTS=true ;;
+    --hpkp) DO_HPKP=true ;;
+    --remote-settings) DO_REMOTE_SETTINGS=true ;;
+    --suffix-list) DO_SUFFIX_LIST=true ;;
+    -r) REPODIR="$2"; shift ;;
+    --use-mozilla-central) USE_MC=true ;;
+    --use-ftp-builds) USE_TC=false ;;
+    -*) usage
+      exit 11 ;;
+    *) break ;; # terminate while loop
+  esac
+  shift
+done
+
+# Must supply a code branch to work with.
+if [ "${BRANCH}" == "" ]; then
+  echo "Error: You must specify a branch with -b branchname." >&2
+  usage
+  exit 12
+fi
+
+# Must choose at least one update action.
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_REMOTE_SETTINGS" == "false" ] && [ "$DO_SUFFIX_LIST" == "false" ]
+then
+  echo "Error: you must specify at least one action from: --hsts, --hpkp, --remote-settings, or --suffix-list" >&2
+  usage
+  exit 13
+fi
+
+# per-product constants
+case "${PRODUCT}" in
+  thunderbird)
+    COMMIT_AUTHOR="tbirdbld <tbirdbld@thunderbird.net>"
+    ;;
+  firefox)
+    ;;
+  *)
+    echo "Error: Invalid product specified"
+    usage
+    exit 14
+    ;;
+esac
+
+if [ "${REPODIR}" == "" ]; then
+  REPODIR="$(basename "${BRANCH}")"
+fi
+
+# Map the branch name onto its hg.mozilla.org URL: top-level for the
+# central repos and try, releases/ for release branches, projects/
+# otherwise.
+case "${BRANCH}" in
+  mozilla-central|comm-central|try )
+    HGREPO="https://${HGHOST}/${BRANCH}"
+    ;;
+  mozilla-*|comm-* )
+    HGREPO="https://${HGHOST}/releases/${BRANCH}"
+    ;;
+  * )
+    HGREPO="https://${HGHOST}/projects/${BRANCH}"
+    ;;
+esac
+
+BROWSER_ARCHIVE="target.${PLATFORM_EXT}"
+TESTS_ARCHIVE="target.common.tests.tar.gz"
+
+preflight_cleanup
+# xpcshell (from the build artifacts) is only needed for the HSTS/HPKP
+# actions; the remote-settings and suffix-list actions use wget/jq only.
+if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ]
+then
+  if [ "${USE_TC}" == "true" ]; then
+    download_shared_artifacts_from_tc
+  else
+    download_shared_artifacts_from_ftp
+  fi
+  unpack_artifacts
+fi
+
+# Each compare_* function returns 0 when the in-tree data is out of date.
+if [ "${DO_HSTS}" == "true" ]; then
+  if compare_hsts_files
+  then
+    HSTS_UPDATED=true
+  fi
+fi
+if [ "${DO_HPKP}" == "true" ]; then
+  if compare_hpkp_files
+  then
+    HPKP_UPDATED=true
+  fi
+fi
+if [ "${DO_REMOTE_SETTINGS}" == "true" ]; then
+  if compare_remote_settings_files
+  then
+    REMOTE_SETTINGS_UPDATED=true
+  fi
+fi
+if [ "${DO_SUFFIX_LIST}" == "true" ]; then
+  if compare_suffix_lists
+  then
+    SUFFIX_LIST_UPDATED=true
+  fi
+fi
+
+
+# Exit 0 when nothing changed; exit 2 when changes exist but -n (dry
+# run) was given.
+if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${REMOTE_SETTINGS_UPDATED}" == "false" ] && [ "${SUFFIX_LIST_UPDATED}" == "false" ]; then
+  echo "INFO: no updates required. Exiting."
+  exit 0
+else
+  if [ "${DRY_RUN}" == "true" ]; then
+    echo "INFO: Updates are available, not updating hg in dry-run mode."
+    exit 2
+  fi
+fi
+
+clone_repo
+
+# Build the commit message from whichever lists actually changed.
+COMMIT_MESSAGE="No Bug, ${BRANCH} repo-update"
+if [ "${HSTS_UPDATED}" == "true" ]
+then
+  stage_hsts_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} HSTS"
+fi
+
+if [ "${HPKP_UPDATED}" == "true" ]
+then
+  stage_hpkp_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} HPKP"
+fi
+
+if [ "${REMOTE_SETTINGS_UPDATED}" == "true" ]
+then
+  stage_remote_settings_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} remote-settings"
+fi
+
+if [ "${SUFFIX_LIST_UPDATED}" == "true" ]
+then
+  stage_tld_suffix_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} tld-suffixes"
+fi
+
+
+# Append the standard commit-message annotations for automation.
+if [ ${DONTBUILD} == true ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+fi
+if [ ${CLOSED_TREE} == true ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+fi
+if [ ${APPROVAL} == true ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=repo-update"
+fi
+
+
+# Only push a review request if the commit succeeded.
+if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}"
+then
+  push_repo
+fi
+
+echo "All done"