From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Fri, 19 Apr 2024 02:47:55 +0200
Subject: Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann
---
 taskcluster/docker/periodic-updates/.eslintrc.js   |  60 ++
 taskcluster/docker/periodic-updates/Dockerfile     |  11 +
 taskcluster/docker/periodic-updates/README.md      |  96 +++
 taskcluster/docker/periodic-updates/runme.sh       |  98 +++
 .../periodic-updates/scripts/genHPKPStaticPins.js  | 673 +++++++++++++++++++++
 .../periodic-updates/scripts/getHSTSPreloadList.js | 557 +++++++++++++++++
 .../scripts/periodic_file_updates.sh               | 672 ++++++++++++++++++++
 taskcluster/docker/periodic-updates/setup.sh       |  24 +
 8 files changed, 2191 insertions(+)
 create mode 100644 taskcluster/docker/periodic-updates/.eslintrc.js
 create mode 100644 taskcluster/docker/periodic-updates/Dockerfile
 create mode 100644 taskcluster/docker/periodic-updates/README.md
 create mode 100755 taskcluster/docker/periodic-updates/runme.sh
 create mode 100644 taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
 create mode 100644 taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
 create mode 100755 taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
 create mode 100755 taskcluster/docker/periodic-updates/setup.sh

diff --git a/taskcluster/docker/periodic-updates/.eslintrc.js b/taskcluster/docker/periodic-updates/.eslintrc.js
new file mode 100644
index 0000000000..ecaf80d033
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/.eslintrc.js
@@ -0,0 +1,60 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+module.exports = {
+  globals: {
+    // JS files in this folder are commonly xpcshell scripts where |arguments|
+    // is defined in the global scope.
+    arguments: false,
+  },
+  rules: {
+    // Enforce return statements in callbacks of array methods.
+    "array-callback-return": "error",
+
+    // Verify calls of super() in constructors.
+    "constructor-super": "error",
+
+    // Require default case in switch statements.
+    "default-case": "error",
+
+    // Disallow use of alert(), confirm(), and prompt().
+    "no-alert": "error",
+
+    // Disallow likely erroneous `switch` scoped lexical declarations in
+    // case/default clauses.
+    "no-case-declarations": "error",
+
+    // Disallow use of the console API.
+    "no-console": "error",
+
+    // Disallow constant expressions in conditions (except for loops).
+    "no-constant-condition": ["error", { checkLoops: false }],
+
+    // Disallow extending of native objects.
+    "no-extend-native": "error",
+
+    // Disallow use of assignment in return statement.
+    "no-return-assign": ["error", "always"],
+
+    // Disallow template literal placeholder syntax in regular strings.
+    "no-template-curly-in-string": "error",
+
+    // Disallow unmodified loop conditions.
+    "no-unmodified-loop-condition": "error",
+
+    // No expressions where a statement is expected
+    "no-unused-expressions": "error",
+
+    // Disallow unnecessary escape usage in strings and regular expressions.
+    "no-useless-escape": "error",
+
+    // Require "use strict" to be defined globally in the script.
+    strict: ["error", "global"],
+
+    // Disallow Yoda conditions.
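+    // e.g. require `if (value === 42)` rather than `if (42 === value)`.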
+    yoda: ["error", "never"],
+  },
+};
diff --git a/taskcluster/docker/periodic-updates/Dockerfile b/taskcluster/docker/periodic-updates/Dockerfile
new file mode 100644
index 0000000000..24cabe02b5
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/Dockerfile
@@ -0,0 +1,11 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Julien Cristau
+
+ADD setup.sh /setup/setup.sh
+
+RUN cd /setup && ./setup.sh
+
+COPY runme.sh /
+COPY scripts/* /home/worker/scripts/
+
+CMD ["/runme.sh"]
diff --git a/taskcluster/docker/periodic-updates/README.md b/taskcluster/docker/periodic-updates/README.md
new file mode 100644
index 0000000000..d21c0c3656
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/README.md
@@ -0,0 +1,96 @@
+
+==Periodic File Updates==
+
+This Docker image examines the in-tree files for HSTS preload data, HPKP
+pinning and blocklisting, and produces a diff for each file that needs
+updating.
+
+If given a Conduit API token, it will also use the arcanist client to submit
+the commits for review.
+
+
+==Quick Start==
+
+```sh
+docker build -t hsts-local --no-cache --rm .
+
+docker run -e DO_HSTS=1 -e DO_HPKP=1 -e PRODUCT="firefox" -e BRANCH="mozilla-central" -e USE_MOZILLA_CENTRAL=1 hsts-local
+```
+
+HSTS checks will only be run if the `DO_HSTS` environment variable is set;
+likewise for `DO_HPKP` and the HPKP checks. Environment variables are used
+rather than command-line arguments to make constructing taskcluster tasks
+easier.
+
+To prevent a full build when landing with Phabricator, set the `DONTBUILD`
+environment variable.
+
+==Background==
+
+These scripts have been moved from
+`https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and
+`security/manager/tools/` in the main repos.
+
+==HSTS Checks==
+
+`scripts/getHSTSPreloadList.js` examines the current contents of
+nsSTSPreloadList.inc from whichever `BRANCH` is specified, adds the mandatory
+hosts as well as those from the Chromium source, and checks each host to see
+whether its SSL configuration is valid and whether it sets the
+Strict-Transport-Security header with an appropriate `max-age`.
+
+This JavaScript has been modified to use async calls to improve performance.
+
+==HPKP Checks==
+
+`scripts/genHPKPStaticPins.js` ensures the list of pinned public keys is up
+to date.
+
+==Example Taskcluster Task==
+
+https://firefox-ci-tc.services.mozilla.com/tasks/create/
+
+```yaml
+provisionerId: aws-provisioner-v1
+workerType: gecko-1-b-linux
+retries: 0
+created: '2018-02-07T14:45:57.347Z'
+deadline: '2018-02-07T17:45:57.348Z'
+expires: '2019-02-07T17:45:57.348Z'
+scopes: []
+payload:
+  image: srfraser/hsts1
+  maxRunTime: 1800
+  artifacts:
+    public/build/nsSTSPreloadList.diff:
+      path: /home/worker/artifacts/nsSTSPreloadList.diff
+      expires: '2019-02-07T13:57:35.448Z'
+      type: file
+    public/build/StaticHPKPins.h.diff:
+      path: /home/worker/artifacts/StaticHPKPins.h.diff
+      expires: '2019-02-07T13:57:35.448Z'
+      type: file
+    public/build/blocklist.diff:
+      path: /home/worker/artifacts/blocklist.diff
+      expires: '2019-02-07T13:57:35.448Z'
+      type: file
+  env:
+    DO_HSTS: 1
+    DO_HPKP: 1
+    PRODUCT: firefox
+    BRANCH: mozilla-central
+    USE_MOZILLA_CENTRAL: 1
+    REVIEWERS: catlee
+metadata:
+  name: Periodic updates testing
+  description: Produce diffs for HSTS and HPKP in-tree files.
+  owner: sfraser@mozilla.com
+  source: 'https://firefox-ci-tc.services.mozilla.com/tasks/create'
+tags: {}
+extra:
+  treeherder:
+    jobKind: test
+    machine:
+      platform: linux64
+    tier: 1
+    symbol: 'hsts'
+
+```
diff --git a/taskcluster/docker/periodic-updates/runme.sh b/taskcluster/docker/periodic-updates/runme.sh
new file mode 100755
index 0000000000..b9f6eb31c3
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/runme.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+set -xe
+
+# Things to be set by task definition.
+# --pinset --hsts --hpkp
+# -b branch
+# --use-mozilla-central
+# -p firefox
+# Artifact directory
+# Artifact names.
+
+
+test "${BRANCH}"
+test "${PRODUCT}"
+
+PARAMS=""
+
+if [ -n "${USE_MOZILLA_CENTRAL}" ]
+then
+  PARAMS="${PARAMS} --use-mozilla-central"
+fi
+
+# TODO change these, so that they're run if the artifact location is specified?
+if [ -n "${DO_HSTS}" ]
+then
+  PARAMS="${PARAMS} --hsts"
+fi
+
+if [ -n "${DO_HPKP}" ]
+then
+  PARAMS="${PARAMS} --hpkp"
+fi
+
+if [ -n "${DO_REMOTE_SETTINGS}" ]
+then
+  PARAMS="${PARAMS} --remote-settings"
+fi
+
+if [ -n "${DO_SUFFIX_LIST}" ]
+then
+  PARAMS="${PARAMS} --suffix-list"
+fi
+
+if [ -n "${DO_MOBILE_EXPERIMENTS}" ]
+then
+  PARAMS="${PARAMS} --mobile-experiments"
+fi
+
+if [ -n "${DONTBUILD}" ]
+then
+  PARAMS="${PARAMS} -d"
+fi
+
+
+export ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+
+# Get Arcanist API token
+
+if [ -n "${TASK_ID}" ]
+then
+  curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+  ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
+fi
+if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
+then
+  set +x # Don't echo these
+  secrets_url="${TASKCLUSTER_PROXY_URL}/api/secrets/v1/secret/${ARC_SECRET}"
+  SECRET=$(curl "${secrets_url}")
+  TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
+elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
+then
+  TOKEN="${ARC_TOKEN}"
+fi
+
+if [ -n "${TOKEN}" ]
+then
+  cat >"${HOME}/.arcrc" <<END
+{
+  "hosts": {
+    "https://phabricator.services.mozilla.com/api/": {
+      "token": "${TOKEN}"
+    }
+  }
+}
+END
+  chmod 600 "${HOME}/.arcrc"
+fi
+
+/home/worker/scripts/periodic_file_updates.sh -p "${PRODUCT}" -b "${BRANCH}" ${PARAMS}
diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
new file mode 100644
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
@@ -0,0 +1,673 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+if (typeof arguments == "undefined") {
+  throw new Error(
+    "Usage: genHPKPStaticPins.js <absolute path to PreloadedHPKPins.json> " +
+      "<absolute path to StaticHPKPins.h>"
+  );
+}
+
+var { NetUtil } = ChromeUtils.importESModule(
+  "resource://gre/modules/NetUtil.sys.mjs"
+);
+var { FileUtils } = ChromeUtils.importESModule(
+  "resource://gre/modules/FileUtils.sys.mjs"
+);
+
+var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService(
+  Ci.nsIX509CertDB
+);
+
+const SHA256_PREFIX = "sha256/";
+const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";
+
+// Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable)
+const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14;
+
+const FILE_HEADER =
+  "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
+  " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
+  " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
+  "\n" +
+  "/*****************************************************************************/\n" +
+  "/* This is an automatically generated file. If you're not                   */\n" +
+  "/* PublicKeyPinningService.cpp, you shouldn't be #including it.
*/\n" + + "/*****************************************************************************/\n" + + "#include " + + "\n"; + +const DOMAINHEADER = + "/* Domainlist */\n" + + "struct TransportSecurityPreload {\n" + + " // See bug 1338873 about making these fields const.\n" + + " const char* mHost;\n" + + " bool mIncludeSubdomains;\n" + + " bool mTestMode;\n" + + " bool mIsMoz;\n" + + " int32_t mId;\n" + + " const StaticFingerprints* pinset;\n" + + "};\n\n"; + +const PINSETDEF = + "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" + + "struct StaticFingerprints {\n" + + " // See bug 1338873 about making these fields const.\n" + + " size_t size;\n" + + " const char* const* data;\n" + + "};\n\n"; + +// Command-line arguments +var gStaticPins = parseJson(arguments[0]); + +// Open the output file. +var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); +file.initWithPath(arguments[1]); +var gFileOutputStream = FileUtils.openSafeFileOutputStream(file); + +function writeString(string) { + gFileOutputStream.write(string, string.length); +} + +function readFileToString(filename) { + let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); + file.initWithPath(filename); + let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsIFileInputStream + ); + stream.init(file, -1, 0, 0); + let buf = NetUtil.readInputStreamToString(stream, stream.available()); + return buf; +} + +function stripComments(buf) { + let lines = buf.split("\n"); + let entryRegex = /^\s*\/\//; + let data = ""; + for (let i = 0; i < lines.length; ++i) { + let match = entryRegex.exec(lines[i]); + if (!match) { + data = data + lines[i]; + } + } + return data; +} + +function download(filename) { + let req = new XMLHttpRequest(); + req.open("GET", filename, false); // doing the request synchronously + try { + req.send(); + } catch (e) { + throw new Error(`ERROR: problem downloading '${filename}': ${e}`); + } + + if (req.status != 200) { + throw new Error( + "ERROR: problem downloading '" + filename + "': status " + req.status + ); + } + + let resultDecoded; + try { + resultDecoded = atob(req.responseText); + } catch (e) { + throw new Error( + "ERROR: could not decode data as base64 from '" + filename + "': " + e + ); + } + return resultDecoded; +} + +function downloadAsJson(filename) { + // we have to filter out '//' comments, while not mangling the json + let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, ""); + let data = null; + try { + data = JSON.parse(result); + } catch (e) { + throw new Error( + "ERROR: could not parse data from '" + filename + "': " + e + ); + } + return data; +} + +// Returns a Subject Public Key Digest from the given pem, if it exists. +function getSKDFromPem(pem) { + let cert = gCertDB.constructX509FromBase64(pem, pem.length); + return cert.sha256SubjectPublicKeyInfoDigest; +} + +/** + * Hashes |input| using the SHA-256 algorithm in the following manner: + * btoa(sha256(atob(input))) + * + * @param {string} input Base64 string to decode and return the hash of. + * @returns {string} Base64 encoded SHA-256 hash. + */ +function sha256Base64(input) { + let decodedValue; + try { + decodedValue = atob(input); + } catch (e) { + throw new Error(`ERROR: could not decode as base64: '${input}': ${e}`); + } + + // Convert |decodedValue| to an array so that it can be hashed by the + // nsICryptoHash instance below. 
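+  // (nsICryptoHash.update() takes an array of octet values together with the
+  // array's length, so a plain array of charCodeAt() values works.)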
+ // In most cases across the code base, convertToByteArray() of + // nsIScriptableUnicodeConverter is used to do this, but the method doesn't + // seem to work here. + let data = []; + for (let i = 0; i < decodedValue.length; i++) { + data[i] = decodedValue.charCodeAt(i); + } + + let hasher = Cc["@mozilla.org/security/hash;1"].createInstance( + Ci.nsICryptoHash + ); + hasher.init(hasher.SHA256); + hasher.update(data, data.length); + + // true is passed so that the hasher returns a Base64 encoded string. + return hasher.finish(true); +} + +// Downloads the static certs file and tries to map Google Chrome nicknames +// to Mozilla nicknames, as well as storing any hashes for pins for which we +// don't have root PEMs. Each entry consists of a line containing the name of +// the pin followed either by a hash in the format "sha256/" + base64(hash), +// a PEM encoded public key, or a PEM encoded certificate. +// For certificates that we have in our database, +// return a map of Google's nickname to ours. For ones that aren't return a +// map of Google's nickname to SHA-256 values. This code is modeled after agl's +// https://github.com/agl/transport-security-state-generate, which doesn't +// live in the Chromium repo because go is not an official language in +// Chromium. +// For all of the entries in this file: +// - If the entry has a hash format, find the Mozilla pin name (cert nickname) +// and stick the hash into certSKDToName +// - If the entry has a PEM format, parse the PEM, find the Mozilla pin name +// and stick the hash in certSKDToName +// We MUST be able to find a corresponding cert nickname for the Chrome names, +// otherwise we skip all pinsets referring to that Chrome name. +function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) { + // Prefixes that we care about. + const BEGIN_CERT = "-----BEGIN CERTIFICATE-----"; + const END_CERT = "-----END CERTIFICATE-----"; + const BEGIN_PUB_KEY = "-----BEGIN PUBLIC KEY-----"; + const END_PUB_KEY = "-----END PUBLIC KEY-----"; + + // Parsing states. + const PRE_NAME = 0; + const POST_NAME = 1; + const IN_CERT = 2; + const IN_PUB_KEY = 3; + let state = PRE_NAME; + + let lines = download(filename).split("\n"); + let pemCert = ""; + let pemPubKey = ""; + let hash = ""; + let chromeNameToHash = {}; + let chromeNameToMozName = {}; + let chromeName; + for (let line of lines) { + // Skip comments and newlines. + if (!line.length || line[0] == "#") { + continue; + } + switch (state) { + case PRE_NAME: + chromeName = line; + state = POST_NAME; + break; + case POST_NAME: + if (line.startsWith(SHA256_PREFIX)) { + hash = line.substring(SHA256_PREFIX.length); + chromeNameToHash[chromeName] = hash; + certNameToSKD[chromeName] = hash; + certSKDToName[hash] = chromeName; + state = PRE_NAME; + } else if (line.startsWith(BEGIN_CERT)) { + state = IN_CERT; + } else if (line.startsWith(BEGIN_PUB_KEY)) { + state = IN_PUB_KEY; + } else if ( + chromeName == "PinsListTimestamp" && + line.match(/^[0-9]+$/) + ) { + // If the name of this entry is "PinsListTimestamp", this line should + // be the pins list timestamp. It should consist solely of digits. + // Ignore it and expect other entries to come. 
+ state = PRE_NAME; + } else { + throw new Error( + "ERROR: couldn't parse Chrome certificate file line: " + line + ); + } + break; + case IN_CERT: + if (line.startsWith(END_CERT)) { + state = PRE_NAME; + hash = getSKDFromPem(pemCert); + pemCert = ""; + let mozName; + if (hash in certSKDToName) { + mozName = certSKDToName[hash]; + } else { + // Not one of our built-in certs. Prefix the name with + // GOOGLE_PIN_. + mozName = GOOGLE_PIN_PREFIX + chromeName; + dump( + "Can't find hash in builtin certs for Chrome nickname " + + chromeName + + ", inserting " + + mozName + + "\n" + ); + certSKDToName[hash] = mozName; + certNameToSKD[mozName] = hash; + } + chromeNameToMozName[chromeName] = mozName; + } else { + pemCert += line; + } + break; + case IN_PUB_KEY: + if (line.startsWith(END_PUB_KEY)) { + state = PRE_NAME; + hash = sha256Base64(pemPubKey); + pemPubKey = ""; + chromeNameToHash[chromeName] = hash; + certNameToSKD[chromeName] = hash; + certSKDToName[hash] = chromeName; + } else { + pemPubKey += line; + } + break; + default: + throw new Error( + "ERROR: couldn't parse Chrome certificate file " + line + ); + } + } + return [chromeNameToHash, chromeNameToMozName]; +} + +// We can only import pinsets from chrome if for every name in the pinset: +// - We have a hash from Chrome's static certificate file +// - We have a builtin cert +// If the pinset meets these requirements, we store a map array of pinset +// objects: +// { +// pinset_name : { +// // Array of names with entries in certNameToSKD +// sha256_hashes: [] +// } +// } +// and an array of imported pinset entries: +// { name: string, include_subdomains: boolean, test_mode: boolean, +// pins: pinset_name } +function downloadAndParseChromePins( + filename, + chromeNameToHash, + chromeNameToMozName, + certNameToSKD, + certSKDToName +) { + let chromePreloads = downloadAsJson(filename); + let chromePins = chromePreloads.pinsets; + let chromeImportedPinsets = {}; + let chromeImportedEntries = []; + + chromePins.forEach(function (pin) { + let valid = true; + let pinset = { name: pin.name, sha256_hashes: [] }; + // Translate the Chrome pinset format to ours + pin.static_spki_hashes.forEach(function (name) { + if (name in chromeNameToHash) { + let hash = chromeNameToHash[name]; + pinset.sha256_hashes.push(certSKDToName[hash]); + + // We should have already added hashes for all of these when we + // imported the certificate file. + if (!certNameToSKD[name]) { + throw new Error("ERROR: No hash for name: " + name); + } + } else if (name in chromeNameToMozName) { + pinset.sha256_hashes.push(chromeNameToMozName[name]); + } else { + dump( + "Skipping Chrome pinset " + + pinset.name + + ", couldn't find " + + "builtin " + + name + + " from cert file\n" + ); + valid = false; + } + }); + if (valid) { + chromeImportedPinsets[pinset.name] = pinset; + } + }); + + // Grab the domain entry lists. Chrome's entry format is similar to + // ours, except theirs includes a HSTS mode. + const cData = gStaticPins.chromium_data; + let entries = chromePreloads.entries; + entries.forEach(function (entry) { + // HSTS entry only + if (!entry.pins) { + return; + } + let pinsetName = cData.substitute_pinsets[entry.pins]; + if (!pinsetName) { + pinsetName = entry.pins; + } + + // We trim the entry name here to avoid breaking hostname comparisons in the + // HPKP implementation. 
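+    // (e.g. a stray trailing space in an entry name would never match a real host)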
+ entry.name = entry.name.trim(); + + let isProductionDomain = cData.production_domains.includes(entry.name); + let isProductionPinset = cData.production_pinsets.includes(pinsetName); + let excludeDomain = cData.exclude_domains.includes(entry.name); + let isTestMode = !isProductionPinset && !isProductionDomain; + if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) { + chromeImportedEntries.push({ + name: entry.name, + include_subdomains: entry.include_subdomains, + test_mode: isTestMode, + is_moz: false, + pins: pinsetName, + }); + } + }); + return [chromeImportedPinsets, chromeImportedEntries]; +} + +// Returns a pair of maps [certNameToSKD, certSKDToName] between cert +// nicknames and digests of the SPKInfo for the mozilla trust store +function loadNSSCertinfo(extraCertificates) { + let allCerts = gCertDB.getCerts(); + let certNameToSKD = {}; + let certSKDToName = {}; + for (let cert of allCerts) { + let name = cert.displayName; + let SKD = cert.sha256SubjectPublicKeyInfoDigest; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + + for (let cert of extraCertificates) { + let name = cert.commonName; + let SKD = cert.sha256SubjectPublicKeyInfoDigest; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + + { + // This is the pinning test certificate. The key hash identifies the + // default RSA key from pykey. + let name = "End Entity Test Cert"; + let SKD = "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8="; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + return [certNameToSKD, certSKDToName]; +} + +function parseJson(filename) { + let json = stripComments(readFileToString(filename)); + return JSON.parse(json); +} + +function nameToAlias(certName) { + // change the name to a string valid as a c identifier + // remove non-ascii characters + certName = certName.replace(/[^[:ascii:]]/g, "_"); + // replace non word characters + certName = certName.replace(/[^A-Za-z0-9]/g, "_"); + + return "k" + certName + "Fingerprint"; +} + +function compareByName(a, b) { + return a.name.localeCompare(b.name); +} + +function genExpirationTime() { + let now = new Date(); + let nowMillis = now.getTime(); + let expirationMillis = nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000; + let expirationMicros = expirationMillis * 1000; + return ( + "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" + + expirationMicros + + ");\n" + ); +} + +function writeFullPinset(certNameToSKD, certSKDToName, pinset) { + if (!pinset.sha256_hashes || !pinset.sha256_hashes.length) { + throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`); + } + writeFingerprints( + certNameToSKD, + certSKDToName, + pinset.name, + pinset.sha256_hashes + ); +} + +function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) { + let varPrefix = "kPinset_" + name; + writeString("static const char* const " + varPrefix + "_Data[] = {\n"); + let SKDList = []; + for (let certName of hashes) { + if (!(certName in certNameToSKD)) { + throw new Error(`ERROR: Can't find '${certName}' in certNameToSKD`); + } + SKDList.push(certNameToSKD[certName]); + } + for (let skd of SKDList.sort()) { + writeString(" " + nameToAlias(certSKDToName[skd]) + ",\n"); + } + if (!hashes.length) { + // ANSI C requires that an initialiser list be non-empty. 
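+    // (write a null placeholder entry so the generated file still compiles)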
+ writeString(" 0\n"); + } + writeString("};\n"); + writeString( + "static const StaticFingerprints " + + varPrefix + + " = {\n " + + "sizeof(" + + varPrefix + + "_Data) / sizeof(const char*),\n " + + varPrefix + + "_Data\n};\n\n" + ); +} + +function writeEntry(entry) { + let printVal = ` { "${entry.name}", `; + if (entry.include_subdomains) { + printVal += "true, "; + } else { + printVal += "false, "; + } + // Default to test mode if not specified. + let testMode = true; + if (entry.hasOwnProperty("test_mode")) { + testMode = entry.test_mode; + } + if (testMode) { + printVal += "true, "; + } else { + printVal += "false, "; + } + if ( + entry.is_moz || + (entry.pins.includes("mozilla") && entry.pins != "mozilla_test") + ) { + printVal += "true, "; + } else { + printVal += "false, "; + } + if ("id" in entry) { + if (entry.id >= 256) { + throw new Error("ERROR: Not enough buckets in histogram"); + } + if (entry.id >= 0) { + printVal += entry.id + ", "; + } + } else { + printVal += "-1, "; + } + printVal += "&kPinset_" + entry.pins; + printVal += " },\n"; + writeString(printVal); +} + +function writeDomainList(chromeImportedEntries) { + writeString("/* Sort hostnames for binary search. */\n"); + writeString( + "static const TransportSecurityPreload " + + "kPublicKeyPinningPreloadList[] = {\n" + ); + let count = 0; + let mozillaDomains = {}; + gStaticPins.entries.forEach(function (entry) { + mozillaDomains[entry.name] = true; + }); + // For any domain for which we have set pins, exclude them from + // chromeImportedEntries. + for (let i = chromeImportedEntries.length - 1; i >= 0; i--) { + if (mozillaDomains[chromeImportedEntries[i].name]) { + dump( + "Skipping duplicate pinset for domain " + + JSON.stringify(chromeImportedEntries[i], undefined, 2) + + "\n" + ); + chromeImportedEntries.splice(i, 1); + } + } + let sortedEntries = gStaticPins.entries; + sortedEntries.push.apply(sortedEntries, chromeImportedEntries); + for (let entry of sortedEntries.sort(compareByName)) { + count++; + writeEntry(entry); + } + writeString("};\n"); + + writeString("\n// Pinning Preload List Length = " + count + ";\n"); + writeString("\nstatic const int32_t kUnknownId = -1;\n"); +} + +function writeFile( + certNameToSKD, + certSKDToName, + chromeImportedPinsets, + chromeImportedEntries +) { + // Compute used pins from both Chrome's and our pinsets, so we can output + // them later. + let usedFingerprints = {}; + let mozillaPins = {}; + gStaticPins.pinsets.forEach(function (pinset) { + mozillaPins[pinset.name] = true; + pinset.sha256_hashes.forEach(function (name) { + usedFingerprints[name] = true; + }); + }); + for (let key in chromeImportedPinsets) { + let pinset = chromeImportedPinsets[key]; + pinset.sha256_hashes.forEach(function (name) { + usedFingerprints[name] = true; + }); + } + + writeString(FILE_HEADER); + + // Write actual fingerprints. 
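+  // (one "static const char" string per used certificate, in sorted name order)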
+ Object.keys(usedFingerprints) + .sort() + .forEach(function (certName) { + if (certName) { + writeString("/* " + certName + " */\n"); + writeString("static const char " + nameToAlias(certName) + "[] =\n"); + writeString(' "' + certNameToSKD[certName] + '";\n'); + writeString("\n"); + } + }); + + // Write the pinsets + writeString(PINSETDEF); + writeString("/* PreloadedHPKPins.json pinsets */\n"); + gStaticPins.pinsets.sort(compareByName).forEach(function (pinset) { + writeFullPinset(certNameToSKD, certSKDToName, pinset); + }); + writeString("/* Chrome static pinsets */\n"); + for (let key in chromeImportedPinsets) { + if (mozillaPins[key]) { + dump("Skipping duplicate pinset " + key + "\n"); + } else { + dump("Writing pinset " + key + "\n"); + writeFullPinset(certNameToSKD, certSKDToName, chromeImportedPinsets[key]); + } + } + + // Write the domainlist entries. + writeString(DOMAINHEADER); + writeDomainList(chromeImportedEntries); + writeString("\n"); + writeString(genExpirationTime()); +} + +function loadExtraCertificates(certStringList) { + let constructedCerts = []; + for (let certString of certStringList) { + constructedCerts.push(gCertDB.constructX509FromBase64(certString)); + } + return constructedCerts; +} + +var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates); +var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates); +var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts( + gStaticPins.chromium_data.cert_file_url, + certNameToSKD, + certSKDToName +); +var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins( + gStaticPins.chromium_data.json_file_url, + chromeNameToHash, + chromeNameToMozName, + certNameToSKD, + certSKDToName +); + +writeFile( + certNameToSKD, + certSKDToName, + chromeImportedPinsets, + chromeImportedEntries +); + +FileUtils.closeSafeFileOutputStream(gFileOutputStream); diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js new file mode 100644 index 0000000000..aeaa29bc2d --- /dev/null +++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js @@ -0,0 +1,557 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +"use strict"; + +// How to run this file: +// 1. [obtain firefox source code] +// 2. [build/obtain firefox binaries] +// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadlist.js [absolute path to]/nsSTSPreloadlist.inc' +// Note: Running this file outputs a new nsSTSPreloadlist.inc in the current +// working directory. 
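+// (This script must be run with xpcshell from a Firefox build, since it relies
+// on XPCOM services such as nsISiteSecurityService.)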
+ +var gSSService = Cc["@mozilla.org/ssservice;1"].getService( + Ci.nsISiteSecurityService +); + +const { FileUtils } = ChromeUtils.importESModule( + "resource://gre/modules/FileUtils.sys.mjs" +); + +const SOURCE = + "https://chromium.googlesource.com/chromium/src/+/refs/heads/main/net/http/transport_security_state_static.json?format=TEXT"; +const TOOL_SOURCE = + "https://hg.mozilla.org/mozilla-central/file/default/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js"; +const OUTPUT = "nsSTSPreloadList.inc"; +const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18; +const MAX_CONCURRENT_REQUESTS = 500; +const MAX_RETRIES = 1; +const REQUEST_TIMEOUT = 30 * 1000; +const ERROR_NONE = "no error"; +const ERROR_CONNECTING_TO_HOST = "could not connect to host"; +const ERROR_NO_HSTS_HEADER = "did not receive HSTS header"; +const ERROR_MAX_AGE_TOO_LOW = "max-age too low: "; +const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/*****************************************************************************/ +/* This is an automatically generated file. If you're not */ +/* nsSiteSecurityService.cpp, you shouldn't be #including it. */ +/*****************************************************************************/ + +#include +`; + +const GPERF_DELIM = "%%\n"; + +function download() { + let req = new XMLHttpRequest(); + req.open("GET", SOURCE, false); // doing the request synchronously + try { + req.send(); + } catch (e) { + throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`); + } + + if (req.status != 200) { + throw new Error( + "ERROR: problem downloading '" + SOURCE + "': status " + req.status + ); + } + + let resultDecoded; + try { + resultDecoded = atob(req.responseText); + } catch (e) { + throw new Error( + "ERROR: could not decode data as base64 from '" + SOURCE + "': " + e + ); + } + + // we have to filter out '//' comments, while not mangling the json + let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, ""); + let data = null; + try { + data = JSON.parse(result); + } catch (e) { + throw new Error(`ERROR: could not parse data from '${SOURCE}': ${e}`); + } + return data; +} + +function getHosts(rawdata) { + let hosts = []; + + if (!rawdata || !rawdata.entries) { + throw new Error( + "ERROR: source data not formatted correctly: 'entries' not found" + ); + } + + for (let entry of rawdata.entries) { + if (entry.mode && entry.mode == "force-https") { + if (entry.name) { + // We trim the entry name here to avoid malformed URI exceptions when we + // later try to connect to the domain. 
+ entry.name = entry.name.trim(); + entry.retries = MAX_RETRIES; + // We prefer the camelCase variable to the JSON's snake case version + entry.includeSubdomains = entry.include_subdomains; + hosts.push(entry); + } else { + throw new Error("ERROR: entry not formatted correctly: no name found"); + } + } + } + + return hosts; +} + +function processStsHeader(host, header, status, securityInfo) { + let maxAge = { + value: 0, + }; + let includeSubdomains = { + value: false, + }; + let error = ERROR_NONE; + if ( + header != null && + securityInfo != null && + securityInfo.overridableErrorCategory == + Ci.nsITransportSecurityInfo.ERROR_UNSET + ) { + try { + let uri = Services.io.newURI("https://" + host.name); + gSSService.processHeader(uri, header, {}, maxAge, includeSubdomains); + } catch (e) { + dump( + "ERROR: could not process header '" + + header + + "' from " + + host.name + + ": " + + e + + "\n" + ); + error = e; + } + } else if (status == 0) { + error = ERROR_CONNECTING_TO_HOST; + } else { + error = ERROR_NO_HSTS_HEADER; + } + + if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) { + error = ERROR_MAX_AGE_TOO_LOW; + } + + return { + name: host.name, + maxAge: maxAge.value, + includeSubdomains: includeSubdomains.value, + error, + retries: host.retries - 1, + forceInclude: host.forceInclude, + }; +} + +// RedirectAndAuthStopper prevents redirects and HTTP authentication +function RedirectAndAuthStopper() {} + +RedirectAndAuthStopper.prototype = { + // nsIChannelEventSink + asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) { + throw Components.Exception("", Cr.NS_ERROR_ENTITY_CHANGED); + }, + + // nsIAuthPrompt2 + promptAuth(channel, level, authInfo) { + return false; + }, + + asyncPromptAuth(channel, callback, context, level, authInfo) { + throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED); + }, + + getInterface(iid) { + return this.QueryInterface(iid); + }, + + QueryInterface: ChromeUtils.generateQI([ + "nsIChannelEventSink", + "nsIAuthPrompt2", + ]), +}; + +function fetchstatus(host) { + return new Promise((resolve, reject) => { + let xhr = new XMLHttpRequest(); + let uri = "https://" + host.name + "/"; + + xhr.open("head", uri, true); + xhr.setRequestHeader("X-Automated-Tool", TOOL_SOURCE); + xhr.timeout = REQUEST_TIMEOUT; + + let errorHandler = () => { + dump("ERROR: exception making request to " + host.name + "\n"); + resolve( + processStsHeader( + host, + null, + xhr.status, + xhr.channel && xhr.channel.securityInfo + ) + ); + }; + + xhr.onerror = errorHandler; + xhr.ontimeout = errorHandler; + xhr.onabort = errorHandler; + + xhr.onload = () => { + let header = xhr.getResponseHeader("strict-transport-security"); + resolve( + processStsHeader(host, header, xhr.status, xhr.channel.securityInfo) + ); + }; + + xhr.channel.notificationCallbacks = new RedirectAndAuthStopper(); + xhr.send(); + }); +} + +async function getHSTSStatus(host) { + do { + host = await fetchstatus(host); + } while (shouldRetry(host)); + return host; +} + +function compareHSTSStatus(a, b) { + if (a.name > b.name) { + return 1; + } + if (a.name < b.name) { + return -1; + } + return 0; +} + +function writeTo(string, fos) { + fos.write(string, string.length); +} + +// Determines and returns a string representing a declaration of when this +// preload list should no longer be used. +// This is the current time plus MINIMUM_REQUIRED_MAX_AGE. 
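+// (PRTime is measured in microseconds since the epoch, hence the extra
+// milliseconds-to-microseconds multiplication below.)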
+function getExpirationTimeString() { + let now = new Date(); + let nowMillis = now.getTime(); + // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds + let expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000; + let expirationMicros = expirationMillis * 1000; + return ( + "const PRTime gPreloadListExpirationTime = INT64_C(" + + expirationMicros + + ");\n" + ); +} + +function shouldRetry(response) { + return ( + response.error != ERROR_NO_HSTS_HEADER && + response.error != ERROR_MAX_AGE_TOO_LOW && + response.error != ERROR_NONE && + response.retries > 0 + ); +} + +// Copied from browser/components/migration/MigrationUtils.sys.mjs +function spinResolve(promise) { + if (!(promise instanceof Promise)) { + return promise; + } + let done = false; + let result = null; + let error = null; + promise + .catch(e => { + error = e; + }) + .then(r => { + result = r; + done = true; + }); + + Services.tm.spinEventLoopUntil( + "getHSTSPreloadList.js:spinResolve", + () => done + ); + if (error) { + throw error; + } else { + return result; + } +} + +async function probeHSTSStatuses(inHosts) { + let totalLength = inHosts.length; + dump("Examining " + totalLength + " hosts.\n"); + + // Make requests in batches of MAX_CONCURRENT_REQUESTS. Otherwise, we have + // too many in-flight requests and the time it takes to process them causes + // them all to time out. + let allResults = []; + while (inHosts.length) { + let promises = []; + for (let i = 0; i < MAX_CONCURRENT_REQUESTS && inHosts.length; i++) { + let host = inHosts.shift(); + promises.push(getHSTSStatus(host)); + } + let results = await Promise.all(promises); + let progress = ( + (100 * (totalLength - inHosts.length)) / + totalLength + ).toFixed(2); + dump(progress + "% done\n"); + allResults = allResults.concat(results); + } + + dump("HSTS Probe received " + allResults.length + " statuses.\n"); + return allResults; +} + +function readCurrentList(filename) { + var currentHosts = {}; + var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); + file.initWithPath(filename); + var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsILineInputStream + ); + fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF); + var line = {}; + + // While we generate entries matching the latest version format, + // we still need to be able to read entries in the previous version formats + // for bootstrapping a latest version preload list from a previous version + // preload list. Hence these regexes. 
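+  // (v3 entries are the plain "host, 0|1" gperf lines that output() writes;
+  // v2/v1 entries were C++ source lines.)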
+ const entryRegexes = [ + /([^,]+), (0|1)/, // v3 + / {2}\/\* "([^"]*)", (true|false) \*\//, // v2 + / {2}{ "([^"]*)", (true|false) },/, // v1 + ]; + + while (fis.readLine(line)) { + let match; + entryRegexes.find(r => { + match = r.exec(line.value); + return match; + }); + if (match) { + currentHosts[match[1]] = match[2] == "1" || match[2] == "true"; + } + } + return currentHosts; +} + +function combineLists(newHosts, currentHosts) { + let newHostsSet = new Set(); + + for (let newHost of newHosts) { + newHostsSet.add(newHost.name); + } + + for (let currentHost in currentHosts) { + if (!newHostsSet.has(currentHost)) { + newHosts.push({ name: currentHost, retries: MAX_RETRIES }); + } + } +} + +const TEST_ENTRIES = [ + { + name: "includesubdomains.preloaded.test", + includeSubdomains: true, + }, + { + name: "includesubdomains2.preloaded.test", + includeSubdomains: true, + }, + { + name: "noincludesubdomains.preloaded.test", + includeSubdomains: false, + }, +]; + +function deleteTestHosts(currentHosts) { + for (let testEntry of TEST_ENTRIES) { + delete currentHosts[testEntry.name]; + } +} + +function getTestHosts() { + let hosts = []; + for (let testEntry of TEST_ENTRIES) { + hosts.push({ + name: testEntry.name, + maxAge: MINIMUM_REQUIRED_MAX_AGE, + includeSubdomains: testEntry.includeSubdomains, + error: ERROR_NONE, + // This deliberately doesn't have a value for `retries` (because we should + // never attempt to connect to this host). + forceInclude: true, + }); + } + return hosts; +} + +async function insertHosts(inoutHostList, inAddedHosts) { + for (let host of inAddedHosts) { + inoutHostList.push(host); + } +} + +function filterForcedInclusions(inHosts, outNotForced, outForced) { + // Apply our filters (based on policy today) to determine which entries + // will be included without being checked (forced); the others will be + // checked using active probing. + for (let host of inHosts) { + if ( + host.policy == "google" || + host.policy == "public-suffix" || + host.policy == "public-suffix-requested" + ) { + host.forceInclude = true; + host.error = ERROR_NONE; + outForced.push(host); + } else { + outNotForced.push(host); + } + } +} + +function output(statuses) { + dump("INFO: Writing output to " + OUTPUT + "\n"); + try { + let file = new FileUtils.File( + PathUtils.join(Services.dirsvc.get("CurWorkD", Ci.nsIFile).path, OUTPUT) + ); + let fos = FileUtils.openSafeFileOutputStream(file); + writeTo(HEADER, fos); + writeTo(getExpirationTimeString(), fos); + + writeTo(GPERF_DELIM, fos); + + for (let status of statuses) { + let includeSubdomains = status.includeSubdomains ? 1 : 0; + writeTo(status.name + ", " + includeSubdomains + "\n", fos); + } + + writeTo(GPERF_DELIM, fos); + FileUtils.closeSafeFileOutputStream(fos); + dump("finished writing output file\n"); + } catch (e) { + dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n"); + throw e; + } +} + +function errorToString(status) { + return status.error == ERROR_MAX_AGE_TOO_LOW + ? 
status.error + status.maxAge : status.error;
+}
+
+async function main(args) {
+  if (args.length != 1) {
+    throw new Error(
+      "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>"
+    );
+  }
+
+  // get the current preload list
+  let currentHosts = readCurrentList(args[0]);
+  // delete any hosts we use in tests so we don't actually connect to them
+  deleteTestHosts(currentHosts);
+  // disable the current preload list so it won't interfere with requests we make
+  Services.prefs.setBoolPref(
+    "network.stricttransportsecurity.preloadlist",
+    false
+  );
+  // download and parse the raw json file from the Chromium source
+  let rawdata = download();
+  // get just the hosts with mode: "force-https"
+  let hosts = getHosts(rawdata);
+  // add hosts in the current list to the new list (avoiding duplicates)
+  combineLists(hosts, currentHosts);
+
+  // Don't contact hosts that are forced to be included anyway
+  let hostsToContact = [];
+  let forcedHosts = [];
+  filterForcedInclusions(hosts, hostsToContact, forcedHosts);
+
+  // Initialize the final status list
+  let hstsStatuses = [];
+  // Add the hosts we use in tests
+  dump("Adding test hosts\n");
+  insertHosts(hstsStatuses, getTestHosts());
+  // Add in the hosts that are forced
+  dump("Adding forced hosts\n");
+  insertHosts(hstsStatuses, forcedHosts);
+
+  let total = await probeHSTSStatuses(hostsToContact)
+    .then(function (probedStatuses) {
+      return hstsStatuses.concat(probedStatuses);
+    })
+    .then(function (statuses) {
+      return statuses.sort(compareHSTSStatus);
+    })
+    .then(function (statuses) {
+      for (let status of statuses) {
+        // If we've encountered an error for this entry (other than the site not
+        // sending an HSTS header), be safe and don't remove it from the list
+        // (given that it was already on the list).
+        if (
+          !status.forceInclude &&
+          status.error != ERROR_NONE &&
+          status.error != ERROR_NO_HSTS_HEADER &&
+          status.error != ERROR_MAX_AGE_TOO_LOW &&
+          status.name in currentHosts
+        ) {
+          // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+          status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+          status.includeSubdomains = currentHosts[status.name];
+        }
+      }
+      return statuses;
+    })
+    .then(function (statuses) {
+      // Filter out entries we aren't including.
+      var includedStatuses = statuses.filter(function (status) {
+        if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+          // dump("INFO: " + status.name + " NOT ON the preload list\n");
+          return false;
+        }
+
+        // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+        if (status.forceInclude && status.error != ERROR_NONE) {
+          dump(
+            status.name +
+              ": " +
+              errorToString(status) +
+              " (error ignored - included regardless)\n"
+          );
+        }
+        return true;
+      });
+      return includedStatuses;
+    });
+
+  // Write the output file
+  output(total);
+
+  dump("HSTS probing all done\n");
+}
+
+// arguments is a global within xpcshell
+spinResolve(main(arguments));
diff --git a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
new file mode 100755
index 0000000000..c5b9c78f6f
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
@@ -0,0 +1,672 @@
+#!/bin/bash
+
+set -ex
+
+function usage {
+  cat <&2
+    exit 31
+  fi
+  if [ ! -f "${TESTS_ARCHIVE}" ]; then
+    echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2
+    exit 32
+  fi
+  # Unpack the browser and move xpcshell in place for updating the preload list.
+  echo "INFO: Unpacking resources..."
+  ${UNPACK_CMD} "${BROWSER_ARCHIVE}"
+  mkdir -p tests
+  cd tests
+  ${UNTAR} "../${TESTS_ARCHIVE}"
+  cd "${BASEDIR}"
+  cp tests/bin/xpcshell "${PRODUCT}"
+}
+
+# Downloads the current in-tree HSTS (HTTP Strict Transport Security) files.
+# Runs a simple xpcshell script to generate up-to-date HSTS information.
+# Compares the new HSTS output with the old to determine whether we need to update.
+function compare_hsts_files {
+  cd "${BASEDIR}"
+
+  HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC_OLD}")"
+
+  echo "INFO: Downloading existing include file..."
+  rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC_OLD}"
+  echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}"
+  ${WGET} -O "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_HG}"
+
+  if [ ! -f "${HSTS_PRELOAD_INC_OLD}" ]; then
+    echo "Downloaded file '${HSTS_PRELOAD_INC_OLD}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2
+    exit 41
+  fi
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HSTS preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC_OLD}"; then
+    echo "HSTS preload list generation failed" >&2
+    exit 43
+  fi
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HSTS preload list is valid..."
+  if [ ! -s "${HSTS_PRELOAD_INC_NEW}" ]; then
+    echo "New HSTS preload list ${HSTS_PRELOAD_INC_NEW} is empty. That's less good." >&2
+    exit 42
+  fi
+  cd "${BASEDIR}"
+
+  # Check for differences
+  echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}"
+  ${DIFF} "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_NEW}" | tee "${HSTS_DIFF_ARTIFACT}"
+  if [ -s "${HSTS_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Downloads the current in-tree HPKP (HTTP public key pinning) files.
+# Runs a simple xpcshell script to generate up-to-date HPKP information.
+# Compares the new HPKP output with the old to determine whether we need to update.
+function compare_hpkp_files {
+  cd "${BASEDIR}"
+  HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")"
+
+  HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+  rm -f "${HPKP_PRELOAD_OUTPUT}"
+  ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}"
+  ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}"
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HPKP preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}"; then
+    echo "HPKP preload list generation failed" >&2
+    exit 54
+  fi
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HPKP preload list is valid..."
+
+  if [ ! -s "${HPKP_PRELOAD_OUTPUT}" ]; then
+    echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2
+    exit 52
+  fi
+  if ! grep kPreloadPKPinsExpirationTime "${HPKP_PRELOAD_OUTPUT}"; then
+    echo "${HPKP_PRELOAD_OUTPUT} is missing an expiration time. Truncated?" >&2
+    exit 53
+  fi
+  cd "${BASEDIR}"
+
+  echo "INFO: diffing old/new HPKP preload lists..."
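+  # A non-empty diff artifact makes this function return 0, i.e. "update needed".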
+ ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}" + if [ -s "${HPKP_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +function is_valid_xml { + xmlfile=$1 + XMLLINT=$(command -v xmllint 2>/dev/null | head -n1) + + if [ ! -x "${XMLLINT}" ]; then + echo "ERROR: xmllint not found in PATH" + exit 60 + fi + ${XMLLINT} --nonet --noout "${xmlfile}" +} + +# Downloads the public suffix list +function compare_suffix_lists { + HG_SUFFIX_URL="${HGREPO}/raw-file/default/${HG_SUFFIX_PATH}" + cd "${BASEDIR}" + + echo "INFO: ${WGET} -O ${GITHUB_SUFFIX_LOCAL} ${GITHUB_SUFFIX_URL}" + rm -f "${GITHUB_SUFFIX_LOCAL}" + ${WGET} -O "${GITHUB_SUFFIX_LOCAL}" "${GITHUB_SUFFIX_URL}" + + echo "INFO: ${WGET} -O ${HG_SUFFIX_LOCAL} ${HG_SUFFIX_URL}" + rm -f "${HG_SUFFIX_LOCAL}" + ${WGET} -O "${HG_SUFFIX_LOCAL}" "${HG_SUFFIX_URL}" + + echo "INFO: diffing in-tree suffix list against the suffix list from AMO..." + ${DIFF} ${GITHUB_SUFFIX_LOCAL} ${HG_SUFFIX_LOCAL} | tee "${SUFFIX_LIST_DIFF_ARTIFACT}" + if [ -s "${SUFFIX_LIST_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +function compare_remote_settings_files { + REMOTE_SETTINGS_SERVER="https://firefox.settings.services.mozilla.com/v1" + + # 1. List remote settings collections from server. + echo "INFO: fetch remote settings list from server" + ${WGET} -qO- "${REMOTE_SETTINGS_SERVER}/buckets/monitor/collections/changes/records" |\ + ${JQ} -r '.data[] | .bucket+"/"+.collection+"/"+(.last_modified|tostring)' |\ + # 2. For each entry ${bucket, collection, last_modified} + while IFS="/" read -r bucket collection last_modified; do + + # 3. Download the dump from HG into REMOTE_SETTINGS_INPUT folder + hg_dump_url="${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${bucket}/${collection}.json" + local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json" + mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}" + ${WGET} -qO "$local_location_input" "$hg_dump_url" + if [ $? -eq 8 ]; then + # We don't keep any dump for this collection, skip it. + # Try to clean up in case no collection in this bucket has dump. + rmdir "$REMOTE_SETTINGS_INPUT/${bucket}" --ignore-fail-on-non-empty + continue + fi + + # 4. Download server version into REMOTE_SETTINGS_OUTPUT folder + remote_records_url="$REMOTE_SETTINGS_SERVER/buckets/${bucket}/collections/${collection}/changeset?_expected=${last_modified}" + local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json" + mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}" + ${WGET} -qO- "$remote_records_url" | ${JQ} '{"data": .changes, "timestamp": .timestamp}' > "${local_location_output}" + + # 5. Download attachments if needed. + if [ "${bucket}" = "blocklists" ] && [ "${collection}" = "addons-bloomfilters" ]; then + # Find the attachment with the most recent generation_time, like _updateMLBF in Blocklist.jsm. + # The server should return one "bloomfilter-base" record, but in case it returns multiple, + # return the most recent one. The server may send multiple entries if we ever decide to use + # the "filter_expression" feature of Remote Settings to send different records to specific + # channels. In that case this code should be updated to recognize the filter expression, + # but until we do, simply select the most recent record - can't go wrong with that. + # Note that "attachment_type" and "generation_time" are specific to addons-bloomfilters. 
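+      # The jq selector passed below picks that single record out of the dump.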
+      update_remote_settings_attachment "${bucket}" "${collection}" addons-mlbf.bin \
+        'map(select(.attachment_type == "bloomfilter-base")) | sort_by(.generation_time) | last'
+    fi
+    # TODO: Bug 1873448. This cannot handle new/removed files currently, due to the
+    # build system making it difficult.
+    if [ "${bucket}" = "main" ] && [ "${collection}" = "search-config-icons" ]; then
+      ${JQ} -r '.data[] | .id' < "${local_location_output}" |\
+      while read -r id; do
+        # We do not want quotes around ${id}
+        # shellcheck disable=SC2086
+        update_remote_settings_attachment "${bucket}" "${collection}" ${id} ".[] | select(.id == \"${id}\")"
+      done
+    fi
+    # NOTE: The downloaded data is not validated. xpcshell should be used for that.
+  done
+
+  echo "INFO: diffing old/new remote settings dumps..."
+  ${DIFF} -r "${REMOTE_SETTINGS_INPUT}" "${REMOTE_SETTINGS_OUTPUT}" > "${REMOTE_SETTINGS_DIFF_ARTIFACT}"
+  if [ -s "${REMOTE_SETTINGS_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Helper for compare_remote_settings_files to download attachments from remote settings.
+# The format and location are documented at:
+# https://firefox-source-docs.mozilla.org/services/common/services/RemoteSettings.html#packaging-attachments
+function update_remote_settings_attachment() {
+  local bucket=$1
+  local collection=$2
+  local attachment_id=$3
+  # $4 is a jq filter on the arrays that should return one record with the attachment
+  local jq_attachment_selector=".data | map(select(.attachment)) | $4"
+
+  # These paths match _readAttachmentDump in services/settings/Attachments.jsm.
+  local path_to_attachment="${bucket}/${collection}/${attachment_id}"
+  local path_to_meta="${bucket}/${collection}/${attachment_id}.meta.json"
+  local old_meta="$REMOTE_SETTINGS_INPUT/${path_to_meta}"
+  local new_meta="$REMOTE_SETTINGS_OUTPUT/${path_to_meta}"
+
+  # Those files should have been created by compare_remote_settings_files before the function call.
+  local local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+  local local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+
+  # Compute the metadata based on already-downloaded records.
+  mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}/${collection}"
+  ${JQ} -cj <"$local_location_input" "${jq_attachment_selector}" > "${old_meta}"
+  mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}"
+  ${JQ} -cj <"$local_location_output" "${jq_attachment_selector}" > "${new_meta}"
+
+  if cmp --silent "${old_meta}" "${new_meta}" ; then
+    # Metadata not changed, don't bother downloading the attachments themselves.
+    return
+  fi
+  # Metadata changed. Download attachments.
+
+  echo "INFO: Downloading updated remote settings dump: ${bucket}/${collection}/${attachment_id}"
+
+  # Overwrite old_meta with the actual file from the repo. The content should be equivalent,
+  # but can have minor differences (e.g. different line endings) if the checked-in file was not
+  # generated by this script (e.g. manually checked in).
+  ${WGET} -qO "${old_meta}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_meta}"
+
+  ${WGET} -qO "${REMOTE_SETTINGS_INPUT}/${path_to_attachment}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_attachment}"
+
+  if [ -z "${ATTACHMENT_BASE_URL}" ] ; then
+    ATTACHMENT_BASE_URL=$(${WGET} -qO- "${REMOTE_SETTINGS_SERVER}" | ${JQ} -r .capabilities.attachments.base_url)
+  fi
+  attachment_path_from_meta=$(${JQ} -r < "${new_meta}" .attachment.location)
+  ${WGET} -qO "${REMOTE_SETTINGS_OUTPUT}/${path_to_attachment}" "${ATTACHMENT_BASE_URL}${attachment_path_from_meta}"
+}
+
+function compare_mobile_experiments() {
+  echo "INFO: ${WGET} ${EXPERIMENTER_URL}"
+  ${WGET} -O experiments.json "${EXPERIMENTER_URL}"
+  ${WGET} -O fenix-experiments-old.json "${HGREPO}/raw-file/default/${FENIX_INITIAL_EXPERIMENTS}"
+  ${WGET} -O focus-experiments-old.json "${HGREPO}/raw-file/default/${FOCUS_INITIAL_EXPERIMENTS}"
+
+  # shellcheck disable=SC2016
+  ${JQ} --arg APP_NAME fenix '{"data":map(select(.appName == $APP_NAME))}' < experiments.json > fenix-experiments-new.json
+  # shellcheck disable=SC2016
+  ${JQ} --arg APP_NAME focus_android '{"data":map(select(.appName == $APP_NAME))}' < experiments.json > focus-experiments-new.json
+
+  ( ${DIFF} fenix-experiments-old.json fenix-experiments-new.json; ${DIFF} focus-experiments-old.json focus-experiments-new.json ) > "${EXPERIMENTER_DIFF_ARTIFACT}"
+  if [ -s "${EXPERIMENTER_DIFF_ARTIFACT}" ]; then
+    return 0
+  else
+    # no change
+    return 1
+  fi
+}
+
+# Clones an hg repo
+function clone_repo {
+  cd "${BASEDIR}"
+  if [ ! -d "${REPODIR}" ]; then
+    ${HG} robustcheckout --sharebase /tmp/hg-store -b default "${HGREPO}" "${REPODIR}"
+  fi
+
+  ${HG} -R "${REPODIR}" pull
+  ${HG} -R "${REPODIR}" update -C default
+}
+
+# Copies new HSTS files in place, and commits them.
+function stage_hsts_files {
+  cd "${BASEDIR}"
+  cp -f "${HSTS_PRELOAD_INC_NEW}" "${REPODIR}/security/manager/ssl/"
+}
+
+function stage_hpkp_files {
+  cd "${BASEDIR}"
+  cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}"
+}
+
+function stage_remote_settings_files {
+  cd "${BASEDIR}"
+  cp -a "${REMOTE_SETTINGS_OUTPUT}"/* "${REPODIR}${REMOTE_SETTINGS_DIR}"
+}
+
+function stage_tld_suffix_files {
+  cd "${BASEDIR}"
+  cp -a "${GITHUB_SUFFIX_LOCAL}" "${REPODIR}/${HG_SUFFIX_PATH}"
+}
+
+function stage_mobile_experiments_files {
+  cd "${BASEDIR}"
+
+  cp fenix-experiments-new.json "${REPODIR}/${FENIX_INITIAL_EXPERIMENTS}"
+  cp focus-experiments-new.json "${REPODIR}/${FOCUS_INITIAL_EXPERIMENTS}"
+}
+
+# Push all pending commits to Phabricator
+function push_repo {
+  cd "${REPODIR}"
+  if [ ! -r "${HOME}/.arcrc" ]
+  then
+    return 1
+  fi
+  if ! ARC=$(command -v arc) && ! ARC=$(command -v arcanist)
+  then
+    return 1
+  fi
+  if [ -z "${REVIEWERS}" ]
+  then
+    return 1
+  fi
+  # Clean up older review requests
+  # Turn Needs Review D624: No bug, Automated HSTS ...
+  # into D624
+  for diff in $($ARC list | grep "Needs Review" | grep -E "${BRANCH} repo-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }')
+  do
+    echo "Removing old request $diff"
+    # There is no 'arc abandon', see bug 1452082
+    echo '{"transactions": [{"type":"abandon", "value": true}], "objectIdentifier": "'"${diff}"'"}' | $ARC call-conduit -- differential.revision.edit
+  done
+
+  $ARC diff --verbatim --reviewers "${REVIEWERS}"
+}
+
+
+
+# Main
+
+# Parse our command-line options.
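+# e.g.: periodic_file_updates.sh -p firefox -b mozilla-central --hsts --hpkp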
+while [ $# -gt 0 ]; do
+  case "$1" in
+    -h) usage; exit 0 ;;
+    -p) PRODUCT="$2"; shift ;;
+    -b) BRANCH="$2"; shift ;;
+    -n) DRY_RUN=true ;;
+    -c) CLOSED_TREE=true ;;
+    -d) DONTBUILD=true ;;
+    -a) APPROVAL=true ;;
+    --pinset) DO_PRELOAD_PINSET=true ;;
+    --hsts) DO_HSTS=true ;;
+    --hpkp) DO_HPKP=true ;;
+    --remote-settings) DO_REMOTE_SETTINGS=true ;;
+    --suffix-list) DO_SUFFIX_LIST=true ;;
+    --mobile-experiments) DO_MOBILE_EXPERIMENTS=true ;;
+    -r) REPODIR="$2"; shift ;;
+    --use-mozilla-central) USE_MC=true ;;
+    --use-ftp-builds) USE_TC=false ;;
+    -*) usage
+      exit 11 ;;
+    *) break ;; # terminate while loop
+  esac
+  shift
+done
+
+# Must supply a code branch to work with.
+if [ "${BRANCH}" == "" ]; then
+  echo "Error: You must specify a branch with -b branchname." >&2
+  usage
+  exit 12
+fi
+
+# Must choose at least one update action.
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_REMOTE_SETTINGS" == "false" ] && [ "$DO_SUFFIX_LIST" == "false" ] && [ "$DO_MOBILE_EXPERIMENTS" == "false" ]
+then
+  echo "Error: you must specify at least one action from: --hsts, --hpkp, --remote-settings, --suffix-list, or --mobile-experiments" >&2
+  usage
+  exit 13
+fi
+
+# per-product constants
+case "${PRODUCT}" in
+  thunderbird)
+    COMMIT_AUTHOR="tbirdbld "
+    ;;
+  firefox)
+    ;;
+  *)
+    echo "Error: Invalid product specified"
+    usage
+    exit 14
+    ;;
+esac
+
+if [ "${REPODIR}" == "" ]; then
+  REPODIR="$(basename "${BRANCH}")"
+fi
+
+case "${BRANCH}" in
+  mozilla-central|comm-central|try )
+    HGREPO="https://${HGHOST}/${BRANCH}"
+    ;;
+  mozilla-*|comm-* )
+    HGREPO="https://${HGHOST}/releases/${BRANCH}"
+    ;;
+  * )
+    HGREPO="https://${HGHOST}/projects/${BRANCH}"
+    ;;
+esac
+
+BROWSER_ARCHIVE="target.${PLATFORM_EXT}"
+TESTS_ARCHIVE="target.common.tests.tar.gz"
+
+preflight_cleanup
+if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ]
+then
+  if [ "${USE_TC}" == "true" ]; then
+    download_shared_artifacts_from_tc
+  else
+    download_shared_artifacts_from_ftp
+  fi
+  unpack_artifacts
+fi
+
+if [ "${DO_HSTS}" == "true" ]; then
+  if compare_hsts_files
+  then
+    HSTS_UPDATED=true
+  fi
+fi
+if [ "${DO_HPKP}" == "true" ]; then
+  if compare_hpkp_files
+  then
+    HPKP_UPDATED=true
+  fi
+fi
+if [ "${DO_REMOTE_SETTINGS}" == "true" ]; then
+  if compare_remote_settings_files
+  then
+    REMOTE_SETTINGS_UPDATED=true
+  fi
+fi
+if [ "${DO_SUFFIX_LIST}" == "true" ]; then
+  if compare_suffix_lists
+  then
+    SUFFIX_LIST_UPDATED=true
+  fi
+fi
+if [ "${DO_MOBILE_EXPERIMENTS}" == "true" ]; then
+  if compare_mobile_experiments
+  then
+    MOBILE_EXPERIMENTS_UPDATED=true
+  fi
+fi
+
+
+if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${REMOTE_SETTINGS_UPDATED}" == "false" ] && [ "${SUFFIX_LIST_UPDATED}" == "false" ] && [ "${MOBILE_EXPERIMENTS_UPDATED}" == "false" ]; then
+  echo "INFO: no updates required. Exiting."
+  exit 0
+else
+  if [ "${DRY_RUN}" == "true" ]; then
+    echo "INFO: Updates are available, not updating hg in dry-run mode."
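+    # (exit 2 distinguishes "updates available" from the "no updates" exit 0 above)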
+    exit 2
+  fi
+fi
+
+clone_repo
+
+COMMIT_MESSAGE="No Bug, ${BRANCH} repo-update"
+if [ "${HSTS_UPDATED}" == "true" ]
+then
+  stage_hsts_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} HSTS"
+fi
+
+if [ "${HPKP_UPDATED}" == "true" ]
+then
+  stage_hpkp_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} HPKP"
+fi
+
+if [ "${REMOTE_SETTINGS_UPDATED}" == "true" ]
+then
+  stage_remote_settings_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} remote-settings"
+fi
+
+if [ "${SUFFIX_LIST_UPDATED}" == "true" ]
+then
+  stage_tld_suffix_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} tld-suffixes"
+fi
+
+if [ "${MOBILE_EXPERIMENTS_UPDATED}" == "true" ]
+then
+  stage_mobile_experiments_files
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} mobile-experiments"
+fi
+
+if [ "${DONTBUILD}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+fi
+if [ "${CLOSED_TREE}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+fi
+if [ "${APPROVAL}" == "true" ]; then
+  COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=repo-update"
+fi
+
+
+if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}"
+then
+  push_repo
+fi
+
+echo "All done"
diff --git a/taskcluster/docker/periodic-updates/setup.sh b/taskcluster/docker/periodic-updates/setup.sh
new file mode 100755
index 0000000000..dd4faf5fc2
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/setup.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -ve
+
+apt-get update -q
+apt-get install -y \
+    arcanist \
+    curl \
+    jq \
+    libasound2 \
+    libgtk-3-0 \
+    libx11-xcb1 \
+    libxml2-utils \
+    libxt6 \
+    libxtst6 \
+    shellcheck \
+    unzip \
+    bzip2 \
+    wget
+
+rm -rf /setup