diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-21 11:44:51 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-21 11:44:51 +0000 |
commit | 9e3c08db40b8916968b9f30096c7be3f00ce9647 (patch) | |
tree | a68f146d7fa01f0134297619fbe7e33db084e0aa /security/nss/automation/taskcluster/graph/src | |
parent | Initial commit. (diff) | |
download | thunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.tar.xz thunderbird-9e3c08db40b8916968b9f30096c7be3f00ce9647.zip |
Adding upstream version 1:115.7.0. (tags: upstream/1%115.7.0, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'security/nss/automation/taskcluster/graph/src')
7 files changed, 1917 insertions, 0 deletions
diff --git a/security/nss/automation/taskcluster/graph/src/context_hash.js b/security/nss/automation/taskcluster/graph/src/context_hash.js new file mode 100644 index 0000000000..0699a0590e --- /dev/null +++ b/security/nss/automation/taskcluster/graph/src/context_hash.js @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +import fs from "fs"; +import path from "path"; +import crypto from "crypto"; +import flatmap from "flatmap"; + +// Compute the SHA-256 digest. +function sha256(data) { + let hash = crypto.createHash("sha256"); + hash.update(data); + return hash.digest("hex"); +} + +// Recursively collect a list of all files of a given directory. +function collectFilesInDirectory(dir) { + return flatmap(fs.readdirSync(dir), entry => { + let entry_path = path.join(dir, entry); + + if (fs.lstatSync(entry_path).isDirectory()) { + return collectFilesInDirectory(entry_path); + } + + return [entry_path]; + }); +} + +// A list of hashes for each file in the given path. +function collectFileHashes(context_path) { + let root = path.join(__dirname, "../../../.."); + let dir = path.join(root, context_path); + let files = collectFilesInDirectory(dir).sort(); + + return files.map(file => { + return sha256(file + "|" + fs.readFileSync(file, "utf-8")); + }); +} + +// Compute a context hash for the given context path. +export default function (context_path) { + // Regenerate all images when the image_builder changes. + let hashes = collectFileHashes("automation/taskcluster/image_builder"); + + // Regenerate images when the image itself changes. + hashes = hashes.concat(collectFileHashes(context_path)); + + // Generate a new prefix every month to ensure the image stays buildable. 
+ let now = new Date(); + let prefix = `${now.getUTCFullYear()}-${now.getUTCMonth() + 1}:`; + return sha256(prefix + hashes.join(",")); +} diff --git a/security/nss/automation/taskcluster/graph/src/extend.js b/security/nss/automation/taskcluster/graph/src/extend.js new file mode 100644 index 0000000000..b97a382ba1 --- /dev/null +++ b/security/nss/automation/taskcluster/graph/src/extend.js @@ -0,0 +1,1266 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +import merge from "./merge"; +import * as queue from "./queue"; + +const LINUX_IMAGE = { + name: "linux", + path: "automation/taskcluster/docker" +}; + +const LINUX_BUILDS_IMAGE = { + name: "linux-builds", + path: "automation/taskcluster/docker-builds" +}; + +const LINUX_INTEROP_IMAGE = { + name: "linux-interop", + path: "automation/taskcluster/docker-interop" +}; + +const ACVP_IMAGE = { + name: "acvp", + path: "automation/taskcluster/docker-acvp" +}; + +const ECCKIILA_IMAGE = { + name: "ecckiila", + path: "automation/taskcluster/docker-ecckiila" +}; + +const CLANG_FORMAT_IMAGE = { + name: "clang-format", + path: "automation/taskcluster/docker-clang-format" +}; + +const LINUX_GCC44_IMAGE = { + name: "linux-gcc-4.4", + path: "automation/taskcluster/docker-gcc-4.4" +}; + +const FUZZ_IMAGE = { + name: "fuzz", + path: "automation/taskcluster/docker-fuzz" +}; + +// Bug 1488148 - temporary image for fuzzing 32-bit builds. 
+const FUZZ_IMAGE_32 = { + name: "fuzz32", + path: "automation/taskcluster/docker-fuzz32" +}; + +const WINDOWS_CHECKOUT_CMD = + "bash -c \"hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " + + "(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " + + "(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)\""; +const MAC_CHECKOUT_CMD = ["bash", "-c", + "hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " + + "(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " + + "(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)"]; + +/*****************************************************************************/ + +queue.filter(task => { + if (task.group == "Builds") { + // Remove extra builds on {A,UB}San and ARM. + if (task.collection == "asan" || task.platform == "aarch64") { + return false; + } + + // Make modular builds only on Linux make. + if (task.symbol == "modular" && task.collection != "make") { + return false; + } + } + + if (task.tests == "bogo" || task.tests == "interop" || task.tests == "tlsfuzzer") { + // No windows + if (task.platform == "windows2012-64" || + task.platform == "windows2012-32") { + return false; + } + + // No ARM; TODO: enable + if (task.platform == "aarch64") { + return false; + } + + // No mac + if (task.platform == "mac") { + return false; + } + } + + if (task.tests == "fips" && + (task.platform == "mac" || task.platform == "aarch64")) { + return false; + } + + // Only old make builds have -Ddisable_libpkix=0 and can run chain tests. + if (task.tests == "chains" && task.collection != "make") { + return false; + } + + // Don't run all additional hardware tests on ARM. 
+ if (task.group == "Cipher" && task.platform == "aarch64" && task.env && + (task.env.NSS_DISABLE_PCLMUL == "1" || task.env.NSS_DISABLE_SSE4_1 == "1" + || task.env.NSS_DISABLE_AVX == "1" || task.env.NSS_DISABLE_AVX2 == "1")) { + return false; + } + + // Don't run ARM specific hardware tests on non-ARM. + // TODO: our server that runs task cluster doesn't support Intel SHA extensions. + if (task.group == "Cipher" && task.platform != "aarch64" && task.env && + (task.env.NSS_DISABLE_HW_SHA1 == "1" || task.env.NSS_DISABLE_HW_SHA2 == "1")) { + return false; + } + + // Don't run DBM builds on aarch64. + if (task.group == "DBM" && task.platform == "aarch64") { + return false; + } + + return true; +}); + +queue.map(task => { + if (task.collection == "asan") { + // CRMF and FIPS tests still leak, unfortunately. + if (task.tests == "crmf") { + task.env.ASAN_OPTIONS = "detect_leaks=0"; + } + } + + if (task.tests == "ssl") { + if (!task.env) { + task.env = {}; + } + + // Stress tests to not include other SSL tests + if (task.symbol == "stress") { + task.env.NSS_SSL_TESTS = "normal_normal"; + } else { + task.env.NSS_SSL_TESTS = "crl iopr policy normal_normal"; + } + + // FIPS runs + if (task.collection == "fips") { + task.env.NSS_SSL_TESTS += " fips_fips fips_normal normal_fips"; + } + + if (task.platform == "mac") { + task.maxRunTime = 7200; + } + } + + // Windows is slow. 
+ if ((task.platform == "windows2012-32" || task.platform == "windows2012-64") && + task.tests == "chains") { + task.maxRunTime = 7200; + } + + if (task.platform == "mac" && task.tests == "tools") { + task.maxRunTime = 7200; + } + return task; +}); + +/*****************************************************************************/ + +export default async function main() { + await scheduleLinux("Linux 32 (opt)", { + platform: "linux32", + image: LINUX_IMAGE + }, "-t ia32 --opt"); + + await scheduleLinux("Linux 32 (debug)", { + platform: "linux32", + collection: "debug", + image: LINUX_IMAGE + }, "-t ia32"); + + await scheduleLinux("Linux 64 (opt)", { + platform: "linux64", + image: LINUX_IMAGE + }, "--opt"); + + await scheduleLinux("Linux 64 (debug)", { + platform: "linux64", + collection: "debug", + image: LINUX_IMAGE + }); + + await scheduleLinux("Linux 64 (debug, make)", { + env: {USE_64: "1"}, + platform: "linux64", + image: LINUX_IMAGE, + collection: "make", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh" + ], + }); + + await scheduleLinux("Linux 64 (opt, make)", { + env: {USE_64: "1", BUILD_OPT: "1"}, + platform: "linux64", + image: LINUX_IMAGE, + collection: "make", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh" + ], + }); + + await scheduleLinux("Linux 32 (debug, make)", { + platform: "linux32", + image: LINUX_IMAGE, + collection: "make", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh" + ], + }); + + await scheduleLinux("Linux 64 (ASan, debug)", { + env: { + UBSAN_OPTIONS: "print_stacktrace=1", + NSS_DISABLE_ARENA_FREE_LIST: "1", + NSS_DISABLE_UNLOAD: "1", + CC: "clang", + CCC: "clang++", + }, + platform: "linux64", + collection: "asan", + image: LINUX_IMAGE, + features: ["allowPtrace"], + }, "--ubsan --asan"); + + await scheduleLinux("Linux 64 (FIPS opt)", { + platform: "linux64", + collection: 
"fips", + image: LINUX_IMAGE, + }, "--enable-fips --opt"); + + await scheduleWindows("Windows 2012 64 (debug, make)", { + platform: "windows2012-64", + collection: "make", + env: {USE_64: "1"} + }, "build.sh"); + + await scheduleWindows("Windows 2012 32 (debug, make)", { + platform: "windows2012-32", + collection: "make" + }, "build.sh"); + + await scheduleWindows("Windows 2012 64 (opt)", { + platform: "windows2012-64", + }, "build_gyp.sh --opt"); + + await scheduleWindows("Windows 2012 64 (debug)", { + platform: "windows2012-64", + collection: "debug" + }, "build_gyp.sh"); + + await scheduleWindows("Windows 2012 64 Static (opt)", { + platform: "windows2012-64", + collection: "opt-static" + }, "build_gyp.sh --opt --static"); + + await scheduleWindows("Windows 2012 32 (opt)", { + platform: "windows2012-32", + }, "build_gyp.sh --opt -t ia32"); + + await scheduleWindows("Windows 2012 32 (debug)", { + platform: "windows2012-32", + collection: "debug" + }, "build_gyp.sh -t ia32"); + + await scheduleFuzzing(); + await scheduleFuzzing32(); + + await scheduleTools(); + + let aarch64_base = { + image: "franziskus/nss-aarch64-ci", + provisioner: "localprovisioner", + workerType: "nss-aarch64", + platform: "aarch64", + maxRunTime: 7200 + }; + + await scheduleLinux("Linux AArch64 (debug)", + merge(aarch64_base, { + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh" + ], + collection: "debug", + }) + ); + + await scheduleLinux("Linux AArch64 (opt)", + merge(aarch64_base, { + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh --opt" + ], + collection: "opt", + }) + ); + + await scheduleLinux("Linux AArch64 (debug, make)", + merge(aarch64_base, { + env: {USE_64: "1"}, + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh" + ], + collection: "make", + }) + ); + + await scheduleMac("Mac (opt)", {collection: "opt"}, "--opt"); + 
await scheduleMac("Mac Static (opt)", {collection: "opt-static"}, "--opt --static -Ddisable_libpkix=1"); + await scheduleMac("Mac (debug)", {collection: "debug"}); + + // Must be executed after all other tasks are scheduled + queue.clearFilters(); + await scheduleCodeReview(); +} + + +async function scheduleMac(name, base, args = "") { + let mac_base = merge(base, { + env: { + PATH: "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin", + NSS_TASKCLUSTER_MAC: "1", + DOMSUF: "localdomain", + HOST: "localhost", + }, + provisioner: "localprovisioner", + workerType: "nss-macos-10-12", + platform: "mac" + }); + + // Build base definition. + let build_base_without_command_symbol = merge(mac_base, { + provisioner: "localprovisioner", + workerType: "nss-macos-10-12", + platform: "mac", + maxRunTime: 7200, + artifacts: [{ + expires: 24 * 7, + type: "directory", + path: "public" + }], + kind: "build", + }); + + let gyp_cmd = "nss/automation/taskcluster/scripts/build_gyp.sh "; + + if (!("collection" in base) || + (base.collection != "make" && + base.collection != "asan" && + base.collection != "fips" && + base.collection != "fuzz")) { + let nspr_gyp = gyp_cmd + "--nspr-only --nspr-test-build --nspr-test-run "; + let nspr_build = merge(build_base_without_command_symbol, { + command: [ + MAC_CHECKOUT_CMD, + ["bash", "-c", + nspr_gyp + args] + ], + symbol: "NSPR" + }); + // The task that tests NSPR. + let nspr_task_build = queue.scheduleTask(merge(nspr_build, {name})); + } + + let build_base = merge(build_base_without_command_symbol, { + command: [ + MAC_CHECKOUT_CMD, + ["bash", "-c", + gyp_cmd + args] + ], + symbol: "B" + }); + + // The task that builds NSPR+NSS. + let task_build = queue.scheduleTask(merge(build_base, {name})); + + // The task that generates certificates. 
+ let task_cert = queue.scheduleTask(merge(build_base, { + name: "Certificates", + command: [ + MAC_CHECKOUT_CMD, + ["bash", "-c", + "nss/automation/taskcluster/scripts/gen_certs.sh"] + ], + parent: task_build, + symbol: "Certs" + })); + + // Schedule tests. + scheduleTests(task_build, task_cert, merge(mac_base, { + command: [ + MAC_CHECKOUT_CMD, + ["bash", "-c", + "nss/automation/taskcluster/scripts/run_tests.sh"] + ] + })); + + return queue.submit(); +} + +/*****************************************************************************/ + +async function scheduleLinux(name, overrides, args = "") { + let checkout_and_gyp = "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh "; + let artifacts_and_kind = { + artifacts: { + public: { + expires: 24 * 7, + type: "directory", + path: "/home/worker/artifacts" + } + }, + kind: "build", + }; + + if (!("collection" in overrides) || + (overrides.collection != "make" && + overrides.collection != "asan" && + overrides.collection != "fips" && + overrides.collection != "fuzz")) { + let nspr_gyp = checkout_and_gyp + "--nspr-only --nspr-test-build --nspr-test-run "; + + let nspr_base = merge({ + command: [ + "/bin/bash", + "-c", + nspr_gyp + args + ], + }, overrides); + let nspr_without_symbol = merge(nspr_base, artifacts_and_kind); + let nspr_build = merge(nspr_without_symbol, { + symbol: "NSPR", + }); + // The task that tests NSPR. + let nspr_task_build = queue.scheduleTask(merge(nspr_build, {name})); + } + + // Construct a base definition. This takes |overrides| second because + // callers expect to be able to overwrite the |command| key. + let base = merge({ + command: [ + "/bin/bash", + "-c", + checkout_and_gyp + args + ], + }, overrides); + + let base_without_symbol = merge(base, artifacts_and_kind); + + // The base for building. + let build_base = merge(base_without_symbol, { + symbol: "B", + }); + + // The task that builds NSPR+NSS. 
+ let task_build = queue.scheduleTask(merge(build_base, {name})); + + // Make builds run FIPS tests, which need an extra FIPS build. + if (base.collection == "make") { + let extra_build = queue.scheduleTask(merge(build_base, { + env: { NSS_FORCE_FIPS: "1" }, + group: "FIPS", + name: `${name} w/ NSS_FORCE_FIPS` + })); + + // The task that generates certificates. + let task_cert = queue.scheduleTask(merge(build_base, { + name: "Certificates", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_certs.sh" + ], + parent: extra_build, + symbol: "Certs-F", + group: "FIPS", + })); + + // Schedule FIPS tests. + queue.scheduleTask(merge(base, { + parent: task_cert, + name: "FIPS", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh" + ], + cycle: "standard", + kind: "test", + name: "FIPS tests", + symbol: "Tests-F", + tests: "fips", + group: "FIPS" + })); + } + + // The task that generates certificates. + let cert_base = merge(build_base, { + name: "Certificates", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_certs.sh" + ], + parent: task_build, + symbol: "Certs" + }); + let task_cert = queue.scheduleTask(cert_base); + + // Schedule tests. + scheduleTests(task_build, task_cert, merge(base, { + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh" + ], + provisioner: "nss-t", + workerType: "t-linux-xlarge-gcp" + })); + + // Extra builds. 
+ let extra_base = merge(build_base, { + group: "Builds", + image: LINUX_BUILDS_IMAGE, + }); + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ clang-4`, + env: { + CC: "clang-4.0", + CCC: "clang++-4.0", + }, + symbol: "clang-4" + })); + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ clang-10`, + env: { + CC: "clang-10", + CCC: "clang++-10", + }, + symbol: "clang-10" + })); + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ gcc-4.4`, + image: LINUX_GCC44_IMAGE, + env: { + USE_64: "1", + CC: "gcc-4.4", + CCC: "g++-4.4", + // gcc-4.6 introduced nullptr. + NSS_DISABLE_GTESTS: "1", + }, + // Use the old Makefile-based build system, GYP doesn't have a proper GCC + // version check for __int128 support. It's mainly meant to cover RHEL6. + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh", + ], + symbol: "gcc-4.4" + })); + + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ gcc-4.8`, + env: { + CC: "gcc-4.8", + CCC: "g++-4.8", + // gcc-4.8 has incomplete c++11 support + NSS_DISABLE_GTESTS: "1", + }, + // Use -Ddisable-intelhw_sha=1, GYP doesn't have a proper GCC version + // check for Intel SHA support. 
+ command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh", + ], + symbol: "gcc-4.8" + })); + + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ gcc-5`, + env: { + CC: "gcc-5", + CCC: "g++-5" + }, + symbol: "gcc-5" + })); + + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ gcc-11`, + env: { + CC: "gcc-11", + CCC: "g++-11", + }, + symbol: "gcc-11" + })); + + queue.scheduleTask(merge(extra_base, { + name: `${name} w/ modular builds`, + image: LINUX_IMAGE, + env: {NSS_BUILD_MODULAR: "1"}, + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh", + ], + symbol: "modular" + })); + + if (base.collection != "make") { + let task_build_dbm = queue.scheduleTask(merge(extra_base, { + name: `${name} w/ legacy-db`, + command: [ + "/bin/bash", + "-c", + checkout_and_gyp + "--enable-legacy-db" + ], + symbol: "B", + group: "DBM", + })); + + let task_cert_dbm = queue.scheduleTask(merge(cert_base, { + parent: task_build_dbm, + group: "DBM", + symbol: "Certs" + })); + } + + return queue.submit(); +} + +/*****************************************************************************/ + +function scheduleFuzzingRun(base, name, target, max_len, symbol = null, corpus = null) { + const MAX_FUZZ_TIME = 300; + + queue.scheduleTask(merge(base, { + name, + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/fuzz.sh " + + `${target} nss/fuzz/corpus/${corpus || target} ` + + `-max_total_time=${MAX_FUZZ_TIME} ` + + `-max_len=${max_len}` + ], + provisioner: "nss-t", + workerType: "t-linux-xlarge-gcp", + symbol: symbol || name + })); +} + +async function scheduleFuzzing() { + let base = { + env: { + ASAN_OPTIONS: "allocator_may_return_null=1:detect_stack_use_after_return=1", + UBSAN_OPTIONS: "print_stacktrace=1", + NSS_DISABLE_ARENA_FREE_LIST: "1", + NSS_DISABLE_UNLOAD: "1", + CC: "clang", + CCC: "clang++" + }, + features: ["allowPtrace"], + platform: 
"linux64", + collection: "fuzz", + image: FUZZ_IMAGE + }; + + // Build base definition. + let build_base = merge(base, { + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && " + + "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz" + ], + artifacts: { + public: { + expires: 24 * 7, + type: "directory", + path: "/home/worker/artifacts" + } + }, + kind: "build", + symbol: "B" + }); + + // The task that builds NSPR+NSS. + let task_build = queue.scheduleTask(merge(build_base, { + name: "Linux x64 (debug, fuzz)" + })); + + // The task that builds NSPR+NSS (TLS fuzzing mode). + let task_build_tls = queue.scheduleTask(merge(build_base, { + name: "Linux x64 (debug, TLS fuzz)", + symbol: "B", + group: "TLS", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && " + + "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz=tls" + ], + })); + + // Schedule tests. + queue.scheduleTask(merge(base, { + parent: task_build_tls, + name: "Gtests", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh" + ], + env: {GTESTFILTER: "*Fuzz*"}, + tests: "ssl_gtests gtests", + cycle: "standard", + symbol: "Gtest", + kind: "test" + })); + + // Schedule fuzzing runs. + let run_base = merge(base, {parent: task_build, kind: "test"}); + scheduleFuzzingRun(run_base, "CertDN", "certDN", 4096); + scheduleFuzzingRun(run_base, "QuickDER", "quickder", 10000); + + // Schedule MPI fuzzing runs. + let mpi_base = merge(run_base, {group: "MPI"}); + let mpi_names = ["add", "addmod", "div", "mod", "mulmod", "sqr", + "sqrmod", "sub", "submod"]; + for (let name of mpi_names) { + scheduleFuzzingRun(mpi_base, `MPI (${name})`, `mpi-${name}`, 4096, name); + } + scheduleFuzzingRun(mpi_base, `MPI (invmod)`, `mpi-invmod`, 256, "invmod"); + scheduleFuzzingRun(mpi_base, `MPI (expmod)`, `mpi-expmod`, 2048, "expmod"); + + // Schedule TLS fuzzing runs (non-fuzzing mode). 
+ let tls_base = merge(run_base, {group: "TLS"}); + scheduleFuzzingRun(tls_base, "TLS Client", "tls-client", 20000, "client-nfm", + "tls-client-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "TLS Server", "tls-server", 20000, "server-nfm", + "tls-server-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "DTLS Client", "dtls-client", 20000, + "dtls-client-nfm", "dtls-client-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "DTLS Server", "dtls-server", 20000, + "dtls-server-nfm", "dtls-server-no_fuzzer_mode"); + + // Schedule TLS fuzzing runs (fuzzing mode). + let tls_fm_base = merge(tls_base, {parent: task_build_tls}); + scheduleFuzzingRun(tls_fm_base, "TLS Client", "tls-client", 20000, "client"); + scheduleFuzzingRun(tls_fm_base, "TLS Server", "tls-server", 20000, "server"); + scheduleFuzzingRun(tls_fm_base, "DTLS Client", "dtls-client", 20000, "dtls-client"); + scheduleFuzzingRun(tls_fm_base, "DTLS Server", "dtls-server", 20000, "dtls-server"); + + return queue.submit(); +} + +async function scheduleFuzzing32() { + let base = { + env: { + ASAN_OPTIONS: "allocator_may_return_null=1:detect_stack_use_after_return=1", + UBSAN_OPTIONS: "print_stacktrace=1", + NSS_DISABLE_ARENA_FREE_LIST: "1", + NSS_DISABLE_UNLOAD: "1", + CC: "clang", + CCC: "clang++" + }, + features: ["allowPtrace"], + platform: "linux32", + collection: "fuzz", + image: FUZZ_IMAGE_32 + }; + + // Build base definition. + let build_base = merge(base, { + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && " + + "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz -t ia32" + ], + artifacts: { + public: { + expires: 24 * 7, + type: "directory", + path: "/home/worker/artifacts" + } + }, + kind: "build", + symbol: "B" + }); + + // The task that builds NSPR+NSS. + let task_build = queue.scheduleTask(merge(build_base, { + name: "Linux 32 (debug, fuzz)" + })); + + // The task that builds NSPR+NSS (TLS fuzzing mode). 
+ let task_build_tls = queue.scheduleTask(merge(build_base, { + name: "Linux 32 (debug, TLS fuzz)", + symbol: "B", + group: "TLS", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && " + + "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz=tls -t ia32" + ], + })); + + // Schedule tests. + queue.scheduleTask(merge(base, { + parent: task_build_tls, + name: "Gtests", + command: [ + "/bin/bash", + "-c", + "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh" + ], + env: {GTESTFILTER: "*Fuzz*"}, + tests: "ssl_gtests gtests", + cycle: "standard", + symbol: "Gtest", + kind: "test" + })); + + // Schedule fuzzing runs. + let run_base = merge(base, {parent: task_build, kind: "test"}); + scheduleFuzzingRun(run_base, "CertDN", "certDN", 4096); + scheduleFuzzingRun(run_base, "QuickDER", "quickder", 10000); + + // Schedule MPI fuzzing runs. + let mpi_base = merge(run_base, {group: "MPI"}); + let mpi_names = ["add", "addmod", "div", "expmod", "mod", "mulmod", "sqr", + "sqrmod", "sub", "submod"]; + for (let name of mpi_names) { + scheduleFuzzingRun(mpi_base, `MPI (${name})`, `mpi-${name}`, 4096, name); + } + scheduleFuzzingRun(mpi_base, `MPI (invmod)`, `mpi-invmod`, 256, "invmod"); + + // Schedule TLS fuzzing runs (non-fuzzing mode). + let tls_base = merge(run_base, {group: "TLS"}); + scheduleFuzzingRun(tls_base, "TLS Client", "tls-client", 20000, "client-nfm", + "tls-client-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "TLS Server", "tls-server", 20000, "server-nfm", + "tls-server-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "DTLS Client", "dtls-client", 20000, + "dtls-client-nfm", "dtls-client-no_fuzzer_mode"); + scheduleFuzzingRun(tls_base, "DTLS Server", "dtls-server", 20000, + "dtls-server-nfm", "dtls-server-no_fuzzer_mode"); + + // Schedule TLS fuzzing runs (fuzzing mode). 
+ let tls_fm_base = merge(tls_base, {parent: task_build_tls}); + scheduleFuzzingRun(tls_fm_base, "TLS Client", "tls-client", 20000, "client"); + scheduleFuzzingRun(tls_fm_base, "TLS Server", "tls-server", 20000, "server"); + scheduleFuzzingRun(tls_fm_base, "DTLS Client", "dtls-client", 20000, "dtls-client"); + scheduleFuzzingRun(tls_fm_base, "DTLS Server", "dtls-server", 20000, "dtls-server"); + + return queue.submit(); +} + +/*****************************************************************************/ + +async function scheduleWindows(name, base, build_script) { + base = merge(base, { + workerType: "b-win2012-azure", + env: { + PATH: "c:\\mozilla-build\\bin;c:\\mozilla-build\\python;" + + "c:\\mozilla-build\\msys\\local\\bin;c:\\mozilla-build\\7zip;" + + "c:\\mozilla-build\\info-zip;c:\\mozilla-build\\python\\Scripts;" + + "c:\\mozilla-build\\yasm;c:\\mozilla-build\\msys\\bin;" + + "c:\\Windows\\system32;c:\\mozilla-build\\upx391w;" + + "c:\\mozilla-build\\moztools-x64\\bin;c:\\mozilla-build\\wget;c:\\Program Files\\Mercurial", + DOMSUF: "localdomain", + HOST: "localhost", + }, + features: ["taskclusterProxy"], + scopes: ["project:releng:services/tooltool/api/download/internal"], + }); + + let artifacts_and_kind = { + artifacts: [{ + expires: 24 * 7, + type: "directory", + path: "public\\build" + }], + kind: "build", + }; + + let build_without_command_symbol = merge(base, artifacts_and_kind); + + // Build base definition. 
+ let build_base = merge(build_without_command_symbol, { + command: [ + WINDOWS_CHECKOUT_CMD, + `bash -c 'nss/automation/taskcluster/windows/${build_script}'` + ], + symbol: "B" + }); + + if (!("collection" in base) || + (base.collection != "make" && + base.collection != "asan" && + base.collection != "fips" && + base.collection != "fuzz")) { + let nspr_gyp = + `bash -c 'nss/automation/taskcluster/windows/${build_script} --nspr-only --nspr-test-build --nspr-test-run'`; + let nspr_build = merge(build_without_command_symbol, { + command: [ + WINDOWS_CHECKOUT_CMD, + nspr_gyp + ], + symbol: "NSPR" + }); + // The task that tests NSPR. + let task_build = queue.scheduleTask(merge(nspr_build, {name})); + } + + // Make builds run FIPS tests, which need an extra FIPS build. + if (base.collection == "make") { + let extra_build = queue.scheduleTask(merge(build_base, { + env: { NSS_FORCE_FIPS: "1" }, + group: "FIPS", + name: `${name} w/ NSS_FORCE_FIPS` + })); + + // The task that generates certificates. + let task_cert = queue.scheduleTask(merge(build_base, { + name: "Certificates", + command: [ + WINDOWS_CHECKOUT_CMD, + "bash -c nss/automation/taskcluster/windows/gen_certs.sh" + ], + parent: extra_build, + symbol: "Certs-F", + group: "FIPS", + })); + + // Schedule FIPS tests. + queue.scheduleTask(merge(base, { + parent: task_cert, + name: "FIPS", + command: [ + WINDOWS_CHECKOUT_CMD, + "bash -c nss/automation/taskcluster/windows/run_tests.sh" + ], + cycle: "standard", + kind: "test", + name: "FIPS tests", + symbol: "Tests-F", + tests: "fips", + group: "FIPS" + })); + } + + // The task that builds NSPR+NSS. + let task_build = queue.scheduleTask(merge(build_base, {name})); + + // The task that generates certificates. + let task_cert = queue.scheduleTask(merge(build_base, { + name: "Certificates", + command: [ + WINDOWS_CHECKOUT_CMD, + "bash -c nss/automation/taskcluster/windows/gen_certs.sh" + ], + parent: task_build, + symbol: "Certs" + })); + + // Schedule tests. 
+ scheduleTests(task_build, task_cert, merge(base, {
+ command: [
+ WINDOWS_CHECKOUT_CMD,
+ "bash -c nss/automation/taskcluster/windows/run_tests.sh"
+ ]
+ }));
+
+ return queue.submit();
+}
+
+/*****************************************************************************/
+
+// Schedule the full NSS test-task fan-out against one build task
+// (task_build) and one certificate-generation task (task_cert).
+// test_base carries the platform/image defaults every test inherits;
+// tests that need certs are parented on task_cert, the rest on task_build.
+function scheduleTests(task_build, task_cert, test_base) {
+ test_base = merge(test_base, {kind: "test"});
+ let no_cert_base = merge(test_base, {parent: task_build});
+ let cert_base = merge(test_base, {parent: task_cert});
+ // Longer-running cipher suites get a doubled maxRunTime (2h).
+ let cert_base_long = merge(cert_base, {maxRunTime: 7200});
+
+ // Schedule tests that do NOT need certificates. This is defined as
+ // the test itself not needing certs AND not running under the upgradedb
+ // cycle (which itself needs certs). If cycle is not defined, default is all.
+ queue.scheduleTask(merge(no_cert_base, {
+ name: "Gtests", symbol: "Gtest", tests: "ssl_gtests gtests", cycle: "standard"
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
+ name: "Bogo tests",
+ symbol: "Bogo",
+ tests: "bogo",
+ cycle: "standard",
+ image: LINUX_INTEROP_IMAGE,
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
+ name: "Interop tests",
+ symbol: "Interop",
+ tests: "interop",
+ cycle: "standard",
+ image: LINUX_INTEROP_IMAGE,
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
+ name: "tlsfuzzer tests", symbol: "tlsfuzzer", tests: "tlsfuzzer", cycle: "standard"
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
+ name: "MPI tests", symbol: "MPI", tests: "mpi", cycle: "standard"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "Chains tests", symbol: "Chains", tests: "chains"
+ }));
+ // Cipher suite is run once per hardware-acceleration knob; each variant
+ // disables one acceleration path via an NSS_DISABLE_* env var.
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "Default", tests: "cipher", group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoAES", tests: "cipher",
+ env: {NSS_DISABLE_HW_AES: "1"}, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoSHA", tests: "cipher",
+ env: {
+ NSS_DISABLE_HW_SHA1: "1",
+ NSS_DISABLE_HW_SHA2: "1"
+ }, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoPCLMUL", tests: "cipher",
+ env: {NSS_DISABLE_PCLMUL: "1"}, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoAVX", tests: "cipher",
+ env: {NSS_DISABLE_AVX: "1"}, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoAVX2", tests: "cipher",
+ env: {NSS_DISABLE_AVX2: "1"}, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoSSSE3|NEON", tests: "cipher",
+ env: {
+ NSS_DISABLE_ARM_NEON: "1",
+ NSS_DISABLE_SSSE3: "1"
+ }, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base_long, {
+ name: "Cipher tests", symbol: "NoSSE4.1", tests: "cipher",
+ env: {NSS_DISABLE_SSE4_1: "1"}, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "EC tests", symbol: "EC", tests: "ec"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "Lowhash tests", symbol: "Lowhash", tests: "lowhash"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "SDR tests", symbol: "SDR", tests: "sdr"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "Policy tests", symbol: "Policy", tests: "policy"
+ }));
+
+ // Schedule tests that need certificates.
+ queue.scheduleTask(merge(cert_base, {
+ name: "CRMF tests", symbol: "CRMF", tests: "crmf"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "DB tests", symbol: "DB", tests: "dbtests"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "Merge tests", symbol: "Merge", tests: "merge"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "S/MIME tests", symbol: "SMIME", tests: "smime"
+ }));
+ queue.scheduleTask(merge(cert_base, {
+ name: "Tools tests", symbol: "Tools", tests: "tools"
+ }));
+
+ // SSL tests, need certificates too.
+ let ssl_base = merge(cert_base, {tests: "ssl", group: "SSL"});
+ queue.scheduleTask(merge(ssl_base, {
+ name: "SSL tests (standard)", symbol: "standard", cycle: "standard"
+ }));
+ queue.scheduleTask(merge(ssl_base, {
+ name: "SSL tests (pkix)", symbol: "pkix", cycle: "pkix"
+ }));
+ queue.scheduleTask(merge(ssl_base, {
+ name: "SSL tests (stress)", symbol: "stress", cycle: "sharedb",
+ env: {NSS_SSL_RUN: "stress"}
+ }));
+}
+
+/*****************************************************************************/
+
+// Schedule standalone tool/analysis tasks (ABI check, clang-format, ACVP,
+// scan-build, HACL*, ecckiila, coverage) on the "nss-tools" platform and
+// submit the batch. Each task checks out the tree and runs one script.
+async function scheduleTools() {
+ let base = {
+ platform: "nss-tools",
+ kind: "test"
+ };
+
+ // ABI check task
+ queue.scheduleTask(merge(base, {
+ symbol: "abi",
+ name: "abi",
+ image: LINUX_BUILDS_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/check_abi.sh"
+ ],
+ }));
+
+ queue.scheduleTask(merge(base, {
+ symbol: "clang-format",
+ name: "clang-format",
+ image: CLANG_FORMAT_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/clang-format/run_clang_format.sh"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ symbol: "acvp",
+ name: "acvp",
+ image: ACVP_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && bin/run.sh"
+ ]
+ }));
+
+
+ queue.scheduleTask(merge(base, {
+ symbol: "scan-build",
+ name: "scan-build",
+ image: FUZZ_IMAGE,
+ env: {
+ USE_64: "1",
+ CC: "clang",
+ CCC: "clang++",
+ },
+ artifacts: {
+ public: {
+ // expires is in hours here; converted via fromNow() in queue.js.
+ expires: 24 * 7,
+ type: "directory",
+ path: "/home/worker/artifacts"
+ }
+ },
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_scan_build.sh"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ symbol: "hacl",
+ name: "hacl",
+ image: LINUX_BUILDS_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_hacl.sh"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ symbol: "ecckiila",
+ name: "ecckiila",
+ image: ECCKIILA_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && bin/ecckiila.sh && bin/run.sh"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ symbol: "Coverage",
+ name: "Coverage",
+ image: FUZZ_IMAGE,
+ type: "other",
+ features: ["allowPtrace"],
+ artifacts: {
+ public: {
+ expires: 24 * 7,
+ type: "directory",
+ path: "/home/worker/artifacts"
+ }
+ },
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_coverage_report.sh"
+ ]
+ }));
+
+ return queue.submit();
+}
+
+// Schedule the treeherder "code-review" aggregation task that depends on all
+// tasks tagged "code-review"; no-op when no such tasks were scheduled.
+async function scheduleCodeReview() {
+ let tasks = queue.taggedTasks("code-review");
+ if(! tasks) {
+ console.debug("No code review tasks, skipping ending task");
+ return
+ }
+
+ // From https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/ci/code-review/kind.yml
+ queue.scheduleTask({
+ platform: "nss-tools",
+ name: "code-review-issues",
+ description: "List all issues found in static analysis and linting tasks",
+
+ // No logic on that task
+ image: LINUX_IMAGE,
+ command: ["/bin/true"],
+
+ // This task must run after all analyzer tasks are completed
+ parents: tasks,
+
+ // This option permits to run the task
+ // regardless of the analyzers tasks exit status
+ // as we are interested in the task failures
+ requires: "all-resolved",
+
+ // Publish code review trigger on pulse
+ routes: ["project.relman.codereview.v1.try_ending"],
+
+ kind: "code-review",
+ symbol: "E"
+ });
+
+ return queue.submit();
+};
diff --git a/security/nss/automation/taskcluster/graph/src/image_builder.js b/security/nss/automation/taskcluster/graph/src/image_builder.js
new file mode 100644
index 0000000000..7f7e762635
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/image_builder.js
@@ -0,0 +1,61 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */
+
+import * as queue from "./queue";
+import context_hash from "./context_hash";
+import taskcluster from "taskcluster-client";
+
+// True iff the given task published the docker image artifact we reuse.
+// NOTE(review): shadows the imported "queue" module with a local
+// taskcluster.Queue client — intentional but easy to misread.
+async function taskHasImageArtifact(taskId) {
+ let queue = new taskcluster.Queue(taskcluster.fromEnvVars());
+ let {artifacts} = await queue.listLatestArtifacts(taskId);
+ return artifacts.some(artifact => artifact.name == "public/image.tar.zst");
+}
+
+// Look up the task indexed under namespace `ns` and return its taskId only
+// if it still has the image artifact (it may have expired); null otherwise.
+async function findTaskWithImageArtifact(ns) {
+ let index = new taskcluster.Index(taskcluster.fromEnvVars());
+ let {taskId} = await index.findTask(ns);
+ let has_image = await taskHasImageArtifact(taskId);
+ return has_image ? taskId : null;
+}
+
+// Find a previously-built image task for this {name, path} context.
+// The index namespace is keyed by the context hash, so any change to the
+// docker context produces a different namespace (and a rebuild).
+// Resolves to null when nothing usable is indexed.
+export async function findTask({name, path}) {
+ let hash = await context_hash(path);
+ let ns = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;
+ return findTaskWithImageArtifact(ns).catch(() => null);
+}
+
+// Build the task *definition* (not yet scheduled) for an image-builder task
+// that builds the docker context at `path` and indexes the result under the
+// same hash-keyed namespace findTask() queries.
+export async function buildTask({name, path}) {
+ let hash = await context_hash(path);
+ let ns = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;
+
+ return {
+ name: `Image Builder (${name})`,
+ image: "nssdev/image_builder:0.1.5",
+ routes: ["index." + ns],
+ env: {
+ NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
+ NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION,
+ PROJECT: process.env.TC_PROJECT,
+ CONTEXT_PATH: path,
+ HASH: hash
+ },
+ artifacts: {
+ "public/image.tar.zst": {
+ type: "file",
+ // Hours; converted to an ISO timestamp by queue.js (90 days).
+ expires: 24 * 90,
+ path: "/artifacts/image.tar.zst"
+ }
+ },
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build_image.sh"
+ ],
+ platform: "nss-decision",
+ features: ["dind"],
+ maxRunTime: 7200,
+ kind: "build",
+ symbol: `I(${name})`
+ };
+}
diff --git a/security/nss/automation/taskcluster/graph/src/index.js b/security/nss/automation/taskcluster/graph/src/index.js
new file mode 100644
index 0000000000..78f4af8e48
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/index.js
@@ -0,0 +1,22 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import * as try_syntax from "./try_syntax";
+import * as queue from "./queue";
+import extend from "./extend";
+
+// Decision-task entry point: install the try-syntax filter when running on
+// the nss-try project, then build and submit the full task graph.
+const main = async () => {
+ // Init try syntax filter.
+ if (process.env.TC_PROJECT == "nss-try") {
+ await try_syntax.initFilter();
+ }
+
+ // Extend the task graph.
+ await extend();
+};
+
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/security/nss/automation/taskcluster/graph/src/merge.js b/security/nss/automation/taskcluster/graph/src/merge.js
new file mode 100644
index 0000000000..17043dd8e4
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/merge.js
@@ -0,0 +1,10 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import {recursive as merge} from "merge";
+
+// We always want to clone.
+// Recursive merge of all arguments; first arg `true` tells the "merge"
+// package to clone rather than mutate the first object.
+export default function (...args) {
+ return merge(true, ...args);
+}
diff --git a/security/nss/automation/taskcluster/graph/src/queue.js b/security/nss/automation/taskcluster/graph/src/queue.js
new file mode 100644
index 0000000000..1baa604174
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/queue.js
@@ -0,0 +1,304 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import {clone} from "merge";
+import merge from "./merge";
+import slugid from "slugid";
+import taskcluster from "taskcluster-client";
+import * as image_builder from "./image_builder";
+
+// Module-level state: registered map/filter hooks, pending task defs,
+// tag -> [taskId] index, cache of image-builder tasks, and extra parameters
+// merged into every task's `extra` section.
+let maps = [];
+let filters = [];
+
+let tasks = new Map();
+// NOTE(review): `tags` is a Map but is only ever accessed with bracket
+// indexing (tags[tag]) below, which reads/writes plain object properties,
+// not Map entries. It works, but a plain object (or Map.get/set) would be
+// clearer.
+let tags = new Map();
+let image_tasks = new Map();
+let parameters = {};
+
+let queue = new taskcluster.Queue({
+ rootUrl: process.env.TASKCLUSTER_PROXY_URL,
+});
+
+// ISO timestamp `hours` from now (hours truncated to an integer via |0).
+function fromNow(hours) {
+ let d = new Date();
+ d.setHours(d.getHours() + (hours|0));
+ return d.toJSON();
+}
+
+// Prepend the treeherder reporting route to any task-specific routes.
+function parseRoutes(routes) {
+ let rv = [
+ `tc-treeherder.v2.${process.env.TC_PROJECT}.${process.env.NSS_HEAD_REVISION}.${process.env.NSS_PUSHLOG_ID}`,
+ ...routes
+ ];
+
+ // Notify about failures (except on try).
+ // Turned off, too noisy.
+ /*if (process.env.TC_PROJECT != "nss-try") {
+ rv.push(`notify.email.${process.env.TC_OWNER}.on-failed`,
+ `notify.email.${process.env.TC_OWNER}.on-exception`);
+ }*/
+
+ return rv;
+}
+
+// ["a", "b"] -> {a: true, b: true} (docker-worker feature flags).
+function parseFeatures(list) {
+ return list.reduce((map, feature) => {
+ map[feature] = true;
+ return map;
+ }, {});
+}
+
+// Deep-copy the artifact definitions and convert their relative `expires`
+// hour counts into absolute ISO timestamps.
+function parseArtifacts(artifacts) {
+ let copy = clone(artifacts);
+ Object.keys(copy).forEach(key => {
+ copy[key].expires = fromNow(copy[key].expires);
+ });
+ return copy;
+}
+
+// "opt" -> {opt: true}, the shape treeherder expects for collections.
+function parseCollection(name) {
+ let collection = {};
+ collection[name] = true;
+ return collection;
+}
+
+// Build the treeherder `extra` section from our internal task definition.
+function parseTreeherder(def) {
+ let treeherder = {
+ build: {
+ platform: def.platform
+ },
+ machine: {
+ platform: def.platform
+ },
+ symbol: def.symbol,
+ jobKind: def.kind
+ };
+
+ if (def.group) {
+ treeherder.groupSymbol = def.group;
+ }
+
+ if (def.collection) {
+ treeherder.collection = parseCollection(def.collection);
+ }
+
+ if (def.tier) {
+ treeherder.tier = def.tier;
+ }
+
+ return treeherder;
+}
+
+// Translate our compact internal task definition into a full Taskcluster
+// task payload (env, dependencies, scopes, routes, treeherder metadata).
+function convertTask(def) {
+ let scopes = [];
+ let dependencies = [];
+
+ let env = merge({
+ NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
+ NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION,
+ NSS_MAX_MP_PBE_ITERATION_COUNT: "100",
+ }, def.env || {});
+
+ if (def.parent) {
+ dependencies.push(def.parent);
+ env.TC_PARENT_TASK_ID = def.parent;
+ }
+ if (def.parents) {
+ dependencies = dependencies.concat(def.parents);
+ }
+
+ if (def.tests) {
+ env.NSS_TESTS = def.tests;
+ }
+
+ if (def.cycle) {
+ env.NSS_CYCLES = def.cycle;
+ }
+ if (def.kind === "build") {
+ // Disable leak checking during builds (bug 1579290).
+ if (env.ASAN_OPTIONS) {
+ env.ASAN_OPTIONS += ":detect_leaks=0";
+ } else {
+ env.ASAN_OPTIONS = "detect_leaks=0";
+ }
+ }
+
+ let payload = {
+ env,
+ command: def.command,
+ maxRunTime: def.maxRunTime || 3600
+ };
+
+ if (def.image) {
+ payload.image = def.image;
+ }
+
+ if (def.artifacts) {
+ payload.artifacts = parseArtifacts(def.artifacts);
+ }
+
+ if (def.features) {
+ payload.features = parseFeatures(def.features);
+
+ if (payload.features.allowPtrace) {
+ scopes.push("docker-worker:feature:allowPtrace");
+ }
+ }
+
+ if (def.scopes) {
+ // Need to add existing scopes in the task definition
+ scopes.push.apply(scopes, def.scopes)
+ }
+
+ let extra = Object.assign({
+ treeherder: parseTreeherder(def)
+ }, parameters);
+
+ return {
+ provisionerId: def.provisioner || `nss-${process.env.MOZ_SCM_LEVEL}`,
+ workerType: def.workerType || "linux-gcp",
+ schedulerId: process.env.TC_SCHEDULER_ID,
+ taskGroupId: process.env.TASK_ID,
+
+ scopes,
+ created: fromNow(0),
+ deadline: fromNow(24),
+
+ dependencies,
+ requires: def.requires || "all-completed",
+ routes: parseRoutes(def.routes || []),
+
+ metadata: {
+ name: def.name,
+ description: def.name,
+ owner: process.env.TC_OWNER,
+ source: process.env.TC_SOURCE
+ },
+
+ payload,
+ extra,
+ };
+}
+
+// Register a hook that may rewrite each task definition before submission.
+export function map(fun) {
+ maps.push(fun);
+}
+
+// Register a predicate; tasks failing any registered filter are skipped.
+export function filter(fun) {
+ filters.push(fun);
+}
+
+// Merge extra key/values into the `extra` section of every future task.
+export function addParameters(params) {
+ parameters = Object.assign(parameters, params);
+}
+
+// NOTE(review): the `fun` parameter is unused; this unconditionally drops
+// all registered filters.
+export function clearFilters(fun) {
+ filters = [];
+}
+
+// taskIds previously scheduled with the given tag (see submit()).
+export function taggedTasks(tag) {
+ return tags[tag];
+}
+
+// Queue a task definition for later submission; returns its new taskId.
+export function scheduleTask(def) {
+ let taskId = slugid.v4();
+ tasks.set(taskId, merge({}, def));
+ return taskId;
+}
+
+// Apply filters/maps to every queued task, resolve docker images (reusing
+// indexed image tasks or scheduling an image-builder task), then create the
+// tasks via the Taskcluster queue, parents before dependants.
+export async function submit() {
+ let promises = new Map();
+
+ for (let [taskId, task] of tasks) {
+ // Allow filtering tasks before we schedule them.
+ if (!filters.every(filter => filter(task))) {
+ continue;
+ }
+
+ // Allow changing tasks before we schedule them.
+ maps.forEach(map => { task = map(merge({}, task)) });
+
+ let log_id = `${task.name} @ ${task.platform}[${task.collection || "opt"}]`;
+ if (task.group) {
+ log_id = `${task.group}::${log_id}`;
+ }
+ console.log(`+ Submitting ${log_id}.`);
+
+ // Index that task for each tag specified
+ if(task.tags) {
+ task.tags.map(tag => {
+ if(!tags[tag]) {
+ tags[tag] = [];
+ }
+ tags[tag].push(taskId);
+ });
+ }
+
+ let parent = task.parent;
+
+ // Convert the task definition.
+ task = await convertTask(task);
+
+ // Convert the docker image definition.
+ let image_def = task.payload.image;
+ if (image_def && image_def.hasOwnProperty("path")) {
+ let key = `${image_def.name}:${image_def.path}`;
+ let data = {};
+
+ // Check the cache first.
+ if (image_tasks.has(key)) {
+ data = image_tasks.get(key);
+ } else {
+ data.taskId = await image_builder.findTask(image_def);
+ data.isPending = !data.taskId;
+
+ // No task found.
+ if (data.isPending) {
+ let image_task = await image_builder.buildTask(image_def);
+
+ // Schedule a new image builder task immediately.
+ data.taskId = slugid.v4();
+
+ try {
+ await queue.createTask(data.taskId, convertTask(image_task));
+ } catch (e) {
+ console.error("! FAIL: Scheduling image builder task failed.");
+ continue; /* Skip this task on failure. */
+ }
+ }
+
+ // Store in cache.
+ image_tasks.set(key, data);
+ }
+
+ if (data.isPending) {
+ task.dependencies.push(data.taskId);
+ }
+
+ task.payload.image = {
+ path: "public/image.tar.zst",
+ taskId: data.taskId,
+ type: "task-image"
+ };
+ }
+
+ // Wait for the parent task to be created before scheduling dependants.
+ let predecessor = parent ? promises.get(parent) : Promise.resolve();
+
+ promises.set(taskId, predecessor.then(() => {
+ // Schedule the task.
+ return queue.createTask(taskId, task).catch(err => {
+ console.error(`! FAIL: Scheduling ${log_id} failed.`, err);
+ });
+ }));
+ }
+
+ // Wait for all requests to finish.
+ // NOTE(review): `promises` is a Map, so `promises.length` is undefined and
+ // this guard never fires — the Promise.all/log below are dead code and the
+ // createTask calls are left un-awaited. Map exposes `.size`, not `.length`;
+ // this looks like it should be `promises.size`. TODO confirm and fix
+ // upstream.
+ if (promises.length) {
+ await Promise.all([...promises.values()]);
+ console.log("=== Total:", promises.length, "tasks. ===");
+ }
+
+ tasks.clear();
+}
diff --git a/security/nss/automation/taskcluster/graph/src/try_syntax.js b/security/nss/automation/taskcluster/graph/src/try_syntax.js
new file mode 100644
index 0000000000..e7d1c43e8a
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/try_syntax.js
@@ -0,0 +1,201 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import * as queue from "./queue";
+import path from 'path'
+import fs from 'fs'
+import intersect from "intersect";
+import parse_args from "minimist";
+import util from "util";
+import child_process from 'child_process';
+
+let execFile = util.promisify(child_process.execFile);
+
+// Parse a "try:" syntax string (e.g. "-b do -p all -u none -t none") into
+// {builds, platforms, unittests, extra, tools}; unknown values degrade to
+// documented defaults rather than erroring.
+function parseOptions(opts) {
+ opts = parse_args(opts.split(/\s+/), {
+ default: {build: "do", platform: "all", unittests: "none", tools: "none"},
+ alias: {b: "build", p: "platform", u: "unittests", t: "tools", e: "extra-builds"},
+ string: ["build", "platform", "unittests", "tools", "extra-builds"]
+ });
+
+ // Parse build types (d=debug, o=opt).
+ let builds = intersect(opts.build.split(""), ["d", "o"]);
+
+ // If the given value is nonsense default to debug and opt builds.
+ if (builds.length == 0) {
+ builds = ["d", "o"];
+ }
+
+ // Parse platforms.
+ let allPlatforms = ["linux", "linux64", "linux64-asan", "linux64-fips",
+ "win", "win64", "win-make", "win64-make",
+ "linux64-make", "linux-make", "linux-fuzz",
+ "linux64-fuzz", "aarch64", "aarch64-make", "mac"];
+ let platforms = intersect(opts.platform.split(/\s*,\s*/), allPlatforms);
+
+ // If the given value is nonsense or "none" default to all platforms.
+ if (platforms.length == 0 && opts.platform != "none") {
+ platforms = allPlatforms;
+ }
+
+ // Parse unit tests.
+ let aliases = {"gtests": "gtest"};
+ let allUnitTests = ["bogo", "crmf", "chains", "cipher", "db", "ec", "fips",
+ "gtest", "interop", "lowhash", "merge", "sdr", "smime", "tools",
+ "ssl", "mpi", "scert", "spki", "policy", "tlsfuzzer"];
+ let unittests = intersect(opts.unittests.split(/\s*,\s*/).map(t => {
+ return aliases[t] || t;
+ }), allUnitTests);
+
+ // If the given value is "all" run all tests.
+ // If it's nonsense then don't run any tests.
+ if (opts.unittests == "all") {
+ unittests = allUnitTests;
+ } else if (unittests.length == 0) {
+ unittests = [];
+ }
+
+ // Parse tools.
+ let allTools = ["clang-format", "scan-build", "hacl", "ecckiila", "saw", "abi", "coverage"];
+ let tools = intersect(opts.tools.split(/\s*,\s*/), allTools);
+
+ // If the given value is "all" run all tools.
+ // If it's nonsense then don't run any tools.
+ if (opts.tools == "all") {
+ tools = allTools;
+ } else if (tools.length == 0) {
+ tools = [];
+ }
+
+ return {
+ builds: builds,
+ platforms: platforms,
+ unittests: unittests,
+ extra: (opts.e == "all"),
+ tools: tools
+ };
+}
+
+// Build the queue filter predicate from parsed try options: keeps a task
+// only if it matches the selected tools, unit tests, platform aliases and
+// build type (debug/opt, where asan/make/fuzz collections count as debug).
+function filter(opts) {
+ return function (task) {
+ // Filter tools. We can immediately return here as those
+ // are not affected by platform or build type selectors.
+ if (task.platform == "nss-tools") {
+ return opts.tools.some(tool => {
+ return task.symbol.toLowerCase().startsWith(tool) ||
+ (task.group && task.group.toLowerCase().startsWith(tool));
+ });
+ }
+
+ // Filter unit tests.
+ if (task.tests) {
+ let found = opts.unittests.some(test => {
+ if (task.group && task.group.toLowerCase() == "ssl" && test == "ssl") {
+ return true;
+ }
+ if (task.group && task.group.toLowerCase() == "cipher" && test == "cipher") {
+ return true;
+ }
+ return task.symbol.toLowerCase().startsWith(test);
+ });
+
+ if (!found) {
+ return false;
+ }
+ }
+
+ // Filter extra builds.
+ if (task.group == "Builds" && !opts.extra) {
+ return false;
+ }
+
+ let coll = name => name == (task.collection || "opt");
+
+ // Filter by platform.
+ let found = opts.platforms.some(platform => {
+ // Map try-syntax platform names onto the task platform names.
+ let aliases = {
+ "aarch64-make": "aarch64",
+ "linux": "linux32",
+ "linux-fuzz": "linux32",
+ "linux64-asan": "linux64",
+ "linux64-fips": "linux64",
+ "linux64-fuzz": "linux64",
+ "linux64-make": "linux64",
+ "linux-make": "linux32",
+ "win64-make": "windows2012-64",
+ "win-make": "windows2012-32",
+ "win64": "windows2012-64",
+ "win": "windows2012-32"
+ };
+
+ // Check the platform name.
+ let keep = (task.platform == (aliases[platform] || platform));
+
+ // Additional checks.
+ // NOTE: `&=` coerces the boolean to a number (0/1); it is then only
+ // used for truthiness, so behavior is as intended.
+ if (platform == "linux64-asan") {
+ keep &= coll("asan");
+ } else if (platform == "linux64-fips") {
+ keep &= coll("fips");
+ } else if (platform == "linux64-make" || platform == "linux-make" ||
+ platform == "win64-make" || platform == "win-make" ||
+ platform == "aarch64-make") {
+ keep &= coll("make");
+ } else if (platform == "linux64-fuzz" || platform == "linux-fuzz") {
+ keep &= coll("fuzz");
+ } else {
+ keep &= coll("opt") || coll("debug");
+ }
+
+ return keep;
+ });
+
+ if (!found) {
+ return false;
+ }
+
+ // Finally, filter by build type.
+ let isDebug = coll("debug") || coll("asan") || coll("make") ||
+ coll("fuzz");
+ return (isDebug && opts.builds.includes("d")) ||
+ (!isDebug && opts.builds.includes("o"));
+ }
+}
+
+// Commit message of the current hg changeset (used for try-syntax parsing).
+async function getCommitComment() {
+ const res = await execFile('hg', ['log', '-r', '.', '-T', '{desc}']);
+ return res.stdout;
+};
+
+// Read try_task_config.json (if present) into queue parameters, then parse
+// the "try: ..." line from the commit message and install the resulting
+// task filter (plus an NSPR-patch env mapper when --nspr-patch is given).
+export async function initFilter() {
+ let comment = await getCommitComment();
+
+ // Load try_task_config.json
+ // Add parameters to queue for created tasks
+ let config_path = path.normalize(path.join(__dirname, '../../../../try_task_config.json'))
+ if (fs.existsSync(config_path)) {
+ var payload = JSON.parse(fs.readFileSync(config_path));
+ if (payload['version'] == 2) {
+ queue.addParameters(payload['parameters']);
+ }
+ }
+
+ // Check for try syntax in changeset comment.
+ let match = comment.match(/\btry:\s*(.*)\s*$/m);
+
+ // Add try syntax filter.
+ if (match) {
+ let match1 = match[1];
+ queue.filter(filter(parseOptions(match1)));
+
+ if (match1.includes("--nspr-patch")) {
+ queue.map(task => {
+ if (!task.env) {
+ task.env = {};
+ }
+ task.env.ALLOW_NSPR_PATCH = "1";
+ return task;
+ });
+ }
+ }
+}