From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Fri, 19 Apr 2024 02:47:55 +0200 Subject: Adding upstream version 124.0.1. Signed-off-by: Daniel Baumann --- taskcluster/scripts/builder/build-haz-linux.sh | 184 ++++ taskcluster/scripts/builder/build-l10n.sh | 90 ++ taskcluster/scripts/builder/build-linux.sh | 125 +++ taskcluster/scripts/builder/build-sm-package.sh | 35 + taskcluster/scripts/builder/build-sm.sh | 64 ++ taskcluster/scripts/builder/repackage.sh | 95 ++ taskcluster/scripts/copy.sh | 9 + taskcluster/scripts/misc/afl-nyx.patch | 18 + .../misc/android-gradle-dependencies-lite.sh | 21 + .../scripts/misc/android-gradle-dependencies.sh | 21 + .../misc/android-gradle-dependencies/after.sh | 30 + .../misc/android-gradle-dependencies/before.sh | 30 + .../misc/android-gradle-dependencies/nexus.xml | 413 ++++++++ taskcluster/scripts/misc/are-we-esmified-yet.py | 191 ++++ taskcluster/scripts/misc/browsertime.sh | 19 + taskcluster/scripts/misc/build-afl.sh | 24 + taskcluster/scripts/misc/build-binutils-linux.sh | 14 + .../scripts/misc/build-breakpad-injector.sh | 29 + taskcluster/scripts/misc/build-cctools-port.sh | 102 ++ taskcluster/scripts/misc/build-clang-mingw.sh | 255 +++++ .../scripts/misc/build-clang-tidy-external.sh | 11 + taskcluster/scripts/misc/build-clang.sh | 72 ++ taskcluster/scripts/misc/build-compiler-rt-wasi.sh | 36 + taskcluster/scripts/misc/build-compiler-rt.sh | 39 + taskcluster/scripts/misc/build-cpython.sh | 116 +++ taskcluster/scripts/misc/build-custom-car.sh | 205 ++++ taskcluster/scripts/misc/build-custom-v8.sh | 47 + taskcluster/scripts/misc/build-dist-toolchains.sh | 12 + taskcluster/scripts/misc/build-dmg-hfsplus.sh | 36 + taskcluster/scripts/misc/build-gcc-linux.sh | 28 + .../scripts/misc/build-gcc-sixgill-plugin-linux.sh | 80 ++ taskcluster/scripts/misc/build-geckodriver.sh | 60 ++ taskcluster/scripts/misc/build-gn-common.sh | 36 + taskcluster/scripts/misc/build-gn-linux.sh | 13 + taskcluster/scripts/misc/build-gn-macosx.sh | 23 + taskcluster/scripts/misc/build-gn-win64.sh | 16 + taskcluster/scripts/misc/build-hfsplus-linux.sh | 14 + taskcluster/scripts/misc/build-libunwind.sh | 23 + taskcluster/scripts/misc/build-llvm-common.sh | 201 ++++ taskcluster/scripts/misc/build-llvm-symbolizer.sh | 5 + taskcluster/scripts/misc/build-mar-tools.sh | 27 + taskcluster/scripts/misc/build-mingw-fxc2-x86.sh | 25 + taskcluster/scripts/misc/build-mingw32-nsis.sh | 71 ++ taskcluster/scripts/misc/build-mkbom-linux.sh | 17 + taskcluster/scripts/misc/build-mozmake.sh | 66 ++ taskcluster/scripts/misc/build-msix-packaging.sh | 33 + taskcluster/scripts/misc/build-nasm.sh | 63 ++ taskcluster/scripts/misc/build-nodejs.sh | 22 + taskcluster/scripts/misc/build-nsis.sh | 32 + taskcluster/scripts/misc/build-pkgconf.sh | 44 + taskcluster/scripts/misc/build-resourcemonitor.sh | 40 + .../scripts/misc/build-rust-based-toolchain.sh | 79 ++ taskcluster/scripts/misc/build-sysroot-wasi.sh | 53 + taskcluster/scripts/misc/build-sysroot.sh | 126 +++ taskcluster/scripts/misc/build-upx.sh | 26 + taskcluster/scripts/misc/build-winchecksec.sh | 198 ++++ taskcluster/scripts/misc/build-wine.sh | 29 + taskcluster/scripts/misc/build-xar-linux.sh | 25 + taskcluster/scripts/misc/build-xz.sh | 25 + taskcluster/scripts/misc/cargo-apk-Cargo.lock | 802 +++++++++++++++ taskcluster/scripts/misc/cctools.patch | 17 + taskcluster/scripts/misc/dummy.sh | 1 + taskcluster/scripts/misc/fetch-chromium.py | 241 +++++ taskcluster/scripts/misc/fetch-content | 900 
+++++++++++++++++ taskcluster/scripts/misc/get_vs.py | 111 +++ taskcluster/scripts/misc/mingw-composition.patch | 50 + taskcluster/scripts/misc/mingw-dispatchqueue.patch | 157 +++ taskcluster/scripts/misc/mingw-dwrite_3.patch | 87 ++ taskcluster/scripts/misc/mingw-enum.patch | 25 + taskcluster/scripts/misc/mingw-ts_sd.patch | 33 + taskcluster/scripts/misc/mingw-unknown.patch | 46 + taskcluster/scripts/misc/mingw-widl.patch | 35 + taskcluster/scripts/misc/moz.build | 8 + taskcluster/scripts/misc/osx-cross-linker | 8 + taskcluster/scripts/misc/pack-cpython.sh | 36 + taskcluster/scripts/misc/pack.sh | 24 + .../scripts/misc/private_local_toolchain.sh | 14 + .../scripts/misc/repack-android-avd-linux.sh | 30 + .../scripts/misc/repack-android-emulator-linux.sh | 28 + .../scripts/misc/repack-android-ndk-linux.sh | 17 + .../scripts/misc/repack-android-sdk-linux.sh | 22 + .../misc/repack-android-system-images-linux.sh | 17 + taskcluster/scripts/misc/repack-clang.sh | 52 + taskcluster/scripts/misc/repack-jdk-linux.sh | 17 + taskcluster/scripts/misc/repack-node.sh | 14 + taskcluster/scripts/misc/repack_rust.py | 674 +++++++++++++ .../scripts/misc/run-profileserver-macos.sh | 20 + taskcluster/scripts/misc/run-profileserver.sh | 42 + .../scripts/misc/source-test-clang-setup.sh | 27 + taskcluster/scripts/misc/source-test-common.sh | 16 + .../scripts/misc/source-test-infer-setup.sh | 18 + taskcluster/scripts/misc/summarize-tgdiff.py | 52 + taskcluster/scripts/misc/tooltool-download.sh | 21 + taskcluster/scripts/misc/unify.sh | 42 + taskcluster/scripts/misc/unpack-sdk.py | 105 ++ taskcluster/scripts/misc/verify-devtools-bundle.py | 85 ++ taskcluster/scripts/misc/vs-cleanup.sh | 13 + taskcluster/scripts/misc/vs-setup.sh | 42 + taskcluster/scripts/misc/wasi-sdk-11.patch | 14 + taskcluster/scripts/misc/wasi-sdk.patch | 45 + .../scripts/misc/wr-cargotest-macos-build.sh | 24 + .../scripts/misc/wr-macos-cross-build-setup.sh | 60 ++ taskcluster/scripts/misc/wrench-android-build.sh | 26 + taskcluster/scripts/misc/wrench-deps-vendoring.sh | 23 + taskcluster/scripts/misc/wrench-macos-build.sh | 59 ++ taskcluster/scripts/misc/wrench-windows-tests.sh | 28 + taskcluster/scripts/misc/zstdpy | 79 ++ taskcluster/scripts/run-task | 1021 ++++++++++++++++++++ taskcluster/scripts/tester/run-wizard | 176 ++++ taskcluster/scripts/tester/test-linux.sh | 283 ++++++ 110 files changed, 9630 insertions(+) create mode 100755 taskcluster/scripts/builder/build-haz-linux.sh create mode 100755 taskcluster/scripts/builder/build-l10n.sh create mode 100755 taskcluster/scripts/builder/build-linux.sh create mode 100755 taskcluster/scripts/builder/build-sm-package.sh create mode 100755 taskcluster/scripts/builder/build-sm.sh create mode 100755 taskcluster/scripts/builder/repackage.sh create mode 100755 taskcluster/scripts/copy.sh create mode 100644 taskcluster/scripts/misc/afl-nyx.patch create mode 100755 taskcluster/scripts/misc/android-gradle-dependencies-lite.sh create mode 100755 taskcluster/scripts/misc/android-gradle-dependencies.sh create mode 100755 taskcluster/scripts/misc/android-gradle-dependencies/after.sh create mode 100755 taskcluster/scripts/misc/android-gradle-dependencies/before.sh create mode 100644 taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml create mode 100644 taskcluster/scripts/misc/are-we-esmified-yet.py create mode 100755 taskcluster/scripts/misc/browsertime.sh create mode 100755 taskcluster/scripts/misc/build-afl.sh create mode 100755 taskcluster/scripts/misc/build-binutils-linux.sh create mode 100755 
taskcluster/scripts/misc/build-breakpad-injector.sh create mode 100755 taskcluster/scripts/misc/build-cctools-port.sh create mode 100755 taskcluster/scripts/misc/build-clang-mingw.sh create mode 100755 taskcluster/scripts/misc/build-clang-tidy-external.sh create mode 100755 taskcluster/scripts/misc/build-clang.sh create mode 100755 taskcluster/scripts/misc/build-compiler-rt-wasi.sh create mode 100755 taskcluster/scripts/misc/build-compiler-rt.sh create mode 100755 taskcluster/scripts/misc/build-cpython.sh create mode 100755 taskcluster/scripts/misc/build-custom-car.sh create mode 100755 taskcluster/scripts/misc/build-custom-v8.sh create mode 100755 taskcluster/scripts/misc/build-dist-toolchains.sh create mode 100755 taskcluster/scripts/misc/build-dmg-hfsplus.sh create mode 100755 taskcluster/scripts/misc/build-gcc-linux.sh create mode 100755 taskcluster/scripts/misc/build-gcc-sixgill-plugin-linux.sh create mode 100755 taskcluster/scripts/misc/build-geckodriver.sh create mode 100755 taskcluster/scripts/misc/build-gn-common.sh create mode 100755 taskcluster/scripts/misc/build-gn-linux.sh create mode 100755 taskcluster/scripts/misc/build-gn-macosx.sh create mode 100755 taskcluster/scripts/misc/build-gn-win64.sh create mode 100755 taskcluster/scripts/misc/build-hfsplus-linux.sh create mode 100755 taskcluster/scripts/misc/build-libunwind.sh create mode 100755 taskcluster/scripts/misc/build-llvm-common.sh create mode 100755 taskcluster/scripts/misc/build-llvm-symbolizer.sh create mode 100755 taskcluster/scripts/misc/build-mar-tools.sh create mode 100755 taskcluster/scripts/misc/build-mingw-fxc2-x86.sh create mode 100755 taskcluster/scripts/misc/build-mingw32-nsis.sh create mode 100755 taskcluster/scripts/misc/build-mkbom-linux.sh create mode 100755 taskcluster/scripts/misc/build-mozmake.sh create mode 100755 taskcluster/scripts/misc/build-msix-packaging.sh create mode 100755 taskcluster/scripts/misc/build-nasm.sh create mode 100755 taskcluster/scripts/misc/build-nodejs.sh create mode 100755 taskcluster/scripts/misc/build-nsis.sh create mode 100755 taskcluster/scripts/misc/build-pkgconf.sh create mode 100755 taskcluster/scripts/misc/build-resourcemonitor.sh create mode 100755 taskcluster/scripts/misc/build-rust-based-toolchain.sh create mode 100755 taskcluster/scripts/misc/build-sysroot-wasi.sh create mode 100755 taskcluster/scripts/misc/build-sysroot.sh create mode 100755 taskcluster/scripts/misc/build-upx.sh create mode 100755 taskcluster/scripts/misc/build-winchecksec.sh create mode 100755 taskcluster/scripts/misc/build-wine.sh create mode 100755 taskcluster/scripts/misc/build-xar-linux.sh create mode 100755 taskcluster/scripts/misc/build-xz.sh create mode 100644 taskcluster/scripts/misc/cargo-apk-Cargo.lock create mode 100644 taskcluster/scripts/misc/cctools.patch create mode 100755 taskcluster/scripts/misc/dummy.sh create mode 100644 taskcluster/scripts/misc/fetch-chromium.py create mode 100755 taskcluster/scripts/misc/fetch-content create mode 100755 taskcluster/scripts/misc/get_vs.py create mode 100644 taskcluster/scripts/misc/mingw-composition.patch create mode 100644 taskcluster/scripts/misc/mingw-dispatchqueue.patch create mode 100644 taskcluster/scripts/misc/mingw-dwrite_3.patch create mode 100644 taskcluster/scripts/misc/mingw-enum.patch create mode 100644 taskcluster/scripts/misc/mingw-ts_sd.patch create mode 100644 taskcluster/scripts/misc/mingw-unknown.patch create mode 100644 taskcluster/scripts/misc/mingw-widl.patch create mode 100644 taskcluster/scripts/misc/moz.build create 
mode 100755 taskcluster/scripts/misc/osx-cross-linker create mode 100755 taskcluster/scripts/misc/pack-cpython.sh create mode 100755 taskcluster/scripts/misc/pack.sh create mode 100755 taskcluster/scripts/misc/private_local_toolchain.sh create mode 100755 taskcluster/scripts/misc/repack-android-avd-linux.sh create mode 100755 taskcluster/scripts/misc/repack-android-emulator-linux.sh create mode 100755 taskcluster/scripts/misc/repack-android-ndk-linux.sh create mode 100755 taskcluster/scripts/misc/repack-android-sdk-linux.sh create mode 100755 taskcluster/scripts/misc/repack-android-system-images-linux.sh create mode 100755 taskcluster/scripts/misc/repack-clang.sh create mode 100755 taskcluster/scripts/misc/repack-jdk-linux.sh create mode 100755 taskcluster/scripts/misc/repack-node.sh create mode 100755 taskcluster/scripts/misc/repack_rust.py create mode 100755 taskcluster/scripts/misc/run-profileserver-macos.sh create mode 100755 taskcluster/scripts/misc/run-profileserver.sh create mode 100755 taskcluster/scripts/misc/source-test-clang-setup.sh create mode 100755 taskcluster/scripts/misc/source-test-common.sh create mode 100755 taskcluster/scripts/misc/source-test-infer-setup.sh create mode 100644 taskcluster/scripts/misc/summarize-tgdiff.py create mode 100644 taskcluster/scripts/misc/tooltool-download.sh create mode 100755 taskcluster/scripts/misc/unify.sh create mode 100644 taskcluster/scripts/misc/unpack-sdk.py create mode 100644 taskcluster/scripts/misc/verify-devtools-bundle.py create mode 100644 taskcluster/scripts/misc/vs-cleanup.sh create mode 100644 taskcluster/scripts/misc/vs-setup.sh create mode 100644 taskcluster/scripts/misc/wasi-sdk-11.patch create mode 100644 taskcluster/scripts/misc/wasi-sdk.patch create mode 100755 taskcluster/scripts/misc/wr-cargotest-macos-build.sh create mode 100755 taskcluster/scripts/misc/wr-macos-cross-build-setup.sh create mode 100755 taskcluster/scripts/misc/wrench-android-build.sh create mode 100755 taskcluster/scripts/misc/wrench-deps-vendoring.sh create mode 100755 taskcluster/scripts/misc/wrench-macos-build.sh create mode 100644 taskcluster/scripts/misc/wrench-windows-tests.sh create mode 100755 taskcluster/scripts/misc/zstdpy create mode 100755 taskcluster/scripts/run-task create mode 100755 taskcluster/scripts/tester/run-wizard create mode 100755 taskcluster/scripts/tester/test-linux.sh (limited to 'taskcluster/scripts') diff --git a/taskcluster/scripts/builder/build-haz-linux.sh b/taskcluster/scripts/builder/build-haz-linux.sh new file mode 100755 index 0000000000..1abcbf6efe --- /dev/null +++ b/taskcluster/scripts/builder/build-haz-linux.sh @@ -0,0 +1,184 @@ +#!/bin/bash -ex + +function usage() { + echo "Usage: $0 [--project ] flags..." + echo "flags are treated the same way as a commit message would be" + echo "(as in, they are scanned for directives just like a try: ... line)" +} + +PROJECT=js +WORKSPACE= +while [[ $# -gt 0 ]]; do + if [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then + usage + exit 0 + elif [[ "$1" == "--project" ]]; then + shift + PROJECT="$1" + shift + elif [[ "$1" == "--no-tooltool" ]]; then + shift + elif [[ -z "$WORKSPACE" ]]; then + WORKSPACE=$( cd "$1" && pwd ) + shift + break + fi +done + +function check_commit_msg () { + ( set +e; + if [[ -n "$AUTOMATION" ]]; then + hg --cwd "$GECKO_PATH" log -r. 
--template '{desc}\n' | grep -F -q -- "$1" + else + echo -- "$SCRIPT_FLAGS" | grep -F -q -- "$1" + fi + ) +} + +if check_commit_msg "--dep"; then + HAZ_DEP=1 +fi + +SCRIPT_FLAGS=$* + +ANALYSIS_DIR="$WORKSPACE/haz-$PROJECT" + +# Ensure all the scripts in this dir are on the path.... +DIRNAME=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +PATH=$DIRNAME:$PATH + +# Use GECKO_BASE_REPOSITORY as a signal for whether we are running in automation. +export AUTOMATION=${GECKO_BASE_REPOSITORY:+1} + +: "${GECKO_PATH:="$DIRNAME"/../../..}" + +if ! [ -d "$GECKO_PATH" ]; then + echo "GECKO_PATH must be set to a directory containing a gecko source checkout" >&2 + exit 1 +fi + +# Directory to hold the compiled JS shell that will run the analysis. +HAZARD_SHELL_OBJDIR=$WORKSPACE/obj-haz-shell + +export NO_MERCURIAL_SETUP_CHECK=1 + +if [[ "$PROJECT" = "browser" ]]; then ( + cd "$WORKSPACE" + set "$WORKSPACE" + # Mozbuild config: + export MOZBUILD_STATE_PATH=$WORKSPACE/mozbuild/ + # Create .mozbuild so mach doesn't complain about this + mkdir -p "$MOZBUILD_STATE_PATH" +) fi + +# Build the shell +export HAZARD_SHELL_OBJDIR # This will be picked up by mozconfig.haz_shell. +$GECKO_PATH/mach hazards build-shell + +# Run a self-test +$GECKO_PATH/mach hazards self-test --shell-objdir="$HAZARD_SHELL_OBJDIR" + +# Artifacts folder is outside of the cache. +mkdir -p "$HOME"/artifacts/ || true + +function grab_artifacts () { + local artifacts + artifacts="$HOME/artifacts" + + [ -d "$ANALYSIS_DIR" ] && ( + cd "$ANALYSIS_DIR" + ls -lah + + # Do not error out if no files found + shopt -s nullglob + set +e + local important + important=(refs.txt unnecessary.txt hazards.txt gcFunctions.txt allFunctions.txt heapWriteHazards.txt rootingHazards.json hazards.html) + + # Bundle up the less important but still useful intermediate outputs, + # just to cut down on the clutter in treeherder's Job Details pane. + tar -acvf "${artifacts}/hazardIntermediates.tar.xz" --exclude-from <(IFS=$'\n'; echo "${important[*]}") *.txt *.lst build_xgill.log + + # Upload the important outputs individually, so that they will be + # visible in Job Details and accessible to automated jobs. + for f in "${important[@]}"; do + gzip -9 -c "$f" > "${artifacts}/$f.gz" + done + + # Check whether the user requested .xdb file upload in the top commit comment + if check_commit_msg "--upload-xdbs"; then + HAZ_UPLOAD_XDBS=1 + fi + + if [ -n "$HAZ_UPLOAD_XDBS" ]; then + for f in *.xdb; do + xz -c "$f" > "${artifacts}/$f.bz2" + done + fi + ) +} + +function check_hazards () { + ( + set +e + NUM_HAZARDS=$(grep -c 'Function.*has unrooted.*live across GC call' "$1"/hazards.txt) + NUM_UNSAFE=$(grep -c '^Function.*takes unsafe address of unrooted' "$1"/refs.txt) + NUM_UNNECESSARY=$(grep -c '^Function.* has unnecessary root' "$1"/unnecessary.txt) + NUM_DROPPED=$(grep -c '^Dropped CFG' "$1"/build_xgill.log) + NUM_WRITE_HAZARDS=$(perl -lne 'print $1 if m!found (\d+)/\d+ allowed errors!' "$1"/heapWriteHazards.txt) + NUM_MISSING=$(grep -c '^Function.*expected hazard.*but none were found' "$1"/hazards.txt) + + set +x + echo "TinderboxPrint: rooting hazards
$NUM_HAZARDS" + echo "TinderboxPrint: (unsafe references to unrooted GC pointers)
$NUM_UNSAFE" + echo "TinderboxPrint: (unnecessary roots)
$NUM_UNNECESSARY" + echo "TinderboxPrint: missing expected hazards
$NUM_MISSING" + echo "TinderboxPrint: heap write hazards
$NUM_WRITE_HAZARDS" + + # Display errors in a way that will get picked up by the taskcluster scraper. + perl -lne 'print "TEST-UNEXPECTED-FAIL | hazards | $1 $2" if /^Function.* has (unrooted .*live across GC call).* (at .*)$/' "$1"/hazards.txt + + exit_status=0 + + if [ $NUM_HAZARDS -gt 0 ]; then + echo "TEST-UNEXPECTED-FAIL | hazards | $NUM_HAZARDS rooting hazards detected" >&2 + echo "TinderboxPrint: documentation
<br/>static rooting hazard analysis failures, visit \"Inspect Task\" link for hazard details" + exit_status=1 + fi + + if [ $NUM_MISSING -gt 0 ]; then + echo "TEST-UNEXPECTED-FAIL | hazards | $NUM_MISSING expected hazards went undetected" >&2 + echo "TinderboxPrint: documentation
<br/>static rooting hazard analysis failures, visit \"Inspect Task\" link for hazard details" + exit_status=1 + fi + + NUM_ALLOWED_WRITE_HAZARDS=0 + if [ $NUM_WRITE_HAZARDS -gt $NUM_ALLOWED_WRITE_HAZARDS ]; then + echo "TEST-UNEXPECTED-FAIL | heap-write-hazards | $NUM_WRITE_HAZARDS heap write hazards detected out of $NUM_ALLOWED_WRITE_HAZARDS allowed" >&2 + echo "TinderboxPrint: documentation
<br/>heap write hazard analysis failures, visit \"Inspect Task\" link for hazard details" + exit_status=1 + fi + + if [ $NUM_DROPPED -gt 0 ]; then + echo "TEST-UNEXPECTED-FAIL | hazards | $NUM_DROPPED CFGs dropped" >&2 + echo "TinderboxPrint: sixgill unable to handle constructs
$NUM_DROPPED" + exit_status=1 + fi + + if [ $exit_status -ne 0 ]; then + exit $exit_status + fi + ) +} + +trap grab_artifacts EXIT + +# Gather the information from the source tree by compiling it. +$GECKO_PATH/mach hazards gather --project=$PROJECT --work-dir="$ANALYSIS_DIR" + +# Analyze the collected information. +$GECKO_PATH/mach hazards analyze --project=$PROJECT --shell-objdir="$HAZARD_SHELL_OBJDIR" --work-dir="$ANALYSIS_DIR" + +check_hazards "$ANALYSIS_DIR" + +################################### script end ################################### diff --git a/taskcluster/scripts/builder/build-l10n.sh b/taskcluster/scripts/builder/build-l10n.sh new file mode 100755 index 0000000000..0a324c7479 --- /dev/null +++ b/taskcluster/scripts/builder/build-l10n.sh @@ -0,0 +1,90 @@ +#! /bin/bash -vex + +set -x -e + +echo "running as" $(id) + +#### +# Taskcluster friendly wrapper for performing fx desktop l10n repacks via mozharness. +# Based on ./build-linux.sh +#### + +# Inputs, with defaults + +: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT} +: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG} +: MOZHARNESS_CONFIG_PATHS ${MOZHARNESS_CONFIG_PATHS} +: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS} +: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS} + +: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache} + +: MOZ_SCM_LEVEL ${MOZ_SCM_LEVEL:=1} + +: MOZ_SCM_LEVEL ${MOZ_SCM_LEVEL:=1} + +: WORKSPACE ${WORKSPACE:=/builds/worker/workspace} +: MOZ_OBJDIR ${MOZ_OBJDIR:=$WORKSPACE/obj-build} + +set -v + +fail() { + echo # make sure error message is on a new line + echo "[build-l10n.sh:error]" "${@}" + exit 1 +} + +export MOZ_CRASHREPORTER_NO_REPORT=1 +export TINDERBOX_OUTPUT=1 + +# test required parameters are supplied +if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi +if [[ -z "${MOZHARNESS_CONFIG}" && -z "${EXTRA_MOZHARNESS_CONFIG}" ]]; then fail "MOZHARNESS_CONFIG or EXTRA_MOZHARNESS_CONFIG is not set"; fi + +# set up mozharness configuration, via command line, env, etc. + +# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the +# cache. However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be +# entirely effective. +export TOOLTOOL_CACHE + +export MOZ_OBJDIR + +config_path_cmds="" +for path in ${MOZHARNESS_CONFIG_PATHS}; do + config_path_cmds="${config_path_cmds} --extra-config-path ${GECKO_PATH}/${path}" +done + +# support multiple, space delimited, config files +config_cmds="" +for cfg in $MOZHARNESS_CONFIG; do + config_cmds="${config_cmds} --config ${cfg}" +done + +# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions +# in the mozharness configuration) +if [ -n "$MOZHARNESS_ACTIONS" ]; then + actions="" + for action in $MOZHARNESS_ACTIONS; do + actions="$actions --$action" + done +fi + +# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run +if [ -n "$MOZHARNESS_OPTIONS" ]; then + options="" + for option in $MOZHARNESS_OPTIONS; do + options="$options --$option" + done +fi + +cd /builds/worker + +$GECKO_PATH/mach python -- \ + $GECKO_PATH/testing/${MOZHARNESS_SCRIPT} \ + ${config_path_cmds} \ + ${config_cmds} \ + $actions \ + $options \ + --log-level=debug \ + --work-dir=$WORKSPACE \ diff --git a/taskcluster/scripts/builder/build-linux.sh b/taskcluster/scripts/builder/build-linux.sh new file mode 100755 index 0000000000..35c54788b4 --- /dev/null +++ b/taskcluster/scripts/builder/build-linux.sh @@ -0,0 +1,125 @@ +#! 
/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +#### +# Taskcluster friendly wrapper for performing fx desktop builds via mozharness. +#### + +# Inputs, with defaults + +: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT} +: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG} +: MOZHARNESS_CONFIG_PATHS ${MOZHARNESS_CONFIG_PATHS} +: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS} +: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS} + +: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache} + +: MOZ_SCM_LEVEL ${MOZ_SCM_LEVEL:=1} + +: NEED_XVFB ${NEED_XVFB:=false} + +: MH_CUSTOM_BUILD_VARIANT_CFG ${MH_CUSTOM_BUILD_VARIANT_CFG} +: MH_BRANCH ${MH_BRANCH:=mozilla-central} +: MH_BUILD_POOL ${MH_BUILD_POOL:=staging} + +: WORKSPACE ${WORKSPACE:=/builds/worker/workspace} +: MOZ_OBJDIR ${MOZ_OBJDIR:=$WORKSPACE/obj-build} + +set -v + +fail() { + echo # make sure error message is on a new line + echo "[build-linux.sh:error]" "${@}" + exit 1 +} + +export MOZ_CRASHREPORTER_NO_REPORT=1 +export TINDERBOX_OUTPUT=1 + +# use "simple" package names so that they can be hard-coded in the task's +# extras.locations +export MOZ_SIMPLE_PACKAGE_NAME=target + +# test required parameters are supplied +if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi +if [[ -z "${MOZHARNESS_CONFIG}" && -z "${EXTRA_MOZHARNESS_CONFIG}" ]]; then fail "MOZHARNESS_CONFIG or EXTRA_MOZHARNESS_CONFIG is not set"; fi + +# run XVfb in the background, if necessary +if $NEED_XVFB; then + . /builds/worker/scripts/xvfb.sh + + cleanup() { + local rv=$? + cleanup_xvfb + exit $rv + } + trap cleanup EXIT INT + + start_xvfb '1024x768x24' 2 +fi + +# set up mozharness configuration, via command line, env, etc. + +debug_flag="" +if [ 0$DEBUG -ne 0 ]; then + debug_flag='--debug' +fi + +custom_build_variant_cfg_flag="" +if [ -n "${MH_CUSTOM_BUILD_VARIANT_CFG}" ]; then + custom_build_variant_cfg_flag="--custom-build-variant-cfg=${MH_CUSTOM_BUILD_VARIANT_CFG}" +fi + +# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the +# cache. However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be +# entirely effective. 
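(Aside, not part of the upstream script: a sketch of how the MOZHARNESS_* inputs above become mozharness flags. The values here are invented for illustration; the actual expansion is done by the loops just below.)

    MOZHARNESS_CONFIG="builds/releng_base_firefox.py builds/releng_base_linux_64_builds.py"
    MOZHARNESS_ACTIONS="build check-test"
    # The loops below turn these into:
    #   --config builds/releng_base_firefox.py --config builds/releng_base_linux_64_builds.py
    #   --build --check-test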
+export TOOLTOOL_CACHE + +export MOZ_OBJDIR + +config_path_cmds="" +for path in ${MOZHARNESS_CONFIG_PATHS}; do + config_path_cmds="${config_path_cmds} --extra-config-path ${GECKO_PATH}/${path}" +done + +# support multiple, space delimited, config files +config_cmds="" +for cfg in $MOZHARNESS_CONFIG; do + config_cmds="${config_cmds} --config ${cfg}" +done + +# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions +# in the mozharness configuration) +if [ -n "$MOZHARNESS_ACTIONS" ]; then + actions="" + for action in $MOZHARNESS_ACTIONS; do + actions="$actions --$action" + done +fi + +# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run +if [ -n "$MOZHARNESS_OPTIONS" ]; then + options="" + for option in $MOZHARNESS_OPTIONS; do + options="$options --$option" + done +fi + +cd /builds/worker + +$GECKO_PATH/mach python -- \ + $GECKO_PATH/testing/${MOZHARNESS_SCRIPT} \ + ${config_path_cmds} \ + ${config_cmds} \ + $debug_flag \ + $custom_build_variant_cfg_flag \ + $actions \ + $options \ + --log-level=debug \ + --work-dir=$WORKSPACE \ + --branch=${MH_BRANCH} \ + --build-pool=${MH_BUILD_POOL} diff --git a/taskcluster/scripts/builder/build-sm-package.sh b/taskcluster/scripts/builder/build-sm-package.sh new file mode 100755 index 0000000000..816256ea9c --- /dev/null +++ b/taskcluster/scripts/builder/build-sm-package.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -xe + +# Default variables values. +: ${WORK:=$HOME/workspace} + +mkdir -p $UPLOAD_DIR + +# Package up the sources into the release tarball. +AUTOMATION=1 DIST=$UPLOAD_DIR $GECKO_PATH/js/src/make-source-package.py + +# Extract the tarball into a new directory in the workspace. + +PACKAGE_DIR=$WORK/sm-package + +# Do not use -p option because the package directory should not exist. +mkdir $PACKAGE_DIR +pushd $PACKAGE_DIR + +tar -xvf $UPLOAD_DIR/mozjs-*.tar.*z* + +: ${PYTHON3:=python3} + +status=0 +( + # Build the freshly extracted, packaged SpiderMonkey. + cd ./mozjs-* + AUTOMATION=1 $PYTHON3 js/src/devtools/automation/autospider.py --skip-tests=checks $SPIDERMONKEY_VARIANT +) || status=$? + +# Copy artifacts for upload by TaskCluster +cp -rL ./mozjs-*/obj-spider/dist/bin/{js,jsapi-tests,js-gdb.py,libmozjs*} $UPLOAD_DIR + +exit $status diff --git a/taskcluster/scripts/builder/build-sm.sh b/taskcluster/scripts/builder/build-sm.sh new file mode 100755 index 0000000000..7ee31bbcf4 --- /dev/null +++ b/taskcluster/scripts/builder/build-sm.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +set -x + +# Default variables values. +: ${SPIDERMONKEY_VARIANT:=plain} +: ${WORK:=$HOME/workspace} +: ${PYTHON3:=python3} + +# Ensure upload dir exists +mkdir -p $UPLOAD_DIR + +# Run the script +export MOZ_UPLOAD_DIR="$(cd "$UPLOAD_DIR"; pwd)" +export OBJDIR=$WORK/obj-spider +AUTOMATION=1 $PYTHON3 $GECKO_PATH/js/src/devtools/automation/autospider.py ${SPIDERMONKEY_PLATFORM:+--platform=$SPIDERMONKEY_PLATFORM} $SPIDERMONKEY_VARIANT +BUILD_STATUS=$? + +# Copy artifacts for upload by TaskCluster. +upload=${MOZ_JS_UPLOAD_BINARIES_DEFAULT-1} +# User-provided override switch. +if [ -n "$MOZ_JS_UPLOAD_BINARIES" ]; then + upload=1 +fi +if [ "$upload" = "1" ]; then + ( + cd "$OBJDIR/dist/bin" + zip "$UPLOAD_DIR/target.jsshell.zip" {js,jsapi-tests,js-gdb.py,libnspr4.so,libplds4.so,libplc4.so} + ) + cp -L "$OBJDIR/mozinfo.json" "$UPLOAD_DIR/target.mozinfo.json" + + # Fuzzing users want the correct version of llvm-symbolizer available in the + # same directory as the built output. 
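(Aside, not from the tree: the README heredoc below spells out the opt-in for uploading shell binaries. A typical developer invocation would look like this, shown for illustration only.)

    ./mach try fuzzy --env MOZ_JS_UPLOAD_BINARIES=1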
+ if [ -d "$MOZ_FETCHES_DIR/llvm-symbolizer" ]; then + for f in "$MOZ_FETCHES_DIR/llvm-symbolizer/bin/llvm-symbolizer"*; do + gzip -c "$f" > "$UPLOAD_DIR/llvm-symbolizer.gz" || echo "gzip $f failed" >&2 + break + done + fi +else # !upload +# Provide a note for users on why we don't include artifacts for these builds +# by default, and how they can get the artifacts if they really need them. + cat >"$UPLOAD_DIR"/README-artifacts.txt <<'EOF' +Artifact upload has been disabled for this build due to infrequent usage of the +generated artifacts. If you find yourself in a position where you need the +shell or similar artifacts from this build, please redo your push with the +environment variable MOZ_JS_UPLOAD_BINARIES set to 1. You can provide this as +the option `--env MOZ_JS_UPLOAD_BINARIES=1` to `mach try fuzzy` or `mach try auto`. +EOF +fi + +# Fuzzing also uses a few fields in target.json file for automated downloads to +# identify what was built. +if [ -n "$MOZ_BUILD_DATE" ] && [ -n "$GECKO_HEAD_REV" ]; then + cat >"$UPLOAD_DIR"/target.json < +Date: Fri Jul 14 11:04:04 2023 -0400 + + Increase MAP_SIZE for Nyx + +diff --git a/include/config.h b/include/config.h +index 988e536e..5e9870c0 100644 +--- a/include/config.h ++++ b/include/config.h +@@ -442,7 +442,7 @@ + problems with complex programs). You need to recompile the target binary + after changing this - otherwise, SEGVs may ensue. */ + +-#define MAP_SIZE_POW2 16 ++#define MAP_SIZE_POW2 23 + + /* Do not change this unless you really know what you are doing. */ diff --git a/taskcluster/scripts/misc/android-gradle-dependencies-lite.sh b/taskcluster/scripts/misc/android-gradle-dependencies-lite.sh new file mode 100755 index 0000000000..bf91d64709 --- /dev/null +++ b/taskcluster/scripts/misc/android-gradle-dependencies-lite.sh @@ -0,0 +1,21 @@ +#!/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +set -v + +cd $GECKO_PATH + +# Needed for Nexus +export PATH=$MOZ_FETCHES_DIR/jdk-8/bin:$PATH + +. taskcluster/scripts/misc/android-gradle-dependencies/before.sh + +export MOZCONFIG=mobile/android/config/mozconfigs/android-arm-gradle-dependencies/nightly-lite +./mach build +./mach gradle downloadDependencies +./mach android gradle-dependencies + +. taskcluster/scripts/misc/android-gradle-dependencies/after.sh diff --git a/taskcluster/scripts/misc/android-gradle-dependencies.sh b/taskcluster/scripts/misc/android-gradle-dependencies.sh new file mode 100755 index 0000000000..2624dc961a --- /dev/null +++ b/taskcluster/scripts/misc/android-gradle-dependencies.sh @@ -0,0 +1,21 @@ +#!/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +set -v + +cd $GECKO_PATH + +# Nexus needs Java 8 +export PATH=$MOZ_FETCHES_DIR/jdk-8/bin:$PATH + +. taskcluster/scripts/misc/android-gradle-dependencies/before.sh + +export MOZCONFIG=mobile/android/config/mozconfigs/android-arm-gradle-dependencies/nightly +./mach build +./mach gradle downloadDependencies +./mach android gradle-dependencies + +. taskcluster/scripts/misc/android-gradle-dependencies/after.sh diff --git a/taskcluster/scripts/misc/android-gradle-dependencies/after.sh b/taskcluster/scripts/misc/android-gradle-dependencies/after.sh new file mode 100755 index 0000000000..9943cf2c41 --- /dev/null +++ b/taskcluster/scripts/misc/android-gradle-dependencies/after.sh @@ -0,0 +1,30 @@ +#!/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +: WORKSPACE ${WORKSPACE:=/builds/worker/workspace} + +set -v + +# Package everything up. 
+pushd $WORKSPACE +mkdir -p android-gradle-dependencies /builds/worker/artifacts + +# NEXUS_WORK is exported by `before.sh`. +cp -R ${NEXUS_WORK}/storage/mozilla android-gradle-dependencies +cp -R ${NEXUS_WORK}/storage/central android-gradle-dependencies +cp -R ${NEXUS_WORK}/storage/google android-gradle-dependencies +cp -R ${NEXUS_WORK}/storage/gradle-plugins android-gradle-dependencies + +# The Gradle wrapper will have downloaded and verified the hash of exactly one +# Gradle distribution. It will be located in $GRADLE_USER_HOME, like +# ~/.gradle/wrapper/dists/gradle-8.5-bin/$PROJECT_HASH/gradle-8.5. We +# want to remove the version from the internal directory for use via tooltool in +# a mozconfig. +cp -a ${GRADLE_USER_HOME}/wrapper/dists/gradle-*-*/*/gradle-*/ android-gradle-dependencies/gradle-dist + +tar cavf /builds/worker/artifacts/android-gradle-dependencies.tar.zst android-gradle-dependencies + +popd diff --git a/taskcluster/scripts/misc/android-gradle-dependencies/before.sh b/taskcluster/scripts/misc/android-gradle-dependencies/before.sh new file mode 100755 index 0000000000..7150731d73 --- /dev/null +++ b/taskcluster/scripts/misc/android-gradle-dependencies/before.sh @@ -0,0 +1,30 @@ +#!/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +: WORKSPACE ${WORKSPACE:=/builds/worker/workspace} + +set -v + +# Export NEXUS_WORK so that `after.sh` can use it. +export NEXUS_WORK=/builds/worker/workspace/sonatype-nexus-work +mkdir -p ${NEXUS_WORK}/conf +cp /builds/worker/workspace/build/src/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml ${NEXUS_WORK}/conf/nexus.xml + +RUN_AS_USER=worker $MOZ_FETCHES_DIR/sonatype-nexus/bin/nexus restart + +# Wait "a while" for Nexus to actually start. Don't fail if this fails. +wget --quiet --retry-connrefused --waitretry=2 --tries=100 \ + http://localhost:8081/nexus/service/local/status || true +rm -rf status + +# It's helpful when debugging to see the "latest state". +curl http://localhost:8081/nexus/service/local/status || true + +# Verify Nexus has actually started. Fail if this fails. +curl --fail --silent --location http://localhost:8081/nexus/service/local/status | grep 'STARTED' + +# It's helpful when debugging to see the repository configurations. 
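(Aside, a sketch assuming Sonatype Nexus 2's status resource: the grep for 'STARTED' above works because a fully started instance answers with XML along these lines.)

    <status><data> ... <state>STARTED</state> ... </data></status>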
+curl http://localhost:8081/nexus/service/local/repositories || true diff --git a/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml b/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml new file mode 100644 index 0000000000..e3e37373d8 --- /dev/null +++ b/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml @@ -0,0 +1,413 @@ + + + + + 2.8.0 + 2.12.0-01 + + 20000 + 3 + + + + 60000 + + + true + 8082 + strict + + + true + + + + mozilla + Mozilla Maven + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + true + 1440 + true + true + true + READ_ONLY + true + true + + file + + + https://maven.mozilla.org/maven2/ + + + RELEASE + STRICT + true + false + -1 + 1440 + 1440 + true + + + + gradle-plugins + Gradle Plugins + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + true + 1440 + true + true + true + READ_ONLY + true + true + + file + + + https://plugins.gradle.org/m2/ + + + RELEASE + STRICT + true + false + -1 + 1440 + 1440 + true + + + + google + google + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + true + 1440 + true + true + true + READ_ONLY + true + true + + file + + + https://maven.google.com/ + + + RELEASE + + STRICT_IF_EXISTS + true + false + -1 + 1440 + 1440 + true + + + + central + Central + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + true + 1440 + true + true + true + READ_ONLY + true + true + + file + + + https://repo1.maven.org/maven2/ + + + ALLOW + -1 + 1440 + false + false + WARN + RELEASE + + + + apache-snapshots + Apache Snapshots + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + true + 1440 + true + true + true + READ_ONLY + true + true + + file + + + https://repository.apache.org/snapshots/ + + + ALLOW + 1440 + 1440 + false + false + WARN + SNAPSHOT + + + + releases + Releases + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + 1440 + true + true + true + ALLOW_WRITE_ONCE + true + true + + file + + + ALLOW + -1 + 1440 + false + false + WARN + RELEASE + + + + snapshots + Snapshots + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + 1440 + true + true + true + ALLOW_WRITE + true + true + + file + + + ALLOW + 1440 + 1440 + false + false + WARN + SNAPSHOT + + + + thirdparty + 3rd party + org.sonatype.nexus.proxy.repository.Repository + maven2 + IN_SERVICE + 1440 + true + true + true + ALLOW_WRITE_ONCE + true + true + + file + + + ALLOW + -1 + 1440 + false + false + WARN + RELEASE + + + + central-m1 + Central M1 shadow + org.sonatype.nexus.proxy.repository.ShadowRepository + m2-m1-shadow + IN_SERVICE + 15 + true + true + true + READ_ONLY + + file + + + central + false + + + + public + Public Repositories + org.sonatype.nexus.proxy.repository.GroupRepository + maven2 + IN_SERVICE + 15 + true + true + true + READ_ONLY + true + + file + + + true + + releases + snapshots + thirdparty + central + + + + + + + + inhouse-stuff + * + inclusive + + ^/(com|org)/somecompany/.* + + + snapshots + releases + + + + apache-stuff + * + exclusive + + ^/org/some-oss/.* + + + releases + snapshots + + + + + + + 1 + All (Maven2) + maven2 + + .* + + + + 2 + All (Maven1) + maven1 + + .* + + + + 3 + All but sources (Maven2) + maven2 + + (?!.*-sources.*).* + + + + 4 + All Metadata (Maven2) + maven2 + + .*maven-metadata\.xml.* + + + + any + All (Any Repository) + any + + .* + + + + site + All (site) + site + + .* + + + + npm + All (npm) + npm + + .* + + + + nuget + All (nuget) + nuget + + .* + + + + rubygems + All 
(rubygems) + rubygems + + .* + + + + + smtp-host + 25 + smtp-username + {jyU2gDFaNz8HQ4ybBAIdtJ6KL+YB08GXQs7vLPnia3o=} + system@nexus.org + + + diff --git a/taskcluster/scripts/misc/are-we-esmified-yet.py b/taskcluster/scripts/misc/are-we-esmified-yet.py new file mode 100644 index 0000000000..ff888b80c7 --- /dev/null +++ b/taskcluster/scripts/misc/are-we-esmified-yet.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import pathlib +import re +import subprocess +import sys + +TBPL_FAILURE = 2 + +excluded_files = [ + # Testcase for loader. + "js/xpconnect/tests/chrome/file_expandosharing.jsm", + "js/xpconnect/tests/unit/environment_script.js", + "js/xpconnect/tests/unit/bogus_element_type.jsm", + "js/xpconnect/tests/unit/bogus_exports_type.jsm", + "js/xpconnect/tests/unit/envChain.jsm", + "js/xpconnect/tests/unit/envChain_subscript.jsm", + "js/xpconnect/tests/unit/environment_checkscript.jsm", + "js/xpconnect/tests/unit/environment_loadscript.jsm", + "js/xpconnect/tests/unit/import_stack.jsm", + "js/xpconnect/tests/unit/importer.jsm", + "js/xpconnect/tests/unit/jsm_loaded-1.jsm", + "js/xpconnect/tests/unit/jsm_loaded-2.jsm", + "js/xpconnect/tests/unit/jsm_loaded-3.jsm", + "js/xpconnect/tests/unit/not-esmified-not-exported.jsm", + "js/xpconnect/tests/unit/recursive_importA.jsm", + "js/xpconnect/tests/unit/recursive_importB.jsm", + "js/xpconnect/tests/unit/ReturnCodeChild.jsm", + "js/xpconnect/tests/unit/syntax_error.jsm", + "js/xpconnect/tests/unit/TestBlob.jsm", + "js/xpconnect/tests/unit/TestFile.jsm", + "js/xpconnect/tests/unit/uninitialized_lexical.jsm", + "dom/url/tests/file_url.jsm", + "dom/url/tests/file_worker_url.jsm", + "dom/url/tests/test_bug883784.jsm", + "dom/workers/test/WorkerTest.jsm", + "dom/encoding/test/file_stringencoding.jsm", + "remote/shared/messagehandler/test/browser/resources/modules/root/invalid.jsm", + "toolkit/actors/TestProcessActorChild.jsm", + "toolkit/actors/TestProcessActorParent.jsm", + "toolkit/actors/TestWindowChild.jsm", + "toolkit/actors/TestWindowParent.jsm", + # Testcase for build system. + "python/mozbuild/mozbuild/test/backend/data/build/bar.jsm", + "python/mozbuild/mozbuild/test/backend/data/build/baz.jsm", + "python/mozbuild/mozbuild/test/backend/data/build/foo.jsm", + "python/mozbuild/mozbuild/test/backend/data/build/qux.jsm", + # EXPORTED_SYMBOLS inside testcase. 
+ "tools/lint/eslint/eslint-plugin-mozilla/tests/mark-exported-symbols-as-used.js", +] + +if pathlib.Path(".hg").exists(): + mode = "hg" +elif pathlib.Path(".git").exists(): + mode = "git" +else: + print( + "Error: This script needs to be run inside mozilla-central checkout " + "of either mercurial or git.", + file=sys.stderr, + ) + sys.exit(TBPL_FAILURE) + + +def new_files_struct(): + return { + "jsm": [], + "esm": [], + "subdir": {}, + } + + +def put_file(files, kind, path): + """Put a path into files tree structure.""" + + if str(path) in excluded_files: + return + + name = path.name + + current_files = files + for part in path.parent.parts: + if part not in current_files["subdir"]: + current_files["subdir"][part] = new_files_struct() + current_files = current_files["subdir"][part] + + current_files[kind].append(name) + + +def run(cmd): + """Run command and return output as lines, excluding empty line.""" + lines = subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode() + return filter(lambda x: x != "", lines.split("\n")) + + +def collect_jsm(files): + """Collect JSM files.""" + kind = "jsm" + + # jsm files + if mode == "hg": + cmd = ["hg", "files", "set:glob:**/*.jsm"] + else: + cmd = ["git", "ls-files", "*.jsm"] + for line in run(cmd): + put_file(files, kind, pathlib.Path(line)) + + # js files with EXPORTED_SYMBOLS + if mode == "hg": + cmd = ["hg", "files", "set:grep('EXPORTED_SYMBOLS = \[') and glob:**/*.js"] + for line in run(cmd): + put_file(files, kind, pathlib.Path(line)) + else: + handled = {} + cmd = ["git", "grep", "EXPORTED_SYMBOLS = \[", "*.js"] + for line in run(cmd): + m = re.search("^([^:]+):", line) + if not m: + continue + path = m.group(1) + if path in handled: + continue + handled[path] = True + put_file(files, kind, pathlib.Path(path)) + + +def collect_esm(files): + """Collect system ESM files.""" + kind = "esm" + + # sys.mjs files + if mode == "hg": + cmd = ["hg", "files", "set:glob:**/*.sys.mjs"] + else: + cmd = ["git", "ls-files", "*.sys.mjs"] + for line in run(cmd): + put_file(files, kind, pathlib.Path(line)) + + +def to_stat(files): + """Convert files tree into status tree.""" + jsm = len(files["jsm"]) + esm = len(files["esm"]) + subdir = {} + + for key, sub_files in files["subdir"].items(): + sub_stat = to_stat(sub_files) + + subdir[key] = sub_stat + jsm += sub_stat["jsm"] + esm += sub_stat["esm"] + + stat = { + "jsm": jsm, + "esm": esm, + } + if len(subdir): + stat["subdir"] = subdir + + return stat + + +if mode == "hg": + cmd = ["hg", "parent", "--template", "{node}"] + commit_hash = list(run(cmd))[0] + + cmd = ["hg", "parent", "--template", "{date|shortdate}"] + date = list(run(cmd))[0] +else: + cmd = ["git", "log", "-1", "--pretty=%H"] + git_hash = list(run(cmd))[0] + cmd = ["git", "cinnabar", "git2hg", git_hash] + commit_hash = list(run(cmd))[0] + + cmd = ["git", "log", "-1", "--pretty=%cs"] + date = list(run(cmd))[0] + +files = new_files_struct() +collect_jsm(files) +collect_esm(files) + +stat = to_stat(files) +stat["hash"] = commit_hash +stat["date"] = date + +print(json.dumps(stat, indent=2)) diff --git a/taskcluster/scripts/misc/browsertime.sh b/taskcluster/scripts/misc/browsertime.sh new file mode 100755 index 0000000000..27ef83c095 --- /dev/null +++ b/taskcluster/scripts/misc/browsertime.sh @@ -0,0 +1,19 @@ +#!/bin/bash -vex + +set -x -e + +echo "running as" $(id) + +set -v + +cd $GECKO_PATH + +export PATH=$PATH:$MOZ_FETCHES_DIR/node/bin + +./mach browsertime --setup + +# We have tools/browsertime/{package.json,node_modules,...} and want +# 
browsertime/{package.json,node_modules}. +mkdir -p /builds/worker/artifacts +cd tools +tar caf /builds/worker/artifacts/browsertime.tar.zst browsertime diff --git a/taskcluster/scripts/misc/build-afl.sh b/taskcluster/scripts/misc/build-afl.sh new file mode 100755 index 0000000000..5039a7a7b1 --- /dev/null +++ b/taskcluster/scripts/misc/build-afl.sh @@ -0,0 +1,24 @@ +#!/bin/sh + +set -e -x + +artifact="$(basename "$TOOLCHAIN_ARTIFACT")" +dir="${artifact%.tar.*}" +scripts="$(realpath "${0%/*}")" + +cd "$MOZ_FETCHES_DIR/AFLplusplus" +patch -p1 -i "$scripts/afl-nyx.patch" +make -f GNUmakefile afl-showmap \ + CC="$MOZ_FETCHES_DIR/clang/bin/clang" +make -f GNUmakefile.llvm install \ + CODE_COVERAGE=1 \ + CPPFLAGS="--sysroot $MOZ_FETCHES_DIR/sysroot" \ + DESTDIR="$dir" \ + LLVM_CONFIG="$MOZ_FETCHES_DIR/clang/bin/llvm-config" \ + PREFIX=/ +rm -rf "$dir/share" + +tar caf "$artifact" "$dir" + +mkdir -p "$UPLOAD_DIR" +mv "$artifact" "$UPLOAD_DIR" diff --git a/taskcluster/scripts/misc/build-binutils-linux.sh b/taskcluster/scripts/misc/build-binutils-linux.sh new file mode 100755 index 0000000000..b4e1d8bf6d --- /dev/null +++ b/taskcluster/scripts/misc/build-binutils-linux.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -x -e -v + +# This script is for building binutils for Linux. + +cd $GECKO_PATH + +PATH=$MOZ_FETCHES_DIR/gcc/bin:$PATH + +build/unix/build-binutils/build-binutils.sh $MOZ_FETCHES_DIR + +# Put a tarball in the artifacts dir +mkdir -p $UPLOAD_DIR +cp $MOZ_FETCHES_DIR/binutils.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-breakpad-injector.sh b/taskcluster/scripts/misc/build-breakpad-injector.sh new file mode 100755 index 0000000000..245dae3264 --- /dev/null +++ b/taskcluster/scripts/misc/build-breakpad-injector.sh @@ -0,0 +1,29 @@ +#!/bin/bash +set -x -e -v + +# This script is for building libbreakpadinjector.so, currently for linux only + +COMPRESS_EXT=zst + +cd $GECKO_PATH + +export MOZ_OBJDIR=obj-injector + +echo ac_add_options --enable-project=tools/crashreporter/injector > .mozconfig + +INJECTOR=libbreakpadinjector.so + +TOOLCHAINS="rustc clang" + +for t in $TOOLCHAINS; do + PATH="$MOZ_FETCHES_DIR/$t/bin:$PATH" +done + +./mach build -v + +mkdir injector +cp $MOZ_OBJDIR/dist/bin/$INJECTOR injector/ + +tar -acf injector.tar.$COMPRESS_EXT injector/ +mkdir -p $UPLOAD_DIR +cp injector.tar.$COMPRESS_EXT $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-cctools-port.sh b/taskcluster/scripts/misc/build-cctools-port.sh new file mode 100755 index 0000000000..203d4c85ff --- /dev/null +++ b/taskcluster/scripts/misc/build-cctools-port.sh @@ -0,0 +1,102 @@ +#!/bin/bash + +# cctools sometimes needs to be rebuilt when clang is modified. +# Until bug 1471905 is addressed, increase the following number +# when a forced rebuild of cctools is necessary: 1 + +set -x -e -v + +# This script is for building cctools (Apple's binutils) for Linux using +# cctools-port (https://github.com/tpoechtrager/cctools-port). 
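(Aside, not from the tree: the build produces a binutils-style suite under a target prefix, which cross builds then invoke in place of the host tools, for example:)

    x86_64-apple-darwin-ld      # Apple ld64
    x86_64-apple-darwin-lipo    # also copied to a plain `lipo` at the end of this script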
+WORKSPACE=$HOME/workspace + +# Set some crosstools-port and libtapi directories +CROSSTOOLS_SOURCE_DIR=$MOZ_FETCHES_DIR/cctools-port +CROSSTOOLS_CCTOOLS_DIR=$CROSSTOOLS_SOURCE_DIR/cctools +CROSSTOOLS_BUILD_DIR=$WORKSPACE/cctools +LIBTAPI_SOURCE_DIR=$MOZ_FETCHES_DIR/apple-libtapi +LIBTAPI_BUILD_DIR=$WORKSPACE/libtapi-build +LDID_SOURCE_DIR=$MOZ_FETCHES_DIR/ldid +CLANG_DIR=$MOZ_FETCHES_DIR/clang + +# Create our directories +mkdir -p $CROSSTOOLS_BUILD_DIR $LIBTAPI_BUILD_DIR + +cd $GECKO_PATH + +# Common setup for libtapi and cctools +export CC=$CLANG_DIR/bin/clang +export CXX=$CLANG_DIR/bin/clang++ +# We also need this LD_LIBRARY_PATH at build time, since tapi builds bits of +# clang build tools, and then executes those tools. +export LD_LIBRARY_PATH=$CLANG_DIR/lib + +# Build libtapi; the included build.sh is not sufficient for our purposes. +cd $LIBTAPI_BUILD_DIR + +# Values taken from build.sh +TAPI_REPOSITORY=tapi-1000.10.8 +TAPI_VERSION=10.0.0 + +INCLUDE_FIX="-I $LIBTAPI_SOURCE_DIR/src/llvm/projects/clang/include -I $PWD/projects/clang/include" + +cmake $LIBTAPI_SOURCE_DIR/src/llvm \ + -GNinja \ + -DCMAKE_CXX_FLAGS="$INCLUDE_FIX" \ + -DLLVM_INCLUDE_TESTS=OFF \ + -DCMAKE_BUILD_TYPE=RELEASE \ + -DCMAKE_INSTALL_PREFIX=$CROSSTOOLS_BUILD_DIR \ + -DCMAKE_SYSROOT=$MOZ_FETCHES_DIR/sysroot \ + -DCMAKE_EXE_LINKER_FLAGS=-fuse-ld=lld \ + -DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld \ + -DLLVM_TARGETS_TO_BUILD="X86;ARM;AArch64" \ + -DTAPI_REPOSITORY_STRING=$TAPI_REPOSITORY \ + -DTAPI_FULL_VERSION=$TAPI_VERSION + +ninja clangBasic -v +ninja libtapi install-libtapi install-tapi-headers -v + +# Setup LDFLAGS late so run-at-build-time tools in the basic clang build don't +# pick up the possibly-incompatible libstdc++ from clang. +# Also set it up such that loading libtapi doesn't require a LD_LIBRARY_PATH. +# (this requires two dollars and extra backslashing because it's used verbatim +# via a Makefile) +export LDFLAGS="-fuse-ld=lld -lpthread -Wl,-rpath-link,$MOZ_FETCHES_DIR/sysroot/lib/x86_64-linux-gnu -Wl,-rpath-link,$MOZ_FETCHES_DIR/sysroot/usr/lib/x86_64-linux-gnu -Wl,-rpath,\\\$\$ORIGIN/../lib,-rpath,\\\$\$ORIGIN/../../clang/lib" + +export CC="$CC --sysroot=$MOZ_FETCHES_DIR/sysroot" +export CXX="$CXX --sysroot=$MOZ_FETCHES_DIR/sysroot" + +# Configure crosstools-port +cd $CROSSTOOLS_CCTOOLS_DIR +patch -p2 < $GECKO_PATH/taskcluster/scripts/misc/cctools.patch + +# Force re-libtoolization to overwrite files with the new libtool bits. +perl -pi -e 's/(LIBTOOLIZE -c)/\1 -f/' autogen.sh +./autogen.sh +./configure \ + --prefix=$CROSSTOOLS_BUILD_DIR \ + --target=x86_64-apple-darwin \ + --with-llvm-config=$CLANG_DIR/bin/llvm-config \ + --enable-lto-support \ + --enable-tapi-support \ + --with-libtapi=$CROSSTOOLS_BUILD_DIR + +# Build cctools +make -j `nproc --all` install + +# Build ldid +cd $LDID_SOURCE_DIR +# The crypto library in the sysroot cannot be linked in a PIE executable so we use -no-pie +make -j `nproc --all` install INSTALLPREFIX=$CROSSTOOLS_BUILD_DIR LDFLAGS="-no-pie -Wl,-Bstatic -lcrypto -Wl,-Bdynamic -ldl -pthread" + +strip $CROSSTOOLS_BUILD_DIR/bin/* +# various build scripts based on cmake want to find `lipo` without a prefix +cp $CROSSTOOLS_BUILD_DIR/bin/x86_64-apple-darwin-lipo $CROSSTOOLS_BUILD_DIR/bin/lipo + +(cd $CROSSTOOLS_BUILD_DIR/bin/; for i in x86_64-apple-darwin-*; do + ln $i aarch64${i#x86_64} +done) + +# Put a tarball in the artifacts dir +mkdir -p $UPLOAD_DIR +tar caf $UPLOAD_DIR/cctools.tar.zst -C $CROSSTOOLS_BUILD_DIR/.. 
`basename $CROSSTOOLS_BUILD_DIR` diff --git a/taskcluster/scripts/misc/build-clang-mingw.sh b/taskcluster/scripts/misc/build-clang-mingw.sh new file mode 100755 index 0000000000..fafc43cce7 --- /dev/null +++ b/taskcluster/scripts/misc/build-clang-mingw.sh @@ -0,0 +1,255 @@ +#!/bin/bash +set -x -e -v + +# This script is for building a mingw-clang toolchain for use on Linux. + +if [[ $# -eq 0 ]]; then + echo "Provide either x86 or x64 to specify a toolchain." + exit 1; +elif [ "$1" == "x86" ]; then + machine="i686" + compiler_rt_machine="i386" + crt_flags="--enable-lib32 --disable-lib64" + WRAPPER_FLAGS="" +elif [ "$1" == "x64" ]; then + machine="x86_64" + compiler_rt_machine="x86_64" + crt_flags="--disable-lib32 --enable-lib64" + WRAPPER_FLAGS="" +else + echo "Provide either x86 or x64 to specify a toolchain." + exit 1; +fi + +TOOLCHAIN_DIR=$MOZ_FETCHES_DIR/llvm-project +INSTALL_DIR=$MOZ_FETCHES_DIR/clang +CROSS_PREFIX_DIR=$INSTALL_DIR/$machine-w64-mingw32 + +make_flags="-j$(nproc)" + +if [ -d "$MOZ_FETCHES_DIR/binutils/bin" ]; then + export PATH="$MOZ_FETCHES_DIR/binutils/bin:$PATH" +fi + +# This is default value of _WIN32_WINNT. Gecko configure script explicitly sets this, +# so this is not used to build Gecko itself. We default to 0x601, which is Windows 7. +default_win32_winnt=0x601 + +cd $GECKO_PATH + +patch_file2="$(pwd)/taskcluster/scripts/misc/mingw-dwrite_3.patch" +patch_file3="$(pwd)/taskcluster/scripts/misc/mingw-unknown.patch" +patch_file4="$(pwd)/taskcluster/scripts/misc/mingw-enum.patch" +patch_file5="$(pwd)/taskcluster/scripts/misc/mingw-widl.patch" +patch_file6="$(pwd)/taskcluster/scripts/misc/mingw-dispatchqueue.patch" +patch_file10="$(pwd)/taskcluster/scripts/misc/mingw-ts_sd.patch" +patch_file11="$(pwd)/taskcluster/scripts/misc/mingw-composition.patch" + +prepare() { + pushd $MOZ_FETCHES_DIR/mingw-w64 + patch -p1 <$patch_file2 + patch -p1 <$patch_file3 + patch -p1 <$patch_file4 + patch -p1 <$patch_file5 + patch -p1 <$patch_file6 + patch -p1 <$patch_file10 + patch -p1 <$patch_file11 + popd +} + +install_wrappers() { + pushd $INSTALL_DIR/bin + + compiler_flags="--sysroot \$DIR/../$machine-w64-mingw32 -rtlib=compiler-rt -stdlib=libc++ -fuse-ld=lld $WRAPPER_FLAGS -fuse-cxa-atexit -Qunused-arguments" + + cat <$machine-w64-mingw32-clang +#!/bin/sh +DIR="\$(cd "\$(dirname "\$0")" && pwd)" +\$DIR/clang -target $machine-w64-mingw32 $compiler_flags "\$@" +EOF + chmod +x $machine-w64-mingw32-clang + + cat <$machine-w64-mingw32-clang++ +#!/bin/sh +DIR="\$(cd "\$(dirname "\$0")" && pwd)" +\$DIR/clang -target $machine-w64-mingw32 --driver-mode=g++ $compiler_flags "\$@" +EOF + chmod +x $machine-w64-mingw32-clang++ + + CC="$machine-w64-mingw32-clang" + CXX="$machine-w64-mingw32-clang++" + + popd +} + +build_mingw() { + mkdir mingw-w64-headers + pushd mingw-w64-headers + $MOZ_FETCHES_DIR/mingw-w64/mingw-w64-headers/configure \ + --host=$machine-w64-mingw32 \ + --enable-sdk=all \ + --enable-idl \ + --with-default-msvcrt=ucrt \ + --with-default-win32-winnt=$default_win32_winnt \ + --prefix=$CROSS_PREFIX_DIR + make $make_flags install + popd + + mkdir mingw-w64-crt + pushd mingw-w64-crt + $MOZ_FETCHES_DIR/mingw-w64/mingw-w64-crt/configure \ + --host=$machine-w64-mingw32 \ + $crt_flags \ + --with-default-msvcrt=ucrt \ + CC="$CC" \ + AR=llvm-ar \ + RANLIB=llvm-ranlib \ + DLLTOOL=llvm-dlltool \ + --prefix=$CROSS_PREFIX_DIR + make $make_flags + make $make_flags install + popd + + mkdir widl + pushd widl + $MOZ_FETCHES_DIR/mingw-w64/mingw-w64-tools/widl/configure --target=$machine-w64-mingw32 
--prefix=$INSTALL_DIR + make $make_flags + make $make_flags install + popd +} + +build_compiler_rt() { + CLANG_VERSION=$(basename $(dirname $(dirname $(dirname $($CC --print-libgcc-file-name -rtlib=compiler-rt))))) + mkdir compiler-rt + pushd compiler-rt + cmake \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_C_COMPILER=$CC \ + -DCMAKE_SYSTEM_NAME=Windows \ + -DCMAKE_AR=$INSTALL_DIR/bin/llvm-ar \ + -DCMAKE_RANLIB=$INSTALL_DIR/bin/llvm-ranlib \ + -DCMAKE_C_COMPILER_WORKS=1 \ + -DCMAKE_C_COMPILER_TARGET=$compiler_rt_machine-windows-gnu \ + -DCOMPILER_RT_DEFAULT_TARGET_ONLY=TRUE \ + $TOOLCHAIN_DIR/compiler-rt/lib/builtins + make $make_flags + mkdir -p $INSTALL_DIR/lib/clang/$CLANG_VERSION/lib/windows + cp lib/windows/libclang_rt.builtins-$compiler_rt_machine.a $INSTALL_DIR/lib/clang/$CLANG_VERSION/lib/windows/ + popd +} + +build_runtimes() { + # Below, we specify -g -gcodeview to build static libraries with debug information. + # Because we're not distributing these builds, this is fine. If one were to distribute + # the builds, perhaps one would want to make those flags conditional or investigation + # other options. + DEBUG_FLAGS="-g -gcodeview" + + # First configure libcxx + mkdir runtimes + pushd runtimes + cmake \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=$CROSS_PREFIX_DIR \ + -DCMAKE_C_COMPILER=$CC \ + -DCMAKE_CXX_COMPILER=$CXX \ + -DCMAKE_CROSSCOMPILING=TRUE \ + -DCMAKE_SYSTEM_NAME=Windows \ + -DCMAKE_C_COMPILER_WORKS=TRUE \ + -DCMAKE_CXX_COMPILER_WORKS=TRUE \ + -DLLVM_COMPILER_CHECKED=TRUE \ + -DCMAKE_AR=$INSTALL_DIR/bin/llvm-ar \ + -DCMAKE_RANLIB=$INSTALL_DIR/bin/llvm-ranlib \ + -DCMAKE_CXX_FLAGS="${DEBUG_FLAGS} -D_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" \ + -DLIBCXX_USE_COMPILER_RT=ON \ + -DLIBCXX_INSTALL_HEADERS=ON \ + -DLIBCXX_ENABLE_EXCEPTIONS=ON \ + -DLIBCXX_ENABLE_THREADS=ON \ + -DLIBCXX_HAS_WIN32_THREAD_API=ON \ + -DLIBCXX_ENABLE_MONOTONIC_CLOCK=ON \ + -DLIBCXX_ENABLE_SHARED=OFF \ + -DLIBCXX_SUPPORTS_STD_EQ_CXX11_FLAG=TRUE \ + -DLIBCXX_HAVE_CXX_ATOMICS_WITHOUT_LIB=TRUE \ + -DLIBCXX_ENABLE_EXPERIMENTAL_LIBRARY=OFF \ + -DLIBCXX_ENABLE_FILESYSTEM=ON \ + -DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=TRUE \ + -DLIBCXX_CXX_ABI=libcxxabi \ + -DLIBCXXABI_USE_LLVM_UNWINDER=TRUE \ + -DLIBCXXABI_ENABLE_STATIC_UNWINDER=TRUE \ + -DLLVM_NO_OLD_LIBSTDCXX=TRUE \ + -DLIBUNWIND_USE_COMPILER_RT=TRUE \ + -DLIBUNWIND_ENABLE_THREADS=TRUE \ + -DLIBUNWIND_ENABLE_SHARED=FALSE \ + -DLIBUNWIND_ENABLE_CROSS_UNWINDING=FALSE \ + -DLIBUNWIND_CXX_FLAGS="${DEBUG_FLAGS} -Wno-dll-attribute-on-redeclaration -nostdinc++ -DPSAPI_VERSION=2" \ + -DLIBUNWIND_C_FLAGS="-Wno-dll-attribute-on-redeclaration" \ + -DLIBUNWIND_ENABLE_FRAME_APIS=ON \ + -DLIBCXXABI_USE_COMPILER_RT=ON \ + -DLIBCXXABI_ENABLE_EXCEPTIONS=ON \ + -DLIBCXXABI_ENABLE_THREADS=ON \ + -DLIBCXXABI_TARGET_TRIPLE=$machine-w64-mingw32 \ + -DLIBCXXABI_ENABLE_SHARED=OFF \ + -DLIBCXXABI_CXX_FLAGS="${DEBUG_FLAGS} -D_LIBCPP_HAS_THREAD_API_WIN32" \ + -DLLVM_ENABLE_RUNTIMES="libcxxabi;libcxx;libunwind" \ + $TOOLCHAIN_DIR/runtimes + + make $make_flags VERBOSE=1 + make $make_flags install + + popd +} + +build_libssp() { + pushd $MOZ_FETCHES_DIR/gcc-source/ + + # Massage the environment for the build-libssp.sh script + mkdir -p ./$machine-w64-mingw32/lib + cp $MOZ_FETCHES_DIR/llvm-mingw/libssp-Makefile . 
+ sed -i 's/set -e/set -x -e -v/' $MOZ_FETCHES_DIR/llvm-mingw/build-libssp.sh + sed -i 's/(CROSS)gcc/(CROSS)clang/' libssp-Makefile + sed -i 's/\$(CROSS)ar/llvm-ar/' libssp-Makefile + OLDPATH=$PATH + PATH=$INSTALL_DIR/bin:$PATH + + # Run the script + TOOLCHAIN_ARCHS=$machine $MOZ_FETCHES_DIR/llvm-mingw/build-libssp.sh . + + # Grab the artifacts, cleanup + cp $MOZ_FETCHES_DIR/gcc-source/$machine-w64-mingw32/lib/{libssp.a,libssp_nonshared.a} $INSTALL_DIR/$machine-w64-mingw32/lib/ + unset TOOLCHAIN_ARCHS + PATH=$OLDPATH + popd +} + +build_utils() { + pushd $INSTALL_DIR/bin/ + for prog in ar nm objcopy ranlib readobj strip; do + ln -s llvm-$prog $machine-w64-mingw32-$prog + done + ./clang $MOZ_FETCHES_DIR/llvm-mingw/wrappers/windres-wrapper.c -O2 -Wl,-s -o $machine-w64-mingw32-windres + popd +} + +export PATH=$INSTALL_DIR/bin:$PATH + +prepare + +mkdir $TOOLCHAIN_DIR/build +pushd $TOOLCHAIN_DIR/build + +install_wrappers +build_mingw +build_compiler_rt +build_runtimes +build_libssp +build_utils + +popd + +# Put a tarball in the artifacts dir +mkdir -p $UPLOAD_DIR + +pushd $(dirname $INSTALL_DIR) +tar caf clangmingw.tar.zst clang +mv clangmingw.tar.zst $UPLOAD_DIR +popd diff --git a/taskcluster/scripts/misc/build-clang-tidy-external.sh b/taskcluster/scripts/misc/build-clang-tidy-external.sh new file mode 100755 index 0000000000..8647432e7c --- /dev/null +++ b/taskcluster/scripts/misc/build-clang-tidy-external.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -x + +# # Delete the external directory +rm -rf $GECKO_PATH/build/clang-plugin/external/* + +# Move external repository into its place +cp -r $MOZ_FETCHES_DIR/civet.git/* $GECKO_PATH/build/clang-plugin/external + +# Call build-clang.sh with this script's first argument (our JSON config) +$GECKO_PATH/taskcluster/scripts/misc/build-clang.sh $1 diff --git a/taskcluster/scripts/misc/build-clang.sh b/taskcluster/scripts/misc/build-clang.sh new file mode 100755 index 0000000000..9307774062 --- /dev/null +++ b/taskcluster/scripts/misc/build-clang.sh @@ -0,0 +1,72 @@ +#!/bin/bash +set -x -e -v + +# This script is for building clang. + +ORIGPWD="$PWD" +CONFIGS=$(for c; do echo -n " -c $GECKO_PATH/$c"; done) + +cd $GECKO_PATH + +if [ -d "$MOZ_FETCHES_DIR/binutils/bin" ]; then + export PATH="$MOZ_FETCHES_DIR/binutils/bin:$PATH" +fi + +# Make the installed compiler-rt(s) available to clang. +UPLOAD_DIR= taskcluster/scripts/misc/repack-clang.sh + +case "$CONFIGS" in +*macosx64*) + # cmake makes decisions based on the output of the mac-only sw_vers, which is + # obviously missing when cross-compiling, so create a fake one. The exact + # version doesn't really matter: as of writing, cmake checks at most for 10.5. + mkdir -p $ORIGPWD/bin + echo "#!/bin/sh" > $ORIGPWD/bin/sw_vers + echo echo 10.12 >> $ORIGPWD/bin/sw_vers + chmod +x $ORIGPWD/bin/sw_vers + # these variables are used in build-clang.py + export CROSS_SYSROOT=$(ls -d $MOZ_FETCHES_DIR/MacOSX1*.sdk) + export PATH=$PATH:$ORIGPWD/bin + ;; +*win64*) + case "$(uname -s)" in + MINGW*|MSYS*) + export UPLOAD_DIR=$ORIGPWD/public/build + # Set up all the Visual Studio paths. + . taskcluster/scripts/misc/vs-setup.sh + + # LLVM_ENABLE_DIA_SDK is set if the directory "$ENV{VSINSTALLDIR}DIA SDK" + # exists. 
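(Aside, a worked example of the probe described above, using a hypothetical path:)

    VSPATH=/c/vs2022          # hypothetical value
    VSINSTALLDIR=/c/vs2022/   # result of the export just below
    # cmake then checks for "${VSINSTALLDIR}DIA SDK" and enables
    # LLVM_ENABLE_DIA_SDK only if that directory exists.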
+ export VSINSTALLDIR="${VSPATH}/" + + export PATH="$(cd $MOZ_FETCHES_DIR/cmake && pwd)/bin:${PATH}" + export PATH="$(cd $MOZ_FETCHES_DIR/ninja && pwd)/bin:${PATH}" + ;; + *) + export VSINSTALLDIR="$MOZ_FETCHES_DIR/vs" + ;; + esac + ;; +*linux64*|*android*) + ;; +*) + echo Cannot figure out build configuration for $CONFIGS + exit 1 + ;; +esac + +# gets a bit too verbose here +set +x + +cd $MOZ_FETCHES_DIR/llvm-project +python3 $GECKO_PATH/build/build-clang/build-clang.py $CONFIGS + +set -x + +if [ -f clang*.tar.zst ]; then + # Put a tarball in the artifacts dir + mkdir -p $UPLOAD_DIR + cp clang*.tar.zst $UPLOAD_DIR +fi + +. $GECKO_PATH/taskcluster/scripts/misc/vs-cleanup.sh diff --git a/taskcluster/scripts/misc/build-compiler-rt-wasi.sh b/taskcluster/scripts/misc/build-compiler-rt-wasi.sh new file mode 100755 index 0000000000..3f176d15d0 --- /dev/null +++ b/taskcluster/scripts/misc/build-compiler-rt-wasi.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -x -e -v + +artifact=$(basename $TOOLCHAIN_ARTIFACT) +dir=${artifact%.tar.*} + +if [ -n "$1" ]; then + patch -d $MOZ_FETCHES_DIR/wasi-sdk -p1 < $(dirname $0)/$1 +fi + +cd $MOZ_FETCHES_DIR/wasi-sdk +LLVM_PROJ_DIR=$MOZ_FETCHES_DIR/llvm-project + +mkdir -p build/install/wasi +# The wasi-sdk build system wants to build clang itself. We trick it into +# thinking it did, and put our own clang where it would have built its own. +ln -s $MOZ_FETCHES_DIR/clang build/llvm +touch build/llvm.BUILT + +# The wasi-sdk build system wants a clang and an ar binary in +# build/install/$PREFIX/bin +ln -s $MOZ_FETCHES_DIR/clang/bin build/install/wasi/bin +ln -s llvm-ar build/install/wasi/bin/ar + +# Build compiler-rt +make \ + LLVM_PROJ_DIR=$LLVM_PROJ_DIR \ + PREFIX=$(grep -q BUILD_PREFIX Makefile || echo $PWD/build/install)/wasi \ + build/compiler-rt.BUILT \ + -j$(nproc) + +mkdir -p $dir/lib +mv build/install/wasi/lib/clang/*/lib/wasi $dir/lib +tar --zstd -cf $artifact $dir +mkdir -p $UPLOAD_DIR +mv $artifact $UPLOAD_DIR/ diff --git a/taskcluster/scripts/misc/build-compiler-rt.sh b/taskcluster/scripts/misc/build-compiler-rt.sh new file mode 100755 index 0000000000..b5665381d1 --- /dev/null +++ b/taskcluster/scripts/misc/build-compiler-rt.sh @@ -0,0 +1,39 @@ +#!/bin/sh + +set -e -x + +artifact=$(basename $TOOLCHAIN_ARTIFACT) +dir=${artifact%.tar.*} +target=${dir#compiler-rt-} + +case "$target" in +*-linux-android) + EXTRA_CMAKE_FLAGS=" + -DCOMPILER_RT_BUILD_LIBFUZZER=OFF + -DCOMPILER_RT_BUILD_ORC=OFF + -DCOMPILER_RT_BUILTINS_HIDE_SYMBOLS=OFF + " + ;; +*-apple-darwin) + EXTRA_CMAKE_FLAGS=" + -DCOMPILER_RT_ENABLE_IOS=OFF + -DCOMPILER_RT_ENABLE_WATCHOS=OFF + -DCOMPILER_RT_ENABLE_TVOS=OFF + " + ;; +*-windows-msvc) + EXTRA_CMAKE_FLAGS=" + -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded + " + ;; +esac + +EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCOMPILER_RT_DEFAULT_TARGET_ONLY=ON + -DLLVM_ENABLE_PER_TARGET_RUNTIME_DIR=OFF +" + +export EXTRA_CMAKE_FLAGS + +$(dirname $0)/build-llvm-common.sh compiler-rt install $target "$@" diff --git a/taskcluster/scripts/misc/build-cpython.sh b/taskcluster/scripts/misc/build-cpython.sh new file mode 100755 index 0000000000..aac2034d0a --- /dev/null +++ b/taskcluster/scripts/misc/build-cpython.sh @@ -0,0 +1,116 @@ +#!/bin/sh +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+# +# This script builds the official interpreter for the python language, +# while also packing in a few default extra packages. + +set -e +set -x + +# Required fetch artifact +clang_bindir=${MOZ_FETCHES_DIR}/clang/bin +clang_libdir=${MOZ_FETCHES_DIR}/clang/lib +python_src=${MOZ_FETCHES_DIR}/cpython-source +xz_prefix=${MOZ_FETCHES_DIR}/xz + +# Make the compiler-rt available to clang. +env UPLOAD_DIR= $GECKO_PATH/taskcluster/scripts/misc/repack-clang.sh + +# Extra setup per platform +case `uname -s` in + Darwin) + # Use taskcluster clang instead of host compiler on OSX + export PATH=${clang_bindir}:${PATH} + export CC=clang + export CXX=clang++ + export LDFLAGS=-fuse-ld=lld + + case `uname -m` in + aarch64) + macosx_version_min=11.0 + ;; + *) + macosx_version_min=10.12 + ;; + esac + macosx_sdk=14.2 + # NOTE: both CFLAGS and CPPFLAGS need to be set here, otherwise + # configure step fails. + sysroot_flags="-isysroot ${MOZ_FETCHES_DIR}/MacOSX${macosx_sdk}.sdk -mmacosx-version-min=${macosx_version_min}" + export CPPFLAGS="${sysroot_flags} -I${xz_prefix}/include" + export CFLAGS=${sysroot_flags} + export LDFLAGS="${LDFLAGS} ${sysroot_flags} -L${xz_prefix}/lib" + configure_flags_extra=--with-openssl=/usr/local/opt/openssl + + # see https://bugs.python.org/issue22490 + unset __PYVENV_LAUNCHER__ + + # see https://bugs.python.org/issue44065 + sed -i -e 's,$CC --print-multiarch,:,' ${python_src}/configure + export LDFLAGS="${LDFLAGS} -Wl,-rpath -Wl,@loader_path/../.." + ;; + Linux) + # Use host gcc on Linux + export LDFLAGS="${LDFLAGS} -Wl,-rpath,\\\$ORIGIN/../.." + ;; +esac + +# Patch Python to honor MOZPYTHONHOME instead of PYTHONHOME. That way we have a +# relocatable python for free, while not interfering with the system Python that +# already honors PYTHONHOME. +find ${python_src} -type f -print0 | xargs -0 perl -i -pe "s,PYTHONHOME,MOZPYTHONHOME,g" + +# Actual build +work_dir=`pwd` +tardir=python + +cd `mktemp -d` +${python_src}/configure --prefix=/${tardir} --enable-optimizations ${configure_flags_extra} || { exit_status=$? 
&& cat config.log && exit $exit_status ; }
+
+export MAKEFLAGS=-j`nproc`
+make
+make DESTDIR=${work_dir} install
+cd ${work_dir}
+
+${work_dir}/python/bin/python3 -m pip install --upgrade pip==23.0
+${work_dir}/python/bin/python3 -m pip install -r ${GECKO_PATH}/build/psutil_requirements.txt -r ${GECKO_PATH}/build/zstandard_requirements.txt
+
+case `uname -s` in
+  Darwin)
+
+    cp /usr/local/opt/openssl/lib/libssl*.dylib ${work_dir}/python/lib/
+    cp /usr/local/opt/openssl/lib/libcrypto*.dylib ${work_dir}/python/lib/
+    cp ${xz_prefix}/lib/liblzma.dylib ${work_dir}/python/lib/
+
+    # Instruct the loader to search for the lib in rpath instead of the one used during linking
+    install_name_tool -change /usr/local/opt/openssl@1.1/lib/libssl.1.1.dylib @rpath/libssl.1.1.dylib ${work_dir}/python/lib/python3.*/lib-dynload/_ssl.cpython-3*-darwin.so
+    install_name_tool -change /usr/local/opt/openssl@1.1/lib/libcrypto.1.1.dylib @rpath/libcrypto.1.1.dylib ${work_dir}/python/lib/python3.*/lib-dynload/_ssl.cpython-3*-darwin.so
+    otool -L ${work_dir}/python/lib/python3.*/lib-dynload/_ssl.cpython-3*-darwin.so | grep @rpath/libssl.1.1.dylib
+
+
+    install_name_tool -change /xz/lib/liblzma.5.dylib @rpath/liblzma.5.dylib ${work_dir}/python/lib/python3.*/lib-dynload/_lzma.cpython-3*-darwin.so
+    otool -L ${work_dir}/python/lib/python3.*/lib-dynload/_lzma.cpython-3*-darwin.so | grep @rpath/liblzma.5.dylib
+
+    # Also modify the shipped libssl to use the shipped libcrypto
+    install_name_tool -change /usr/local/Cellar/openssl@1.1/1.1.1h/lib/libcrypto.1.1.dylib @rpath/libcrypto.1.1.dylib ${work_dir}/python/lib/libssl.1.1.dylib
+    otool -L ${work_dir}/python/lib/libssl.1.1.dylib | grep @rpath/libcrypto.1.1.dylib
+
+    # sanity check
+    ${work_dir}/python/bin/python3 -c "import ssl"
+    ${work_dir}/python/bin/python3 -c "import lzma"
+
+    # We may not have access to system certificates on OSX
+    ${work_dir}/python/bin/python3 -m pip install certifi==2024.2.2
+    ;;
+  Linux)
+    cp /usr/lib/x86_64-linux-gnu/libffi.so.* ${work_dir}/python/lib/
+    cp /usr/lib/x86_64-linux-gnu/libssl.so.* ${work_dir}/python/lib/
+    cp /usr/lib/x86_64-linux-gnu/libcrypto.so.* ${work_dir}/python/lib/
+    cp /lib/x86_64-linux-gnu/libncursesw.so.* ${work_dir}/python/lib/
+    cp /lib/x86_64-linux-gnu/libtinfo.so.* ${work_dir}/python/lib/
+    ;;
+esac
+
+$(dirname $0)/pack.sh ${tardir}
diff --git a/taskcluster/scripts/misc/build-custom-car.sh b/taskcluster/scripts/misc/build-custom-car.sh
new file mode 100755
index 0000000000..d95538cb0e
--- /dev/null
+++ b/taskcluster/scripts/misc/build-custom-car.sh
@@ -0,0 +1,205 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for building a custom version of chromium-as-release
+
+# First argument must be the artifact name
+ARTIFACT_NAME=$(basename $TOOLCHAIN_ARTIFACT)
+shift
+
+# Use the rest of the arguments as the build config for gn
+CONFIG=$(echo $* | tr -d "'")
+
+# Android build flag
+if [[ "$ARTIFACT_NAME" == *"android"* ]]; then
+  IS_ANDROID=true
+fi
+
+mkdir custom_car
+cd custom_car
+CUSTOM_CAR_DIR=$PWD
+
+# Setup depot_tools
+git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+export PATH="$PATH:$CUSTOM_CAR_DIR/depot_tools"
+
+# Log the current revision of depot_tools for easier tracking in the future
+DEPOT_TOOLS_REV=$(cd depot_tools && git rev-parse HEAD)
+echo "Current depot_tools revision: $DEPOT_TOOLS_REV" + +# Set up some env variables depending on the target OS +# Linux is the default case, with minor adjustments for +# android since it is built with a linux host + +# Final folder structure before compressing is +# the same for linux and windows +FINAL_BIN_PATH="src/out/Default" + +# Final binary name for autoninja build sequence +FINAL_BIN=chrome + +# Unique substring for PGO data for Linux +PGO_SUBSTR="chrome-linux-main" + +# Default (non android) fetch name for upstream src +FETCH_NAME=chromium + +# Android specific vars +if [ "$IS_ANDROID" = true ]; then + FETCH_NAME=android + PGO_SUBSTR="android64" + FINAL_BIN_PATH="src/out/Default/apks" + FINAL_BIN=chrome_public_apk +fi + +# Logic for macosx64 +if [[ $(uname -s) == "Darwin" ]]; then + # Modify the config with fetched sdk path + export MACOS_SYSROOT="$MOZ_FETCHES_DIR/MacOSX14.2.sdk" + + # Avoid mixing up the system python and toolchain python in the + # python path configuration + # https://bugs.python.org/issue22490 + unset __PYVENV_LAUNCHER__ + + # Set the SDK path for build, which is technically a higher version + # than what is associated with the current OS version (10.15). + # This should work as long as MACOSX_DEPLOYMENT_TARGET is set correctly + CONFIG=$(echo $CONFIG mac_sdk_path='"'$MACOS_SYSROOT'"') + + # Ensure we don't use ARM64 profdata with this unique sub string + PGO_SUBSTR="chrome-mac-main" + + # Temporary hacky way for now while we build this on intel workers. + # Afterwards we can replace it with a $(uname -m) == "arm64" check. + # Bug 1858740 + if [[ "$ARTIFACT_NAME" == *"macosx_arm"* ]]; then + PGO_SUBSTR="chrome-mac-arm-main" + fi + + # macOS final build folder is different than linux/win + FINAL_BIN_PATH="src/out/Default/Chromium.app" +fi + +# Logic for win64 using the mingw environment +if [[ $(uname -o) == "Msys" ]]; then + # Setup VS 2022 + . $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh + + # Setup some environment variables for chromium build scripts + export DEPOT_TOOLS_WIN_TOOLCHAIN=0 + export GYP_MSVS_OVERRIDE_PATH="$MOZ_FETCHES_DIR/VS" + export GYP_MSVS_VERSION=2022 + export vs2022_install="$MOZ_FETCHES_DIR/VS" + export WINDOWSSDKDIR="$MOZ_FETCHES_DIR/VS/Windows Kits/10" + export DEPOT_TOOLS_UPDATE=1 + export GCLIENT_PY3=1 + # Fool GYP + touch "$MOZ_FETCHES_DIR/VS/VC/vcvarsall.bat" + + # Construct some of our own dirs and move VS dlls + other files + # to a path that chromium build files & scripts are expecting + mkdir chrome_dll + cd chrome_dll + mkdir system32 + cd ../ + pushd "$WINDOWSSDKDIR" + mkdir -p Debuggers/x64/ + popd + mv $MOZ_FETCHES_DIR/VS/VC/Redist/MSVC/14.38.33130/x64/Microsoft.VC143.CRT/* chrome_dll/system32/ + mv "$WINDOWSSDKDIR/App Certification Kit/"* "$WINDOWSSDKDIR"/Debuggers/x64/ + export WINDIR="$PWD/chrome_dll" + + # Run glcient once first to get some windows deps + gclient + + # Ensure we don't use WIN32 profdata with this unique sub string + PGO_SUBSTR="chrome-win64-main" +fi + +# Get chromium source code and dependencies +mkdir chromium +cd chromium + +fetch --no-history --nohooks $FETCH_NAME + +# Setup the .gclient file to ensure pgo profiles are downloaded. +# For some reason we need to set --name flag even though it already exists. 
+# Currently the gclient.py file does NOT recognize --custom-var as its own argument
+gclient config --name src "https://chromium.googlesource.com/chromium/src.git" --custom-var="checkout_pgo_profiles=True" --unmanaged
+
+cd src
+
+# Log the current revision of the chromium src for easier tracking in the future
+CHROMIUM_REV=$(git rev-parse HEAD)
+echo "Current chromium revision: $CHROMIUM_REV"
+
+# Amend gclient file
+if [ "$IS_ANDROID" = true ]; then
+  echo "target_os = [ 'android' ]" >> ../.gclient
+fi
+
+if [[ $(uname -o) == "Msys" ]]; then
+  # On Windows, fast fetches leave out some dummy files that the rest of the
+  # build sequence expects. We can create one this way to satisfy it.
+  # This is ok because we are not doing any development here and don't need
+  # the development history, but this file is still needed to proceed.
+  python3 build/util/lastchange.py -o build/util/LASTCHANGE
+fi
+
+if [[ $(uname -s) == "Linux" ]] || [[ $(uname -s) == "Darwin" ]]; then
+  # Bug 1847210
+  # Differences in dirname, depot_tools, and other env variables change how
+  # cipd is set up on Mac and Linux. Easily resolved by just running the
+  # setup script.
+  source ./third_party/depot_tools/cipd_bin_setup.sh
+  cipd_bin_setup
+fi
+
+# Sync again for android, after cipd bin setup
+if [ "$IS_ANDROID" = true ]; then
+  gclient sync
+fi
+
+# Now we can run hooks and fetch PGO + everything else
+gclient runhooks
+
+# PGO data should be in src/chrome/build/pgo_profiles/
+# with a name like "chrome-{OS}-"
+export PGO_DATA_DIR="$CUSTOM_CAR_DIR/chromium/src/chrome/build/pgo_profiles"
+for entry in "$PGO_DATA_DIR"/*
+do
+  if [ -f "$entry" ]; then
+    if [[ "$entry" == *"$PGO_SUBSTR"* ]]; then
+      echo "Found the correct profdata"
+      export PGO_DATA_PATH="$entry"
+      break
+    fi
+  fi
+done
+
+PGO_FILE=$PGO_DATA_PATH
+if [[ $(uname -o) == "Msys" ]]; then
+  # Compute a relative path that the build scripts look for.
+  # This odd pathing seems to only happen on windows
+  PGO_FILE=${PGO_DATA_PATH#*/*/*/*/*/*/*/*/*/}
+  mv $PGO_DATA_PATH build/config/compiler/pgo/
+fi
+
+CONFIG=$(echo $CONFIG pgo_data_path='"'$PGO_FILE'"')
+
+# Set up then build chrome
+gn gen out/Default --args="$CONFIG"
+autoninja -C out/Default $FINAL_BIN
+
+# Gather binary and related files into an archive, and upload it
+cd ..
+mkdir chromium
+
+mv "$FINAL_BIN_PATH" chromium
+chmod -R +x chromium
+
+tar -c chromium | python3 $GECKO_PATH/taskcluster/scripts/misc/zstdpy > $ARTIFACT_NAME
+
+mkdir -p $UPLOAD_DIR
+mv "$ARTIFACT_NAME" "$UPLOAD_DIR"
diff --git a/taskcluster/scripts/misc/build-custom-v8.sh b/taskcluster/scripts/misc/build-custom-v8.sh
new file mode 100755
index 0000000000..5c8ea673ad
--- /dev/null
+++ b/taskcluster/scripts/misc/build-custom-v8.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for building a custom version of V8
+ARTIFACT_NAME='d8.tar.zst'
+CONFIG='is_debug=false target_cpu="x64"'
+if [[ $# -eq 0 ]]; then
+  echo "Using default configuration for v8 build."
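+  # (tr -d "'" below strips the literal single quotes from the default
+  # CONFIG so gn receives bare key=value arguments.)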
+ CONFIG=$(echo $CONFIG | tr -d "'") +else + # First argument must be the artifact name + ARTIFACT_NAME="$1" + shift + + # Use the rest of the arguments as the build config + CONFIG=$(echo $* | tr -d "'") +fi + +echo "Config: $CONFIG" +echo "Artifact name: $ARTIFACT_NAME" + +cd $GECKO_PATH + +# Setup depot_tools +git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git +export PATH=$PATH:$GECKO_PATH/depot_tools + +# Get v8 source code and dependencies +fetch --force v8 +cd v8 + +# Build v8 +gn gen out/release --args="$CONFIG" +ninja -C out/release d8 + +# Gather binary and related files into a zip, and upload it +cd .. +mkdir d8 + +cp -R v8/out/release d8 +cp -R v8/include d8 +chmod -R +x d8 + +tar caf $ARTIFACT_NAME d8 + +mkdir -p $UPLOAD_DIR +cp $ARTIFACT_NAME $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-dist-toolchains.sh b/taskcluster/scripts/misc/build-dist-toolchains.sh new file mode 100755 index 0000000000..1ad1871775 --- /dev/null +++ b/taskcluster/scripts/misc/build-dist-toolchains.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -x -e -v + +# This script is for packaging toolchains suitable for use by distributed sccache. +TL_NAME="$1" + +mkdir -p $HOME/artifacts +mkdir -p $HOME/toolchains + +mv $MOZ_FETCHES_DIR/$TL_NAME $HOME/toolchains/$TL_NAME + +$MOZ_FETCHES_DIR/sccache/sccache --package-toolchain $HOME/toolchains/$TL_NAME/bin/$TL_NAME $HOME/artifacts/$TL_NAME-dist-toolchain.tar.xz diff --git a/taskcluster/scripts/misc/build-dmg-hfsplus.sh b/taskcluster/scripts/misc/build-dmg-hfsplus.sh new file mode 100755 index 0000000000..b0039432aa --- /dev/null +++ b/taskcluster/scripts/misc/build-dmg-hfsplus.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -x -e -v + +# This script is for building libdmg-hfsplus to get the `dmg` and `hfsplus` +# tools for producing DMG archives on Linux. + +WORKSPACE=$HOME/workspace +STAGE=$WORKSPACE/dmg + +mkdir -p $UPLOAD_DIR $STAGE + +cd $MOZ_FETCHES_DIR/libdmg-hfsplus + +# The openssl libraries in the sysroot cannot be linked in a PIE executable so we use -no-pie +cmake \ + -DCMAKE_C_COMPILER=$MOZ_FETCHES_DIR/clang/bin/clang \ + -DCMAKE_CXX_COMPILER=$MOZ_FETCHES_DIR/clang/bin/clang++ \ + -DCMAKE_SYSROOT=$MOZ_FETCHES_DIR/sysroot \ + -DOPENSSL_USE_STATIC_LIBS=1 \ + -DCMAKE_EXE_LINKER_FLAGS=-no-pie \ + . + +make VERBOSE=1 -j$(nproc) + +# We only need the dmg and hfsplus tools. +strip dmg/dmg hfs/hfsplus +cp dmg/dmg hfs/hfsplus $STAGE + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. +queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1" + +cat >$STAGE/README<&2 + exit 1 + fi + + # The sixgill plugin uses some gmp symbols, including some not exported by + # cc1/cc1plus. So link the plugin statically to libgmp. Except that the + # default static build does not have -fPIC, and will result in a relocation + # error, so build our own. This requires the gcc and related source to be + # in $root_dir/gcc-source. 
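+  # (For the curious: linking a non-PIC archive into the plugin's .so would
+  # typically fail with something like "relocation R_X86_64_32S against
+  # symbol ... can not be used when making a shared object; recompile with
+  # -fPIC", hence the --with-pic configure flag below.)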
+ + mkdir $root_dir/gmp-objdir || true + ( + cd $root_dir/gmp-objdir + $root_dir/gcc-source/gmp/configure --disable-shared --with-pic --prefix=$gmp_prefix + make -j8 + make install DESTDIR=$root_dir + ) +} + +build_sixgill() {( + cd $root_dir/sixgill + export CC=$gcc_bindir/gcc + export CXX=$gcc_bindir/g++ + export PATH="$gcc_bindir:$PATH" + export LD_LIBRARY_PATH="${gcc_bindir%/bin}/lib64" + export TARGET_CC=$CC + export CPPFLAGS=-I$gmp_dir/include + export EXTRA_LDFLAGS=-L$gmp_dir/lib + export HOST_CFLAGS=$CPPFLAGS + + ./release.sh --build-and-package --with-gmp=$gmp_dir + tarball=$(ls -td *-sixgill | head -1)/sixgill.tar.xz + cp $tarball $root_dir/sixgill.tar.xz +)} + +prepare_sixgill +build_gmp +build_sixgill + +# Put a tarball in the artifacts dir +mkdir -p $UPLOAD_DIR +cp $MOZ_FETCHES_DIR/sixgill.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-geckodriver.sh b/taskcluster/scripts/misc/build-geckodriver.sh new file mode 100755 index 0000000000..7434ee2ef8 --- /dev/null +++ b/taskcluster/scripts/misc/build-geckodriver.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +set -x -e -v + +# Needed by osx-cross-linker. +export TARGET="$1" + +cd $GECKO_PATH + +EXE= +COMPRESS_EXT=gz + +case "$TARGET" in +*windows-msvc) + EXE=.exe + COMPRESS_EXT=zip + . $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh + # Bug 1584530: don't require the Microsoft MSVC runtime to be installed. + export RUSTFLAGS="-Ctarget-feature=+crt-static -C linker=$MOZ_FETCHES_DIR/clang/bin/lld-link" + export TARGET_CFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml" + export TARGET_CXXFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml" + ;; +# OSX cross builds are a bit harder +*-apple-darwin) + export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH" + export RUSTFLAGS="-C linker=$GECKO_PATH/taskcluster/scripts/misc/osx-cross-linker" + if test "$TARGET" = "aarch64-apple-darwin"; then + export MACOSX_DEPLOYMENT_TARGET=11.0 + else + export MACOSX_DEPLOYMENT_TARGET=10.12 + fi + ;; +aarch64-unknown-linux-musl) + export RUSTFLAGS="-C linker=$MOZ_FETCHES_DIR/clang/bin/clang -C link-arg=--target=$TARGET -C link-arg=-fuse-ld=lld" + ;; +esac + +export PATH="$MOZ_FETCHES_DIR/rustc/bin:$PATH" + +cd $GECKO_PATH/testing/geckodriver + +cp $GECKO_PATH/.cargo/config.in $GECKO_PATH/.cargo/config + +cargo build --frozen --verbose --release --target "$TARGET" + +cd $GECKO_PATH +mkdir -p $UPLOAD_DIR + +cp target/$TARGET/release/geckodriver$EXE . +if [ "$COMPRESS_EXT" = "zip" ]; then + zip geckodriver.zip geckodriver$EXE + cp geckodriver.zip $UPLOAD_DIR +else + tar -acf geckodriver.tar.$COMPRESS_EXT geckodriver$EXE + cp geckodriver.tar.$COMPRESS_EXT $UPLOAD_DIR +fi + +. $GECKO_PATH/taskcluster/scripts/misc/vs-cleanup.sh diff --git a/taskcluster/scripts/misc/build-gn-common.sh b/taskcluster/scripts/misc/build-gn-common.sh new file mode 100755 index 0000000000..b72d51df09 --- /dev/null +++ b/taskcluster/scripts/misc/build-gn-common.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -e -v + +# This is shared code for building GN. 
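+# It is not invoked directly: the per-platform wrappers (build-gn-linux.sh,
+# build-gn-macosx.sh, build-gn-win64.sh) export CC/CXX and flags, then
+# source this file.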
+cd $MOZ_FETCHES_DIR/gn + +if test -n "$MAC_CROSS"; then + python3 build/gen.py --platform darwin --no-last-commit-position +else + python3 build/gen.py --no-last-commit-position +fi + +cat > out/last_commit_position.h < gn.tar.zst +cp gn.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-gn-linux.sh b/taskcluster/scripts/misc/build-gn-linux.sh new file mode 100755 index 0000000000..f6fd9fd507 --- /dev/null +++ b/taskcluster/scripts/misc/build-gn-linux.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e -v + +# This script is for building GN on Linux. + +WORKSPACE=$HOME/workspace +export CC=gcc +export CXX=g++ +export LDFLAGS=-lrt + +cd $GECKO_PATH + +. taskcluster/scripts/misc/build-gn-common.sh diff --git a/taskcluster/scripts/misc/build-gn-macosx.sh b/taskcluster/scripts/misc/build-gn-macosx.sh new file mode 100755 index 0000000000..0d7f5d50a3 --- /dev/null +++ b/taskcluster/scripts/misc/build-gn-macosx.sh @@ -0,0 +1,23 @@ +#!/bin/bash +set -e -v + +# This script is for building GN. + +WORKSPACE=$HOME/workspace + +CROSS_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk +export MACOSX_DEPLOYMENT_TARGET=10.12 + +export CC=$MOZ_FETCHES_DIR/clang/bin/clang +export CXX=$MOZ_FETCHES_DIR/clang/bin/clang++ +export AR=$MOZ_FETCHES_DIR/clang/bin/llvm-ar +export CFLAGS="-target x86_64-apple-darwin -isysroot ${CROSS_SYSROOT} -I${CROSS_SYSROOT}/usr/include -iframework ${CROSS_SYSROOT}/System/Library/Frameworks" +export CXXFLAGS="-stdlib=libc++ ${CFLAGS}" +export LDFLAGS="-fuse-ld=lld ${CXXFLAGS} -Wl,-syslibroot,${CROSS_SYSROOT} -Wl,-dead_strip" + +# We patch tools/gn/bootstrap/bootstrap.py to detect this. +export MAC_CROSS=1 + +cd $GECKO_PATH + +. taskcluster/scripts/misc/build-gn-common.sh diff --git a/taskcluster/scripts/misc/build-gn-win64.sh b/taskcluster/scripts/misc/build-gn-win64.sh new file mode 100755 index 0000000000..3ecd71fc74 --- /dev/null +++ b/taskcluster/scripts/misc/build-gn-win64.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -e -v -x + +# This script is for building GN on Windows. + +UPLOAD_DIR=$PWD/public/build + +cd $GECKO_PATH + +export PATH="$(cd $MOZ_FETCHES_DIR && pwd)/ninja/bin:$PATH" +export PATH="$(cd $MOZ_FETCHES_DIR && pwd)/mingw64/bin:$PATH" + +. taskcluster/scripts/misc/vs-setup.sh +. taskcluster/scripts/misc/build-gn-common.sh + +. $GECKO_PATH/taskcluster/scripts/misc/vs-cleanup.sh diff --git a/taskcluster/scripts/misc/build-hfsplus-linux.sh b/taskcluster/scripts/misc/build-hfsplus-linux.sh new file mode 100755 index 0000000000..91f9901ebc --- /dev/null +++ b/taskcluster/scripts/misc/build-hfsplus-linux.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -x -e -v + +# This script is for building hfsplus for Linux. + +cd $GECKO_PATH + +export PATH=$PATH:$MOZ_FETCHES_DIR/clang/bin + +build/unix/build-hfsplus/build-hfsplus.sh $MOZ_FETCHES_DIR + +# Put a tarball in the artifacts dir +mkdir -p $UPLOAD_DIR +cp $MOZ_FETCHES_DIR/hfsplus.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-libunwind.sh b/taskcluster/scripts/misc/build-libunwind.sh new file mode 100755 index 0000000000..1be5168d0a --- /dev/null +++ b/taskcluster/scripts/misc/build-libunwind.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +set -e -x + +artifact=$(basename $TOOLCHAIN_ARTIFACT) +dir=${artifact%.tar.*} +target=${dir#libunwind-} + +# Make the android compiler-rt available to clang. 
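+# (UPLOAD_DIR is cleared for this invocation so repack-clang.sh only updates
+# the local clang and does not publish an artifact of its own.)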
+env UPLOAD_DIR= $GECKO_PATH/taskcluster/scripts/misc/repack-clang.sh + +EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DLLVM_ENABLE_PER_TARGET_RUNTIME_DIR=ON + -DCMAKE_C_COMPILER_WORKS=1 + -DCMAKE_CXX_COMPILER_WORKS=1 + -DLLVM_ENABLE_RUNTIMES=libunwind + -DLIBUNWIND_ENABLE_SHARED=OFF +" + +export EXTRA_CMAKE_FLAGS + +$(dirname $0)/build-llvm-common.sh runtimes install $target "$@" diff --git a/taskcluster/scripts/misc/build-llvm-common.sh b/taskcluster/scripts/misc/build-llvm-common.sh new file mode 100755 index 0000000000..2dbf68d3ec --- /dev/null +++ b/taskcluster/scripts/misc/build-llvm-common.sh @@ -0,0 +1,201 @@ +#!/bin/sh + +set -e -x + +artifact=$(basename $TOOLCHAIN_ARTIFACT) +dir=${artifact%.tar.*} +what=$1 +shift +install=$1 +shift +target=$1 +shift + +clang=$MOZ_FETCHES_DIR/clang/bin/clang + +case "$target" in +aarch64-apple-darwin) + arch=arm64 + export MACOSX_DEPLOYMENT_TARGET=11.0 + compiler_wrapper() { + echo exec \$MOZ_FETCHES_DIR/clang/bin/$1 -mcpu=apple-m1 \"\$@\" > $1 + chmod +x $1 + } + compiler_wrapper clang + compiler_wrapper clang++ + clang=$PWD/clang + ;; +x86_64-apple-darwin) + arch=x86_64 + export MACOSX_DEPLOYMENT_TARGET=10.12 + ;; +esac + +case "$target" in +*-apple-darwin) + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCMAKE_LINKER=$MOZ_FETCHES_DIR/clang/bin/ld64.lld + -DCMAKE_LIPO=$MOZ_FETCHES_DIR/clang/bin/llvm-lipo + -DCMAKE_SYSTEM_NAME=Darwin + -DCMAKE_SYSTEM_VERSION=$MACOSX_DEPLOYMENT_TARGET + -DCMAKE_OSX_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk + -DCMAKE_EXE_LINKER_FLAGS=-fuse-ld=lld + -DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld + -DDARWIN_osx_ARCHS=$arch + -DDARWIN_osx_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk + -DDARWIN_macosx_OVERRIDE_SDK_VERSION=11.0 + -DDARWIN_osx_BUILTIN_ARCHS=$arch + -DLLVM_DEFAULT_TARGET_TRIPLE=$target + " + # compiler-rt build script expects to find `codesign` in $PATH. + # Give it a fake one. + echo "#!/bin/sh" > codesign + chmod +x codesign + # cmake makes decisions based on the output of the mac-only sw_vers, which is + # obviously missing when cross-compiling, so create a fake one. The exact + # version doesn't really matter: as of writing, cmake checks at most for 10.5. + echo "#!/bin/sh" > sw_vers + echo echo 10.12 >> sw_vers + chmod +x sw_vers + PATH="$PATH:$PWD" + ;; +*-linux-android) + case "$target" in + armv7-linux-android) + arch=arm + ;; + *-linux-android) + arch=${target%-linux-android} + ;; + esac + api_level=21 + target=$target$api_level + # These flags are only necessary to pass the cmake tests. They don't end up + # actually using libgcc, so use an empty library instead of trying to find + # where it is in the NDK. 
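+# (The single quotes around '-fuse-ld=lld $exe_linker_flags' below are kept
+# literally in $EXTRA_CMAKE_FLAGS; they group the linker flags into a single
+# cmake argument when the final `eval cmake` re-parses the variable.)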
+ if [ "$what" = "compiler-rt" ]; then + exe_linker_flags="--rtlib=libgcc -L$PWD" + touch libgcc.a + fi + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCMAKE_SYSROOT=$MOZ_FETCHES_DIR/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/sysroot + -DCMAKE_LINKER=$MOZ_FETCHES_DIR/clang/bin/ld.lld + -DCMAKE_EXE_LINKER_FLAGS='-fuse-ld=lld $exe_linker_flags' + -DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld + -DANDROID=1 + -DANDROID_NATIVE_API_LEVEL=$api_level + -DSANITIZER_ALLOW_CXXABI=OFF + -DLLVM_DEFAULT_TARGET_TRIPLE=$arch-unknown-linux-android + " + ;; +*-unknown-linux-gnu) + if [ -d "$MOZ_FETCHES_DIR/sysroot" ]; then + sysroot=$MOZ_FETCHES_DIR/sysroot + else + sysroot=$MOZ_FETCHES_DIR/sysroot-${target%-unknown-linux-gnu}-linux-gnu + fi + if [ "${target%-unknown-linux-gnu}" = i686 ]; then + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DLLVM_TABLEGEN=$MOZ_FETCHES_DIR/clang/bin/llvm-tblgen + " + fi + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCMAKE_SYSROOT=$sysroot + -DCMAKE_LINKER=$MOZ_FETCHES_DIR/clang/bin/ld.lld + -DCMAKE_EXE_LINKER_FLAGS=-fuse-ld=lld + -DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld + -DLLVM_ENABLE_TERMINFO=OFF + " + ;; +*-pc-windows-msvc) + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCMAKE_TOOLCHAIN_FILE=$MOZ_FETCHES_DIR/llvm-project/llvm/cmake/platforms/WinMsvc.cmake + -DLLVM_NATIVE_TOOLCHAIN=$MOZ_FETCHES_DIR/clang + -DHOST_ARCH=${target%-pc-windows-msvc} + -DLLVM_DISABLE_ASSEMBLY_FILES=ON + " + # LLVM 15+ uses different input variables. + if grep -q LLVM_WINSYSROOT $MOZ_FETCHES_DIR/llvm-project/llvm/cmake/platforms/WinMsvc.cmake; then + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DLLVM_WINSYSROOT=$MOZ_FETCHES_DIR/vs + " + else + # WinMsvc.cmake before LLVM 15 doesn't support spaces in WINDSK_BASE. + ln -s "windows kits/10" $MOZ_FETCHES_DIR/vs/sdk + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DMSVC_BASE=$MOZ_FETCHES_DIR/vs/vc/tools/msvc/14.29.30133 + -DWINSDK_BASE=$MOZ_FETCHES_DIR/vs/sdk + -DWINSDK_VER=10.0.19041.0 + " + fi + ;; +*) + echo $target is not supported yet + exit 1 + ;; +esac + +case "$target" in +*-pc-windows-msvc) + ;; +*) + EXTRA_CMAKE_FLAGS=" + $EXTRA_CMAKE_FLAGS + -DCMAKE_C_COMPILER=$clang + -DCMAKE_CXX_COMPILER=$clang++ + -DCMAKE_AR=$MOZ_FETCHES_DIR/clang/bin/llvm-ar + -DCMAKE_RANLIB=$MOZ_FETCHES_DIR/clang/bin/llvm-ranlib + " + ;; +esac + +mkdir build +cd build + +for patchfile in "$@"; do + case $patchfile in + *.json) + jq -r '.patches[]' $GECKO_PATH/$patchfile | while read p; do + patch -d $MOZ_FETCHES_DIR/llvm-project -p1 < $GECKO_PATH/$(dirname $patchfile)/$p + done + ;; + *) + patch -d $MOZ_FETCHES_DIR/llvm-project -p1 < $GECKO_PATH/$patchfile + ;; + esac +done + +eval cmake \ + $MOZ_FETCHES_DIR/llvm-project/$what \ + -GNinja \ + -DCMAKE_C_COMPILER_TARGET=$target \ + -DCMAKE_CXX_COMPILER_TARGET=$target \ + -DCMAKE_ASM_COMPILER_TARGET=$target \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=${PWD}/${dir} \ + -DLLVM_ENABLE_ASSERTIONS=OFF \ + -DLLVM_CONFIG_PATH=$MOZ_FETCHES_DIR/clang/bin/llvm-config \ + $EXTRA_CMAKE_FLAGS + +ninja -v $install + +if [ "$what" = "compiler-rt" ]; then + # ninja install doesn't copy the PDBs + case "$target" in + *-pc-windows-msvc) + cp lib/windows/*pdb $dir/lib/windows/ + ;; + esac +fi + +tar caf "$artifact" "$dir" + +mkdir -p "$UPLOAD_DIR" +mv "$artifact" "$UPLOAD_DIR" diff --git a/taskcluster/scripts/misc/build-llvm-symbolizer.sh b/taskcluster/scripts/misc/build-llvm-symbolizer.sh new file mode 100755 index 0000000000..f0b3657376 --- /dev/null +++ b/taskcluster/scripts/misc/build-llvm-symbolizer.sh @@ -0,0 +1,5 @@ 
+#!/bin/sh + +set -e -x + +$(dirname $0)/build-llvm-common.sh llvm install-llvm-symbolizer "$@" diff --git a/taskcluster/scripts/misc/build-mar-tools.sh b/taskcluster/scripts/misc/build-mar-tools.sh new file mode 100755 index 0000000000..e80a160e48 --- /dev/null +++ b/taskcluster/scripts/misc/build-mar-tools.sh @@ -0,0 +1,27 @@ +#!/bin/bash +set -x -e -v + +# This script is for building mar and mbsdiff + +COMPRESS_EXT=zst + +cd $GECKO_PATH + +export MOZ_OBJDIR=obj-mar + +echo ac_add_options --enable-project=tools/update-packaging > .mozconfig + +TOOLCHAINS="clang" + +for t in $TOOLCHAINS; do + PATH="$MOZ_FETCHES_DIR/$t/bin:$PATH" +done + +./mach build -v + +mkdir mar-tools +cp $MOZ_OBJDIR/dist/host/bin/{mar,mbsdiff} mar-tools/ + +tar -acf mar-tools.tar.$COMPRESS_EXT mar-tools/ +mkdir -p $UPLOAD_DIR +cp mar-tools.tar.$COMPRESS_EXT $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-mingw-fxc2-x86.sh b/taskcluster/scripts/misc/build-mingw-fxc2-x86.sh new file mode 100755 index 0000000000..da93bbf1bc --- /dev/null +++ b/taskcluster/scripts/misc/build-mingw-fxc2-x86.sh @@ -0,0 +1,25 @@ +#!/bin/bash +set -x -e -v + +WORKSPACE=$HOME/workspace +INSTALL_DIR=$WORKSPACE/fxc2 + +mkdir -p $INSTALL_DIR/bin + +export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH" + +# -------------- + +cd $MOZ_FETCHES_DIR/fxc2 +make -j$(nproc) x86 + +cp fxc2.exe $INSTALL_DIR/bin/ +cp dll/d3dcompiler_47_32.dll $INSTALL_DIR/bin/d3dcompiler_47.dll + +# -------------- + +cd $WORKSPACE +tar caf fxc2.tar.zst fxc2 + +mkdir -p $UPLOAD_DIR +cp fxc2.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-mingw32-nsis.sh b/taskcluster/scripts/misc/build-mingw32-nsis.sh new file mode 100755 index 0000000000..85b3bd61ce --- /dev/null +++ b/taskcluster/scripts/misc/build-mingw32-nsis.sh @@ -0,0 +1,71 @@ +#!/bin/bash +set -x -e -v + +INSTALL_DIR=$MOZ_FETCHES_DIR/nsis + +mkdir -p $INSTALL_DIR + +cd $MOZ_FETCHES_DIR + +export PATH="$MOZ_FETCHES_DIR/binutils/bin:$MOZ_FETCHES_DIR/clang/bin:$PATH" + +# Call.S, included from CallCPP.S contains directives that clang's integrated +# assembler doesn't understand. +cat <<'EOF' >$MOZ_FETCHES_DIR/clang/bin/i686-w64-mingw32-gcc +#!/bin/sh +case "$@" in +*/CallCPP.S) + $(dirname $0)/i686-w64-mingw32-clang -fno-integrated-as "$@" + ;; +*) + $(dirname $0)/i686-w64-mingw32-clang "$@" + ;; +esac +EOF + +chmod +x $MOZ_FETCHES_DIR/clang/bin/i686-w64-mingw32-gcc +ln -s i686-w64-mingw32-clang++ $MOZ_FETCHES_DIR/clang/bin/i686-w64-mingw32-g++ + +# -------------- + +cd zlib-1.3.1 +make -f win32/Makefile.gcc PREFIX=i686-w64-mingw32- + +cd ../nsis-3.07-src +patch -p1 < $GECKO_PATH/build/win32/nsis-no-insert-timestamp.patch +patch -p1 < $GECKO_PATH/build/win32/nsis-no-underscore.patch +patch -p1 < $GECKO_PATH/build/win32/nsis-scons.patch +# --exclude-libs is not supported by lld, but is not required anyways. +# /fixed is passed by the build system when building with MSVC but not +# when building with GCC/binutils. The build system doesn't really support +# clang/lld, but apparently binutils and lld don't have the same defaults +# related to this. Unfortunately, /fixed is necessary for the stubs to be +# handled properly by the resource editor in NSIS, which doesn't handle +# relocations, so we pass the equivalent flag to lld-link through lld through +# clang. +sed -i 's/-Wl,--exclude-libs,msvcrt.a/-Wl,-Xlink=-fixed/' SCons/Config/gnu +# memcpy.c and memset.c are built with a C++ compiler so we need to +# avoid their symbols being mangled. 
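+# (sed '2i ...' below inserts the literal line `extern "C"` as the new second
+# line of each file, giving the definitions unmangled C linkage even though
+# they are compiled by a C++ compiler.)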
+sed -i '2i extern "C"' SCons/Config/{memcpy,memset}.c +# Makensisw is skipped because its resource file fails to build with +# llvm-rc, but we don't need makensisw. +scons \ + PATH=$PATH \ + CC="clang --sysroot $MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" \ + CXX="clang++ --sysroot $MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" \ + XGCC_W32_PREFIX=i686-w64-mingw32- \ + ZLIB_W32=../zlib-1.3.1 \ + SKIPUTILS="NSIS Menu,Makensisw" \ + PREFIX_DEST=$INSTALL_DIR/ \ + PREFIX_BIN=bin \ + NSIS_CONFIG_CONST_DATA_PATH=no \ + VERSION=3.07 \ + install +# -------------- + +cd $MOZ_FETCHES_DIR + +tar caf nsis.tar.zst nsis + +mkdir -p $UPLOAD_DIR +cp nsis.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-mkbom-linux.sh b/taskcluster/scripts/misc/build-mkbom-linux.sh new file mode 100755 index 0000000000..8b4a69a1ef --- /dev/null +++ b/taskcluster/scripts/misc/build-mkbom-linux.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -x -e -v + +# This script is for building mkbom for Linux. +mkdir -p $UPLOAD_DIR + +export PATH=$PATH:$MOZ_FETCHES_DIR/clang/bin +cd $MOZ_FETCHES_DIR/bomutils + +make_flags="-j$(nproc)" +make "$make_flags" + +cd $(mktemp -d) +mkdir mkbom + +cp $MOZ_FETCHES_DIR/bomutils/build/bin/mkbom ./mkbom/mkbom +tar caf $UPLOAD_DIR/mkbom.tar.zst ./mkbom diff --git a/taskcluster/scripts/misc/build-mozmake.sh b/taskcluster/scripts/misc/build-mozmake.sh new file mode 100755 index 0000000000..455496787b --- /dev/null +++ b/taskcluster/scripts/misc/build-mozmake.sh @@ -0,0 +1,66 @@ +#!/bin/bash +set -x -e -v + +. $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh + +cd $MOZ_FETCHES_DIR/make + +# Patch for http://savannah.gnu.org/bugs/?58656 +patch -p1 <<'EOF' +diff --git a/src/remake.c b/src/remake.c +index fb237c5..b2ba069 100644 +--- a/src/remake.c ++++ b/src/remake.c +@@ -35,6 +35,13 @@ this program. If not, see . */ + #endif + #ifdef WINDOWS32 + #include ++#include ++#if defined(_MSC_VER) && _MSC_VER > 1200 ++/* VC7 or later support _stat64 to access 64-bit file size. */ ++#define stat64 _stat64 ++#else ++#define stat64 stat ++#endif + #endif + + +@@ -1466,7 +1473,11 @@ static FILE_TIMESTAMP + name_mtime (const char *name) + { + FILE_TIMESTAMP mtime; ++#if defined(WINDOWS32) ++ struct stat64 st; ++#else + struct stat st; ++#endif + int e; + + #if defined(WINDOWS32) +@@ -1498,7 +1509,7 @@ name_mtime (const char *name) + tend = &tem[0]; + } + +- e = stat (tem, &st); ++ e = stat64 (tem, &st); + if (e == 0 && !_S_ISDIR (st.st_mode) && tend < tem + (p - name - 1)) + { + errno = ENOTDIR; +EOF + +chmod +w src/config.h.W32 +sed "/#define BATCH_MODE_ONLY_SHELL/s/\/\*\(.*\)\*\//\1/" src/config.h.W32 > src/config.h +make -f Basic.mk \ + MAKE_HOST=Windows32 \ + MKDIR.cmd='mkdir -p $1' \ + RM.cmd='rm -f $1' \ + CP.cmd='cp $1 $2' \ + msvc_CC="$MOZ_FETCHES_DIR/clang/bin/clang-cl -Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml" \ + msvc_LD=$MOZ_FETCHES_DIR/clang/bin/lld-link + +mkdir mozmake +cp WinRel/gnumake.exe mozmake/mozmake.exe + +tar -acvf mozmake.tar.zst mozmake +mkdir -p $UPLOAD_DIR +cp mozmake.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-msix-packaging.sh b/taskcluster/scripts/misc/build-msix-packaging.sh new file mode 100755 index 0000000000..345057a016 --- /dev/null +++ b/taskcluster/scripts/misc/build-msix-packaging.sh @@ -0,0 +1,33 @@ +#!/bin/bash +set -x -e -v + +cd $MOZ_FETCHES_DIR/msix-packaging + +export PATH=$MOZ_FETCHES_DIR/clang/bin:$PATH + +# makelinux.sh invokes `make` with no parallelism. These jobs run on hosts with +# 16+ vCPUs; let's try to take advantage. 
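+# (make reads MAKEFLAGS from the environment, so exporting it here is enough
+# to parallelize the inner make without patching makelinux.sh itself.)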
+export MAKEFLAGS=-j16 + +./makelinux.sh --pack -- \ + -DCMAKE_SYSROOT=$MOZ_FETCHES_DIR/sysroot \ + -DCMAKE_EXE_LINKER_FLAGS_INIT='-fuse-ld=lld -Wl,-rpath=\$ORIGIN' \ + -DCMAKE_SHARED_LINKER_FLAGS_INIT='-fuse-ld=lld -Wl,-rpath=\$ORIGIN' \ + -DCMAKE_SKIP_BUILD_RPATH=TRUE + +mkdir msix-packaging +cp .vs/bin/makemsix msix-packaging +cp .vs/lib/libmsix.so msix-packaging + +# The `msix-packaging` tool links against libicu dynamically. It would be +# better to link statically, but it's not easy to achieve. This copies the +# needed libicu libraries from the sysroot, and the rpath settings above allows +# them to be loaded, which means the consuming environment doesn't need to +# install libicu directly. +LD_LIBRARY_PATH=$MOZ_FETCHES_DIR/sysroot/usr/lib/x86_64-linux-gnu \ +ldd msix-packaging/libmsix.so | awk '$3 ~ /libicu/ {print $3}' | xargs -I '{}' cp '{}' msix-packaging + +tar caf msix-packaging.tar.zst msix-packaging + +mkdir -p $UPLOAD_DIR +cp msix-packaging.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-nasm.sh b/taskcluster/scripts/misc/build-nasm.sh new file mode 100755 index 0000000000..98370b312f --- /dev/null +++ b/taskcluster/scripts/misc/build-nasm.sh @@ -0,0 +1,63 @@ +#!/bin/bash +set -x -e -v + +COMPRESS_EXT=zst + +cd $MOZ_FETCHES_DIR/nasm-* + +case $(cat version) in +2.14.02) + # Fix for .debug_loc section containing garbage on elf32 + # https://bugzilla.nasm.us/show_bug.cgi?id=3392631 + patch -p1 <<'EOF' +diff --git a/output/outelf.c b/output/outelf.c +index de99d076..47031e12 100644 +--- a/output/outelf.c ++++ b/output/outelf.c +@@ -3275,7 +3275,7 @@ static void dwarf_generate(void) + WRITELONG(pbuf,framelen-4); /* initial length */ + + /* build loc section */ +- loclen = 16; ++ loclen = is_elf64() ? 16 : 8; + locbuf = pbuf = nasm_malloc(loclen); + if (is_elf32()) { + WRITELONG(pbuf,0); /* null beginning offset */ +EOF + ;; +esac + +export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH" + +case "$1" in + win64) + TARGET=x86_64-w64-mingw32 + CC=x86_64-w64-mingw32-clang + EXE=.exe + ;; + macosx64) + export MACOSX_DEPLOYMENT_TARGET=10.12 + TARGET=x86_64-apple-darwin + CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk" + EXE= + ;; + macosx64-aarch64) + export MACOSX_DEPLOYMENT_TARGET=11.0 + TARGET=aarch64-apple-darwin + CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk" + EXE= + ;; + *) + CC="clang --sysroot=$MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" + EXE= + ;; +esac +./configure CC="$CC" AR=llvm-ar RANLIB=llvm-ranlib LDFLAGS=-fuse-ld=lld ${TARGET:+--host=$TARGET} +make -j$(nproc) + +mv nasm$EXE nasm-tmp +mkdir nasm +mv nasm-tmp nasm/nasm$EXE +tar -acf nasm.tar.$COMPRESS_EXT nasm +mkdir -p "$UPLOAD_DIR" +cp nasm.tar.$COMPRESS_EXT "$UPLOAD_DIR" diff --git a/taskcluster/scripts/misc/build-nodejs.sh b/taskcluster/scripts/misc/build-nodejs.sh new file mode 100755 index 0000000000..16e281d980 --- /dev/null +++ b/taskcluster/scripts/misc/build-nodejs.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -x -e -v + +artifact=$(basename "$TOOLCHAIN_ARTIFACT") +project=${artifact%.tar.*} +workspace=$HOME/workspace + +cd $MOZ_FETCHES_DIR/$project + +gcc_major=8 +export CFLAGS=--sysroot=$MOZ_FETCHES_DIR/sysroot +export CXXFLAGS"=--sysroot=$MOZ_FETCHES_DIR/sysroot -isystem $MOZ_FETCHES_DIR/sysroot/usr/include/c++/$gcc_major -isystem $MOZ_FETCHES_DIR/sysroot/usr/include/x86_64-linux-gnu/c++/$gcc_major" +export LDFLAGS="--sysroot=$MOZ_FETCHES_DIR/sysroot -L$MOZ_FETCHES_DIR/sysroot/lib/x86_64-linux-gnu 
-L$MOZ_FETCHES_DIR/sysroot/usr/lib/x86_64-linux-gnu -L$MOZ_FETCHES_DIR/sysroot/usr/lib/gcc/x86_64-linux-gnu/$gcc_major" +export CC=$MOZ_FETCHES_DIR/gcc/bin/gcc +export CXX=$MOZ_FETCHES_DIR/gcc/bin/g++ + +./configure --verbose --prefix=/ +make -j$(nproc) install DESTDIR=$workspace/$project + +tar -C $workspace -acvf $artifact $project +mkdir -p $UPLOAD_DIR +mv $artifact $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-nsis.sh b/taskcluster/scripts/misc/build-nsis.sh new file mode 100755 index 0000000000..b1b8e06248 --- /dev/null +++ b/taskcluster/scripts/misc/build-nsis.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -x -e -v + +export PATH=$MOZ_FETCHES_DIR/clang/bin:$PATH + +# nsis/ contains the pre-built windows native nsis. We build a linux +# makensis from source and install it there. +INSTALL_DIR=$MOZ_FETCHES_DIR/nsis + +cd $MOZ_FETCHES_DIR/nsis-3.07-src +patch -p1 < $GECKO_PATH/build/win32/nsis-no-underscore.patch +scons \ + -j $(nproc) \ + PATH=$PATH \ + CC="clang --sysroot $MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" \ + CXX="clang++ --sysroot $MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" \ + SKIPSTUBS=all \ + SKIPPLUGINS=all \ + SKIPUTILS=all \ + SKIPMISC=all \ + PREFIX_DEST=$INSTALL_DIR/ \ + PREFIX_BIN=bin \ + NSIS_CONFIG_CONST_DATA_PATH=no \ + VERSION=3.07 \ + install-compiler + +cd $MOZ_FETCHES_DIR + +tar caf nsis.tar.zst nsis + +mkdir -p $UPLOAD_DIR +cp nsis.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-pkgconf.sh b/taskcluster/scripts/misc/build-pkgconf.sh new file mode 100755 index 0000000000..ef211eeef1 --- /dev/null +++ b/taskcluster/scripts/misc/build-pkgconf.sh @@ -0,0 +1,44 @@ +#!/bin/bash +set -x -e -v + +# This script is for building pkgconfs. +PROJECT=pkgconf + +cd ${MOZ_FETCHES_DIR}/${PROJECT} + +export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH" + +case "$1" in +x86_64-unknown-linux-gnu) + CC="clang --sysroot=$MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" + EXE= + ;; +x86_64-apple-darwin) + export MACOSX_DEPLOYMENT_TARGET=10.12 + TARGET=$1 + CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk" + EXE= + ;; +aarch64-apple-darwin) + export MACOSX_DEPLOYMENT_TARGET=11.0 + TARGET=$1 + CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk" + EXE= + ;; +x86_64-pc-windows-gnu) + TARGET=x86_64-w64-mingw32 + CC="x86_64-w64-mingw32-clang -DPKGCONFIG_IS_STATIC=1" + EXE=.exe + ;; +esac + +./configure --disable-shared CC="$CC" AR=llvm-ar RANLIB=llvm-ranlib LDFLAGS=-fuse-ld=lld ${TARGET:+--host=$TARGET} +make -j$(nproc) V=1 + +mv ${PROJECT}${EXE} ${PROJECT}_tmp +mkdir ${PROJECT} +mv ${PROJECT}_tmp ${PROJECT}/pkg-config${EXE} +tar -acf ${PROJECT}.tar.zst ${PROJECT} + +mkdir -p $UPLOAD_DIR +mv ${PROJECT}.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-resourcemonitor.sh b/taskcluster/scripts/misc/build-resourcemonitor.sh new file mode 100755 index 0000000000..6643079c76 --- /dev/null +++ b/taskcluster/scripts/misc/build-resourcemonitor.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -x -e -v + +cd "$MOZ_FETCHES_DIR"/resource-monitor/ || exit 1 +COMPRESS_EXT=zst + +PATH="$MOZ_FETCHES_DIR/go/bin:$PATH" +export PATH + +EXE_SUFFIX="" + +case "$1" in + linux64) GOOS=linux; GOARCH=amd64 ;; + macos64) GOOS=darwin; GOARCH=amd64 ;; + windows64) GOOS=windows; GOARCH=amd64; EXE_SUFFIX=".exe" ;; + windows32) GOOS=windows; GOARCH=386; EXE_SUFFIX=".exe" ;; + *) + echo "Unknown architecture $1 not recognized in build-resourcemonitor.sh" >&2 + exit 1 + ;; +esac + +export GOOS +export GOARCH +export EXE_SUFFIX + +echo "GOOS=$GOOS" +echo "GOARCH=$GOARCH" + +go 
build . + +STAGING_DIR="resource-monitor" +mv "resource-monitor${EXE_SUFFIX}" resource-monitor.tmp +mkdir "${STAGING_DIR}" + +cp resource-monitor.tmp "${STAGING_DIR}/resource-monitor${EXE_SUFFIX}" + +tar -acf "resource-monitor.tar.$COMPRESS_EXT" "${STAGING_DIR}"/ +mkdir -p "$UPLOAD_DIR" +cp "resource-monitor.tar.$COMPRESS_EXT" "$UPLOAD_DIR" diff --git a/taskcluster/scripts/misc/build-rust-based-toolchain.sh b/taskcluster/scripts/misc/build-rust-based-toolchain.sh new file mode 100755 index 0000000000..707ba9d478 --- /dev/null +++ b/taskcluster/scripts/misc/build-rust-based-toolchain.sh @@ -0,0 +1,79 @@ +#!/bin/bash +set -x -e -v + +artifact=$(basename "$TOOLCHAIN_ARTIFACT") +project=${artifact%.tar.*} +workspace=$HOME/workspace + +# Exported for osx-cross-linker. +export TARGET=$1 +shift + +FEATURES="$@" + +case "$TARGET" in +x86_64-unknown-linux-gnu) + # Native Linux Build + export RUSTFLAGS="-Clinker=$MOZ_FETCHES_DIR/clang/bin/clang++ -C link-arg=--sysroot=$MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu -C link-arg=-fuse-ld=lld" + export CC=$MOZ_FETCHES_DIR/clang/bin/clang + export CXX=$MOZ_FETCHES_DIR/clang/bin/clang++ + export TARGET_CFLAGS="--sysroot=$MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" + export TARGET_CXXFLAGS="-D_GLIBCXX_USE_CXX11_ABI=0 --sysroot=$MOZ_FETCHES_DIR/sysroot-x86_64-linux-gnu" + ;; +*-apple-darwin) + # Cross-compiling for Mac on Linux. + export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH" + export RUSTFLAGS="-C linker=$GECKO_PATH/taskcluster/scripts/misc/osx-cross-linker" + if test "$TARGET" = "aarch64-apple-darwin"; then + export MACOSX_DEPLOYMENT_TARGET=11.0 + else + export MACOSX_DEPLOYMENT_TARGET=10.12 + fi + export CC="$MOZ_FETCHES_DIR/clang/bin/clang" + export CXX="$MOZ_FETCHES_DIR/clang/bin/clang++" + export TARGET_CFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk" + export TARGET_CXXFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk -stdlib=libc++" + ;; +*-pc-windows-msvc) + # Cross-compiling for Windows on Linux. + export CC=$MOZ_FETCHES_DIR/clang/bin/clang-cl + export CXX=$MOZ_FETCHES_DIR/clang/bin/clang-cl + export TARGET_AR=$MOZ_FETCHES_DIR/clang/bin/llvm-lib + + . $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh + export CARGO_TARGET_I686_PC_WINDOWS_MSVC_LINKER=$MOZ_FETCHES_DIR/clang/bin/lld-link + export CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER=$MOZ_FETCHES_DIR/clang/bin/lld-link + export TARGET_CFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml" + export TARGET_CXXFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml" + ;; +esac + +PATH="$MOZ_FETCHES_DIR/rustc/bin:$PATH" + +CRATE_PATH=$MOZ_FETCHES_DIR/${FETCH-$project} +WORKSPACE_ROOT=$(cd $CRATE_PATH; cargo metadata --format-version 1 --no-deps --locked 2> /dev/null | jq -r .workspace_root) + +if test ! -f $WORKSPACE_ROOT/Cargo.lock; then + CARGO_LOCK=taskcluster/scripts/misc/$project-Cargo.lock + if test -f $GECKO_PATH/$CARGO_LOCK; then + cp $GECKO_PATH/$CARGO_LOCK $WORKSPACE_ROOT/Cargo.lock + else + echo "Missing Cargo.lock for the crate. 
Please provide one in $CARGO_LOCK" >&2 + exit 1 + fi +fi + +cargo install \ + --locked \ + --verbose \ + --path $CRATE_PATH \ + --target-dir $workspace/obj \ + --root $workspace/out \ + --target "$TARGET" \ + ${FEATURES:+--features "$FEATURES"} + +mkdir $workspace/$project +mv $workspace/out/bin/* $workspace/$project +tar -C $workspace -acvf $project.tar.zst $project +mkdir -p $UPLOAD_DIR +mv $project.tar.zst $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-sysroot-wasi.sh b/taskcluster/scripts/misc/build-sysroot-wasi.sh new file mode 100755 index 0000000000..dd5be58585 --- /dev/null +++ b/taskcluster/scripts/misc/build-sysroot-wasi.sh @@ -0,0 +1,53 @@ +#!/bin/bash +set -x -e -v + +artifact=$(basename $TOOLCHAIN_ARTIFACT) +sysroot=${artifact%.tar.*} + +# Make the wasi compiler-rt available to clang. +env UPLOAD_DIR= $GECKO_PATH/taskcluster/scripts/misc/repack-clang.sh + +if [ -n "$1" ]; then + patch -d $MOZ_FETCHES_DIR/wasi-sdk -p1 < $(dirname $0)/$1 +fi + +cd $MOZ_FETCHES_DIR/wasi-sdk +LLVM_PROJ_DIR=$MOZ_FETCHES_DIR/llvm-project + +mkdir -p build/install/wasi +# The wasi-sdk build system wants to build clang itself. We trick it into +# thinking it did, and put our own clang where it would have built its own. +ln -s $MOZ_FETCHES_DIR/clang build/llvm +touch build/llvm.BUILT + +# The wasi-sdk build system wants a clang and an ar binary in +# build/install/$PREFIX/bin +ln -s $MOZ_FETCHES_DIR/clang/bin build/install/wasi/bin +ln -s llvm-ar build/install/wasi/bin/ar + +# Build wasi-libc, libc++ and libc++abi. +do_make() { + make \ + LLVM_PROJ_DIR=$LLVM_PROJ_DIR \ + PREFIX=$(grep -q BUILD_PREFIX Makefile || echo $PWD/build/install)/wasi \ + -j$(nproc) \ + $1 +} + +do_make build/wasi-libc.BUILT + +# The wasi-sdk build system has a dependency on compiler-rt for libcxxabi, +# but that's not actually necessary. Pretend it's already built. +# Because compiler-rt has a dependency on wasi-libc, we can only do this +# after wasi-libc is built. +touch build/compiler-rt.BUILT + +do_make build/libcxx.BUILT +if grep -q build/libcxxabi.BUILT Makefile; then + do_make build/libcxxabi.BUILT +fi + +mv build/install/wasi/share/wasi-sysroot $sysroot +tar --zstd -cf $artifact $sysroot +mkdir -p $UPLOAD_DIR +mv $artifact $UPLOAD_DIR/ diff --git a/taskcluster/scripts/misc/build-sysroot.sh b/taskcluster/scripts/misc/build-sysroot.sh new file mode 100755 index 0000000000..f8a1054f15 --- /dev/null +++ b/taskcluster/scripts/misc/build-sysroot.sh @@ -0,0 +1,126 @@ +#!/bin/sh + +set -x +set -e + +arch=$1 +shift + +sysroot=$(basename $TOOLCHAIN_ARTIFACT) +sysroot=${sysroot%%.*} + +# To repackage Firefox as a .deb package +# we bootstrap jessie systems on a bullseye image. +# To keep the build and repackage environments +# consistent the build baseline used here (jessie) should be +# synchronized with the packaging baseline used in +# taskcluster/docker/debian-repackage/Dockerfile +# and python/mozbuild/mozbuild/repackaging/deb.py +case "$arch" in +i386|amd64) + dist=jessie + if [ -n "$PACKAGES_TASKS" ]; then + gcc_version=8 + else + gcc_version=4.9 + fi + # The Debian Jessie GPG key expired. + extra_apt_opt='Apt::Key::gpgvcommand "/usr/local/sbin/gpgvnoexpkeysig"' + ;; +arm64) + dist=buster + gcc_version=8 + ;; +*) + echo "$arch is not supported." 
>&2
+  exit 1
+  ;;
+esac
+
+case "$dist" in
+jessie)
+  repo_url=https://archive.debian.org/debian
+  ;;
+*)
+  SNAPSHOT=20230611T210420Z
+  repo_url=http://snapshot.debian.org/archive/debian/$SNAPSHOT
+  ;;
+esac
+
+packages="
+  linux-libc-dev
+  libasound2-dev
+  libstdc++-${gcc_version}-dev
+  libfontconfig1-dev
+  libfreetype6-dev
+  libgconf2-dev
+  libgcc-${gcc_version}-dev
+  libgtk-3-dev
+  libpango1.0-dev
+  libpulse-dev
+  libx11-xcb-dev
+  libxt-dev
+  valgrind
+  $*
+"
+
+# --keyring=... works around https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=981710
+# For a sysroot, we don't need everything. Essentially only libraries and headers, as
+# well as pkgconfig files. We exclude debug info files and valgrind files that are not
+# useful to build.
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+(
+  echo "deb $repo_url $dist main"
+  for task in $PACKAGES_TASKS; do
+    echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ apt/"
+  done
+) | mmdebstrap \
+  --architectures=$arch \
+  --variant=extract \
+  --include=$(echo $packages | tr ' ' ,) \
+  $dist \
+  $sysroot \
+  - \
+  --aptopt=/etc/apt/apt.conf.d/99taskcluster \
+  ${extra_apt_opt:+--aptopt="$extra_apt_opt"} \
+  --dpkgopt=path-exclude="*" \
+  --dpkgopt=path-include="/lib/*" \
+  --dpkgopt=path-include="/lib32/*" \
+  --dpkgopt=path-include="/usr/include/*" \
+  --dpkgopt=path-include="/usr/lib/*" \
+  --dpkgopt=path-include="/usr/lib32/*" \
+  --dpkgopt=path-exclude="/usr/lib/debug/*" \
+  --dpkgopt=path-exclude="/usr/lib/python*" \
+  --dpkgopt=path-exclude="/usr/lib/valgrind/*" \
+  --dpkgopt=path-include="/usr/share/pkgconfig/*" \
+  --keyring=/usr/share/keyrings/debian-archive-removed-keys.gpg \
+  -v
+
+# Remove files that are created despite the path-exclude=*.
+rm -rf $sysroot/etc $sysroot/dev $sysroot/tmp $sysroot/var
+
+# Remove empty directories
+find $sysroot -depth -type d -empty -delete
+
+# Adjust symbolic links to link into the sysroot instead of absolute
+# paths that end up pointing at the host system.
+find $sysroot -type l | while read l; do
+  t=$(readlink $l)
+  case "$t" in
+  /*)
+    # We have a path in the form "$sysroot/a/b/c/d" and we want ../../..,
+    # which is how we get from d to the root of the sysroot. For that,
+    # we start from the directory containing d ("$sysroot/a/b/c"), remove
+    # all non-slash characters, leaving us with "///", replace each slash
+    # with "../", which gives us "../../../", and then we remove the last
+    # slash.
+    rel=$(dirname $l | sed 's,[^/],,g;s,/,../,g;s,/$,,')
+    ln -sf $rel$t $l
+    ;;
+  esac
+done
+
+tar caf $sysroot.tar.zst $sysroot
+
+mkdir -p "$UPLOAD_DIR"
+mv "$sysroot.tar.zst" "$UPLOAD_DIR"
diff --git a/taskcluster/scripts/misc/build-upx.sh b/taskcluster/scripts/misc/build-upx.sh
new file mode 100755
index 0000000000..12462b1177
--- /dev/null
+++ b/taskcluster/scripts/misc/build-upx.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+set -x -e -v
+
+WORKSPACE=$HOME/workspace
+INSTALL_DIR=$WORKSPACE/upx
+
+mkdir -p $INSTALL_DIR/bin
+
+cd $WORKSPACE
+
+git clone -n https://github.com/upx/upx.git upx-clone
+cd upx-clone
+# https://github.com/upx/upx/releases/tag/v3.95
+git checkout 7a3637ff5a800b8bcbad20ae7f668d8c8449b014 # Pin to the v3.95 release commit so the clone's contents are fixed
+git submodule update --init --recursive
+cd src
+make -j$(nproc) CXXFLAGS_WERROR=
+cp upx.out $INSTALL_DIR/bin/upx
+
+# --------------
+
+cd $WORKSPACE
+tar caf upx.tar.zst upx
+
+mkdir -p $UPLOAD_DIR
+cp upx.tar.* $UPLOAD_DIR
diff --git a/taskcluster/scripts/misc/build-winchecksec.sh b/taskcluster/scripts/misc/build-winchecksec.sh
new file mode 100755
index 0000000000..066accd910
--- /dev/null
+++ b/taskcluster/scripts/misc/build-winchecksec.sh
@@ -0,0 +1,198 @@
+#!/bin/bash
+set -e -v -x
+
+mkdir -p $UPLOAD_DIR
+
+cd $MOZ_FETCHES_DIR/winchecksec
+
+SUFFIX=
+
+case "$1" in
+x86_64-pc-windows-msvc)
+  SUFFIX=.exe
+  export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH"
+
+  . $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh
+
+  # Patch pe-parse because clang-cl doesn't support /analyze.
+  patch -p1 <<'EOF'
+--- a/pe-parse/cmake/compilation_flags.cmake
++++ b/pe-parse/cmake/compilation_flags.cmake
+@@ -1,5 +1,5 @@
+ if (MSVC)
+-  list(APPEND DEFAULT_CXX_FLAGS /W4 /analyze)
++  list(APPEND DEFAULT_CXX_FLAGS /W4)
+ 
+   if (CMAKE_BUILD_TYPE STREQUAL "Debug" OR CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo")
+     list(APPEND DEFAULT_CXX_FLAGS /Zi)
+EOF
+
+  CMAKE_FLAGS='
+    -DCMAKE_CXX_COMPILER=clang-cl
+    -DCMAKE_C_COMPILER=clang-cl
+    -DCMAKE_LINKER=lld-link
+    -DCMAKE_C_FLAGS="-fuse-ld=lld -Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml"
+    -DCMAKE_CXX_FLAGS="-fuse-ld=lld -EHsc -Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml"
+    -DCMAKE_RC_COMPILER=llvm-rc
+    -DCMAKE_MT=llvm-mt
+    -DCMAKE_SYSTEM_NAME=Windows
+    -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded
+  '
+  ;;
+esac
+
+# Apply https://github.com/trailofbits/pe-parse/commit/d9e72af81e832330c111e07b98d34877469445f5
+# And https://github.com/trailofbits/pe-parse/commit/eecdb3d36eb44e306398a2e66e85490f9bdcc74c
+patch -p1 <<'EOF'
+--- a/pe-parse/pe-parser-library/src/buffer.cpp
++++ b/pe-parse/pe-parser-library/src/buffer.cpp
+@@ -112,11 +112,12 @@ bool readWord(bounded_buffer *b, std::uint32_t offset, std::uint16_t &out) {
+     return false;
+   }
+ 
+-  std::uint16_t *tmp = reinterpret_cast<std::uint16_t *>(b->buf + offset);
++  std::uint16_t tmp;
++  memcpy(&tmp, (b->buf + offset), sizeof(std::uint16_t));
+   if (b->swapBytes) {
+-    out = byteSwapUint16(*tmp);
++    out = byteSwapUint16(tmp);
+   } else {
+-    out = *tmp;
++    out = tmp;
+   }
+ 
+   return true;
+@@ -133,11 +134,12 @@ bool readDword(bounded_buffer *b, std::uint32_t offset, std::uint32_t &out) {
+     return false;
+   }
+ 
+-  std::uint32_t *tmp = reinterpret_cast<std::uint32_t *>(b->buf + offset);
++  std::uint32_t tmp;
++  memcpy(&tmp, (b->buf + offset), sizeof(std::uint32_t));
+   if (b->swapBytes) {
+-    out = byteSwapUint32(*tmp);
++    out = byteSwapUint32(tmp);
+   } else {
+-    out = *tmp;
++    out = tmp;
+   }
+ 
+   return true;
+@@ -154,11 +156,12 @@ bool readQword(bounded_buffer *b, std::uint32_t offset, std::uint64_t &out) {
+     return false;
+   }
+ 
+-  std::uint64_t *tmp = reinterpret_cast<std::uint64_t *>(b->buf + offset);
++  std::uint64_t tmp;
++  memcpy(&tmp, (b->buf + offset), sizeof(std::uint64_t));
+   if (b->swapBytes) {
+-    out = byteSwapUint64(*tmp);
++    out = byteSwapUint64(tmp);
+   } else {
+-    out = *tmp;
++    out = tmp;
+   }
+ 
+   return true;
+@@ -175,16 +178,16 @@ bool readChar16(bounded_buffer *b, std::uint32_t offset, char16_t &out) {
+     return false;
+   }
+ 
+-  char16_t *tmp = nullptr;
++  char16_t tmp;
+   if (b->swapBytes) {
+     std::uint8_t tmpBuf[2];
+     tmpBuf[0] = *(b->buf + offset + 1);
+     tmpBuf[1] = *(b->buf + offset);
+-    tmp = reinterpret_cast<char16_t *>(tmpBuf);
++    memcpy(&tmp, tmpBuf, sizeof(std::uint16_t));
+   } else {
+-    tmp = reinterpret_cast<char16_t *>(b->buf + offset);
memcpy(&tmp, (b->buf + offset), sizeof(std::uint16_t)); + } +- out = *tmp; ++ out = tmp; + + return true; + } +--- a/pe-parse/pe-parser-library/include/parser-library/parse.h ++++ b/pe-parse/pe-parser-library/include/parser-library/parse.h +@@ -40,28 +40,38 @@ THE SOFTWARE. + err_loc.assign(__func__); \ + err_loc += ":" + to_string(__LINE__, std::dec); + +-#define READ_WORD(b, o, inst, member) \ +- if (!readWord(b, o + _offset(__typeof__(inst), member), inst.member)) { \ +- PE_ERR(PEERR_READ); \ +- return false; \ ++#define READ_WORD(b, o, inst, member) \ ++ if (!readWord(b, \ ++ o + static_cast(offsetof(__typeof__(inst), member)), \ ++ inst.member)) { \ ++ PE_ERR(PEERR_READ); \ ++ return false; \ + } + +-#define READ_DWORD(b, o, inst, member) \ +- if (!readDword(b, o + _offset(__typeof__(inst), member), inst.member)) { \ +- PE_ERR(PEERR_READ); \ +- return false; \ ++#define READ_DWORD(b, o, inst, member) \ ++ if (!readDword( \ ++ b, \ ++ o + static_cast(offsetof(__typeof__(inst), member)), \ ++ inst.member)) { \ ++ PE_ERR(PEERR_READ); \ ++ return false; \ + } + +-#define READ_QWORD(b, o, inst, member) \ +- if (!readQword(b, o + _offset(__typeof__(inst), member), inst.member)) { \ +- PE_ERR(PEERR_READ); \ +- return false; \ ++#define READ_QWORD(b, o, inst, member) \ ++ if (!readQword( \ ++ b, \ ++ o + static_cast(offsetof(__typeof__(inst), member)), \ ++ inst.member)) { \ ++ PE_ERR(PEERR_READ); \ ++ return false; \ + } + +-#define READ_BYTE(b, o, inst, member) \ +- if (!readByte(b, o + _offset(__typeof__(inst), member), inst.member)) { \ +- PE_ERR(PEERR_READ); \ +- return false; \ ++#define READ_BYTE(b, o, inst, member) \ ++ if (!readByte(b, \ ++ o + static_cast(offsetof(__typeof__(inst), member)), \ ++ inst.member)) { \ ++ PE_ERR(PEERR_READ); \ ++ return false; \ + } + + #define TEST_MACHINE_CHARACTERISTICS(h, m, ch) \ +--- a/pe-parse/pe-parser-library/src/parse.cpp ++++ b/pe-parse/pe-parser-library/src/parse.cpp +@@ -1777,7 +1777,7 @@ bool getRelocations(parsed_pe *p) { + // Mask out the type and assign + type = entry >> 12; + // Mask out the offset and assign +- offset = entry & ~0xf000; ++ offset = entry & static_cast(~0xf000); + + // Produce the VA of the relocation + VA relocVA; +EOF + +eval cmake \ + -GNinja \ + -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_SHARED_LIBS=Off \ + $CMAKE_FLAGS + +ninja -v + +cd .. 
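+# Package only the winchecksec binary itself; the build tree is left behind.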
+tar -caf winchecksec.tar.zst winchecksec/winchecksec${SUFFIX} +cp winchecksec.tar.zst $UPLOAD_DIR/ diff --git a/taskcluster/scripts/misc/build-wine.sh b/taskcluster/scripts/misc/build-wine.sh new file mode 100755 index 0000000000..e292fd2e1e --- /dev/null +++ b/taskcluster/scripts/misc/build-wine.sh @@ -0,0 +1,29 @@ +#!/bin/bash +set -x -e -v + +WORKSPACE=$HOME/workspace +INSTALL_DIR=$WORKSPACE/wine + +mkdir -p $INSTALL_DIR +mkdir -p $WORKSPACE/build/wine +mkdir -p $WORKSPACE/build/wine64 + +cd $WORKSPACE/build/wine64 +$MOZ_FETCHES_DIR/wine-source/configure --enable-win64 --without-x --without-freetype --prefix=$INSTALL_DIR/ +make -j$(nproc) + +cd $WORKSPACE/build/wine +$MOZ_FETCHES_DIR/wine-source/configure --with-wine64=../wine64 --without-x --without-freetype --prefix=$INSTALL_DIR/ +make -j$(nproc) +make install + +cd $WORKSPACE/build/wine64 +make install + +# -------------- + +cd $WORKSPACE/ +tar caf wine.tar.zst wine + +mkdir -p $UPLOAD_DIR +cp wine.tar.* $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/build-xar-linux.sh b/taskcluster/scripts/misc/build-xar-linux.sh new file mode 100755 index 0000000000..497e18217e --- /dev/null +++ b/taskcluster/scripts/misc/build-xar-linux.sh @@ -0,0 +1,25 @@ +#!/bin/bash +set -x -e -v + +# This script is for building xar for Linux. +mkdir -p $UPLOAD_DIR + +export PATH=$PATH:$MOZ_FETCHES_DIR/clang/bin +cd $MOZ_FETCHES_DIR/xar/xar + +./autogen.sh --prefix=/builds/worker --enable-static + +# Force statically-linking to libcrypto. pkg-config --static will tell +# us the extra flags that are needed (in practice, -ldl -pthread), +# and -lcrypto, which we need to change to actually link statically. +CRYPTO=$(pkg-config --static --libs libcrypto | sed 's/-lcrypto/-l:libcrypto.a/') +sed -i "s/-lcrypto/$CRYPTO/" src/Makefile.inc + +make_flags="-j$(nproc)" +make $make_flags + +cd $(mktemp -d) +mkdir xar + +cp $MOZ_FETCHES_DIR/xar/xar/src/xar ./xar/xar +tar caf $UPLOAD_DIR/xar.tar.zst ./xar diff --git a/taskcluster/scripts/misc/build-xz.sh b/taskcluster/scripts/misc/build-xz.sh new file mode 100755 index 0000000000..50bbc9ed0b --- /dev/null +++ b/taskcluster/scripts/misc/build-xz.sh @@ -0,0 +1,25 @@ +#!/bin/sh +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# + +set -e +set -x + +# Required fetch artifact +xz_src=${MOZ_FETCHES_DIR}/xz-source + +# Actual build +work_dir=`pwd` +dest_dir=${work_dir}/tmp-install +tardir=xz + +cd `mktemp -d` +${xz_src}/configure --prefix=/${tardir} CFLAGS=-O2 ${configure_flags_extra} || { exit_status=$? && cat config.log && exit $exit_status ; } +export MAKEFLAGS=-j`nproc` +make +make DESTDIR=${dest_dir} install +cd ${dest_dir} + +$(dirname $0)/pack.sh ${tardir} diff --git a/taskcluster/scripts/misc/cargo-apk-Cargo.lock b/taskcluster/scripts/misc/cargo-apk-Cargo.lock new file mode 100644 index 0000000000..9e52f773a5 --- /dev/null +++ b/taskcluster/scripts/misc/cargo-apk-Cargo.lock @@ -0,0 +1,802 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_log-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85965b6739a430150bdd138e2374a98af0c3ee0d030b3bb7fc3bddff58d0102e" + +[[package]] +name = "android_logger" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9ed09b18365ed295d722d0b5ed59c01b79a826ff2d2a8f73d5ecca8e6fb2f66" +dependencies = [ + "android_log-sys", + "env_logger", + "lazy_static", + "log", +] + +[[package]] +name = "anyhow" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "cargo-apk" +version = "0.8.2" +dependencies = [ + "anyhow", + "cargo-subcommand", + "dunce", + "env_logger", + "log", + "ndk-build", + "serde", + "thiserror", + "toml", +] + +[[package]] +name = "cargo-subcommand" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c450fe6a70cd506bc95119cdbbf4343b61a808a419db4378357d278e42e4b79" +dependencies = [ + "dunce", + "glob", + "serde", + "toml", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "dirs" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dunce" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = 
"2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "jni" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24967112a1e4301ca5342ea339763613a37592b8a6ce6cf2e4494537c7a42faf" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-glue" +version = "0.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb534fa5773934d2580025e31b660656898f449366b7d3c8c51ba36a1609314" +dependencies = [ + "jni-sys", + "lazy_static", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" + +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "ndk" +version = "0.6.0" +dependencies = [ + "bitflags 1.3.2", + "jni", + "jni-glue", + "jni-sys", + "ndk-sys", + "num_enum", + "thiserror", +] + +[[package]] +name = "ndk-build" +version = "0.4.3" +dependencies = [ + "dirs", + "dunce", + "quick-xml", + "serde", + "thiserror", + "which", +] + +[[package]] +name = "ndk-context" +version = "0.1.0" + +[[package]] +name = "ndk-examples" +version = "0.1.0" +dependencies = [ + "jni", + "libc", + "log", + "ndk", + "ndk-context", + "ndk-glue", +] + +[[package]] +name = "ndk-glue" +version = "0.6.1" +dependencies = [ + "android_logger", + "lazy_static", + "libc", + "log", + "ndk", + "ndk-context", + "ndk-macro", + "ndk-sys", +] + +[[package]] +name = "ndk-macro" +version = "0.3.0" +dependencies = [ + "darling", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ndk-sys" +version = "0.3.0" +dependencies = [ + "jni-sys", +] + +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quick-xml" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26aab6b48e2590e4a64d1ed808749ba06257882b461d01ca71baeb747074a6dd" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "rustix" +version = "0.38.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.195" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.195" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = 
"winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "winnow" +version = "0.5.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" +dependencies = [ + "memchr", +] diff --git a/taskcluster/scripts/misc/cctools.patch b/taskcluster/scripts/misc/cctools.patch new file mode 100644 index 0000000000..8e11708465 --- /dev/null +++ b/taskcluster/scripts/misc/cctools.patch @@ -0,0 +1,17 @@ +diff --git a/cctools/ld64/src/ld/Resolver.cpp b/cctools/ld64/src/ld/Resolver.cpp +index bfb67a3..d5b1c67 100644 +--- a/cctools/ld64/src/ld/Resolver.cpp ++++ b/cctools/ld64/src/ld/Resolver.cpp +@@ -1178,9 +1178,10 @@ void Resolver::deadStripOptimize(bool force) + // unset liveness, so markLive() will recurse + 
(const_cast<ld::Atom*>(atom))->setLive(0); + } +- // if doing LTO, mark all libclang_rt* mach-o atoms as live since the backend may suddenly codegen uses of them ++ // if doing LTO, mark all libclang_rt* mach-o atoms as live since the backend may suddenly codegen uses of them. ++ // Likewise with rust compiler_builtins atoms. They may come from a libcompiler_builtins-.a lib, or from a compiler_builtins-... member of a rust static library. + else if ( _haveLLVMObjs && !force && (atom->contentType() != ld::Atom::typeLTOtemporary) ) { +- if ( strstr(atom->safeFilePath(), "libclang_rt") != nullptr ) { ++ if ( strstr(atom->safeFilePath(), "libclang_rt") != nullptr || strstr(atom->safeFilePath(), "compiler_builtins") != nullptr ) { + _deadStripRoots.insert(atom); + } + } diff --git a/taskcluster/scripts/misc/dummy.sh b/taskcluster/scripts/misc/dummy.sh new file mode 100755 index 0000000000..1a2485251c --- /dev/null +++ b/taskcluster/scripts/misc/dummy.sh @@ -0,0 +1 @@ +#!/bin/sh diff --git a/taskcluster/scripts/misc/fetch-chromium.py b/taskcluster/scripts/misc/fetch-chromium.py new file mode 100644 index 0000000000..ab00dac863 --- /dev/null +++ b/taskcluster/scripts/misc/fetch-chromium.py @@ -0,0 +1,241 @@ +#!/usr/bin/python3 -u +# -*- coding: utf-8 -*- + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +This script downloads the latest chromium build (or a manually +defined version) for a given platform. It then uploads the build, +with the revision of the build stored in a REVISION file. +""" + +import argparse +import errno +import os +import shutil +import subprocess +import tempfile + +import requests +from redo import retriable + +LAST_CHANGE_URL = ( + # formatted with platform + "https://www.googleapis.com/download/storage/v1/b/" + "chromium-browser-snapshots/o/{}%2FLAST_CHANGE?alt=media" +) + +CHROMIUM_BASE_URL = ( + # formatted with (platform/revision/archive) + "https://www.googleapis.com/download/storage/v1/b/" + "chromium-browser-snapshots/o/{}%2F{}%2F{}?alt=media" +) + + +CHROMIUM_INFO = { + "linux": { + "platform": "Linux_x64", + "chromium": "chrome-linux.zip", + "result": "chromium-linux.tar.bz2", + "chromedriver": "chromedriver_linux64.zip", + }, + "win32": { + "platform": "Win", + "chromium": "chrome-win.zip", + "result": "chromium-win32.tar.bz2", + "chromedriver": "chromedriver_win32.zip", + }, + "win64": { + "platform": "Win", + "chromium": "chrome-win.zip", + "result": "chromium-win64.tar.bz2", + "chromedriver": "chromedriver_win32.zip", + }, + "mac": { + "platform": "Mac", + "chromium": "chrome-mac.zip", + "result": "chromium-mac.tar.bz2", + "chromedriver": "chromedriver_mac64.zip", + }, + "mac-arm": { + "platform": "Mac_Arm", + "chromium": "chrome-mac.zip", + "result": "chromium-mac-arm.tar.bz2", + "chromedriver": "chromedriver_mac64.zip", + }, +} + + +def log(msg): + print("build-chromium: %s" % msg) + + +@retriable(attempts=7, sleeptime=5, sleepscale=2) +def fetch_file(url, filepath): + """Download a file from the given url to a given file.""" + size = 4096 + r = requests.get(url, stream=True) + r.raise_for_status() + + with open(filepath, "wb") as fd: + for chunk in r.iter_content(size): + fd.write(chunk) + + +def unzip(zippath, target): + """Unzips an archive to the target location.""" + log("Unpacking archive at: %s to: %s" % (zippath, target)) + unzip_command = ["unzip", "-q", "-o", zippath, "-d", target] + 
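# -q: quiet, -o: overwrite existing files without prompting. +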
subprocess.check_call(unzip_command) + + +@retriable(attempts=7, sleeptime=5, sleepscale=2) +def fetch_chromium_revision(platform): + """Get the revision of the latest chromium build.""" + chromium_platform = CHROMIUM_INFO[platform]["platform"] + revision_url = LAST_CHANGE_URL.format(chromium_platform) + + log("Getting revision number for latest %s chromium build..." % chromium_platform) + + # Expecting a file with a single number indicating the latest + # chromium build with a chromedriver that we can download + r = requests.get(revision_url, timeout=30) + r.raise_for_status() + + chromium_revision = r.content.decode("utf-8") + return chromium_revision.strip() + + +def fetch_chromium_build(platform, revision, zippath): + """Download a chromium build for a given revision, or the latest.""" + if not revision: + revision = fetch_chromium_revision(platform) + + download_platform = CHROMIUM_INFO[platform]["platform"] + download_url = CHROMIUM_BASE_URL.format( + download_platform, revision, CHROMIUM_INFO[platform]["chromium"] + ) + + log("Downloading %s chromium build revision %s..." % (download_platform, revision)) + log(download_url) + fetch_file(download_url, zippath) + return revision + + +def fetch_chromedriver(platform, revision, chromium_dir): + """Get the chromedriver for the given revision and repackage it.""" + download_url = CHROMIUM_BASE_URL.format( + CHROMIUM_INFO[platform]["platform"], + revision, + CHROMIUM_INFO[platform]["chromedriver"], + ) + + tmpzip = os.path.join(tempfile.mkdtemp(), "cd-tmp.zip") + log("Downloading chromedriver from %s" % download_url) + fetch_file(download_url, tmpzip) + + tmppath = tempfile.mkdtemp() + unzip(tmpzip, tmppath) + + # Find the chromedriver then copy it to the chromium directory + cd_path = None + for dirpath, _, filenames in os.walk(tmppath): + for filename in filenames: + if filename == "chromedriver" or filename == "chromedriver.exe": + cd_path = os.path.join(dirpath, filename) + break + if cd_path is not None: + break + if cd_path is None: + raise Exception("Could not find chromedriver binary in %s" % tmppath) + log("Copying chromedriver from: %s to: %s" % (cd_path, chromium_dir)) + shutil.copy(cd_path, chromium_dir) + + +def build_chromium_archive(platform, revision=None): + """ + Download and store a chromium build for a given platform. + + Retrieves either the latest version, or uses a pre-defined version if + the `--revision` option is given a revision. + """ + upload_dir = os.environ.get("UPLOAD_DIR") + if upload_dir: + # Create the upload directory if it doesn't exist. + try: + log("Creating upload directory in %s..." % os.path.abspath(upload_dir)) + os.makedirs(upload_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + # Make a temporary location for the file + tmppath = tempfile.mkdtemp() + tmpzip = os.path.join(tmppath, "tmp-chromium.zip") + + revision = fetch_chromium_build(platform, revision, tmpzip) + + # Unpack archive in `tmpzip` to store the revision number and + # the chromedriver + unzip(tmpzip, tmppath) + + dirs = [ + d + for d in os.listdir(tmppath) + if os.path.isdir(os.path.join(tmppath, d)) and d.startswith("chrome-") + ] + + if len(dirs) > 1: + raise Exception( + "Too many directories starting with `chrome-` after extracting." + ) + elif len(dirs) == 0: + raise Exception( + "Could not find any directories after extraction of chromium zip." 
+ ) + + chromium_dir = os.path.join(tmppath, dirs[0]) + revision_file = os.path.join(chromium_dir, ".REVISION") + with open(revision_file, "w+") as f: + f.write(str(revision)) + + # Get and store the chromedriver + fetch_chromedriver(platform, revision, chromium_dir) + + tar_file = CHROMIUM_INFO[platform]["result"] + tar_command = ["tar", "cjf", tar_file, "-C", tmppath, dirs[0]] + log("Added revision to %s file." % revision_file) + + log("Tarring with the command: %s" % str(tar_command)) + subprocess.check_call(tar_command) + + upload_dir = os.environ.get("UPLOAD_DIR") + if upload_dir: + # Move the tarball to the output directory for upload. + log("Moving %s to the upload directory..." % tar_file) + shutil.copy(tar_file, os.path.join(upload_dir, tar_file)) + + shutil.rmtree(tmppath) + + +def parse_args(): + """Read command line arguments and return options.""" + parser = argparse.ArgumentParser() + parser.add_argument( + "--platform", help="Platform version of chromium to build.", required=True + ) + parser.add_argument( + "--revision", + help="Revision of chromium to build to get. " + "(Defaults to the newest chromium build).", + default=None, + ) + + return parser.parse_args() + + +if __name__ == "__main__": + args = vars(parse_args()) + build_chromium_archive(**args) diff --git a/taskcluster/scripts/misc/fetch-content b/taskcluster/scripts/misc/fetch-content new file mode 100755 index 0000000000..9e6a1f4ef0 --- /dev/null +++ b/taskcluster/scripts/misc/fetch-content @@ -0,0 +1,900 @@ +#!/usr/bin/python3 -u +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import bz2 +import concurrent.futures +import contextlib +import datetime +import gzip +import hashlib +import io +import json +import lzma +import multiprocessing +import os +import pathlib +import random +import re +import stat +import subprocess +import sys +import tarfile +import tempfile +import time +import urllib.parse +import urllib.request +import zipfile + +try: + import zstandard +except ImportError: + zstandard = None + +try: + import certifi +except ImportError: + certifi = None + + +CONCURRENCY = multiprocessing.cpu_count() + + +def log(msg): + print(msg, file=sys.stderr) + sys.stderr.flush() + + +class IntegrityError(Exception): + """Represents an integrity error when downloading a URL.""" + + +def ZstdCompressor(*args, **kwargs): + if not zstandard: + raise ValueError("zstandard Python package not available") + return zstandard.ZstdCompressor(*args, **kwargs) + + +def ZstdDecompressor(*args, **kwargs): + if not zstandard: + raise ValueError("zstandard Python package not available") + return zstandard.ZstdDecompressor(*args, **kwargs) + + +@contextlib.contextmanager +def rename_after_close(fname, *args, **kwargs): + """ + Context manager that opens a temporary file to use as a writer, + and closes the file on context exit, renaming it to the expected + file name in case of success, or removing it in case of failure. + + Takes the same options as open(), but must be used as a context + manager. 
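+
+ Illustrative usage (not from the original):
+
+ with rename_after_close("out.bin", "wb") as fh:
+ fh.write(b"data")
+
+ Here "out.bin" appears only if the with block exits without raising;
+ on an exception the temporary file is removed instead.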
+ """ + path = pathlib.Path(fname) + tmp = path.with_name("%s.tmp" % path.name) + try: + with tmp.open(*args, **kwargs) as fh: + yield fh + except Exception: + tmp.unlink() + raise + else: + tmp.rename(fname) + + +# The following is copied from +# https://github.com/mozilla-releng/redo/blob/6d07678a014e0c525e54a860381a165d34db10ff/redo/__init__.py#L15-L85 +def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1): + """ + A generator function that sleeps between retries, handles exponential + backoff and jitter. The action you are retrying is meant to run after + retrier yields. + + At each iteration, we sleep for sleeptime + random.randint(-jitter, jitter). + Afterwards sleeptime is multiplied by sleepscale for the next iteration. + + Args: + attempts (int): maximum number of times to try; defaults to 5 + sleeptime (float): how many seconds to sleep between tries; defaults to + 60s (one minute) + max_sleeptime (float): the longest we'll sleep, in seconds; defaults to + 300s (five minutes) + sleepscale (float): how much to multiply the sleep time by each + iteration; defaults to 1.5 + jitter (int): random jitter to introduce to sleep time each iteration. + the amount is chosen at random between [-jitter, +jitter] + defaults to 1 + + Yields: + None, a maximum of `attempts` number of times + + Example: + >>> n = 0 + >>> for _ in retrier(sleeptime=0, jitter=0): + ... if n == 3: + ... # We did the thing! + ... break + ... n += 1 + >>> n + 3 + + >>> n = 0 + >>> for _ in retrier(sleeptime=0, jitter=0): + ... if n == 6: + ... # We did the thing! + ... break + ... n += 1 + ... else: + ... print("max tries hit") + max tries hit + """ + jitter = jitter or 0 # py35 barfs on the next line if jitter is None + if jitter > sleeptime: + # To prevent negative sleep times + raise Exception( + "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime) + ) + + sleeptime_real = sleeptime + for _ in range(attempts): + log("attempt %i/%i" % (_ + 1, attempts)) + + yield sleeptime_real + + if jitter: + sleeptime_real = sleeptime + random.randint(-jitter, jitter) + # our jitter should scale along with the sleeptime + jitter = int(jitter * sleepscale) + else: + sleeptime_real = sleeptime + + sleeptime *= sleepscale + + if sleeptime_real > max_sleeptime: + sleeptime_real = max_sleeptime + + # Don't need to sleep the last time + if _ < attempts - 1: + log( + "sleeping for %.2fs (attempt %i/%i)" % (sleeptime_real, _ + 1, attempts) + ) + time.sleep(sleeptime_real) + + +def stream_download(url, sha256=None, size=None): + """Download a URL to a generator, optionally with content verification. + + If ``sha256`` or ``size`` are defined, the downloaded URL will be + validated against those requirements and ``IntegrityError`` will be + raised if expectations do not match. + + Because verification cannot occur until the file is completely downloaded + it is recommended for consumers to not do anything meaningful with the + data if content verification is being used. To securely handle retrieved + content, it should be streamed to a file or memory and only operated + on after the generator is exhausted without raising. 
+ """ + log("Downloading %s" % url) + + h = hashlib.sha256() + length = 0 + + t0 = time.time() + with urllib.request.urlopen( + url, timeout=60, cafile=certifi.where() + ) if certifi else urllib.request.urlopen(url, timeout=60) as fh: + if not url.endswith(".gz") and fh.info().get("Content-Encoding") == "gzip": + fh = gzip.GzipFile(fileobj=fh) + + while True: + chunk = fh.read(65536) + if not chunk: + break + + h.update(chunk) + length += len(chunk) + + yield chunk + + duration = time.time() - t0 + digest = h.hexdigest() + + log( + "%s resolved to %d bytes with sha256 %s in %.3fs" + % (url, length, digest, duration) + ) + + if size: + if size == length: + log("Verified size of %s" % url) + else: + raise IntegrityError( + "size mismatch on %s: wanted %d; got %d" % (url, size, length) + ) + + if sha256: + if digest == sha256: + log("Verified sha256 integrity of %s" % url) + else: + raise IntegrityError( + "sha256 mismatch on %s: wanted %s; got %s" % (url, sha256, digest) + ) + + +def download_to_path(url, path, sha256=None, size=None): + """Download a URL to a filesystem path, possibly with verification.""" + + # We download to a temporary file and rename at the end so there's + # no chance of the final file being partially written or containing + # bad data. + try: + path.unlink() + except FileNotFoundError: + pass + + for _ in retrier(attempts=5, sleeptime=60): + try: + log("Downloading %s to %s" % (url, path)) + + with rename_after_close(path, "wb") as fh: + for chunk in stream_download(url, sha256=sha256, size=size): + fh.write(chunk) + + return + except IntegrityError: + raise + except Exception as e: + log("Download failed: {}".format(e)) + continue + + raise Exception("Download failed, no more retries!") + + +def download_to_memory(url, sha256=None, size=None): + """Download a URL to memory, possibly with verification.""" + + data = b"" + for _ in retrier(attempts=5, sleeptime=60): + try: + log("Downloading %s" % (url)) + + for chunk in stream_download(url, sha256=sha256, size=size): + data += chunk + + return data + except IntegrityError: + raise + except Exception as e: + log("Download failed: {}".format(e)) + continue + + raise Exception("Download failed, no more retries!") + + +def gpg_verify_path(path: pathlib.Path, public_key_data: bytes, signature_data: bytes): + """Verify that a filesystem path verifies using GPG. + + Takes a Path defining a file to verify. ``public_key_data`` contains + bytes with GPG public key data. ``signature_data`` contains a signed + GPG document to use with ``gpg --verify``. + """ + log("Validating GPG signature of %s" % path) + log("GPG key data:\n%s" % public_key_data.decode("ascii")) + + with tempfile.TemporaryDirectory() as td: + try: + # --batch since we're running unattended. + gpg_args = ["gpg", "--homedir", td, "--batch"] + + log("Importing GPG key...") + subprocess.run(gpg_args + ["--import"], input=public_key_data, check=True) + + log("Verifying GPG signature...") + subprocess.run( + gpg_args + ["--verify", "-", "%s" % path], + input=signature_data, + check=True, + ) + + log("GPG signature verified!") + finally: + # There is a race between the agent self-terminating and + # shutil.rmtree() from the temporary directory cleanup that can + # lead to exceptions. Kill the agent before cleanup to prevent this. 
+ env = dict(os.environ) + env["GNUPGHOME"] = td + subprocess.run(["gpgconf", "--kill", "gpg-agent"], env=env) + + +class ArchiveTypeNotSupported(Exception): + def __init__(self, path: pathlib.Path): + super(Exception, self).__init__("Archive type not supported for %s" % path) + + +def open_stream(path: pathlib.Path): + """Attempt to identify a path as an extractable archive by looking at its + content.""" + fh = path.open(mode="rb") + magic = fh.read(6) + fh.seek(0) + if magic[:2] == b"PK": + return "zip", fh + if magic[:2] == b"\x1f\x8b": + fh = gzip.GzipFile(fileobj=fh) + elif magic[:3] == b"BZh": + fh = bz2.BZ2File(fh) + elif magic == b"\xfd7zXZ\x00": + fh = lzma.LZMAFile(fh) + elif magic[:4] == b"\x28\xb5\x2f\xfd": + fh = ZstdDecompressor().stream_reader(fh) + fh = io.BufferedReader(fh) + try: + # A full tar info header is 512 bytes. + headers = fh.peek(512) + # 257 is the offset of the ustar magic. + magic = headers[257 : 257 + 8] + # For older unix tar, rely on TarInfo.frombuf's checksum check + if magic in (b"ustar\x0000", b"ustar \x00") or tarfile.TarInfo.frombuf( + headers[:512], tarfile.ENCODING, "surrogateescape" + ): + return "tar", fh + except Exception as e: + pass + raise ArchiveTypeNotSupported(path) + + +def archive_type(path: pathlib.Path): + """Attempt to identify a path as an extractable archive.""" + if path.suffixes[-2:-1] == [".tar"] or path.suffixes[-1:] == [".tgz"]: + return "tar" + elif path.suffix == ".zip": + return "zip" + else: + return None + + +def extract_archive(path, dest_dir): + """Extract an archive to a destination directory.""" + + # Resolve paths to absolute variants. + path = path.resolve() + dest_dir = dest_dir.resolve() + + log("Extracting %s to %s" % (path, dest_dir)) + t0 = time.time() + + # We pipe input to the decompressor program so that we can apply + # custom decompressors that the program may not know about. + typ, ifh = open_stream(path) + if typ == "tar": + # On Windows, the tar program doesn't support things like symbolic + # links, while Windows actually support them. The tarfile module in + # python does. So use that. But since it's significantly slower than + # the tar program on Linux, only use tarfile on Windows (tarfile is + # also not much slower on Windows, presumably because of the + # notoriously bad I/O). + if sys.platform == "win32": + tar = tarfile.open(fileobj=ifh, mode="r|") + tar.extractall(str(dest_dir)) + args = [] + else: + args = ["tar", "xf", "-"] + pipe_stdin = True + elif typ == "zip": + # unzip from stdin has wonky behavior. We don't use a pipe for it. 
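+ # Swap the input stream for /dev/null; unzip is given the archive path directly.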
+ ifh = open(os.devnull, "rb") + args = ["unzip", "-o", str(path)] + pipe_stdin = False + else: + raise ValueError("unknown archive format: %s" % path) + + if args: + with ifh, subprocess.Popen( + args, cwd=str(dest_dir), bufsize=0, stdin=subprocess.PIPE + ) as p: + while True: + if not pipe_stdin: + break + + chunk = ifh.read(131072) + if not chunk: + break + + p.stdin.write(chunk) + + if p.returncode: + raise Exception("%r exited %d" % (args, p.returncode)) + + log("%s extracted in %.3fs" % (path, time.time() - t0)) + + +def repack_archive( + orig: pathlib.Path, dest: pathlib.Path, strip_components=0, prefix="" +): + assert orig != dest + log("Repacking as %s" % dest) + orig_typ, ifh = open_stream(orig) + typ = archive_type(dest) + if not typ: + raise Exception("Archive type not supported for %s" % dest.name) + + if dest.suffixes[-2:] != [".tar", ".zst"]: + raise Exception("Only producing .tar.zst archives is supported.") + + if strip_components or prefix: + + def filter(name): + if strip_components: + stripped = "/".join(name.split("/")[strip_components:]) + if not stripped: + raise Exception( + "Stripping %d components would remove files" % strip_components + ) + name = stripped + return prefix + name + + else: + filter = None + + with rename_after_close(dest, "wb") as fh: + ctx = ZstdCompressor() + if orig_typ == "zip": + assert typ == "tar" + zip = zipfile.ZipFile(ifh) + # Convert the zip stream to a tar on the fly. + with ctx.stream_writer(fh) as compressor, tarfile.open( + fileobj=compressor, mode="w:" + ) as tar: + for zipinfo in zip.infolist(): + if zipinfo.is_dir(): + continue + tarinfo = tarfile.TarInfo() + filename = zipinfo.filename + tarinfo.name = filter(filename) if filter else filename + tarinfo.size = zipinfo.file_size + # Zip files don't have any knowledge of the timezone + # they were created in. Which is not really convenient to + # reliably convert to a timestamp. But we don't really + # care about accuracy, but rather about reproducibility, + # so we pick UTC. + time = datetime.datetime( + *zipinfo.date_time, tzinfo=datetime.timezone.utc + ) + tarinfo.mtime = time.timestamp() + # 0 is MS-DOS, 3 is UNIX. Only in the latter case do we + # get anything useful for the tar file mode. + if zipinfo.create_system == 3: + mode = zipinfo.external_attr >> 16 + else: + mode = 0o0644 + tarinfo.mode = stat.S_IMODE(mode) + if stat.S_ISLNK(mode): + tarinfo.type = tarfile.SYMTYPE + tarinfo.linkname = zip.read(filename).decode() + tar.addfile(tarinfo, zip.open(filename)) + elif stat.S_ISREG(mode) or stat.S_IFMT(mode) == 0: + tar.addfile(tarinfo, zip.open(filename)) + else: + raise Exception("Unsupported file mode %o" % stat.S_IFMT(mode)) + + elif orig_typ == "tar": + if typ == "zip": + raise Exception("Repacking a tar to zip is not supported") + assert typ == "tar" + + if filter: + # To apply the filter, we need to open the tar stream and + # tweak it. + origtar = tarfile.open(fileobj=ifh, mode="r|") + with ctx.stream_writer(fh) as compressor, tarfile.open( + fileobj=compressor, + mode="w:", + format=origtar.format, + ) as tar: + for tarinfo in origtar: + if tarinfo.isdir(): + continue + tarinfo.name = filter(tarinfo.name) + if "path" in tarinfo.pax_headers: + tarinfo.pax_headers["path"] = filter( + tarinfo.pax_headers["path"] + ) + if tarinfo.isfile(): + tar.addfile(tarinfo, origtar.extractfile(tarinfo)) + else: + tar.addfile(tarinfo) + else: + # We only change compression here. The tar stream is unchanged. 
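+ # The decompressed tar bytes from open_stream() are piped straight into the zstd writer.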
+ ctx.copy_stream(ifh, fh) + + +def fetch_and_extract(url, dest_dir, extract=True, sha256=None, size=None): + """Fetch a URL and extract it to a destination path. + + If the downloaded URL is an archive, it is extracted automatically + and the archive is deleted. Otherwise the file remains in place in + the destination directory. + """ + + basename = urllib.parse.urlparse(url).path.split("/")[-1] + dest_path = dest_dir / basename + + download_to_path(url, dest_path, sha256=sha256, size=size) + + if not extract: + return + + try: + extract_archive(dest_path, dest_dir) + log("Removing %s" % dest_path) + dest_path.unlink() + except ArchiveTypeNotSupported: + pass + + +def fetch_urls(downloads): + """Fetch URLs pairs to a pathlib.Path.""" + with concurrent.futures.ThreadPoolExecutor(CONCURRENCY) as e: + fs = [] + + for download in downloads: + fs.append(e.submit(fetch_and_extract, *download)) + + for f in fs: + f.result() + + +def _git_checkout_github_archive(dest_path: pathlib.Path, repo: str, + commit: str, prefix: str): + 'Use github archive generator to speed up github git repo cloning' + repo = repo.rstrip('/') + github_url = '{repo}/archive/{commit}.tar.gz'.format(**locals()) + + with tempfile.TemporaryDirectory() as td: + temp_dir = pathlib.Path(td) + dl_dest = temp_dir / 'archive.tar.gz' + download_to_path(github_url, dl_dest) + repack_archive(dl_dest, dest_path, + strip_components=1, + prefix=prefix + '/') + + +def _github_submodule_required(repo: str, commit: str): + 'Use github API to check if submodules are used' + url = '{repo}/blob/{commit}/.gitmodules'.format(**locals()) + try: + status_code = urllib.request.urlopen(url).getcode() + return status_code == 200 + except: + return False + + +def git_checkout_archive( + dest_path: pathlib.Path, + repo: str, + commit: str, + prefix=None, + ssh_key=None, + include_dot_git=False, +): + """Produce an archive of the files comprising a Git checkout.""" + dest_path.parent.mkdir(parents=True, exist_ok=True) + + if not prefix: + prefix = repo.rstrip("/").rsplit("/", 1)[-1] + + if dest_path.suffixes[-2:] != [".tar", ".zst"]: + raise Exception("Only producing .tar.zst archives is supported.") + + if repo.startswith('https://github.com/'): + if not include_dot_git and not _github_submodule_required(repo, commit): + log("Using github archive service to speedup archive creation") + # Always log sha1 info, either from commit or resolved from repo. + if re.match(r"^[a-fA-F0-9]{40}$", commit): + revision = commit + else: + ref_output = subprocess.check_output(["git", "ls-remote", repo, + 'refs/heads/' + commit]) + revision, _ = ref_output.decode().split(maxsplit=1) + log("Fetching revision {}".format(revision)) + return _git_checkout_github_archive(dest_path, repo, commit, prefix) + + with tempfile.TemporaryDirectory() as td: + temp_dir = pathlib.Path(td) + + git_dir = temp_dir / prefix + + # This could be faster with a shallow clone. However, Git requires a ref + # to initiate a clone. Since the commit-ish may not refer to a ref, we + # simply perform a full clone followed by a checkout. 
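+ # (A bare commit sha is not a ref: servers only advertise refs, so a
+ # shallow fetch of an arbitrary sha is not generally possible.)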
+ print("cloning %s to %s" % (repo, git_dir)) + + env = os.environ.copy() + keypath = "" + if ssh_key: + taskcluster_secret_url = api( + os.environ.get("TASKCLUSTER_PROXY_URL"), + "secrets", + "v1", + "secret/{keypath}".format(keypath=ssh_key), + ) + taskcluster_secret = b"".join(stream_download(taskcluster_secret_url)) + taskcluster_secret = json.loads(taskcluster_secret) + sshkey = taskcluster_secret["secret"]["ssh_privkey"] + + keypath = temp_dir.joinpath("ssh-key") + keypath.write_text(sshkey) + keypath.chmod(0o600) + + env = { + "GIT_SSH_COMMAND": "ssh -o 'StrictHostKeyChecking no' -i {keypath}".format( + keypath=keypath + ) + } + + subprocess.run(["git", "clone", "-n", repo, str(git_dir)], check=True, env=env) + + # Always use a detached head so that git prints out what it checked out. + subprocess.run( + ["git", "checkout", "--detach", commit], cwd=str(git_dir), check=True + ) + + # When including the .git, we want --depth 1, but a direct clone would not + # necessarily be able to give us the right commit. + if include_dot_git: + initial_clone = git_dir.with_name(git_dir.name + ".orig") + git_dir.rename(initial_clone) + subprocess.run( + [ + "git", + "clone", + "file://" + str(initial_clone), + str(git_dir), + "--depth", + "1", + ], + check=True, + ) + subprocess.run( + ["git", "remote", "set-url", "origin", repo], + cwd=str(git_dir), + check=True, + ) + + # --depth 1 can induce more work on the server side, so only use it for + # submodule initialization when we want to keep the .git directory. + depth = ["--depth", "1"] if include_dot_git else [] + subprocess.run( + ["git", "submodule", "update", "--init"] + depth, + cwd=str(git_dir), + check=True, + ) + + if keypath: + os.remove(keypath) + + print("creating archive %s of commit %s" % (dest_path, commit)) + exclude_dot_git = [] if include_dot_git else ["--exclude=.git"] + proc = subprocess.Popen( + [ + "tar", + "cf", + "-", + ] + + exclude_dot_git + + [ + "-C", + str(temp_dir), + prefix, + ], + stdout=subprocess.PIPE, + ) + + with rename_after_close(dest_path, "wb") as out: + ctx = ZstdCompressor() + ctx.copy_stream(proc.stdout, out) + + proc.wait() + + +def command_git_checkout_archive(args): + dest = pathlib.Path(args.dest) + + try: + git_checkout_archive( + dest, + args.repo, + args.commit, + prefix=args.path_prefix, + ssh_key=args.ssh_key_secret, + include_dot_git=args.include_dot_git, + ) + except Exception: + try: + dest.unlink() + except FileNotFoundError: + pass + + raise + + +def command_static_url(args): + gpg_sig_url = args.gpg_sig_url + gpg_env_key = args.gpg_key_env + + if bool(gpg_sig_url) != bool(gpg_env_key): + print("--gpg-sig-url and --gpg-key-env must both be defined") + return 1 + + if gpg_sig_url: + gpg_signature = b"".join(stream_download(gpg_sig_url)) + gpg_key = os.environb[gpg_env_key.encode("ascii")] + + dest = pathlib.Path(args.dest) + dest.parent.mkdir(parents=True, exist_ok=True) + + basename = urllib.parse.urlparse(args.url).path.split("/")[-1] + if basename.endswith("".join(dest.suffixes)): + dl_dest = dest + else: + dl_dest = dest.parent / basename + + try: + download_to_path(args.url, dl_dest, sha256=args.sha256, size=args.size) + + if gpg_sig_url: + gpg_verify_path(dl_dest, gpg_key, gpg_signature) + + if dl_dest != dest or args.strip_components or args.add_prefix: + repack_archive(dl_dest, dest, args.strip_components, args.add_prefix) + except Exception: + try: + dl_dest.unlink() + except FileNotFoundError: + pass + + raise + + if dl_dest != dest: + log("Removing %s" % dl_dest) + dl_dest.unlink() 
+ + +def api(root_url, service, version, path): + # taskcluster-lib-urls is not available when this script runs, so + # simulate its behavior: + return "{root_url}/api/{service}/{version}/{path}".format( + root_url=root_url, service=service, version=version, path=path + ) + + +def get_hash(fetch, root_url): + path = "task/{task}/artifacts/{artifact}".format( + task=fetch["task"], artifact="public/chain-of-trust.json" + ) + url = api(root_url, "queue", "v1", path) + cot = json.loads(download_to_memory(url)) + return cot["artifacts"][fetch["artifact"]]["sha256"] + + +def command_task_artifacts(args): + start = time.monotonic() + fetches = json.loads(os.environ["MOZ_FETCHES"]) + downloads = [] + for fetch in fetches: + extdir = pathlib.Path(args.dest) + if "dest" in fetch: + # Note: normpath doesn't like pathlib.Path in python 3.5 + extdir = pathlib.Path(os.path.normpath(str(extdir.joinpath(fetch["dest"])))) + extdir.mkdir(parents=True, exist_ok=True) + root_url = os.environ["TASKCLUSTER_ROOT_URL"] + sha256 = None + if fetch.get("verify-hash"): + sha256 = get_hash(fetch, root_url) + if fetch["artifact"].startswith("public/"): + path = "task/{task}/artifacts/{artifact}".format( + task=fetch["task"], artifact=fetch["artifact"] + ) + url = api(root_url, "queue", "v1", path) + else: + url = ("{proxy_url}/api/queue/v1/task/{task}/artifacts/{artifact}").format( + proxy_url=os.environ["TASKCLUSTER_PROXY_URL"], + task=fetch["task"], + artifact=fetch["artifact"], + ) + downloads.append((url, extdir, fetch["extract"], sha256)) + + fetch_urls(downloads) + end = time.monotonic() + + perfherder_data = { + "framework": {"name": "build_metrics"}, + "suites": [ + { + "name": "fetch_content", + "value": end - start, + "lowerIsBetter": True, + "shouldAlert": False, + "subtests": [], + } + ], + } + print("PERFHERDER_DATA: {}".format(json.dumps(perfherder_data)), file=sys.stderr) + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(title="sub commands") + + git_checkout = subparsers.add_parser( + "git-checkout-archive", + help="Obtain an archive of files from a Git repository checkout", + ) + git_checkout.set_defaults(func=command_git_checkout_archive) + git_checkout.add_argument( + "--path-prefix", help="Prefix for paths in produced archive" + ) + git_checkout.add_argument("repo", help="URL to Git repository to be cloned") + git_checkout.add_argument("commit", help="Git commit to check out") + git_checkout.add_argument("dest", help="Destination path of archive") + git_checkout.add_argument( + "--ssh-key-secret", help="The scope path of the ssh key to used for checkout" + ) + git_checkout.add_argument( + "--include-dot-git", action="store_true", help="Include the .git directory" + ) + + url = subparsers.add_parser("static-url", help="Download a static URL") + url.set_defaults(func=command_static_url) + url.add_argument("--sha256", required=True, help="SHA-256 of downloaded content") + url.add_argument( + "--size", required=True, type=int, help="Size of downloaded content, in bytes" + ) + url.add_argument( + "--gpg-sig-url", + help="URL containing signed GPG document validating " "URL to fetch", + ) + url.add_argument( + "--gpg-key-env", help="Environment variable containing GPG key to validate" + ) + url.add_argument( + "--strip-components", + type=int, + default=0, + help="Number of leading components to strip from file " + "names in the downloaded archive", + ) + url.add_argument( + "--add-prefix", + default="", + help="Prefix to add to file names in the downloaded " 
"archive", + ) + url.add_argument("url", help="URL to fetch") + url.add_argument("dest", help="Destination path") + + artifacts = subparsers.add_parser("task-artifacts", help="Fetch task artifacts") + artifacts.set_defaults(func=command_task_artifacts) + artifacts.add_argument( + "-d", + "--dest", + default=os.environ.get("MOZ_FETCHES_DIR"), + help="Destination directory which will contain all " + "artifacts (defaults to $MOZ_FETCHES_DIR)", + ) + + args = parser.parse_args() + + if not args.dest: + parser.error( + "no destination directory specified, either pass in --dest " + "or set $MOZ_FETCHES_DIR" + ) + + return args.func(args) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/taskcluster/scripts/misc/get_vs.py b/taskcluster/scripts/misc/get_vs.py new file mode 100755 index 0000000000..d630abedb8 --- /dev/null +++ b/taskcluster/scripts/misc/get_vs.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +import shutil +import ssl +from pathlib import Path +from tempfile import TemporaryDirectory +from urllib import request + +import certifi +import yaml +from buildconfig import topsrcdir +from vsdownload import downloadPackages, extractPackages + +# Hack to hook certifi +_urlopen = request.urlopen + + +def urlopen(url, data=None): + return _urlopen( + url, data, context=ssl.create_default_context(cafile=certifi.where()) + ) + + +request.urlopen = urlopen + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Download and build a Visual Studio artifact" + ) + parser.add_argument("manifest", help="YAML manifest of the contents to download") + parser.add_argument("outdir", help="Output directory") + args = parser.parse_args() + + out_dir = Path(args.outdir) + with open(Path(topsrcdir) / args.manifest) as f: + selected = yaml.safe_load(f.read()) + with TemporaryDirectory(prefix="get_vs", dir=".") as tmpdir: + tmpdir = Path(tmpdir) + dl_cache = tmpdir / "cache" + downloadPackages(selected, dl_cache) + unpacked = tmpdir / "unpack" + extractPackages(selected, dl_cache, unpacked) + vfs = {} + # Fill the output directory with all the paths in lowercase form for + # cross-compiles. + for subpath in ("VC", "Windows Kits/10", "DIA SDK"): + dest = subpath + # When running on Windows, SDK files are extracted under Windows Kits, + # but on other platforms, they end up in Program Files/Windows Kits. + program_files_subpath = unpacked / "Program Files" / subpath + if program_files_subpath.exists(): + subpath = program_files_subpath + else: + subpath = unpacked / subpath + dest = Path(dest) + for root, dirs, files in os.walk(subpath): + relpath = Path(root).relative_to(subpath) + for f in files: + path = Path(root) / f + mode = os.stat(path).st_mode + with open(path, "rb") as fh: + lower_f = f.lower() + # Ideally, we'd use the overlay for .libs too but as of + # writing it's still impractical to use, so lowercase + # them for now, that'll be enough. + if lower_f.endswith(".lib"): + f = lower_f + name = str(dest / relpath / f) + # Set executable flag on .exe files, the Firefox build + # system wants it. 
+ if lower_f.endswith(".exe"): + mode |= (mode & 0o444) >> 2 + print("Adding", name) + out_file = out_dir / name + out_file.parent.mkdir(parents=True, exist_ok=True) + with out_file.open("wb") as out_fh: + shutil.copyfileobj(fh, out_fh) + os.chmod(out_file, mode) + if lower_f.endswith((".h", ".idl")): + vfs.setdefault(str(dest / relpath), []).append(f) + # Create an overlay file for use with clang's -ivfsoverlay flag. + overlay = { + "version": 0, + "case-sensitive": False, + "root-relative": "overlay-dir", + "overlay-relative": True, + "roots": [ + { + "name": p, + "type": "directory", + "contents": [ + { + "name": f, + "type": "file", + "external-contents": f"{p}/{f}", + } + for f in files + ], + } + for p, files in vfs.items() + ], + } + overlay_yaml = out_dir / "overlay.yaml" + with overlay_yaml.open("w") as fh: + fh.write(yaml.dump(overlay)) diff --git a/taskcluster/scripts/misc/mingw-composition.patch b/taskcluster/scripts/misc/mingw-composition.patch new file mode 100644 index 0000000000..40edf921d8 --- /dev/null +++ b/taskcluster/scripts/misc/mingw-composition.patch @@ -0,0 +1,50 @@ +diff --git a/mingw-w64-headers/include/windows.ui.composition.h b/mingw-w64-headers/include/windows.ui.composition.h +index 9dac0f1..58872d5 100644 +--- a/mingw-w64-headers/include/windows.ui.composition.h ++++ b/mingw-w64-headers/include/windows.ui.composition.h +@@ -4916,13 +4916,13 @@ namespace ABI { + ICompositionDrawingSurface : public IInspectable + { + virtual HRESULT STDMETHODCALLTYPE get_AlphaMode( +- enum DirectXAlphaMode *value) = 0; ++ ABI::Windows::Graphics::DirectX::DirectXAlphaMode *value) = 0; + + virtual HRESULT STDMETHODCALLTYPE get_PixelFormat( +- enum DirectXPixelFormat *value) = 0; ++ ABI::Windows::Graphics::DirectX::DirectXPixelFormat *value) = 0; + + virtual HRESULT STDMETHODCALLTYPE get_Size( +- struct Size *value) = 0; ++ ABI::Windows::Foundation::Size *value) = 0; + + }; + } +@@ -5704,8 +5704,8 @@ namespace ABI { + { + virtual HRESULT STDMETHODCALLTYPE CreateDrawingSurface( + struct Size pixels, +- enum DirectXPixelFormat format, +- enum DirectXAlphaMode mode, ++ ABI::Windows::Graphics::DirectX::DirectXPixelFormat format, ++ ABI::Windows::Graphics::DirectX::DirectXAlphaMode mode, + ABI::Windows::UI::Composition::ICompositionDrawingSurface **result) = 0; + + virtual HRESULT STDMETHODCALLTYPE add_RenderingDeviceReplaced( +@@ -9338,7 +9338,7 @@ namespace ABI { + boolean value) = 0; + + virtual HRESULT STDMETHODCALLTYPE get_Offset( +- struct Vector3 *value) = 0; ++ ABI::Windows::Foundation::Numerics::Vector3 *value) = 0; + + virtual HRESULT STDMETHODCALLTYPE put_Offset( + struct Vector3 value) = 0; +@@ -9383,7 +9383,7 @@ namespace ABI { + struct Vector3 value) = 0; + + virtual HRESULT STDMETHODCALLTYPE get_Size( +- struct Vector2 *value) = 0; ++ ABI::Windows::Foundation::Numerics::Vector2 *value) = 0; + + virtual HRESULT STDMETHODCALLTYPE put_Size( + struct Vector2 value) = 0; diff --git a/taskcluster/scripts/misc/mingw-dispatchqueue.patch b/taskcluster/scripts/misc/mingw-dispatchqueue.patch new file mode 100644 index 0000000000..70fd9be819 --- /dev/null +++ b/taskcluster/scripts/misc/mingw-dispatchqueue.patch @@ -0,0 +1,157 @@ +From 6e031273d1763ef1fd7acc11a6ed6c2a819c91ba Mon Sep 17 00:00:00 2001 +From: Tom Ritter +Date: Thu, 2 Feb 2023 15:51:46 -0500 +Subject: [PATCH 6/6] Add back IDispatcherQueueController + +--- + mingw-w64-headers/include/windows.system.h | 127 +++++++++++++++++++++ + 1 file changed, 127 insertions(+) + +diff --git 
a/mingw-w64-headers/include/windows.system.h b/mingw-w64-headers/include/windows.system.h +index 688361148..1bb159a31 100644 +--- a/mingw-w64-headers/include/windows.system.h ++++ b/mingw-w64-headers/include/windows.system.h +@@ -41,6 +41,22 @@ namespace ABI { + #endif /* __cplusplus */ + #endif + ++#ifndef ____x_ABI_CWindows_CSystem_CIDispatcherQueueController_FWD_DEFINED__ ++#define ____x_ABI_CWindows_CSystem_CIDispatcherQueueController_FWD_DEFINED__ ++typedef interface __x_ABI_CWindows_CSystem_CIDispatcherQueueController __x_ABI_CWindows_CSystem_CIDispatcherQueueController; ++#ifdef __cplusplus ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController ABI::Windows::System::IDispatcherQueueController ++namespace ABI { ++ namespace Windows { ++ namespace System { ++ interface IDispatcherQueueController; ++ } ++ } ++} ++#endif /* __cplusplus */ ++#endif ++ ++ + #ifndef ____x_ABI_CWindows_CSystem_CUser_FWD_DEFINED__ + #define ____x_ABI_CWindows_CSystem_CUser_FWD_DEFINED__ + #ifdef __cplusplus +@@ -269,6 +285,117 @@ static __WIDL_INLINE HRESULT __x_ABI_CWindows_CSystem_CIUserChangedEventArgs_get + #endif /* ____x_ABI_CWindows_CSystem_CIUserChangedEventArgs_INTERFACE_DEFINED__ */ + #endif /* WINDOWS_FOUNDATION_UNIVERSALAPICONTRACT_VERSION >= 0x10000 */ + ++/***************************************************************************** ++ * IDispatcherQueueController interface ++ */ ++#if WINDOWS_FOUNDATION_UNIVERSALAPICONTRACT_VERSION >= 0x50000 ++#ifndef ____x_ABI_CWindows_CSystem_CIDispatcherQueueController_INTERFACE_DEFINED__ ++#define ____x_ABI_CWindows_CSystem_CIDispatcherQueueController_INTERFACE_DEFINED__ ++ ++DEFINE_GUID(IID___x_ABI_CWindows_CSystem_CIDispatcherQueueController, 0x22f34e66, 0x50db, 0x4e36, 0xa9,0x8d, 0x61,0xc0,0x1b,0x38,0x4d,0x20); ++#if defined(__cplusplus) && !defined(CINTERFACE) ++} /* extern "C" */ ++namespace ABI { ++ namespace Windows { ++ namespace System { ++ MIDL_INTERFACE("22f34e66-50db-4e36-a98d-61c01b384d20") ++ IDispatcherQueueController : public IInspectable ++ { ++ }; ++ } ++ } ++} ++extern "C" { ++#ifdef __CRT_UUID_DECL ++__CRT_UUID_DECL(__x_ABI_CWindows_CSystem_CIDispatcherQueueController, 0x22f34e66, 0x50db, 0x4e36, 0xa9,0x8d, 0x61,0xc0,0x1b,0x38,0x4d,0x20) ++#endif ++#else ++typedef struct __x_ABI_CWindows_CSystem_CIDispatcherQueueControllerVtbl { ++ BEGIN_INTERFACE ++ ++ /*** IUnknown methods ***/ ++ HRESULT (STDMETHODCALLTYPE *QueryInterface)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This, ++ REFIID riid, ++ void **ppvObject); ++ ++ ULONG (STDMETHODCALLTYPE *AddRef)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This); ++ ++ ULONG (STDMETHODCALLTYPE *Release)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This); ++ ++ /*** IInspectable methods ***/ ++ HRESULT (STDMETHODCALLTYPE *GetIids)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This, ++ ULONG *iidCount, ++ IID **iids); ++ ++ HRESULT (STDMETHODCALLTYPE *GetRuntimeClassName)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This, ++ HSTRING *className); ++ ++ HRESULT (STDMETHODCALLTYPE *GetTrustLevel)( ++ __x_ABI_CWindows_CSystem_CIDispatcherQueueController *This, ++ TrustLevel *trustLevel); ++ ++ END_INTERFACE ++} __x_ABI_CWindows_CSystem_CIDispatcherQueueControllerVtbl; ++ ++interface __x_ABI_CWindows_CSystem_CIDispatcherQueueController { ++ CONST_VTBL __x_ABI_CWindows_CSystem_CIDispatcherQueueControllerVtbl* lpVtbl; ++}; ++ ++#ifdef COBJMACROS ++#ifndef WIDL_C_INLINE_WRAPPERS ++/*** IUnknown methods ***/ ++#define 
__x_ABI_CWindows_CSystem_CIDispatcherQueueController_QueryInterface(This,riid,ppvObject) (This)->lpVtbl->QueryInterface(This,riid,ppvObject) ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController_AddRef(This) (This)->lpVtbl->AddRef(This) ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController_Release(This) (This)->lpVtbl->Release(This) ++/*** IInspectable methods ***/ ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetIids(This,iidCount,iids) (This)->lpVtbl->GetIids(This,iidCount,iids) ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetRuntimeClassName(This,className) (This)->lpVtbl->GetRuntimeClassName(This,className) ++#define __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetTrustLevel(This,trustLevel) (This)->lpVtbl->GetTrustLevel(This,trustLevel) ++#else ++/*** IUnknown methods ***/ ++static FORCEINLINE HRESULT __x_ABI_CWindows_CSystem_CIDispatcherQueueController_QueryInterface(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This,REFIID riid,void **ppvObject) { ++ return This->lpVtbl->QueryInterface(This,riid,ppvObject); ++} ++static FORCEINLINE ULONG __x_ABI_CWindows_CSystem_CIDispatcherQueueController_AddRef(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This) { ++ return This->lpVtbl->AddRef(This); ++} ++static FORCEINLINE ULONG __x_ABI_CWindows_CSystem_CIDispatcherQueueController_Release(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This) { ++ return This->lpVtbl->Release(This); ++} ++/*** IInspectable methods ***/ ++static FORCEINLINE HRESULT __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetIids(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This,ULONG *iidCount,IID **iids) { ++ return This->lpVtbl->GetIids(This,iidCount,iids); ++} ++static FORCEINLINE HRESULT __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetRuntimeClassName(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This,HSTRING *className) { ++ return This->lpVtbl->GetRuntimeClassName(This,className); ++} ++static FORCEINLINE HRESULT __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetTrustLevel(__x_ABI_CWindows_CSystem_CIDispatcherQueueController* This,TrustLevel *trustLevel) { ++ return This->lpVtbl->GetTrustLevel(This,trustLevel); ++} ++#endif ++#ifdef WIDL_using_Windows_System ++#define IID_IDispatcherQueueController IID___x_ABI_CWindows_CSystem_CIDispatcherQueueController ++#define IDispatcherQueueControllerVtbl __x_ABI_CWindows_CSystem_CIDispatcherQueueControllerVtbl ++#define IDispatcherQueueController __x_ABI_CWindows_CSystem_CIDispatcherQueueController ++#define IDispatcherQueueController_QueryInterface __x_ABI_CWindows_CSystem_CIDispatcherQueueController_QueryInterface ++#define IDispatcherQueueController_AddRef __x_ABI_CWindows_CSystem_CIDispatcherQueueController_AddRef ++#define IDispatcherQueueController_Release __x_ABI_CWindows_CSystem_CIDispatcherQueueController_Release ++#define IDispatcherQueueController_GetIids __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetIids ++#define IDispatcherQueueController_GetRuntimeClassName __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetRuntimeClassName ++#define IDispatcherQueueController_GetTrustLevel __x_ABI_CWindows_CSystem_CIDispatcherQueueController_GetTrustLevel ++#endif /* WIDL_using_Windows_System */ ++#endif ++ ++#endif ++ ++#endif /* ____x_ABI_CWindows_CSystem_CIDispatcherQueueController_INTERFACE_DEFINED__ */ ++#endif /* WINDOWS_FOUNDATION_UNIVERSALAPICONTRACT_VERSION >= 0x50000 */ ++ + /* + * Class Windows.System.User + */ +-- +2.25.1 + diff 
--git a/taskcluster/scripts/misc/mingw-dwrite_3.patch b/taskcluster/scripts/misc/mingw-dwrite_3.patch new file mode 100644 index 0000000000..25c7b89eea --- /dev/null +++ b/taskcluster/scripts/misc/mingw-dwrite_3.patch @@ -0,0 +1,87 @@ +From a9804765e442063be37338933b9c40e3e3d01aac Mon Sep 17 00:00:00 2001 +From: Sanketh Menda +Date: Thu, 2 Feb 2023 12:29:03 -0500 +Subject: [PATCH 4/7] dwrite_3.h: rename GetGlyphImageFormats_ to + GetGlyphImageFormats + +Wine's WIDL currently doesn't support overloading functions, so till +that is fixed patch dwrite_3.h to rename GlyphImageFormats_ to +GetGlyphImageFormats. +--- + mingw-w64-headers/include/dwrite_3.h | 18 +++++++++--------- + 1 file changed, 9 insertions(+), 9 deletions(-) + +diff --git a/mingw-w64-headers/include/dwrite_3.h b/mingw-w64-headers/include/dwrite_3.h +index 205c47f04..352731bf1 100644 +--- a/mingw-w64-headers/include/dwrite_3.h ++++ b/mingw-w64-headers/include/dwrite_3.h +@@ -8181,7 +8181,7 @@ DEFINE_GUID(IID_IDWriteFontFace4, 0x27f2a904, 0x4eb8, 0x441d, 0x96,0x78, 0x05,0x + MIDL_INTERFACE("27f2a904-4eb8-441d-9678-0563f53e3e2f") + IDWriteFontFace4 : public IDWriteFontFace3 + { +- virtual HRESULT STDMETHODCALLTYPE GetGlyphImageFormats_( ++ virtual HRESULT STDMETHODCALLTYPE GetGlyphImageFormats( + UINT16 glyph, + UINT32 ppem_first, + UINT32 ppem_last, +@@ -8481,7 +8481,7 @@ typedef struct IDWriteFontFace4Vtbl { + WINBOOL *are_local); + + /*** IDWriteFontFace4 methods ***/ +- HRESULT (STDMETHODCALLTYPE *GetGlyphImageFormats_)( ++ HRESULT (STDMETHODCALLTYPE *GetGlyphImageFormats)( + IDWriteFontFace4 *This, + UINT16 glyph, + UINT32 ppem_first, +@@ -8562,7 +8562,7 @@ interface IDWriteFontFace4 { + #define IDWriteFontFace4_AreCharactersLocal(This,characters,count,enqueue_if_not,are_local) (This)->lpVtbl->AreCharactersLocal(This,characters,count,enqueue_if_not,are_local) + #define IDWriteFontFace4_AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local) (This)->lpVtbl->AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local) + /*** IDWriteFontFace4 methods ***/ +-#define IDWriteFontFace4_GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats) (This)->lpVtbl->GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats) ++#define IDWriteFontFace4_GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats) (This)->lpVtbl->GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats) + #define IDWriteFontFace4_GetGlyphImageFormats(This) (This)->lpVtbl->GetGlyphImageFormats(This) + #define IDWriteFontFace4_GetGlyphImageData(This,glyph,ppem,format,data,context) (This)->lpVtbl->GetGlyphImageData(This,glyph,ppem,format,data,context) + #define IDWriteFontFace4_ReleaseGlyphImageData(This,context) (This)->lpVtbl->ReleaseGlyphImageData(This,context) +@@ -8705,8 +8705,8 @@ static __WIDL_INLINE HRESULT IDWriteFontFace4_AreGlyphsLocal(IDWriteFontFace4* T + return This->lpVtbl->AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local); + } + /*** IDWriteFontFace4 methods ***/ +-static __WIDL_INLINE HRESULT IDWriteFontFace4_GetGlyphImageFormats_(IDWriteFontFace4* This,UINT16 glyph,UINT32 ppem_first,UINT32 ppem_last,DWRITE_GLYPH_IMAGE_FORMATS *formats) { +- return This->lpVtbl->GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats); ++static __WIDL_INLINE HRESULT IDWriteFontFace4_GetGlyphImageFormats(IDWriteFontFace4* This,UINT16 glyph,UINT32 ppem_first,UINT32 ppem_last,DWRITE_GLYPH_IMAGE_FORMATS *formats) { ++ return This->lpVtbl->GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats); + } + static __WIDL_INLINE 
DWRITE_GLYPH_IMAGE_FORMATS IDWriteFontFace4_GetGlyphImageFormats(IDWriteFontFace4* This) { + return This->lpVtbl->GetGlyphImageFormats(This); +@@ -9033,7 +9033,7 @@ typedef struct IDWriteFontFace5Vtbl { + WINBOOL *are_local); + + /*** IDWriteFontFace4 methods ***/ +- HRESULT (STDMETHODCALLTYPE *GetGlyphImageFormats_)( ++ HRESULT (STDMETHODCALLTYPE *GetGlyphImageFormats)( + IDWriteFontFace5 *This, + UINT16 glyph, + UINT32 ppem_first, +@@ -9134,7 +9134,7 @@ interface IDWriteFontFace5 { + #define IDWriteFontFace5_AreCharactersLocal(This,characters,count,enqueue_if_not,are_local) (This)->lpVtbl->AreCharactersLocal(This,characters,count,enqueue_if_not,are_local) + #define IDWriteFontFace5_AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local) (This)->lpVtbl->AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local) + /*** IDWriteFontFace4 methods ***/ +-#define IDWriteFontFace5_GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats) (This)->lpVtbl->GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats) ++#define IDWriteFontFace5_GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats) (This)->lpVtbl->GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats) + #define IDWriteFontFace5_GetGlyphImageFormats(This) (This)->lpVtbl->GetGlyphImageFormats(This) + #define IDWriteFontFace5_GetGlyphImageData(This,glyph,ppem,format,data,context) (This)->lpVtbl->GetGlyphImageData(This,glyph,ppem,format,data,context) + #define IDWriteFontFace5_ReleaseGlyphImageData(This,context) (This)->lpVtbl->ReleaseGlyphImageData(This,context) +@@ -9283,8 +9283,8 @@ static __WIDL_INLINE HRESULT IDWriteFontFace5_AreGlyphsLocal(IDWriteFontFace5* T + return This->lpVtbl->AreGlyphsLocal(This,glyphs,count,enqueue_if_not,are_local); + } + /*** IDWriteFontFace4 methods ***/ +-static __WIDL_INLINE HRESULT IDWriteFontFace5_GetGlyphImageFormats_(IDWriteFontFace5* This,UINT16 glyph,UINT32 ppem_first,UINT32 ppem_last,DWRITE_GLYPH_IMAGE_FORMATS *formats) { +- return This->lpVtbl->GetGlyphImageFormats_(This,glyph,ppem_first,ppem_last,formats); ++static __WIDL_INLINE HRESULT IDWriteFontFace5_GetGlyphImageFormats(IDWriteFontFace5* This,UINT16 glyph,UINT32 ppem_first,UINT32 ppem_last,DWRITE_GLYPH_IMAGE_FORMATS *formats) { ++ return This->lpVtbl->GetGlyphImageFormats(This,glyph,ppem_first,ppem_last,formats); + } + static __WIDL_INLINE DWRITE_GLYPH_IMAGE_FORMATS IDWriteFontFace5_GetGlyphImageFormats(IDWriteFontFace5* This) { + return This->lpVtbl->GetGlyphImageFormats(This); +-- +2.25.1 + diff --git a/taskcluster/scripts/misc/mingw-enum.patch b/taskcluster/scripts/misc/mingw-enum.patch new file mode 100644 index 0000000000..de12434ee3 --- /dev/null +++ b/taskcluster/scripts/misc/mingw-enum.patch @@ -0,0 +1,25 @@ +From b415d3e199de9cb2dce6290721bcfc2871f33769 Mon Sep 17 00:00:00 2001 +From: Tom Ritter +Date: Thu, 2 Feb 2023 12:26:47 -0500 +Subject: [PATCH 3/7] Fix enum int issues + +--- + mingw-w64-headers/include/windows.foundation.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/mingw-w64-headers/include/windows.foundation.h b/mingw-w64-headers/include/windows.foundation.h +index fd66e27d3..7981f3380 100644 +--- a/mingw-w64-headers/include/windows.foundation.h ++++ b/mingw-w64-headers/include/windows.foundation.h +@@ -647,7 +647,7 @@ static __WIDL_INLINE HRESULT __x_ABI_CWindows_CFoundation_CIAsyncActionCompleted + namespace ABI { + namespace Windows { + namespace Foundation { +- enum PropertyType { ++ enum PropertyType : int { + PropertyType_Empty = 0, + PropertyType_UInt8 = 1, + 
PropertyType_Int16 = 2, +-- +2.25.1 + diff --git a/taskcluster/scripts/misc/mingw-ts_sd.patch b/taskcluster/scripts/misc/mingw-ts_sd.patch new file mode 100644 index 0000000000..c76f5f1dab --- /dev/null +++ b/taskcluster/scripts/misc/mingw-ts_sd.patch @@ -0,0 +1,33 @@ +From 8e23d493352ada53b3a766f14e2e93484353c15c Mon Sep 17 00:00:00 2001 +From: Tom Ritter +Date: Wed, 8 Feb 2023 10:52:16 -0500 +Subject: [PATCH 11/11] TS_SD_ defines + +--- + mingw-w64-headers/include/textstor.h | 10 ++++++++++ + 1 file changed, 10 insertions(+) + +diff --git a/mingw-w64-headers/include/textstor.h b/mingw-w64-headers/include/textstor.h +index 0681cab92..64b5ebec5 100644 +--- a/mingw-w64-headers/include/textstor.h ++++ b/mingw-w64-headers/include/textstor.h +@@ -68,6 +68,16 @@ extern "C" { + + #define TS_SD_LOADING (0x2) + ++#define TS_SD_RESERVED (0x4) ++ ++#define TS_SD_TKBAUTOCORRECTENABLE (0x8) ++ ++#define TS_SD_TKBPREDICTIONENABLE (0x10) ++ ++#define TS_SD_UIINTEGRATIONENABLE (0x20) ++ ++#define TS_SD_INPUTPANEMANUALDISPLAYENABLE (0x40) ++ + #define TS_SS_DISJOINTSEL (0x1) + + #define TS_SS_REGIONS (0x2) +-- +2.25.1 + diff --git a/taskcluster/scripts/misc/mingw-unknown.patch b/taskcluster/scripts/misc/mingw-unknown.patch new file mode 100644 index 0000000000..2e29e60106 --- /dev/null +++ b/taskcluster/scripts/misc/mingw-unknown.patch @@ -0,0 +1,46 @@ +From 753c3ad7018936ef9a9d2af8b75efbfa14c149b7 Mon Sep 17 00:00:00 2001 +From: Tom Ritter +Date: Thu, 2 Feb 2023 12:26:22 -0500 +Subject: [PATCH 2/7] Add back the IUnknown_QI functions + +--- + mingw-w64-headers/include/unknwn.h | 23 +++++++++++++++++++++++ + 1 file changed, 23 insertions(+) + +diff --git a/mingw-w64-headers/include/unknwn.h b/mingw-w64-headers/include/unknwn.h +index f3ada04a2..f33e8f270 100644 +--- a/mingw-w64-headers/include/unknwn.h ++++ b/mingw-w64-headers/include/unknwn.h +@@ -169,6 +169,29 @@ static __WIDL_INLINE ULONG IUnknown_Release(IUnknown* This) { + + #endif + ++HRESULT STDMETHODCALLTYPE IUnknown_QueryInterface_Proxy( ++ IUnknown* This, ++ REFIID riid, ++ void **ppvObject); ++void __RPC_STUB IUnknown_QueryInterface_Stub( ++ IRpcStubBuffer* This, ++ IRpcChannelBuffer* pRpcChannelBuffer, ++ PRPC_MESSAGE pRpcMessage, ++ DWORD* pdwStubPhase); ++ULONG STDMETHODCALLTYPE IUnknown_AddRef_Proxy( ++ IUnknown* This); ++void __RPC_STUB IUnknown_AddRef_Stub( ++ IRpcStubBuffer* This, ++ IRpcChannelBuffer* pRpcChannelBuffer, ++ PRPC_MESSAGE pRpcMessage, ++ DWORD* pdwStubPhase); ++ULONG STDMETHODCALLTYPE IUnknown_Release_Proxy( ++ IUnknown* This); ++void __RPC_STUB IUnknown_Release_Stub( ++ IRpcStubBuffer* This, ++ IRpcChannelBuffer* pRpcChannelBuffer, ++ PRPC_MESSAGE pRpcMessage, ++ DWORD* pdwStubPhase); + + #endif /* __IUnknown_INTERFACE_DEFINED__ */ + +-- +2.25.1 + diff --git a/taskcluster/scripts/misc/mingw-widl.patch b/taskcluster/scripts/misc/mingw-widl.patch new file mode 100644 index 0000000000..225a908ad7 --- /dev/null +++ b/taskcluster/scripts/misc/mingw-widl.patch @@ -0,0 +1,35 @@ +From 534ecbfb4da9a27c287a9a44ea18ef44ccf2aac2 Mon Sep 17 00:00:00 2001 +From: Tom Ritter +Date: Thu, 2 Feb 2023 13:28:39 -0500 +Subject: [PATCH 5/7] Fix widl + +In commit c94f44f9b455 (in wine's repo) open_typelib was changed +from returning a file descriptor (null on error) to aborting if +an error was encountered. + +This is incorrect, because read_importlib in typelib.c has a +fallback behavior where it calls open_typelib again if it +fails the first time. And _then_ it will error if it couldn't do +it either time. 
+
+Restore the original behavior for open_typelib
+---
+ mingw-w64-tools/widl/src/widl.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/mingw-w64-tools/widl/src/widl.c b/mingw-w64-tools/widl/src/widl.c
+index 986aa3624..39bc8ac21 100644
+--- a/mingw-w64-tools/widl/src/widl.c
++++ b/mingw-w64-tools/widl/src/widl.c
+@@ -710,7 +710,7 @@ int open_typelib( const char *name )
+             TRYOPEN( strmake( "%s%s/%s", default_dirs[i], pe_dir, name ));
+         }
+     }
+-    error( "cannot find %s\n", name );
++    return -1;
+ #undef TRYOPEN
+ }
+
+--
+2.25.1
+
diff --git a/taskcluster/scripts/misc/moz.build b/taskcluster/scripts/misc/moz.build
new file mode 100644
index 0000000000..3b5b0e1e58
--- /dev/null
+++ b/taskcluster/scripts/misc/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("verify-updatebot.py"):
+    BUG_COMPONENT = ("Developer Infrastructure", "Mach Vendor & Updatebot")
diff --git a/taskcluster/scripts/misc/osx-cross-linker b/taskcluster/scripts/misc/osx-cross-linker
new file mode 100755
index 0000000000..ec08589524
--- /dev/null
+++ b/taskcluster/scripts/misc/osx-cross-linker
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+exec $MOZ_FETCHES_DIR/clang/bin/clang -v \
+    -fuse-ld=lld \
+    -mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET:-10.12} \
+    -target $TARGET \
+    -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk \
+    "$@"
diff --git a/taskcluster/scripts/misc/pack-cpython.sh b/taskcluster/scripts/misc/pack-cpython.sh
new file mode 100755
index 0000000000..88c1ef6b7f
--- /dev/null
+++ b/taskcluster/scripts/misc/pack-cpython.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for extracting the Python binary for Windows from the setup file.
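+#
+# cabextract pulls the MSI packages embedded in the installer out into the
+# working directory; msiextract (below) then unpacks those MSIs under $tardir.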
+ +ARTIFACT_NAME=win64-cpython.tar.zst +PYTHON_INSTALLER=`echo $MOZ_FETCHES_DIR/python-3.*-amd64.exe` +WINE=$MOZ_FETCHES_DIR/wine/bin/wine + +cabextract $PYTHON_INSTALLER + +tardir=python +mkdir $tardir +pushd $tardir +msiextract ../* +rm -f api-ms-win-* + +# bundle pip +$WINE python.exe -m ensurepip +$WINE python.exe -m pip install --upgrade pip==23.0 +$WINE python.exe -m pip install --only-binary ':all:' -r ${GECKO_PATH}/build/psutil_requirements.txt -r ${GECKO_PATH}/build/zstandard_requirements.txt + +# extra symlinks to have a consistent install with Linux and OSX +ln -s python.exe python3.exe +chmod u+x python3.exe + +ln -s ./Scripts/pip3.exe pip3.exe +chmod u+x pip3.exe + + +popd + +tar caf `basename ${TOOLCHAIN_ARTIFACT}` ${tardir} + +mkdir -p $UPLOAD_DIR +mv `basename ${TOOLCHAIN_ARTIFACT}` $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/pack.sh b/taskcluster/scripts/misc/pack.sh new file mode 100755 index 0000000000..f19feb5053 --- /dev/null +++ b/taskcluster/scripts/misc/pack.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +set -x +set -e +set -o pipefail + +[ -z "$1" ] && echo Missing argument && exit 1 + +dir=$(dirname "$1") +name=$(basename "$1") + +case "$(uname -s)" in +Darwin) + TAR_FLAGS=--no-fflags + ;; +*) + TAR_FLAGS= + ;; +esac + +(cd "$dir"; find "$name"/* -not -type d -print0 | tar $TAR_FLAGS -cvf - --null -T -) | python3 $GECKO_PATH/taskcluster/scripts/misc/zstdpy > "$name.tar.zst" + +mkdir -p "$UPLOAD_DIR" +mv "$name.tar.zst" "$UPLOAD_DIR" diff --git a/taskcluster/scripts/misc/private_local_toolchain.sh b/taskcluster/scripts/misc/private_local_toolchain.sh new file mode 100755 index 0000000000..df255a8576 --- /dev/null +++ b/taskcluster/scripts/misc/private_local_toolchain.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -x +set -e +set -o pipefail + +script=$1 +shift +artifact=$(basename $TOOLCHAIN_ARTIFACT) +dir=${artifact%.tar.*} + +$GECKO_PATH/mach python --virtualenv build $(dirname $0)/$script "$@" $dir + +$(dirname $0)/pack.sh $dir diff --git a/taskcluster/scripts/misc/repack-android-avd-linux.sh b/taskcluster/scripts/misc/repack-android-avd-linux.sh new file mode 100755 index 0000000000..f36fda905d --- /dev/null +++ b/taskcluster/scripts/misc/repack-android-avd-linux.sh @@ -0,0 +1,30 @@ +#!/bin/bash +set -x -e -v + +# Initialize XVFB for the AVD +. /builds/worker/scripts/xvfb.sh + +cleanup() { + local rv=$? + cleanup_xvfb + exit $rv +} +trap cleanup EXIT INT + +start_xvfb '1024x768x24' 2 + +# This script is for fetching and repacking the Android SDK (for +# Linux), the tools required to produce Android packages. + +UPLOAD_DIR=/builds/worker/artifacts/ +AVD_JSON_CONFIG="$1" + +mkdir -p $HOME/artifacts $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/android-device +cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --artifact-mode --prewarm-avd --avd-manifest="$AVD_JSON_CONFIG" --no-interactive --list-packages + +tar cavf $UPLOAD_DIR/android-avd-linux.tar.zst -C /builds/worker/.mozbuild android-device + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-android-emulator-linux.sh b/taskcluster/scripts/misc/repack-android-emulator-linux.sh new file mode 100755 index 0000000000..a9745d7a1e --- /dev/null +++ b/taskcluster/scripts/misc/repack-android-emulator-linux.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -x -e -v + +# This script is for fetching and repacking the Android emulator (for +# Linux), the tools required to produce Android packages. + +mkdir -p $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/android-emulator-linux. 
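+# (--emulator-only should restrict the mozboot bootstrap to the emulator
+# package; --list-packages logs which packages ended up installed.)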
+cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --emulator-only --no-interactive --list-packages + +# Bug 1868944: override emulator to a known working version +curl -L http://dl.google.com/android/repository/emulator-linux_x64-10696886.zip > /tmp/emulator.zip +cd /builds/worker/.mozbuild/android-sdk-linux +rm -rf emulator +unzip /tmp/emulator.zip +cd $GECKO_PATH + +# Remove extra files we don't need +rm -rfv /builds/worker/.mozbuild/android-sdk-linux/tools +mkdir /builds/worker/.mozbuild/android-sdk-linux/system-images +mkdir /builds/worker/.mozbuild/android-sdk-linux/platforms +find /builds/worker/.mozbuild/android-sdk-linux/emulator/qemu -type f -not -name "*x86*" -print -delete + +tar cavf $UPLOAD_DIR/android-emulator-linux.tar.zst -C /builds/worker/.mozbuild android-sdk-linux bundletool.jar + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-android-ndk-linux.sh b/taskcluster/scripts/misc/repack-android-ndk-linux.sh new file mode 100755 index 0000000000..f48b60c3e1 --- /dev/null +++ b/taskcluster/scripts/misc/repack-android-ndk-linux.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -x -e -v + +# This script is for fetching and repacking the Android NDK (for +# Linux), the tools required to produce native Android programs. + +mkdir -p $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/android-ndk-$VER. +cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --ndk-only --no-interactive + +# Don't generate a tarball with a versioned NDK directory. +mv $HOME/.mozbuild/android-ndk-* $HOME/.mozbuild/android-ndk +tar cavf $UPLOAD_DIR/android-ndk.tar.zst -C /builds/worker/.mozbuild android-ndk + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-android-sdk-linux.sh b/taskcluster/scripts/misc/repack-android-sdk-linux.sh new file mode 100755 index 0000000000..0042d17472 --- /dev/null +++ b/taskcluster/scripts/misc/repack-android-sdk-linux.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -x -e -v + +# This script is for fetching and repacking the Android SDK (for +# Linux), the tools required to produce Android packages. + +mkdir -p $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/android-sdk-linux. +cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --artifact-mode --no-interactive --list-packages + +# Bug 1869407: override emulator to a known working version +curl -L http://dl.google.com/android/repository/emulator-linux_x64-10696886.zip > /tmp/emulator.zip +cd /builds/worker/.mozbuild/android-sdk-linux +rm -rf emulator +unzip /tmp/emulator.zip +cd $GECKO_PATH + +tar cavf $UPLOAD_DIR/android-sdk-linux.tar.zst -C /builds/worker/.mozbuild android-sdk-linux bundletool.jar + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-android-system-images-linux.sh b/taskcluster/scripts/misc/repack-android-system-images-linux.sh new file mode 100755 index 0000000000..395061ba9e --- /dev/null +++ b/taskcluster/scripts/misc/repack-android-system-images-linux.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -x -e -v + +# This script is for fetching and repacking the Android SDK (for +# Linux), the tools required to produce Android packages. + +AVD_JSON_CONFIG="$1" + +mkdir -p $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/android-sdk-linux. 
+cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --artifact-mode --system-images-only --avd-manifest="$AVD_JSON_CONFIG" --no-interactive --list-packages + +tar cavf $UPLOAD_DIR/android-system-images-linux.tar.zst -C /builds/worker/.mozbuild android-sdk-linux/system-images + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-clang.sh b/taskcluster/scripts/misc/repack-clang.sh new file mode 100755 index 0000000000..4a1b3ed01e --- /dev/null +++ b/taskcluster/scripts/misc/repack-clang.sh @@ -0,0 +1,52 @@ +#!/bin/bash +set -x -e -v + +shopt -s nullglob + +# This script is for repacking clang for cross targets on a Linux host. + +cd $MOZ_FETCHES_DIR + +# We have a clang toolchain in $MOZ_FETCHES_DIR/clang +# We have some compiler-rts in $MOZ_FETCHES_DIR/compiler-rt* +# We have some libunwinds in $MOZ_FETCHES_DIR/libunwind* +# We copy everything from the compiler-rts into clang/lib/clang/$version/ +# and everything from the libunwinds into clang/ +clang_ver_dir=$(echo clang/lib/clang/*/include) +clang_ver_dir=${clang_ver_dir%/include} +[ -n "$clang_ver_dir" ] && for c in compiler-rt* libunwind*; do + case $c in + compiler-rt*) + clang_dir=$clang_ver_dir + ;; + libunwind*) + clang_dir=clang + ;; + esac + find $c -mindepth 1 -type d | while read d; do + mkdir -p "$clang_dir/${d#$c/}" + find $d -mindepth 1 -maxdepth 1 -not -type d | while read f; do + target_file="$clang_dir/${f#$c/}" + case $d in + compiler-rt-*/lib/darwin) + if [ -f "$target_file" ]; then + # Unify overlapping files for darwin/ + $MOZ_FETCHES_DIR/cctools/bin/lipo -create "$f" "$target_file" -output "$target_file.new" + mv "$target_file.new" "$target_file" + continue + fi + ;; + esac + if [ -f "$target_file" ] && ! diff -q "$f" "$target_file" 2>/dev/null; then + echo "Cannot copy $f because it is already in ${target_file%/*}" >&2 && exit 1 + fi + cp "$f" "$target_file" + done + done +done + +if [ -n "$UPLOAD_DIR" ]; then + tar caf clang.tar.zst clang + mkdir -p $UPLOAD_DIR + mv clang.tar.zst $UPLOAD_DIR +fi diff --git a/taskcluster/scripts/misc/repack-jdk-linux.sh b/taskcluster/scripts/misc/repack-jdk-linux.sh new file mode 100755 index 0000000000..2d13e360e5 --- /dev/null +++ b/taskcluster/scripts/misc/repack-jdk-linux.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -x -e -v + +# This script is for fetching and repacking the OpenJDK (for +# Linux) + +AVD_JSON_CONFIG="$1" + +mkdir -p $UPLOAD_DIR + +# Populate /builds/worker/.mozbuild/jdk +cd $GECKO_PATH +./mach python python/mozboot/mozboot/android.py --jdk-only + +tar cavf $UPLOAD_DIR/jdk-linux.tar.zst -C /builds/worker/.mozbuild jdk + +ls -al $UPLOAD_DIR diff --git a/taskcluster/scripts/misc/repack-node.sh b/taskcluster/scripts/misc/repack-node.sh new file mode 100755 index 0000000000..d3880e4799 --- /dev/null +++ b/taskcluster/scripts/misc/repack-node.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -x -e -v + +# This script is for repacking Node (and NPM) from nodejs.org. 
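+# It assumes a fetch task has already unpacked a nodejs.org distribution
+# into $MOZ_FETCHES_DIR/node.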
+ +mkdir -p "$UPLOAD_DIR" + +cd "$MOZ_FETCHES_DIR" + +# npx doesn't have great security characteristics (it downloads and executes +# stuff directly out of npm at runtime), so let's not risk it getting into +# anyone's PATH who doesn't already have it there: +rm -f node/bin/npx node/bin/npx.exe +tar caf "$UPLOAD_DIR"/node.tar.zst node diff --git a/taskcluster/scripts/misc/repack_rust.py b/taskcluster/scripts/misc/repack_rust.py new file mode 100755 index 0000000000..43bbe44f16 --- /dev/null +++ b/taskcluster/scripts/misc/repack_rust.py @@ -0,0 +1,674 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +This script downloads and repacks official rust language builds +with the necessary tool and target support for the Firefox +build environment. +""" + +import argparse +import errno +import hashlib +import os +import shutil +import subprocess +import tarfile +import tempfile +import textwrap +from contextlib import contextmanager + +import requests +import toml +import zstandard + + +def log(msg): + print("repack: %s" % msg, flush=True) + + +def fetch_file(url): + """Download a file from the given url if it's not already present. + + Returns the SHA-2 256-bit hash of the received file.""" + filename = os.path.basename(url) + sha = hashlib.sha256() + size = 4096 + if os.path.exists(filename): + with open(filename, "rb") as fd: + while True: + block = fd.read(size) + if not block: + return sha.hexdigest() + sha.update(block) + log("Could not calculate checksum!") + return None + r = requests.get(url, stream=True) + r.raise_for_status() + with open(filename, "wb") as fd: + for chunk in r.iter_content(size): + fd.write(chunk) + sha.update(chunk) + return sha.hexdigest() + + +def check_call_with_input(cmd, input_data): + """Invoke a command, passing the input String over stdin. + + This is like subprocess.check_call, but allows piping + input to interactive commands.""" + p = subprocess.Popen(cmd, stdin=subprocess.PIPE) + p.communicate(input_data) + if p.wait(): + raise subprocess.CalledProcessError(p.returncode, cmd) + + +def setup_gpg(): + """Add the signing key to the current gpg config. + + Import a hard-coded copy of the release signing public key + and mark it trusted in the gpg database so subsequent + signature checks can succeed or fail cleanly.""" + keyid = "0x85AB96E6FA1BE5FE" + log("Importing signing key %s..." 
% keyid) + key = b""" +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFJEwMkBEADlPACa2K7reD4x5zd8afKx75QYKmxqZwywRbgeICeD4bKiQoJZ +dUjmn1LgrGaXuBMKXJQhyA34e/1YZel/8et+HPE5XpljBfNYXWbVocE1UMUTnFU9 +CKXa4AhJ33f7we2/QmNRMUifw5adPwGMg4D8cDKXk02NdnqQlmFByv0vSaArR5kn +gZKnLY6o0zZ9Buyy761Im/ShXqv4ATUgYiFc48z33G4j+BDmn0ryGr1aFdP58tHp +gjWtLZs0iWeFNRDYDje6ODyu/MjOyuAWb2pYDH47Xu7XedMZzenH2TLM9yt/hyOV +xReDPhvoGkaO8xqHioJMoPQi1gBjuBeewmFyTSPS4deASukhCFOcTsw/enzJagiS +ZAq6Imehduke+peAL1z4PuRmzDPO2LPhVS7CDXtuKAYqUV2YakTq8MZUempVhw5n +LqVaJ5/XiyOcv405PnkT25eIVVVghxAgyz6bOU/UMjGQYlkUxI7YZ9tdreLlFyPR +OUL30E8q/aCd4PGJV24yJ1uit+yS8xjyUiMKm4J7oMP2XdBN98TUfLGw7SKeAxyU +92BHlxg7yyPfI4TglsCzoSgEIV6xoGOVRRCYlGzSjUfz0bCMCclhTQRBkegKcjB3 +sMTyG3SPZbjTlCqrFHy13e6hGl37Nhs8/MvXUysq2cluEISn5bivTKEeeQARAQAB +tERSdXN0IExhbmd1YWdlIChUYWcgYW5kIFJlbGVhc2UgU2lnbmluZyBLZXkpIDxy +dXN0LWtleUBydXN0LWxhbmcub3JnPokCOAQTAQIAIgUCUkTAyQIbAwYLCQgHAwIG +FQgCCQoLBBYCAwECHgECF4AACgkQhauW5vob5f5fYQ//b1DWK1NSGx5nZ3zYZeHJ +9mwGCftIaA2IRghAGrNf4Y8DaPqR+w1OdIegWn8kCoGfPfGAVW5XXJg+Oxk6QIaD +2hJojBUrq1DALeCZVewzTVw6BN4DGuUexsc53a8DcY2Yk5WE3ll6UKq/YPiWiPNX +9r8FE2MJwMABB6mWZLqJeg4RCrriBiCG26NZxGE7RTtPHyppoVxWKAFDiWyNdJ+3 +UnjldWrT9xFqjqfXWw9Bhz8/EoaGeSSbMIAQDkQQpp1SWpljpgqvctZlc5fHhsG6 +lmzW5RM4NG8OKvq3UrBihvgzwrIfoEDKpXbk3DXqaSs1o81NH5ftVWWbJp/ywM9Q +uMC6n0YWiMZMQ1cFBy7tukpMkd+VPbPkiSwBhPkfZIzUAWd74nanN5SKBtcnymgJ ++OJcxfZLiUkXRj0aUT1GLA9/7wnikhJI+RvwRfHBgrssXBKNPOfXGWajtIAmZc2t +kR1E8zjBVLId7r5M8g52HKk+J+y5fVgJY91nxG0zf782JjtYuz9+knQd55JLFJCO +hhbv3uRvhvkqgauHagR5X9vCMtcvqDseK7LXrRaOdOUDrK/Zg/abi5d+NIyZfEt/ +ObFsv3idAIe/zpU6xa1nYNe3+Ixlb6mlZm3WCWGxWe+GvNW/kq36jZ/v/8pYMyVO +p/kJqnf9y4dbufuYBg+RLqC5Ag0EUkTAyQEQANxy2tTSeRspfrpBk9+ju+KZ3zc4 +umaIsEa5DxJ2zIKHywVAR67Um0K1YRG07/F5+tD9TIRkdx2pcmpjmSQzqdk3zqa9 +2Zzeijjz2RNyBY8qYmyE08IncjTsFFB8OnvdXcsAgjCFmI1BKnePxrABL/2k8X18 +aysPb0beWqQVsi5FsSpAHu6k1kaLKc+130x6Hf/YJAjeo+S7HeU5NeOz3zD+h5bA +Q25qMiVHX3FwH7rFKZtFFog9Ogjzi0TkDKKxoeFKyADfIdteJWFjOlCI9KoIhfXq +Et9JMnxApGqsJElJtfQjIdhMN4Lnep2WkudHAfwJ/412fe7wiW0rcBMvr/BlBGRY +vM4sTgN058EwIuY9Qmc8RK4gbBf6GsfGNJjWozJ5XmXElmkQCAvbQFoAfi5TGfVb +77QQrhrQlSpfIYrvfpvjYoqj618SbU6uBhzh758gLllmMB8LOhxWtq9eyn1rMWyR +KL1fEkfvvMc78zP+Px6yDMa6UIez8jZXQ87Zou9EriLbzF4QfIYAqR9LUSMnLk6K +o61tSFmFEDobC3tc1jkSg4zZe/wxskn96KOlmnxgMGO0vJ7ASrynoxEnQE8k3WwA ++/YJDwboIR7zDwTy3Jw3mn1FgnH+c7Rb9h9geOzxKYINBFz5Hd0MKx7kZ1U6WobW +KiYYxcCmoEeguSPHABEBAAGJAh8EGAECAAkFAlJEwMkCGwwACgkQhauW5vob5f7f +FA//Ra+itJF4NsEyyhx4xYDOPq4uj0VWVjLdabDvFjQtbBLwIyh2bm8uO3AY4r/r +rM5WWQ8oIXQ2vvXpAQO9g8iNlFez6OLzbfdSG80AG74pQqVVVyCQxD7FanB/KGge +tAoOstFxaCAg4nxFlarMctFqOOXCFkylWl504JVIOvgbbbyj6I7qCUmbmqazBSMU +K8c/Nz+FNu2Uf/lYWOeGogRSBgS0CVBcbmPUpnDHLxZWNXDWQOCxbhA1Uf58hcyu +036kkiWHh2OGgJqlo2WIraPXx1cGw1Ey+U6exbtrZfE5kM9pZzRG7ZY83CXpYWMp +kyVXNWmf9JcIWWBrXvJmMi0FDvtgg3Pt1tnoxqdilk6yhieFc8LqBn6CZgFUBk0t +NSaWk3PsN0N6Ut8VXY6sai7MJ0Gih1gE1xadWj2zfZ9sLGyt2jZ6wK++U881YeXA +ryaGKJ8sIs182hwQb4qN7eiUHzLtIh8oVBHo8Q4BJSat88E5/gOD6IQIpxc42iRL +T+oNZw1hdwNyPOT1GMkkn86l3o7klwmQUWCPm6vl1aHp3omo+GHC63PpNFO5RncJ +Ilo3aBKKmoE5lDSMGE8KFso5awTo9z9QnVPkRsk6qeBYit9xE3x3S+iwjcSg0nie +aAkc0N00nc9V9jfPvt4z/5A5vjHh+NhFwH5h2vBJVPdsz6m5Ag0EVI9keAEQAL3R +oVsHncJTmjHfBOV4JJsvCum4DuJDZ/rDdxauGcjMUWZaG338ZehnDqG1Yn/ys7zE +aKYUmqyT+XP+M2IAQRTyxwlU1RsDlemQfWrESfZQCCmbnFScL0E7cBzy4xvtInQe +UaFgJZ1BmxbzQrx+eBBdOTDv7RLnNVygRmMzmkDhxO1IGEu1+3ETIg/DxFE7VQY0 +It/Ywz+nHu1o4Hemc/GdKxu9hcYvcRVc/Xhueq/zcIM96l0m+CFbs0HMKCj8dgMe +Ng6pbbDjNM+cV+5BgpRdIpE2l9W7ImpbLihqcZt47J6oWt/RDRVoKOzRxjhULVyV 
+2VP9ESr48HnbvxcpvUAEDCQUhsGpur4EKHFJ9AmQ4zf91gWLrDc6QmlACn9o9ARU +fOV5aFsZI9ni1MJEInJTP37stz/uDECRie4LTL4O6P4Dkto8ROM2wzZq5CiRNfnT +PP7ARfxlCkpg+gpLYRlxGUvRn6EeYwDtiMQJUQPfpGHSvThUlgDEsDrpp4SQSmdA +CB+rvaRqCawWKoXs0In/9wylGorRUupeqGC0I0/rh+f5mayFvORzwy/4KK4QIEV9 +aYTXTvSRl35MevfXU1Cumlaqle6SDkLr3ZnFQgJBqap0Y+Nmmz2HfO/pohsbtHPX +92SN3dKqaoSBvzNGY5WT3CsqxDtik37kR3f9/DHpABEBAAGJBD4EGAECAAkFAlSP +ZHgCGwICKQkQhauW5vob5f7BXSAEGQECAAYFAlSPZHgACgkQXLSpNHs7CdwemA/+ +KFoGuFqU0uKT9qblN4ugRyil5itmTRVffl4tm5OoWkW8uDnu7Ue3vzdzy+9NV8X2 +wRG835qjXijWP++AGuxgW6LB9nV5OWiKMCHOWnUjJQ6pNQMAgSN69QzkFXVF/q5f +bkma9TgSbwjrVMyPzLSRwq7HsT3V02Qfr4cyq39QeILGy/NHW5z6LZnBy3BaVSd0 +lGjCEc3yfH5OaB79na4W86WCV5n4IT7cojFM+LdL6P46RgmEtWSG3/CDjnJl6BLR +WqatRNBWLIMKMpn+YvOOL9TwuP1xbqWr1vZ66wksm53NIDcWhptpp0KEuzbU0/Dt +OltBhcX8tOmO36LrSadX9rwckSETCVYklmpAHNxPml011YNDThtBidvsicw1vZwR +HsXn+txlL6RAIRN+J/Rw3uOiJAqN9Qgedpx2q+E15t8MiTg/FXtB9SysnskFT/BH +z0USNKJUY0btZBw3eXWzUnZf59D8VW1M/9JwznCHAx0c9wy/gRDiwt9w4RoXryJD +VAwZg8rwByjldoiThUJhkCYvJ0R3xH3kPnPlGXDW49E9R8C2umRC3cYOL4U9dOQ1 +5hSlYydF5urFGCLIvodtE9q80uhpyt8L/5jj9tbwZWv6JLnfBquZSnCGqFZRfXlb +Jphk9+CBQWwiZSRLZRzqQ4ffl4xyLuolx01PMaatkQbRaw/+JpgRNlurKQ0PsTrO +8tztO/tpBBj/huc2DGkSwEWvkfWElS5RLDKdoMVs/j5CLYUJzZVikUJRm7m7b+OA +P3W1nbDhuID+XV1CSBmGifQwpoPTys21stTIGLgznJrIfE5moFviOLqD/LrcYlsq +CQg0yleu7SjOs//8dM3mC2FyLaE/dCZ8l2DCLhHw0+ynyRAvSK6aGCmZz6jMjmYF +MXgiy7zESksMnVFMulIJJhR3eB0wx2GitibjY/ZhQ7tD3i0yy9ILR07dFz4pgkVM +afxpVR7fmrMZ0t+yENd+9qzyAZs0ksxORoc2ze90SCx2jwEX/3K+m4I0hP2H/w5W +gqdvuRLiqf+4BGW4zqWkLLlNIe/okt0r82SwHtDN0Ui1asmZTGj6sm8SXtwx+5cE +38MttWqjDiibQOSthRVcETByRYM8KcjYSUCi4PoBc3NpDONkFbZm6XofR/f5mTcl +2jDw6fIeVc4Hd1jBGajNzEqtneqqbdAkPQaLsuD2TMkQfTDJfE/IljwjrhDa9Mi+ +odtnMWq8vlwOZZ24/8/BNK5qXuCYL67O7AJB4ZQ6BT+g4z96iRLbupzu/XJyXkQF +rOY/Ghegvn7fDrnt2KC9MpgeFBXzUp+k5rzUdF8jbCx5apVjA1sWXB9Kh3L+DUwF +Mve696B5tlHyc1KxjHR6w9GRsh4= +=5FXw +-----END PGP PUBLIC KEY BLOCK----- +""" + check_call_with_input(["gpg", "--import"], key) + check_call_with_input( + ["gpg", "--command-fd", "0", "--edit-key", keyid], b"trust\n5\ny\n" + ) + + +def verify_sha(filename, sha): + """Verify that the checksum file matches the given sha digest.""" + sha_filename = filename + ".sha256" + with open(sha_filename) as f: + # Older sha256 files would contain `sha filename`, but more recent + # ones only contain `sha`. + checksum = f.readline().split()[0] + if checksum != sha: + raise ValueError("Checksum mismatch in %s" % filename) + return True + log("No checksum file for %s!" % filename) + return False + + +def fetch(url, validate=True): + """Download and verify a package url.""" + base = os.path.basename(url) + log("Fetching %s..." % base) + if validate: + fetch_file(url + ".asc") + fetch_file(url + ".sha256") + sha = fetch_file(url) + if validate: + log("Verifying %s..." % base) + verify_sha(base, sha) + subprocess.check_call( + ["gpg", "--keyid-format", "0xlong", "--verify", base + ".asc", base] + ) + return sha + + +def install(filename, target): + """Run a package's installer script against the given target directory.""" + log("Unpacking %s..." % filename) + subprocess.check_call(["tar", "xf", filename]) + basename = filename.split(".tar")[0] + log("Installing %s..." % basename) + install_cmd = [os.path.join(basename, "install.sh")] + install_cmd += ["--prefix=" + os.path.abspath(target)] + install_cmd += ["--disable-ldconfig"] + subprocess.check_call(install_cmd) + log("Cleaning %s..." 
% basename) + shutil.rmtree(basename) + + +def package(manifest, pkg, target): + """Pull out the package dict for a particular package and target + from the given manifest.""" + version = manifest["pkg"][pkg]["version"] + if target in manifest["pkg"][pkg]["target"]: + info = manifest["pkg"][pkg]["target"][target] + else: + # rust-src is the same for all targets, and has a literal '*' in the + # section key/name instead of a target + info = manifest["pkg"][pkg]["target"]["*"] + if "xz_url" in info: + info["url"] = info.pop("xz_url") + info["hash"] = info.pop("xz_hash") + return (version, info) + + +def fetch_package(manifest, pkg, host): + version, info = package(manifest, pkg, host) + if not info["available"]: + log("%s marked unavailable for %s" % (pkg, host)) + raise KeyError + + log("%s %s\n %s\n %s" % (pkg, version, info["url"], info["hash"])) + sha = fetch(info["url"], info["hash"] is not None) + if info["hash"] and sha != info["hash"]: + log( + "Checksum mismatch: package resource is different from manifest" + "\n %s" % sha + ) + raise AssertionError + return info + + +def fetch_std(manifest, targets): + stds = [] + for target in targets: + stds.append(fetch_package(manifest, "rust-std", target)) + analysis = fetch_optional(manifest, "rust-analysis", target) + if analysis: + stds.append(analysis) + else: + log(f"Missing rust-analysis for {target}") + # If it's missing for one of the searchfox targets, explicitly + # error out. + if target in ( + "x86_64-unknown-linux-gnu", + "x86_64-apple-darwin", + "x86_64-pc-windows-msvc", + "thumbv7neon-linux-androideabi", + ): + raise AssertionError + + return stds + + +def fetch_optional(manifest, pkg, host): + try: + return fetch_package(manifest, pkg, host) + except KeyError: + # The package is not available, oh well! 
+ return None + + +@contextmanager +def chdir(path): + d = os.getcwd() + log('cd "%s"' % path) + os.chdir(path) + try: + yield + finally: + log('cd "%s"' % d) + os.chdir(d) + + +def build_tar_package(name, base, directory): + name = os.path.realpath(name) + log("tarring {} from {}/{}".format(name, base, directory)) + assert name.endswith(".tar.zst") + + cctx = zstandard.ZstdCompressor() + with open(name, "wb") as f, cctx.stream_writer(f) as z: + with tarfile.open(mode="w|", fileobj=z) as tf: + with chdir(base): + tf.add(directory) + + +def fetch_manifest(channel="stable", host=None, targets=()): + if channel.startswith("bors-"): + assert host + rev = channel[len("bors-") :] + base_url = "https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rustc-builds" + manifest = { + "date": "some date", + "pkg": {}, + } + + def target(url): + return { + "url": url, + "hash": None, + "available": requests.head(url).status_code == 200, + } + + for pkg in ( + "cargo", + "rustc", + "rustfmt-preview", + "clippy-preview", + "rust-analyzer-preview", + ): + manifest["pkg"][pkg] = { + "version": "bors", + "target": { + host: target( + "{}/{}/{}-nightly-{}.tar.xz".format(base_url, rev, pkg, host) + ), + }, + } + manifest["pkg"]["rust-src"] = { + "version": "bors", + "target": { + "*": target("{}/{}/rust-src-nightly.tar.xz".format(base_url, rev)), + }, + } + for pkg in ("rust-std", "rust-analysis"): + manifest["pkg"][pkg] = { + "version": "bors", + "target": { + t: target( + "{}/{}/{}-nightly-{}.tar.xz".format(base_url, rev, pkg, t) + ) + for t in sorted(set(targets) | set([host])) + }, + } + return manifest + if "-" in channel: + channel, date = channel.split("-", 1) + prefix = "/" + date + else: + prefix = "" + url = "https://static.rust-lang.org/dist%s/channel-rust-%s.toml" % (prefix, channel) + req = requests.get(url) + req.raise_for_status() + manifest = toml.loads(req.text) + if manifest["manifest-version"] != "2": + raise NotImplementedError( + "Unrecognized manifest version %s." % manifest["manifest-version"] + ) + return manifest + + +def patch_src(patch, module): + log("Patching Rust src... {} with {}".format(module, patch)) + patch = os.path.realpath(patch) + subprocess.check_call(["patch", "-d", module, "-p1", "-i", patch, "--fuzz=0", "-s"]) + + +def build_src(install_dir, host, targets, patches): + install_dir = os.path.abspath(install_dir) + fetches = os.environ["MOZ_FETCHES_DIR"] + rust_dir = os.path.join(fetches, "rust") + patch_dir = os.path.join(os.environ["GECKO_PATH"], "build", "build-rust") + + # Clear and remake any previous install directory. + try: + shutil.rmtree(install_dir) + except OSError as e: + if e.errno != errno.ENOENT: + raise + os.makedirs(install_dir) + + # Patch the src (see the --patch flag's description for details) + for p in patches: + module, colon, file = p.partition(":") + if not colon: + module, file = "", p + patch_file = os.path.join(patch_dir, file) + patch_module = os.path.join(rust_dir, module) + patch_src(patch_file, patch_module) + + log("Building Rust...") + + example_config = "" + for example_toml in ("config.example.toml", "config.toml.example"): + path = os.path.join(rust_dir, example_toml) + if os.path.exists(path): + with open(path) as file: + example_config = file.read() + break + + if "ignore-git" in example_config: + omit_git_hash = "ignore-git" + else: + omit_git_hash = "omit-git-hash" + + # Rust builds are configured primarily through a config.toml file. + # + # `sysconfdir` is overloaded to be relative instead of absolute. 
+ # This is the default of `install.sh`, but for whatever reason + # `x.py install` has its own default of `/etc` which we don't want. + # + # `missing-tools` is set so `rustfmt` is allowed to fail. This means + # we can "succeed" at building Rust while failing to build, say, Cargo. + # Ideally the build system would have better granularity: + # https://github.com/rust-lang/rust/issues/79249 + base_config = textwrap.dedent( + """ + [build] + docs = false + sanitizers = true + extended = true + tools = ["analysis", "cargo", "rustfmt", "clippy", "src", "rust-analyzer"] + cargo-native-static = true + + [rust] + {omit_git_hash} = false + use-lld = true + + [install] + prefix = "{prefix}" + sysconfdir = "etc" + + [dist] + missing-tools = true + + [llvm] + download-ci-llvm = false + """.format( + prefix=install_dir, + omit_git_hash=omit_git_hash, + ) + ) + + # Rust requires these to be specified per-target + target_config = textwrap.dedent( + """ + [target.{target}] + cc = "clang" + cxx = "clang++" + linker = "clang" + + """ + ) + + final_config = base_config + for target in sorted(set(targets) | set([host])): + final_config = final_config + target_config.format(target=target) + + with open(os.path.join(rust_dir, "config.toml"), "w") as file: + file.write(final_config) + + # Setup the env so compilers and toolchains are visible + clang = os.path.join(fetches, "clang") + clang_bin = os.path.join(clang, "bin") + clang_lib = os.path.join(clang, "lib") + sysroot = os.path.join(fetches, "sysroot") + + # The rust build doesn't offer much in terms of overriding compiler flags + # when it builds LLVM's compiler-rt, but we want to build with a sysroot. + # So, we create wrappers for clang and clang++ that add the sysroot to the + # command line. + with tempfile.TemporaryDirectory() as tmpdir: + for exe in ("clang", "clang++"): + tmp_exe = os.path.join(tmpdir, exe) + with open(tmp_exe, "w") as fh: + fh.write("#!/bin/sh\n") + fh.write(f'exec {clang_bin}/{exe} --sysroot={sysroot} "$@"\n') + os.chmod(tmp_exe, 0o755) + + env = os.environ.copy() + env.update( + { + "PATH": os.pathsep.join((tmpdir, clang_bin, os.environ["PATH"])), + "LD_LIBRARY_PATH": clang_lib, + } + ) + + # x.py install does everything we need for us. + # If you're running into issues, consider using `-vv` to debug it. + command = ["python3", "x.py", "install", "-v", "--host", host] + for target in targets: + command.extend(["--target", target]) + + subprocess.check_call(command, stderr=subprocess.STDOUT, env=env, cwd=rust_dir) + + +def repack( + host, + targets, + channel="stable", + cargo_channel=None, + patches=[], +): + install_dir = "rustc" + if channel == "dev": + build_src(install_dir, host, targets, patches) + else: + if patches: + raise ValueError( + 'Patch specified, but channel "%s" is not "dev"!' + "\nPatches are only for building from source." % channel + ) + log("Repacking rust for %s supporting %s..." % (host, targets)) + manifest = fetch_manifest(channel, host, targets) + log("Using manifest for rust %s as of %s." % (channel, manifest["date"])) + if cargo_channel == channel: + cargo_manifest = manifest + else: + cargo_manifest = fetch_manifest(cargo_channel, host, targets) + log( + "Using manifest for cargo %s as of %s." 
+ % (cargo_channel, cargo_manifest["date"]) + ) + + log("Fetching packages...") + rustc = fetch_package(manifest, "rustc", host) + cargo = fetch_package(cargo_manifest, "cargo", host) + stds = fetch_std(manifest, targets) + rustsrc = fetch_package(manifest, "rust-src", host) + rustfmt = fetch_optional(manifest, "rustfmt-preview", host) + clippy = fetch_optional(manifest, "clippy-preview", host) + rust_analyzer = fetch_optional(manifest, "rust-analyzer-preview", host) + + log("Installing packages...") + + # Clear any previous install directory. + try: + shutil.rmtree(install_dir) + except OSError as e: + if e.errno != errno.ENOENT: + raise + install(os.path.basename(rustc["url"]), install_dir) + install(os.path.basename(cargo["url"]), install_dir) + install(os.path.basename(rustsrc["url"]), install_dir) + if rustfmt: + install(os.path.basename(rustfmt["url"]), install_dir) + if clippy: + install(os.path.basename(clippy["url"]), install_dir) + if rust_analyzer: + install(os.path.basename(rust_analyzer["url"]), install_dir) + for std in stds: + install(os.path.basename(std["url"]), install_dir) + pass + + log("Creating archive...") + tar_file = install_dir + ".tar.zst" + build_tar_package(tar_file, ".", install_dir) + shutil.rmtree(install_dir) + log("%s is ready." % tar_file) + + upload_dir = os.environ.get("UPLOAD_DIR") + if upload_dir: + # Create the upload directory if it doesn't exist. + try: + log("Creating upload directory in %s..." % os.path.abspath(upload_dir)) + os.makedirs(upload_dir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + # Move the tarball to the output directory for upload. + log("Moving %s to the upload directory..." % tar_file) + shutil.move(tar_file, upload_dir) + + +def expand_platform(name): + """Expand a shortcut name to a full Rust platform string.""" + platforms = { + "android": "armv7-linux-androideabi", + "android_x86": "i686-linux-android", + "android_x86-64": "x86_64-linux-android", + "android_aarch64": "aarch64-linux-android", + "linux64": "x86_64-unknown-linux-gnu", + "linux32": "i686-unknown-linux-gnu", + "mac": "x86_64-apple-darwin", + "macos": "x86_64-apple-darwin", + "mac64": "x86_64-apple-darwin", + "mac32": "i686-apple-darwin", + "win64": "x86_64-pc-windows-msvc", + "win32": "i686-pc-windows-msvc", + "mingw32": "i686-pc-windows-gnu", + } + return platforms.get(name, name) + + +def validate_channel(channel): + """Require a specific release version. + + Packaging from meta-channels, like `stable`, `beta`, or `nightly` + doesn't give repeatable output. Reject such channels.""" + channel_prefixes = ("stable", "beta", "nightly") + if any([channel.startswith(c) for c in channel_prefixes]): + if "-" not in channel: + raise ValueError( + 'Generic channel "%s" specified!' + "\nPlease give a specific release version" + ' like "1.24.0" or "beta-2018-02-20".' % channel + ) + + +def args(): + """Read command line arguments and return options.""" + parser = argparse.ArgumentParser() + parser.add_argument( + "--channel", + help="Release channel to use:" + " 1.xx.y, beta-yyyy-mm-dd," + " nightly-yyyy-mm-dd," + " bors-$rev (grab a build from rust's CI)," + " or dev (build from source).", + required=True, + ) + parser.add_argument( + "--patch", + dest="patches", + action="append", + default=[], + help="apply the given patch file to a dev build." + " Patch files should be placed in /build/build-rust." + " Patches can be prefixed with `module-path:` to specify they" + " apply to that git submodule in the Rust source." + " e.g. 
`src/llvm-project:mypatch.diff` patches rust's llvm." + " Can be given more than once.", + ) + parser.add_argument( + "--cargo-channel", + help="Release channel version to use for cargo." + " Defaults to the same as --channel.", + ) + parser.add_argument( + "--host", + help="Host platform for the toolchain executable:" + " e.g. linux64 or aarch64-linux-android." + " Defaults to linux64.", + ) + parser.add_argument( + "--target", + dest="targets", + action="append", + default=[], + help="Additional target platform to support:" + " e.g. linux32 or i686-pc-windows-gnu." + " can be given more than once.", + ) + args = parser.parse_args() + if not args.cargo_channel: + args.cargo_channel = args.channel + validate_channel(args.channel) + validate_channel(args.cargo_channel) + if not args.host: + args.host = "linux64" + args.host = expand_platform(args.host) + args.targets = [expand_platform(t) for t in args.targets] + + return args + + +if __name__ == "__main__": + args = vars(args()) + setup_gpg() + repack(**args) diff --git a/taskcluster/scripts/misc/run-profileserver-macos.sh b/taskcluster/scripts/misc/run-profileserver-macos.sh new file mode 100755 index 0000000000..61873a273a --- /dev/null +++ b/taskcluster/scripts/misc/run-profileserver-macos.sh @@ -0,0 +1,20 @@ +#! /bin/bash -vex +set -x -e + +#### +# Taskcluster friendly wrapper for running the profileserver on macOS +#### + +export UPLOAD_PATH=../../artifacts +mkdir -p $UPLOAD_PATH + +export JARLOG_FILE="en-US.log" + +export LLVM_PROFDATA=$MOZ_FETCHES_DIR/clang/bin/llvm-profdata + +set -v + +./mach python python/mozbuild/mozbuild/action/install.py $MOZ_FETCHES_DIR/target.dmg $MOZ_FETCHES_DIR +./mach python build/pgo/profileserver.py --binary $MOZ_FETCHES_DIR/*.app/Contents/MacOS/firefox + +tar -Jcvf $UPLOAD_PATH/profdata.tar.xz merged.profdata en-US.log diff --git a/taskcluster/scripts/misc/run-profileserver.sh b/taskcluster/scripts/misc/run-profileserver.sh new file mode 100755 index 0000000000..dd0ad31f05 --- /dev/null +++ b/taskcluster/scripts/misc/run-profileserver.sh @@ -0,0 +1,42 @@ +#! /bin/bash -vex + +set -x -e + +echo "running as" $(id) + +: NEED_XVFB ${NEED_XVFB:=false} +: UPLOAD_PATH ${UPLOAD_PATH:=$HOME/artifacts} +export UPLOAD_PATH + +#### +# Taskcluster friendly wrapper for running the profileserver +#### + +PGO_RUNDIR=obj-firefox/dist +export JARLOG_FILE="en-US.log" +export LLVM_PROFDATA=$MOZ_FETCHES_DIR/clang/bin/llvm-profdata + +set -v + +if $NEED_XVFB; then + # run XVfb in the background + . /builds/worker/scripts/xvfb.sh + + cleanup() { + local rv=$? + cleanup_xvfb + exit $rv + } + trap cleanup EXIT INT + + start_xvfb '1024x768x24' 2 +fi + +# Move our fetched firefox into objdir/dist so the jarlog entries will match +# the paths when the final PGO stage packages the build. 
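+# (Summary of the steps below, for orientation: the instrumented firefox
+# fetched into $MOZ_FETCHES_DIR is moved under $PGO_RUNDIR, profileserver.py
+# drives it to produce merged.profdata and the en-US.log jarlog, and both are
+# packed into profdata.tar.xz for the final PGO build stage.)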
+mkdir -p $PGO_RUNDIR
+mkdir -p $UPLOAD_PATH
+mv $MOZ_FETCHES_DIR/firefox $PGO_RUNDIR
+./mach python build/pgo/profileserver.py --binary $PGO_RUNDIR/firefox/firefox
+
+tar -acvf $UPLOAD_PATH/profdata.tar.xz merged.profdata en-US.log
diff --git a/taskcluster/scripts/misc/source-test-clang-setup.sh b/taskcluster/scripts/misc/source-test-clang-setup.sh
new file mode 100755
index 0000000000..5388b6376e
--- /dev/null
+++ b/taskcluster/scripts/misc/source-test-clang-setup.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+source $HOME/checkouts/gecko/taskcluster/scripts/misc/source-test-common.sh
+
+# Add clang-tidy to PATH
+export PATH=$MOZ_FETCHES_DIR/clang-tidy/bin:$PATH
+
+# Use toolchain clang
+export LD_LIBRARY_PATH=$MOZ_FETCHES_DIR/clang/lib
+
+# Write custom mozconfig
+export MOZCONFIG=$GECKO_PATH/mozconfig
+
+# Add to mozconfig all the appropriate options
+cat <<EOT >> $MOZCONFIG
+# Enable debug mode
+ac_add_options --enable-debug
+# Enable clang-plugin in order to have all defines activated for static-analysis
+ac_add_options --enable-clang-plugin
+# Enable GC zeal, a testing and debugging feature that helps find GC-related bugs in JSAPI applications.
+ac_add_options --enable-gczeal
+# Do not treat warnings as errors
+ac_add_options --disable-warnings-as-errors
+EOT
+
+# Mach looks up clang-tidy in clang-tools
+mkdir -p $MOZBUILD_STATE_PATH/clang-tools
+ln -s $MOZ_FETCHES_DIR/clang-tidy $MOZBUILD_STATE_PATH/clang-tools/clang-tidy
diff --git a/taskcluster/scripts/misc/source-test-common.sh b/taskcluster/scripts/misc/source-test-common.sh
new file mode 100755
index 0000000000..eb2409cf4e
--- /dev/null
+++ b/taskcluster/scripts/misc/source-test-common.sh
@@ -0,0 +1,16 @@
+#! /bin/bash -vex
+
+set -x -e
+
+export MOZBUILD_STATE_PATH=$HOME/workspace
+
+# Add toolchain binaries to PATH to run ./mach configure
+export PATH=$MOZ_FETCHES_DIR/clang/bin:$PATH
+export PATH=$MOZ_FETCHES_DIR/rustc/bin:$PATH
+export PATH=$MOZ_FETCHES_DIR/cbindgen:$PATH
+export PATH=$MOZ_FETCHES_DIR/nasm:$PATH
+export PATH=$MOZ_FETCHES_DIR/node/bin:$PATH
+
+# Use clang as host compiler
+export CC=$MOZ_FETCHES_DIR/clang/bin/clang
+export CXX=$MOZ_FETCHES_DIR/clang/bin/clang++
diff --git a/taskcluster/scripts/misc/source-test-infer-setup.sh b/taskcluster/scripts/misc/source-test-infer-setup.sh
new file mode 100755
index 0000000000..57786013c5
--- /dev/null
+++ b/taskcluster/scripts/misc/source-test-infer-setup.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+source $GECKO_PATH/taskcluster/scripts/misc/source-test-common.sh
+
+# Write custom mozconfig
+MOZCONFIG=$GECKO_PATH/mozconfig
+echo "ac_add_options --enable-project=mobile/android" > $MOZCONFIG
+echo "ac_add_options --target=arm-linux-androideabi" >> $MOZCONFIG
+echo "ac_add_options --with-android-sdk=${MOZ_FETCHES_DIR}/android-sdk-linux" >> $MOZCONFIG
+echo "ac_add_options --with-android-ndk=${MOZ_FETCHES_DIR}/android-ndk" >> $MOZCONFIG
+
+# Write custom gradle properties
+export GRADLE_USER_HOME=$HOME/workspace/gradle
+mkdir -p $GRADLE_USER_HOME
+echo "org.gradle.daemon=false" >> ${GRADLE_USER_HOME}/gradle.properties
+
+# Mach looks up infer in infer...
+mkdir -p $MOZBUILD_STATE_PATH/infer/infer
+mv $MOZ_FETCHES_DIR/infer/{bin,lib} $MOZBUILD_STATE_PATH/infer/infer
diff --git a/taskcluster/scripts/misc/summarize-tgdiff.py b/taskcluster/scripts/misc/summarize-tgdiff.py
new file mode 100644
index 0000000000..68db9bbdc7
--- /dev/null
+++ b/taskcluster/scripts/misc/summarize-tgdiff.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import json
+import pathlib
+
+
+def filter_changes(line):
+    # Skip diff headers
+    if line.startswith("---") or line.startswith("+++"):
+        return False
+
+    # Only count lines that changed
+    return line.startswith("-") or line.startswith("+")
+
+
+def run():
+    parser = argparse.ArgumentParser(
+        description="Classify output of taskgraph for CI analysis"
+    )
+    parser.add_argument(
+        "path",
+        type=pathlib.Path,
+        help="Folder containing all the TXT files from taskgraph target.",
+    )
+    parser.add_argument(
+        "threshold",
+        type=int,
+        help="Minimum number of lines to trigger a warning on taskgraph output.",
+    )
+    args = parser.parse_args()
+
+    out = {"files": {}, "status": "OK", "threshold": args.threshold}
+    for path in args.path.glob("*.txt"):
+        with path.open() as f:
+            nb = len(list(filter(filter_changes, f.readlines())))
+
+        out["files"][path.stem] = {
+            "nb": nb,
+            "status": "WARNING" if nb >= args.threshold else "OK",
+        }
+
+        if nb >= args.threshold:
+            out["status"] = "WARNING"
+
+    (args.path / "summary.json").write_text(json.dumps(out, sort_keys=True, indent=4))
+
+
+if __name__ == "__main__":
+    run()
diff --git a/taskcluster/scripts/misc/tooltool-download.sh b/taskcluster/scripts/misc/tooltool-download.sh
new file mode 100644
index 0000000000..5f971d725a
--- /dev/null
+++ b/taskcluster/scripts/misc/tooltool-download.sh
@@ -0,0 +1,21 @@
+# Fetch a tooltool manifest.
+
+cd $MOZ_FETCHES_DIR
+
+TOOLTOOL_DL_FLAGS=
+
+if [ -n "$UPLOAD_DIR" ]; then
+    TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --artifact-manifest $UPLOAD_DIR/toolchains.json"
+fi
+
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
+export TOOLTOOL_CACHE
+
+if [ -z "$TOOLTOOL_MANIFEST" ]; then
+    echo This script should not be used when there is no tooltool manifest set
+    exit 1
+fi
+
+${GECKO_PATH}/mach artifact toolchain -v${TOOLTOOL_DL_FLAGS} --tooltool-manifest "${GECKO_PATH}/${TOOLTOOL_MANIFEST}"${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}} --retry 5
+
+cd $OLDPWD
diff --git a/taskcluster/scripts/misc/unify.sh b/taskcluster/scripts/misc/unify.sh
new file mode 100755
index 0000000000..85c57667d8
--- /dev/null
+++ b/taskcluster/scripts/misc/unify.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
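+# (Overview, inferred from the commands below: unpack the x64 and aarch64
+# target.dmg artifacts, merge the two .app bundles into a universal build
+# with unify.py and the cctools lipo, repack a single target.dmg, then unify
+# the matching per-architecture test and symbol archives from MOZ_FETCHES.)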
+ +set -x -e + +export LIPO=$MOZ_FETCHES_DIR/cctools/bin/x86_64-apple-darwin-lipo + +for i in x64 aarch64; do + $GECKO_PATH/mach python -m mozbuild.action.unpack_dmg $MOZ_FETCHES_DIR/$i/target.dmg $i +done +$GECKO_PATH/mach python $GECKO_PATH/toolkit/mozapps/installer/unify.py x64/*.app aarch64/*.app +$GECKO_PATH/mach python -m mozbuild.action.make_dmg x64 target.dmg + +mkdir -p $UPLOAD_DIR +mv target.dmg $UPLOAD_DIR/ + +python3 -c ' +import json +import os + +for artifact in json.loads(os.environ["MOZ_FETCHES"]): + if artifact.get("extract") and artifact.get("dest", "").startswith("x64"): + print(artifact["dest"], os.path.basename(artifact["artifact"])) +' | while read dir artifact; do + if [ "$artifact" = target.crashreporter-symbols.zip ]; then + $GECKO_PATH/mach python $GECKO_PATH/python/mozbuild/mozbuild/action/unify_symbols.py $MOZ_FETCHES_DIR/$dir $MOZ_FETCHES_DIR/aarch64${dir#x64} + else + $GECKO_PATH/mach python $GECKO_PATH/python/mozbuild/mozbuild/action/unify_tests.py $MOZ_FETCHES_DIR/$dir $MOZ_FETCHES_DIR/aarch64${dir#x64} + fi + + case $artifact in + *.tar.gz) + find $MOZ_FETCHES_DIR/$dir -not -type d -printf '%P\0' | tar -C $MOZ_FETCHES_DIR/$dir --owner=0:0 --group=0:0 -zcf $artifact --no-recursion --null -T - + ;; + *.zip) + $GECKO_PATH/mach python $GECKO_PATH/python/mozbuild/mozbuild/action/zip.py -C $MOZ_FETCHES_DIR/$dir $PWD/$artifact '*' + ;; + esac + mv $artifact $UPLOAD_DIR/ +done diff --git a/taskcluster/scripts/misc/unpack-sdk.py b/taskcluster/scripts/misc/unpack-sdk.py new file mode 100644 index 0000000000..e17f98d657 --- /dev/null +++ b/taskcluster/scripts/misc/unpack-sdk.py @@ -0,0 +1,105 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import hashlib +import os +import shutil +import stat +import sys +import tempfile +from io import BytesIO +from urllib.request import urlopen + +from mozpack.macpkg import Pbzx, uncpio, unxar + + +def unpack_sdk(url, sha512, extract_prefix, out_dir="."): + if "MOZ_AUTOMATION" in os.environ: + url = f"http://taskcluster/tooltool.mozilla-releng.net/sha512/{sha512}" + with tempfile.TemporaryFile() as pkg: + hash = hashlib.sha512() + for attempt in range(3): + if attempt != 0: + print(f"Failed to download from {url}. Retrying", file=sys.stderr) + + with urlopen(url) as fh: + # Equivalent to shutil.copyfileobj, but computes sha512 at the same time. + while True: + buf = fh.read(1024 * 1024) + if not buf: + break + hash.update(buf) + pkg.write(buf) + digest = hash.hexdigest() + if digest == sha512: + break + else: + raise Exception(f"(actual) {digest} != (expected) {sha512}") + + pkg.seek(0, os.SEEK_SET) + + for name, content in unxar(pkg): + if name in ("Payload", "Content"): + extract_payload(content, extract_prefix, out_dir) + + +def extract_payload(fileobj, extract_prefix, out_dir="."): + hardlinks = {} + for path, st, content in uncpio(Pbzx(fileobj)): + if not path: + continue + path = path.decode() + matches = path.startswith(extract_prefix) + if matches: + path = os.path.join(out_dir, path[len(extract_prefix) :].lstrip("/")) + + # When there are hardlinks, normally a cpio stream is supposed to + # contain the data for all of them, but, even with compression, that + # can be a waste of space, so in some cpio streams (*cough* *cough*, + # Apple's, e.g. in Xcode), the files after the first one have dummy + # data. 
+ # As we may be filtering the first file out (if it doesn't match + # extract_prefix), we need to keep its data around (we're not going + # to be able to rewind). + if stat.S_ISREG(st.mode) and st.nlink > 1: + key = (st.dev, st.ino) + hardlink = hardlinks.get(key) + if hardlink: + hardlink[0] -= 1 + if hardlink[0] == 0: + del hardlinks[key] + content = hardlink[1] + if isinstance(content, BytesIO): + content.seek(0) + if matches: + hardlink[1] = path + elif matches: + hardlink = hardlinks[key] = [st.nlink - 1, path] + else: + hardlink = hardlinks[key] = [st.nlink - 1, BytesIO(content.read())] + content = hardlink[1] + + if not matches: + continue + if stat.S_ISDIR(st.mode): + os.makedirs(path, exist_ok=True) + else: + parent = os.path.dirname(path) + if parent: + os.makedirs(parent, exist_ok=True) + + if stat.S_ISLNK(st.mode): + os.symlink(content.read(), path) + elif stat.S_ISREG(st.mode): + if isinstance(content, str): + os.link(content, path) + else: + with open(path, "wb") as out: + shutil.copyfileobj(content, out) + else: + raise Exception(f"File mode {st.mode:o} is not supported") + + +if __name__ == "__main__": + unpack_sdk(*sys.argv[1:]) diff --git a/taskcluster/scripts/misc/verify-devtools-bundle.py b/taskcluster/scripts/misc/verify-devtools-bundle.py new file mode 100644 index 0000000000..901db0eb08 --- /dev/null +++ b/taskcluster/scripts/misc/verify-devtools-bundle.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Check that the current sourcemap and worker bundles built for DevTools are up to date. +This job should fail if any file impacting the bundle creation was modified without +regenerating the bundles. + +This check should be run after building the bundles via: +cd devtools/client/debugger +yarn && node bin/bundle.js + +Those steps are done in the devtools-verify-bundle job, prior to calling this script. +The script will only run `hg status devtools/` and check that no change is detected by +mercurial. +""" + +import argparse +import json +import subprocess +import sys + +# Ignore module-manifest.json updates which can randomly happen when +# building bundles. 
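+# For reference, the JSON written to --output below maps each outdated path
+# to a list with one error entry; an illustrative (hypothetical) report:
+#   {"devtools/client/debugger/dist/vendors.js": [{"path": "...", "line": null,
+#    "column": null, "level": "error", "message": "... is outdated ..."}]}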
+hg_exclude = "devtools/client/debugger/bin/module-manifest.json" + +print("Run `hg status devtools/`") +status = ( + subprocess.check_output(["hg", "status", "-n", "devtools/", "-X", hg_exclude]) + .decode("utf-8") + .split("\n") +) +print(" status:") +print("-" * 80) + +doc = "https://firefox-source-docs.mozilla.org/devtools/tests/node-tests.html#devtools-bundle" + +failures = {} +for l in status: + if not l: + # Ignore empty lines + continue + + failures[l] = [ + { + "path": l, + "line": None, + "column": None, + "level": "error", + "message": l + + " is outdated and needs to be regenerated, " + + f"instructions at: {doc}", + } + ] + + +diff = subprocess.check_output(["hg", "diff", "devtools/", "-X", hg_exclude]).decode( + "utf-8" +) + +# Revert all the changes created by `node bin/bundle.js` +subprocess.check_output(["hg", "revert", "-C", "devtools/"]) + +parser = argparse.ArgumentParser() +parser.add_argument("--output", required=True) +args = parser.parse_args() + +with open(args.output, "w") as fp: + json.dump(failures, fp, indent=2) + +if len(failures) > 0: + print( + "TEST-UNEXPECTED-FAIL | devtools-bundle | DevTools bundles need to be regenerated, " + + f"instructions at: {doc}" + ) + + print("The following devtools bundles were detected as outdated:") + for failure in failures: + print(failure) + + print(f"diff:{diff}") + + sys.exit(1) diff --git a/taskcluster/scripts/misc/vs-cleanup.sh b/taskcluster/scripts/misc/vs-cleanup.sh new file mode 100644 index 0000000000..8bb93b266f --- /dev/null +++ b/taskcluster/scripts/misc/vs-cleanup.sh @@ -0,0 +1,13 @@ +case "$(uname -s)" in +MINGW*|MSYS*) + # For some reason, by the time the task finishes, and when run-task + # starts its cleanup, there is still a vctip.exe (MSVC telemetry-related + # process) running and using a dll that run-task can't then delete. + # "For some reason", because the same doesn't happen with other tasks. + # In fact, this used to happen with older versions of MSVC for other + # tasks, and stopped when upgrading to 15.8.4... + taskkill -f -im vctip.exe || true + # Same with the mspdbsrv process. + taskkill -f -im mspdbsrv.exe || true + ;; +esac diff --git a/taskcluster/scripts/misc/vs-setup.sh b/taskcluster/scripts/misc/vs-setup.sh new file mode 100644 index 0000000000..05eb219794 --- /dev/null +++ b/taskcluster/scripts/misc/vs-setup.sh @@ -0,0 +1,42 @@ +VSDIR=vs +VSPATH="${MOZ_FETCHES_DIR}/${VSDIR}" +UNIX_VSPATH="$(cd ${MOZ_FETCHES_DIR} && pwd)/${VSDIR}" +VCDIR=VC/Tools/MSVC/14.16.27023 +if [ ! -d "${VSPATH}/${VCDIR}" ]; then + VCDIR=VC/Tools/MSVC/14.29.30133 +fi +if [ ! -d "${VSPATH}/${VCDIR}" ]; then + VCDIR=VC/Tools/MSVC/14.38.33130 +fi +SDKDIR="Windows Kits/10" +SDK_VERSION=10.0.17134.0 +if [ ! -d "${VSPATH}/${SDKDIR}/Lib/${SDK_VERSION}" ]; then + SDK_VERSION=10.0.19041.0 +fi +if [ ! -d "${VSPATH}/${SDKDIR}/Lib/${SDK_VERSION}" ]; then + SDK_VERSION=10.0.22621.0 +fi + +case "$TARGET" in +aarch64-pc-windows-msvc) + SDK_CPU=arm64 + ;; +i686-pc-windows-msvc) + SDK_CPU=x86 + ;; +*) + SDK_CPU=x64 + ;; +esac + +CRT_DIR="microsoft.vc141.crt" +if [ ! -d "${UNIX_VSPATH}/redist/${SDK_CPU}/$CRT_DIR" ]; then + CRT_DIR="microsoft.vc142.crt" +fi +if [ ! 
-d "${UNIX_VSPATH}/redist/${SDK_CPU}/$CRT_DIR" ]; then + CRT_DIR="microsoft.vc143.crt" +fi + +export INCLUDE="${VSPATH}/${VCDIR}/include;${VSPATH}/${VCDIR}/atlmfc/include;${VSPATH}/${SDKDIR}/Include/${SDK_VERSION}/ucrt;${VSPATH}/${SDKDIR}/Include/${SDK_VERSION}/shared;${VSPATH}/${SDKDIR}/Include/${SDK_VERSION}/um;${VSPATH}/${SDKDIR}/Include/${SDK_VERSION}/winrt;${VSPATH}/dia sdk/include" +export LIB="${VSPATH}/${VCDIR}/lib/${SDK_CPU};${VSPATH}/${VCDIR}/atlmfc/lib/${SDK_CPU};${VSPATH}/${SDKDIR}/Lib/${SDK_VERSION}/um/${SDK_CPU};${VSPATH}/${SDKDIR}/Lib/${SDK_VERSION}/ucrt/${SDK_CPU};${VSPATH}/dia sdk/lib/amd64" +export PATH="${UNIX_VSPATH}/${VCDIR}/bin/hostx64/${SDK_CPU}:${UNIX_VSPATH}/${VCDIR}/bin/hostx86/x86:${UNIX_VSPATH}/${SDKDIR}/bin/${SDK_VERSION}/${SDK_CPU}:${UNIX_VSPATH}/redist/${SDK_CPU}/$CRT_DIR:${UNIX_VSPATH}/${SDKDIR}/redist/ucrt/dlls/${SDK_CPU}:${UNIX_VSPATH}/dia sdk/bin/amd64:$PATH" diff --git a/taskcluster/scripts/misc/wasi-sdk-11.patch b/taskcluster/scripts/misc/wasi-sdk-11.patch new file mode 100644 index 0000000000..ec5b514d70 --- /dev/null +++ b/taskcluster/scripts/misc/wasi-sdk-11.patch @@ -0,0 +1,14 @@ +# https://github.com/WebAssembly/wasi-sdk/pull/189 +diff --git a/Makefile b/Makefile +index bde9936..b1f24fe 100644 +--- a/Makefile ++++ b/Makefile +@@ -91,7 +91,7 @@ build/wasi-libc.BUILT: build/llvm.BUILT + SYSROOT=$(BUILD_PREFIX)/share/wasi-sysroot + touch build/wasi-libc.BUILT + +-build/compiler-rt.BUILT: build/llvm.BUILT ++build/compiler-rt.BUILT: build/llvm.BUILT build/wasi-libc.BUILT + # Do the build, and install it. + mkdir -p build/compiler-rt + cd build/compiler-rt && cmake -G Ninja \ diff --git a/taskcluster/scripts/misc/wasi-sdk.patch b/taskcluster/scripts/misc/wasi-sdk.patch new file mode 100644 index 0000000000..9f7b6ad908 --- /dev/null +++ b/taskcluster/scripts/misc/wasi-sdk.patch @@ -0,0 +1,45 @@ +diff --git a/Makefile b/Makefile +index d2ccff6..95f395b 100644 +--- a/Makefile ++++ b/Makefile +@@ -178,6 +178,7 @@ LIBCXX_CMAKE_FLAGS = \ + -DLIBCXXABI_BUILD_EXTERNAL_THREAD_LIBRARY:BOOL=OFF \ + -DLIBCXXABI_HAS_WIN32_THREAD_API:BOOL=OFF \ + -DLIBCXXABI_ENABLE_PIC:BOOL=OFF \ ++ -DLIBCXXABI_USE_LLVM_UNWINDER:BOOL=OFF \ + -DWASI_SDK_PREFIX=$(BUILD_PREFIX) \ + -DUNIX:BOOL=ON \ + --debug-trycompile +diff --git a/src/wasi-libc/Makefile b/src/wasi-libc/Makefile +index f350ecb..9a5c940 100644 +--- a/src/wasi-libc/Makefile ++++ b/src/wasi-libc/Makefile +@@ -308,7 +308,7 @@ ASMFLAGS += --target=$(TARGET_TRIPLE) + # TODO: Add -fno-signaling-nans when the compiler supports it. + CFLAGS += -fno-trapping-math + # Add all warnings, but disable a few which occur in third-party code. +-CFLAGS += -Wall -Wextra -Werror \ ++CFLAGS += -Wall -Wextra \ + -Wno-null-pointer-arithmetic \ + -Wno-unused-parameter \ + -Wno-sign-compare \ +@@ -671,6 +671,9 @@ check-symbols: startup_files libc + @# + @# TODO: Filter out __NO_MATH_ERRNO_ and a few __*WIDTH__ that are new to clang 14. + @# TODO: Filter out __GCC_HAVE_SYNC_COMPARE_AND_SWAP_* that are new to clang 16. ++ @# TODO: Filter out __FPCLASS_* that are new to clang 17. ++ @# TODO: Filter out __FLT128_* that are new to clang 18. ++ @# TODO: Filter out __MEMORY_SCOPE_* that are new to clang 18. + @# TODO: clang defined __FLT_EVAL_METHOD__ until clang 15, so we force-undefine it + @# for older versions. 
+ @# TODO: Undefine __wasm_mutable_globals__ and __wasm_sign_ext__, that are new to +@@ -702,6 +703,9 @@ check-symbols: startup_files libc + | sed -e 's/__GNUC_VA_LIST $$/__GNUC_VA_LIST 1/' \ + | grep -v '^#define __\(BOOL\|INT_\(LEAST\|FAST\)\(8\|16\|32\|64\)\|INT\|LONG\|LLONG\|SHRT\)_WIDTH__' \ + | grep -v '^#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_\(1\|2\|4\|8\)' \ ++ | grep -v '^#define __FPCLASS_' \ ++ | grep -v '^#define __FLT128_' \ ++ | grep -v '^#define __MEMORY_SCOPE_' \ + > "$(SYSROOT_SHARE)/predefined-macros.txt" + + # Check that the computed metadata matches the expected metadata. diff --git a/taskcluster/scripts/misc/wr-cargotest-macos-build.sh b/taskcluster/scripts/misc/wr-cargotest-macos-build.sh new file mode 100755 index 0000000000..23b4ccedec --- /dev/null +++ b/taskcluster/scripts/misc/wr-cargotest-macos-build.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -x -e -v + +source ${GECKO_PATH}/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh + +export UPLOAD_DIR="${HOME}/artifacts" +mkdir -p "${UPLOAD_DIR}" + +# Do a cross-build for cargo test run +pushd "${GECKO_PATH}/gfx/wr" +CARGOFLAGS="-vv --frozen --target=${TARGET_TRIPLE}" \ + CARGOTESTFLAGS="--no-run" \ + ci-scripts/macos-debug-tests.sh +# Package up the test binaries +cd "target/${TARGET_TRIPLE}" +mkdir cargo-test-binaries +mv debug cargo-test-binaries/ +find cargo-test-binaries/debug/deps -type f -maxdepth 1 -executable -print0 > binaries.lst +tar cjf cargo-test-binaries.tar.bz2 --null -T binaries.lst +mv cargo-test-binaries.tar.bz2 "${UPLOAD_DIR}" +# Clean the build +cd "${GECKO_PATH}/gfx/wr" +rm -rf target +popd diff --git a/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh b/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh new file mode 100755 index 0000000000..bfed36012c --- /dev/null +++ b/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh @@ -0,0 +1,60 @@ +#!/bin/bash +set -x -e -v + +export TARGET_TRIPLE="x86_64-apple-darwin" + +MACOS_SYSROOT="${MOZ_FETCHES_DIR}/MacOSX14.2.sdk" +CLANGDIR="${MOZ_FETCHES_DIR}/clang" + +# Deploy the wrench dependencies +mv ${MOZ_FETCHES_DIR}/wrench-deps/{vendor,.cargo} "${GECKO_PATH}/gfx/wr/" + +# Building wrench with the `headless` feature also builds the osmesa-src crate, +# which includes building C++ code. We have to do a bunch of shenanigans +# to make this cross-compile properly. + +pushd "${MOZ_FETCHES_DIR}/clang/bin" + +# Add a pkg-config cross-compile wrapper. Without this, the configure script +# will use pkg-config from the host, which will find host libraries that are +# not what we want. This script stolen from +# https://autotools.io/pkgconfig/cross-compiling.html +cat > ${TARGET_TRIPLE}-pkg-config < ${MOZ_FETCHES_DIR}/clang-mac/clang/bin/llvm-config < .cargo/config +mkdir wrench-deps +mv vendor .cargo wrench-deps/ + +ci-scripts/install-meson.sh +mv meson wrench-deps/meson + +mkdir -p $UPLOAD_DIR +tar caf $UPLOAD_DIR/wrench-deps.tar.zst wrench-deps diff --git a/taskcluster/scripts/misc/wrench-macos-build.sh b/taskcluster/scripts/misc/wrench-macos-build.sh new file mode 100755 index 0000000000..73835b7417 --- /dev/null +++ b/taskcluster/scripts/misc/wrench-macos-build.sh @@ -0,0 +1,59 @@ +#!/bin/bash +set -x -e -v + +source ${GECKO_PATH}/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh + +# The osmesa-src build which we do as part of the headless build below +# doesn't seem to always use CFLAGS/CXXFLAGS where expected. Instead we +# just squash those flags into CC/CXX and everything works out. 
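+# (A note on the idiom below: ${!CFLAGS_VAR} is bash indirect expansion; with
+# TARGET_TRIPLE=x86_64-apple-darwin, CFLAGS_VAR names the per-target variable
+# CFLAGS_x86_64_apple_darwin, whose value is folded into CC and then cleared.)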
+# Export HOST_CC and HOST_CXX without the squashed flags, so that host +# builds use them and don't see the target flags. +export HOST_CC="${CC}" +export HOST_CXX="${CXX}" +CFLAGS_VAR="CFLAGS_${TARGET_TRIPLE//-/_}" +CXXFLAGS_VAR="CXXFLAGS_${TARGET_TRIPLE//-/_}" +export CC="${CC} ${!CFLAGS_VAR}" +export ${CFLAGS_VAR}= +export CXX="${CXX} ${!CXXFLAGS_VAR}" +export ${CXXFLAGS_VAR}= + +export MESON_CROSSFILE=${GECKO_PATH}/gfx/wr/ci-scripts/etc/wr-darwin.meson +export UPLOAD_DIR="${HOME}/artifacts" +mkdir -p "${UPLOAD_DIR}" + +# Do a cross-build without the `headless` feature +pushd "${GECKO_PATH}/gfx/wr/wrench" +python3 -m pip install -r ../ci-scripts/requirements.txt +cargo build --release -vv --frozen --target=${TARGET_TRIPLE} +# Package up the resulting wrench binary +cd "../target/${TARGET_TRIPLE}" +mkdir -p wrench-macos/bin +mv release/wrench wrench-macos/bin/ +tar cjf wrench-macos.tar.bz2 wrench-macos +mv wrench-macos.tar.bz2 "${UPLOAD_DIR}" +# Clean the build +cd "${GECKO_PATH}/gfx/wr" +rm -rf target +popd + +# Do a cross-build with the `headless` feature +pushd "${GECKO_PATH}/gfx/wr/wrench" +cargo build --release -vv --frozen --target=${TARGET_TRIPLE} --features headless +# Package up the wrench binary and some libraries that we will need +cd "../target/${TARGET_TRIPLE}" + +# Copy the native macOS libLLVM as dynamic dependency +cp "${MOZ_FETCHES_DIR}/clang-mac/clang/lib/libLLVM.dylib" release/build/osmesa-src*/out/mesa/src/gallium/targets/osmesa/ + +mkdir wrench-macos-headless +mv release wrench-macos-headless/ +tar cjf wrench-macos-headless.tar.bz2 \ + wrench-macos-headless/release/wrench \ + wrench-macos-headless/release/build/osmesa-src*/out/mesa/src/gallium/targets/osmesa \ + wrench-macos-headless/release/build/osmesa-src*/out/mesa/src/mapi/shared-glapi +mv wrench-macos-headless.tar.bz2 "${UPLOAD_DIR}" + +# Clean the build +cd "${GECKO_PATH}/gfx/wr" +rm -rf target +popd diff --git a/taskcluster/scripts/misc/wrench-windows-tests.sh b/taskcluster/scripts/misc/wrench-windows-tests.sh new file mode 100644 index 0000000000..52b3a32173 --- /dev/null +++ b/taskcluster/scripts/misc/wrench-windows-tests.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -x -e -v + +# This script runs the windows CI scripts for standalone WebRender. The CI +# scripts build WebRender in various "standalone" (without Gecko) +# configurations and also run WebRender's reftest suite using the `wrench` +# tool in the WebRender repository. +# The builds involved require a number of dependencies to be available, +# which is all handled below. + +cd $GECKO_PATH + +export PATH=$PATH:$(cd $MOZ_FETCHES_DIR && pwd)/rustc/bin + +. taskcluster/scripts/misc/vs-setup.sh + +# Move the wrench-deps vendored crates into place +mv ${MOZ_FETCHES_DIR}/wrench-deps/{vendor,.cargo} gfx/wr +cd gfx/wr + +# This is needed for the WebRender standalone reftests +powershell.exe 'iex (Get-Content -Raw ci-scripts\set-screenresolution.ps1); Set-ScreenResolution 1920 1080' + +# Run the CI scripts +export CARGOFLAGS='--verbose --frozen' +cmd.exe /c 'ci-scripts\windows-tests.cmd' + +. $GECKO_PATH/taskcluster/scripts/misc/vs-cleanup.sh diff --git a/taskcluster/scripts/misc/zstdpy b/taskcluster/scripts/misc/zstdpy new file mode 100755 index 0000000000..7fc27fded6 --- /dev/null +++ b/taskcluster/scripts/misc/zstdpy @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
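+# Usage sketch (illustrative; the flags are defined by the argparse setup
+# below):
+#   zstdpy -z -T0 file > file.zst   # compress `file` using all CPUs
+#   zstdpy -d < file.zst > file     # decompress stdin to stdout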
+ +"""This script compresses and decompresses data using the zstandard compression +format, as provided by the python-zstandard module. + +Data is provided on stdin and output on stdout.""" + +import sys +import zstandard +from argparse import ArgumentParser + + +def main(argv=None): + parser = ArgumentParser(description=__doc__) + parser.set_defaults(mode="compress") + parser.add_argument( + "-z", + "--compress", + dest="mode", + action="store_const", + const="compress", + help="compress the data (this is the default)", + ) + parser.add_argument( + "-d", + "--decompress", + dest="mode", + action="store_const", + const="decompress", + help="decompress the data", + ) + parser.add_argument( + "-T", + "--threads", + dest="threads", + default=1, + type=int, + help="Compress using # working threads. If 0, use number of CPUs on the system. (default 1)", + ) + parser.add_argument( + "-l", + "--level", + dest="level", + default=3, + type=int, + help="Compression level from 1-22 (default 3)", + ) + parser.add_argument( + "file", + nargs="?", + help="File to compress/decompress. Default is stdin.", + ) + + args = parser.parse_args(argv) + + # The zstd commandline tool uses 0 to specify number of threads equal to + # the number of CPUs whereas the python module uses negative numbers to + # flag this behavior. Emulate the zstd commandline utility's behavior here + if args.threads == 0: + args.threads = -1 + + if args.file: + in_file = open(args.file, "rb") + else: + in_file = sys.stdin.buffer + + if args.mode == "compress": + ctx = zstandard.ZstdCompressor(level=args.level, threads=args.threads) + elif args.mode == "decompress": + ctx = zstandard.ZstdDecompressor() + + ctx.copy_stream(in_file, sys.stdout.buffer) + + +if __name__ == "__main__": + main() diff --git a/taskcluster/scripts/run-task b/taskcluster/scripts/run-task new file mode 100755 index 0000000000..2f3f6460db --- /dev/null +++ b/taskcluster/scripts/run-task @@ -0,0 +1,1021 @@ +#!/usr/bin/python3 -u +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +"""Run a task after performing common actions. + +This script is meant to be the "driver" for TaskCluster based tasks. +It receives some common arguments to control the run-time environment. + +It performs actions as requested from the arguments. Then it executes +the requested process and prints its output, prefixing it with the +current time to improve log usefulness. +""" + +import sys + + +if sys.version_info[0:2] < (3, 5): + print('run-task requires Python 3.5+') + sys.exit(1) + + +import argparse +import datetime +import errno +import io +import json +import os +import random +import re +import shutil +import signal +import socket +import stat +import subprocess + +import urllib.error +import urllib.request + +from threading import Thread + +FINGERPRINT_URL = 'http://taskcluster/secrets/v1/secret/project/taskcluster/gecko/hgfingerprint' +FALLBACK_FINGERPRINT = { + 'fingerprints': + "sha256:4D:EB:21:6E:35:2F:99:C6:8F:C3:47:9B:57:B8:6C:17:15:8F:86:09:D4:6C:17:1D:87:B0:DE:F9:0E:51:70:FC," + "sha256:90:85:39:A8:4F:47:20:58:98:0D:48:4D:8A:AC:71:DB:5C:AF:76:44:F1:B1:3E:56:92:FF:21:8C:C9:A9:F7:11" +} + +HGMOINTERNAL_CONFIG_URL = 'http://taskcluster/secrets/v1/secret/project/taskcluster/gecko/hgmointernal' + +CACHE_UID_GID_MISMATCH = ''' +There is a UID/GID mismatch on the cache. 
This likely means:
+
+a) different tasks are running as a different user/group
+b) different Docker images have different UID/GID for the same user/group
+
+Our cache policy is that the UID/GID for ALL tasks must be consistent
+for the lifetime of the cache. This eliminates permissions problems due
+to file/directory user/group ownership.
+
+To make this error go away, ensure that all Docker images use
+a consistent UID/GID and that all tasks using this cache are running as
+the same user/group.
+'''
+
+
+NON_EMPTY_VOLUME = '''
+error: volume %s is not empty
+
+Our Docker image policy requires volumes to be empty.
+
+The volume was likely populated as part of building the Docker image.
+Change the Dockerfile and anything run from it to not create files in
+any VOLUME.
+
+A lesser possibility is that you stumbled upon a TaskCluster platform bug
+where it fails to use new volumes for tasks.
+'''
+
+
+FETCH_CONTENT_NOT_FOUND = '''
+error: fetch-content script not found
+
+The script at `taskcluster/scripts/misc/fetch-content` could not be
+detected in the current environment.
+
+If this task clones gecko, make sure the GECKO_PATH environment variable
+is set to the proper location. Otherwise, the script may need to be mounted
+or added to the task's docker image then added to the PATH.
+'''
+
+# The exit code to use when caches should be purged and the task retried.
+# This is EX_OSFILE (from sysexits.h):
+# Some system file does not exist, cannot be opened, or has some
+# sort of error (e.g., syntax error).
+EXIT_PURGE_CACHE = 72
+
+
+IS_MACOSX = sys.platform == 'darwin'
+IS_POSIX = os.name == 'posix'
+IS_WINDOWS = os.name == 'nt'
+
+
+def print_line(prefix, m):
+    now = datetime.datetime.utcnow().isoformat().encode('utf-8')
+    # slice microseconds to 3 decimals.
+    now = now[:-3] if now[-7:-6] == b'.' else now
+    bytes = b'[%s %sZ] %s' % (prefix, now, m)
+    written = 0
+    while written < len(bytes):
+        written += (sys.stdout.buffer.write(bytes[written:]) or 0)
+    sys.stdout.buffer.flush()
+
+
+def run_and_prefix_output(prefix, args, *, extra_env=None, cwd=None):
+    """Runs a process and prefixes its output with the time.
+
+    Returns the process exit code.
+    """
+    print_line(
+        prefix,
+        b"executing %r%s\n" % (args, b"in %s" % (cwd.encode("utf-8"),) if cwd else b""),
+    )
+
+    env = dict(os.environ)
+    env.update(extra_env or {})
+
+    # Note: TaskCluster's stdin is a TTY. This attribute is lost
+    # when we pass sys.stdin to the invoked process. If we cared
+    # to preserve stdin as a TTY, we could make this work. But until
+    # someone needs it, don't bother.
+
+    # We want stdout to be bytes on Python 3. That means we can't use
+    # universal_newlines=True (because it implies text mode). But
+    # p.stdout.readline() won't work for bytes text streams. So, on Python 3,
+    # we manually install a latin1 stream wrapper. This allows us to readline()
+    # and preserves bytes, without losing any data.
+
+    p = subprocess.Popen(args,
+                         # Disable buffering because we want to receive output
+                         # as it is generated so timestamps in logs are
+                         # accurate.
+ bufsize=0, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=sys.stdin.fileno(), + env=env, + cwd=cwd) + + stdout = io.TextIOWrapper(p.stdout, encoding='latin1') + + while True: + data = stdout.readline().encode('latin1') + + if data == b'': + break + + print_line(prefix, data) + + return p.wait() + + +def get_posix_user_group(user, group): + import grp + import pwd + + try: + user_record = pwd.getpwnam(user) + except KeyError: + print('could not find user %s; specify a valid user with --user' % user) + sys.exit(1) + + try: + group_record = grp.getgrnam(group) + except KeyError: + print('could not find group %s; specify a valid group with --group' % + group) + sys.exit(1) + + # Most tasks use worker:worker. We require they have a specific numeric ID + # because otherwise it is too easy for files written to caches to have + # mismatched numeric IDs, which results in permissions errors. + if user_record.pw_name == 'worker' and user_record.pw_uid != 1000: + print('user `worker` must have uid=1000; got %d' % user_record.pw_uid) + sys.exit(1) + + if group_record.gr_name == 'worker' and group_record.gr_gid != 1000: + print('group `worker` must have gid=1000; got %d' % group_record.gr_gid) + sys.exit(1) + + # Find all groups to which this user is a member. + gids = [g.gr_gid for g in grp.getgrall() if group in g.gr_mem] + + return user_record, group_record, gids + + +def write_audit_entry(path, msg): + now = datetime.datetime.utcnow().isoformat().encode('utf-8') + with open(path, 'ab') as fh: + fh.write(b'[%sZ %s] %s\n' % ( + now, os.environb.get(b'TASK_ID', b'UNKNOWN'), msg)) + + +WANTED_DIR_MODE = stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR + + +def set_dir_permissions(path, uid, gid): + st = os.lstat(path) + + if st.st_uid != uid or st.st_gid != gid: + os.chown(path, uid, gid) + + # Also make sure dirs are writable in case we need to delete + # them. + if st.st_mode & WANTED_DIR_MODE != WANTED_DIR_MODE: + os.chmod(path, st.st_mode | WANTED_DIR_MODE) + + +def chown_recursive(path, user, group, uid, gid): + print_line(b'chown', + b'recursively changing ownership of %s to %s:%s\n' % + (path.encode('utf-8'), user.encode('utf-8'), group.encode( + 'utf-8'))) + + set_dir_permissions(path, uid, gid) + + for root, dirs, files in os.walk(path): + for d in dirs: + set_dir_permissions(os.path.join(root, d), uid, gid) + + for f in files: + # File may be a symlink that points to nowhere. In which case + # os.chown() would fail because it attempts to follow the + # symlink. We only care about directory entries, not what + # they point to. So setting the owner of the symlink should + # be sufficient. + os.lchown(os.path.join(root, f), uid, gid) + + +def configure_cache_posix(cache, user, group, + untrusted_caches, running_as_root): + """Configure a cache path on POSIX platforms. + + For each cache, we write out a special file denoting attributes and + capabilities of run-task and the task being executed. These attributes + are used by subsequent run-task invocations to validate that use of + the cache is acceptable. + + We /could/ blow away the cache data on requirements mismatch. + While this would be convenient, this could result in "competing" tasks + effectively undoing the other's work. This would slow down task + execution in aggregate. Without monitoring for this, people may not notice + the problem and tasks would be slower than they could be. We follow the + principle of "fail fast" to ensure optimal task execution. + + We also write an audit log of who used the caches. 
This log is printed + during failures to help aid debugging. + """ + + our_requirements = { + # Include a version string that we can bump whenever to trigger + # fresh caches. The actual value is not relevant and doesn't need + # to follow any explicit order. Since taskgraph bakes this file's + # hash into cache names, any change to this file/version is sufficient + # to force the use of a new cache. + b'version=1', + # Include the UID and GID the task will run as to ensure that tasks + # with different UID and GID don't share the same cache. + b'uid=%d' % user.pw_uid, + b'gid=%d' % group.gr_gid, + } + + requires_path = os.path.join(cache, '.cacherequires') + audit_path = os.path.join(cache, '.cachelog') + + # The cache is empty. Configure it. + if not os.listdir(cache): + print_line(b'cache', b'cache %s is empty; writing requirements: ' + b'%s\n' % ( + cache.encode('utf-8'), b' '.join(sorted(our_requirements)))) + + # We write a requirements file so future invocations know what the + # requirements are. + with open(requires_path, 'wb') as fh: + fh.write(b'\n'.join(sorted(our_requirements))) + + # And make it read-only as a precaution against deletion. + os.chmod(requires_path, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) + + write_audit_entry(audit_path, + b'created; requirements: %s' % + b', '.join(sorted(our_requirements))) + + set_dir_permissions(cache, user.pw_uid, group.gr_gid) + return + + # The cache has content and we have a requirements file. Validate + # requirements alignment. + if os.path.exists(requires_path): + with open(requires_path, 'rb') as fh: + wanted_requirements = set(fh.read().splitlines()) + + print_line(b'cache', b'cache %s exists; requirements: %s\n' % ( + cache.encode('utf-8'), b' '.join(sorted(wanted_requirements)))) + + missing = wanted_requirements - our_requirements + + # Allow requirements mismatch for uid/gid if and only if caches + # are untrusted. This allows cache behavior on Try to be + # reasonable. Otherwise, random tasks could "poison" cache + # usability by introducing uid/gid mismatches. For untrusted + # environments like Try, this is a perfectly reasonable thing to + # allow. + if missing and untrusted_caches and running_as_root and \ + all(s.startswith((b'uid=', b'gid=')) for s in missing): + print_line(b'cache', + b'cache %s uid/gid mismatch; this is acceptable ' + b'because caches for this task are untrusted; ' + b'changing ownership to facilitate cache use\n' % + cache.encode('utf-8')) + chown_recursive(cache, user.pw_name, group.gr_name, user.pw_uid, + group.gr_gid) + + # And write out the updated reality. 
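+            # For reference, the file rewritten below ends up with exactly the
+            # entries computed in our_requirements; for a typical worker:worker
+            # task (uid/gid 1000, see get_posix_user_group) that is:
+            #   gid=1000
+            #   uid=1000
+            #   version=1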
+ with open(requires_path, 'wb') as fh: + fh.write(b'\n'.join(sorted(our_requirements))) + + write_audit_entry(audit_path, + b'chown; requirements: %s' % + b', '.join(sorted(our_requirements))) + + elif missing: + print('error: requirements for populated cache %s differ from ' + 'this task' % cache) + print('cache requirements: %s' % ' '.join(sorted( + s.decode('utf-8') for s in wanted_requirements))) + print('our requirements: %s' % ' '.join(sorted( + s.decode('utf-8') for s in our_requirements))) + if any(s.startswith((b'uid=', b'gid=')) for s in missing): + print(CACHE_UID_GID_MISMATCH) + + write_audit_entry(audit_path, + b'requirements mismatch; wanted: %s' % + b', '.join(sorted(our_requirements))) + + print('') + print('audit log:') + with open(audit_path, 'r') as fh: + print(fh.read()) + + return True + else: + write_audit_entry(audit_path, b'used') + + # We don't need to adjust permissions here because the cache is + # associated with a uid/gid and the first task should have set + # a proper owner/group. + + return + + # The cache has content and no requirements file. This shouldn't + # happen because run-task should be the first thing that touches a + # cache. + print('error: cache %s is not empty and is missing a ' + '.cacherequires file; the cache names for this task are ' + 'likely mis-configured or TASKCLUSTER_CACHES is not set ' + 'properly' % cache) + + write_audit_entry(audit_path, b'missing .cacherequires') + return True + + +def configure_volume_posix(volume, user, group, running_as_root): + # The only time we should see files in the volume is if the Docker + # image build put files there. + # + # For the sake of simplicity, our policy is that volumes should be + # empty. This also has the advantage that an empty volume looks + # a lot like an empty cache. Tasks can rely on caches being + # swapped in and out on any volume without any noticeable change + # of behavior. + volume_files = os.listdir(volume) + if volume_files: + print(NON_EMPTY_VOLUME % volume) + print('entries in root directory: %s' % + ' '.join(sorted(volume_files))) + sys.exit(1) + + # The volume is almost certainly owned by root:root. Chown it so it + # is writable. + + if running_as_root: + print_line(b'volume', b'changing ownership of volume %s ' + b'to %d:%d\n' % (volume.encode('utf-8'), + user.pw_uid, + group.gr_gid)) + set_dir_permissions(volume, user.pw_uid, group.gr_gid) + + +def vcs_checkout(source_repo, dest, store_path, + base_repo=None, revision=None, branch=None, + fetch_hgfingerprint=False, sparse_profile=None): + # Specify method to checkout a revision. This defaults to revisions as + # SHA-1 strings, but also supports symbolic revisions like `tip` via the + # branch flag. + if revision: + revision_flag = '--revision' + revision_value = revision + elif branch: + revision_flag = '--branch' + revision_value = branch + else: + print('revision is not specified for checkout') + sys.exit(1) + + if IS_MACOSX or IS_POSIX: + hg_bin = 'hg' + elif IS_WINDOWS: + # This is where OCC installs it in the AMIs. + hg_bin = r'C:\Program Files\Mercurial\hg.exe' + if not os.path.exists(hg_bin): + print('could not find Mercurial executable: %s' % hg_bin) + sys.exit(1) + + store_path = os.path.abspath(store_path) + args = [ + hg_bin, + 'robustcheckout', + '--sharebase', store_path, + '--purge', + ] + + # Obtain certificate fingerprints. 
Without this, the checkout will use the fingerprint
+    # on the system, which is managed some other way (such as puppet)
+    if fetch_hgfingerprint:
+        try:
+            print_line(b'vcs', b'fetching hg.mozilla.org fingerprint from %s\n' %
+                       FINGERPRINT_URL.encode('utf-8'))
+            res = urllib.request.urlopen(FINGERPRINT_URL, timeout=10)
+            secret = res.read()
+            try:
+                secret = json.loads(secret.decode('utf-8'))
+            except ValueError:
+                print_line(b'vcs', b'invalid JSON in hg fingerprint secret')
+                sys.exit(1)
+        except (urllib.error.URLError, socket.timeout):
+            print_line(b'vcs', b'Unable to retrieve current hg.mozilla.org fingerprint '
+                       b'using the secret service, using fallback instead.')
+            # XXX This fingerprint will not be accurate if running on an old
+            # revision after the server fingerprint has changed.
+            secret = {'secret': FALLBACK_FINGERPRINT}
+
+        hgmo_fingerprint = secret['secret']['fingerprints']
+        args.extend([
+            '--config', 'hostsecurity.hg.mozilla.org:fingerprints=%s' % hgmo_fingerprint,
+        ])
+
+    if base_repo:
+        args.extend(['--upstream', base_repo])
+    if sparse_profile:
+        args.extend(['--sparseprofile', sparse_profile])
+
+    dest = os.path.abspath(dest)
+    args.extend([
+        revision_flag, revision_value,
+        source_repo, dest,
+    ])
+
+    res = run_and_prefix_output(b'vcs', args,
+                                extra_env={'PYTHONUNBUFFERED': '1'})
+    if res:
+        # Mitigation for bug 1539681: if for some reason the clone failed,
+        # we just remove it, so that its possible incomplete state doesn't
+        # interfere with cloning in subsequent tasks.
+        shutil.rmtree(dest, ignore_errors=True)
+        sys.exit(res)
+
+    # Update the current revision hash and ensure that it is well formed.
+    revision = subprocess.check_output(
+        [hg_bin, 'log',
+         '--rev', '.',
+         '--template', '{node}'],
+        cwd=dest,
+        # Triggers text mode on Python 3.
+        universal_newlines=True)
+
+    assert re.match('^[a-f0-9]{40}$', revision)
+
+    msg = ("TinderboxPrint:<a href={source_repo}/rev/{revision} "
+           "title='Built from {repo_name} revision {revision}'>"
+           "{revision}</a>\n".format(revision=revision,
+                                     source_repo=source_repo,
+                                     repo_name=source_repo.split('/')[-1]))
+
+    print_line(b'vcs', msg.encode('utf-8'))
+
+    return revision
+
+
+def fetch_artifacts():
+    print_line(b'fetches', b'fetching artifacts\n')
+
+    fetch_content = shutil.which('fetch-content')
+    if not fetch_content and os.environ.get('GECKO_PATH'):
+        fetch_content = os.path.join(os.environ['GECKO_PATH'], 'taskcluster',
+                                     'scripts', 'misc', 'fetch-content')
+
+    if not fetch_content or not os.path.isfile(fetch_content):
+        fetch_content = os.path.join(os.path.dirname(__file__),
+                                     'fetch-content')
+
+    if not os.path.isfile(fetch_content):
+        print(FETCH_CONTENT_NOT_FOUND)
+        sys.exit(1)
+
+    cmd = [sys.executable, '-u', fetch_content, 'task-artifacts']
+    res = run_and_prefix_output(b'fetches', cmd)
+    if res:
+        sys.exit(res)
+
+    print_line(b'fetches', b'finished fetching artifacts\n')
+
+
+def add_vcs_arguments(parser, project, name):
+    """Adds arguments to ArgumentParser to control VCS options for a project."""
+
+    parser.add_argument('--%s-checkout' % project,
+                        help='Directory where %s checkout should be created' % name)
+    parser.add_argument('--%s-sparse-profile' % project,
+                        help='Path to sparse profile for %s checkout' % name)
+
+
+def resolve_checkout_url(base_repo, head_repo):
+    """Resolve the Mercurial URL to perform a checkout against, either the
+    public hg.mozilla.org service or a CI-only regional mirror.
+ + The config will be of the form: + { + "aws/us-west-2": { # key built from `TASKCLUSTER_WORKER_LOCATION` variable + "rate": 0.5, + "domain": "us-west-2.hgmointernal.net" + }, + "google/us-central1": {...} + } + """ + worker_location = os.getenv('TASKCLUSTER_WORKER_LOCATION') + if not worker_location: + print_line(b'vcs', b'TASKCLUSTER_WORKER_LOCATION environment variable not set; ' + b'using public hg.mozilla.org service\n') + return base_repo, head_repo + + try: + worker_location = json.loads(worker_location) + except json.JSONDecodeError: + print_line(b'vcs', b'Could not decode TASKCLUSTER_WORKER_LOCATION environment variable ' + b'as JSON. Content: %s\n' % worker_location.encode('utf-8')) + print_line(b'vcs', b'using public hg.mozilla.org service\n') + return base_repo, head_repo + + if 'cloud' not in worker_location or 'region' not in worker_location: + print_line(b'vcs', b'TASKCLUSTER_WORKER_LOCATION missing required keys; ' + b'using public hg.mozilla.org service\n') + return base_repo, head_repo + + config_key = '%(cloud)s/%(region)s' % worker_location + + try: + print_line(b'vcs', b'fetching hgmointernal config from %s\n' % + HGMOINTERNAL_CONFIG_URL.encode('utf-8')) + + # Get the hgmointernal config Taskcluster secret + res = urllib.request.urlopen(HGMOINTERNAL_CONFIG_URL, timeout=10) + hgmointernal_config = json.loads(res.read().decode('utf-8'))['secret'] + + # Use public hg service if region not yet supported + if config_key not in hgmointernal_config: + print_line(b'vcs', b'region %s not yet supported; using public ' + b'hg.mozilla.org service\n' % config_key.encode('utf-8')) + + return base_repo, head_repo + + # Only send a percentage of traffic to the internal mirror + rate = float(hgmointernal_config[config_key]['rate']) + + if random.random() > rate: + print_line(b'vcs', b'hgmointernal rate miss; using ' + b'public hg.mozilla.org service\n') + return base_repo, head_repo + + print_line(b'vcs', b'hgmointernal rate hit; cloning from ' + b'private hgweb mirror\n') + + mirror_domain = hgmointernal_config[config_key]['domain'] + + if base_repo and base_repo.startswith('https://hg.mozilla.org'): + base_repo = base_repo.replace('hg.mozilla.org', mirror_domain, 1) + + if head_repo and head_repo.startswith('https://hg.mozilla.org'): + head_repo = head_repo.replace('hg.mozilla.org', mirror_domain, 1) + + return base_repo, head_repo + + except (KeyError, ValueError): + print_line(b'vcs', b'invalid JSON in hgmointernal config; ' + b'falling back to public hg.mozilla.org service\n') + + except (urllib.error.URLError, socket.timeout): + print_line(b'vcs', b'Unable to retrieve hgmointernal config using ' + b'the secret service; falling back to public hg.mozilla.org ' + b'service\n') + + return base_repo, head_repo + + +def collect_vcs_options(args, project): + checkout = getattr(args, '%s_checkout' % project) + sparse_profile = getattr(args, '%s_sparse_profile' % project) + + env_prefix = project.upper() + + base_repo = os.environ.get('%s_BASE_REPOSITORY' % env_prefix) + head_repo = os.environ.get('%s_HEAD_REPOSITORY' % env_prefix) + revision = os.environ.get('%s_HEAD_REV' % env_prefix) + branch = os.environ.get('%s_HEAD_REF' % env_prefix) + + store_path = os.environ.get('HG_STORE_PATH') + + # Expand ~ in some paths. + if checkout: + checkout = os.path.expanduser(checkout) + if store_path: + store_path = os.path.expanduser(store_path) + + # Some callers set the base repository to mozilla-central for historical + # reasons. 
Switch to mozilla-unified because robustcheckout works best + # with it. + if base_repo == 'https://hg.mozilla.org/mozilla-central': + base_repo = 'https://hg.mozilla.org/mozilla-unified' + + # No need to check the hgmointernal config if we aren't performing + # a checkout. + if checkout: + base_repo, head_repo = resolve_checkout_url(base_repo, head_repo) + + return { + 'store-path': store_path, + 'project': project, + 'env-prefix': env_prefix, + 'checkout': checkout, + 'sparse-profile': sparse_profile, + 'base-repo': base_repo, + 'head-repo': head_repo, + 'revision': revision, + 'branch': branch, + } + + +def vcs_checkout_from_args(args, project): + options = collect_vcs_options(args, project) + + if not options['checkout']: + if options['branch'] and not options['revision']: + print('task should be defined in terms of non-symbolic revision') + sys.exit(1) + return + + os.environ['%s_HEAD_REV' % options['env-prefix']] = vcs_checkout( + options['head-repo'], + options['checkout'], + options['store-path'], + base_repo=options['base-repo'], + revision=options['revision'], + fetch_hgfingerprint=args.fetch_hgfingerprint, + branch=options['branch'], + sparse_profile=options['sparse-profile']) + + +def maybe_run_resource_monitoring(): + """Run the resource monitor if available. + + Discussion in https://github.com/taskcluster/taskcluster-rfcs/pull/160 + and https://bugzil.la/1648051 + """ + if 'MOZ_FETCHES' not in os.environ: + return + if 'RESOURCE_MONITOR_OUTPUT' not in os.environ: + return + + prefix = b'resource_monitor' + + executable = '{}/resource-monitor/resource-monitor{}'.format( + os.environ.get('MOZ_FETCHES_DIR'), '.exe' if IS_WINDOWS else '') + + if not os.path.exists(executable) or not os.access(executable, os.X_OK): + print_line(prefix, b"%s not executable\n" % executable.encode('utf-8')) + return + args = [ + executable, + '-process', + str(os.getpid()), + '-output', + os.environ["RESOURCE_MONITOR_OUTPUT"], + ] + print_line(prefix, b"Resource monitor starting: %s\n" % str(args).encode('utf-8')) + # Avoid environment variables the payload doesn't need. + del os.environ['RESOURCE_MONITOR_OUTPUT'] + + # Without CREATE_NEW_PROCESS_GROUP Windows signals will attempt to kill run-task, too. + process = subprocess.Popen(args, + bufsize=0, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + creationflags=subprocess.CREATE_NEW_PROCESS_GROUP if IS_WINDOWS else 0, + cwd=os.getcwd()) + + def capture_output(): + fh = io.TextIOWrapper(process.stdout, encoding='latin1') + while True: + data = fh.readline().encode('latin1') + if data == b'': + break + print_line(prefix, data) + + monitor_process = Thread(target=capture_output) + monitor_process.start() + return process + + +def main(args): + print_line(b'setup', b'run-task started in %s\n' % os.getcwd().encode('utf-8')) + running_as_root = IS_POSIX and os.getuid() == 0 + + # Set a reasonable limit to the number of open files. + # Running under docker inherits the system defaults, which are not subject + # to the "standard" limits set by pam_limits.so, and while they work well + # for servers that may receive a lot of connections, they cause performance + # problems for things that close file descriptors before forking (for good + # reasons), like python's `subprocess.Popen(..., close_fds=True)` (and while + # the default was close_fds=False in python2, that changed in python3). + # In some cases, Firefox does the same thing when spawning subprocesses. + # Processes spawned by this one will inherit the limit set here. 
+    try:
+        import resource
+        # Keep the hard limit the same, though, allowing processes to change their
+        # soft limit if they need to (Firefox does, for instance).
+        (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
+        limit = os.environ.get('MOZ_LIMIT_NOFILE')
+        if limit:
+            limit = int(limit)
+        else:
+            # If no explicit limit is given, use 1024 if it's less than the current
+            # soft limit. For instance, the default on macOS is 256, so we'd pick
+            # that rather than 1024.
+            limit = min(soft, 1024)
+        # Now apply the limit, if it's different from the original one.
+        if limit != soft:
+            resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
+    except ImportError:
+        # The resource module is UNIX only.
+        pass
+
+    # Arguments up to '--' are ours. After are for the main task
+    # to be executed.
+    try:
+        i = args.index('--')
+        our_args = args[0:i]
+        task_args = args[i + 1:]
+    except ValueError:
+        our_args = args
+        task_args = []
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--user', default='worker', help='user to run as')
+    parser.add_argument('--group', default='worker', help='group to run as')
+    parser.add_argument('--task-cwd', help='directory to run the provided command in')
+
+    add_vcs_arguments(parser, 'gecko', 'Firefox')
+    add_vcs_arguments(parser, 'comm', 'Comm')
+
+    parser.add_argument('--fetch-hgfingerprint', action='store_true',
+                        help='Fetch the latest hgfingerprint from the secrets store, '
+                             'using the taskclusterProxy')
+
+    args = parser.parse_args(our_args)
+
+    uid = gid = gids = None
+    if IS_POSIX and running_as_root:
+        user, group, gids = get_posix_user_group(args.user, args.group)
+        uid = user.pw_uid
+        gid = group.gr_gid
+
+    if running_as_root and os.path.exists("/dev/kvm"):
+        # Ensure kvm permissions for worker, required for Android x86
+        st = os.stat("/dev/kvm")
+        os.chmod("/dev/kvm", st.st_mode | 0o666)
+
+    # Validate caches.
+    #
+    # Taskgraph should pass in a list of paths that are caches via an
+    # environment variable (which we don't want to pass down to child
+    # processes).
+
+    if 'TASKCLUSTER_CACHES' in os.environ:
+        caches = os.environ['TASKCLUSTER_CACHES'].split(';')
+        del os.environ['TASKCLUSTER_CACHES']
+    else:
+        caches = []
+
+    if 'TASKCLUSTER_UNTRUSTED_CACHES' in os.environ:
+        untrusted_caches = True
+        del os.environ['TASKCLUSTER_UNTRUSTED_CACHES']
+    else:
+        untrusted_caches = False
+
+    for cache in caches:
+        if not os.path.isdir(cache):
+            print('error: cache %s is not a directory; this should never '
+                  'happen' % cache)
+            return 1
+
+        if running_as_root:
+            purge = configure_cache_posix(cache, user, group, untrusted_caches,
+                                          running_as_root)
+
+            if purge:
+                return EXIT_PURGE_CACHE
+
+    if 'TASKCLUSTER_VOLUMES' in os.environ:
+        volumes = os.environ['TASKCLUSTER_VOLUMES'].split(';')
+        del os.environ['TASKCLUSTER_VOLUMES']
+    else:
+        volumes = []
+
+    if volumes and not IS_POSIX:
+        print('assertion failed: volumes not expected on Windows')
+        return 1
+
+    # Sanitize volumes.
+    for volume in volumes:
+        # If a volume is a cache, it was dealt with above.
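+        # (Both lists were parsed above from ';'-separated environment
+        # variables; an illustrative payload, with hypothetical paths:
+        #   TASKCLUSTER_CACHES=/builds/worker/checkouts;/builds/worker/tooltool-cache
+        #   TASKCLUSTER_VOLUMES=/builds/worker/workspace )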
+    # Sanitize volumes.
+    for volume in volumes:
+        # If a volume is a cache, it was dealt with above.
+        if volume in caches:
+            print_line(b'volume', b'volume %s is a cache\n' %
+                       volume.encode('utf-8'))
+            continue
+
+        if running_as_root:
+            configure_volume_posix(volume, user, group, running_as_root)
+
+    all_caches_and_volumes = set(map(os.path.normpath, caches))
+    all_caches_and_volumes |= set(map(os.path.normpath, volumes))
+
+    def path_in_cache_or_volume(path):
+        path = os.path.normpath(path)
+
+        while path:
+            if path in all_caches_and_volumes:
+                return True
+
+            path, child = os.path.split(path)
+            if not child:
+                break
+
+        return False
+
+    def prepare_checkout_dir(checkout):
+        if not checkout:
+            return
+
+        # The checkout path becomes the working directory. Since there are
+        # special cache files in the cache's root directory and working
+        # directory purging could blow them away, disallow this scenario.
+        if os.path.exists(os.path.join(checkout, '.cacherequires')):
+            print('error: cannot perform vcs checkout into cache root: %s' %
+                  checkout)
+            sys.exit(1)
+
+        # TODO given the performance implications, consider making this a fatal
+        # error.
+        if not path_in_cache_or_volume(checkout):
+            print_line(b'vcs', b'WARNING: vcs checkout path (%s) not in cache '
+                       b'or volume; performance will likely suffer\n' %
+                       checkout.encode('utf-8'))
+
+        # Ensure the directory for the source checkout exists.
+        try:
+            os.makedirs(os.path.dirname(checkout))
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        # And that it is owned by the appropriate user/group.
+        if running_as_root:
+            os.chown(os.path.dirname(checkout), uid, gid)
+
+    def prepare_hg_store_path():
+        # And ensure the shared store path exists and has proper permissions.
+        if 'HG_STORE_PATH' not in os.environ:
+            print('error: HG_STORE_PATH environment variable not set')
+            sys.exit(1)
+
+        store_path = os.environ['HG_STORE_PATH']
+
+        if not path_in_cache_or_volume(store_path):
+            print_line(b'vcs', b'WARNING: HG_STORE_PATH (%s) not in cache or '
+                       b'volume; performance will likely suffer\n' %
+                       store_path.encode('utf-8'))
+
+        try:
+            os.makedirs(store_path)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        if running_as_root:
+            os.chown(store_path, uid, gid)
+
+    prepare_checkout_dir(args.gecko_checkout)
+    if args.gecko_checkout or args.comm_checkout:
+        prepare_hg_store_path()
+
+    if IS_POSIX and running_as_root:
+        # Drop permissions to requested user.
+        # This code is modeled after what `sudo` was observed to do in a Docker
+        # container. We do not bother calling setrlimit() because containers have
+        # their own limits.
+        print_line(b'setup', b'running as %s:%s\n' % (
+            args.user.encode('utf-8'), args.group.encode('utf-8')))
+
+        os.setgroups(gids)
+        os.umask(0o22)
+        os.setresgid(gid, gid, gid)
+        os.setresuid(uid, uid, uid)
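
Ordering in the privilege drop above is significant: supplementary groups and the gid must be changed while the process is still root, because after setresuid() it no longer has the privilege to do so. A minimal sketch of the same sequence (the numeric ids are placeholders):

    import os

    def drop_privileges(uid, gid, gids):
        # Must run as root. Set groups first: once the uid changes,
        # setgroups()/setresgid() would fail with EPERM.
        os.setgroups(gids)           # supplementary groups
        os.umask(0o22)               # sane default for files the task creates
        os.setresgid(gid, gid, gid)  # real, effective, saved gid
        os.setresuid(uid, uid, uid)  # real, effective, saved uid; no way back

    # Example (placeholder ids for a hypothetical 'worker' account):
    # drop_privileges(1000, 1000, [1000])
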
+
+    vcs_checkout_from_args(args, 'gecko')
+    vcs_checkout_from_args(args, 'comm')
+
+    resource_process = None
+
+    try:
+        for k in ('GECKO_PATH', 'MOZ_FETCHES_DIR', 'UPLOAD_DIR', 'MOZ_PYTHON_HOME'):
+            if k in os.environ:
+                # Normalize paths to use forward slashes. Some shell scripts
+                # tolerate that better on Windows.
+                os.environ[k] = os.path.abspath(os.environ[k]).replace(os.sep, '/')
+                print_line(b'setup', b'%s is %s\n' % (
+                    k.encode('utf-8'),
+                    os.environ[k].encode('utf-8')))
+
+        if 'MOZ_FETCHES' in os.environ:
+            fetch_artifacts()
+
+        # If Python is a fetch dependency, add it to the PATH and set the
+        # mozilla-specific MOZ_PYTHON_HOME to relocate binaries.
+        if 'MOZ_PYTHON_HOME' in os.environ:
+
+            print_line(b'setup',
+                       b'Setting up local python environment\n')
+            prev = [os.environ['PATH']] if 'PATH' in os.environ else []
+
+            moz_python_home = os.environ['MOZ_PYTHON_HOME']
+            if IS_WINDOWS:
+                ext = '.exe'
+                moz_python_bindir = moz_python_home
+            else:
+                ext = ''
+                moz_python_bindir = moz_python_home + '/bin'
+
+            new = os.environ['PATH'] = os.pathsep.join([moz_python_bindir]
+                                                       + prev)
+
+            # Relocate the python binary. Standard way uses PYTHONHOME, but
+            # this conflicts with system python (e.g. used by hg) so we
+            # maintain a small patch to use MOZPYTHONHOME instead.
+            os.environ['MOZPYTHONHOME'] = moz_python_home
+
+            pyinterp = os.path.join(moz_python_bindir, f'python3{ext}')
+            # just a sanity check
+            if not os.path.exists(pyinterp):
+                raise RuntimeError("Inconsistent Python installation: "
+                                   "archive found, but no python3 binary "
+                                   "detected")
+
+            if IS_MACOSX:
+                # On OSX, we may not have access to the system certificate,
+                # so use the certifi ones.
+                certifi_cert_file = subprocess.check_output(
+                    [pyinterp, '-c',
+                     'import certifi; print(certifi.where())'],
+                    text=True
+                )
+                os.environ['SSL_CERT_FILE'] = certifi_cert_file.strip()
+                print_line(b'setup',
+                           b'patching ssl certificate\n')
+
+            print_line(b'setup',
+                       b'updated PATH with python artifact: ' +
+                       new.encode() + b'\n')
+
+        resource_process = maybe_run_resource_monitoring()
+
+        return run_and_prefix_output(b'task', task_args, cwd=args.task_cwd)
+    finally:
+        if resource_process:
+            print_line(b'resource_monitor', b'terminating\n')
+            if IS_WINDOWS:
+                # .terminate() on Windows is not a graceful shutdown, due to
+                # differences in signals. CTRL_BREAK_EVENT will work provided
+                # the subprocess is in a different process group, so this script
+                # isn't also killed.
+                os.kill(resource_process.pid, signal.CTRL_BREAK_EVENT)
+            else:
+                resource_process.terminate()
+            resource_process.wait()
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))
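
The finally block above depends on Windows signal semantics: CTRL_BREAK_EVENT is delivered to a whole process group, which is why the monitor was started with CREATE_NEW_PROCESS_GROUP. A small stand-alone sketch of that stop-the-child pattern (the sleeping child is just a stand-in):

    import os
    import signal
    import subprocess
    import sys

    IS_WINDOWS = os.name == 'nt'

    # Start the child in its own process group on Windows so that the
    # CTRL_BREAK_EVENT below doesn't also interrupt this script.
    flags = subprocess.CREATE_NEW_PROCESS_GROUP if IS_WINDOWS else 0
    child = subprocess.Popen(
        [sys.executable, '-c', 'import time; time.sleep(60)'],
        creationflags=flags)

    if IS_WINDOWS:
        # Roughly equivalent to Ctrl+Break in the child's console group.
        os.kill(child.pid, signal.CTRL_BREAK_EVENT)
    else:
        child.terminate()  # SIGTERM on POSIX
    child.wait()
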
+""".lstrip() + + +def call(cmd, **kwargs): + print(" ".join(cmd)) + return subprocess.call(cmd, **kwargs) + + +def wait_for_run_mozharness(timeout=60): + starttime = datetime.datetime.now() + while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout): + if os.path.isfile(os.path.join(here, 'run-mozharness')): + break + time.sleep(0.2) + else: + print("Timed out after %d seconds waiting for the 'run-mozharness' binary" % timeout) + return 1 + + +def setup_mach_environment(): + mach_src = os.path.join(MOZHARNESS_WORKDIR, 'tests', 'mach') + if not os.path.isfile(mach_src): + return 1 + + mach_dest = os.path.expanduser(os.path.join('~', 'bin', 'mach')) + if os.path.exists(mach_dest): + os.remove(mach_dest) + os.symlink(mach_src, mach_dest) + return 0 + + +def run_mozharness(*args): + wait_for_run_mozharness() + try: + return call(['run-mozharness'] + list(args)) + finally: + setup_mach_environment() + + +def setup(): + """Run the mozharness script without the 'run-tests' action. + + This will do all the necessary setup steps like creating a virtualenv and + downloading the tests and firefox binary. But it stops before running the + tests. + """ + status = run_mozharness('--no-run-tests') + + if shutil.which('mach'): + print(MACH_SETUP_FINISHED) + else: + print(MACH_SETUP_FAILED) + + return status + + +def clone(): + """Clone the correct gecko repository and update to the proper revision.""" + base_repo = os.environ['GECKO_HEAD_REPOSITORY'] + dest = os.path.expanduser(os.path.join('~', 'gecko')) + + # Specify method to checkout a revision. This defaults to revisions as + # SHA-1 strings, but also supports symbolic revisions like `tip` via the + # branch flag. + if os.environ.get('GECKO_HEAD_REV'): + revision_flag = b'--revision' + revision = os.environ['GECKO_HEAD_REV'] + elif os.environ.get('GECKO_HEAD_REF'): + revision_flag = b'--branch' + revision = os.environ['GECKO_HEAD_REF'] + else: + print('revision is not specified for checkout') + return 1 + + # TODO Bug 1301382 - pin hg.mozilla.org fingerprint. + call([ + b'/usr/bin/hg', b'robustcheckout', + b'--sharebase', os.environ['HG_STORE_PATH'], + b'--purge', + b'--upstream', b'https://hg.mozilla.org/mozilla-unified', + revision_flag, revision, + base_repo, dest + ]) + print("Finished cloning to {} at revision {}.".format(dest, revision)) + + +def exit(): + pass + + +OPTIONS = [ + ('Resume task', run_mozharness, + "Resume the original task without modification. This can be useful for " + "passively monitoring it from another shell."), + ('Setup task', setup, + "Setup the task (download the application and tests) but don't run the " + "tests just yet. The tests can be run with a custom configuration later. " + "This will provide a mach environment (experimental)."), + ('Clone gecko', clone, + "Perform a clone of gecko using the task's repo and update it to the " + "task's revision."), + ('Exit', exit, "Exit this wizard and return to the shell.") +] + + +def _fmt_options(): + max_line_len = 60 + max_name_len = max(len(o[0]) for o in OPTIONS) + + # TODO Pad will be off if there are more than 9 options. 
+
+
+def _fmt_options():
+    max_line_len = 60
+    max_name_len = max(len(o[0]) for o in OPTIONS)
+
+    # TODO Pad will be off if there are more than 9 options.
+    pad = ' ' * (max_name_len+6)
+
+    msg = []
+    for i, (name, _, desc) in enumerate(OPTIONS):
+        desc = wrap(desc, width=max_line_len)
+        desc = [desc[0]] + [pad + l for l in desc[1:]]
+
+        optstr = '{}) {} - {}\n'.format(
+            i+1, name.ljust(max_name_len), '\n'.join(desc))
+        msg.append(optstr)
+    msg.append("Select one of the above options: ")
+    return '\n'.join(msg)
+
+
+def wizard():
+    print("This wizard can help you get started with some common debugging "
+          "workflows.\nWhat would you like to do?\n")
+    print(_fmt_options(), end="")
+    choice = None
+    while True:
+        choice = input()
+        try:
+            choice = int(choice)-1
+            if 0 <= choice < len(OPTIONS):
+                break
+        except ValueError:
+            pass
+
+        print("Must provide an integer from 1-{}:".format(len(OPTIONS)))
+
+    func = OPTIONS[choice][1]
+    ret = func()
+
+    print("Use the 'run-wizard' command to start this wizard again.")
+    return ret
+
+
+if __name__ == '__main__':
+    sys.exit(wizard())
diff --git a/taskcluster/scripts/tester/test-linux.sh b/taskcluster/scripts/tester/test-linux.sh
new file mode 100755
index 0000000000..250ae40e10
--- /dev/null
+++ b/taskcluster/scripts/tester/test-linux.sh
@@ -0,0 +1,283 @@
+#! /bin/bash -xe
+
+set -x -e
+
+echo "running as" $(id)
+
+# Detect distribution
+. /etc/os-release
+if [ "${ID}" == "ubuntu" ]; then
+    DISTRIBUTION="Ubuntu"
+elif [ "${ID}" == "debian" ]; then
+    DISTRIBUTION="Debian"
+else
+    DISTRIBUTION="Unknown"
+fi
+
+# Detect release version if supported
+FILE="/etc/lsb-release"
+if [ -e $FILE ] ; then
+    . /etc/lsb-release
+    RELEASE="${DISTRIB_RELEASE}"
+else
+    RELEASE="unknown"
+fi
+
+####
+# Taskcluster friendly wrapper for performing fx desktop tests via mozharness.
+####
+
+# Inputs, with defaults
+
+: GECKO_PATH                ${GECKO_PATH}
+: MOZHARNESS_PATH           ${MOZHARNESS_PATH}
+: MOZHARNESS_URL            ${MOZHARNESS_URL}
+: MOZHARNESS_SCRIPT         ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG         ${MOZHARNESS_CONFIG}
+: MOZHARNESS_OPTIONS        ${MOZHARNESS_OPTIONS}
+: MOZ_ENABLE_WAYLAND        ${MOZ_ENABLE_WAYLAND}
+: NEED_XVFB                 ${NEED_XVFB:=true}
+: NEED_WINDOW_MANAGER       ${NEED_WINDOW_MANAGER:=false}
+: NEED_PULSEAUDIO           ${NEED_PULSEAUDIO:=false}
+: NEED_COMPIZ               ${NEED_COMPIZ:=false}
+: START_VNC                 ${START_VNC:=false}
+: TASKCLUSTER_INTERACTIVE   ${TASKCLUSTER_INTERACTIVE:=false}
+: mozharness args           "${@}"
+: WORKING_DIR               ${WORKING_DIR:=$(pwd)}
+: WORKSPACE                 ${WORKSPACE:=${WORKING_DIR%/}/workspace}
+
+set -v
+mkdir -p "$WORKSPACE"
+cd "$WORKSPACE"
+
+fail() {
+    echo # make sure error message is on a new line
+    echo "[test-linux.sh:error]" "${@}"
+    exit 1
+}
+
+# start pulseaudio
+maybe_start_pulse() {
+    if $NEED_PULSEAUDIO; then
+        # call pulseaudio for Ubuntu only
+        if [ $DISTRIBUTION == "Ubuntu" ]; then
+            pulseaudio --daemonize --log-level=4 --log-time=1 --log-target=stderr --start --fail -vvvvv --exit-idle-time=-1 --cleanup-shm --dump-conf
+        fi
+    fi
+}
+
+# test required parameters are supplied
+if [ -z "${MOZHARNESS_PATH}" -a -z "${MOZHARNESS_URL}" ]; then
+    fail "MOZHARNESS_PATH or MOZHARNESS_URL must be defined";
+fi
+
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
+
+if [ $MOZ_ENABLE_WAYLAND ]; then
+    NEED_XVFB=true
+    NEED_WINDOW_MANAGER=true
+fi
+
+# make sure artifact directories exist
+mkdir -p "$WORKSPACE/logs"
+mkdir -p "$WORKING_DIR/artifacts/public"
+mkdir -p "$WORKSPACE/build/blobber_upload_dir"
+ echo "Killing the following Mutter processes: $mutter_pids" + sudo kill $mutter_pids + else + echo "No Mutter processes to kill" + fi +} + +cleanup() { + local rv=$? + if [[ -s $HOME/.xsession-errors ]]; then + # To share X issues + cp "$HOME/.xsession-errors" "$WORKING_DIR/artifacts/public/xsession-errors.log" + fi + if [ $MOZ_ENABLE_WAYLAND ]; then + cleanup_mutter + fi + if $NEED_XVFB; then + cleanup_xvfb + fi + exit $rv +} +trap cleanup EXIT INT + +# Download mozharness with exponential backoff +# curl already applies exponential backoff, but not for all +# failed cases, apparently, as we keep getting failed downloads +# with 404 code. +download_mozharness() { + local max_attempts=10 + local timeout=1 + local attempt=0 + + echo "Downloading mozharness" + + while [[ $attempt < $max_attempts ]]; do + if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then + rm -rf mozharness + if unzip -q mozharness.zip -d mozharness; then + return 0 + fi + echo "error unzipping mozharness.zip" >&2 + else + echo "failed to download mozharness zip" >&2 + fi + echo "Download failed, retrying in $timeout seconds..." >&2 + sleep $timeout + timeout=$((timeout*2)) + attempt=$((attempt+1)) + done + + fail "Failed to download and unzip mozharness" +} + +# Download mozharness if we're told to. +if [ ${MOZHARNESS_URL} ]; then + download_mozharness + rm mozharness.zip + + if ! [ -d mozharness ]; then + fail "mozharness zip did not contain mozharness/" + fi + + MOZHARNESS_PATH=`pwd`/mozharness +fi + +# run XVfb in the background, if necessary +if $NEED_XVFB; then + # note that this file is not available when run under native-worker + . $HOME/scripts/xvfb.sh + start_xvfb '1600x1200x24' 0 +fi + +if $START_VNC; then + x11vnc > "$WORKING_DIR/artifacts/public/x11vnc.log" 2>&1 & +fi + +if $NEED_WINDOW_MANAGER; then + # This is read by xsession to select the window manager + . /etc/lsb-release + if [ $DISTRIBUTION == "Ubuntu" ] && [ $RELEASE == "16.04" ]; then + echo DESKTOP_SESSION=ubuntu > $HOME/.xsessionrc + elif [ $DISTRIBUTION == "Ubuntu" ] && [ $RELEASE == "18.04" ]; then + echo export DESKTOP_SESSION=gnome > $HOME/.xsessionrc + echo export XDG_CURRENT_DESKTOP=GNOME > $HOME/.xsessionrc + if [ $MOZ_ENABLE_WAYLAND ]; then + echo export XDG_SESSION_TYPE=wayland >> $HOME/.xsessionrc + else + echo export XDG_SESSION_TYPE=x11 >> $HOME/.xsessionrc + fi + else + : + fi + + # DISPLAY has already been set above + # XXX: it would be ideal to add a semaphore logic to make sure that the + # window manager is ready + /etc/X11/Xsession 2>&1 & + + # Turn off the screen saver and screen locking + gsettings set org.gnome.desktop.screensaver idle-activation-enabled false + gsettings set org.gnome.desktop.screensaver lock-enabled false + gsettings set org.gnome.desktop.screensaver lock-delay 3600 + + # Disable the screen saver + xset s off s reset + + # This starts the gnome-keyring-daemon with an unlocked login keyring. libsecret uses this to + # store secrets. Firefox uses libsecret to store a key that protects sensitive information like + # credit card numbers. + if test -z "$DBUS_SESSION_BUS_ADDRESS" ; then + # if not found, launch a new one + eval `dbus-launch --sh-syntax` + fi + eval `echo '' | /usr/bin/gnome-keyring-daemon -r -d --unlock --components=secrets` + + # Run mutter as nested wayland compositor to provide Wayland environment + # on top of XVfb. 
+# run Xvfb in the background, if necessary
+if $NEED_XVFB; then
+    # note that this file is not available when run under native-worker
+    . $HOME/scripts/xvfb.sh
+    start_xvfb '1600x1200x24' 0
+fi
+
+if $START_VNC; then
+    x11vnc > "$WORKING_DIR/artifacts/public/x11vnc.log" 2>&1 &
+fi
+
+if $NEED_WINDOW_MANAGER; then
+    # This is read by xsession to select the window manager
+    . /etc/lsb-release
+    if [ $DISTRIBUTION == "Ubuntu" ] && [ $RELEASE == "16.04" ]; then
+        echo DESKTOP_SESSION=ubuntu > $HOME/.xsessionrc
+    elif [ $DISTRIBUTION == "Ubuntu" ] && [ $RELEASE == "18.04" ]; then
+        echo export DESKTOP_SESSION=gnome > $HOME/.xsessionrc
+        echo export XDG_CURRENT_DESKTOP=GNOME >> $HOME/.xsessionrc
+        if [ $MOZ_ENABLE_WAYLAND ]; then
+            echo export XDG_SESSION_TYPE=wayland >> $HOME/.xsessionrc
+        else
+            echo export XDG_SESSION_TYPE=x11 >> $HOME/.xsessionrc
+        fi
+    else
+        :
+    fi
+
+    # DISPLAY has already been set above
+    # XXX: it would be ideal to add a semaphore logic to make sure that the
+    # window manager is ready
+    /etc/X11/Xsession 2>&1 &
+
+    # Turn off the screen saver and screen locking
+    gsettings set org.gnome.desktop.screensaver idle-activation-enabled false
+    gsettings set org.gnome.desktop.screensaver lock-enabled false
+    gsettings set org.gnome.desktop.screensaver lock-delay 3600
+
+    # Disable the screen saver
+    xset s off s reset
+
+    # This starts the gnome-keyring-daemon with an unlocked login keyring. libsecret uses this to
+    # store secrets. Firefox uses libsecret to store a key that protects sensitive information like
+    # credit card numbers.
+    if test -z "$DBUS_SESSION_BUS_ADDRESS" ; then
+        # if not found, launch a new one
+        eval `dbus-launch --sh-syntax`
+    fi
+    eval `echo '' | /usr/bin/gnome-keyring-daemon -r -d --unlock --components=secrets`
+
+    # Run mutter as nested wayland compositor to provide Wayland environment
+    # on top of Xvfb.
+    if [ $MOZ_ENABLE_WAYLAND ]; then
+        env | grep "DISPLAY"
+        export XDG_RUNTIME_DIR=$WORKING_DIR
+        mutter --display=:0 --wayland --nested &
+        export WAYLAND_DISPLAY=wayland-0
+        retry_count=0
+        max_retries=5
+        until [ $retry_count -gt $max_retries ]; do
+            if [ -S "$XDG_RUNTIME_DIR/$WAYLAND_DISPLAY" ]; then
+                retry_count=$(($max_retries + 1))
+            else
+                retry_count=$(($retry_count + 1))
+                echo "Waiting for Mutter, retry: $retry_count"
+                sleep 2
+            fi
+        done
+    fi
+fi
+
+if [[ $NEED_COMPIZ == true ]] && [[ $RELEASE == 16.04 ]]; then
+    compiz 2>&1 &
+elif [[ $NEED_COMPIZ == true ]] && [[ $RELEASE == 18.04 ]]; then
+    compiz --replace 2>&1 &
+fi
+
+# Bug 1607713 - set cursor position to 0,0 to avoid odd libx11 interaction
+if $NEED_WINDOW_MANAGER && [ $DISPLAY == ':0' ]; then
+    xwit -root -warp 0 0
+fi
+
+maybe_start_pulse
+
+# For telemetry purposes, the build process wants information about the
+# source it is running
+export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
+export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+    config_cmds="${config_cmds} --config-file ${MOZHARNESS_PATH}/configs/${cfg}"
+done
+
+if [ -n "$MOZHARNESS_OPTIONS" ]; then
+    options=""
+    for option in $MOZHARNESS_OPTIONS; do
+        options="$options --$option"
+    done
+fi
+
+# Save the computed mozharness command to a binary which is useful for
+# interactive mode.
+mozharness_bin="$HOME/bin/run-mozharness"
+mkdir -p $(dirname $mozharness_bin)
+
+echo -e "#!/usr/bin/env bash
+# Some mozharness scripts assume base_work_dir is in
+# the current working directory, see bug 1279237
+cd "$WORKSPACE"
+cmd=\"${PYTHON:-python3} ${MOZHARNESS_PATH}/scripts/${MOZHARNESS_SCRIPT} ${config_cmds} ${options} ${@} \${@}\"
+echo \"Running: \${cmd}\"
+exec \${cmd}" > ${mozharness_bin}
+chmod +x ${mozharness_bin}
+
+# In interactive mode, the user will be prompted with options for what to do.
+if ! $TASKCLUSTER_INTERACTIVE; then
+    # run the given mozharness script and configs, but pass the rest of the
+    # arguments in from our own invocation
+    ${mozharness_bin};
+fi
+
+# Run a custom mach command (this is typically used by action tasks to run
+# harnesses in a particular way)
+if [ "$CUSTOM_MACH_COMMAND" ]; then
+    eval "'$WORKSPACE/build/venv/bin/python' '$WORKSPACE/build/tests/mach' ${CUSTOM_MACH_COMMAND} ${@}"
+    exit $?
+fi
-- 
cgit v1.2.3