path: root/src/ci
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:57:31 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:57:31 +0000
commit     dc0db358abe19481e475e10c32149b53370f1a1c (patch)
tree       ab8ce99c4b255ce46f99ef402c27916055b899ee /src/ci
parent     Releasing progress-linux version 1.71.1+dfsg1-2~progress7.99u1. (diff)
download   rustc-dc0db358abe19481e475e10c32149b53370f1a1c.tar.xz
           rustc-dc0db358abe19481e475e10c32149b53370f1a1c.zip
Merging upstream version 1.72.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/ci')
-rwxr-xr-x [-rw-r--r--]  src/ci/cpu-usage-over-time.py                                         0
-rw-r--r--               src/ci/docker/README.md                                               2
-rw-r--r--               src/ci/docker/host-x86_64/arm-android/Dockerfile                      2
-rw-r--r--               src/ci/docker/host-x86_64/dist-android/Dockerfile                     2
-rwxr-xr-x               src/ci/docker/host-x86_64/dist-various-1/install-x86_64-redox.sh      2
-rwxr-xr-x               src/ci/docker/host-x86_64/dist-various-2/build-wasi-toolchain.sh      2
-rw-r--r--               src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile                2
-rw-r--r--               src/ci/docker/host-x86_64/mingw-check/Dockerfile                      3
-rw-r--r--               src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml      1
-rwxr-xr-x [-rw-r--r--]  src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py          0
-rw-r--r--               src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile       54
-rw-r--r--               src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile              20
-rwxr-xr-x               src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/script.sh               34
-rw-r--r--               src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile               2
-rw-r--r--               src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version    2
-rwxr-xr-x               src/ci/docker/run.sh                                                  4
-rwxr-xr-x               src/ci/docker/scripts/android-sdk-manager.py                         17
-rw-r--r--               src/ci/docker/scripts/crosstool-ng-git.sh                             2
-rwxr-xr-x               src/ci/docker/scripts/fuchsia-test-runner.py                        254
-rw-r--r--               src/ci/github-actions/ci.yml                                        117
-rw-r--r--               src/ci/github-actions/problem_matchers.json                          40
-rwxr-xr-x               src/ci/run.sh                                                         2
-rwxr-xr-x               src/ci/scripts/create-doc-artifacts.sh                               42
-rwxr-xr-x [-rw-r--r--]  src/ci/stage-build.py                                               289
24 files changed, 509 insertions, 386 deletions
diff --git a/src/ci/cpu-usage-over-time.py b/src/ci/cpu-usage-over-time.py
index adfd895ea..adfd895ea 100644..100755
--- a/src/ci/cpu-usage-over-time.py
+++ b/src/ci/cpu-usage-over-time.py
diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md
index e799d7c96..852f2e209 100644
--- a/src/ci/docker/README.md
+++ b/src/ci/docker/README.md
@@ -270,7 +270,7 @@ For targets: `loongarch64-unknown-linux-gnu`
- Operating System > Linux kernel version = 5.19.16
- Binary utilities > Version of binutils = 2.40
- C-library > glibc version = 2.36
-- C compiler > gcc version = 12.2.0
+- C compiler > gcc version = 13.1.0
- C compiler > C++ = ENABLE -- to cross compile LLVM
### `mips-linux-gnu.defconfig`
diff --git a/src/ci/docker/host-x86_64/arm-android/Dockerfile b/src/ci/docker/host-x86_64/arm-android/Dockerfile
index b6b4fdc67..db11700af 100644
--- a/src/ci/docker/host-x86_64/arm-android/Dockerfile
+++ b/src/ci/docker/host-x86_64/arm-android/Dockerfile
@@ -1,4 +1,4 @@
-FROM ubuntu:22.10
+FROM ubuntu:23.04
ARG DEBIAN_FRONTEND=noninteractive
COPY scripts/android-base-apt-get.sh /scripts/
diff --git a/src/ci/docker/host-x86_64/dist-android/Dockerfile b/src/ci/docker/host-x86_64/dist-android/Dockerfile
index 9c6f64889..b09b6edb0 100644
--- a/src/ci/docker/host-x86_64/dist-android/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-android/Dockerfile
@@ -1,4 +1,4 @@
-FROM ubuntu:22.10
+FROM ubuntu:23.04
COPY scripts/android-base-apt-get.sh /scripts/
RUN sh /scripts/android-base-apt-get.sh
diff --git a/src/ci/docker/host-x86_64/dist-various-1/install-x86_64-redox.sh b/src/ci/docker/host-x86_64/dist-various-1/install-x86_64-redox.sh
index dad979223..f86402b01 100755
--- a/src/ci/docker/host-x86_64/dist-various-1/install-x86_64-redox.sh
+++ b/src/ci/docker/host-x86_64/dist-various-1/install-x86_64-redox.sh
@@ -2,5 +2,5 @@
set -ex
-curl https://static.redox-os.org/toolchain/x86_64-unknown-redox/relibc-install.tar.gz | \
+curl https://ci-mirrors.rust-lang.org/rustc/2022-11-27-relibc-install.tar.gz | \
tar --extract --gzip --directory /usr/local
diff --git a/src/ci/docker/host-x86_64/dist-various-2/build-wasi-toolchain.sh b/src/ci/docker/host-x86_64/dist-various-2/build-wasi-toolchain.sh
index 5fbce36c3..b867db6a1 100755
--- a/src/ci/docker/host-x86_64/dist-various-2/build-wasi-toolchain.sh
+++ b/src/ci/docker/host-x86_64/dist-various-2/build-wasi-toolchain.sh
@@ -10,7 +10,7 @@ bin="$PWD/clang+llvm-15.0.6-x86_64-linux-gnu-ubuntu-18.04/bin"
git clone https://github.com/WebAssembly/wasi-libc
cd wasi-libc
-git reset --hard 4362b1885fd369e042a7c0ecd8df3b6cd47fb4e8
+git reset --hard 7018e24d8fe248596819d2e884761676f3542a04
make -j$(nproc) \
CC="$bin/clang" \
NM="$bin/llvm-nm" \
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
index 04fdb15f5..c2fd2e3a9 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
@@ -55,7 +55,7 @@ RUN ./build-clang.sh
ENV CC=clang CXX=clang++
# rustc-perf version from 2023-03-15
-ENV PERF_COMMIT 9dfaa35193154b690922347ee1141a06ec87a199
+ENV PERF_COMMIT 8b2ac3042e1ff2c0074455a0a3618adef97156b1
RUN curl -LS -o perf.zip https://github.com/rust-lang/rustc-perf/archive/$PERF_COMMIT.zip && \
unzip perf.zip && \
mv rustc-perf-$PERF_COMMIT rustc-perf && \
diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
index 515890aef..85a9a5d33 100644
--- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile
+++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
@@ -45,6 +45,9 @@ ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
python3 ../x.py test --stage 0 src/tools/compiletest && \
python3 ../x.py test --stage 0 core alloc std test proc_macro && \
# Build both public and internal documentation.
+ RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 library && \
+ mkdir -p /checkout/obj/staging/doc && \
+ cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \
RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 library/test && \
/scripts/validate-toolstate.sh && \
diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml
index fa8e5b3d0..2d17cf7d4 100644
--- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml
+++ b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml
@@ -4,6 +4,7 @@ version = "0.0.0"
edition = "2021"
[workspace]
+resolver = "1"
[dependencies]
r-efi = "4.1.0"
diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py
index ffae7b0d4..ffae7b0d4 100644..100755
--- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py
+++ b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile
deleted file mode 100644
index d45ef0a7d..000000000
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile
+++ /dev/null
@@ -1,54 +0,0 @@
-FROM ubuntu:22.04
-
-ARG DEBIAN_FRONTEND=noninteractive
-RUN apt-get update && apt-get install -y --no-install-recommends \
- g++ \
- gcc-multilib \
- make \
- ninja-build \
- file \
- curl \
- ca-certificates \
- python3 \
- git \
- cmake \
- sudo \
- gdb \
- llvm-14-tools \
- llvm-14-dev \
- libedit-dev \
- libssl-dev \
- pkg-config \
- zlib1g-dev \
- xz-utils \
- nodejs \
- mingw-w64 \
- && rm -rf /var/lib/apt/lists/*
-
-COPY scripts/sccache.sh /scripts/
-RUN sh /scripts/sccache.sh
-
-# We are disabling CI LLVM since this builder is intentionally using a host
-# LLVM, rather than the typical src/llvm-project LLVM.
-ENV NO_DOWNLOAD_CI_LLVM 1
-
-# This is not the latest LLVM version, so some components required by tests may
-# be missing.
-ENV IS_NOT_LATEST_LLVM 1
-
-# Using llvm-link-shared due to libffi issues -- see #34486
-ENV RUST_CONFIGURE_ARGS \
- --build=x86_64-unknown-linux-gnu \
- --llvm-root=/usr/lib/llvm-14 \
- --enable-llvm-link-shared \
- --set rust.thin-lto-import-instr-limit=10
-
-ENV SCRIPT ../x.py --stage 1 test --exclude src/tools/tidy && \
- # Run the `mir-opt` tests again but this time for a 32-bit target.
- # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
- # both 32-bit and 64-bit outputs updated by the PR author, before
- # the PR is approved and tested for merging.
- # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
- # despite having different output on 32-bit vs 64-bit targets.
- ../x.py --stage 1 test tests/mir-opt \
- --host='' --target=i686-unknown-linux-gnu
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
index 1f28b9397..93d18bcf1 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
@@ -49,20 +49,6 @@ ENV RUST_CONFIGURE_ARGS \
--enable-llvm-link-shared \
--set rust.thin-lto-import-instr-limit=10
-# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
-ENV SCRIPT ../x.py --stage 2 test --exclude src/tools/tidy && \
- # Run the `mir-opt` tests again but this time for a 32-bit target.
- # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
- # both 32-bit and 64-bit outputs updated by the PR author, before
- # the PR is approved and tested for merging.
- # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
- # despite having different output on 32-bit vs 64-bit targets.
- ../x --stage 2 test tests/mir-opt \
- --host='' --target=i686-unknown-linux-gnu && \
- # Run the UI test suite again, but in `--pass=check` mode
- #
- # This is intended to make sure that both `--pass=check` continues to
- # work.
- #
- ../x.ps1 --stage 2 test tests/ui --pass=check \
- --host='' --target=i686-unknown-linux-gnu
+COPY host-x86_64/x86_64-gnu-llvm-14/script.sh /tmp/
+
+ENV SCRIPT /tmp/script.sh
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/script.sh b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/script.sh
new file mode 100755
index 000000000..0120fd982
--- /dev/null
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/script.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -ex
+
+# Only run the stage 1 tests on merges, not on PR CI jobs.
+if [[ -z "${PR_CI_JOB}" ]]; then
+../x.py --stage 1 test --exclude src/tools/tidy && \
+ # Run the `mir-opt` tests again but this time for a 32-bit target.
+ # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
+ # both 32-bit and 64-bit outputs updated by the PR author, before
+ # the PR is approved and tested for merging.
+ # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
+ # despite having different output on 32-bit vs 64-bit targets.
+ ../x.py --stage 1 test tests/mir-opt \
+ --host='' --target=i686-unknown-linux-gnu
+fi
+
+# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
+../x.py --stage 2 test --exclude src/tools/tidy && \
+ # Run the `mir-opt` tests again but this time for a 32-bit target.
+ # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
+ # both 32-bit and 64-bit outputs updated by the PR author, before
+ # the PR is approved and tested for merging.
+ # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
+ # despite having different output on 32-bit vs 64-bit targets.
+ ../x --stage 2 test tests/mir-opt \
+ --host='' --target=i686-unknown-linux-gnu && \
+ # Run the UI test suite again, but in `--pass=check` mode
+ #
+ # This is intended to make sure that both `--pass=check` continues to
+ # work.
+ #
+ ../x.ps1 --stage 2 test tests/ui --pass=check \
+ --host='' --target=i686-unknown-linux-gnu
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
index 960683b92..06a8f7eeb 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
@@ -1,4 +1,4 @@
-FROM ubuntu:22.10
+FROM ubuntu:23.04
ARG DEBIAN_FRONTEND=noninteractive
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
index 806935b82..b31629ad6 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version
@@ -1 +1 @@
-0.16.4
\ No newline at end of file
+0.16.8
\ No newline at end of file
diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh
index 8bea8cd4c..4b218d577 100755
--- a/src/ci/docker/run.sh
+++ b/src/ci/docker/run.sh
@@ -254,8 +254,6 @@ docker \
--env DEPLOY \
--env DEPLOY_ALT \
--env CI \
- --env TF_BUILD \
- --env BUILD_SOURCEBRANCHNAME \
--env GITHUB_ACTIONS \
--env GITHUB_REF \
--env TOOLSTATE_REPO_ACCESS_TOKEN \
@@ -264,6 +262,8 @@ docker \
--env RUST_CI_OVERRIDE_RELEASE_CHANNEL \
--env CI_JOB_NAME="${CI_JOB_NAME-$IMAGE}" \
--env BASE_COMMIT="$BASE_COMMIT" \
+ --env DIST_TRY_BUILD \
+ --env PR_CI_JOB \
--init \
--rm \
rust-ci \
diff --git a/src/ci/docker/scripts/android-sdk-manager.py b/src/ci/docker/scripts/android-sdk-manager.py
index c9e2961f6..66cba5842 100755
--- a/src/ci/docker/scripts/android-sdk-manager.py
+++ b/src/ci/docker/scripts/android-sdk-manager.py
@@ -2,6 +2,14 @@
# Simpler reimplementation of Android's sdkmanager
# Extra features of this implementation are pinning and mirroring
+import argparse
+import hashlib
+import os
+import subprocess
+import tempfile
+import urllib.request
+import xml.etree.ElementTree as ET
+
# These URLs are the Google repositories containing the list of available
# packages and their versions. The list has been generated by listing the URLs
# fetched while executing `tools/bin/sdkmanager --list`
@@ -27,15 +35,6 @@ MIRROR_BUCKET = "rust-lang-ci-mirrors"
MIRROR_BUCKET_REGION = "us-west-1"
MIRROR_BASE_DIR = "rustc/android/"
-import argparse
-import hashlib
-import os
-import subprocess
-import sys
-import tempfile
-import urllib.request
-import xml.etree.ElementTree as ET
-
class Package:
def __init__(self, path, url, sha1, deps=None):
if deps is None:
diff --git a/src/ci/docker/scripts/crosstool-ng-git.sh b/src/ci/docker/scripts/crosstool-ng-git.sh
index 449cc476f..b8d399153 100644
--- a/src/ci/docker/scripts/crosstool-ng-git.sh
+++ b/src/ci/docker/scripts/crosstool-ng-git.sh
@@ -2,7 +2,7 @@
set -ex
URL=https://github.com/crosstool-ng/crosstool-ng
-REV=943364711a650d9b9e84c1b42c91cc0265b6ab5c
+REV=227d99d7f3115f3a078595a580d2b307dcd23e93
mkdir crosstool-ng
cd crosstool-ng
diff --git a/src/ci/docker/scripts/fuchsia-test-runner.py b/src/ci/docker/scripts/fuchsia-test-runner.py
index ecef56f56..af01f9ccb 100755
--- a/src/ci/docker/scripts/fuchsia-test-runner.py
+++ b/src/ci/docker/scripts/fuchsia-test-runner.py
@@ -25,13 +25,9 @@ from typing import ClassVar, List, Optional
@dataclass
class TestEnvironment:
- rust_dir: str
+ rust_build_dir: str
sdk_dir: str
target: str
- package_server_pid: Optional[int] = None
- emu_addr: Optional[str] = None
- libstd_name: Optional[str] = None
- libtest_name: Optional[str] = None
verbose: bool = False
@staticmethod
@@ -57,7 +53,7 @@ class TestEnvironment:
@classmethod
def from_args(cls, args):
return cls(
- os.path.abspath(args.rust),
+ os.path.abspath(args.rust_build),
os.path.abspath(args.sdk),
args.target,
verbose=args.verbose,
@@ -68,13 +64,9 @@ class TestEnvironment:
with open(cls.env_file_path(), encoding="utf-8") as f:
test_env = json.loads(f.read())
return cls(
- test_env["rust_dir"],
+ test_env["rust_build_dir"],
test_env["sdk_dir"],
test_env["target"],
- libstd_name=test_env["libstd_name"],
- libtest_name=test_env["libtest_name"],
- emu_addr=test_env["emu_addr"],
- package_server_pid=test_env["package_server_pid"],
verbose=test_env["verbose"],
)
@@ -82,18 +74,6 @@ class TestEnvironment:
with open(self.env_file_path(), "w", encoding="utf-8") as f:
f.write(json.dumps(self.__dict__))
- def ssh_dir(self):
- return os.path.join(self.tmp_dir(), "ssh")
-
- def ssh_keyfile_path(self):
- return os.path.join(self.ssh_dir(), "fuchsia_ed25519")
-
- def ssh_authfile_path(self):
- return os.path.join(self.ssh_dir(), "fuchsia_authorized_keys")
-
- def vdl_output_path(self):
- return os.path.join(self.tmp_dir(), "vdl_output")
-
def package_server_log_path(self):
return os.path.join(self.tmp_dir(), "package_server_log")
@@ -113,7 +93,9 @@ class TestEnvironment:
def libs_dir(self):
return os.path.join(
- self.rust_dir,
+ self.rust_build_dir,
+ "host",
+ "stage2",
"lib",
)
@@ -171,7 +153,6 @@ class TestEnvironment:
def home_dir(self):
return os.path.join(self.tmp_dir(), "user-home")
-
def start_ffx_isolation(self):
# Most of this is translated directly from ffx's isolate library
os.mkdir(self.ffx_isolate_dir())
@@ -213,21 +194,19 @@ class TestEnvironment:
# Set configs
configs = {
"log.enabled": "true",
- "ssh.pub": self.ssh_authfile_path(),
- "ssh.priv": self.ssh_keyfile_path(),
"test.is_isolated": "true",
"test.experimental_structured_output": "true",
}
for key, value in configs.items():
subprocess.check_call(
[
- self.tool_path("ffx"),
+ ffx_path,
"config",
"set",
key,
value,
],
- env=self.ffx_cmd_env(),
+ env=ffx_env,
stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
@@ -249,6 +228,7 @@ class TestEnvironment:
self.tool_path("ffx"),
"daemon",
"stop",
+ "-w",
],
env=self.ffx_cmd_env(),
stdout=self.subprocess_output(),
@@ -276,87 +256,62 @@ class TestEnvironment:
elif len(os.listdir(self.tmp_dir())) != 0:
raise Exception(f"Temp directory is not clean (in {self.tmp_dir()})")
- os.mkdir(self.ssh_dir())
os.mkdir(self.output_dir())
- # Find libstd and libtest
- libstd_paths = glob.glob(os.path.join(self.rustlibs_dir(), "libstd-*.so"))
- libtest_paths = glob.glob(os.path.join(self.rustlibs_dir(), "libtest-*.so"))
-
- if not libstd_paths:
- raise Exception(f"Failed to locate libstd (in {self.rustlibs_dir()})")
-
- if not libtest_paths:
- raise Exception(f"Failed to locate libtest (in {self.rustlibs_dir()})")
+ ffx_path = self.tool_path("ffx")
+ ffx_env = self.ffx_cmd_env()
- self.libstd_name = os.path.basename(libstd_paths[0])
- self.libtest_name = os.path.basename(libtest_paths[0])
+ # Start ffx isolation
+ self.log_info("Starting ffx isolation...")
+ self.start_ffx_isolation()
- # Generate SSH keys for the emulator to use
- self.log_info("Generating SSH keys...")
+ # Stop any running emulators (there shouldn't be any)
subprocess.check_call(
[
- "ssh-keygen",
- "-N",
- "",
- "-t",
- "ed25519",
- "-f",
- self.ssh_keyfile_path(),
- "-C",
- "Generated by fuchsia-test-runner.py",
+ ffx_path,
+ "emu",
+ "stop",
+ "--all",
],
+ env=ffx_env,
stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
- authfile_contents = subprocess.check_output(
+
+ # Start emulator
+ self.log_info("Starting emulator...")
+ product_bundle = "terminal.qemu-" + self.triple_to_arch(self.target)
+ subprocess.check_call(
[
- "ssh-keygen",
- "-y",
- "-f",
- self.ssh_keyfile_path(),
+ ffx_path,
+ "product-bundle",
+ "get",
+ product_bundle,
],
+ env=ffx_env,
+ stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
- with open(self.ssh_authfile_path(), "wb") as authfile:
- authfile.write(authfile_contents)
-
- # Start ffx isolation
- self.log_info("Starting ffx isolation...")
- self.start_ffx_isolation()
-
- # Start emulator (this will generate the vdl output)
- self.log_info("Starting emulator...")
+ # FIXME: condition --accel hyper on target arch matching host arch
subprocess.check_call(
[
- self.tool_path("fvdl"),
- "--sdk",
+ ffx_path,
+ "emu",
"start",
- "--tuntap",
+ product_bundle,
"--headless",
- "--nointeractive",
- "--ssh",
- self.ssh_dir(),
- "--vdl-output",
- self.vdl_output_path(),
- "--emulator-log",
+ "--log",
self.emulator_log_path(),
- "--image-name",
- "qemu-" + self.triple_to_arch(self.target),
+ "--net",
+ "tap",
+ "--accel",
+ "hyper",
],
+ env=ffx_env,
stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
- # Parse vdl output for relevant information
- with open(self.vdl_output_path(), encoding="utf-8") as f:
- vdl_content = f.read()
- matches = re.search(
- r'network_address:\s+"\[([0-9a-f]{1,4}:(:[0-9a-f]{1,4}){4}%qemu)\]"',
- vdl_content,
- )
- self.emu_addr = matches.group(1)
-
# Create new package repo
self.log_info("Creating package repo...")
subprocess.check_call(
@@ -370,61 +325,46 @@ class TestEnvironment:
stderr=self.subprocess_output(),
)
- # Start package server
- self.log_info("Starting package server...")
- with open(
- self.package_server_log_path(), "w", encoding="utf-8"
- ) as package_server_log:
- # We want this to be a long-running process that persists after the script finishes
- # pylint: disable=consider-using-with
- self.package_server_pid = subprocess.Popen(
- [
- self.tool_path("pm"),
- "serve",
- "-vt",
- "-repo",
- self.repo_dir(),
- "-l",
- ":8084",
- ],
- stdout=package_server_log,
- stderr=package_server_log,
- ).pid
-
- # Register package server with emulator
- self.log_info("Registering package server...")
- ssh_client = subprocess.check_output(
+ # Add repo
+ subprocess.check_call(
[
- "ssh",
- "-i",
- self.ssh_keyfile_path(),
- "-o",
- "StrictHostKeyChecking=accept-new",
- self.emu_addr,
- "-f",
- "echo $SSH_CLIENT",
+ ffx_path,
+ "repository",
+ "add-from-pm",
+ self.repo_dir(),
+ "--repository",
+ self.TEST_REPO_NAME,
],
- text=True,
+ env=ffx_env,
+ stdout=self.subprocess_output(),
+ stderr=self.subprocess_output(),
)
- repo_addr = ssh_client.split()[0].replace("%", "%25")
- repo_url = f"http://[{repo_addr}]:8084/config.json"
+
+ # Start repository server
+ subprocess.check_call(
+ [ffx_path, "repository", "server", "start", "--address", "[::]:0"],
+ env=ffx_env,
+ stdout=self.subprocess_output(),
+ stderr=self.subprocess_output(),
+ )
+
+ # Register with newly-started emulator
subprocess.check_call(
[
- "ssh",
- "-i",
- self.ssh_keyfile_path(),
- "-o",
- "StrictHostKeyChecking=accept-new",
- self.emu_addr,
- "-f",
- f"pkgctl repo add url -f 1 -n {self.TEST_REPO_NAME} {repo_url}",
+ ffx_path,
+ "target",
+ "repository",
+ "register",
+ "--repository",
+ self.TEST_REPO_NAME,
],
+ env=ffx_env,
stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
# Create lockfiles
- open(self.pm_lockfile_path(), 'a').close()
+ open(self.pm_lockfile_path(), "a").close()
# Write to file
self.write_to_file()
@@ -458,6 +398,7 @@ class TestEnvironment:
],
use: [
{{ storage: "data", path: "/data" }},
+ {{ storage: "tmp", path: "/tmp" }},
{{ protocol: [ "fuchsia.process.Launcher" ] }},
{{ protocol: [ "fuchsia.posix.socket.Provider" ] }}
],
@@ -471,8 +412,8 @@ class TestEnvironment:
meta/package={package_dir}/meta/package
meta/{package_name}.cm={package_dir}/meta/{package_name}.cm
bin/{exe_name}={bin_path}
- lib/{libstd_name}={rust_dir}/lib/rustlib/{rustlib_dir}/lib/{libstd_name}
- lib/{libtest_name}={rust_dir}/lib/rustlib/{rustlib_dir}/lib/{libtest_name}
+ lib/{libstd_name}={libstd_path}
+ lib/{libtest_name}={libtest_path}
lib/ld.so.1={sdk_dir}/arch/{target_arch}/sysroot/dist/lib/ld.so.1
lib/libfdio.so={sdk_dir}/arch/{target_arch}/dist/libfdio.so
"""
@@ -502,6 +443,16 @@ class TestEnvironment:
bin_path = os.path.abspath(args.bin_path)
+ # Find libstd and libtest
+ libstd_paths = glob.glob(os.path.join(self.rustlibs_dir(), "libstd-*.so"))
+ libtest_paths = glob.glob(os.path.join(self.rustlibs_dir(), "libtest-*.so"))
+
+ if not libstd_paths:
+ raise Exception(f"Failed to locate libstd (in {self.rustlibs_dir()})")
+
+ if not libtest_paths:
+ raise Exception(f"Failed to locate libtest (in {self.rustlibs_dir()})")
+
# Build a unique, deterministic name for the test using the name of the
# binary and the last 6 hex digits of the hash of the full path
def path_checksum(path):
@@ -568,8 +519,11 @@ class TestEnvironment:
env_vars += f'\n "{var_name}={var_value}",'
# Default to no backtrace for test suite
- if os.getenv("RUST_BACKTRACE") == None:
- env_vars += f'\n "RUST_BACKTRACE=0",'
+ if os.getenv("RUST_BACKTRACE") is None:
+ env_vars += '\n "RUST_BACKTRACE=0",'
+
+ # Use /tmp as the test temporary directory
+ env_vars += f'\n "RUST_TEST_TMPDIR=/tmp",'
cml.write(
self.CML_TEMPLATE.format(env_vars=env_vars, exe_name=exe_name)
@@ -601,11 +555,12 @@ class TestEnvironment:
exe_name=exe_name,
package_dir=package_dir,
package_name=package_name,
- rust_dir=self.rust_dir,
- rustlib_dir=self.target,
+ target=self.target,
sdk_dir=self.sdk_dir,
- libstd_name=self.libstd_name,
- libtest_name=self.libtest_name,
+ libstd_name=os.path.basename(libstd_paths[0]),
+ libtest_name=os.path.basename(libtest_paths[0]),
+ libstd_path=libstd_paths[0],
+ libtest_path=libtest_paths[0],
target_arch=self.triple_to_arch(self.target),
)
)
@@ -642,7 +597,7 @@ class TestEnvironment:
log("Publishing package to repo...")
# Publish package to repo
- with open(self.pm_lockfile_path(), 'w') as pm_lockfile:
+ with open(self.pm_lockfile_path(), "w") as pm_lockfile:
fcntl.lockf(pm_lockfile.fileno(), fcntl.LOCK_EX)
subprocess.check_call(
[
@@ -776,20 +731,15 @@ class TestEnvironment:
else:
self.log_debug("No ffx daemon log found")
- # Stop package server
- self.log_info("Stopping package server...")
- os.kill(self.package_server_pid, signal.SIGTERM)
-
# Shut down the emulator
self.log_info("Stopping emulator...")
subprocess.check_call(
[
- self.tool_path("fvdl"),
- "--sdk",
- "kill",
- "--launched-proto",
- self.vdl_output_path(),
+ self.tool_path("ffx"),
+ "emu",
+ "stop",
],
+ env=self.ffx_cmd_env(),
stdout=self.subprocess_output(),
stderr=self.subprocess_output(),
)
@@ -966,8 +916,8 @@ def main():
"start", help="initializes the testing environment"
)
start_parser.add_argument(
- "--rust",
- help="the directory of the installed Rust compiler for Fuchsia",
+ "--rust-build",
+ help="the current compiler build directory (`$RUST_SRC/build` by default)",
required=True,
)
start_parser.add_argument(
@@ -1045,9 +995,7 @@ def main():
)
debug_parser.set_defaults(func=debug)
- syslog_parser = subparsers.add_parser(
- "syslog", help="prints the device syslog"
- )
+ syslog_parser = subparsers.add_parser("syslog", help="prints the device syslog")
syslog_parser.set_defaults(func=syslog)
args = parser.parse_args()
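
The updated runner above drives everything through `ffx` instead of `fvdl`, `ssh`, and `pm`. A condensed sketch of that bring-up sequence, assuming the `ffx` tool shipped with the Fuchsia SDK and with arguments abbreviated from the hunks above:

import subprocess

def bring_up(ffx_path, ffx_env, product_bundle, repo_dir, repo_name):
    # Stop leftover emulators, fetch the product bundle, start the emulator,
    # publish the package repo, and register it with the running target.
    steps = [
        [ffx_path, "emu", "stop", "--all"],
        [ffx_path, "product-bundle", "get", product_bundle],
        [ffx_path, "emu", "start", product_bundle, "--headless",
         "--net", "tap", "--accel", "hyper"],
        [ffx_path, "repository", "add-from-pm", repo_dir,
         "--repository", repo_name],
        [ffx_path, "repository", "server", "start", "--address", "[::]:0"],
        [ffx_path, "target", "repository", "register",
         "--repository", repo_name],
    ]
    for step in steps:
        subprocess.check_call(step, env=ffx_env)
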
diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml
index fd619467f..8027e6996 100644
--- a/src/ci/github-actions/ci.yml
+++ b/src/ci/github-actions/ci.yml
@@ -34,6 +34,8 @@ x--expand-yaml-anchors--remove:
- &shared-ci-variables
CI_JOB_NAME: ${{ matrix.name }}
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
+ # commit of PR sha or commit sha. `GITHUB_SHA` is not accurate for PRs.
+ HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
- &public-variables
SCCACHE_BUCKET: rust-lang-ci-sccache2
@@ -145,13 +147,6 @@ x--expand-yaml-anchors--remove:
run: src/ci/scripts/verify-channel.sh
<<: *step
- - name: configure GitHub Actions to kill the build when outdated
- uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
- with:
- github_token: "${{ secrets.github_token }}"
- if: success() && !env.SKIP_JOB && github.ref != 'refs/heads/try' && github.ref != 'refs/heads/try-perf'
- <<: *step
-
- name: collect CPU statistics
run: src/ci/scripts/collect-cpu-stats.sh
<<: *step
@@ -229,6 +224,20 @@ x--expand-yaml-anchors--remove:
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}
<<: *step
+ - name: create github artifacts
+ run: src/ci/scripts/create-doc-artifacts.sh
+ <<: *step
+
+ - name: upload artifacts to github
+ uses: actions/upload-artifact@v3
+ with:
+ # name is set in previous step
+ name: ${{ env.DOC_ARTIFACT_NAME }}
+ path: obj/artifacts/doc
+ if-no-files-found: ignore
+ retention-days: 5
+ <<: *step
+
- name: upload artifacts to S3
run: src/ci/scripts/upload-artifacts.sh
env:
@@ -289,14 +298,20 @@ defaults:
# shell is PowerShell.)
shell: bash
+concurrency:
+ # For a given workflow, if we push to the same branch, cancel all previous builds on that branch.
+ # We add an exception for try builds (try branch) and unrolled rollup builds (try-perf), which
+ # are all triggered on the same branch, but which should be able to run concurrently.
+ group: ${{ github.workflow }}-${{ ((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.sha) || github.ref }}
+ cancel-in-progress: true
+
jobs:
pr:
- permissions:
- actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
name: PR - ${{ matrix.name }}
env:
<<: [*shared-ci-variables, *public-variables]
+ PR_CI_JOB: 1
if: github.event_name == 'pull_request'
continue-on-error: ${{ matrix.name == 'mingw-check-tidy' }}
strategy:
@@ -315,8 +330,6 @@ jobs:
<<: *job-linux-16c
auto:
- permissions:
- actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
name: auto - ${{ matrix.name }}
env:
@@ -362,18 +375,6 @@ jobs:
- name: dist-loongarch64-linux
<<: *job-linux-8c
- - name: dist-mips-linux
- <<: *job-linux-8c
-
- - name: dist-mips64-linux
- <<: *job-linux-8c
-
- - name: dist-mips64el-linux
- <<: *job-linux-8c
-
- - name: dist-mipsel-linux
- <<: *job-linux-8c
-
- name: dist-powerpc-linux
<<: *job-linux-8c
@@ -473,11 +474,6 @@ jobs:
RUST_BACKTRACE: 1
<<: *job-linux-8c
- - name: x86_64-gnu-llvm-14-stage1
- env:
- RUST_BACKTRACE: 1
- <<: *job-linux-8c
-
- name: x86_64-gnu-nopt
<<: *job-linux-4c
@@ -582,40 +578,22 @@ jobs:
# Windows Builders #
######################
- - name: x86_64-msvc-1
- env:
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
- SCRIPT: make ci-subset-1
- <<: *job-windows-8c
-
- - name: x86_64-msvc-2
+ - name: x86_64-msvc
env:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
- SCRIPT: make ci-subset-2
+ SCRIPT: make ci-msvc
<<: *job-windows-8c
- - name: i686-msvc-1
+ - name: i686-msvc
env:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
- SCRIPT: make ci-subset-1
+ SCRIPT: make ci-msvc
<<: *job-windows-8c
- - name: i686-msvc-2
+ - name: x86_64-msvc-ext
env:
- RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
- SCRIPT: make ci-subset-2
- <<: *job-windows-8c
-
- - name: x86_64-msvc-cargo
- env:
- SCRIPT: python x.py --stage 2 test src/tools/cargotest src/tools/cargo
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld
- <<: *job-windows-8c
-
- - name: x86_64-msvc-tools
- env:
- SCRIPT: src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstate/toolstates.json
+ SCRIPT: python x.py --stage 2 test src/tools/cargotest src/tools/cargo && src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
+ RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld --save-toolstates=/tmp/toolstate/toolstates.json
DEPLOY_TOOLSTATES_JSON: toolstates-windows.json
<<: *job-windows-8c
@@ -635,41 +613,19 @@ jobs:
# came from the mingw-w64 SourceForge download site. Unfortunately
# SourceForge is notoriously flaky, so we mirror it on our own infrastructure.
- - name: i686-mingw-1
+ - name: i686-mingw
env:
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
- SCRIPT: make ci-mingw-subset-1
- # We are intentionally allowing an old toolchain on this builder (and that's
- # incompatible with LLVM downloads today).
- NO_DOWNLOAD_CI_LLVM: 1
- CUSTOM_MINGW: 1
- <<: *job-windows-8c
-
- - name: i686-mingw-2
- env:
- RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
- SCRIPT: make ci-mingw-subset-2
- # We are intentionally allowing an old toolchain on this builder (and that's
- # incompatible with LLVM downloads today).
- NO_DOWNLOAD_CI_LLVM: 1
- CUSTOM_MINGW: 1
- <<: *job-windows-8c
-
- - name: x86_64-mingw-1
- env:
- SCRIPT: make ci-mingw-subset-1
- RUST_CONFIGURE_ARGS: >-
- --build=x86_64-pc-windows-gnu
- --enable-profiler
+ SCRIPT: make ci-mingw
# We are intentionally allowing an old toolchain on this builder (and that's
# incompatible with LLVM downloads today).
NO_DOWNLOAD_CI_LLVM: 1
CUSTOM_MINGW: 1
<<: *job-windows-8c
- - name: x86_64-mingw-2
+ - name: x86_64-mingw
env:
- SCRIPT: make ci-mingw-subset-2
+ SCRIPT: make ci-mingw
RUST_CONFIGURE_ARGS: >-
--build=x86_64-pc-windows-gnu
--enable-profiler
@@ -752,11 +708,10 @@ jobs:
<<: *job-windows-8c
try:
- permissions:
- actions: write # for rust-lang/simpleinfra/github-actions/cancel-outdated-builds
<<: *base-ci-job
name: try - ${{ matrix.name }}
env:
+ DIST_TRY_BUILD: 1
<<: [*shared-ci-variables, *prod-variables]
if: github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust'
strategy:
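
The concurrency group added to ci.yml above keys try and try-perf builds by commit and every other build by branch ref, so pushes to the same branch cancel each other while try builds run concurrently. A minimal Python sketch of how that GitHub Actions expression resolves (function and argument names here are illustrative, not part of the workflow):

def concurrency_group(workflow: str, ref: str, sha: str) -> str:
    # Mirrors: workflow-sha for refs/heads/try and refs/heads/try-perf,
    # workflow-ref otherwise.
    if ref in ("refs/heads/try", "refs/heads/try-perf"):
        return f"{workflow}-{sha}"
    return f"{workflow}-{ref}"

# Two pushes to the same branch share a group (second cancels the first),
# while two try builds with different commits get distinct groups.
assert concurrency_group("CI", "refs/heads/master", "abc123") == "CI-refs/heads/master"
assert concurrency_group("CI", "refs/heads/try", "abc123") == "CI-abc123"
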
diff --git a/src/ci/github-actions/problem_matchers.json b/src/ci/github-actions/problem_matchers.json
index 37561924b..b6c7ace84 100644
--- a/src/ci/github-actions/problem_matchers.json
+++ b/src/ci/github-actions/problem_matchers.json
@@ -10,6 +10,46 @@
"message": 3
}
]
+ },
+ {
+ "owner": "cargo-common",
+ "pattern": [
+ {
+ "regexp": "^(warning|warn|error)(\\[(\\S*)\\])?: (.*)$",
+ "severity": 1,
+ "message": 4,
+ "code": 3
+ },
+ {
+ "regexp": "^\\s+-->\\s(\\S+):(\\d+):(\\d+)$",
+ "file": 1,
+ "line": 2,
+ "column": 3
+ }
+ ]
+ },
+ {
+ "owner": "compiler-panic",
+ "pattern": [
+ {
+ "regexp": "error: internal compiler error: (.*):(\\d+):(\\d+): (.*)$",
+ "message": 4,
+ "file": 1,
+ "line": 2,
+ "column": 3
+ }
+ ]
+ },
+ {
+ "owner": "cargo-fmt",
+ "pattern": [
+ {
+ "regexp": "^(Diff in (\\S+)) at line (\\d+):",
+ "message": 1,
+ "file": 2,
+ "line": 3
+ }
+ ]
}
]
}
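
The new `cargo-common` matcher pairs a diagnostic line with the `-->` location line that follows it. A small sketch, using Python's `re` module and a hypothetical compiler message, of what the two patterns above capture:

import re

# Regexes copied from the "cargo-common" matcher above (JSON escaping removed).
severity_re = re.compile(r"^(warning|warn|error)(\[(\S*)\])?: (.*)$")
location_re = re.compile(r"^\s+-->\s(\S+):(\d+):(\d+)$")

# Hypothetical rustc output; the matcher turns this pair into a GitHub annotation.
diagnostic = "error[E0308]: mismatched types"
location = "  --> src/main.rs:4:20"

sev = severity_re.match(diagnostic)
loc = location_re.match(location)
print(sev.group(1), sev.group(3), sev.group(4))  # error E0308 mismatched types
print(loc.group(1), loc.group(2), loc.group(3))  # src/main.rs 4 20
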
diff --git a/src/ci/run.sh b/src/ci/run.sh
index 966af3abc..48fb40d6a 100755
--- a/src/ci/run.sh
+++ b/src/ci/run.sh
@@ -53,6 +53,7 @@ if ! isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch
HAS_METRICS=1
fi
+RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-verbose-configure"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-sccache"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps"
@@ -187,6 +188,7 @@ else
fi
if [ ! -z "$SCRIPT" ]; then
+ echo "Executing ${SCRIPT}"
sh -x -c "$SCRIPT"
else
do_make() {
diff --git a/src/ci/scripts/create-doc-artifacts.sh b/src/ci/scripts/create-doc-artifacts.sh
new file mode 100755
index 000000000..2516b0d85
--- /dev/null
+++ b/src/ci/scripts/create-doc-artifacts.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+# Compress doc artifacts and name them based on the commit, or the date if
+# commit is not available.
+
+set -euox pipefail
+
+# Try to get short commit hash, fallback to date
+if [ -n "$HEAD_SHA" ]; then
+ short_rev=$(echo "${HEAD_SHA}" | cut -c1-8)
+else
+ short_rev=$(git rev-parse --short HEAD || date -u +'%Y-%m-%dT%H%M%SZ')
+fi
+
+# Try to get branch, fallback to none
+branch=$(git branch --show-current || echo)
+
+if [ -n "$branch" ]; then
+ branch="${branch}-"
+fi
+
+if [ "${GITHUB_EVENT_NAME:=none}" = "pull_request" ]; then
+ pr_num=$(echo "$GITHUB_REF_NAME" | cut -d'/' -f1)
+ name="doc-${pr_num}-${short_rev}"
+else
+ name="doc-${branch}${short_rev}"
+fi
+
+
+if [ -d "obj/staging/doc" ]; then
+ mkdir -p obj/artifacts/doc
+
+ # Level 12 seems to give a good tradeoff of time vs. space savings
+ ZSTD_CLEVEL=12 ZSTD_NBTHREADS=4 \
+ tar --zstd -cf "obj/artifacts/doc/${name}.tar.zst" -C obj/staging/doc .
+
+ ls -lh obj/artifacts/doc
+fi
+
+# Set this environment variable for future use if running in CI
+if [ -n "$GITHUB_ENV" ]; then
+ echo "DOC_ARTIFACT_NAME=${name}" >> "$GITHUB_ENV"
+fi
diff --git a/src/ci/stage-build.py b/src/ci/stage-build.py
index 8d03d3759..3bb3b1418 100644..100755
--- a/src/ci/stage-build.py
+++ b/src/ci/stage-build.py
@@ -22,6 +22,7 @@ from typing import Callable, ContextManager, Dict, Iterable, Iterator, List, Opt
Tuple, Union
PGO_HOST = os.environ["PGO_HOST"]
+CHANNEL = os.environ.get("RUST_RELEASE_CHANNEL", "")
LOGGER = logging.getLogger("stage-build")
@@ -48,6 +49,11 @@ RUSTC_PGO_CRATES = [
LLVM_BOLT_CRATES = LLVM_PGO_CRATES
+
+def is_try_build() -> bool:
+ return os.environ.get("DIST_TRY_BUILD", "0") != "0"
+
+
class Pipeline:
# Paths
def checkout_path(self) -> Path:
@@ -119,6 +125,12 @@ class Pipeline:
def metrics_path(self) -> Path:
return self.build_root() / "build" / "metrics.json"
+ def executable_extension(self) -> str:
+ raise NotImplementedError
+
+ def skipped_tests(self) -> Iterable[str]:
+ return ()
+
class LinuxPipeline(Pipeline):
def checkout_path(self) -> Path:
@@ -147,6 +159,13 @@ class LinuxPipeline(Pipeline):
def supports_bolt(self) -> bool:
return True
+ def executable_extension(self) -> str:
+ return ""
+
+ def skipped_tests(self) -> Iterable[str]:
+ # This test fails because of linker errors, as of June 2023.
+ yield "tests/ui/process/nofile-limit.rs"
+
class WindowsPipeline(Pipeline):
def __init__(self):
@@ -175,7 +194,7 @@ class WindowsPipeline(Pipeline):
def build_rustc_perf(self):
# rustc-perf version from 2023-03-15
- perf_commit = "9dfaa35193154b690922347ee1141a06ec87a199"
+ perf_commit = "8b2ac3042e1ff2c0074455a0a3618adef97156b1"
rustc_perf_zip_path = self.opt_artifacts() / "perf.zip"
def download_rustc_perf():
@@ -206,6 +225,13 @@ class WindowsPipeline(Pipeline):
def supports_bolt(self) -> bool:
return False
+ def executable_extension(self) -> str:
+ return ".exe"
+
+ def skipped_tests(self) -> Iterable[str]:
+ # This test fails as of June 2023
+ yield "tests\\codegen\\vec-shrink-panik.rs"
+
def get_timestamp() -> float:
return time.time()
@@ -398,9 +424,9 @@ def delete_directory(path: Path):
shutil.rmtree(path)
-def unpack_archive(archive: Path):
+def unpack_archive(archive: Path, target_dir: Optional[Path] = None):
LOGGER.info(f"Unpacking archive `{archive}`")
- shutil.unpack_archive(archive)
+ shutil.unpack_archive(str(archive), extract_dir=str(target_dir) if target_dir is not None else None)
def download_file(src: str, target: Path):
@@ -415,7 +441,7 @@ def retry_action(action, name: str, max_fails: int = 5):
try:
action()
return
- except:
+ except BaseException: # also catch ctrl+c/sysexit
LOGGER.error(f"Action `{name}` has failed\n{traceback.format_exc()}")
raise Exception(f"Action `{name}` has failed after {max_fails} attempts")
@@ -450,6 +476,7 @@ def cmd(
)
return subprocess.run(args, env=environment, check=True)
+
class BenchmarkRunner:
def run_rustc(self, pipeline: Pipeline):
raise NotImplementedError
@@ -460,6 +487,7 @@ class BenchmarkRunner:
def run_bolt(self, pipeline: Pipeline):
raise NotImplementedError
+
class DefaultBenchmarkRunner(BenchmarkRunner):
def run_rustc(self, pipeline: Pipeline):
# Here we're profiling the `rustc` frontend, so we also include `Check`.
@@ -473,6 +501,7 @@ class DefaultBenchmarkRunner(BenchmarkRunner):
LLVM_PROFILE_FILE=str(pipeline.rustc_profile_template_path())
)
)
+
def run_llvm(self, pipeline: Pipeline):
run_compiler_benchmarks(
pipeline,
@@ -489,6 +518,7 @@ class DefaultBenchmarkRunner(BenchmarkRunner):
crates=LLVM_BOLT_CRATES
)
+
def run_compiler_benchmarks(
pipeline: Pipeline,
profiles: List[str],
@@ -591,11 +621,17 @@ def get_files(directory: Path, filter: Optional[Callable[[Path], bool]] = None)
yield path
-def build_rustc(
+def bootstrap_build(
pipeline: Pipeline,
args: List[str],
- env: Optional[Dict[str, str]] = None
+ env: Optional[Dict[str, str]] = None,
+ targets: Iterable[str] = ("library/std", )
):
+ if env is None:
+ env = {}
+ else:
+ env = dict(env)
+ env["RUST_BACKTRACE"] = "1"
arguments = [
sys.executable,
pipeline.checkout_path() / "x.py",
@@ -603,8 +639,7 @@ def build_rustc(
"--target", PGO_HOST,
"--host", PGO_HOST,
"--stage", "2",
- "library/std"
- ] + args
+ ] + list(targets) + args
cmd(arguments, env=env)
@@ -645,10 +680,8 @@ def gather_llvm_profiles(pipeline: Pipeline, runner: BenchmarkRunner):
def gather_rustc_profiles(pipeline: Pipeline, runner: BenchmarkRunner):
LOGGER.info("Running benchmarks with PGO instrumented rustc")
-
runner.run_rustc(pipeline)
-
profile_path = pipeline.rustc_profile_merged_file()
LOGGER.info(f"Merging Rustc PGO profiles to {profile_path}")
cmd([
@@ -749,95 +782,221 @@ def record_metrics(pipeline: Pipeline, timer: Timer):
if metrics is None:
return
llvm_steps = tuple(metrics.find_all_by_type("bootstrap::llvm::Llvm"))
- assert len(llvm_steps) > 0
llvm_duration = sum(step.duration for step in llvm_steps)
rustc_steps = tuple(metrics.find_all_by_type("bootstrap::compile::Rustc"))
- assert len(rustc_steps) > 0
rustc_duration = sum(step.duration for step in rustc_steps)
# The LLVM step is part of the Rustc step
- rustc_duration -= llvm_duration
+ rustc_duration = max(0, rustc_duration - llvm_duration)
- timer.add_duration("LLVM", llvm_duration)
- timer.add_duration("Rustc", rustc_duration)
+ if llvm_duration > 0:
+ timer.add_duration("LLVM", llvm_duration)
+ if rustc_duration > 0:
+ timer.add_duration("Rustc", rustc_duration)
log_metrics(metrics)
-def execute_build_pipeline(timer: Timer, pipeline: Pipeline, runner: BenchmarkRunner, final_build_args: List[str]):
+def run_tests(pipeline: Pipeline):
+ """
+ After `dist` is executed, we extract its archived components into a sysroot directory,
+ and then use that extracted rustc as a stage0 compiler.
+ Then we run a subset of tests using that compiler, to have a basic smoke test which checks
+ whether the optimization pipeline hasn't broken something.
+ """
+ build_dir = pipeline.build_root() / "build"
+ dist_dir = build_dir / "dist"
+
+ def extract_dist_dir(name: str) -> Path:
+ target_dir = build_dir / "optimized-dist"
+ target_dir.mkdir(parents=True, exist_ok=True)
+ unpack_archive(dist_dir / f"{name}.tar.xz", target_dir=target_dir)
+ extracted_path = target_dir / name
+ assert extracted_path.is_dir()
+ return extracted_path
+
+ # Extract rustc, libstd, cargo and src archives to create the optimized sysroot
+ rustc_dir = extract_dist_dir(f"rustc-{CHANNEL}-{PGO_HOST}") / "rustc"
+ libstd_dir = extract_dist_dir(f"rust-std-{CHANNEL}-{PGO_HOST}") / f"rust-std-{PGO_HOST}"
+ cargo_dir = extract_dist_dir(f"cargo-{CHANNEL}-{PGO_HOST}") / "cargo"
+ extracted_src_dir = extract_dist_dir(f"rust-src-{CHANNEL}") / "rust-src"
+
+ # We need to manually copy libstd to the extracted rustc sysroot
+ shutil.copytree(
+ libstd_dir / "lib" / "rustlib" / PGO_HOST / "lib",
+ rustc_dir / "lib" / "rustlib" / PGO_HOST / "lib"
+ )
+
+ # Extract sources - they aren't in the `rustc-{CHANNEL}-{host}` tarball, so we need to manually copy libstd
+ # sources to the extracted sysroot. We need sources available so that `-Zsimulate-remapped-rust-src-base`
+ # works correctly.
+ shutil.copytree(
+ extracted_src_dir / "lib" / "rustlib" / "src",
+ rustc_dir / "lib" / "rustlib" / "src"
+ )
+
+ rustc_path = rustc_dir / "bin" / f"rustc{pipeline.executable_extension()}"
+ assert rustc_path.is_file()
+ cargo_path = cargo_dir / "bin" / f"cargo{pipeline.executable_extension()}"
+ assert cargo_path.is_file()
+
+ # Specify path to a LLVM config so that LLVM is not rebuilt.
+ # It doesn't really matter which LLVM config we choose, because no sysroot will be compiled.
+ llvm_config = pipeline.build_artifacts() / "llvm" / "bin" / f"llvm-config{pipeline.executable_extension()}"
+ assert llvm_config.is_file()
+
+ config_content = f"""profile = "user"
+changelog-seen = 2
+
+[build]
+rustc = "{rustc_path.as_posix()}"
+cargo = "{cargo_path.as_posix()}"
+
+[target.{PGO_HOST}]
+llvm-config = "{llvm_config.as_posix()}"
+"""
+ logging.info(f"Using following `config.toml` for running tests:\n{config_content}")
+
+ # Simulate a stage 0 compiler with the extracted optimized dist artifacts.
+ with open("config.toml", "w") as f:
+ f.write(config_content)
+
+ args = [
+ sys.executable,
+ pipeline.checkout_path() / "x.py",
+ "test",
+ "--stage", "0",
+ "tests/assembly",
+ "tests/codegen",
+ "tests/codegen-units",
+ "tests/incremental",
+ "tests/mir-opt",
+ "tests/pretty",
+ "tests/run-pass-valgrind",
+ "tests/ui",
+ ]
+ for test_path in pipeline.skipped_tests():
+ args.extend(["--exclude", test_path])
+ cmd(args=args, env=dict(
+ COMPILETEST_FORCE_STAGE0="1"
+ ))
+
+
+def execute_build_pipeline(timer: Timer, pipeline: Pipeline, runner: BenchmarkRunner, dist_build_args: List[str]):
# Clear and prepare tmp directory
shutil.rmtree(pipeline.opt_artifacts(), ignore_errors=True)
os.makedirs(pipeline.opt_artifacts(), exist_ok=True)
pipeline.build_rustc_perf()
- # Stage 1: Build rustc + PGO instrumented LLVM
- with timer.section("Stage 1 (LLVM PGO)") as stage1:
- with stage1.section("Build rustc and LLVM") as rustc_build:
- build_rustc(pipeline, args=[
- "--llvm-profile-generate"
- ], env=dict(
- LLVM_PROFILE_DIR=str(pipeline.llvm_profile_dir_root() / "prof-%p")
- ))
- record_metrics(pipeline, rustc_build)
+ """
+ Stage 1: Build PGO instrumented rustc
+
+ We use a normal build of LLVM, because gathering PGO profiles for LLVM and `rustc` at the same time
+ can cause issues.
+ """
+ with timer.section("Stage 1 (rustc PGO)") as stage1:
+ with stage1.section("Build PGO instrumented rustc and LLVM") as rustc_pgo_instrument:
+ bootstrap_build(pipeline, args=[
+ "--rust-profile-generate",
+ pipeline.rustc_profile_dir_root()
+ ])
+ record_metrics(pipeline, rustc_pgo_instrument)
with stage1.section("Gather profiles"):
- gather_llvm_profiles(pipeline, runner)
+ gather_rustc_profiles(pipeline, runner)
print_free_disk_space(pipeline)
- clear_llvm_files(pipeline)
- final_build_args += [
- "--llvm-profile-use",
- pipeline.llvm_profile_merged_file()
- ]
-
- # Stage 2: Build PGO instrumented rustc + LLVM
- with timer.section("Stage 2 (rustc PGO)") as stage2:
- with stage2.section("Build rustc and LLVM") as rustc_build:
- build_rustc(pipeline, args=[
- "--rust-profile-generate",
- pipeline.rustc_profile_dir_root()
+ with stage1.section("Build PGO optimized rustc") as rustc_pgo_use:
+ bootstrap_build(pipeline, args=[
+ "--rust-profile-use",
+ pipeline.rustc_profile_merged_file()
])
- record_metrics(pipeline, rustc_build)
+ record_metrics(pipeline, rustc_pgo_use)
+ dist_build_args += [
+ "--rust-profile-use",
+ pipeline.rustc_profile_merged_file()
+ ]
+
+ """
+ Stage 2: Gather LLVM PGO profiles
+ """
+ with timer.section("Stage 2 (LLVM PGO)") as stage2:
+ # Clear normal LLVM artifacts
+ clear_llvm_files(pipeline)
+
+ with stage2.section("Build PGO instrumented LLVM") as llvm_pgo_instrument:
+ bootstrap_build(pipeline, args=[
+ "--llvm-profile-generate",
+ # We want to keep the already built PGO-optimized `rustc`.
+ "--keep-stage", "0",
+ "--keep-stage", "1"
+ ], env=dict(
+ LLVM_PROFILE_DIR=str(pipeline.llvm_profile_dir_root() / "prof-%p")
+ ))
+ record_metrics(pipeline, llvm_pgo_instrument)
with stage2.section("Gather profiles"):
- gather_rustc_profiles(pipeline, runner)
+ gather_llvm_profiles(pipeline, runner)
+
+ dist_build_args += [
+ "--llvm-profile-use",
+ pipeline.llvm_profile_merged_file(),
+ ]
print_free_disk_space(pipeline)
- clear_llvm_files(pipeline)
- final_build_args += [
- "--rust-profile-use",
- pipeline.rustc_profile_merged_file()
- ]
+ # Clear PGO-instrumented LLVM artifacts
+ clear_llvm_files(pipeline)
- # Stage 3: Build rustc + BOLT instrumented LLVM
+ """
+ Stage 3: Build BOLT instrumented LLVM
+
+ We build a PGO optimized LLVM in this step, then instrument it with BOLT and gather BOLT profiles.
+ Note that we don't remove LLVM artifacts after this step, so that they are reused in the final dist build.
+ BOLT instrumentation is performed "on-the-fly" when the LLVM library is copied to the sysroot of rustc,
+ therefore the LLVM artifacts on disk are not "tainted" with BOLT instrumentation and they can be reused.
+ """
if pipeline.supports_bolt():
with timer.section("Stage 3 (LLVM BOLT)") as stage3:
- with stage3.section("Build rustc and LLVM") as rustc_build:
- build_rustc(pipeline, args=[
+ with stage3.section("Build BOLT instrumented LLVM") as llvm_bolt_instrument:
+ bootstrap_build(pipeline, args=[
"--llvm-profile-use",
pipeline.llvm_profile_merged_file(),
"--llvm-bolt-profile-generate",
- "--rust-profile-use",
- pipeline.rustc_profile_merged_file()
+ # We want to keep the already built PGO-optimized `rustc`.
+ "--keep-stage", "0",
+ "--keep-stage", "1"
])
- record_metrics(pipeline, rustc_build)
+ record_metrics(pipeline, llvm_bolt_instrument)
with stage3.section("Gather profiles"):
gather_llvm_bolt_profiles(pipeline, runner)
- # LLVM is not being cleared here, we want to reuse the previous build
- print_free_disk_space(pipeline)
- final_build_args += [
- "--llvm-bolt-profile-use",
- pipeline.llvm_bolt_profile_merged_file()
- ]
+ dist_build_args += [
+ "--llvm-bolt-profile-use",
+ pipeline.llvm_bolt_profile_merged_file()
+ ]
+ print_free_disk_space(pipeline)
+
+ # We want to keep the already built PGO-optimized `rustc`.
+ dist_build_args += [
+ "--keep-stage", "0",
+ "--keep-stage", "1"
+ ]
+
+ """
+ Final stage: Build PGO optimized rustc + PGO/BOLT optimized LLVM
+ """
+ with timer.section("Final stage (dist build)") as final_stage:
+ cmd(dist_build_args)
+ record_metrics(pipeline, final_stage)
- # Stage 4: Build PGO optimized rustc + PGO/BOLT optimized LLVM
- with timer.section("Stage 4 (final build)") as stage4:
- cmd(final_build_args)
- record_metrics(pipeline, stage4)
+ # Try builds can be in various broken states, so we don't want to gatekeep them with tests
+ # Do not run tests, as they are broken for beta/stable versions in this script
+ # if not is_try_build():
+ # with timer.section("Run tests"):
+ # run_tests(pipeline)
def run(runner: BenchmarkRunner):
@@ -851,6 +1010,13 @@ def run(runner: BenchmarkRunner):
build_args = sys.argv[1:]
+ # Skip components that are not needed for try builds to speed them up
+ if is_try_build():
+ LOGGER.info("Skipping building of unimportant components for a try build")
+ for target in ("rust-docs", "rustc-docs", "rust-docs-json", "rust-analyzer",
+ "rustc-src", "clippy", "miri", "rustfmt"):
+ build_args.extend(["--exclude", target])
+
timer = Timer()
pipeline = create_pipeline()
@@ -865,6 +1031,7 @@ def run(runner: BenchmarkRunner):
print_binary_sizes(pipeline)
+
if __name__ == "__main__":
runner = DefaultBenchmarkRunner()
run(runner)
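
For reference, a minimal sketch, not part of the commit, of how the renamed `dist_build_args` and the new `DIST_TRY_BUILD` flag (forwarded into the container by run.sh above) interact; the invocation mentioned in the comment is an assumption:

import os
import sys

def trimmed_dist_args(build_args):
    # Mirrors run() above: try builds skip components that are not needed.
    if os.environ.get("DIST_TRY_BUILD", "0") != "0":
        for target in ("rust-docs", "rustc-docs", "rust-docs-json", "rust-analyzer",
                       "rustc-src", "clippy", "miri", "rustfmt"):
            build_args.extend(["--exclude", target])
    return build_args

if __name__ == "__main__":
    # Assumed usage: stage-build.py receives the dist command it will run in the
    # final stage, e.g. `stage-build.py python3 ../x.py dist --host ... --target ...`.
    print(trimmed_dist_args(list(sys.argv[1:])))
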