author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-12 05:35:37 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-12 05:35:37 +0000
commit     a90a5cba08fdf6c0ceb95101c275108a152a3aed (patch)
tree       532507288f3defd7f4dcf1af49698bcb76034855 /taskcluster/scripts
parent     Adding debian version 126.0.1-1. (diff)
Merging upstream version 127.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/scripts')
-rwxr-xr-x  taskcluster/scripts/builder/build-android.sh                   |   2
-rw-r--r--  taskcluster/scripts/lib/testrail_api.py                        | 126
-rw-r--r--  taskcluster/scripts/misc/afl-nyx.patch                         |  29
-rwxr-xr-x  taskcluster/scripts/misc/android-gradle-dependencies/after.sh  |   2
-rwxr-xr-x  taskcluster/scripts/misc/android-gradle-dependencies/before.sh |  22
-rw-r--r--  taskcluster/scripts/misc/cctools.patch                         |  14
-rw-r--r--  taskcluster/scripts/misc/fetch-chromium.py                     |  56
-rwxr-xr-x  taskcluster/scripts/misc/gradle-python-envs.sh                 |  11
-rw-r--r--  taskcluster/scripts/misc/wasi-sdk.patch                        |  34
-rw-r--r--  taskcluster/scripts/testrail_main.py                           |  13
-rw-r--r--  taskcluster/scripts/tests/test-lab.py                          |  13
11 files changed, 198 insertions(+), 124 deletions(-)
diff --git a/taskcluster/scripts/builder/build-android.sh b/taskcluster/scripts/builder/build-android.sh
index fd2ecd0e2b..fc1ccca7e4 100755
--- a/taskcluster/scripts/builder/build-android.sh
+++ b/taskcluster/scripts/builder/build-android.sh
@@ -19,7 +19,7 @@ ac_add_options --without-google-safebrowsing-api-keyfile
ac_add_options --disable-nodejs
unset NODEJS
-export GRADLE_MAVEN_REPOSITORIES="file://$MOZ_FETCHES_DIR/geckoview","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/mozilla","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/google","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/central","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/gradle-plugins","file:///$MOZ_FETCHES_DIR/plugins.gradle.org/m2"
+export GRADLE_MAVEN_REPOSITORIES="file://$MOZ_FETCHES_DIR/geckoview","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/mozilla","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/google","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/central","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/gradle-plugins","file:///$MOZ_FETCHES_DIR/android-gradle-dependencies/plugins.gradle.org/m2"
EOF
export MOZCONFIG=$mozconfig
GRADLE=$MOZ_FETCHES_DIR/android-gradle-dependencies/gradle-dist/bin/gradle
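
The only functional change above is the last GRADLE_MAVEN_REPOSITORIES entry: the plugins.gradle.org mirror now lives inside the android-gradle-dependencies fetch directory populated by before.sh (see below). A minimal sketch of how that comma-separated list of file:// repositories is shaped, assuming only the MOZ_FETCHES_DIR environment variable; the helper name is hypothetical:

import os

def gradle_maven_repositories(fetches_dir=None):
    # Hypothetical helper mirroring the mozconfig line above: a comma-separated
    # list of local file:// Maven mirrors rooted in MOZ_FETCHES_DIR.
    fetches_dir = fetches_dir or os.environ.get("MOZ_FETCHES_DIR", "/builds/worker/fetches")
    deps = os.path.join(fetches_dir, "android-gradle-dependencies")
    repos = [
        os.path.join(fetches_dir, "geckoview"),
        os.path.join(deps, "mozilla"),
        os.path.join(deps, "google"),
        os.path.join(deps, "central"),
        os.path.join(deps, "gradle-plugins"),
        # After this commit the Gradle plugin portal mirror sits under the
        # same android-gradle-dependencies directory.
        os.path.join(deps, "plugins.gradle.org", "m2"),
    ]
    return ",".join("file://" + repo for repo in repos)

print(gradle_maven_repositories())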
diff --git a/taskcluster/scripts/lib/testrail_api.py b/taskcluster/scripts/lib/testrail_api.py
index 44474ebe9d..786ae8552c 100644
--- a/taskcluster/scripts/lib/testrail_api.py
+++ b/taskcluster/scripts/lib/testrail_api.py
@@ -37,82 +37,98 @@ from testrail_conn import APIClient
class TestRail:
def __init__(self, host, username, password):
+ if not all([host, username, password]):
+ raise ValueError("TestRail host, username, and password must be provided.")
self.client = APIClient(host)
self.client.user = username
self.client.password = password
# Public Methods
- def create_milestone(self, testrail_project_id, title, description):
+ def create_milestone(self, project_id, title, description):
+ if not all([project_id, title, description]):
+ raise ValueError("Project ID, title, and description must be provided.")
data = {"name": title, "description": description}
- return self.client.send_post(f"add_milestone/{testrail_project_id}", data)
-
- def create_milestone_and_test_runs(
- self, project_id, milestone_name, milestone_description, devices, test_suite_id
- ):
- # Create milestone
- milestone_id = self._retry_api_call(
- self.create_milestone, project_id, milestone_name, milestone_description
- )["id"]
-
- # Create test runs for each device
- for device in devices:
- test_run_id = self._retry_api_call(
- self.create_test_run, project_id, milestone_id, device, test_suite_id
- )["id"]
- self._retry_api_call(
- self.update_test_cases_to_passed, project_id, test_run_id, test_suite_id
- )
-
- return milestone_id
+ return self.client.send_post(f"add_milestone/{project_id}", data)
def create_test_run(
- self, testrail_project_id, testrail_milestone_id, name_run, testrail_suite_id
+ self,
+ project_id,
+ milestone_id,
+ test_run_name,
+ suite_id,
):
+ if not all([project_id, milestone_id, test_run_name, suite_id]):
+ raise ValueError(
+ "Project ID, milestone ID, test run name, and suite ID must be provided."
+ )
data = {
- "name": name_run,
- "milestone_id": testrail_milestone_id,
- "suite_id": testrail_suite_id,
+ "name": test_run_name,
+ "milestone_id": milestone_id,
+ "suite_id": suite_id,
}
- return self.client.send_post(f"add_run/{testrail_project_id}", data)
-
- def does_milestone_exist(self, testrail_project_id, milestone_name):
- num_of_milestones_to_check = 10 # check last 10 milestones
- milestones = self._get_milestones(
- testrail_project_id
- ) # returns reverse chronological order
- for milestone in milestones[
- -num_of_milestones_to_check:
- ]: # check last 10 api responses
- if milestone_name == milestone["name"]:
- return True
- return False
-
- def update_test_cases_to_passed(
- self, testrail_project_id, testrail_run_id, testrail_suite_id
- ):
- test_cases = self._get_test_cases(testrail_project_id, testrail_suite_id)
+ return self.client.send_post(f"add_run/{project_id}", data)
+
+ def does_milestone_exist(self, project_id, milestone_name, num_of_milestones=10):
+ if not all([project_id, milestone_name]):
+ raise ValueError("Project ID and milestone name must be provided.")
+ # returns reverse chronological order of milestones, check last 10 milestones
+ milestones = self._get_milestones(project_id)[-num_of_milestones:]
+ return any(milestone_name == milestone["name"] for milestone in milestones)
+
+ def update_test_run_tests(self, test_run_id, test_status):
+ if not all([test_run_id, test_status]):
+ raise ValueError("Test run ID and test status must be provided.")
+ tests = self._get_tests(test_run_id)
data = {
"results": [
- {"case_id": test_case["id"], "status_id": 1} for test_case in test_cases
+ {"test_id": test["id"], "status_id": test_status} for test in tests
]
}
- return self._update_test_run_results(testrail_run_id, data)
+ return self.client.send_post(f"add_results/{test_run_id}", data)
# Private Methods
- def _get_test_cases(self, testrail_project_id, testrail_test_suite_id):
- return self.client.send_get(
- f"get_cases/{testrail_project_id}&suite_id={testrail_test_suite_id}"
- )
-
- def _update_test_run_results(self, testrail_run_id, data):
- return self.client.send_post(f"add_results_for_cases/{testrail_run_id}", data)
-
- def _get_milestones(self, testrail_project_id):
- return self.client.send_get(f"get_milestones/{testrail_project_id}")
+ def _get_test_cases(self, project_id, suite_id):
+ if not all([project_id, suite_id]):
+ raise ValueError("Project ID and suite ID must be provided.")
+ return self.client.send_get(f"get_cases/{project_id}&suite_id={suite_id}")[
+ "cases"
+ ]
+
+ def _get_milestone(self, milestone_id):
+ if not milestone_id:
+ raise ValueError("Milestone ID must be provided.")
+ return self.client.send_get(f"get_milestone/{milestone_id}")
+
+ def _get_milestones(self, project_id):
+ if not project_id:
+ raise ValueError("Project ID must be provided.")
+ return self.client.send_get(f"get_milestones/{project_id}")["milestones"]
+
+ def _get_tests(self, test_run_id):
+ if not test_run_id:
+ raise ValueError("Test run ID must be provided.")
+ return self.client.send_get(f"get_tests/{test_run_id}")["tests"]
+
+ def _get_test_run(self, test_run_id):
+ if not test_run_id:
+ raise ValueError("Test run ID must be provided.")
+ return self.client.send_get(f"get_run/{test_run_id}")
+
+ def _get_test_runs(self, project_id):
+ if not project_id:
+ raise ValueError("Project ID must be provided.")
+ return self.client.send_get(f"get_runs/{project_id}")["runs"]
+
+ def _get_test_run_results(self, test_run_id):
+ if not test_run_id:
+ raise ValueError("Test run ID must be provided.")
+ return self.client.send_get(f"get_results_for_run/{test_run_id}")["results"]
def _retry_api_call(self, api_call, *args, max_retries=3, delay=5):
+ if not all([api_call, args]):
+ raise ValueError("API call and arguments must be provided.")
"""
Retries the given API call up to max_retries times with a delay between attempts.
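
The hunk ends before the body of _retry_api_call; only its signature and docstring are visible. A minimal standalone sketch of a retry helper with the same shape, assuming it re-raises after the final failed attempt (the actual method body is outside this diff):

import time

def retry_api_call(api_call, *args, max_retries=3, delay=5):
    # Sketch of the TestRail._retry_api_call signature shown above: call
    # api_call(*args), retrying up to max_retries times with `delay` seconds
    # between attempts, and re-raise the last error (assumed behaviour).
    for attempt in range(1, max_retries + 1):
        try:
            return api_call(*args)
        except Exception:
            if attempt == max_retries:
                raise
            time.sleep(delay)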
diff --git a/taskcluster/scripts/misc/afl-nyx.patch b/taskcluster/scripts/misc/afl-nyx.patch
index 94a1bbb31d..7a8a55b975 100644
--- a/taskcluster/scripts/misc/afl-nyx.patch
+++ b/taskcluster/scripts/misc/afl-nyx.patch
@@ -17,6 +17,12 @@ index 988e536e..5e9870c0 100644
/* Do not change this unless you really know what you are doing. */
+commit e46fac6063f7b0b6eca8e140b10c3a107deb0a0f
+Author: Christian Holler (:decoder) <choller@mozilla.com>
+Date: Wed Mar 6 10:19:52 2024 +0100
+
+ Fix delayed pcmap writing for code coverage with pc-table
+
diff --git a/instrumentation/afl-compiler-rt.o.c b/instrumentation/afl-compiler-rt.o.c
index caa3c3a8..76ceb451 100644
--- a/instrumentation/afl-compiler-rt.o.c
@@ -51,3 +57,26 @@ index caa3c3a8..76ceb451 100644
+ }
if (__afl_debug) {
+
+commit 58206a3180479416e14ea324607be71ee69caa6f
+Author: Jesse Schwartzentruber <truber@mozilla.com>
+Date: Wed Apr 17 14:40:41 2024 -0400
+
+ Set explicit visibility on shared memory variables.
+
+diff --git a/src/afl-cc.c b/src/afl-cc.c
+index dd4fb4ea..57089ae0 100644
+--- a/src/afl-cc.c
++++ b/src/afl-cc.c
+@@ -1591,8 +1591,10 @@ void add_defs_persistent_mode(aflcc_state_t *aflcc) {
+ insert_param(aflcc,
+ "-D__AFL_FUZZ_INIT()="
+ "int __afl_sharedmem_fuzzing = 1;"
+- "extern unsigned int *__afl_fuzz_len;"
+- "extern unsigned char *__afl_fuzz_ptr;"
++ "extern __attribute__((visibility(\"default\"))) "
++ "unsigned int *__afl_fuzz_len;"
++ "extern __attribute__((visibility(\"default\"))) "
++ "unsigned char *__afl_fuzz_ptr;"
+ "unsigned char __afl_fuzz_alt[1048576];"
+ "unsigned char *__afl_fuzz_alt_ptr = __afl_fuzz_alt;");
diff --git a/taskcluster/scripts/misc/android-gradle-dependencies/after.sh b/taskcluster/scripts/misc/android-gradle-dependencies/after.sh
index 9943cf2c41..22be14256e 100755
--- a/taskcluster/scripts/misc/android-gradle-dependencies/after.sh
+++ b/taskcluster/scripts/misc/android-gradle-dependencies/after.sh
@@ -10,7 +10,7 @@ set -v
# Package everything up.
pushd $WORKSPACE
-mkdir -p android-gradle-dependencies /builds/worker/artifacts
+mkdir -p /builds/worker/artifacts
# NEXUS_WORK is exported by `before.sh`.
cp -R ${NEXUS_WORK}/storage/mozilla android-gradle-dependencies
diff --git a/taskcluster/scripts/misc/android-gradle-dependencies/before.sh b/taskcluster/scripts/misc/android-gradle-dependencies/before.sh
index 7150731d73..3d1b5809b2 100755
--- a/taskcluster/scripts/misc/android-gradle-dependencies/before.sh
+++ b/taskcluster/scripts/misc/android-gradle-dependencies/before.sh
@@ -4,14 +4,30 @@ set -x -e
echo "running as" $(id)
-: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
+if [[ -z "${WORKSPACE}" ]]; then
+ export WORKSPACE=/builds/worker/workspace
+fi
set -v
+# Download the gradle-python-envs plugin
+# See https://github.com/gradle/plugin-portal-requests/issues/164
+pushd ${WORKSPACE}
+mkdir -p android-gradle-dependencies
+pushd android-gradle-dependencies
+
+PYTHON_ENVS_VERSION="0.0.31"
+
+PYTHON_ENVS_BASE_URL=https://plugins.gradle.org/m2/gradle/plugin/com/jetbrains/python/gradle-python-envs
+
+wget --no-parent --recursive --execute robots=off "${PYTHON_ENVS_BASE_URL}/${PYTHON_ENVS_VERSION}/"
+popd
+popd
+
# Export NEXUS_WORK so that `after.sh` can use it.
-export NEXUS_WORK=/builds/worker/workspace/sonatype-nexus-work
+export NEXUS_WORK=${WORKSPACE}/sonatype-nexus-work
mkdir -p ${NEXUS_WORK}/conf
-cp /builds/worker/workspace/build/src/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml ${NEXUS_WORK}/conf/nexus.xml
+cp ${WORKSPACE}/build/src/taskcluster/scripts/misc/android-gradle-dependencies/nexus.xml ${NEXUS_WORK}/conf/nexus.xml
RUN_AS_USER=worker $MOZ_FETCHES_DIR/sonatype-nexus/bin/nexus restart
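
before.sh now mirrors the gradle-python-envs plugin (pinned to 0.0.31) from the Gradle plugin portal into android-gradle-dependencies, replacing the standalone gradle-python-envs.sh toolchain script deleted further down. A rough Python equivalent of that download step, using the same wget flags; the helper name is hypothetical:

import os
import subprocess

PYTHON_ENVS_VERSION = "0.0.31"
PYTHON_ENVS_BASE_URL = (
    "https://plugins.gradle.org/m2/gradle/plugin/com/jetbrains/python/gradle-python-envs"
)

def mirror_python_envs_plugin(workspace=None):
    # Hypothetical helper doing what the before.sh hunk above does: recursively
    # mirror the plugin into <workspace>/android-gradle-dependencies/plugins.gradle.org/...
    workspace = workspace or os.environ.get("WORKSPACE", "/builds/worker/workspace")
    dest = os.path.join(workspace, "android-gradle-dependencies")
    os.makedirs(dest, exist_ok=True)
    subprocess.run(
        ["wget", "--no-parent", "--recursive", "--execute", "robots=off",
         f"{PYTHON_ENVS_BASE_URL}/{PYTHON_ENVS_VERSION}/"],
        cwd=dest,
        check=True,
    )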
diff --git a/taskcluster/scripts/misc/cctools.patch b/taskcluster/scripts/misc/cctools.patch
index 8e11708465..65e10437dd 100644
--- a/taskcluster/scripts/misc/cctools.patch
+++ b/taskcluster/scripts/misc/cctools.patch
@@ -15,3 +15,17 @@ index bfb67a3..d5b1c67 100644
_deadStripRoots.insert(atom);
}
}
+diff --git a/cctools/ld64/src/ld/code-sign-blobs/blob.h b/cctools/ld64/src/ld/code-sign-blobs/blob.h
+index 54afb2d..c17d19b 100644
+--- a/cctools/ld64/src/ld/code-sign-blobs/blob.h
++++ b/cctools/ld64/src/ld/code-sign-blobs/blob.h
+@@ -181,9 +181,6 @@ public:
+ return NULL;
+ }
+
+- BlobType *clone() const
+- { assert(validateBlob()); return specific(this->BlobCore::clone()); }
+-
+ static BlobType *readBlob(int fd)
+ { return specific(BlobCore::readBlob(fd, _magic, sizeof(BlobType), 0), true); }
+
diff --git a/taskcluster/scripts/misc/fetch-chromium.py b/taskcluster/scripts/misc/fetch-chromium.py
index ab00dac863..89b476a5a3 100644
--- a/taskcluster/scripts/misc/fetch-chromium.py
+++ b/taskcluster/scripts/misc/fetch-chromium.py
@@ -38,30 +38,35 @@ CHROMIUM_INFO = {
"linux": {
"platform": "Linux_x64",
"chromium": "chrome-linux.zip",
+ "dir": "chrome-linux",
"result": "chromium-linux.tar.bz2",
"chromedriver": "chromedriver_linux64.zip",
},
"win32": {
"platform": "Win",
"chromium": "chrome-win.zip",
+ "dir": "chrome-win",
"result": "chromium-win32.tar.bz2",
"chromedriver": "chromedriver_win32.zip",
},
"win64": {
"platform": "Win",
"chromium": "chrome-win.zip",
+ "dir": "chrome-win",
"result": "chromium-win64.tar.bz2",
"chromedriver": "chromedriver_win32.zip",
},
"mac": {
"platform": "Mac",
"chromium": "chrome-mac.zip",
+ "dir": "chrome-mac",
"result": "chromium-mac.tar.bz2",
"chromedriver": "chromedriver_mac64.zip",
},
"mac-arm": {
"platform": "Mac_Arm",
"chromium": "chrome-mac.zip",
+ "dir": "chrome-mac",
"result": "chromium-mac-arm.tar.bz2",
"chromedriver": "chromedriver_mac64.zip",
},
@@ -108,24 +113,11 @@ def fetch_chromium_revision(platform):
return chromium_revision.strip()
-def fetch_chromium_build(platform, revision, zippath):
- """Download a chromium build for a given revision, or the latest."""
+def fetch_chromedriver(platform, revision, chromium_dir):
+ """Get the chromedriver for the given revision and repackage it."""
if not revision:
revision = fetch_chromium_revision(platform)
- download_platform = CHROMIUM_INFO[platform]["platform"]
- download_url = CHROMIUM_BASE_URL.format(
- download_platform, revision, CHROMIUM_INFO[platform]["chromium"]
- )
-
- log("Downloading %s chromium build revision %s..." % (download_platform, revision))
- log(download_url)
- fetch_file(download_url, zippath)
- return revision
-
-
-def fetch_chromedriver(platform, revision, chromium_dir):
- """Get the chromedriver for the given revision and repackage it."""
download_url = CHROMIUM_BASE_URL.format(
CHROMIUM_INFO[platform]["platform"],
revision,
@@ -152,6 +144,7 @@ def fetch_chromedriver(platform, revision, chromium_dir):
raise Exception("Could not find chromedriver binary in %s" % tmppath)
log("Copying chromedriver from: %s to: %s" % (cd_path, chromium_dir))
shutil.copy(cd_path, chromium_dir)
+ return revision
def build_chromium_archive(platform, revision=None):
@@ -173,39 +166,20 @@ def build_chromium_archive(platform, revision=None):
# Make a temporary location for the file
tmppath = tempfile.mkdtemp()
- tmpzip = os.path.join(tmppath, "tmp-chromium.zip")
- revision = fetch_chromium_build(platform, revision, tmpzip)
+ # Create the directory format expected for browsertime setup in taskgraph transform
+ artifact_dir = CHROMIUM_INFO[platform]["dir"]
+ chromium_dir = os.path.join(tmppath, artifact_dir)
+ os.mkdir(chromium_dir)
- # Unpack archive in `tmpzip` to store the revision number and
- # the chromedriver
- unzip(tmpzip, tmppath)
-
- dirs = [
- d
- for d in os.listdir(tmppath)
- if os.path.isdir(os.path.join(tmppath, d)) and d.startswith("chrome-")
- ]
-
- if len(dirs) > 1:
- raise Exception(
- "Too many directories starting with `chrome-` after extracting."
- )
- elif len(dirs) == 0:
- raise Exception(
- "Could not find any directories after extraction of chromium zip."
- )
-
- chromium_dir = os.path.join(tmppath, dirs[0])
+ # Store the revision number and chromedriver
+ revision = fetch_chromedriver(platform, revision, chromium_dir)
revision_file = os.path.join(chromium_dir, ".REVISION")
with open(revision_file, "w+") as f:
f.write(str(revision))
- # Get and store the chromedriver
- fetch_chromedriver(platform, revision, chromium_dir)
-
tar_file = CHROMIUM_INFO[platform]["result"]
- tar_command = ["tar", "cjf", tar_file, "-C", tmppath, dirs[0]]
+ tar_command = ["tar", "cjf", tar_file, "-C", tmppath, artifact_dir]
log("Added revision to %s file." % revision_file)
log("Tarring with the command: %s" % str(tar_command))
diff --git a/taskcluster/scripts/misc/gradle-python-envs.sh b/taskcluster/scripts/misc/gradle-python-envs.sh
deleted file mode 100755
index 5873e3fa91..0000000000
--- a/taskcluster/scripts/misc/gradle-python-envs.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-set -x -e -v
-
-VERSION="$1"
-
-BASE_URL=https://plugins.gradle.org/m2/gradle/plugin/com/jetbrains/python/gradle-python-envs
-
-mkdir -p "${UPLOAD_DIR}"
-wget --no-parent --recursive --execute robots=off "${BASE_URL}/${VERSION}/"
-tar caf "${UPLOAD_DIR}/gradle-python-envs-${VERSION}.tar.zst" plugins.gradle.org
diff --git a/taskcluster/scripts/misc/wasi-sdk.patch b/taskcluster/scripts/misc/wasi-sdk.patch
index b9e26074d1..d151e5ae37 100644
--- a/taskcluster/scripts/misc/wasi-sdk.patch
+++ b/taskcluster/scripts/misc/wasi-sdk.patch
@@ -21,10 +21,10 @@ index d2ccff6..95f395b 100644
-DUNIX:BOOL=ON \
--debug-trycompile
diff --git a/src/wasi-libc/Makefile b/src/wasi-libc/Makefile
-index f350ecb..9a5c940 100644
+index f350ecb..3b73115 100644
--- a/src/wasi-libc/Makefile
+++ b/src/wasi-libc/Makefile
-@@ -308,7 +308,7 @@ ASMFLAGS += --target=$(TARGET_TRIPLE)
+@@ -305,7 +305,7 @@ ASMFLAGS += --target=$(TARGET_TRIPLE)
# TODO: Add -fno-signaling-nans when the compiler supports it.
CFLAGS += -fno-trapping-math
# Add all warnings, but disable a few which occur in third-party code.
@@ -33,23 +33,49 @@ index f350ecb..9a5c940 100644
-Wno-null-pointer-arithmetic \
-Wno-unused-parameter \
-Wno-sign-compare \
-@@ -671,6 +671,9 @@ check-symbols: startup_files libc
+@@ -639,7 +639,7 @@ check-symbols: startup_files libc
+ for undef_sym in $$("$(NM)" --undefined-only "$(SYSROOT_LIB)"/libc.a "$(SYSROOT_LIB)"/libc-*.a "$(SYSROOT_LIB)"/*.o \
+ |grep ' U ' |sed 's/.* U //' |LC_ALL=C sort |uniq); do \
+ grep -q '\<'$$undef_sym'\>' "$(DEFINED_SYMBOLS)" || echo $$undef_sym; \
+- done | grep -v "^__mul" > "$(UNDEFINED_SYMBOLS)"
++ done | grep -E -v "^__mul|__indirect_function_table" > "$(UNDEFINED_SYMBOLS)"
+ grep '^_*imported_wasi_' "$(UNDEFINED_SYMBOLS)" \
+ > "$(SYSROOT_LIB)/libc.imports"
+
+@@ -671,10 +671,16 @@ check-symbols: startup_files libc
@#
@# TODO: Filter out __NO_MATH_ERRNO_ and a few __*WIDTH__ that are new to clang 14.
@# TODO: Filter out __GCC_HAVE_SYNC_COMPARE_AND_SWAP_* that are new to clang 16.
+ @# TODO: Filter out __FPCLASS_* that are new to clang 17.
+ @# TODO: Filter out __FLT128_* that are new to clang 18.
+ @# TODO: Filter out __MEMORY_SCOPE_* that are new to clang 18.
++ @# TODO: Filter out __GCC_(CON|DE)STRUCTIVE_SIZE that are new to clang 19.
@# TODO: clang defined __FLT_EVAL_METHOD__ until clang 15, so we force-undefine it
@# for older versions.
@# TODO: Undefine __wasm_mutable_globals__ and __wasm_sign_ext__, that are new to
-@@ -702,6 +703,9 @@ check-symbols: startup_files libc
+ @# clang 16 for -mcpu=generic.
++ @# TODO: Undefine __wasm_multivalue__ and __wasm_reference_types__, that are new to
++ @# clang 19 for -mcpu=generic.
+ @# TODO: As of clang 16, __GNUC_VA_LIST is #defined without a value.
+ $(CC) $(CFLAGS) "$(SYSROOT_SHARE)/include-all.c" \
+ -isystem $(SYSROOT_INC) \
+@@ -691,6 +697,8 @@ check-symbols: startup_files libc
+ -U__clang_wide_literal_encoding__ \
+ -U__wasm_mutable_globals__ \
+ -U__wasm_sign_ext__ \
++ -U__wasm_multivalue__ \
++ -U__wasm_reference_types__ \
+ -U__GNUC__ \
+ -U__GNUC_MINOR__ \
+ -U__GNUC_PATCHLEVEL__ \
+@@ -702,6 +710,10 @@ check-symbols: startup_files libc
| sed -e 's/__GNUC_VA_LIST $$/__GNUC_VA_LIST 1/' \
| grep -v '^#define __\(BOOL\|INT_\(LEAST\|FAST\)\(8\|16\|32\|64\)\|INT\|LONG\|LLONG\|SHRT\)_WIDTH__' \
| grep -v '^#define __GCC_HAVE_SYNC_COMPARE_AND_SWAP_\(1\|2\|4\|8\)' \
+ | grep -v '^#define __FPCLASS_' \
+ | grep -v '^#define __FLT128_' \
+ | grep -v '^#define __MEMORY_SCOPE_' \
++ | grep -v '^#define __GCC_\(CON\|DE\)STRUCTIVE_SIZE' \
> "$(SYSROOT_SHARE)/predefined-macros.txt"
# Check that the computed metadata matches the expected metadata.
diff --git a/taskcluster/scripts/testrail_main.py b/taskcluster/scripts/testrail_main.py
index 19bb2f41ea..c8221a73b9 100644
--- a/taskcluster/scripts/testrail_main.py
+++ b/taskcluster/scripts/testrail_main.py
@@ -73,14 +73,15 @@ def main():
# Create milestone and test runs
devices = ["Google Pixel 3(Android11)", "Google Pixel 2(Android11)"]
- testrail.create_milestone_and_test_runs(
- testrail_project_id,
- milestone_name,
- milestone_description,
- devices,
- testrail_test_suite_id,
+ milestone = testrail.create_milestone(
+ testrail_project_id, milestone_name, milestone_description
)
+ for device in devices:
+ test_run = testrail.create_test_run(
+ testrail_project_id, milestone["id"], device, testrail_test_suite_id
+ )
+ testrail.update_test_run_tests(test_run["id"], 1) # 1 = Passed
# Send success notification
success_values = {
"RELEASE_TYPE": release_type,
diff --git a/taskcluster/scripts/tests/test-lab.py b/taskcluster/scripts/tests/test-lab.py
index b8b812df89..7c626f8359 100644
--- a/taskcluster/scripts/tests/test-lab.py
+++ b/taskcluster/scripts/tests/test-lab.py
@@ -21,6 +21,7 @@ import sys
from enum import Enum
from pathlib import Path
from typing import List, Optional, Union
+from urllib.parse import urlparse
# Worker paths and binaries
@@ -116,10 +117,18 @@ def execute_tests(
Worker.RESULTS_DIR.value,
"--project",
os.environ.get("GOOGLE_PROJECT"),
- "--client-details",
- f'matrixLabel={os.environ.get("PULL_REQUEST_NUMBER", "None")}',
]
+ # Add a client details parameter using the repository name
+ matrixLabel = os.environ.get("GECKO_HEAD_REPOSITORY")
+ if matrixLabel is not None:
+ flank_command.extend(
+ [
+ "--client-details",
+ f"matrixLabel={urlparse(matrixLabel).path.rpartition('/')[-1]}",
+ ]
+ )
+
# Add androidTest APK if provided (optional) as robo test or instrumentation test
if apk_test:
flank_command.extend(["--test", str(apk_test)])
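
The Flank --client-details label is now derived from the tail of GECKO_HEAD_REPOSITORY instead of a pull-request number. A quick illustration of the urlparse expression used above, with an assumed repository URL:

from urllib.parse import urlparse

# Assumed example value for GECKO_HEAD_REPOSITORY.
repo = "https://hg.mozilla.org/mozilla-central"
label = urlparse(repo).path.rpartition("/")[-1]
print(f"matrixLabel={label}")  # matrixLabel=mozilla-central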