author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-15 03:35:49 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-15 03:35:49 +0000
commit     d8bbc7858622b6d9c278469aab701ca0b609cddf (patch)
tree       eff41dc61d9f714852212739e6b3738b82a2af87 /taskcluster/scripts
parent     Releasing progress-linux version 125.0.3-1~progress7.99u1. (diff)
download   firefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.tar.xz
           firefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.zip

Merging upstream version 126.0.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/scripts')
-rwxr-xr-x  taskcluster/scripts/are_dependencies_completed.py | 59
-rwxr-xr-x  taskcluster/scripts/builder/build-android.sh | 35
-rwxr-xr-x  taskcluster/scripts/get-secret.py | 115
-rw-r--r--  taskcluster/scripts/lib/testrail_api.py | 130
-rw-r--r--  taskcluster/scripts/lib/testrail_conn.py | 109
-rw-r--r--  taskcluster/scripts/lib/testrail_utils.py | 84
-rwxr-xr-x  taskcluster/scripts/lint/is_buildconfig_yml_up_to_date.py | 71
-rw-r--r--  taskcluster/scripts/lint/requirements.in | 2
-rw-r--r--  taskcluster/scripts/lint/requirements.txt | 52
-rwxr-xr-x  taskcluster/scripts/lint/update_buildconfig_from_gradle.py | 165
-rwxr-xr-x  taskcluster/scripts/misc/android-gradle-dependencies.sh | 13
-rw-r--r--  taskcluster/scripts/misc/are-we-esmified-yet.py | 190
-rwxr-xr-x  taskcluster/scripts/misc/build-cpython.sh | 2
-rwxr-xr-x  taskcluster/scripts/misc/build-custom-car.sh | 2
-rwxr-xr-x  taskcluster/scripts/misc/build-geckodriver.sh | 9
-rwxr-xr-x  taskcluster/scripts/misc/build-gn-macosx.sh | 2
-rwxr-xr-x  taskcluster/scripts/misc/build-llvm-common.sh | 4
-rwxr-xr-x  taskcluster/scripts/misc/build-nasm.sh | 4
-rwxr-xr-x  taskcluster/scripts/misc/build-pkgconf.sh | 4
-rwxr-xr-x  taskcluster/scripts/misc/build-rust-based-toolchain.sh | 4
-rwxr-xr-x  taskcluster/scripts/misc/fetch-content | 10
-rwxr-xr-x  taskcluster/scripts/misc/fetch-talos-pdfs.py | 118
-rwxr-xr-x  taskcluster/scripts/misc/get-hostutils.sh | 14
-rwxr-xr-x  taskcluster/scripts/misc/gradle-python-envs.sh | 11
-rwxr-xr-x  taskcluster/scripts/misc/osx-cross-linker | 2
-rwxr-xr-x  taskcluster/scripts/misc/repack_rust.py | 1
-rwxr-xr-x  taskcluster/scripts/misc/run-fetch-talos-pdfs.sh | 10
-rwxr-xr-x  taskcluster/scripts/misc/wr-macos-cross-build-setup.sh | 2
-rwxr-xr-x  taskcluster/scripts/misc/wrench-deps-vendoring.sh | 2
-rw-r--r--  taskcluster/scripts/slack_notifier.py | 194
-rw-r--r--  taskcluster/scripts/testrail_main.py | 99
-rw-r--r--  taskcluster/scripts/tests/test-lab.py | 231
-rwxr-xr-x  taskcluster/scripts/write-dummy-secret.py | 43
33 files changed, 1578 insertions, 215 deletions
diff --git a/taskcluster/scripts/are_dependencies_completed.py b/taskcluster/scripts/are_dependencies_completed.py
new file mode 100755
index 0000000000..79a0bb30c3
--- /dev/null
+++ b/taskcluster/scripts/are_dependencies_completed.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+
+import taskcluster
+
+queue = taskcluster.Queue(
+ {
+ "rootUrl": os.environ.get("TASKCLUSTER_PROXY_URL", "https://taskcluster.net"),
+ }
+)
+
+
+def check_all_dependencies_are_completed(current_task_id):
+ print(f"Fetching task definition of {current_task_id}...")
+ task = queue.task(current_task_id)
+ dependencies_task_ids = task["dependencies"]
+
+ print(f"Fetching status of {len(dependencies_task_ids)} dependencies...")
+ # TODO Make this dict-comprehension async once we go Python 3
+ state_per_task_ids = {
+ task_id: queue.status(task_id)["status"]["state"]
+ for task_id in dependencies_task_ids
+ }
+ print("Statuses fetched.")
+ non_completed_tasks = {
+ task_id: state
+ for task_id, state in state_per_task_ids.items()
+ if state != "completed"
+ }
+
+ if non_completed_tasks:
+ raise ValueError(f"Some tasks are not completed: {non_completed_tasks}")
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Errors out if one of the DEPENDENCY_TASK_ID does not have the Taskcluster status "completed"'
+ )
+
+ parser.add_argument(
+ "current_task_id",
+ metavar="CURRENT_TASK_ID",
+ help="The task ID of the current running task",
+ )
+
+ result = parser.parse_args()
+ check_all_dependencies_are_completed(result.current_task_id)
+ print("All dependencies are completed. Reporting a green task!")
+ exit(0)
+
+
+if __name__ == "__main__":
+ main()
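
The script above asks the Taskcluster Queue for the state of every dependency of the current task and fails unless all of them are "completed". A minimal inline sketch of the same check, assuming the `taskcluster` Python client is available and the usual TASK_ID / TASKCLUSTER_PROXY_URL variables are set by the worker (the fallback task ID is a placeholder):

    import os

    import taskcluster

    queue = taskcluster.Queue(
        {"rootUrl": os.environ.get("TASKCLUSTER_PROXY_URL", "https://taskcluster.net")}
    )

    # Placeholder task ID; in CI this comes from the task environment.
    current_task_id = os.environ.get("TASK_ID", "abc123DEFexample")
    dependencies = queue.task(current_task_id)["dependencies"]
    states = {dep: queue.status(dep)["status"]["state"] for dep in dependencies}
    pending = {dep: state for dep, state in states.items() if state != "completed"}
    if pending:
        raise ValueError(f"Some tasks are not completed: {pending}")
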
diff --git a/taskcluster/scripts/builder/build-android.sh b/taskcluster/scripts/builder/build-android.sh
new file mode 100755
index 0000000000..fd2ecd0e2b
--- /dev/null
+++ b/taskcluster/scripts/builder/build-android.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+
+set -e
+set -x
+
+mozconfig=$(mktemp)
+cat > $mozconfig <<EOF
+# the corresponding geckoview's mozconfig, to pick up its config options
+. $MOZCONFIG
+# no-compile because we don't need to build native code here
+. $GECKO_PATH/build/mozconfig.no-compile
+
+# Disable Keyfile Loading (and checks)
+# This overrides the settings in the common android mozconfig
+ac_add_options --without-mozilla-api-keyfile
+ac_add_options --without-google-location-service-api-keyfile
+ac_add_options --without-google-safebrowsing-api-keyfile
+
+ac_add_options --disable-nodejs
+unset NODEJS
+
+export GRADLE_MAVEN_REPOSITORIES="file://$MOZ_FETCHES_DIR/geckoview","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/mozilla","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/google","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/central","file://$MOZ_FETCHES_DIR/android-gradle-dependencies/gradle-plugins","file:///$MOZ_FETCHES_DIR/plugins.gradle.org/m2"
+EOF
+export MOZCONFIG=$mozconfig
+GRADLE=$MOZ_FETCHES_DIR/android-gradle-dependencies/gradle-dist/bin/gradle
+
+./mach configure
+
+eval $PRE_GRADLEW
+
+eval $GET_SECRETS
+
+$GRADLE listRepositories $GRADLEW_ARGS
+
+eval $POST_GRADLEW
diff --git a/taskcluster/scripts/get-secret.py b/taskcluster/scripts/get-secret.py
new file mode 100755
index 0000000000..ae9bafcadf
--- /dev/null
+++ b/taskcluster/scripts/get-secret.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import argparse
+import base64
+import errno
+import json
+import os
+
+import taskcluster
+
+
+def write_secret_to_file(
+ path, data, key, base64decode=False, json_secret=False, append=False, prefix=""
+):
+ path = os.path.abspath(os.path.join(os.getcwd(), path))
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ print(f"Outputting secret to: {path}")
+
+ with open(path, "a" if append else "w") as f:
+ value = data["secret"][key]
+ if base64decode:
+ value = base64.b64decode(value)
+ if json_secret:
+ value = json.dumps(value)
+
+ if isinstance(value, bytes):
+ value = value.decode("utf-8")
+ f.write(prefix + value)
+
+
+def fetch_secret_from_taskcluster(name):
+ try:
+ secrets = taskcluster.Secrets(
+ {
+ # BaseUrl is still needed for tasks that haven't migrated to taskgraph yet.
+ "baseUrl": "http://taskcluster/secrets/v1",
+ }
+ )
+ except taskcluster.exceptions.TaskclusterFailure:
+ # taskcluster library >=5 errors out when `baseUrl` is used
+ secrets = taskcluster.Secrets(
+ {
+ "rootUrl": os.environ.get(
+ "TASKCLUSTER_PROXY_URL", "https://taskcluster.net"
+ ),
+ }
+ )
+
+ return secrets.get(name)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Fetch a taskcluster secret value and save it to a file."
+ )
+
+ parser.add_argument("-s", dest="secret", action="store", help="name of the secret")
+ parser.add_argument("-k", dest="key", action="store", help="key of the secret")
+ parser.add_argument(
+ "-f", dest="path", action="store", help="file to save secret to"
+ )
+ parser.add_argument(
+ "--decode",
+ dest="decode",
+ action="store_true",
+ default=False,
+ help="base64 decode secret before saving to file",
+ )
+ parser.add_argument(
+ "--json",
+ dest="json",
+ action="store_true",
+ default=False,
+ help="serializes the secret to JSON format",
+ )
+ parser.add_argument(
+ "--append",
+ dest="append",
+ action="store_true",
+ default=False,
+ help="append secret to existing file",
+ )
+ parser.add_argument(
+ "--prefix",
+ dest="prefix",
+ action="store",
+ default="",
+ help="add prefix when writing secret to file",
+ )
+
+ result = parser.parse_args()
+
+ secret = fetch_secret_from_taskcluster(result.secret)
+ write_secret_to_file(
+ result.path,
+ secret,
+ result.key,
+ result.decode,
+ result.json,
+ result.append,
+ result.prefix,
+ )
+
+
+if __name__ == "__main__":
+ main()
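
The script is normally invoked as `get-secret.py -s <secret> -k <key> -f <file>` (optionally with --decode, --json, --append, or --prefix). The sketch below shows the equivalent direct use of the Secrets service for a task running behind taskcluster-proxy; the secret name, key, and output file are placeholders, and the fetch requires a matching `secrets:get:...` scope on the task:

    import os

    import taskcluster

    secrets = taskcluster.Secrets(
        {"rootUrl": os.environ.get("TASKCLUSTER_PROXY_URL", "https://taskcluster.net")}
    )
    # Roughly what `get-secret.py -s project/example/secret -k token -f token.txt` does.
    data = secrets.get("project/example/secret")
    with open("token.txt", "w") as fh:
        fh.write(data["secret"]["token"])
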
diff --git a/taskcluster/scripts/lib/testrail_api.py b/taskcluster/scripts/lib/testrail_api.py
new file mode 100644
index 0000000000..44474ebe9d
--- /dev/null
+++ b/taskcluster/scripts/lib/testrail_api.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This module provides a TestRail class for interfacing with the TestRail API, enabling the creation and management of test milestones, test runs, and updating test cases. It facilitates automation and integration of TestRail functionalities into testing workflows, particularly for projects requiring automated test management and reporting.
+
+The TestRail class encapsulates methods to interact with TestRail's API, including creating milestones and test runs, updating test cases, and checking the existence of milestones. It also features a method to retry API calls, enhancing the robustness of network interactions.
+
+Key Components:
+- TestRail Class: A class providing methods for interacting with TestRail's API.
+ - create_milestone: Create a new milestone in a TestRail project.
+ - create_milestone_and_test_runs: Create a milestone and associated test runs for multiple devices in a project.
+ - create_test_run: Create a test run within a TestRail project.
+ - does_milestone_exist: Check if a milestone already exists in a TestRail project.
+ - update_test_cases_to_passed: Update the status of test cases to 'passed' in a test run.
+- Private Methods: Utility methods for internal use to fetch test cases, update test run results, and retrieve milestones.
+- Retry Mechanism: A method to retry API calls with a specified number of attempts and delay, improving reliability in case of intermittent network issues.
+
+Usage:
+This module is intended to be used as part of a larger automated testing system, where integration with TestRail is required for test management and reporting.
+
+"""
+
+import os
+import sys
+import time
+
+# Ensure the directory containing this script is in Python's search path
+script_directory = os.path.dirname(os.path.abspath(__file__))
+if script_directory not in sys.path:
+ sys.path.append(script_directory)
+
+from testrail_conn import APIClient
+
+
+class TestRail:
+ def __init__(self, host, username, password):
+ self.client = APIClient(host)
+ self.client.user = username
+ self.client.password = password
+
+ # Public Methods
+
+ def create_milestone(self, testrail_project_id, title, description):
+ data = {"name": title, "description": description}
+ return self.client.send_post(f"add_milestone/{testrail_project_id}", data)
+
+ def create_milestone_and_test_runs(
+ self, project_id, milestone_name, milestone_description, devices, test_suite_id
+ ):
+ # Create milestone
+ milestone_id = self._retry_api_call(
+ self.create_milestone, project_id, milestone_name, milestone_description
+ )["id"]
+
+ # Create test runs for each device
+ for device in devices:
+ test_run_id = self._retry_api_call(
+ self.create_test_run, project_id, milestone_id, device, test_suite_id
+ )["id"]
+ self._retry_api_call(
+ self.update_test_cases_to_passed, project_id, test_run_id, test_suite_id
+ )
+
+ return milestone_id
+
+ def create_test_run(
+ self, testrail_project_id, testrail_milestone_id, name_run, testrail_suite_id
+ ):
+ data = {
+ "name": name_run,
+ "milestone_id": testrail_milestone_id,
+ "suite_id": testrail_suite_id,
+ }
+ return self.client.send_post(f"add_run/{testrail_project_id}", data)
+
+ def does_milestone_exist(self, testrail_project_id, milestone_name):
+ num_of_milestones_to_check = 10 # check last 10 milestones
+ milestones = self._get_milestones(
+ testrail_project_id
+ ) # returns reverse chronological order
+ for milestone in milestones[
+ -num_of_milestones_to_check:
+ ]: # check last 10 api responses
+ if milestone_name == milestone["name"]:
+ return True
+ return False
+
+ def update_test_cases_to_passed(
+ self, testrail_project_id, testrail_run_id, testrail_suite_id
+ ):
+ test_cases = self._get_test_cases(testrail_project_id, testrail_suite_id)
+ data = {
+ "results": [
+ {"case_id": test_case["id"], "status_id": 1} for test_case in test_cases
+ ]
+ }
+ return self._update_test_run_results(testrail_run_id, data)
+
+ # Private Methods
+
+ def _get_test_cases(self, testrail_project_id, testrail_test_suite_id):
+ return self.client.send_get(
+ f"get_cases/{testrail_project_id}&suite_id={testrail_test_suite_id}"
+ )
+
+ def _update_test_run_results(self, testrail_run_id, data):
+ return self.client.send_post(f"add_results_for_cases/{testrail_run_id}", data)
+
+ def _get_milestones(self, testrail_project_id):
+ return self.client.send_get(f"get_milestones/{testrail_project_id}")
+
+ def _retry_api_call(self, api_call, *args, max_retries=3, delay=5):
+ """
+ Retries the given API call up to max_retries times with a delay between attempts.
+
+ :param api_call: The API call method to retry.
+ :param args: Arguments to pass to the API call.
+ :param max_retries: Maximum number of retries.
+ :param delay: Delay between retries in seconds.
+ """
+ for attempt in range(max_retries):
+ try:
+ return api_call(*args)
+ except Exception:
+ if attempt == max_retries - 1:
+ raise # Reraise the last exception
+ time.sleep(delay)
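
A hedged usage sketch for the TestRail class above; the host, credentials, project/suite IDs, and device names are placeholders, and the import assumes taskcluster/scripts/lib is on sys.path (testrail_api.py arranges that for its own directory):

    from testrail_api import TestRail

    testrail = TestRail("https://example.testrail.io", "user@example.com", "api-key")

    project_id, suite_id = 59, 3192  # placeholder TestRail IDs
    milestone_name = "Build Validation sign-off - Firefox Beta 126.0b1"

    if not testrail.does_milestone_exist(project_id, milestone_name):
        testrail.create_milestone_and_test_runs(
            project_id,
            milestone_name,
            "Automated smoke test sign-off",
            devices=["Google Pixel 3 (Android 11)", "Google Pixel 2 (Android 9)"],
            test_suite_id=suite_id,
        )
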
diff --git a/taskcluster/scripts/lib/testrail_conn.py b/taskcluster/scripts/lib/testrail_conn.py
new file mode 100644
index 0000000000..92e3aae275
--- /dev/null
+++ b/taskcluster/scripts/lib/testrail_conn.py
@@ -0,0 +1,109 @@
+# flake8: noqa
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""TestRail API binding for Python 3.x.
+
+(API v2, available since TestRail 3.0)
+
+Compatible with TestRail 3.0 and later.
+
+Learn more:
+
+http://docs.gurock.com/testrail-api2/start
+http://docs.gurock.com/testrail-api2/accessing
+
+Copyright Gurock Software GmbH. See license.md for details.
+"""
+
+import base64
+import json
+
+import requests
+
+
+class APIClient:
+ def __init__(self, base_url):
+ self.user = ""
+ self.password = ""
+ if not base_url.endswith("/"):
+ base_url += "/"
+ self.__url = base_url + "index.php?/api/v2/"
+
+ def send_get(self, uri, filepath=None):
+ """Issue a GET request (read) against the API.
+
+ Args:
+ uri: The API method to call including parameters, e.g. get_case/1.
+ filepath: The path and file name for attachment download; used only
+ for 'get_attachment/:attachment_id'.
+
+ Returns:
+ A dict containing the result of the request.
+ """
+ return self.__send_request("GET", uri, filepath)
+
+ def send_post(self, uri, data):
+ """Issue a POST request (write) against the API.
+
+ Args:
+ uri: The API method to call, including parameters, e.g. add_case/1.
+ data: The data to submit as part of the request as a dict; strings
+ must be UTF-8 encoded. If adding an attachment, must be the
+ path to the file.
+
+ Returns:
+ A dict containing the result of the request.
+ """
+ return self.__send_request("POST", uri, data)
+
+ def __send_request(self, method, uri, data):
+ url = self.__url + uri
+
+ auth = str(
+ base64.b64encode(bytes("%s:%s" % (self.user, self.password), "utf-8")),
+ "ascii",
+ ).strip()
+ headers = {"Authorization": "Basic " + auth}
+
+ if method == "POST":
+ if uri[:14] == "add_attachment": # add_attachment API method
+ files = {"attachment": (open(data, "rb"))}
+ response = requests.post(url, headers=headers, files=files)
+ files["attachment"].close()
+ else:
+ headers["Content-Type"] = "application/json"
+ payload = bytes(json.dumps(data), "utf-8")
+ response = requests.post(url, headers=headers, data=payload)
+ else:
+ headers["Content-Type"] = "application/json"
+ response = requests.get(url, headers=headers)
+
+ if response.status_code > 201:
+ try:
+ error = response.json()
+ except (
+ requests.exceptions.HTTPError
+ ): # response.content not formatted as JSON
+ error = str(response.content)
+ raise APIError(
+ "TestRail API returned HTTP %s (%s)" % (response.status_code, error)
+ )
+ else:
+ if uri[:15] == "get_attachment/": # Expecting file, not JSON
+ try:
+ open(data, "wb").write(response.content)
+ return data
+ except FileNotFoundError:
+ return "Error saving attachment."
+ else:
+ try:
+ return response.json()
+ except requests.exceptions.HTTPError:
+ return {}
+
+
+class APIError(Exception):
+ pass
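
A minimal sketch of driving APIClient directly; the instance URL, credentials, and run/case IDs are placeholders, while `get_case` and `add_result_for_case` are standard TestRail API v2 endpoints:

    from testrail_conn import APIClient

    client = APIClient("https://example.testrail.io")
    client.user = "user@example.com"
    client.password = "api-key"

    case = client.send_get("get_case/1")  # returns the test case as a dict
    client.send_post(
        "add_result_for_case/5/1",  # run 5, case 1 (placeholder IDs)
        {"status_id": 1, "comment": "Passed by automation"},
    )
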
diff --git a/taskcluster/scripts/lib/testrail_utils.py b/taskcluster/scripts/lib/testrail_utils.py
new file mode 100644
index 0000000000..3f502397b8
--- /dev/null
+++ b/taskcluster/scripts/lib/testrail_utils.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This script contains utility functions designed to support the integration of automated
+testing processes with TestRail, a test case management tool. The primary focus is on
+creating and managing milestones in TestRail based on automated smoke tests for product
+releases. It includes functions for building milestone names and descriptions, determining
+release types, and loading TestRail credentials.
+
+Functions:
+- build_milestone_name(product_type, release_type, version_number): Constructs a formatted
+ milestone name based on the product type, release type, and version number.
+- build_milestone_description(milestone_name): Generates a detailed description for the
+ milestone, including the release date and placeholders for testing status and QA recommendations.
+- get_release_version(): Reads and returns the release version number from a 'version.txt' file.
+- get_release_type(version): Determines the release type (e.g., Alpha, Beta, RC) based on
+ the version string.
+- load_testrail_credentials(json_file_path): Loads TestRail credentials from a JSON file
+ and handles potential errors during the loading process.
+"""
+
+import json
+import os
+import textwrap
+from datetime import datetime
+
+
+def build_milestone_name(product_type, release_type, version_number):
+ return f"Build Validation sign-off - {product_type} {release_type} {version_number}"
+
+
+def build_milestone_description(milestone_name):
+ current_date = datetime.now()
+    formatted_date = current_date.strftime("%B %d, %Y")
+ return textwrap.dedent(
+ f"""
+ RELEASE: {milestone_name}\n\n\
+ RELEASE_TAG_URL: https://archive.mozilla.org/pub/fenix/releases/\n\n\
+ RELEASE_DATE: {formatted_date}\n\n\
+ TESTING_STATUS: [ TBD ]\n\n\
+ QA_RECOMMENDATION:[ TBD ]\n\n\
+    QA_RECOMMENDATION_VERBOSE: \n\n\
+ TESTING_SUMMARY\n\n\
+ Known issues: n/a\n\
+ New issue: n/a\n\
+ Verified issue:
+ """
+ )
+
+
+def get_release_version():
+ # Check if version.txt was found
+ version_file_path = os.path.join(
+ os.environ.get("GECKO_PATH", "."), "mobile", "android", "version.txt"
+ )
+ if not os.path.isfile(version_file_path):
+ raise FileNotFoundError(f"{version_file_path} not found.")
+
+ # Read the version from the file
+ with open(version_file_path, "r") as file:
+ version = file.readline().strip()
+
+ return version
+
+
+def get_release_type(version):
+ release_map = {"a": "Alpha", "b": "Beta"}
+ # use generator expression to check each char for key else default to 'RC'
+ product_type = next(
+ (release_map[char] for char in version if char in release_map), "RC"
+ )
+ return product_type
+
+
+def load_testrail_credentials(json_file_path):
+ try:
+ with open(json_file_path, "r") as file:
+ credentials = json.load(file)
+ return credentials
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Failed to load TestRail credentials: {e}")
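
A worked example of the helpers above, with illustrative version strings (the import again assumes taskcluster/scripts/lib is on sys.path):

    from testrail_utils import build_milestone_name, get_release_type

    get_release_type("126.0a1")  # -> "Alpha" (first marker character found wins)
    get_release_type("126.0b3")  # -> "Beta"
    get_release_type("126.0")    # -> "RC" (no "a" or "b" in the version string)

    build_milestone_name("Firefox", "Beta", "126.0b3")
    # -> "Build Validation sign-off - Firefox Beta 126.0b3"
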
diff --git a/taskcluster/scripts/lint/is_buildconfig_yml_up_to_date.py b/taskcluster/scripts/lint/is_buildconfig_yml_up_to_date.py
new file mode 100755
index 0000000000..2df9cc044f
--- /dev/null
+++ b/taskcluster/scripts/lint/is_buildconfig_yml_up_to_date.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import os
+import subprocess
+import sys
+
+from update_buildconfig_from_gradle import main as update_build_config
+
+CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
+PROJECT_DIR = os.path.realpath(os.path.join(CURRENT_DIR, "..", "..", ".."))
+OUTPUT_DIR = os.path.join(PROJECT_DIR, "artifacts")
+BUILDCONFIG_DIFF_FILE_NAME = "buildconfig.diff"
+BUILDCONFIG_DIFF_FILE = os.path.join(OUTPUT_DIR, BUILDCONFIG_DIFF_FILE_NAME)
+BUILDCONFIG_FILE_NAME = ".buildconfig.yml"
+
+logger = logging.getLogger(__name__)
+
+
+def _buildconfig_files_diff():
+ cmd = [
+ "hg",
+ "diff",
+ "--rev",
+ "draft() and ancestors(.)",
+ "-I",
+ "**/.buildconfig.yml",
+ ]
+ p = subprocess.run(cmd, capture_output=True, universal_newlines=True)
+ return p.stdout
+
+
+def _execute_taskcluster_steps(diff, task_id):
+ os.makedirs(OUTPUT_DIR, exist_ok=True)
+ with open(BUILDCONFIG_DIFF_FILE, mode="w") as f:
+ f.write(diff)
+ tc_root_url = os.environ["TASKCLUSTER_ROOT_URL"]
+ artifact_url = f"{tc_root_url}/api/queue/v1/task/{task_id}/artifacts/public%2F{BUILDCONFIG_DIFF_FILE_NAME}" # noqa E501
+ message = f"""{BUILDCONFIG_FILE_NAME} file changed! Please update it by running:
+
+curl --location --compressed {artifact_url} | git apply
+
+Then commit and push!
+"""
+ logger.error(message)
+
+
+def _execute_local_steps():
+ logger.error(f"{BUILDCONFIG_FILE_NAME} file updated! Please commit these changes.")
+
+
+def main():
+ update_build_config()
+ diff = _buildconfig_files_diff()
+ if diff:
+ task_id = os.environ.get("TASK_ID")
+ if task_id:
+ _execute_taskcluster_steps(diff, task_id)
+ else:
+ _execute_local_steps()
+ sys.exit(1)
+
+ logger.info(f"All good! {BUILDCONFIG_FILE_NAME} is up-to-date with gradle.")
+
+
+__name__ == "__main__" and main()
diff --git a/taskcluster/scripts/lint/requirements.in b/taskcluster/scripts/lint/requirements.in
new file mode 100644
index 0000000000..d5f4369f9e
--- /dev/null
+++ b/taskcluster/scripts/lint/requirements.in
@@ -0,0 +1,2 @@
+mergedeep
+pyyaml
diff --git a/taskcluster/scripts/lint/requirements.txt b/taskcluster/scripts/lint/requirements.txt
new file mode 100644
index 0000000000..1a3ce6f714
--- /dev/null
+++ b/taskcluster/scripts/lint/requirements.txt
@@ -0,0 +1,52 @@
+#
+# This file is autogenerated by pip-compile with Python 3.8
+# by the following command:
+#
+# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
+#
+mergedeep==1.3.4 \
+ --hash=sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8 \
+ --hash=sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307
+ # via -r requirements.in
+pyyaml==6.0 \
+ --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
+ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
+ --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
+ --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
+ --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
+ --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
+ --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
+ --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
+ --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
+ --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
+ --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
+ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
+ --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
+ --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
+ --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
+ --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
+ --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
+ --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
+ --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
+ --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
+ --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
+ --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
+ --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
+ --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
+ --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
+ --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
+ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
+ --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
+ --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
+ --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
+ --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
+ --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
+ --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
+ --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
+ --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
+ --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
+ --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
+ --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
+ --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
+ --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
+ # via -r requirements.in
diff --git a/taskcluster/scripts/lint/update_buildconfig_from_gradle.py b/taskcluster/scripts/lint/update_buildconfig_from_gradle.py
new file mode 100755
index 0000000000..148fa19aa4
--- /dev/null
+++ b/taskcluster/scripts/lint/update_buildconfig_from_gradle.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import argparse
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+from collections import defaultdict
+
+import yaml
+from mergedeep import merge
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_GRADLE_COMMAND = ("./gradlew", "--console=plain", "--no-parallel")
+_LOCAL_DEPENDENCY_PATTERN = re.compile(
+ r"(\+|\\)--- project :(?P<local_dependency_name>\S+)\s?.*"
+)
+
+
+def _get_upstream_deps_per_gradle_project(gradle_root, existing_build_config):
+ project_dependencies = defaultdict(set)
+ gradle_projects = _get_gradle_projects(gradle_root, existing_build_config)
+
+ logger.info(f"Looking for dependencies in {gradle_root}")
+
+ # This is eventually going to fail if there's ever enough projects to make the
+ # command line too long. If that happens, we'll need to split this list up and
+ # run gradle more than once.
+ cmd = list(_DEFAULT_GRADLE_COMMAND)
+ cmd.extend(
+ [f"{gradle_project}:dependencies" for gradle_project in sorted(gradle_projects)]
+ )
+
+ # Parsing output like this is not ideal but bhearsum couldn't find a way
+ # to get the dependencies printed in a better format. If we could convince
+ # gradle to spit out JSON that would be much better.
+ # This is filed as https://bugzilla.mozilla.org/show_bug.cgi?id=1795152
+ current_project_name = None
+ print(f"Running command: {' '.join(cmd)}")
+ try:
+ output = subprocess.check_output(cmd, universal_newlines=True, cwd=gradle_root)
+ except subprocess.CalledProcessError as cpe:
+ print(cpe.output)
+ raise
+ for line in output.splitlines():
+ # If we find the start of a new component section, update our tracking
+ # variable
+ if line.startswith("Project"):
+ current_project_name = line.split(":")[1].strip("'")
+
+ # If we find a new local dependency, add it.
+ local_dep_match = _LOCAL_DEPENDENCY_PATTERN.search(line)
+ if local_dep_match:
+ local_dependency_name = local_dep_match.group("local_dependency_name")
+ if (
+ local_dependency_name != current_project_name
+ # These lint rules are not part of android-components
+ and local_dependency_name != "mozilla-lint-rules"
+ ):
+ project_dependencies[current_project_name].add(local_dependency_name)
+
+ return {
+ project_name: sorted(project_dependencies[project_name])
+ for project_name in gradle_projects
+ }
+
+
+def _get_gradle_projects(gradle_root, existing_build_config):
+ if gradle_root.endswith("android-components"):
+ return list(existing_build_config["projects"].keys())
+ elif gradle_root.endswith("focus-android"):
+ return ["app"]
+ elif gradle_root.endswith("fenix"):
+ return ["app"]
+
+ raise NotImplementedError(f"Cannot find gradle projects for {gradle_root}")
+
+
+def is_dir(string):
+ if os.path.isdir(string):
+ return string
+ else:
+ raise argparse.ArgumentTypeError(f'"{string}" is not a directory')
+
+
+def _parse_args(cmdln_args):
+ parser = argparse.ArgumentParser(
+ description="Calls gradle and generate json file with dependencies"
+ )
+ parser.add_argument(
+ "gradle_root",
+ metavar="GRADLE_ROOT",
+ type=is_dir,
+ help="The directory where to call gradle from",
+ )
+ return parser.parse_args(args=cmdln_args)
+
+
+def _set_logging_config():
+ logging.basicConfig(
+ level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s"
+ )
+
+
+def _merge_build_config(
+ existing_build_config, upstream_deps_per_project, variants_config
+):
+ updated_build_config = {
+ "projects": {
+ project: {"upstream_dependencies": deps}
+ for project, deps in upstream_deps_per_project.items()
+ }
+ }
+ updated_variant_config = {"variants": variants_config} if variants_config else {}
+ return merge(existing_build_config, updated_build_config, updated_variant_config)
+
+
+def _get_variants(gradle_root):
+ cmd = list(_DEFAULT_GRADLE_COMMAND) + ["printVariants"]
+ output_lines = subprocess.check_output(
+ cmd, universal_newlines=True, cwd=gradle_root
+ ).splitlines()
+ variants_line = [line for line in output_lines if line.startswith("variants: ")][0]
+ variants_json = variants_line.split(" ", 1)[1]
+ return json.loads(variants_json)
+
+
+def _should_print_variants(gradle_root):
+ return gradle_root.endswith("fenix") or gradle_root.endswith("focus-android")
+
+
+def main():
+ args = _parse_args(sys.argv[1:])
+ gradle_root = args.gradle_root
+ build_config_file = os.path.join(gradle_root, ".buildconfig.yml")
+ _set_logging_config()
+
+ with open(build_config_file) as f:
+ existing_build_config = yaml.safe_load(f)
+
+ upstream_deps_per_project = _get_upstream_deps_per_gradle_project(
+ gradle_root, existing_build_config
+ )
+
+ variants_config = (
+ _get_variants(gradle_root) if _should_print_variants(gradle_root) else {}
+ )
+ merged_build_config = _merge_build_config(
+ existing_build_config, upstream_deps_per_project, variants_config
+ )
+
+ with open(build_config_file, "w") as f:
+ yaml.safe_dump(merged_build_config, f)
+ logger.info(f"Updated {build_config_file} with latest gradle config!")
+
+
+__name__ == "__main__" and main()
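
To make the parsing step concrete, this is how _LOCAL_DEPENDENCY_PATTERN picks local project dependencies out of `gradle <project>:dependencies` console output; the sample lines are fabricated but follow Gradle's dependency-tree format:

    import re

    _LOCAL_DEPENDENCY_PATTERN = re.compile(
        r"(\+|\\)--- project :(?P<local_dependency_name>\S+)\s?.*"
    )

    sample_output = [
        r"+--- project :concept-fetch",
        r"\--- project :support-base (*)",
        "+--- org.mozilla.geckoview:geckoview-nightly:127.0",  # not a local project
    ]
    for line in sample_output:
        match = _LOCAL_DEPENDENCY_PATTERN.search(line)
        if match:
            print(match.group("local_dependency_name"))
    # prints "concept-fetch" and then "support-base"
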
diff --git a/taskcluster/scripts/misc/android-gradle-dependencies.sh b/taskcluster/scripts/misc/android-gradle-dependencies.sh
index 2624dc961a..354f25f6c2 100755
--- a/taskcluster/scripts/misc/android-gradle-dependencies.sh
+++ b/taskcluster/scripts/misc/android-gradle-dependencies.sh
@@ -17,5 +17,18 @@ export MOZCONFIG=mobile/android/config/mozconfigs/android-arm-gradle-dependencie
./mach build
./mach gradle downloadDependencies
./mach android gradle-dependencies
+pushd mobile/android/fenix
+./gradlew detekt lint assembleDebug mozilla-lint-rules:test
+popd
+pushd mobile/android/focus-android
+./gradlew lint
+popd
+pushd mobile/android/android-components
+# Before building anything we explicitly build one component that contains Glean and initializes
+# the Miniconda Python environment and doesn't have (almost) any other transitive dependencies.
+# If that happens concurrently with other tasks then this seems to fail quite often.
+./gradlew service-nimbus:build
+./gradlew -Pcoverage detekt lint service-nimbus:assembleAndroidTest samples-browser:testGeckoDebugUnitTest tooling-lint:test
+popd
. taskcluster/scripts/misc/android-gradle-dependencies/after.sh
diff --git a/taskcluster/scripts/misc/are-we-esmified-yet.py b/taskcluster/scripts/misc/are-we-esmified-yet.py
deleted file mode 100644
index 9723565dfc..0000000000
--- a/taskcluster/scripts/misc/are-we-esmified-yet.py
+++ /dev/null
@@ -1,190 +0,0 @@
-#!/usr/bin/env python3
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import json
-import pathlib
-import re
-import subprocess
-import sys
-
-TBPL_FAILURE = 2
-
-excluded_files = [
- # Testcase for loader.
- "js/xpconnect/tests/chrome/file_expandosharing.jsm",
- "js/xpconnect/tests/unit/environment_script.js",
- "js/xpconnect/tests/unit/bogus_element_type.jsm",
- "js/xpconnect/tests/unit/bogus_exports_type.jsm",
- "js/xpconnect/tests/unit/envChain.jsm",
- "js/xpconnect/tests/unit/envChain_subscript.jsm",
- "js/xpconnect/tests/unit/environment_checkscript.jsm",
- "js/xpconnect/tests/unit/environment_loadscript.jsm",
- "js/xpconnect/tests/unit/import_stack.jsm",
- "js/xpconnect/tests/unit/importer.jsm",
- "js/xpconnect/tests/unit/jsm_loaded-1.jsm",
- "js/xpconnect/tests/unit/jsm_loaded-2.jsm",
- "js/xpconnect/tests/unit/jsm_loaded-3.jsm",
- "js/xpconnect/tests/unit/not-esmified-not-exported.jsm",
- "js/xpconnect/tests/unit/recursive_importA.jsm",
- "js/xpconnect/tests/unit/recursive_importB.jsm",
- "js/xpconnect/tests/unit/syntax_error.jsm",
- "js/xpconnect/tests/unit/TestBlob.jsm",
- "js/xpconnect/tests/unit/TestFile.jsm",
- "js/xpconnect/tests/unit/uninitialized_lexical.jsm",
- "dom/url/tests/file_url.jsm",
- "dom/url/tests/file_worker_url.jsm",
- "dom/url/tests/test_bug883784.jsm",
- "dom/workers/test/WorkerTest.jsm",
- "dom/encoding/test/file_stringencoding.jsm",
- "remote/shared/messagehandler/test/browser/resources/modules/root/invalid.jsm",
- "toolkit/actors/TestProcessActorChild.jsm",
- "toolkit/actors/TestProcessActorParent.jsm",
- "toolkit/actors/TestWindowChild.jsm",
- "toolkit/actors/TestWindowParent.jsm",
- # Testcase for build system.
- "python/mozbuild/mozbuild/test/backend/data/build/bar.jsm",
- "python/mozbuild/mozbuild/test/backend/data/build/baz.jsm",
- "python/mozbuild/mozbuild/test/backend/data/build/foo.jsm",
- "python/mozbuild/mozbuild/test/backend/data/build/qux.jsm",
- # EXPORTED_SYMBOLS inside testcase.
- "tools/lint/eslint/eslint-plugin-mozilla/tests/mark-exported-symbols-as-used.js",
-]
-
-if pathlib.Path(".hg").exists():
- mode = "hg"
-elif pathlib.Path(".git").exists():
- mode = "git"
-else:
- print(
- "Error: This script needs to be run inside mozilla-central checkout "
- "of either mercurial or git.",
- file=sys.stderr,
- )
- sys.exit(TBPL_FAILURE)
-
-
-def new_files_struct():
- return {
- "jsm": [],
- "esm": [],
- "subdir": {},
- }
-
-
-def put_file(files, kind, path):
- """Put a path into files tree structure."""
-
- if str(path) in excluded_files:
- return
-
- name = path.name
-
- current_files = files
- for part in path.parent.parts:
- if part not in current_files["subdir"]:
- current_files["subdir"][part] = new_files_struct()
- current_files = current_files["subdir"][part]
-
- current_files[kind].append(name)
-
-
-def run(cmd):
- """Run command and return output as lines, excluding empty line."""
- lines = subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode()
- return filter(lambda x: x != "", lines.split("\n"))
-
-
-def collect_jsm(files):
- """Collect JSM files."""
- kind = "jsm"
-
- # jsm files
- if mode == "hg":
- cmd = ["hg", "files", "set:glob:**/*.jsm"]
- else:
- cmd = ["git", "ls-files", "*.jsm"]
- for line in run(cmd):
- put_file(files, kind, pathlib.Path(line))
-
- # js files with EXPORTED_SYMBOLS
- if mode == "hg":
- cmd = ["hg", "files", r"set:grep('EXPORTED_SYMBOLS = \[') and glob:**/*.js"]
- for line in run(cmd):
- put_file(files, kind, pathlib.Path(line))
- else:
- handled = {}
- cmd = ["git", "grep", r"EXPORTED_SYMBOLS = \[", "*.js"]
- for line in run(cmd):
- m = re.search("^([^:]+):", line)
- if not m:
- continue
- path = m.group(1)
- if path in handled:
- continue
- handled[path] = True
- put_file(files, kind, pathlib.Path(path))
-
-
-def collect_esm(files):
- """Collect system ESM files."""
- kind = "esm"
-
- # sys.mjs files
- if mode == "hg":
- cmd = ["hg", "files", "set:glob:**/*.sys.mjs"]
- else:
- cmd = ["git", "ls-files", "*.sys.mjs"]
- for line in run(cmd):
- put_file(files, kind, pathlib.Path(line))
-
-
-def to_stat(files):
- """Convert files tree into status tree."""
- jsm = len(files["jsm"])
- esm = len(files["esm"])
- subdir = {}
-
- for key, sub_files in files["subdir"].items():
- sub_stat = to_stat(sub_files)
-
- subdir[key] = sub_stat
- jsm += sub_stat["jsm"]
- esm += sub_stat["esm"]
-
- stat = {
- "jsm": jsm,
- "esm": esm,
- }
- if len(subdir):
- stat["subdir"] = subdir
-
- return stat
-
-
-if mode == "hg":
- cmd = ["hg", "parent", "--template", "{node}"]
- commit_hash = list(run(cmd))[0]
-
- cmd = ["hg", "parent", "--template", "{date|shortdate}"]
- date = list(run(cmd))[0]
-else:
- cmd = ["git", "log", "-1", "--pretty=%H"]
- git_hash = list(run(cmd))[0]
- cmd = ["git", "cinnabar", "git2hg", git_hash]
- commit_hash = list(run(cmd))[0]
-
- cmd = ["git", "log", "-1", "--pretty=%cs"]
- date = list(run(cmd))[0]
-
-files = new_files_struct()
-collect_jsm(files)
-collect_esm(files)
-
-stat = to_stat(files)
-stat["hash"] = commit_hash
-stat["date"] = date
-
-print(json.dumps(stat, indent=2))
diff --git a/taskcluster/scripts/misc/build-cpython.sh b/taskcluster/scripts/misc/build-cpython.sh
index aac2034d0a..95b5e81733 100755
--- a/taskcluster/scripts/misc/build-cpython.sh
+++ b/taskcluster/scripts/misc/build-cpython.sh
@@ -35,7 +35,7 @@ case `uname -s` in
macosx_version_min=10.12
;;
esac
- macosx_sdk=14.2
+ macosx_sdk=14.4
# NOTE: both CFLAGS and CPPFLAGS need to be set here, otherwise
# configure step fails.
sysroot_flags="-isysroot ${MOZ_FETCHES_DIR}/MacOSX${macosx_sdk}.sdk -mmacosx-version-min=${macosx_version_min}"
diff --git a/taskcluster/scripts/misc/build-custom-car.sh b/taskcluster/scripts/misc/build-custom-car.sh
index 62c135c83e..78757a4664 100755
--- a/taskcluster/scripts/misc/build-custom-car.sh
+++ b/taskcluster/scripts/misc/build-custom-car.sh
@@ -55,7 +55,7 @@ fi
# Logic for macosx64
if [[ $(uname -s) == "Darwin" ]]; then
# Modify the config with fetched sdk path
- export MACOS_SYSROOT="$MOZ_FETCHES_DIR/MacOSX14.2.sdk"
+ export MACOS_SYSROOT="$MOZ_FETCHES_DIR/MacOSX14.4.sdk"
# Avoid mixing up the system python and toolchain python in the
# python path configuration
diff --git a/taskcluster/scripts/misc/build-geckodriver.sh b/taskcluster/scripts/misc/build-geckodriver.sh
index 7434ee2ef8..59b3946821 100755
--- a/taskcluster/scripts/misc/build-geckodriver.sh
+++ b/taskcluster/scripts/misc/build-geckodriver.sh
@@ -18,14 +18,14 @@ case "$TARGET" in
COMPRESS_EXT=zip
. $GECKO_PATH/taskcluster/scripts/misc/vs-setup.sh
# Bug 1584530: don't require the Microsoft MSVC runtime to be installed.
- export RUSTFLAGS="-Ctarget-feature=+crt-static -C linker=$MOZ_FETCHES_DIR/clang/bin/lld-link"
+ RUSTFLAGS="-Ctarget-feature=+crt-static -C linker=$MOZ_FETCHES_DIR/clang/bin/lld-link"
export TARGET_CFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml"
export TARGET_CXXFLAGS="-Xclang -ivfsoverlay -Xclang $MOZ_FETCHES_DIR/vs/overlay.yaml"
;;
# OSX cross builds are a bit harder
*-apple-darwin)
export PATH="$MOZ_FETCHES_DIR/clang/bin:$PATH"
- export RUSTFLAGS="-C linker=$GECKO_PATH/taskcluster/scripts/misc/osx-cross-linker"
+ RUSTFLAGS="-C linker=$GECKO_PATH/taskcluster/scripts/misc/osx-cross-linker"
if test "$TARGET" = "aarch64-apple-darwin"; then
export MACOSX_DEPLOYMENT_TARGET=11.0
else
@@ -33,15 +33,16 @@ case "$TARGET" in
fi
;;
aarch64-unknown-linux-musl)
- export RUSTFLAGS="-C linker=$MOZ_FETCHES_DIR/clang/bin/clang -C link-arg=--target=$TARGET -C link-arg=-fuse-ld=lld"
+ RUSTFLAGS="-C linker=$MOZ_FETCHES_DIR/clang/bin/clang -C link-arg=--target=$TARGET -C link-arg=-fuse-ld=lld"
;;
esac
export PATH="$MOZ_FETCHES_DIR/rustc/bin:$PATH"
+export RUSTFLAGS="-Dwarnings $RUSTFLAGS"
cd $GECKO_PATH/testing/geckodriver
-cp $GECKO_PATH/.cargo/config.in $GECKO_PATH/.cargo/config
+cp $GECKO_PATH/.cargo/config.toml.in $GECKO_PATH/.cargo/config.toml
cargo build --frozen --verbose --release --target "$TARGET"
diff --git a/taskcluster/scripts/misc/build-gn-macosx.sh b/taskcluster/scripts/misc/build-gn-macosx.sh
index 0d7f5d50a3..01285f4731 100755
--- a/taskcluster/scripts/misc/build-gn-macosx.sh
+++ b/taskcluster/scripts/misc/build-gn-macosx.sh
@@ -5,7 +5,7 @@ set -e -v
WORKSPACE=$HOME/workspace
-CROSS_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk
+CROSS_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.4.sdk
export MACOSX_DEPLOYMENT_TARGET=10.12
export CC=$MOZ_FETCHES_DIR/clang/bin/clang
diff --git a/taskcluster/scripts/misc/build-llvm-common.sh b/taskcluster/scripts/misc/build-llvm-common.sh
index 73efc5093d..a5cd8b542f 100755
--- a/taskcluster/scripts/misc/build-llvm-common.sh
+++ b/taskcluster/scripts/misc/build-llvm-common.sh
@@ -39,11 +39,11 @@ case "$target" in
-DCMAKE_LIPO=$MOZ_FETCHES_DIR/clang/bin/llvm-lipo
-DCMAKE_SYSTEM_NAME=Darwin
-DCMAKE_SYSTEM_VERSION=$MACOSX_DEPLOYMENT_TARGET
- -DCMAKE_OSX_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk
+ -DCMAKE_OSX_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.4.sdk
-DCMAKE_EXE_LINKER_FLAGS=-fuse-ld=lld
-DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld
-DDARWIN_osx_ARCHS=$arch
- -DDARWIN_osx_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.2.sdk
+ -DDARWIN_osx_SYSROOT=$MOZ_FETCHES_DIR/MacOSX14.4.sdk
-DDARWIN_macosx_OVERRIDE_SDK_VERSION=11.0
-DDARWIN_osx_BUILTIN_ARCHS=$arch
-DLLVM_DEFAULT_TARGET_TRIPLE=$target
diff --git a/taskcluster/scripts/misc/build-nasm.sh b/taskcluster/scripts/misc/build-nasm.sh
index 98370b312f..bf01b8cc55 100755
--- a/taskcluster/scripts/misc/build-nasm.sh
+++ b/taskcluster/scripts/misc/build-nasm.sh
@@ -38,13 +38,13 @@ case "$1" in
macosx64)
export MACOSX_DEPLOYMENT_TARGET=10.12
TARGET=x86_64-apple-darwin
- CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk"
+ CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk"
EXE=
;;
macosx64-aarch64)
export MACOSX_DEPLOYMENT_TARGET=11.0
TARGET=aarch64-apple-darwin
- CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk"
+ CC="clang -fuse-ld=lld --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk"
EXE=
;;
*)
diff --git a/taskcluster/scripts/misc/build-pkgconf.sh b/taskcluster/scripts/misc/build-pkgconf.sh
index ef211eeef1..330a8c53f3 100755
--- a/taskcluster/scripts/misc/build-pkgconf.sh
+++ b/taskcluster/scripts/misc/build-pkgconf.sh
@@ -16,13 +16,13 @@ x86_64-unknown-linux-gnu)
x86_64-apple-darwin)
export MACOSX_DEPLOYMENT_TARGET=10.12
TARGET=$1
- CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk"
+ CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk"
EXE=
;;
aarch64-apple-darwin)
export MACOSX_DEPLOYMENT_TARGET=11.0
TARGET=$1
- CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk"
+ CC="clang --target=$TARGET -isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk"
EXE=
;;
x86_64-pc-windows-gnu)
diff --git a/taskcluster/scripts/misc/build-rust-based-toolchain.sh b/taskcluster/scripts/misc/build-rust-based-toolchain.sh
index 707ba9d478..42715f6607 100755
--- a/taskcluster/scripts/misc/build-rust-based-toolchain.sh
+++ b/taskcluster/scripts/misc/build-rust-based-toolchain.sh
@@ -31,8 +31,8 @@ x86_64-unknown-linux-gnu)
fi
export CC="$MOZ_FETCHES_DIR/clang/bin/clang"
export CXX="$MOZ_FETCHES_DIR/clang/bin/clang++"
- export TARGET_CFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk"
- export TARGET_CXXFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk -stdlib=libc++"
+ export TARGET_CFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk"
+ export TARGET_CXXFLAGS="-isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk -stdlib=libc++"
;;
*-pc-windows-msvc)
# Cross-compiling for Windows on Linux.
diff --git a/taskcluster/scripts/misc/fetch-content b/taskcluster/scripts/misc/fetch-content
index 6e7b625dce..8dcf0960cc 100755
--- a/taskcluster/scripts/misc/fetch-content
+++ b/taskcluster/scripts/misc/fetch-content
@@ -263,8 +263,6 @@ def download_to_path(url, path, sha256=None, size=None):
fh.write(chunk)
return
- except IntegrityError:
- raise
except Exception as e:
log("Download failed: {}".format(e))
continue
@@ -275,17 +273,15 @@ def download_to_path(url, path, sha256=None, size=None):
def download_to_memory(url, sha256=None, size=None):
"""Download a URL to memory, possibly with verification."""
- data = b""
for _ in retrier(attempts=5, sleeptime=60):
- try:
- log("Downloading %s" % (url))
+ data = b""
+ log("Downloading %s" % (url))
+ try:
for chunk in stream_download(url, sha256=sha256, size=size):
data += chunk
return data
- except IntegrityError:
- raise
except Exception as e:
log("Download failed: {}".format(e))
continue
diff --git a/taskcluster/scripts/misc/fetch-talos-pdfs.py b/taskcluster/scripts/misc/fetch-talos-pdfs.py
new file mode 100755
index 0000000000..059af062e7
--- /dev/null
+++ b/taskcluster/scripts/misc/fetch-talos-pdfs.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This script downloads all the required PDFs from the test_manifest.json
+file found in the mozilla pdf.js repo.
+"""
+
+import json
+import os
+import pathlib
+import shutil
+
+import requests
+from redo import retriable
+
+
+def log(msg):
+ print("fetch-talos-pdf: %s" % msg)
+
+
+@retriable(attempts=7, sleeptime=5, sleepscale=2)
+def fetch_file(url, filepath):
+ """Download a file from the given url to a given file.
+
+ :param str url: URL to download file from.
+    :param Path filepath: Location to output the downloaded file
+ (includes the name of the file).
+ """
+ size = 4096
+ r = requests.get(url, stream=True)
+ r.raise_for_status()
+
+ with filepath.open("wb") as fd:
+ for chunk in r.iter_content(size):
+ fd.write(chunk)
+
+
+def fetch_talos_pdf_link(pdf_path, output_file):
+ """Fetches a PDF file with a link into the output file location.
+
+ :param Path pdf_path: Path to a PDF file that contains a URL to download from.
+ :param Path output_file: Location (including the file name) to download PDF to.
+ """
+ pdf_link = pdf_path.read_text().strip()
+ log(f"Downloading from PDF link: {pdf_link}")
+ fetch_file(pdf_link, output_file)
+
+
+def gather_talos_pdf(test_folder, pdf_info, output_dir):
+ """Gathers a PDF file into the output directory.
+
+ :param Path test_folder: The test folder that the pdfs can be found in.
+    :param dict pdf_info: Information about the PDF we're currently gathering, as
+ found in the test/test_manifest.json file from the pdf.js repo.
+ :param Path output_dir: The directory to move/download the PDF to.
+ """
+ pdf_file = pdf_info["file"]
+ output_pdf_path = pathlib.Path(output_dir, pathlib.Path(pdf_file).name)
+
+ log(f"Gathering PDF {pdf_file}...")
+ if output_pdf_path.exists():
+ log(f"{pdf_file} already exists in output location")
+ elif pdf_info.get("link", False):
+ fetch_talos_pdf_link(
+ pathlib.Path(test_folder, pdf_file + ".link"), output_pdf_path
+ )
+ else:
+ log(f"Copying PDF to output location {output_pdf_path}")
+ shutil.copy(pathlib.Path(test_folder, pdf_file), output_pdf_path)
+
+
+def gather_talos_pdfs(pdf_js_repo, output_dir):
+ """Gather all pdfs to be used in the talos pdfpaint test.
+
+    Uses the pdf.js repo to gather the files from its test/test_manifest.json
+ file. Some of these are also links that need to be downloaded. These
+ are output in an output directory.
+
+ :param Path pdf_js_repo: Path to the Mozilla Github pdf.js repo.
+ :param Path output_dir: Output directory for the PDFs.
+ """
+ test_manifest_path = pathlib.Path(
+ pdf_js_repo, "test", "test_manifest.json"
+ ).resolve()
+ test_folder = test_manifest_path.parent
+
+ # Gather all the PDFs into the output directory
+ test_manifest = json.loads(test_manifest_path.read_text())
+ for pdf_info in test_manifest:
+ gather_talos_pdf(test_folder, pdf_info, output_dir)
+
+ # Include the test manifest in the output directory as it
+ # contains the names of the tests
+ shutil.copy(test_manifest_path, pathlib.Path(output_dir, test_manifest_path.name))
+
+
+if __name__ == "__main__":
+ moz_fetches_dir = os.environ.get("MOZ_FETCHES_DIR", "")
+ if not moz_fetches_dir:
+ raise Exception(
+ "MOZ_FETCHES_DIR is not set to the path containing the pdf.js repo"
+ )
+
+ pdf_js_repo = pathlib.Path(moz_fetches_dir, "pdf.js")
+ if not pdf_js_repo.exists():
+ raise Exception("Can't find the pdf.js repository in MOZ_FETCHES_DIR")
+
+ output_dir = os.environ.get("OUTPUT_DIR", "")
+ if not output_dir:
+ raise Exception("OUTPUT_DIR is not set for the file output")
+
+ output_dir_path = pathlib.Path(output_dir)
+ output_dir_path.mkdir(parents=True, exist_ok=True)
+ gather_talos_pdfs(pdf_js_repo, output_dir_path)
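
For reference, gather_talos_pdf() consumes manifest entries shaped like the sketch below (the entries are illustrative, not copied from the real pdf.js manifest): plain entries are copied straight from the repo, while entries with "link": true have a sibling `<file>.link` whose contents are the URL to download:

    # Illustrative test_manifest.json entries as consumed by gather_talos_pdf().
    manifest_entries = [
        {"id": "example-local", "file": "pdfs/example-local.pdf"},
        # "link": True means pdfs/example-linked.pdf.link holds the download URL.
        {"id": "example-linked", "file": "pdfs/example-linked.pdf", "link": True},
    ]
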
diff --git a/taskcluster/scripts/misc/get-hostutils.sh b/taskcluster/scripts/misc/get-hostutils.sh
new file mode 100755
index 0000000000..95173c1b4f
--- /dev/null
+++ b/taskcluster/scripts/misc/get-hostutils.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+artifact=$(basename "$TOOLCHAIN_ARTIFACT")
+project=${artifact%.tar.*}
+
+cd $GECKO_PATH
+
+. taskcluster/scripts/misc/tooltool-download.sh
+
+cd $MOZ_FETCHES_DIR
+mv host-utils-* $project
+tar -acvf $artifact $project
+mkdir -p $UPLOAD_DIR
+mv $artifact $UPLOAD_DIR
diff --git a/taskcluster/scripts/misc/gradle-python-envs.sh b/taskcluster/scripts/misc/gradle-python-envs.sh
new file mode 100755
index 0000000000..5873e3fa91
--- /dev/null
+++ b/taskcluster/scripts/misc/gradle-python-envs.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+set -x -e -v
+
+VERSION="$1"
+
+BASE_URL=https://plugins.gradle.org/m2/gradle/plugin/com/jetbrains/python/gradle-python-envs
+
+mkdir -p "${UPLOAD_DIR}"
+wget --no-parent --recursive --execute robots=off "${BASE_URL}/${VERSION}/"
+tar caf "${UPLOAD_DIR}/gradle-python-envs-${VERSION}.tar.zst" plugins.gradle.org
diff --git a/taskcluster/scripts/misc/osx-cross-linker b/taskcluster/scripts/misc/osx-cross-linker
index ec08589524..886002bb07 100755
--- a/taskcluster/scripts/misc/osx-cross-linker
+++ b/taskcluster/scripts/misc/osx-cross-linker
@@ -4,5 +4,5 @@ exec $MOZ_FETCHES_DIR/clang/bin/clang -v \
-fuse-ld=lld \
-mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET:-10.12} \
-target $TARGET \
- -isysroot $MOZ_FETCHES_DIR/MacOSX14.2.sdk \
+ -isysroot $MOZ_FETCHES_DIR/MacOSX14.4.sdk \
"$@"
diff --git a/taskcluster/scripts/misc/repack_rust.py b/taskcluster/scripts/misc/repack_rust.py
index 43bbe44f16..2c8ff7288d 100755
--- a/taskcluster/scripts/misc/repack_rust.py
+++ b/taskcluster/scripts/misc/repack_rust.py
@@ -422,6 +422,7 @@ def build_src(install_dir, host, targets, patches):
[build]
docs = false
sanitizers = true
+ profiler = true
extended = true
tools = ["analysis", "cargo", "rustfmt", "clippy", "src", "rust-analyzer"]
cargo-native-static = true
diff --git a/taskcluster/scripts/misc/run-fetch-talos-pdfs.sh b/taskcluster/scripts/misc/run-fetch-talos-pdfs.sh
new file mode 100755
index 0000000000..c5c350fe78
--- /dev/null
+++ b/taskcluster/scripts/misc/run-fetch-talos-pdfs.sh
@@ -0,0 +1,10 @@
+#! /bin/bash -vex
+set -x -e -v
+
+export OUTPUT_DIR=/builds/worker/talos-pdfs
+
+cd $GECKO_PATH
+./mach python taskcluster/scripts/misc/fetch-talos-pdfs.py
+
+mkdir -p $UPLOAD_DIR
+tar -cavf $UPLOAD_DIR/talos-pdfs.tar.zst -C $OUTPUT_DIR/.. talos-pdfs
diff --git a/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh b/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh
index bfed36012c..d984f8d8e8 100755
--- a/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh
+++ b/taskcluster/scripts/misc/wr-macos-cross-build-setup.sh
@@ -3,7 +3,7 @@ set -x -e -v
export TARGET_TRIPLE="x86_64-apple-darwin"
-MACOS_SYSROOT="${MOZ_FETCHES_DIR}/MacOSX14.2.sdk"
+MACOS_SYSROOT="${MOZ_FETCHES_DIR}/MacOSX14.4.sdk"
CLANGDIR="${MOZ_FETCHES_DIR}/clang"
# Deploy the wrench dependencies
diff --git a/taskcluster/scripts/misc/wrench-deps-vendoring.sh b/taskcluster/scripts/misc/wrench-deps-vendoring.sh
index 8b1897bfd8..7fd77bc074 100755
--- a/taskcluster/scripts/misc/wrench-deps-vendoring.sh
+++ b/taskcluster/scripts/misc/wrench-deps-vendoring.sh
@@ -12,7 +12,7 @@ cd $GECKO_PATH
export PATH=$PATH:$MOZ_FETCHES_DIR/rustc/bin:$HOME/.cargo/bin
cd gfx/wr/
mkdir .cargo
-cargo vendor --locked --sync ./Cargo.toml > .cargo/config
+cargo vendor --locked --sync ./Cargo.toml > .cargo/config.toml
mkdir wrench-deps
mv vendor .cargo wrench-deps/
diff --git a/taskcluster/scripts/slack_notifier.py b/taskcluster/scripts/slack_notifier.py
new file mode 100644
index 0000000000..c4377f0ed0
--- /dev/null
+++ b/taskcluster/scripts/slack_notifier.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This module provides functionalities for sending notifications to Slack channels, specifically designed for use in automated testing and release processes. It includes capabilities for sending both success and error notifications with customizable message templates. The module leverages Taskcluster for notification services and integrates with Slack's API to deliver real-time updates.
+
+Key Features:
+- SLACK_SUCCESS_MESSAGE_TEMPLATE: A predefined template for formatting success messages to be sent to Slack. This template includes placeholders for dynamic content such as product version and release details.
+- SLACK_ERROR_MESSAGE_TEMPLATE: A template for error messages, used to notify about failures or issues in automated processes, particularly with TestRail API interactions.
+- send_slack_notification: A function that sends a Slack notification based on a provided template and value dictionary. It handles the construction of the message payload and interfaces with Taskcluster's Slack notification service.
+- get_taskcluster_options: Retrieves configuration options for Taskcluster based on the current runtime environment, ensuring appropriate setup for notification delivery.
+- send_error_notification: A higher-level function that formats and sends error notifications to a specified Slack channel.
+- send_success_notification: Similarly, this function sends success notifications to a specified Slack channel, using the success message template.
+
+Usage:
+The module is intended to be integrated into automated testing and release workflows, where Slack notifications are required to report the status of various processes, such as test executions or release milestones.
+
+Required Values for Notifications:
+
+These values are required when calling the `send_success_notification` and `send_slack_notification` functions.
+They must be passed as an object with the following keys and their respective values.
+
+Required Keys and Expected Values:
+- RELEASE_TYPE: <string> Release Type or Stage (e.g., Alpha, Beta, RC).
+- RELEASE_VERSION: <string> Release Version from versions.txt (e.g., '124.0b5').
+- SHIPPING_PRODUCT: <string> Release Tag Name (e.g., fennec, focus).
+- TESTRAIL_PROJECT_ID: <int> Project ID of the TestRail project (e.g., 59 for Fenix Browser).
+- TESTRAIL_PRODUCT_TYPE: <string> Name for the official release product (e.g., Firefox, not fennec).
+
+These values are used as arguments for `success_values` and `values` when calling the respective functions.
+
+Example Usage:
+
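+# Taskcluster options can be obtained from get_taskcluster_options() in this module:
+taskcluster_options = get_taskcluster_options()
+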
+success_values = {
+ "RELEASE_TYPE": "Beta",
+ "RELEASE_VERSION": "124.0b5",
+ "SHIPPING_PRODUCT": "fennec",
+ "TESTRAIL_PROJECT_ID": 59, # Fenix Browser
+ "TESTRAIL_PRODUCT_TYPE": "Firefox"
+}
+
+send_success_notification(success_values, 'channel_id', taskcluster_options)
+
+values = {
+ "timestamp": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
+ "error_message": error_message,
+}
+
+send_error_notification(values, 'channel_id', taskcluster_options)
+"""
+
+import json
+import os
+import time
+import traceback
+from string import Template
+
+import taskcluster
+
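+# The message templates below are Slack Block Kit payloads expressed as JSON
+# strings; $-placeholders are filled in with string.Template.safe_substitute()
+# and the result is parsed with json.loads() before being sent via
+# taskcluster.Notify(...).slack().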
+SLACK_SUCCESS_MESSAGE_TEMPLATE = Template(
+ """
+[
+ {
+ "type": "header",
+ "text": {
+ "type": "plain_text",
+ "text": "New Release: :firefox: $SHIPPING_PRODUCT-v$RELEASE_VERSION :star:"
+ }
+ },
+ {
+ "type": "divider"
+ },
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": "*Testrail Release*: $TESTRAIL_PRODUCT_TYPE $RELEASE_TYPE $RELEASE_VERSION <https://testrail.stage.mozaws.net/index.php?/projects/overview/$TESTRAIL_PROJECT_ID|Milestone> has been created:testrail:"
+ }
+ },
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": "*UI Automated Tests*:"
+ }
+ },
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": " :white_check_mark: Automated smoke test - Google Pixel 3(Android 11)"
+ }
+ },
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": ":white_check_mark: Automated smoke test - Google Pixel 2(Android 9)"
+ }
+ },
+ {
+ "type": "divider"
+ },
+ {
+ "type": "context",
+ "elements": [
+ {
+ "type": "mrkdwn",
+ "text": ":testops-notify: created by <https://mozilla-hub.atlassian.net/wiki/spaces/MTE/overview|Mobile Test Engineering>"
+ }
+ ]
+ }
+]
+"""
+)
+
+SLACK_ERROR_MESSAGE_TEMPLATE = Template(
+ """
+[
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": "Failed to call TestRail API at $timestamp with error: $error_message"
+ }
+ }
+]
+"""
+)
+
+
+def send_slack_notification(template, values, channel_id, options):
+ """
+ Sends a Slack notification based on the provided template and values.
+
+ :param template: Template object for the Slack message.
+ :param values: Dictionary containing values to substitute in the template.
+ :param channel_id: Slack channel ID to send the message to.
+ :param options: Taskcluster options for the notification service.
+ """
+ slack_message = json.loads(template.safe_substitute(**values))
+ # workaround for https://github.com/taskcluster/taskcluster/issues/6801
+ duplicate_message_workaround = str(int(time.time()))
+ payload = {
+ "channelId": channel_id,
+ "text": duplicate_message_workaround,
+ "blocks": slack_message,
+ }
+
+ try:
+ response = taskcluster.Notify(options).slack(payload)
+ print("Response from API:", response)
+ except Exception as e:
+ print(f"Error sending Slack message: {e}")
+ traceback.print_exc()
+
+ if hasattr(e, "response"):
+ print("Response content:", e.response.text)
+
+
+def get_taskcluster_options():
+ """
+ Retrieves the Taskcluster setup options according to the current environment.
+
+ :return: A dictionary of Taskcluster options.
+ """
+ options = taskcluster.optionsFromEnvironment()
+ proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
+
+ if proxy_url is not None:
+ # Always use proxy url when available
+ options["rootUrl"] = proxy_url
+
+ if "rootUrl" not in options:
+ # Always have a value in root url
+ options["rootUrl"] = "https://community-tc.services.mozilla.com"
+
+ return options
+
+
+def send_error_notification(error_message, channel_id, options):
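+    """
+    Formats and sends an error notification to the given Slack channel.
+
+    :param error_message: Error text substituted into SLACK_ERROR_MESSAGE_TEMPLATE.
+    :param channel_id: Slack channel ID to send the message to.
+    :param options: Taskcluster options for the notification service.
+    """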
+ values = {
+ "timestamp": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
+ "error_message": error_message,
+ }
+ send_slack_notification(SLACK_ERROR_MESSAGE_TEMPLATE, values, channel_id, options)
+
+
+def send_success_notification(success_values, channel_id, options):
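+    """
+    Formats and sends a release success notification to the given Slack channel.
+
+    :param success_values: Dictionary with the keys described in the module docstring
+        (RELEASE_TYPE, RELEASE_VERSION, SHIPPING_PRODUCT, TESTRAIL_PROJECT_ID, TESTRAIL_PRODUCT_TYPE).
+    :param channel_id: Slack channel ID to send the message to.
+    :param options: Taskcluster options for the notification service.
+    """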
+ send_slack_notification(
+ SLACK_SUCCESS_MESSAGE_TEMPLATE, success_values, channel_id, options
+ )
diff --git a/taskcluster/scripts/testrail_main.py b/taskcluster/scripts/testrail_main.py
new file mode 100644
index 0000000000..19bb2f41ea
--- /dev/null
+++ b/taskcluster/scripts/testrail_main.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This Python script automates creating milestones and test runs in TestRail and updating
+test cases based on the results of automated smoke tests for different product releases.
+
+Functionality includes:
+- Reading TestRail credentials and environment variables.
+- Building milestone names and descriptions.
+- Interacting with the TestRail API to create milestones, test runs, and update test cases.
+- Sending notifications to a specified Slack channel.
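+
+Required environment variables (values shown are illustrative):
+- SHIPPING_PRODUCT (e.g., fennec)
+- TESTRAIL_PRODUCT_TYPE (e.g., Firefox)
+- TESTRAIL_PROJECT_ID (e.g., 59 for Fenix Browser)
+- TESTRAIL_TEST_SUITE_ID
+
+TestRail credentials are read from .testrail_credentials.json (keys: host, username, password).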
+"""
+
+import os
+import sys
+
+from lib.testrail_api import TestRail
+from lib.testrail_utils import (
+ build_milestone_description,
+ build_milestone_name,
+ get_release_type,
+ get_release_version,
+ load_testrail_credentials,
+)
+from slack_notifier import (
+ get_taskcluster_options,
+ send_error_notification,
+ send_success_notification,
+)
+
+# Constants
+SUCCESS_CHANNEL_ID = "C02KDDS9QM9" # mobile-testeng
+ERROR_CHANNEL_ID = "G016BC5FUHJ" # mobile-alerts-sandbox
+
+
+def main():
+ # Load TestRail credentials
+ credentials = load_testrail_credentials(".testrail_credentials.json")
+ testrail = TestRail(
+ credentials["host"], credentials["username"], credentials["password"]
+ )
+
+ # Read task environment variables
+ try:
+ shipping_product = os.environ["SHIPPING_PRODUCT"]
+ testrail_product_type = os.environ["TESTRAIL_PRODUCT_TYPE"]
+ testrail_project_id = os.environ["TESTRAIL_PROJECT_ID"]
+ testrail_test_suite_id = os.environ["TESTRAIL_TEST_SUITE_ID"]
+ except KeyError as e:
+ raise ValueError(f"ERROR: Missing Environment Variable: {e}")
+
+ # Release information
+ release_version = get_release_version()
+ release_type = get_release_type(release_version)
+
+ # Build milestone information
+ milestone_name = build_milestone_name(
+ testrail_product_type, release_type, release_version
+ )
+ milestone_description = build_milestone_description(milestone_name)
+
+ # Configure Taskcluster API
+ options = get_taskcluster_options()
+
+ try:
+ # Check if milestone exists
+ if testrail.does_milestone_exist(testrail_project_id, milestone_name):
+ print(f"Milestone for {milestone_name} already exists. Exiting script...")
+ sys.exit()
+
+ # Create milestone and test runs
+ devices = ["Google Pixel 3(Android11)", "Google Pixel 2(Android11)"]
+ testrail.create_milestone_and_test_runs(
+ testrail_project_id,
+ milestone_name,
+ milestone_description,
+ devices,
+ testrail_test_suite_id,
+ )
+
+ # Send success notification
+ success_values = {
+ "RELEASE_TYPE": release_type,
+ "RELEASE_VERSION": release_version,
+ "SHIPPING_PRODUCT": shipping_product,
+ "TESTRAIL_PROJECT_ID": testrail_project_id,
+ "TESTRAIL_PRODUCT_TYPE": testrail_product_type,
+ }
+ send_success_notification(success_values, SUCCESS_CHANNEL_ID, options)
+
+ except Exception as error_message:
+ send_error_notification(str(error_message), ERROR_CHANNEL_ID, options)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/scripts/tests/test-lab.py b/taskcluster/scripts/tests/test-lab.py
new file mode 100644
index 0000000000..b8b812df89
--- /dev/null
+++ b/taskcluster/scripts/tests/test-lab.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Firebase Test Lab (Flank) test runner script for Taskcluster
+# This script is used to run UI tests on Firebase Test Lab using Flank
+# It requires a service account key file to authenticate with Firebase Test Lab
+# It also requires the `gcloud` command line tool to be installed and configured
+# Lastly, it requires the `flank.jar` file to be present in the `test-tools` directory set up in the task definition
+# The service account key file is stored in the `secrets` section of the task definition
+
+# Flank: https://flank.github.io/flank/
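+#
+# Example invocation (illustrative; actual paths come from the task definition):
+#   python3 test-lab.py <flank-config-name> /abs/path/to/app.apk --apk_test /abs/path/to/androidTest.apk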
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+from enum import Enum
+from pathlib import Path
+from typing import List, Optional, Union
+
+
+# Worker paths and binaries
+class Worker(Enum):
+ JAVA_BIN = "/usr/bin/java"
+ FLANK_BIN = "/builds/worker/test-tools/flank.jar"
+ RESULTS_DIR = "/builds/worker/artifacts/results"
+ ARTIFACTS_DIR = "/builds/worker/artifacts"
+
+
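+# Directory containing the Flank YAML configs and result-parsing scripts used
+# below (relative path; assumed to be resolved from the task's working directory).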
+ANDROID_TEST = "./automation/taskcluster/androidTest"
+
+
+def setup_logging():
+ """Configure logging for the script."""
+ log_format = "%(message)s"
+ logging.basicConfig(level=logging.INFO, format=log_format)
+
+
+def run_command(
+ command: List[Union[str, bytes]], log_path: Optional[str] = None
+) -> int:
+ """Execute a command, log its output, and check for errors.
+
+ Args:
+ command: The command to execute
+ log_path: The path to a log file to write the command output to
+ Returns:
+ int: The exit code of the command
+ """
+
+ with subprocess.Popen(
+ command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
+ ) as process:
+ if log_path:
+ with open(log_path, "a") as log_file:
+ for line in process.stdout:
+ sys.stdout.write(line)
+ log_file.write(line)
+ else:
+ for line in process.stdout:
+ sys.stdout.write(line)
+ process.wait()
+ sys.stdout.flush()
+ if process.returncode != 0:
+ error_message = f"Command {' '.join(command)} failed with exit code {process.returncode}"
+ logging.error(error_message)
+ return process.returncode
+
+
+def setup_environment():
+ """Configure Google Cloud project and authenticate with the service account."""
+ project_id = os.getenv("GOOGLE_PROJECT")
+ credentials_file = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
+ if not project_id or not credentials_file:
+ logging.error(
+ "Error: GOOGLE_PROJECT and GOOGLE_APPLICATION_CREDENTIALS environment variables must be set."
+ )
+ sys.exit(1)
+
+ run_command(["gcloud", "config", "set", "project", project_id])
+ run_command(
+ ["gcloud", "auth", "activate-service-account", "--key-file", credentials_file]
+ )
+
+
+def execute_tests(
+ flank_config: str, apk_app: Path, apk_test: Optional[Path] = None
+) -> int:
+ """Run UI tests on Firebase Test Lab using Flank.
+
+ Args:
+        flank_config: The YAML configuration for Flank to use, e.g. automation/taskcluster/androidTest/flank-<config>.yml
+        apk_app: Absolute path to an Android APK application package, used for both robo and instrumentation tests
+        apk_test: Absolute path to an Android APK androidTest package (optional; omit to run a robo test)
+ Returns:
+ int: The exit code of the command
+ """
+
+ run_command([Worker.JAVA_BIN.value, "-jar", Worker.FLANK_BIN.value, "--version"])
+
+ flank_command = [
+ Worker.JAVA_BIN.value,
+ "-jar",
+ Worker.FLANK_BIN.value,
+ "android",
+ "run",
+ "--config",
+ f"{ANDROID_TEST}/flank-{flank_config}.yml",
+ "--app",
+ str(apk_app),
+ "--local-result-dir",
+ Worker.RESULTS_DIR.value,
+ "--project",
+ os.environ.get("GOOGLE_PROJECT"),
+ "--client-details",
+ f'matrixLabel={os.environ.get("PULL_REQUEST_NUMBER", "None")}',
+ ]
+
+ # Add androidTest APK if provided (optional) as robo test or instrumentation test
+ if apk_test:
+ flank_command.extend(["--test", str(apk_test)])
+
+ exit_code = run_command(flank_command, "flank.log")
+ if exit_code == 0:
+ logging.info("All UI test(s) have passed!")
+ return exit_code
+
+
+def process_results(flank_config: str, test_type: str = "instrumentation") -> None:
+ """Process and parse test results.
+
+ Args:
+        flank_config: The YAML configuration for Flank to use, e.g. automation/taskcluster/androidTest/flank-<config>.yml
+        test_type: The type of tests that were run, either "instrumentation" (default) or "robo"
+ """
+
+ # Ensure directories exist and scripts are executable
+ github_dir = os.path.join(Worker.ARTIFACTS_DIR.value, "github")
+ os.makedirs(github_dir, exist_ok=True)
+
+ parse_ui_test_script = os.path.join(ANDROID_TEST, "parse-ui-test.py")
+ parse_ui_test_fromfile_script = os.path.join(
+ ANDROID_TEST, "parse-ui-test-fromfile.py"
+ )
+ copy_robo_crash_artifacts_script = os.path.join(
+ ANDROID_TEST, "copy-robo-crash-artifacts.py"
+ )
+
+ os.chmod(parse_ui_test_script, 0o755)
+ os.chmod(parse_ui_test_fromfile_script, 0o755)
+ os.chmod(copy_robo_crash_artifacts_script, 0o755)
+
+ # Run parsing scripts and check for errors
+
+ # Process the results differently based on the test type: robo or instrumentation
+ exit_code = 0
+ if test_type == "instrumentation":
+ exit_code = run_command(
+ [parse_ui_test_fromfile_script, "--results", Worker.RESULTS_DIR.value],
+ "flank.log",
+ )
+
+ # If the test type is robo, run a script that copies the crash artifacts from Cloud Storage over (if there are any from failed devices)
+ if test_type == "robo":
+ exit_code = run_command([copy_robo_crash_artifacts_script])
+
+ command = [
+ parse_ui_test_script,
+ "--exit-code",
+ str(0),
+ "--log",
+ "flank.log",
+ "--results",
+ Worker.RESULTS_DIR.value,
+ "--output-md",
+ os.path.join(github_dir, "customCheckRunText.md"),
+ "--device-type",
+ flank_config,
+ ]
+ if exit_code == 0:
+ # parse_ui_test_script error messages are pretty generic; only
+ # report them if errors have not already been reported
+ command.append("--report-treeherder-failures")
+ run_command(
+ command,
+ "flank.log",
+ )
+
+
+def main():
+ """Parse command line arguments and execute the test runner."""
+ parser = argparse.ArgumentParser(
+ description="Run UI tests on Firebase Test Lab using Flank as a test runner"
+ )
+ parser.add_argument(
+ "flank_config",
+        help="The YAML configuration for Flank to use, e.g. automation/taskcluster/androidTest/flank-<config>.yml",
+ )
+ parser.add_argument(
+        "apk_app", help="Absolute path to an Android APK application package"
+ )
+ parser.add_argument(
+ "--apk_test",
+        help="Absolute path to an Android APK androidTest package",
+ default=None,
+ )
+ args = parser.parse_args()
+
+ setup_environment()
+
+ # Only resolve apk_test if it is provided
+ apk_test_path = Path(args.apk_test).resolve() if args.apk_test else None
+ exit_code = execute_tests(
+ flank_config=args.flank_config,
+ apk_app=Path(args.apk_app).resolve(),
+ apk_test=apk_test_path,
+ )
+
+    # Determine the test type (instrumentation vs. robo) so results are processed accordingly
+ instrumentation_type = "instrumentation" if args.apk_test else "robo"
+ process_results(flank_config=args.flank_config, test_type=instrumentation_type)
+
+ sys.exit(exit_code)
+
+
+if __name__ == "__main__":
+ setup_logging()
+ main()
diff --git a/taskcluster/scripts/write-dummy-secret.py b/taskcluster/scripts/write-dummy-secret.py
new file mode 100755
index 0000000000..c96caad749
--- /dev/null
+++ b/taskcluster/scripts/write-dummy-secret.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
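+# Writes a placeholder secret value to a file, creating parent directories as
+# needed. Example invocation (illustrative):
+#   write-dummy-secret.py -f /path/to/secret-file -c "fake-secret"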
+
+import argparse
+import errno
+import os
+
+
+def write_secret_to_file(path, secret):
+ path = os.path.abspath(os.path.join(os.getcwd(), path))
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+
+ print(f"Outputting secret to: {path}")
+
+ with open(path, "w") as f:
+ f.write(secret)
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Store a dummy secret to a file")
+
+ parser.add_argument(
+ "-c", dest="content", action="store", help="content of the secret"
+ )
+ parser.add_argument(
+ "-f", dest="path", action="store", help="file to save secret to"
+ )
+
+ result = parser.parse_args()
+
+ write_secret_to_file(result.path, result.content)
+
+
+if __name__ == "__main__":
+ main()