author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 19:33:14 +0000
commit     36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree       105e8c98ddea1c1e4784a60a5a6410fa416be2de /taskcluster/docker/updatebot
parent     Initial commit. (diff)
Adding upstream version 115.7.0esr.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/docker/updatebot')
-rw-r--r--  taskcluster/docker/updatebot/Dockerfile | 24
-rw-r--r--  taskcluster/docker/updatebot/VERSION | 1
-rw-r--r--  taskcluster/docker/updatebot/arcanist_patch_size.patch | 13
-rw-r--r--  taskcluster/docker/updatebot/arcanist_windows_stream.patch | 19
-rw-r--r--  taskcluster/docker/updatebot/hgrc | 7
-rw-r--r--  taskcluster/docker/updatebot/moz.build | 8
-rwxr-xr-x  taskcluster/docker/updatebot/privileged-setup.sh | 82
-rwxr-xr-x  taskcluster/docker/updatebot/run.py | 223
-rwxr-xr-x  taskcluster/docker/updatebot/setup.sh | 14
-rw-r--r--  taskcluster/docker/updatebot/updatebot-version.sh | 2
-rw-r--r--  taskcluster/docker/updatebot/windows-php.ini | 130
-rw-r--r--  taskcluster/docker/updatebot/windows-setup.sh | 135
12 files changed, 658 insertions, 0 deletions
diff --git a/taskcluster/docker/updatebot/Dockerfile b/taskcluster/docker/updatebot/Dockerfile
new file mode 100644
index 0000000000..a58c9c07a8
--- /dev/null
+++ b/taskcluster/docker/updatebot/Dockerfile
@@ -0,0 +1,24 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Tom Ritter <tom@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+
+# %include taskcluster/docker/recipes/install-node.sh
+ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh
+
+COPY privileged-setup.sh /setup/privileged-setup.sh
+COPY updatebot-version.sh /setup/updatebot-version.sh
+COPY setup.sh /builds/worker/setup.sh
+COPY run.py /builds/worker/run.py
+COPY hgrc /etc/mercurial/hgrc.d/updatebot.rc
+
+RUN cd /setup && ./privileged-setup.sh
+
+ENV HOME /builds/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+ENV PYTHONUNBUFFERED 1
+ENV PATH "/builds/worker/go/bin:/builds/worker/fetches/rustc/bin:/builds/worker/fetches/cargo-vet:${PATH}"
+
+RUN cd /builds/worker && ./setup.sh
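The image above is not built with a bare `docker build`: `$DOCKER_IMAGE_PARENT` and the `# %include` directive are resolved by the in-tree image tooling. A minimal sketch of building and inspecting it locally, assuming the standard mach helper and that the image name matches this directory:

    cd mozilla-central
    ./mach taskcluster-build-image updatebot   # resolves $DOCKER_IMAGE_PARENT and the %include line
    docker images | grep updatebot             # find the tag the helper reports
    docker run -it --rm <tag> /bin/bash        # poke around the resulting image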
diff --git a/taskcluster/docker/updatebot/VERSION b/taskcluster/docker/updatebot/VERSION
new file mode 100644
index 0000000000..56a6051ca2
--- /dev/null
+++ b/taskcluster/docker/updatebot/VERSION
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/taskcluster/docker/updatebot/arcanist_patch_size.patch b/taskcluster/docker/updatebot/arcanist_patch_size.patch
new file mode 100644
index 0000000000..7540c76fbd
--- /dev/null
+++ b/taskcluster/docker/updatebot/arcanist_patch_size.patch
@@ -0,0 +1,13 @@
+diff --git a/src/workflow/ArcanistDiffWorkflow.php b/src/workflow/ArcanistDiffWorkflow.php
+index 38aa4b62..c9b7e215 100644
+--- a/src/workflow/ArcanistDiffWorkflow.php
++++ b/src/workflow/ArcanistDiffWorkflow.php
+@@ -899,7 +899,7 @@ EOTEXT
+ throw new Exception(pht('Repository API is not supported.'));
+ }
+
+- $limit = 1024 * 1024 * 4;
++ $limit = 1024 * 1024 * 12;
+ foreach ($changes as $change) {
+ $size = 0;
+ foreach ($change->getHunks() as $hunk) {
diff --git a/taskcluster/docker/updatebot/arcanist_windows_stream.patch b/taskcluster/docker/updatebot/arcanist_windows_stream.patch
new file mode 100644
index 0000000000..953fbdce85
--- /dev/null
+++ b/taskcluster/docker/updatebot/arcanist_windows_stream.patch
@@ -0,0 +1,19 @@
+diff --git a/src/channel/PhutilSocketChannel.php b/src/channel/PhutilSocketChannel.php
+index 4bd2a47a..2ad3fd85 100644
+--- a/src/channel/PhutilSocketChannel.php
++++ b/src/channel/PhutilSocketChannel.php
+@@ -46,9 +46,11 @@ final class PhutilSocketChannel extends PhutilChannel {
+ if (!$socket) {
+ continue;
+ }
+- $ok = stream_set_blocking($socket, false);
+- if (!$ok) {
+- throw new Exception(pht('Failed to set socket nonblocking!'));
++ if (strtoupper(substr(PHP_OS, 0, 3)) !== 'WIN') {
++ $ok = stream_set_blocking($socket, false);
++ if (!$ok) {
++ throw new Exception(pht('Failed to set socket nonblocking!'));
++ }
+ }
+ }
+
diff --git a/taskcluster/docker/updatebot/hgrc b/taskcluster/docker/updatebot/hgrc
new file mode 100644
index 0000000000..7d1db24ae0
--- /dev/null
+++ b/taskcluster/docker/updatebot/hgrc
@@ -0,0 +1,7 @@
+[ui]
+ssh = ssh -i $HOME/id_rsa -l updatebot@mozilla.com -o UserKnownHostsFile=$HOME/ssh_known_hosts
+username = Updatebot <updatebot@mozilla.com>
+
+[extensions]
+strip =
+push-to-try = $HOME/.mozbuild/version-control-tools/hgext/push-to-try
\ No newline at end of file
diff --git a/taskcluster/docker/updatebot/moz.build b/taskcluster/docker/updatebot/moz.build
new file mode 100644
index 0000000000..315dc32600
--- /dev/null
+++ b/taskcluster/docker/updatebot/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("**"):
+ BUG_COMPONENT = ("Developer Infrastructure", "Mach Vendor & Updatebot")
diff --git a/taskcluster/docker/updatebot/privileged-setup.sh b/taskcluster/docker/updatebot/privileged-setup.sh
new file mode 100755
index 0000000000..640b0826e2
--- /dev/null
+++ b/taskcluster/docker/updatebot/privileged-setup.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+. ./updatebot-version.sh # Get UPDATEBOT_REVISION
+
+# If you edit this, be sure to edit fetch/updatebot.yml
+export SQLPROXY_REVISION=fb1939ab92846761595833361c6b0b0ecd543861
+
+export DEBIAN_FRONTEND=noninteractive
+
+# Update apt-get lists
+apt-get update -y
+
+# Install dependencies
+apt-get install -y --no-install-recommends \
+ arcanist \
+ ca-certificates \
+ curl \
+ ed \
+ golang-go \
+ gcc \
+ libc6-dev \
+ python3-minimal \
+ python3-wheel \
+ python3-pip \
+ python3-venv \
+ python3-requests \
+ python3-requests-unixsocket \
+ python3-setuptools \
+ openssh-client \
+ rsync \
+ wget
+
+mkdir -p /builds/worker/.mozbuild
+chown -R worker:worker /builds/worker/
+export GOPATH=/builds/worker/go
+
+# nodejs 16 for pdfjs
+. install-node.sh
+
+# pdf.js setup
+# We want to avoid downloading a ton of packages all the time, so
+# we preload the pdf.js repo (and its packages) in the Docker image
+# and only update it at runtime. This means that the `./mach vendor`
+# behavior for pdf.js will also be somewhat custom.
+npm install -g gulp-cli
+cd /builds/worker/
+git clone https://github.com/mozilla/pdf.js.git
+cd /builds/worker/pdf.js
+npm ci --legacy-peer-deps
+
+# Seed a v8 repository because it's large; updating an existing clone
+# at runtime is much faster than a fresh clone each time.
+cd /builds/worker/
+git clone https://github.com/v8/v8.git
+
+# Build Google's Cloud SQL Proxy from source
+cd /builds/worker/
+mkdir cloud_sql_proxy
+cd cloud_sql_proxy
+go mod init cloud_sql_proxy
+go get github.com/GoogleCloudPlatform/cloudsql-proxy/cmd/cloud_sql_proxy@$SQLPROXY_REVISION
+
+# Check out source code
+cd /builds/worker/
+git clone https://github.com/mozilla-services/updatebot.git
+cd updatebot
+git checkout "$UPDATEBOT_REVISION"
+
+# Set up dependencies
+cd /builds/worker/
+chown -R worker:worker .
+chown -R worker:worker .*
+
+python3 -m pip install -U pip
+python3 -m pip install poetry==1.2.2
+
+rm -rf /setup
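The pdf.js and v8 clones above are seeded only so the runtime job can refresh them instead of re-cloning. A rough, hypothetical illustration of the refresh this enables (the real update is driven by Updatebot and `./mach vendor`, not by this image, so the exact commands are assumptions):

    cd /builds/worker/pdf.js && git fetch origin   # incremental fetch against the preloaded clone
    cd /builds/worker/v8     && git fetch origin   # far cheaper than a fresh multi-gigabyte clone each run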
diff --git a/taskcluster/docker/updatebot/run.py b/taskcluster/docker/updatebot/run.py
new file mode 100755
index 0000000000..ad9bcb8788
--- /dev/null
+++ b/taskcluster/docker/updatebot/run.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+sys.path.append("/builds/worker/checkouts/gecko/third_party/python")
+sys.path.append(".")
+
+import base64
+import os
+import platform
+import signal
+import stat
+import subprocess
+
+import requests
+
+import taskcluster
+
+# Bump this number when you need to cause a commit for the job to re-run: 21
+
+if len(sys.argv) < 3:
+ print("Usage:", sys.argv[0], "gecko-dev-path updatebot-path [moz-fetches-dir]")
+ sys.exit(1)
+
+GECKO_DEV_PATH = sys.argv[1].replace("/", os.path.sep)
+UPDATEBOT_PATH = sys.argv[2].replace("/", os.path.sep)
+
+# Only needed on Windows
+if len(sys.argv) > 3:
+ FETCHES_PATH = sys.argv[3].replace("/", os.path.sep)
+else:
+ FETCHES_PATH = None
+
+HOME_PATH = os.path.expanduser("~")
+
+OPERATING_MODE = (
+ "prod"
+ if os.environ.get("GECKO_HEAD_REPOSITORY", "")
+ == "https://hg.mozilla.org/mozilla-central"
+ else "dev"
+)
+
+DEV_PHAB_URL = "https://phabricator-dev.allizom.org/"
+PROD_PHAB_URL = "https://phabricator.services.mozilla.com/"
+
+phabricator_url = DEV_PHAB_URL if OPERATING_MODE == "dev" else PROD_PHAB_URL
+
+
+def log(*args):
+ print(*args)
+
+
+def get_secret(name):
+ secret = None
+ if "TASK_ID" in os.environ:
+ secrets_url = (
+ "http://taskcluster/secrets/v1/secret/project/updatebot/"
+ + ("3" if OPERATING_MODE == "prod" else "2")
+ + "/"
+ + name
+ )
+ res = requests.get(secrets_url)
+ res.raise_for_status()
+ secret = res.json()
+ else:
+ secrets = taskcluster.Secrets(taskcluster.optionsFromEnvironment())
+ secret = secrets.get("project/updatebot/" + OPERATING_MODE + "/" + name)
+ secret = secret["secret"] if "secret" in secret else None
+ secret = secret["value"] if "value" in secret else None
+ return secret
+
+
+# Get TC Secrets =======================================
+log("Operating mode is ", OPERATING_MODE)
+log("Getting secrets...")
+bugzilla_api_key = get_secret("bugzilla-api-key")
+phabricator_token = get_secret("phabricator-token")
+try_sshkey = get_secret("try-sshkey")
+database_config = get_secret("database-password")
+sentry_url = get_secret("sentry-url")
+sql_proxy_config = get_secret("sql-proxy-config")
+
+# Update Updatebot =======================================
+if OPERATING_MODE == "dev":
+ """
+    If we are in development mode, we will update from GitHub.
+    (This will probably only work if we have checked out a branch, FWIW.)
+
+ This allows us to iterate faster by committing to github and
+ re-running the cron job on Taskcluster, without rebuilding the
+ Docker image.
+
+ However, this mechanism is bypassing the security feature we
+ have in-tree, where upstream out-of-tree code is fixed at a known
+ revision and cannot be changed without a commit to m-c.
+
+ Therefore, we only do this in dev mode when running on try.
+ """
+
+ os.chdir(UPDATEBOT_PATH)
+ log("Performing git repo update...")
+ command = ["git", "symbolic-ref", "-q", "HEAD"]
+
+ r = subprocess.run(command)
+ if r.returncode == 0:
+ # This indicates we are on a branch, and not a specific revision
+ subprocess.check_call(["git", "pull", "origin"])
+
+# Set Up SSH & Phabricator ==============================
+os.chdir(HOME_PATH)
+log("Setting up ssh and phab keys...")
+with open("id_rsa", "w") as sshkey:
+ sshkey.write(try_sshkey)
+os.chmod("id_rsa", stat.S_IRUSR | stat.S_IWUSR)
+
+arc_filename = ".arcrc"
+if platform.system() == "Windows":
+ arc_path = os.path.join(FETCHES_PATH, "..", "AppData", "Roaming")
+ os.makedirs(arc_path, exist_ok=True)
+ os.chdir(arc_path)
+ log("Writing %s to %s" % (arc_filename, arc_path))
+else:
+ os.chdir(HOME_PATH)
+
+arcrc = open(arc_filename, "w")
+towrite = """
+{
+ "hosts": {
+ "PHAB_URL_HERE": {
+ "token": "TOKENHERE"
+ }
+ }
+}
+""".replace(
+ "TOKENHERE", phabricator_token
+).replace(
+ "PHAB_URL_HERE", phabricator_url + "api/"
+)
+arcrc.write(towrite)
+arcrc.close()
+os.chmod(arc_filename, stat.S_IRUSR | stat.S_IWUSR)
+
+# Set up the Cloud SQL Proxy =============================
+os.chdir(HOME_PATH)
+log("Setting up cloud_sql_proxy...")
+with open("sql-proxy-key", "w") as proxy_key_file:
+ proxy_key_file.write(
+ base64.b64decode(sql_proxy_config["key-value"]).decode("utf-8")
+ )
+
+instance_name = sql_proxy_config["instance-name"]
+if platform.system() == "Linux":
+ sql_proxy_command = "/builds/worker/go/bin/cloud_sql_proxy"
+else:
+ sql_proxy_command = os.path.join(UPDATEBOT_PATH, "..", "cloud_sql_proxy.exe")
+
+sql_proxy_command += (
+ " -instances=" + instance_name + "=tcp:3306 -credential_file=sql-proxy-key"
+)
+sql_proxy_args = {
+ "stdout": subprocess.PIPE,
+ "stderr": subprocess.PIPE,
+ "shell": True,
+ "start_new_session": True,
+}
+
+if platform.system() == "Windows":
+ si = subprocess.STARTUPINFO()
+ si.dwFlags = subprocess.CREATE_NEW_PROCESS_GROUP
+
+ sql_proxy_args["startupinfo"] = si
+
+sql_proxy = subprocess.Popen((sql_proxy_command), **sql_proxy_args)
+
+try:
+ (stdout, stderr) = sql_proxy.communicate(input=None, timeout=2)
+ log("sql proxy stdout:", stdout.decode("utf-8"))
+ log("sql proxy stderr:", stderr.decode("utf-8"))
+except subprocess.TimeoutExpired:
+ log("no sqlproxy output in 2 seconds, this means it probably didn't error.")
+ log("sqlproxy pid:", sql_proxy.pid)
+
+database_config["host"] = "127.0.0.1"
+
+# Vendor =================================================
+log("Getting Updatebot ready...")
+os.chdir(UPDATEBOT_PATH)
+localconfig = {
+ "General": {
+ "env": OPERATING_MODE,
+ "gecko-path": GECKO_DEV_PATH,
+ },
+ "Logging": {
+ "local": True,
+ "sentry": True,
+ "sentry_config": {"url": sentry_url, "debug": False},
+ },
+ "Database": database_config,
+ "Bugzilla": {
+ "apikey": bugzilla_api_key,
+ },
+ "Taskcluster": {
+ "url_treeherder": "https://treeherder.mozilla.org/",
+ "url_taskcluster": "http://taskcluster/",
+ },
+}
+
+log("Writing local config file")
+config = open("localconfig.py", "w")
+config.write("localconfig = " + str(localconfig))
+config.close()
+
+log("Running updatebot")
+# On Windows, Updatebot is run by windows-setup.sh
+if platform.system() == "Linux":
+ subprocess.check_call(["python3", "-m", "poetry", "run", "./automation.py"])
+
+ # Clean up ===============================================
+ log("Killing cloud_sql_proxy")
+ os.kill(sql_proxy.pid, signal.SIGTERM)
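run.py normally runs inside a Taskcluster task, but the argument handling and OPERATING_MODE check above imply roughly the following manual invocation for debugging; the credentials, root URL, and paths here are assumptions, not part of the task definition:

    # Outside a task (no TASK_ID set), secrets are fetched via the taskcluster
    # client library, which reads these standard environment variables:
    export TASKCLUSTER_ROOT_URL=https://firefox-ci-tc.services.mozilla.com
    export TASKCLUSTER_CLIENT_ID=<client id>
    export TASKCLUSTER_ACCESS_TOKEN=<token>
    # Anything other than https://hg.mozilla.org/mozilla-central selects "dev" mode
    # (phabricator-dev and the project/updatebot/dev/* secrets):
    export GECKO_HEAD_REPOSITORY=https://hg.mozilla.org/try
    python3 run.py /builds/worker/checkouts/gecko /builds/worker/updatebot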
diff --git a/taskcluster/docker/updatebot/setup.sh b/taskcluster/docker/updatebot/setup.sh
new file mode 100755
index 0000000000..e7c6fe03e0
--- /dev/null
+++ b/taskcluster/docker/updatebot/setup.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+# Copy the system known_hosts to the home directory so we have uniformity with Windows
+# and the ssh command will find them in the same place.
+cp /etc/ssh/ssh_known_hosts "$HOME/ssh_known_hosts"
+
+# If poetry is not run as the worker user, then it won't work when run as that user later.
+cd /builds/worker/updatebot
+/usr/local/bin/poetry install --no-ansi
diff --git a/taskcluster/docker/updatebot/updatebot-version.sh b/taskcluster/docker/updatebot/updatebot-version.sh
new file mode 100644
index 0000000000..2dd61412ce
--- /dev/null
+++ b/taskcluster/docker/updatebot/updatebot-version.sh
@@ -0,0 +1,2 @@
+export UPDATEBOT_REVISION=dee7a25bb71daf3356d146ab9582720d4a490264
+
diff --git a/taskcluster/docker/updatebot/windows-php.ini b/taskcluster/docker/updatebot/windows-php.ini
new file mode 100644
index 0000000000..8ccaa05ce4
--- /dev/null
+++ b/taskcluster/docker/updatebot/windows-php.ini
@@ -0,0 +1,130 @@
+[PHP]
+
+engine = On
+short_open_tag = Off
+precision = 14
+output_buffering = 4096
+zlib.output_compression = Off
+implicit_flush = Off
+unserialize_callback_func =
+serialize_precision = -1
+disable_functions =
+disable_classes =
+zend.enable_gc = On
+zend.exception_ignore_args = Off
+expose_php = On
+max_execution_time = 30
+max_input_time = 60
+memory_limit = 128M
+error_reporting = E_ALL
+display_errors = On
+display_startup_errors = On
+log_errors = On
+log_errors_max_len = 1024
+ignore_repeated_errors = Off
+ignore_repeated_source = Off
+report_memleaks = On
+variables_order = "GPCS"
+request_order = "GP"
+register_argc_argv = Off
+auto_globals_jit = On
+post_max_size = 8M
+auto_prepend_file =
+auto_append_file =
+default_mimetype = "text/html"
+default_charset = "UTF-8"
+doc_root =
+user_dir =
+extension_dir = "ext"
+enable_dl = Off
+file_uploads = On
+upload_max_filesize = 2M
+max_file_uploads = 20
+allow_url_fopen = On
+allow_url_include = Off
+default_socket_timeout = 60
+extension=curl
+
+[CLI Server]
+cli_server.color = On
+
+[Pdo_mysql]
+pdo_mysql.default_socket=
+
+[mail function]
+SMTP = localhost
+smtp_port = 25
+mail.add_x_header = Off
+
+[ODBC]
+odbc.allow_persistent = On
+odbc.check_persistent = On
+odbc.max_persistent = -1
+odbc.max_links = -1
+odbc.defaultlrl = 4096
+odbc.defaultbinmode = 1
+
+[MySQLi]
+mysqli.max_persistent = -1
+mysqli.allow_persistent = On
+mysqli.max_links = -1
+mysqli.default_port = 3306
+mysqli.default_socket =
+mysqli.default_host =
+mysqli.default_user =
+mysqli.default_pw =
+mysqli.reconnect = Off
+
+[mysqlnd]
+mysqlnd.collect_statistics = On
+mysqlnd.collect_memory_statistics = On
+
+[PostgreSQL]
+pgsql.allow_persistent = On
+pgsql.auto_reset_persistent = Off
+pgsql.max_persistent = -1
+pgsql.max_links = -1
+pgsql.ignore_notice = 0
+pgsql.log_notice = 0
+
+[bcmath]
+bcmath.scale = 0
+
+[Session]
+session.save_handler = files
+session.use_strict_mode = 0
+session.use_cookies = 1
+session.use_only_cookies = 1
+session.name = PHPSESSID
+session.auto_start = 0
+session.cookie_lifetime = 0
+session.cookie_path = /
+session.cookie_domain =
+session.cookie_httponly =
+session.cookie_samesite =
+session.serialize_handler = php
+session.gc_probability = 1
+session.gc_divisor = 1000
+session.gc_maxlifetime = 1440
+session.referer_check =
+session.cache_limiter = nocache
+session.cache_expire = 180
+session.use_trans_sid = 0
+session.sid_length = 26
+session.trans_sid_tags = "a=href,area=href,frame=src,form="
+session.sid_bits_per_character = 5
+
+[Assertion]
+zend.assertions = 1
+
+[Tidy]
+tidy.clean_output = Off
+
+[soap]
+soap.wsdl_cache_enabled=1
+soap.wsdl_cache_dir="/tmp"
+soap.wsdl_cache_ttl=86400
+soap.wsdl_cache_limit = 5
+
+[ldap]
+ldap.max_links = -1
diff --git a/taskcluster/docker/updatebot/windows-setup.sh b/taskcluster/docker/updatebot/windows-setup.sh
new file mode 100644
index 0000000000..c04d0a6fbf
--- /dev/null
+++ b/taskcluster/docker/updatebot/windows-setup.sh
@@ -0,0 +1,135 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+. ./taskcluster/docker/updatebot/updatebot-version.sh # Get UPDATEBOT_REVISION
+
+HOME=$(python3 -c "import os;print(os.path.expanduser('~'))")
+export HOME
+GECKO_PATH="$PWD"
+UPDATEBOT_PATH="$MOZ_FETCHES_DIR/updatebot"
+
+# MOZ_FETCHES_DIR is in Z:/ format. When we update the PATH we need to use
+# /z/ format. Fortunately, we can translate them like so:
+cd "$MOZ_FETCHES_DIR"
+MOZ_FETCHES_PATH="$PWD"
+
+#########################################################
+# Install dependencies
+
+# Move depot_tools
+cd "$MOZ_FETCHES_DIR"
+mv depot_tools.git depot_tools
+
+
+# Generating a new version of the preloaded depot_tools download can be done by:
+# 1) Running the task, uncommenting the variable assignment below, uncommenting the
+# _GENERATE_DEPOT_TOOLS_BINARIES_ section in taskcluster/ci/updatebot/kind.yml,
+# and ensuring that an ANGLE update will actually take place (so it downloads the depot_tools)
+# 2) Downloading and sanity-checking the depot_tools-preloaded-binaries-GIT_HASH-DATE.zip artifact
+# 3) Adding it to tooltool
+# 4) Updating the updatebot manifest
+# Note that even for the same git revision the downloaded tools can change, so they are tagged
+# with both the git hash and the date they were generated.
+
+# export GENERATE_DEPOT_TOOLS_BINARIES=1
+
+if test -n "$GENERATE_DEPOT_TOOLS_BINARIES"; then
+ cp -r depot_tools depot_tools-from-git
+fi
+
+# Git is at /c/Program Files/Git/cmd/git.exe
+# It's available to this script (confusingly) but not on PATH, so we need to add it
+export PATH="/c/Program Files/Git/cmd:$PATH"
+
+# php & arcanist
+if [ -n "$TOOLTOOL_MANIFEST" ]; then
+ . "$GECKO_PATH/taskcluster/scripts/misc/tooltool-download.sh"
+fi
+
+cp "$MOZ_FETCHES_DIR/vcruntime140.dll" "$MOZ_FETCHES_DIR/php-win"
+cp "$GECKO_PATH/taskcluster/docker/updatebot/windows-php.ini" "$MOZ_FETCHES_DIR/php-win/php.ini"
+
+cd "$MOZ_FETCHES_DIR/arcanist"
+patch -p1 < "$GECKO_PATH/taskcluster/docker/updatebot/arcanist_windows_stream.patch"
+patch -p1 < "$GECKO_PATH/taskcluster/docker/updatebot/arcanist_patch_size.patch"
+cd "$MOZ_FETCHES_DIR"
+
+export PATH="$MOZ_FETCHES_PATH/php-win:$PATH"
+export PATH="$MOZ_FETCHES_PATH/arcanist/bin:$PATH"
+
+# get Updatebot
+cd "$MOZ_FETCHES_DIR"
+git clone https://github.com/mozilla-services/updatebot.git
+cd updatebot
+git checkout "$UPDATEBOT_REVISION"
+
+# base python needs
+python3 -m pip install --no-warn-script-location --user -U pip
+python3 -m pip install --no-warn-script-location --user poetry wheel requests setuptools
+
+# updatebot dependencies
+cd "$UPDATEBOT_PATH"
+python3 -m poetry install
+
+# taskcluster secrets and writing out localconfig
+cd "$GECKO_PATH"
+python3 ./taskcluster/docker/updatebot/run.py "$GECKO_PATH" "$UPDATEBOT_PATH" "$MOZ_FETCHES_PATH"
+
+# mercurial configuration
+cp "$GECKO_PATH/taskcluster/docker/updatebot/hgrc" "$HOME/.hgrc"
+# Windows is not happy with $HOME in the hgrc, so we use a small heredoc hack to replace it
+# with the actual value.
+( echo "cat <<EOF" ; cat "$HOME/.hgrc" ) | sh > tmp
+mv tmp "$HOME/.hgrc"
+
+# ssh known hosts
+cp "$GECKO_PATH/taskcluster/docker/push-to-try/known_hosts" "$HOME/ssh_known_hosts"
+
+#########################################################
+# Run it
+export PYTHONIOENCODING=utf8
+export PYTHONUNBUFFERED=1
+
+cd "$UPDATEBOT_PATH"
+python3 -m poetry run python3 ./automation.py
+
+#########################################################
+if test -n "$GENERATE_DEPOT_TOOLS_BINARIES"; then
+ # Artifacts
+
+ cd "$MOZ_FETCHES_PATH"
+ mv depot_tools depot_tools-from-tc
+
+ # Clean out unneeded files
+ # Need to use cmd because for some reason rm from bash throws 'Access Denied'
+ cmd '/c for /d /r %i in (*__pycache__) do rmdir /s /q %i'
+ rm -rf depot_tools-from-git/.git || true
+
+ # Delete the files that are already in git
+ find depot_tools-from-git -mindepth 1 -maxdepth 1 | sed s/depot_tools-from-git/depot_tools-from-tc/ | while read -r d; do rm -rf "$d"; done
+
+ # Make the artifact
+ rm -rf depot_tools-preloaded-binaries #remove it if it existed (i.e. we probably have one from tooltool already)
+ mv depot_tools-from-tc depot_tools-preloaded-binaries
+
+ # zip can't add symbolic links, and exits with an error code. || true avoids a script crash
+ zip -r depot_tools-preloaded-binaries.zip depot_tools-preloaded-binaries/ || true
+
+ # Convoluted way to get the git hash, because we don't have a .git directory
+ # Adding extra print statements just in case we need to debug it
+ GIT_HASH=$(grep depot_tools -A 1 "$GECKO_PATH/taskcluster/ci/fetch/updatebot.yml" | tee /dev/tty | grep revision | tee /dev/tty | awk -F': *' '{print $2}' | tee /dev/tty)
+ DATE=$(date -I)
+ mv depot_tools-preloaded-binaries.zip "depot_tools-preloaded-binaries-$GIT_HASH-$DATE.zip"
+
+ # Put the artifact into the directory we will look for it
+ mkdir -p "$GECKO_PATH/obj-build/depot_tools" || true
+ mv "depot_tools-preloaded-binaries-$GIT_HASH-$DATE.zip" "$GECKO_PATH/obj-build/depot_tools"
+fi
+
+#########################################################
+echo "Killing SQL Proxy"
+taskkill -f -im cloud_sql_proxy.exe || true