Diffstat (limited to '')
-rw-r--r--  taskcluster/docker/REGISTRY | 1
-rw-r--r--  taskcluster/docker/android-build/Dockerfile | 45
-rw-r--r--  taskcluster/docker/android-build/README.md | 2
-rw-r--r--  taskcluster/docker/android-build/VERSION | 1
-rw-r--r--  taskcluster/docker/condprof/Dockerfile | 15
-rw-r--r--  taskcluster/docker/custom-car-linux/Dockerfile | 24
-rw-r--r--  taskcluster/docker/custom-v8/Dockerfile | 22
-rw-r--r--  taskcluster/docker/debian-base/Dockerfile | 66
-rw-r--r--  taskcluster/docker/debian-build/Dockerfile | 51
-rw-r--r--  taskcluster/docker/debian-packages/Dockerfile | 11
-rw-r--r--  taskcluster/docker/debian-raw/Dockerfile | 62
-rwxr-xr-x  taskcluster/docker/debian-raw/gpgvnoexpkeysig | 52
-rwxr-xr-x  taskcluster/docker/debian-raw/taskcluster-hack.sh | 7
-rw-r--r--  taskcluster/docker/debian-repackage/Dockerfile | 34
-rw-r--r--  taskcluster/docker/decision/Dockerfile | 15
-rw-r--r--  taskcluster/docker/decision/HASH | 1
-rw-r--r--  taskcluster/docker/decision/README.md | 5
-rw-r--r--  taskcluster/docker/decision/VERSION | 1
-rwxr-xr-x  taskcluster/docker/decision/comm-task-env | 199
-rw-r--r--  taskcluster/docker/decision/system-setup.sh | 14
-rw-r--r--  taskcluster/docker/diffoscope/Dockerfile | 33
-rw-r--r--  taskcluster/docker/diffoscope/get_and_diffoscope | 103
-rw-r--r--  taskcluster/docker/diffoscope/readelf | 13
-rwxr-xr-x  taskcluster/docker/diffoscope/report_error | 36
-rw-r--r--  taskcluster/docker/diffoscope/test_diffoscope | 47
-rw-r--r--  taskcluster/docker/fetch/Dockerfile | 36
-rw-r--r--  taskcluster/docker/firefox-flatpak/Dockerfile | 19
-rw-r--r--  taskcluster/docker/firefox-flatpak/close_range.c | 12
-rw-r--r--  taskcluster/docker/firefox-flatpak/default-preferences.js | 11
-rw-r--r--  taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py | 18
-rw-r--r--  taskcluster/docker/firefox-flatpak/launch-script.sh | 3
-rw-r--r--  taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in | 37
-rw-r--r--  taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop | 386
-rw-r--r--  taskcluster/docker/firefox-flatpak/policies.json | 6
-rwxr-xr-x  taskcluster/docker/firefox-flatpak/runme.sh | 190
-rw-r--r--  taskcluster/docker/firefox-snap/Dockerfile | 71
-rw-r--r--  taskcluster/docker/firefox-snap/Makefile | 12
-rw-r--r--  taskcluster/docker/firefox-snap/download_and_install_snap.sh | 27
-rw-r--r--  taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py | 15
-rw-r--r--  taskcluster/docker/firefox-snap/firefox.desktop | 219
-rw-r--r--  taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in | 92
-rw-r--r--  taskcluster/docker/firefox-snap/policies.json | 5
-rwxr-xr-x  taskcluster/docker/firefox-snap/runme.sh | 104
-rwxr-xr-x  taskcluster/docker/firefox-snap/tmpdir | 6
-rw-r--r--  taskcluster/docker/funsize-update-generator/Dockerfile | 48
-rw-r--r--  taskcluster/docker/funsize-update-generator/Makefile | 9
-rw-r--r--  taskcluster/docker/funsize-update-generator/README | 7
-rw-r--r--  taskcluster/docker/funsize-update-generator/dep1.pubkey | 14
-rw-r--r--  taskcluster/docker/funsize-update-generator/nightly.pubkey | 14
-rw-r--r--  taskcluster/docker/funsize-update-generator/release.pubkey | 14
-rw-r--r--  taskcluster/docker/funsize-update-generator/requirements.in | 7
-rw-r--r--  taskcluster/docker/funsize-update-generator/requirements.txt | 663
-rw-r--r--  taskcluster/docker/funsize-update-generator/runme.sh | 61
-rw-r--r--  taskcluster/docker/funsize-update-generator/scripts/funsize.py | 471
-rwxr-xr-x  taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh | 157
-rw-r--r--  taskcluster/docker/gdb-test/Dockerfile | 15
-rw-r--r--  taskcluster/docker/github-sync/Dockerfile | 14
-rwxr-xr-x  taskcluster/docker/github-sync/prepare.sh | 31
-rw-r--r--  taskcluster/docker/github-sync/requirements.txt | 3
-rw-r--r--  taskcluster/docker/image_builder/Dockerfile | 99
-rw-r--r--  taskcluster/docker/image_builder/VERSION | 1
-rw-r--r--  taskcluster/docker/image_builder/apt.conf | 5
-rw-r--r--  taskcluster/docker/image_builder/build-image/Cargo.lock | 1085
-rw-r--r--  taskcluster/docker/image_builder/build-image/Cargo.toml | 23
-rw-r--r--  taskcluster/docker/image_builder/build-image/src/config.rs | 112
-rw-r--r--  taskcluster/docker/image_builder/build-image/src/main.rs | 182
-rw-r--r--  taskcluster/docker/image_builder/build-image/src/taskcluster.rs | 55
-rw-r--r--  taskcluster/docker/image_builder/policy.json | 11
-rw-r--r--  taskcluster/docker/index-task/.eslintrc.js | 11
-rw-r--r--  taskcluster/docker/index-task/Dockerfile | 11
-rw-r--r--  taskcluster/docker/index-task/README | 36
-rw-r--r--  taskcluster/docker/index-task/insert-indexes.js | 73
-rw-r--r--  taskcluster/docker/index-task/package.json | 12
-rw-r--r--  taskcluster/docker/index-task/yarn.lock | 326
-rw-r--r--  taskcluster/docker/lint/Dockerfile | 36
-rw-r--r--  taskcluster/docker/lint/system-setup.sh | 92
-rw-r--r--  taskcluster/docker/partner-repack/Dockerfile | 20
-rw-r--r--  taskcluster/docker/partner-repack/known_hosts | 3
-rw-r--r--  taskcluster/docker/periodic-updates/.eslintrc.js | 70
-rw-r--r--  taskcluster/docker/periodic-updates/Dockerfile | 11
-rw-r--r--  taskcluster/docker/periodic-updates/README.md | 96
-rwxr-xr-x  taskcluster/docker/periodic-updates/runme.sh | 93
-rw-r--r--  taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js | 674
-rw-r--r--  taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js | 557
-rwxr-xr-x  taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh | 618
-rwxr-xr-x  taskcluster/docker/periodic-updates/setup.sh | 25
-rw-r--r--  taskcluster/docker/push-to-try/Dockerfile | 22
-rw-r--r--  taskcluster/docker/push-to-try/hgrc | 2
-rw-r--r--  taskcluster/docker/push-to-try/known_hosts | 2
-rw-r--r--  taskcluster/docker/recipes/common.sh | 10
-rw-r--r--  taskcluster/docker/recipes/debian-test-system-setup.sh | 84
-rw-r--r--  taskcluster/docker/recipes/dot-config/pip/pip.conf | 2
-rwxr-xr-x  taskcluster/docker/recipes/hgrc | 33
-rw-r--r--  taskcluster/docker/recipes/install-node.sh | 15
-rwxr-xr-x  taskcluster/docker/recipes/setup_packages.sh | 13
-rw-r--r--  taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh | 163
-rw-r--r--  taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh | 24
-rw-r--r--  taskcluster/docker/recipes/xvfb.sh | 75
-rw-r--r--  taskcluster/docker/sentry/Dockerfile | 11
-rwxr-xr-x  taskcluster/docker/sentry/prepare.sh | 18
-rw-r--r--  taskcluster/docker/sentry/submit_sentry_release.sh | 29
-rw-r--r--  taskcluster/docker/static-analysis-build/Dockerfile | 61
-rw-r--r--  taskcluster/docker/system-symbols-linux-scraper/Dockerfile | 28
-rw-r--r--  taskcluster/docker/system-symbols-linux-scraper/SHA256SUMS.txt | 6
-rwxr-xr-x  taskcluster/docker/system-symbols-linux-scraper/run.sh | 80
-rw-r--r--  taskcluster/docker/system-symbols-mac/Dockerfile | 37
-rw-r--r--  taskcluster/docker/system-symbols-mac/requirements.txt | 2
-rw-r--r--  taskcluster/docker/system-symbols-mac/setup.sh | 27
-rw-r--r--  taskcluster/docker/system-symbols-win/Dockerfile | 26
-rw-r--r--  taskcluster/docker/system-symbols-win/requirements.txt | 19
-rw-r--r--  taskcluster/docker/toolchain-build/Dockerfile | 62
-rw-r--r--  taskcluster/docker/ubuntu1804-base/Dockerfile | 70
-rw-r--r--  taskcluster/docker/ubuntu1804-test-base/Dockerfile | 24
-rw-r--r--  taskcluster/docker/ubuntu1804-test/Dockerfile | 86
-rw-r--r--  taskcluster/docker/ubuntu1804-test/apport | 1
-rw-r--r--  taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop | 19
-rw-r--r--  taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop | 9
-rw-r--r--  taskcluster/docker/ubuntu1804-test/dbus.sh | 7
-rw-r--r--  taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs | 15
-rw-r--r--  taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale | 1
-rw-r--r--  taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf | 3
-rw-r--r--  taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf | 1
-rw-r--r--  taskcluster/docker/ubuntu1804-test/fonts.conf | 5
-rw-r--r--  taskcluster/docker/ubuntu1804-test/motd | 6
-rw-r--r--  taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell | 22
-rw-r--r--  taskcluster/docker/update-verify/Dockerfile | 32
-rw-r--r--  taskcluster/docker/update-verify/system-setup.sh | 29
-rw-r--r--  taskcluster/docker/updatebot/Dockerfile | 24
-rw-r--r--  taskcluster/docker/updatebot/VERSION | 1
-rw-r--r--  taskcluster/docker/updatebot/arcanist_patch_size.patch | 13
-rw-r--r--  taskcluster/docker/updatebot/arcanist_windows_stream.patch | 19
-rw-r--r--  taskcluster/docker/updatebot/hgrc | 7
-rw-r--r--  taskcluster/docker/updatebot/moz.build | 8
-rwxr-xr-x  taskcluster/docker/updatebot/privileged-setup.sh | 82
-rwxr-xr-x  taskcluster/docker/updatebot/run.py | 223
-rwxr-xr-x  taskcluster/docker/updatebot/setup.sh | 14
-rw-r--r--  taskcluster/docker/updatebot/updatebot-version.sh | 2
-rw-r--r--  taskcluster/docker/updatebot/windows-php.ini | 130
-rw-r--r--  taskcluster/docker/updatebot/windows-setup.sh | 135
-rw-r--r--  taskcluster/docker/valgrind-build/Dockerfile | 66
-rw-r--r--  taskcluster/docker/webrender/Dockerfile | 16
141 files changed, 9915 insertions, 0 deletions
diff --git a/taskcluster/docker/REGISTRY b/taskcluster/docker/REGISTRY
new file mode 100644
index 0000000000..cb1e1bb482
--- /dev/null
+++ b/taskcluster/docker/REGISTRY
@@ -0,0 +1 @@
+taskcluster
diff --git a/taskcluster/docker/android-build/Dockerfile b/taskcluster/docker/android-build/Dockerfile
new file mode 100644
index 0000000000..22eb3c51c8
--- /dev/null
+++ b/taskcluster/docker/android-build/Dockerfile
@@ -0,0 +1,45 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Nick Alexander <nalexander@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+# rsync is required for l10n single-locale repacks. less, screen, and
+# vim help when debugging interactive tasks in Taskcluster.
+# git and openssh-client are used to upload GeckoView javadoc to GitHub.
+RUN apt-get update && \
+ apt-get install \
+ autoconf2.13 \
+ build-essential \
+ base-files \
+ ccache \
+ cmake \
+ curl \
+ file \
+ gnupg \
+ jq \
+ less \
+ openssh-client \
+ procps \
+ python3-dev \
+ rsync \
+ screen \
+ sudo \
+ tar \
+ unzip \
+ uuid \
+ valgrind \
+ vim \
+ x11-utils \
+ xvfb \
+ wget \
+ zip \
+ zstd
+
+# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb.
+# %include taskcluster/docker/recipes/xvfb.sh
+COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
+
+# Back to the workdir, matching desktop-build.
+WORKDIR /builds/worker
diff --git a/taskcluster/docker/android-build/README.md b/taskcluster/docker/android-build/README.md
new file mode 100644
index 0000000000..6096b08368
--- /dev/null
+++ b/taskcluster/docker/android-build/README.md
@@ -0,0 +1,2 @@
+This is a Docker image for fetching Android Gradle dependencies for
+use in Mozilla's build clusters.
diff --git a/taskcluster/docker/android-build/VERSION b/taskcluster/docker/android-build/VERSION
new file mode 100644
index 0000000000..4e379d2bfe
--- /dev/null
+++ b/taskcluster/docker/android-build/VERSION
@@ -0,0 +1 @@
+0.0.2
diff --git a/taskcluster/docker/condprof/Dockerfile b/taskcluster/docker/condprof/Dockerfile
new file mode 100644
index 0000000000..76817e7927
--- /dev/null
+++ b/taskcluster/docker/condprof/Dockerfile
@@ -0,0 +1,15 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Gregory Mierzwinski <gmierzwinski@mozilla.com>
+
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+
+RUN apt-get -y update && \
+ apt-get install -y python3.7 \
+ python3.7-venv \
+ python3.7-dev
+
+CMD ["/bin/bash", "--login"]
+
diff --git a/taskcluster/docker/custom-car-linux/Dockerfile b/taskcluster/docker/custom-car-linux/Dockerfile
new file mode 100644
index 0000000000..17f24b9042
--- /dev/null
+++ b/taskcluster/docker/custom-car-linux/Dockerfile
@@ -0,0 +1,24 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER kshampur <kshampur@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+RUN apt-get update && \
+ apt-get install \
+ bzip2 \
+ curl \
+ libbz2-dev \
+ libglib2.0-dev \
+ libtinfo5 \
+ pkg-config \
+ tar \
+ unzip \
+ wget \
+ zip \
+ lsb-release \
+ gperf \
+ procps
diff --git a/taskcluster/docker/custom-v8/Dockerfile b/taskcluster/docker/custom-v8/Dockerfile
new file mode 100644
index 0000000000..810c2eb090
--- /dev/null
+++ b/taskcluster/docker/custom-v8/Dockerfile
@@ -0,0 +1,22 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Joel Maher <jmaher@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+RUN apt-get update && \
+ apt-get install \
+ bzip2 \
+ curl \
+ libbz2-dev \
+ libglib2.0-dev \
+ libtinfo5 \
+ pkg-config \
+ tar \
+ unzip \
+ wget \
+ zip \
+ procps
diff --git a/taskcluster/docker/debian-base/Dockerfile b/taskcluster/docker/debian-base/Dockerfile
new file mode 100644
index 0000000000..ad48749a38
--- /dev/null
+++ b/taskcluster/docker/debian-base/Dockerfile
@@ -0,0 +1,66 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+### Add worker user and setup its workspace.
+RUN mkdir /builds && \
+ groupadd -g 1000 worker && \
+ useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker && \
+ mkdir -p /builds/worker/workspace && \
+ chown -R worker:worker /builds
+
+# Declare default working folder
+WORKDIR /builds/worker
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+# Set variables normally configured at login by the shell's parent process;
+# these are taken from the GNU su manual.
+ENV HOME=/builds/worker \
+ SHELL=/bin/bash \
+ USER=worker \
+ LOGNAME=worker \
+ HOSTNAME=taskcluster-worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+ apt-get update && \
+ apt-get dist-upgrade && \
+ apt-get install \
+ git \
+ less \
+ make \
+ mercurial \
+ patch \
+ python3 \
+ python3-distutils-extra \
+ python3-minimal \
+ python3-zstandard \
+ python3-psutil \
+ python3-venv \
+ vim-tiny \
+ xz-utils \
+ zstd
+
+# %include testing/mozharness/external_tools/robustcheckout.py
+COPY topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
+
+# %include taskcluster/docker/recipes/hgrc
+COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc
+
+# Add pip configuration, among other things.
+# %include taskcluster/docker/recipes/dot-config
+COPY topsrcdir/taskcluster/docker/recipes/dot-config /builds/worker/.config
+
+# %include taskcluster/scripts/run-task
+COPY topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task
+
+# %include taskcluster/scripts/misc/fetch-content
+ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
diff --git a/taskcluster/docker/debian-build/Dockerfile b/taskcluster/docker/debian-build/Dockerfile
new file mode 100644
index 0000000000..4e0e04c150
--- /dev/null
+++ b/taskcluster/docker/debian-build/Dockerfile
@@ -0,0 +1,51 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+# binutils is needed for base toolchain builds. Ideally, it wouldn't be needed.
+# libc6-i386 and lib32gcc1 are needed for wine.
+# libdbus-glib-1-2 and libgtk-3-0 are needed to run xpcshell during the build.
+# lib32atomic1, lib32stdc++6 and lib32z1 are needed to run some 32-bit
+# spidermonkey tests.
+# cpio is needed by xar (mac repackaging on linux).
+# libasound2 is needed to run xpcshell after we introduced the
+# dependencies on alsa via Web MIDI.
+RUN apt-get update && \
+ apt-get dist-upgrade && \
+ apt-get install \
+ binutils \
+ bzip2 \
+ curl \
+ cpio \
+ file \
+ gawk \
+ gnupg \
+ jq \
+ lib32atomic1 \
+ 'lib32gcc(1|-s1)$' \
+ lib32stdc++6 \
+ lib32z1 \
+ libasound2 \
+ libc6-i386 \
+ libdbus-glib-1-2 \
+ libgtk-3-0 \
+ libucl1 \
+ p7zip-full \
+ procps \
+ python3-dev \
+ rsync \
+ screen \
+ tar \
+ unzip \
+ uuid \
+ wget \
+ x11-utils \
+ zip
diff --git a/taskcluster/docker/debian-packages/Dockerfile b/taskcluster/docker/debian-packages/Dockerfile
new file mode 100644
index 0000000000..1e651a48b7
--- /dev/null
+++ b/taskcluster/docker/debian-packages/Dockerfile
@@ -0,0 +1,11 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+RUN apt-get install --install-recommends \
+ apt-utils \
+ aptitude \
+ build-essential \
+ devscripts \
+ equivs \
+ fakeroot \
+ git
diff --git a/taskcluster/docker/debian-raw/Dockerfile b/taskcluster/docker/debian-raw/Dockerfile
new file mode 100644
index 0000000000..33b28d11a2
--- /dev/null
+++ b/taskcluster/docker/debian-raw/Dockerfile
@@ -0,0 +1,62 @@
+ARG BASE_IMAGE
+FROM $BASE_IMAGE
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
+
+# %include taskcluster/docker/recipes/setup_packages.sh
+COPY topsrcdir/taskcluster/docker/recipes/setup_packages.sh /usr/local/sbin/
+
+COPY taskcluster-hack.sh /usr/local/sbin
+COPY gpgvnoexpkeysig /usr/local/sbin
+
+ARG DIST
+ARG SNAPSHOT
+ARG TASKCLUSTER_ROOT_URL
+# Set apt sources list to a snapshot.
+# Note: the use of gpgvnoexpkeysig is because the Debian Jessie GPG key expired.
+RUN if [ -n "$DIST" ]; then for s in debian_$DIST debian_$DIST-updates debian_$DIST-backports debian-security_$DIST-security debian-debug_$DIST-debug debian-debug_$DIST-proposed-updates-debug debian-debug_$DIST-backports-debug; do \
+ case "$s" in \
+ debian-debug_jessie*|debian_jessie-updates) \
+ : No debian-debug/updates archive for Jessie; \
+ ;; \
+ debian-security_jessie-security) \
+ echo "deb http://archive.debian.org/debian-security/ jessie/updates main"; \
+ ;; \
+ debian*_jessie*) \
+ echo "deb http://archive.debian.org/${s%_*}/ ${s#*_} main"; \
+ ;; \
+ debian-security_buster-security) \
+ d=${s#*_}; \
+ echo "deb http://snapshot.debian.org/archive/${s%_*}/$SNAPSHOT/ ${d%-security}/updates main"; \
+ ;; \
+ *) \
+ echo "deb http://snapshot.debian.org/archive/${s%_*}/$SNAPSHOT/ ${s#*_} main"; \
+ ;; \
+ esac; \
+ done > /etc/apt/sources.list ; fi && \
+ ( echo 'quiet "true";'; \
+ echo 'APT::Get::Assume-Yes "true";'; \
+ echo 'APT::Install-Recommends "false";'; \
+ echo 'Acquire::Check-Valid-Until "false";'; \
+ echo 'Acquire::Retries "5";'; \
+ if dpkg --compare-versions $(apt --version | awk '{print $2}') ge 2.1.15; then \
+ echo 'dir::bin::methods::https "/usr/local/sbin/taskcluster-hack.sh";'; \
+ fi; \
+ if [ "$DIST" = "jessie" ]; then \
+ echo 'Dir::Bin::gpg "/usr/local/sbin/gpgvnoexpkeysig";'; \
+ fi; \
+ ) > /etc/apt/apt.conf.d/99taskcluster && \
+ ( echo 'Package: *'; \
+ echo 'Pin: origin "'$TASKCLUSTER_ROOT_URL'"' | sed 's,https://,,'; \
+ echo 'Pin-Priority: 1001'; \
+ ) > /etc/apt/preferences.d/99taskcluster
+
+RUN apt-get update && \
+ apt-get dist-upgrade && \
+ apt-get install \
+ apt-transport-https \
+ ca-certificates
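
For illustration only (not part of the image): with hypothetical build args DIST=bullseye and SNAPSHOT=20230101T000000Z, the case statement above would generate an /etc/apt/sources.list roughly like the following, with the -backports and -proposed-updates-debug entries following the same pattern:

    deb http://snapshot.debian.org/archive/debian/20230101T000000Z/ bullseye main
    deb http://snapshot.debian.org/archive/debian/20230101T000000Z/ bullseye-updates main
    deb http://snapshot.debian.org/archive/debian-security/20230101T000000Z/ bullseye-security main
    deb http://snapshot.debian.org/archive/debian-debug/20230101T000000Z/ bullseye-debug main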
diff --git a/taskcluster/docker/debian-raw/gpgvnoexpkeysig b/taskcluster/docker/debian-raw/gpgvnoexpkeysig
new file mode 100755
index 0000000000..fbbfd7a658
--- /dev/null
+++ b/taskcluster/docker/debian-raw/gpgvnoexpkeysig
@@ -0,0 +1,52 @@
+#!/bin/sh
+#
+# Downloaded from https://gitlab.mister-muffin.de/josch/mmdebstrap/raw/branch/main/gpgvnoexpkeysig
+#
+# This script is in the public domain
+#
+# Author: Johannes Schauer Marin Rodrigues <josch@mister-muffin.de>
+#
+# This is a wrapper around gpgv as invoked by apt. It turns EXPKEYSIG results
+# from gpgv into GOODSIG results. This is necessary for apt to access very old
+# timestamps from snapshot.debian.org for which the GPG key is already expired:
+#
+# Get:1 http://snapshot.debian.org/archive/debian/20150106T000000Z unstable InRelease [242 kB]
+# Err:1 http://snapshot.debian.org/archive/debian/20150106T000000Z unstable InRelease
+# The following signatures were invalid: EXPKEYSIG 8B48AD6246925553 Debian Archive Automatic Signing Key (7.0/wheezy) <ftpmaster@debian.org>
+# Reading package lists...
+# W: GPG error: http://snapshot.debian.org/archive/debian/20150106T000000Z unstable InRelease: The following signatures were invalid: EXPKEYSIG 8B48AD6246925553 Debian Archive Automatic Signing Key (7.0/wheezy) <ftpmaster@debian.org>
+# E: The repository 'http://snapshot.debian.org/archive/debian/20150106T000000Z unstable InRelease' is not signed.
+#
+# To use this script, call apt with
+#
+# -o Apt::Key::gpgvcommand=/usr/libexec/mmdebstrap/gpgvnoexpkeysig
+#
+# Scripts doing similar things can be found here:
+#
+# * debuerreotype as /usr/share/debuerreotype/scripts/.gpgv-ignore-expiration.sh
+# * derivative census: salsa.d.o/deriv-team/census/-/blob/master/bin/fakegpgv
+
+set -eu
+
+find_gpgv_status_fd() {
+ while [ "$#" -gt 0 ]; do
+ if [ "$1" = '--status-fd' ]; then
+ echo "$2"
+ return 0
+ fi
+ shift
+ done
+ # default fd is stdout
+ echo 1
+}
+GPGSTATUSFD="$(find_gpgv_status_fd "$@")"
+
+case $GPGSTATUSFD in
+ ''|*[!0-9]*)
+ echo "invalid --status-fd argument" >&2
+ exit 1
+ ;;
+esac
+
+# we need eval because we cannot redirect a variable fd
+eval 'exec gpgv "$@" '"$GPGSTATUSFD"'>&1 | sed "s/^\[GNUPG:\] EXPKEYSIG /[GNUPG:] GOODSIG /" >&'"$GPGSTATUSFD"
diff --git a/taskcluster/docker/debian-raw/taskcluster-hack.sh b/taskcluster/docker/debian-raw/taskcluster-hack.sh
new file mode 100755
index 0000000000..eecac021ec
--- /dev/null
+++ b/taskcluster/docker/debian-raw/taskcluster-hack.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+# APT version 2.1.15 and newer changed how they handle quoting in redirections
+# in a way that breaks the setup for APT repos in taskcluster artifacts
+# (unfortunately, there's also no setup on the taskcluster end that would work
+# with both old and newer versions of APT, short of removing redirections
+# entirely).
+/usr/lib/apt/methods/https | sed -u '/^New-URI:/s/+/%2b/g'
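
A quick way to see what the sed filter does, using a made-up URI (in real use it filters the output of the apt https method):

    $ printf 'New-URI: https://example.com/api/artifacts/public/build+info\n' | sed -u '/^New-URI:/s/+/%2b/g'
    New-URI: https://example.com/api/artifacts/public/build%2binfo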
diff --git a/taskcluster/docker/debian-repackage/Dockerfile b/taskcluster/docker/debian-repackage/Dockerfile
new file mode 100644
index 0000000000..2ca035be0b
--- /dev/null
+++ b/taskcluster/docker/debian-repackage/Dockerfile
@@ -0,0 +1,34 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mozilla Releng <release@mozilla.com>
+
+VOLUME /builds/worker/workspace
+
+# At the moment the Firefox build baseline is jessie.
+# Things in the build/CI system that run in the task's container
+# (mach, run-task, etc.) do not support jessie.
+# There's also no jessie `base` images or packages generated in the CI.
+# To generate Firefox Debian packages compatible with the baseline
+# we bootstrap basic i386 and amd64 jessie systems on bullseye.
+# We use these to generate shared library dependencies in jessie using chroot.
+# python/mozbuild/mozbuild/repackaging/deb.py checks for jessie systems
+# bootstrapped under /srv/jessie-i386 and /srv/jessie-amd64
+# If they aren't there, deb.py runs the repackage in the working directory without using chroot.
+# To keep the build and repackage environments consistent, the Debian baseline used
+# here (jessie) should be synchronized with the baseline used in
+# taskcluster/scripts/misc/build-sysroot.sh
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+ apt-get update && \
+ apt-get install debootstrap python3-distutils && set -xe && \
+ for arch in i386 amd64; do \
+ debootstrap \
+ --arch=$arch \
+ --variant=buildd \
+ --include=debhelper,libasound2,libdbus-glib-1-2,libgtk-3-0,libx11-xcb1,libxtst6 \
+ --keyring=/usr/share/keyrings/debian-archive-removed-keys.gpg \
+ --verbose \
+ jessie \
+ /srv/jessie-$arch \
+ https://archive.debian.org/debian; \
+ done
diff --git a/taskcluster/docker/decision/Dockerfile b/taskcluster/docker/decision/Dockerfile
new file mode 100644
index 0000000000..f2aa83903e
--- /dev/null
+++ b/taskcluster/docker/decision/Dockerfile
@@ -0,0 +1,15 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Andrew Halberstadt <ahal@mozilla.com>
+
+RUN mkdir /builds/worker/artifacts && \
+ chown worker:worker /builds/worker/artifacts
+
+ADD system-setup.sh /tmp/system-setup.sh
+RUN bash /tmp/system-setup.sh
+
+ADD comm-task-env /builds/worker/bin/comm-task-env
+
+ENV PATH=/builds/worker/bin:$PATH
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/decision/HASH b/taskcluster/docker/decision/HASH
new file mode 100644
index 0000000000..b854e10d0f
--- /dev/null
+++ b/taskcluster/docker/decision/HASH
@@ -0,0 +1 @@
+sha256:9f69fe08c28e3cb3cc296451f0a2735df6e25d0e3c877ea735ef1b7f0b345b06
diff --git a/taskcluster/docker/decision/README.md b/taskcluster/docker/decision/README.md
new file mode 100644
index 0000000000..4490880be8
--- /dev/null
+++ b/taskcluster/docker/decision/README.md
@@ -0,0 +1,5 @@
+# Decision Tasks
+
+The decision image is a "bootstrapping" image for the in-tree logic. It
+deals with cloning gecko and provides the related utilities for an
+environment where we can run gecko.
diff --git a/taskcluster/docker/decision/VERSION b/taskcluster/docker/decision/VERSION
new file mode 100644
index 0000000000..1454f6ed4b
--- /dev/null
+++ b/taskcluster/docker/decision/VERSION
@@ -0,0 +1 @@
+4.0.1
diff --git a/taskcluster/docker/decision/comm-task-env b/taskcluster/docker/decision/comm-task-env
new file mode 100755
index 0000000000..65481497ae
--- /dev/null
+++ b/taskcluster/docker/decision/comm-task-env
@@ -0,0 +1,199 @@
+#!/usr/bin/python3 -u
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Thunderbird build environment prep for run-task,
+for use with comm-central derived repositories.
+
+This script is meant to run prior to run-task on repositories like
+comm-central that need to check out a copy of a mozilla repository
+in order to build.
+See bug 1491371 for background on why this is necessary.
+
+A project will have a file named ".gecko_rev.yml" in its root. See the
+constant "GECKO_REV_CONF" if you want to change that. To download it, the
+script uses the project repository URL and the revision number.
+Those are defined in the environment variables:
+COMM_HEAD_REPOSITORY
+COMM_HEAD_REV
+
+.gecko_rev.yml has a structure like (for comm-central):
+```
+GECKO_BASE_REPOSITORY: https://hg.mozilla.org/mozilla-unified
+GECKO_HEAD_REPOSITORY: https://hg.mozilla.org/mozilla-central
+GECKO_HEAD_REF: default
+```
+or for branches:
+```
+GECKO_BASE_REPOSITORY: https://hg.mozilla.org/mozilla-unified
+GECKO_HEAD_REPOSITORY: https://hg.mozilla.org/releases/mozilla-beta
+GECKO_HEAD_REF: THUNDERBIRD_60_VERBRANCH
+GECKO_HEAD_REV: 6a830d12f15493a70b1192022c9985eba2139910
+```
+
+Note about GECKO_HEAD_REV and GECKO_HEAD_REF:
+GECKO_HEAD_REF is a branch name or "default".
+GECKO_HEAD_REV is a revision hash.
+"""
+
+import sys
+
+import os
+import socket
+import time
+from datetime import datetime
+from pprint import pformat
+
+import urllib.error
+import urllib.request
+
+import yaml
+
+if sys.version_info[0:2] < (3, 5):
+ print('run-task-wrapper requires Python 3.5+')
+ sys.exit(1)
+
+GECKO_REV_CONF = ".gecko_rev.yml"
+DEBUG = bool(os.environ.get("RTW_DEBUG", False))
+
+
+def print_message(msg, prefix=__file__, level=""):
+ """
+ Print messages.
+ :param object msg: message to print, usually a string, but not always
+ :param str prefix: message prefix
+ :param str level: message level (DEBUG, ERROR, INFO)
+ """
+ if not isinstance(msg, str):
+ msg = pformat(msg)
+ now = datetime.utcnow().isoformat()
+ # slice microseconds to 3 decimals.
+ now = now[:-3] if now[-7:-6] == '.' else now
+ if level:
+ sys.stdout.write('[{prefix} {now}Z] {level}: {msg}\n'.format(
+ prefix=prefix, now=now, level=level, msg=msg))
+ else:
+ sys.stdout.write('[{prefix} {now}Z] {msg}\n'.format(
+ prefix=prefix, now=now, msg=msg))
+ sys.stdout.flush()
+
+
+def error_exit(msg):
+ """Print the error message and exit with error."""
+ print_message(msg, level="ERROR")
+ if DEBUG:
+ raise Exception(msg)
+
+ sys.exit(1)
+
+
+def print_debug(msg):
+ """Prints a message with DEBUG prefix if DEBUG is enabled
+ with the environment variable "RTW_DEBUG".
+ """
+ if DEBUG:
+ print_message(msg, level="DEBUG")
+
+
+def check_environ():
+ """Check that the necessary environment variables to find the
+ comm- repository are defined. (Set in .taskcluster.yml)
+ :return: tuple(str, str)
+ """
+ print_debug("Checking environment variables...")
+ project_head_repo = os.environ.get("COMM_HEAD_REPOSITORY", None)
+ project_head_rev = os.environ.get("COMM_HEAD_REV", None)
+
+ if project_head_repo is None or project_head_rev is None:
+ error_exit("Environment NOT Ok:\n\tHead: {}\n\tRev: {}\n").format(
+ project_head_repo, project_head_rev)
+
+ print_debug("Environment Ok:\n\tHead: {}\n\tRev: {}\n".format(
+ project_head_repo, project_head_rev))
+ return project_head_repo, project_head_rev
+
+
+def download_url(url, retry=1):
+ """Downloads the given URL. Naively retries (when asked) upon failure
+ :param url: str
+ :param retry: int
+ :return: str
+ """
+ # Use 1-based counting for display and calculation purposes.
+ for i in range(1, retry+1):
+ try:
+ print_message('Fetching {}. Attempt {} of {}.'.format(
+ url, i, retry))
+ with urllib.request.urlopen(url, timeout=10) as response:
+ data = response.read().decode("utf-8")
+ return data
+ except (urllib.error.URLError, socket.timeout) as exc:
+ print_message('Unable to retrieve {}'.format(url))
+ if isinstance(exc, urllib.error.URLError):
+ print_message(exc.reason)
+ else: # socket.timeout
+ print_message('Connection timed out.')
+
+ if i < retry: # No more retries
+ wait_time = i * 5 # fail #1: sleep 5s. #2, sleep 10s
+ print_message('Retrying in {} seconds.'.format(wait_time))
+ time.sleep(wait_time)
+
+ error_exit('No more retry attempts! Aborting.')
+
+
+def fetch_gecko_conf(project_head_repo, project_revision):
+ """Downloads .gecko_rev.yml from the project repository
+ :param project_head_repo: str
+ :param project_revision: str
+ :return: dict
+ """
+ gecko_conf_url = '/'.join(
+ [project_head_repo, 'raw-file', project_revision, GECKO_REV_CONF])
+
+ gecko_conf_yml = download_url(gecko_conf_url, retry=5)
+
+ try:
+ gecko_conf = yaml.safe_load(gecko_conf_yml)
+ return gecko_conf
+ except yaml.YAMLError as exc:
+ err_txt = ["Error processing Gecko YAML configuration."]
+ if hasattr(exc, "problem_mark"):
+ mark = exc.problem_mark # pylint: disable=no-member
+ err_txt.append("Error position: line {}, column {}".format(
+ mark.line + 1, mark.column + 1))
+ error_exit('\n'.join(err_txt))
+
+
+def update_environment(gecko_conf):
+ """Adds the new variables defined in gecko_conf to the
+ running environment.
+ :param gecko_conf: dict
+ """
+ print_message("Updating environment with:")
+ print_message(gecko_conf)
+ os.environ.update(gecko_conf)
+
+ print_debug("New environment:")
+ print_debug(os.environ)
+
+
+def exec_run_task(args):
+ """Executes run-task with a modified environment."""
+ print_message("Executing: {}".format(pformat(args)))
+ os.execv(args[0], args[0:])
+
+
+def main():
+ """Main function."""
+ args = sys.argv[1:] # Remaining args starting with run-task
+
+ project_head_repo, project_revision = check_environ()
+ gecko_conf = fetch_gecko_conf(project_head_repo, project_revision)
+ update_environment(gecko_conf)
+ exec_run_task(args)
+
+
+if __name__ == "__main__":
+ main()
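
A minimal sketch of how this wrapper is meant to be invoked; the repository value matches the docstring above, while the revision and the run-task arguments are placeholders:

    export COMM_HEAD_REPOSITORY=https://hg.mozilla.org/comm-central
    export COMM_HEAD_REV=0123456789abcdef0123   # hypothetical revision
    comm-task-env /builds/worker/bin/run-task <run-task arguments>

The script downloads $COMM_HEAD_REPOSITORY/raw-file/$COMM_HEAD_REV/.gecko_rev.yml, exports its keys into the environment, and then execs the given run-task command.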
diff --git a/taskcluster/docker/decision/system-setup.sh b/taskcluster/docker/decision/system-setup.sh
new file mode 100644
index 0000000000..0f9d04fa50
--- /dev/null
+++ b/taskcluster/docker/decision/system-setup.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+
+set -v -e
+
+test "$(whoami)" == 'root'
+
+apt-get update
+apt-get install \
+ python \
+ sudo \
+ python3-yaml
+
+rm -rf /var/lib/apt/lists/
+rm "$0"
diff --git a/taskcluster/docker/diffoscope/Dockerfile b/taskcluster/docker/diffoscope/Dockerfile
new file mode 100644
index 0000000000..22e68fd784
--- /dev/null
+++ b/taskcluster/docker/diffoscope/Dockerfile
@@ -0,0 +1,33 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV LANG=en_US.UTF-8
+
+RUN apt-get install \
+ binutils-multiarch \
+ bzip2 \
+ curl \
+ enjarify \
+ diffoscope \
+ jsbeautifier \
+ libc++abi1 \
+ locales \
+ default-jdk-headless \
+ python3-progressbar \
+ unzip \
+ zip \
+ && \
+ sed -i '/en_US.UTF-8/s/^# *//' /etc/locale.gen && \
+ locale-gen
+
+COPY get_and_diffoscope /builds/worker/bin/get_and_diffoscope
+COPY readelf /builds/worker/bin/readelf
+COPY report_error /builds/worker/bin/report_error
+COPY test_diffoscope /builds/worker/bin/test_diffoscope
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/* && \
+ /builds/worker/bin/test_diffoscope
diff --git a/taskcluster/docker/diffoscope/get_and_diffoscope b/taskcluster/docker/diffoscope/get_and_diffoscope
new file mode 100644
index 0000000000..9c51af2508
--- /dev/null
+++ b/taskcluster/docker/diffoscope/get_and_diffoscope
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+set -e
+set -x
+
+cd /builds/worker
+
+mkdir a b
+
+# /builds/worker/bin contains wrapper binaries to divert what diffoscope
+# needs to use, so it needs to appear first.
+export PATH=/builds/worker/bin:$PATH
+
+# Until https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=879010 is
+# implemented, it's better to first manually extract the data.
+# Plus dmg files are not supported yet.
+
+case "$ORIG_URL" in
+*.zip|*.apk)
+ curl -L "$ORIG_URL" > a.zip
+ curl -L "$NEW_URL" > b.zip
+ unzip -d a a.zip
+ unzip -d b b.zip
+ ;;
+*.tar.bz2)
+ curl -L "$ORIG_URL" | tar -C a -jxf -
+ curl -L "$NEW_URL" | tar -C b -jxf -
+ ;;
+*.tar.gz)
+ curl -L "$ORIG_URL" | tar -C a -zxf -
+ curl -L "$NEW_URL" | tar -C b -zxf -
+ ;;
+*.dmg)
+ for tool in lipo otool; do
+ ln -s $MOZ_FETCHES_DIR/cctools/bin/x86_64-apple-darwin*-$tool bin/$tool
+ done
+ curl -L "$ORIG_URL" > a.dmg
+ curl -L "$NEW_URL" > b.dmg
+ for i in a b; do
+ $MOZ_FETCHES_DIR/dmg/dmg extract $i.dmg $i.hfs
+ $MOZ_FETCHES_DIR/dmg/hfsplus $i.hfs extractall / $i
+ done
+ ;;
+*)
+ ARTIFACT=$(basename "${ORIG_URL}")
+ curl -L "$ORIG_URL" > "a/${ARTIFACT}"
+ curl -L "$NEW_URL" > "b/${ARTIFACT}"
+esac
+
+case "$ORIG_URL" in
+*/target.apk)
+ OMNIJAR=assets/omni.ja
+ ;;
+*)
+ OMNIJAR=omni.ja
+ ;;
+esac
+
+# Builds differ in some small ways 99% of the time, so it's not
+# really useful to report a failure (at least not until we actually
+# care about the builds being 100% identical).
+POST=true
+
+fail() {
+ exit 1
+}
+
+for option; do
+ case "$option" in
+ --unpack)
+ CURDIR=$PWD
+ for dir in a b; do
+ # Need to run mach python from inside the gecko source.
+ # See bug #1533642.
+ (cd $GECKO_PATH && ./mach python toolkit/mozapps/installer/unpack.py --omnijar $OMNIJAR $CURDIR/$dir)
+ done
+ ;;
+ --fail)
+ POST="fail"
+ ;;
+ *)
+ echo "Unsupported option: $option" >&2
+ exit 1
+ esac
+done
+
+if [ -n "$PRE_DIFF" ]; then
+ eval $PRE_DIFF
+fi
+
+if diffoscope \
+ --html diff.html \
+ --text diff.txt \
+ --progress \
+ $DIFFOSCOPE_ARGS \
+ a b
+then
+ # Ok
+ :
+else
+ $(dirname $0)/report_error diff
+ $POST
+fi
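
A hedged usage sketch of the script above; the artifact URLs are placeholders, and GECKO_PATH (assumed to be the usual checkout location) is only needed when --unpack is passed:

    export ORIG_URL=https://example.com/artifacts/orig/target.tar.bz2
    export NEW_URL=https://example.com/artifacts/new/target.tar.bz2
    export GECKO_PATH=/builds/worker/checkouts/gecko
    get_and_diffoscope --unpack   # add --fail to turn differences into a task failure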
diff --git a/taskcluster/docker/diffoscope/readelf b/taskcluster/docker/diffoscope/readelf
new file mode 100644
index 0000000000..6b864171d7
--- /dev/null
+++ b/taskcluster/docker/diffoscope/readelf
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+case "$1 $2" in
+"--wide --symbols")
+ # When called with --wide --symbols, we remove the first column (which
+ # is essentially a line number that is not very useful), and then sort,
+ # which will order symbols by address, making a diff more useful.
+ /usr/bin/readelf "$@" | awk -F: '{print $2}' | sort
+ ;;
+*)
+ exec /usr/bin/readelf "$@"
+ ;;
+esac
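
For example, a hypothetical symbol-table line from `readelf --wide --symbols` such as

    3: 00000000000001a8     0 SECTION LOCAL  DEFAULT    4

comes out of the wrapper with the field after the first colon kept and the whole output sorted, so symbols that merely changed their index between two builds no longer produce spurious diff hunks:

    00000000000001a8     0 SECTION LOCAL  DEFAULT    4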
diff --git a/taskcluster/docker/diffoscope/report_error b/taskcluster/docker/diffoscope/report_error
new file mode 100755
index 0000000000..9074e33cae
--- /dev/null
+++ b/taskcluster/docker/diffoscope/report_error
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+import sys
+stem = sys.argv[1]
+
+# We "parse" the diff output, so we look at the lines that contain a "tee", like:
+# ├── +++ b/firefox
+# │ ├── +++ b/firefox/libxul.so
+# │ │ ├── readelf --wide --notes {}
+# We ignore lines like the last one, to only report file names. And we ignore
+# lines for directories such as the first one.
+TEE = "├── "
+VERTICAL_LINE = "│"
+paths = set()
+with open(f"{stem}.txt") as fh:
+ for l in fh:
+ l = l.rstrip()
+ before, tee, after = l.partition(TEE)
+ if not tee:
+ continue
+ before = before.split()
+ assert all(x == VERTICAL_LINE for x in before)
+ depth = len(before)
+ _, plus, after = after.partition("+++ ")
+ if not plus:
+ continue
+ _, b, full_path = after.partition("b/")
+ assert b == "b/"
+ parent_path = "/".join(full_path.split("/")[:-1])
+ if parent_path in paths:
+ paths.remove(parent_path)
+ if full_path:
+ paths.add(full_path)
+
+for p in sorted(paths):
+ print(f"TEST-UNEXPECTED-FAIL | {p} differs. See the {stem}.html or {stem}.txt artifact")
diff --git a/taskcluster/docker/diffoscope/test_diffoscope b/taskcluster/docker/diffoscope/test_diffoscope
new file mode 100644
index 0000000000..ab1c6c1eee
--- /dev/null
+++ b/taskcluster/docker/diffoscope/test_diffoscope
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+set -e
+
+WORKDIR=$(mktemp -d)
+cd $WORKDIR
+
+mkdir -p a/foo/bar/bar
+mkdir -p a/foo/bar/baz
+mkdir -p b/foo/bar/bar
+mkdir -p b/foo/bar/baz
+
+# A file that is modified
+echo qux > a/foo/bar/qux
+echo quz > b/foo/bar/qux
+
+# A binary file that is modified
+cp $(which ls) a/foo/bin
+cp $(which cat) b/foo/bin
+
+# A file that is removed
+echo hoge > a/foo/bar/bar/hoge
+
+# A file that is created
+echo fuga > b/foo/bar/baz/fuga
+
+# Also add a zip file with the same contents
+(cd a/foo; zip -r bar.zip bar)
+(cd b/foo; zip -r bar.zip bar)
+
+if TERM=linux diffoscope --no-progress --text diff.txt a b; then
+ echo "diffoscope didn't find differences?"
+ exit 1
+fi
+
+cat > expected.txt <<EOF
+TEST-UNEXPECTED-FAIL | foo/bar.zip differs. See the diff.html or diff.txt artifact
+TEST-UNEXPECTED-FAIL | foo/bar/bar differs. See the diff.html or diff.txt artifact
+TEST-UNEXPECTED-FAIL | foo/bar/baz differs. See the diff.html or diff.txt artifact
+TEST-UNEXPECTED-FAIL | foo/bar/qux differs. See the diff.html or diff.txt artifact
+TEST-UNEXPECTED-FAIL | foo/bin differs. See the diff.html or diff.txt artifact
+EOF
+
+$(dirname $0)/report_error diff | diff -u - expected.txt || exit 1
+
+cd $OLDPWD
+rm -rf $WORKDIR
diff --git a/taskcluster/docker/fetch/Dockerfile b/taskcluster/docker/fetch/Dockerfile
new file mode 100644
index 0000000000..792b3f43a6
--- /dev/null
+++ b/taskcluster/docker/fetch/Dockerfile
@@ -0,0 +1,36 @@
+FROM $DOCKER_IMAGE_PARENT
+
+### Add worker user and setup its workspace.
+RUN mkdir /builds && \
+ groupadd -g 1000 worker && \
+ useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker && \
+ mkdir -p /builds/worker/workspace && \
+ chown -R worker:worker /builds
+
+# Declare default working folder
+WORKDIR /builds/worker
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+ apt-get update && \
+ apt-get install \
+ gnupg \
+ bzip2 \
+ git \
+ openssh-client \
+ python3-pip \
+ python3-requests \
+ python3-zstandard \
+ unzip
+
+# %include taskcluster/scripts/run-task
+ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task
+
+# %include taskcluster/scripts/misc/fetch-content
+ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content
+
+# %include taskcluster/scripts/misc/fetch-chromium.py
+ADD topsrcdir/taskcluster/scripts/misc/fetch-chromium.py /builds/worker/bin/fetch-chromium.py
+
+RUN pip3 install redo==2.0.4
diff --git a/taskcluster/docker/firefox-flatpak/Dockerfile b/taskcluster/docker/firefox-flatpak/Dockerfile
new file mode 100644
index 0000000000..ad95e6cd28
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/Dockerfile
@@ -0,0 +1,19 @@
+FROM freedesktopsdk/flatpak:22.08-x86_64
+MAINTAINER release@mozilla.com
+
+RUN mkdir /scripts/
+WORKDIR /scripts/
+# Copy everything in the docker/firefox-flatpak folder but the Dockerfile
+#
+# XXX The following pattern is neither a regex nor a glob. It's
+# documented at https://golang.org/pkg/path/filepath/#Match. There's no
+# way of explicitly filtering out "Dockerfile". If one day someone needs
+# to add a file starting with "D", then we must revisit the pattern below.
+COPY [^D]* /scripts/
+
+RUN ["gcc", "-Wall", "-shared", "-o", "/scripts/close_range.so", "/scripts/close_range.c"]
+
+ENV LD_PRELOAD /scripts/close_range.so
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/firefox-flatpak/close_range.c b/taskcluster/docker/firefox-flatpak/close_range.c
new file mode 100644
index 0000000000..d786e78e3b
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/close_range.c
@@ -0,0 +1,12 @@
+/*
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+*/
+
+#include <errno.h>
+
+int close_range(unsigned int first, unsigned int last, unsigned int flags) {
+ errno = ENOSYS;
+ return -1;
+}
diff --git a/taskcluster/docker/firefox-flatpak/default-preferences.js b/taskcluster/docker/firefox-flatpak/default-preferences.js
new file mode 100644
index 0000000000..95663d03db
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/default-preferences.js
@@ -0,0 +1,11 @@
+/*global pref*/
+/*eslint no-undef: "error"*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+pref("intl.locale.requested", "");
+pref("app.update.auto", false);
+pref("app.update.enabled", false);
+pref("app.update.autoInstallEnabled", false);
+pref("browser.shell.checkDefaultBrowser", false);
+pref("spellchecker.dictionary_path", "/usr/share/hunspell");
diff --git a/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py b/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py
new file mode 100644
index 0000000000..b1eb745d7f
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+
+"""
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"""
+
+
+import json
+import sys
+
+l10n_changesets_json_path = sys.argv[1]
+with open(l10n_changesets_json_path) as f:
+ locales = json.load(f).keys()
+linux_locales = [l for l in locales if l != "ja-JP-mac"]
+
+print("\n".join(sorted(linux_locales)))
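
A small sanity check, assuming a hypothetical l10n-changesets.json whose top-level keys are locale codes:

    $ cat l10n-changesets.json
    {"fr": {"revision": "abc"}, "de": {"revision": "def"}, "ja-JP-mac": {"revision": "123"}}
    $ python3 extract_locales_from_l10n_json.py l10n-changesets.json
    de
    fr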
diff --git a/taskcluster/docker/firefox-flatpak/launch-script.sh b/taskcluster/docker/firefox-flatpak/launch-script.sh
new file mode 100644
index 0000000000..98279e71eb
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/launch-script.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+export TMPDIR=$XDG_CACHE_HOME/tmp
+exec /app/lib/firefox/firefox "$@"
diff --git a/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in
new file mode 100644
index 0000000000..02d6ac110d
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<application>
+ <id type="desktop">org.mozilla.firefox</id>
+ <launchable type="desktop-id">org.mozilla.firefox.desktop</launchable>
+ <name>Firefox</name>
+ <developer_name>Mozilla</developer_name>
+ <summary>Fast, Private &amp; Safe Web Browser</summary>
+ <metadata_license>CC0-1.0</metadata_license>
+ <project_license>MPL-2.0</project_license>
+ <description>
+ <p>When it comes to your life online, you have a choice: accept the factory settings or put your privacy first. When you choose Firefox as your default browser, you’re choosing to protect your data while supporting an independent tech company. Firefox is also the only major browser backed by a non-profit fighting to give you more openness, transparency and control of your life online. Join hundreds of millions of people who choose to protect what's important by choosing Firefox - a web browser designed to be fast, easy to use, customizable and private.</p>
+ </description>
+ <releases>
+ <release version="$VERSION" date="$DATE"/>
+ </releases>
+ <keywords>
+ <keyword>mozilla</keyword>
+ <keyword>internet</keyword>
+ <keyword>web</keyword>
+ </keywords>
+ <content_rating type="oars-1.1" />
+ <url type="homepage">https://www.mozilla.org/firefox/</url>
+ <url type="donation">https://donate.mozilla.org/</url>
+ <url type="bugtracker">https://bugzilla.mozilla.org/</url>
+ <url type="help">https://support.mozilla.org/</url>
+ <url type="translate">https://wiki.mozilla.org/L10n:Starting_a_localization</url>
+
+ <screenshots>
+ <screenshot type="default">https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image1.png</screenshot>
+ <screenshot>https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image2.png</screenshot>
+ <screenshot>https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image3.png</screenshot>
+ </screenshots>
+ <custom>
+ <value key="flathub::manifest">https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/docker/firefox-flatpak/runme.sh</value>
+ </custom>
+
+</application>
diff --git a/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop
new file mode 100644
index 0000000000..21c3e7aec0
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop
@@ -0,0 +1,386 @@
+[Desktop Entry]
+Version=1.0
+Name=Firefox Web Browser
+Name[ar]=متصفح الويب فَيَرفُكْس
+Name[ast]=Restolador web Firefox
+Name[bn]=ফায়ারফক্স ওয়েব ব্রাউজার
+Name[ca]=Navegador web Firefox
+Name[cs]=Firefox Webový prohlížeč
+Name[da]=Firefox - internetbrowser
+Name[el]=Περιηγητής Firefox
+Name[es]=Navegador web Firefox
+Name[et]=Firefoxi veebibrauser
+Name[fa]=مرورگر اینترنتی Firefox
+Name[fi]=Firefox-selain
+Name[fr]=Navigateur Web Firefox
+Name[gl]=Navegador web Firefox
+Name[he]=דפדפן האינטרנט Firefox
+Name[hr]=Firefox web preglednik
+Name[hu]=Firefox webböngésző
+Name[it]=Firefox Browser Web
+Name[ja]=Firefox ウェブ・ブラウザ
+Name[ko]=Firefox 웹 브라우저
+Name[ku]=Geroka torê Firefox
+Name[lt]=Firefox interneto naršyklė
+Name[nb]=Firefox Nettleser
+Name[nl]=Firefox webbrowser
+Name[nn]=Firefox Nettlesar
+Name[no]=Firefox Nettleser
+Name[pl]=Przeglądarka WWW Firefox
+Name[pt]=Firefox Navegador Web
+Name[pt_BR]=Navegador Web Firefox
+Name[ro]=Firefox – Navigator Internet
+Name[ru]=Веб-браузер Firefox
+Name[sk]=Firefox - internetový prehliadač
+Name[sl]=Firefox spletni brskalnik
+Name[sv]=Firefox webbläsare
+Name[tr]=Firefox Web Tarayıcısı
+Name[ug]=Firefox توركۆرگۈ
+Name[uk]=Веб-браузер Firefox
+Name[vi]=Trình duyệt web Firefox
+Name[zh_CN]=Firefox 网络浏览器
+Name[zh_TW]=Firefox 網路瀏覽器
+Comment=Browse the World Wide Web
+Comment[ar]=تصفح الشبكة العنكبوتية العالمية
+Comment[ast]=Restola pela Rede
+Comment[bn]=ইন্টারনেট ব্রাউজ করুন
+Comment[ca]=Navegueu per la web
+Comment[cs]=Prohlížení stránek World Wide Webu
+Comment[da]=Surf på internettet
+Comment[de]=Im Internet surfen
+Comment[el]=Μπορείτε να περιηγηθείτε στο διαδίκτυο (Web)
+Comment[es]=Navegue por la web
+Comment[et]=Lehitse veebi
+Comment[fa]=صفحات شبکه جهانی اینترنت را مرور نمایید
+Comment[fi]=Selaa Internetin WWW-sivuja
+Comment[fr]=Naviguer sur le Web
+Comment[gl]=Navegar pola rede
+Comment[he]=גלישה ברחבי האינטרנט
+Comment[hr]=Pretražite web
+Comment[hu]=A világháló böngészése
+Comment[it]=Esplora il web
+Comment[ja]=ウェブを閲覧します
+Comment[ko]=웹을 돌아 다닙니다
+Comment[ku]=Li torê bigere
+Comment[lt]=Naršykite internete
+Comment[nb]=Surf på nettet
+Comment[nl]=Verken het internet
+Comment[nn]=Surf på nettet
+Comment[no]=Surf på nettet
+Comment[pl]=Przeglądanie stron WWW
+Comment[pt]=Navegue na Internet
+Comment[pt_BR]=Navegue na Internet
+Comment[ro]=Navigați pe Internet
+Comment[ru]=Доступ в Интернет
+Comment[sk]=Prehliadanie internetu
+Comment[sl]=Brskajte po spletu
+Comment[sv]=Surfa på webben
+Comment[tr]=İnternet'te Gezinin
+Comment[ug]=دۇنيادىكى توربەتلەرنى كۆرگىلى بولىدۇ
+Comment[uk]=Перегляд сторінок Інтернету
+Comment[vi]=Để duyệt các trang web
+Comment[zh_CN]=浏览互联网
+Comment[zh_TW]=瀏覽網際網路
+GenericName=Web Browser
+GenericName[ar]=متصفح ويب
+GenericName[ast]=Restolador Web
+GenericName[bn]=ওয়েব ব্রাউজার
+GenericName[ca]=Navegador web
+GenericName[cs]=Webový prohlížeč
+GenericName[da]=Webbrowser
+GenericName[el]=Περιηγητής διαδικτύου
+GenericName[es]=Navegador web
+GenericName[et]=Veebibrauser
+GenericName[fa]=مرورگر اینترنتی
+GenericName[fi]=WWW-selain
+GenericName[fr]=Navigateur Web
+GenericName[gl]=Navegador Web
+GenericName[he]=דפדפן אינטרנט
+GenericName[hr]=Web preglednik
+GenericName[hu]=Webböngésző
+GenericName[it]=Browser web
+GenericName[ja]=ウェブ・ブラウザ
+GenericName[ko]=웹 브라우저
+GenericName[ku]=Geroka torê
+GenericName[lt]=Interneto naršyklė
+GenericName[nb]=Nettleser
+GenericName[nl]=Webbrowser
+GenericName[nn]=Nettlesar
+GenericName[no]=Nettleser
+GenericName[pl]=Przeglądarka WWW
+GenericName[pt]=Navegador Web
+GenericName[pt_BR]=Navegador Web
+GenericName[ro]=Navigator Internet
+GenericName[ru]=Веб-браузер
+GenericName[sk]=Internetový prehliadač
+GenericName[sl]=Spletni brskalnik
+GenericName[sv]=Webbläsare
+GenericName[tr]=Web Tarayıcı
+GenericName[ug]=توركۆرگۈ
+GenericName[uk]=Веб-браузер
+GenericName[vi]=Trình duyệt Web
+GenericName[zh_CN]=网络浏览器
+GenericName[zh_TW]=網路瀏覽器
+Keywords=Internet;WWW;Browser;Web;Explorer
+Keywords[ar]=انترنت;إنترنت;متصفح;ويب;وب
+Keywords[ast]=Internet;WWW;Restolador;Web;Esplorador
+Keywords[ca]=Internet;WWW;Navegador;Web;Explorador;Explorer
+Keywords[cs]=Internet;WWW;Prohlížeč;Web;Explorer
+Keywords[da]=Internet;Internettet;WWW;Browser;Browse;Web;Surf;Nettet
+Keywords[de]=Internet;WWW;Browser;Web;Explorer;Webseite;Site;surfen;online;browsen
+Keywords[el]=Internet;WWW;Browser;Web;Explorer;Διαδίκτυο;Περιηγητής;Firefox;Φιρεφοχ;Ιντερνετ
+Keywords[es]=Explorador;Internet;WWW
+Keywords[fi]=Internet;WWW;Browser;Web;Explorer;selain;Internet-selain;internetselain;verkkoselain;netti;surffaa
+Keywords[fr]=Internet;WWW;Browser;Web;Explorer;Fureteur;Surfer;Navigateur
+Keywords[he]=דפדפן;אינטרנט;רשת;אתרים;אתר;פיירפוקס;מוזילה;
+Keywords[hr]=Internet;WWW;preglednik;Web
+Keywords[hu]=Internet;WWW;Böngésző;Web;Háló;Net;Explorer
+Keywords[it]=Internet;WWW;Browser;Web;Navigatore
+Keywords[is]=Internet;WWW;Vafri;Vefur;Netvafri;Flakk
+Keywords[ja]=Internet;WWW;Web;インターネット;ブラウザ;ウェブ;エクスプローラ
+Keywords[nb]=Internett;WWW;Nettleser;Explorer;Web;Browser;Nettside
+Keywords[nl]=Internet;WWW;Browser;Web;Explorer;Verkenner;Website;Surfen;Online
+Keywords[pt]=Internet;WWW;Browser;Web;Explorador;Navegador
+Keywords[pt_BR]=Internet;WWW;Browser;Web;Explorador;Navegador
+Keywords[ru]=Internet;WWW;Browser;Web;Explorer;интернет;браузер;веб;файрфокс;огнелис
+Keywords[sk]=Internet;WWW;Prehliadač;Web;Explorer
+Keywords[sl]=Internet;WWW;Browser;Web;Explorer;Brskalnik;Splet
+Keywords[tr]=İnternet;WWW;Tarayıcı;Web;Gezgin;Web sitesi;Site;sörf;çevrimiçi;tara
+Keywords[uk]=Internet;WWW;Browser;Web;Explorer;Інтернет;мережа;переглядач;оглядач;браузер;веб;файрфокс;вогнелис;перегляд
+Keywords[vi]=Internet;WWW;Browser;Web;Explorer;Trình duyệt;Trang web
+Keywords[zh_CN]=Internet;WWW;Browser;Web;Explorer;网页;浏览;上网;火狐;Firefox;ff;互联网;网站;
+Keywords[zh_TW]=Internet;WWW;Browser;Web;Explorer;網際網路;網路;瀏覽器;上網;網頁;火狐
+Exec=firefox %u
+Icon=org.mozilla.firefox
+Terminal=false
+Type=Application
+MimeType=text/html;text/xml;application/xhtml+xml;application/vnd.mozilla.xul+xml;text/mml;x-scheme-handler/http;x-scheme-handler/https;
+StartupNotify=true
+Categories=Network;WebBrowser;
+Actions=new-window;new-private-window;profile-manager-window;
+StartupWMClass=firefox
+
+
+[Desktop Action new-window]
+Name=Open a New Window
+Name[ach]=Dirica manyen
+Name[af]=Nuwe venster
+Name[an]=Nueva finestra
+Name[ar]=نافذة جديدة
+Name[as]=নতুন উইন্ডো
+Name[ast]=Ventana nueva
+Name[az]=Yeni Pəncərə
+Name[be]=Новае акно
+Name[bg]=Нов прозорец
+Name[bn_BD]=নতুন উইন্ডো (N)
+Name[bn_IN]=নতুন উইন্ডো
+Name[br]=Prenestr nevez
+Name[brx]=गोदान उइन्ड'(N)
+Name[bs]=Novi prozor
+Name[ca]=Finestra nova
+Name[cak]=K'ak'a' tzuwäch
+Name[cs]=Nové okno
+Name[cy]=Ffenestr Newydd
+Name[da]=Nyt vindue
+Name[de]=Neues Fenster
+Name[dsb]=Nowe wokno
+Name[el]=Νέο παράθυρο
+Name[en_GB]=New Window
+Name[en_US]=New Window
+Name[en_ZA]=New Window
+Name[eo]=Nova fenestro
+Name[es_AR]=Nueva ventana
+Name[es_CL]=Nueva ventana
+Name[es_ES]=Nueva ventana
+Name[es_MX]=Nueva ventana
+Name[et]=Uus aken
+Name[eu]=Leiho berria
+Name[fa]=پنجره جدید‌
+Name[ff]=Henorde Hesere
+Name[fi]=Uusi ikkuna
+Name[fr]=Nouvelle fenêtre
+Name[fy_NL]=Nij finster
+Name[ga_IE]=Fuinneog Nua
+Name[gd]=Uinneag ùr
+Name[gl]=Nova xanela
+Name[gn]=Ovetã pyahu
+Name[gu_IN]=નવી વિન્ડો
+Name[he]=חלון חדש
+Name[hi_IN]=नया विंडो
+Name[hr]=Novi prozor
+Name[hsb]=Nowe wokno
+Name[hu]=Új ablak
+Name[hy_AM]=Նոր Պատուհան
+Name[id]=Jendela Baru
+Name[is]=Nýr gluggi
+Name[it]=Nuova finestra
+Name[ja]=新しいウィンドウ
+Name[ja_JP]=新規ウインドウ
+Name[ka]=ახალი ფანჯარა
+Name[kk]=Жаңа терезе
+Name[km]=បង្អួច​​​ថ្មី
+Name[kn]=ಹೊಸ ಕಿಟಕಿ
+Name[ko]=새 창
+Name[kok]=नवें जनेल
+Name[ks]=نئئ وِنڈو
+Name[lij]=Neuvo barcon
+Name[lo]=ຫນ້າຕ່າງໃຫມ່
+Name[lt]=Naujas langas
+Name[ltg]=Jauns lūgs
+Name[lv]=Jauns logs
+Name[mai]=नव विंडो
+Name[mk]=Нов прозорец
+Name[ml]=പുതിയ ജാലകം
+Name[mr]=नवीन पटल
+Name[ms]=Tetingkap Baru
+Name[my]=ဝင်းဒိုးအသစ်
+Name[nb_NO]=Nytt vindu
+Name[ne_NP]=नयाँ सञ्झ्याल
+Name[nl]=Nieuw venster
+Name[nn_NO]=Nytt vindauge
+Name[or]=ନୂତନ ୱିଣ୍ଡୋ
+Name[pa_IN]=ਨਵੀਂ ਵਿੰਡੋ
+Name[pl]=Nowe okno
+Name[pt_BR]=Nova janela
+Name[pt_PT]=Nova janela
+Name[rm]=Nova fanestra
+Name[ro]=Fereastră nouă
+Name[ru]=Новое окно
+Name[sat]=नावा विंडो (N)
+Name[si]=නව කවුළුවක්
+Name[sk]=Nové okno
+Name[sl]=Novo okno
+Name[son]=Zanfun taaga
+Name[sq]=Dritare e Re
+Name[sr]=Нови прозор
+Name[sv_SE]=Nytt fönster
+Name[ta]=புதிய சாளரம்
+Name[te]=కొత్త విండో
+Name[th]=หน้าต่างใหม่
+Name[tr]=Yeni pencere
+Name[tsz]=Eraatarakua jimpani
+Name[uk]=Нове вікно
+Name[ur]=نیا دریچہ
+Name[uz]=Yangi oyna
+Name[vi]=Cửa sổ mới
+Name[wo]=Palanteer bu bees
+Name[xh]=Ifestile entsha
+Name[zh_CN]=新建窗口
+Name[zh_TW]=開新視窗
+Exec=firefox --new-window %u
+
+[Desktop Action new-private-window]
+Name=Open a New Private Window
+Name[ach]=Dirica manyen me mung
+Name[af]=Nuwe privaatvenster
+Name[an]=Nueva finestra privada
+Name[ar]=نافذة خاصة جديدة
+Name[as]=নতুন ব্যক্তিগত উইন্ডো
+Name[ast]=Ventana privada nueva
+Name[az]=Yeni Məxfi Pəncərə
+Name[be]=Новае акно адасаблення
+Name[bg]=Нов прозорец за поверително сърфиране
+Name[bn_BD]=নতুন ব্যক্তিগত উইন্ডো
+Name[bn_IN]=নতুন ব্যক্তিগত উইন্ডো
+Name[br]=Prenestr merdeiñ prevez nevez
+Name[brx]=गोदान प्राइभेट उइन्ड'
+Name[bs]=Novi privatni prozor
+Name[ca]=Finestra privada nova
+Name[cak]=K'ak'a' ichinan tzuwäch
+Name[cs]=Nové anonymní okno
+Name[cy]=Ffenestr Breifat Newydd
+Name[da]=Nyt privat vindue
+Name[de]=Neues privates Fenster
+Name[dsb]=Nowe priwatne wokno
+Name[el]=Νέο παράθυρο ιδιωτικής περιήγησης
+Name[en_GB]=New Private Window
+Name[en_US]=New Private Window
+Name[en_ZA]=New Private Window
+Name[eo]=Nova privata fenestro
+Name[es_AR]=Nueva ventana privada
+Name[es_CL]=Nueva ventana privada
+Name[es_ES]=Nueva ventana privada
+Name[es_MX]=Nueva ventana privada
+Name[et]=Uus privaatne aken
+Name[eu]=Leiho pribatu berria
+Name[fa]=پنجره ناشناس جدید
+Name[ff]=Henorde Suturo Hesere
+Name[fi]=Uusi yksityinen ikkuna
+Name[fr]=Nouvelle fenêtre de navigation privée
+Name[fy_NL]=Nij priveefinster
+Name[ga_IE]=Fuinneog Nua Phríobháideach
+Name[gd]=Uinneag phrìobhaideach ùr
+Name[gl]=Nova xanela privada
+Name[gn]=Ovetã ñemi pyahu
+Name[gu_IN]=નવી ખાનગી વિન્ડો
+Name[he]=חלון פרטי חדש
+Name[hi_IN]=नयी निजी विंडो
+Name[hr]=Novi privatni prozor
+Name[hsb]=Nowe priwatne wokno
+Name[hu]=Új privát ablak
+Name[hy_AM]=Սկսել Գաղտնի դիտարկում
+Name[id]=Jendela Mode Pribadi Baru
+Name[is]=Nýr huliðsgluggi
+Name[it]=Nuova finestra anonima
+Name[ja]=新しいプライベートウィンドウ
+Name[ja_JP]=新規プライベートウインドウ
+Name[ka]=ახალი პირადი ფანჯარა
+Name[kk]=Жаңа жекелік терезе
+Name[km]=បង្អួច​ឯកជន​ថ្មី
+Name[kn]=ಹೊಸ ಖಾಸಗಿ ಕಿಟಕಿ
+Name[ko]=새 사생활 보호 모드
+Name[kok]=नवो खाजगी विंडो
+Name[ks]=نْو پرایوٹ وینڈو
+Name[lij]=Neuvo barcon privou
+Name[lo]=ເປີດຫນ້າຕ່າງສວນຕົວຂື້ນມາໃຫມ່
+Name[lt]=Naujas privataus naršymo langas
+Name[ltg]=Jauns privatais lūgs
+Name[lv]=Jauns privātais logs
+Name[mai]=नया निज विंडो (W)
+Name[mk]=Нов приватен прозорец
+Name[ml]=പുതിയ സ്വകാര്യ ജാലകം
+Name[mr]=नवीन वैयक्तिक पटल
+Name[ms]=Tetingkap Persendirian Baharu
+Name[my]=New Private Window
+Name[nb_NO]=Nytt privat vindu
+Name[ne_NP]=नयाँ निजी सञ्झ्याल
+Name[nl]=Nieuw privévenster
+Name[nn_NO]=Nytt privat vindauge
+Name[or]=ନୂତନ ବ୍ୟକ୍ତିଗତ ୱିଣ୍ଡୋ
+Name[pa_IN]=ਨਵੀਂ ਪ੍ਰਾਈਵੇਟ ਵਿੰਡੋ
+Name[pl]=Nowe okno prywatne
+Name[pt_BR]=Nova janela privativa
+Name[pt_PT]=Nova janela privada
+Name[rm]=Nova fanestra privata
+Name[ro]=Fereastră privată nouă
+Name[ru]=Новое приватное окно
+Name[sat]=नावा निजेराक् विंडो (W )
+Name[si]=නව පුද්ගලික කවුළුව (W)
+Name[sk]=Nové okno v režime Súkromné prehliadanie
+Name[sl]=Novo zasebno okno
+Name[son]=Sutura zanfun taaga
+Name[sq]=Dritare e Re Private
+Name[sr]=Нови приватан прозор
+Name[sv_SE]=Nytt privat fönster
+Name[ta]=புதிய தனிப்பட்ட சாளரம்
+Name[te]=కొత్త ఆంతరంగిక విండో
+Name[th]=หน้าต่างส่วนตัวใหม่
+Name[tr]=Yeni gizli pencere
+Name[tsz]=Juchiiti eraatarakua jimpani
+Name[uk]=Приватне вікно
+Name[ur]=نیا نجی دریچہ
+Name[uz]=Yangi maxfiy oyna
+Name[vi]=Cửa sổ riêng tư mới
+Name[wo]=Panlanteeru biir bu bees
+Name[xh]=Ifestile yangasese entsha
+Name[zh_CN]=新建隐私浏览窗口
+Name[zh_TW]=新增隱私視窗
+Exec=firefox --private-window %u
+
+[Desktop Action profile-manager-window]
+Name=Open the Profile Manager
+Name[cs]=Správa profilů
+Name[de]=Profilverwaltung öffnen
+Name[es]=Abrir el Gestor de Perfiles
+Name[fr]=Ouvrir le gestionnaire de profils
+Exec=firefox --ProfileManager
diff --git a/taskcluster/docker/firefox-flatpak/policies.json b/taskcluster/docker/firefox-flatpak/policies.json
new file mode 100644
index 0000000000..2645f20c0c
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/policies.json
@@ -0,0 +1,6 @@
+{
+ "policies": {
+ "DisableAppUpdate": true,
+ "DontCheckDefaultBrowser": true
+ }
+}
diff --git a/taskcluster/docker/firefox-flatpak/runme.sh b/taskcluster/docker/firefox-flatpak/runme.sh
new file mode 100755
index 0000000000..91511fe2d6
--- /dev/null
+++ b/taskcluster/docker/firefox-flatpak/runme.sh
@@ -0,0 +1,190 @@
+#!/bin/bash
+set -xe
+
+# Future products supporting Flatpaks will set this accordingly
+: PRODUCT "${PRODUCT:=firefox}"
+
+# Required env variables
+
+test "$VERSION"
+test "$BUILD_NUMBER"
+test "$CANDIDATES_DIR"
+test "$L10N_CHANGESETS"
+test "$FLATPAK_BRANCH"
+
+# Optional env variables
+: WORKSPACE "${WORKSPACE:=/home/worker/workspace}"
+: ARTIFACTS_DIR "${ARTIFACTS_DIR:=/home/worker/artifacts}"
+
+pwd
+
+# XXX: this is used to populate the datetime in org.mozilla.firefox.appdata.xml
+DATE=$(date +%Y-%m-%d)
+export DATE
+
+SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+TARGET_TAR_XZ_FULL_PATH="$ARTIFACTS_DIR/target.flatpak.tar.xz"
+SOURCE_DEST="${WORKSPACE}/source"
+FREEDESKTOP_VERSION="22.08"
+FIREFOX_BASEAPP_CHANNEL="22.08"
+
+
+# XXX: these commands are temporary; an upcoming fix in the upstream Docker image
+# we build on top of, from `freedesktopsdk`, will eventually make these lines unnecessary
+mkdir -p /root /tmp /var/tmp
+mkdir -p "$ARTIFACTS_DIR"
+rm -rf "$SOURCE_DEST" && mkdir -p "$SOURCE_DEST"
+
+# XXX ensure we have a clean slate in the local flatpak repo
+rm -rf ~/.local/share/flatpak/
+
+
+CURL="curl --location --retry 10 --retry-delay 10"
+
+# Download en-US linux64 binary
+$CURL -o "${WORKSPACE}/firefox.tar.bz2" \
+ "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/en-US/firefox-${VERSION}.tar.bz2"
+
+# Use list of locales to fetch L10N XPIs
+$CURL -o "${WORKSPACE}/l10n_changesets.json" "$L10N_CHANGESETS"
+locales=$(python3 "$SCRIPT_DIRECTORY/extract_locales_from_l10n_json.py" "${WORKSPACE}/l10n_changesets.json")
+
+DISTRIBUTION_DIR="$SOURCE_DEST/distribution"
+if [[ "$PRODUCT" == "firefox" ]]; then
+ # Get Flatpak configuration
+ PARTNER_CONFIG_DIR="$WORKSPACE/partner_config"
+ git clone https://github.com/mozilla-partners/flatpak.git "$PARTNER_CONFIG_DIR"
+ mv "$PARTNER_CONFIG_DIR/desktop/flatpak/distribution" "$DISTRIBUTION_DIR"
+else
+ mkdir -p "$DISTRIBUTION_DIR"
+fi
+
+mkdir -p "$DISTRIBUTION_DIR/extensions"
+for locale in $locales; do
+ $CURL -o "$DISTRIBUTION_DIR/extensions/langpack-${locale}@firefox.mozilla.org.xpi" \
+ "$CANDIDATES_DIR/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/xpi/${locale}.xpi"
+done
+
+envsubst < "$SCRIPT_DIRECTORY/org.mozilla.firefox.appdata.xml.in" > "${WORKSPACE}/org.mozilla.firefox.appdata.xml"
+cp -v "$SCRIPT_DIRECTORY/org.mozilla.firefox.desktop" "$WORKSPACE"
+# Add a group policy file to disable app updates, as those are handled by Flathub
+cp -v "$SCRIPT_DIRECTORY/policies.json" "$WORKSPACE"
+cp -v "$SCRIPT_DIRECTORY/default-preferences.js" "$WORKSPACE"
+cp -v "$SCRIPT_DIRECTORY/launch-script.sh" "$WORKSPACE"
+cd "${WORKSPACE}"
+
+flatpak remote-add --user --if-not-exists --from flathub https://dl.flathub.org/repo/flathub.flatpakrepo
+# XXX: added --user to `flatpak install` to avoid ambiguity
+flatpak install --user -y flathub org.mozilla.firefox.BaseApp//${FIREFOX_BASEAPP_CHANNEL} --no-deps
+
+# XXX: these commands are temporary; an upcoming fix in the upstream Docker image
+# we build on top of, from `freedesktopsdk`, will eventually make these two lines unnecessary
+mkdir -p build
+cp -r ~/.local/share/flatpak/app/org.mozilla.firefox.BaseApp/current/active/files build/files
+
+ARCH=$(flatpak --default-arch)
+cat <<EOF > build/metadata
+[Application]
+name=org.mozilla.firefox
+runtime=org.freedesktop.Platform/${ARCH}/${FREEDESKTOP_VERSION}
+sdk=org.freedesktop.Sdk/${ARCH}/${FREEDESKTOP_VERSION}
+base=app/org.mozilla.firefox.BaseApp/${ARCH}/${FIREFOX_BASEAPP_CHANNEL}
+[Extension org.mozilla.firefox.Locale]
+directory=share/runtime/langpack
+autodelete=true
+locale-subset=true
+
+[Extension org.freedesktop.Platform.ffmpeg-full]
+directory=lib/ffmpeg
+add-ld-path=.
+no-autodownload=true
+version=${FREEDESKTOP_VERSION}
+
+[Extension org.mozilla.firefox.systemconfig]
+directory=etc/firefox
+no-autodownload=true
+EOF
+
+cat <<EOF > build/metadata.locale
+[Runtime]
+name=org.mozilla.firefox.Locale
+
+[ExtensionOf]
+ref=app/org.mozilla.firefox/${ARCH}/${FLATPAK_BRANCH}
+EOF
+
+appdir=build/files
+install -d "${appdir}/lib/"
+(cd "${appdir}/lib/" && tar jxf "${WORKSPACE}/firefox.tar.bz2")
+install -D -m644 -t "${appdir}/share/appdata" org.mozilla.firefox.appdata.xml
+install -D -m644 -t "${appdir}/share/applications" org.mozilla.firefox.desktop
+for size in 16 32 48 64 128; do
+ install -D -m644 "${appdir}/lib/firefox/browser/chrome/icons/default/default${size}.png" "${appdir}/share/icons/hicolor/${size}x${size}/apps/org.mozilla.firefox.png"
+done
+mkdir -p "${appdir}/lib/ffmpeg"
+mkdir -p "${appdir}/etc/firefox"
+
+appstream-compose --prefix="${appdir}" --origin=flatpak --basename=org.mozilla.firefox org.mozilla.firefox
+appstream-util mirror-screenshots "${appdir}"/share/app-info/xmls/org.mozilla.firefox.xml.gz "https://dl.flathub.org/repo/screenshots/org.mozilla.firefox-${FLATPAK_BRANCH}" build/screenshots "build/screenshots/org.mozilla.firefox-${FLATPAK_BRANCH}"
+
+# XXX: we used to use `install -D`, which automatically created the components
+# of the target path; now we need to do this manually since we're symlinking
+mkdir -p "${appdir}/lib/firefox/distribution/extensions"
+# XXX: we put the langpacks in /app/share/runtime/langpack/$LANG_CODE and symlink that
+# directory to where Firefox looks for them; this way only the subset configured on the
+# user's system is downloaded instead of all locales
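+# For example (illustrative): for the pt-BR locale the XPI is installed into
+# /app/share/runtime/langpack/pt/ and symlinked from
+# lib/firefox/distribution/extensions/langpack-pt-BR@firefox.mozilla.org.xpi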
+for locale in $locales; do
+ install -D -m644 -t "${appdir}/share/runtime/langpack/${locale%%-*}/" "${DISTRIBUTION_DIR}/extensions/langpack-${locale}@firefox.mozilla.org.xpi"
+ ln -sf "/app/share/runtime/langpack/${locale%%-*}/langpack-${locale}@firefox.mozilla.org.xpi" "${appdir}/lib/firefox/distribution/extensions/langpack-${locale}@firefox.mozilla.org.xpi"
+done
+install -D -m644 -t "${appdir}/lib/firefox/distribution" "$DISTRIBUTION_DIR/distribution.ini"
+install -D -m644 -t "${appdir}/lib/firefox/distribution" policies.json
+install -D -m644 -t "${appdir}/lib/firefox/browser/defaults/preferences" default-preferences.js
+install -D -m755 launch-script.sh "${appdir}/bin/firefox"
+
+# We need to set GTK_PATH to load cups printing backend which is missing in
+# freedesktop sdk.
+#
+# We use features=devel to enable ptrace, which we need for the crash
+# reporter. The application is still confined in a pid namespace, so
+# that won't let us escape the flatpak sandbox. See bug 1653852.
+
+flatpak build-finish build \
+ --allow=devel \
+ --share=ipc \
+ --share=network \
+ --env=GTK_PATH=/app/lib/gtkmodules \
+ --socket=pulseaudio \
+ --socket=wayland \
+ --socket=x11 \
+ --socket=pcsc \
+ --socket=cups \
+ --require-version=0.11.1 \
+ --persist=.mozilla \
+ --filesystem=xdg-download:rw \
+ --filesystem=/run/.heim_org.h5l.kcm-socket \
+ --device=all \
+ --talk-name=org.freedesktop.FileManager1 \
+ --system-talk-name=org.freedesktop.NetworkManager \
+ --talk-name=org.a11y.Bus \
+ --talk-name=org.gnome.SessionManager \
+ --talk-name=org.freedesktop.ScreenSaver \
+ --talk-name="org.gtk.vfs.*" \
+ --talk-name=org.freedesktop.Notifications \
+ --own-name="org.mpris.MediaPlayer2.firefox.*" \
+ --own-name="org.mozilla.firefox.*" \
+ --own-name="org.mozilla.firefox_beta.*" \
+ --command=firefox
+
+flatpak build-export --disable-sandbox --no-update-summary --exclude='/share/runtime/langpack/*/*' repo build "$FLATPAK_BRANCH"
+flatpak build-export --disable-sandbox --no-update-summary --metadata=metadata.locale --files=files/share/runtime/langpack repo build "$FLATPAK_BRANCH"
+ostree commit --repo=repo --canonical-permissions --branch=screenshots/x86_64 build/screenshots
+flatpak build-update-repo --generate-static-deltas repo
+tar cvfJ flatpak.tar.xz repo
+
+mv -- flatpak.tar.xz "$TARGET_TAR_XZ_FULL_PATH"
+
+# XXX: if we ever want to go back to building flatpak bundles, we can restore the commands below; bundles are useful for testing individual artifacts, but are not publishable
+# flatpak build-bundle "$WORKSPACE"/repo org.mozilla.firefox.flatpak org.mozilla.firefox
+# TARGET_FULL_PATH="$ARTIFACTS_DIR/target.flatpak"
+# mv -- *.flatpak "$TARGET_FULL_PATH"
diff --git a/taskcluster/docker/firefox-snap/Dockerfile b/taskcluster/docker/firefox-snap/Dockerfile
new file mode 100644
index 0000000000..dc002c0133
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/Dockerfile
@@ -0,0 +1,71 @@
+## The code below is based on snapcraft/docker/stable.Dockerfile.
+## The modifications follow the documentation for enabling core18 snaps:
+## https://snapcraft.io/docs/t/creating-docker-images-for-snapcraft/11739
+
+# /!\ The base image must follow the version of the `coreXX` package
+# https://forum.snapcraft.io/t/issues-dockerizing-a-snapcraft-build-process/30294/5
+FROM ubuntu:focal
+
+ENV LANG='en_US.UTF-8' \
+ LANGUAGE='en_US:en' \
+ LC_ALL='en_US.UTF-8' \
+ PATH="/snap/bin:$PATH" \
+ SNAP='/snap/snapcraft/current' \
+ SNAP_NAME='snapcraft' \
+ SNAP_ARCH='amd64' \
+ TERM='dumb' \
+ TZ='Etc/UTC'
+
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+# Grab dependencies. snapd is now required per https://github.com/snapcore/snapcraft/pull/3210
+RUN apt-get update && \
+    apt-get dist-upgrade --yes && \
+    apt-get install --yes \
+      curl \
+      jq \
+      squashfs-tools \
+      locales \
+      bzip2 \
+      gcc \
+      git \
+      python3 \
+      snapd \
+      sudo \
+      && \
+locale-gen "$LANG"
+
+COPY download_and_install_snap.sh .
+
+RUN bash download_and_install_snap.sh 'core20'
+RUN bash download_and_install_snap.sh 'gnome-3-38-2004'
+RUN bash download_and_install_snap.sh 'gnome-3-38-2004-sdk'
+RUN bash download_and_install_snap.sh 'snapcraft'
+
+# Fix Python3 installation: Make sure we use the interpreter from
+# the snapcraft snap:
+RUN unlink /snap/snapcraft/current/usr/bin/python3 && \
+ ln -s /snap/snapcraft/current/usr/bin/python3.* /snap/snapcraft/current/usr/bin/python3 && \
+ echo /snap/snapcraft/current/lib/python3.*/site-packages >> /snap/snapcraft/current/usr/lib/python3/dist-packages/site-packages.pth
+
+# Create a snapcraft runner (TODO: move version detection to the core of snapcraft).
+RUN mkdir -p /snap/bin
+RUN echo "#!/bin/sh" > /snap/bin/snapcraft
+RUN snap_version="$(awk '/^version:/{print $2}' /snap/snapcraft/current/meta/snap.yaml)" && echo "export SNAP_VERSION=\"$snap_version\"" >> /snap/bin/snapcraft
+RUN echo 'exec "$SNAP/usr/bin/python3" "$SNAP/bin/snapcraft" "$@"' >> /snap/bin/snapcraft
+RUN chmod +x /snap/bin/snapcraft
+
+RUN mkdir /scripts/
+WORKDIR /scripts/
+# Copy everything in the docker/firefox-snap folder but the Dockerfile
+#
+# XXX The following pattern is neither a regex nor a glob one. It's
+# documented at https://golang.org/pkg/path/filepath/#Match. There's no
+# way of explicitly filtering out "Dockerfile". If one day, someone needs
+# to add a file starting with "D", then we must revisit the pattern below.
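+# For illustration (assuming the usual contents of this directory): the pattern
+# matches files such as Makefile, policies.json and runme.sh, but skips Dockerfile.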
+COPY [^D]* /scripts/
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/firefox-snap/Makefile b/taskcluster/docker/firefox-snap/Makefile
new file mode 100644
index 0000000000..d71dc70881
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/Makefile
@@ -0,0 +1,12 @@
+DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
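+# Note: with the lazy assignment above, the error only fires when the variable is
+# expanded in a recipe, so overriding it on the command line works, e.g.:
+#   make build DOCKERIO_USERNAME=<your docker.io username>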
+IMAGE_NAME = firefox-snapcraft
+FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
+
+build:
+ docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
+
+push:
+ docker push $(FULL_IMAGE_NAME):latest
+
+pull:
+ docker pull $(FULL_IMAGE_NAME):latest
diff --git a/taskcluster/docker/firefox-snap/download_and_install_snap.sh b/taskcluster/docker/firefox-snap/download_and_install_snap.sh
new file mode 100644
index 0000000000..7def88b2aa
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/download_and_install_snap.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+set -ex
+
+SNAP_NAME="$1"
+SNAP_CHANNEL="${2:-stable}"
+SNAP_INSTALL_LOCATION="${3:-/snap}"
+
+SNAP_METADATA="$(curl --header 'X-Ubuntu-Series: 16' "https://api.snapcraft.io/api/v1/snaps/details/$SNAP_NAME?channel=$SNAP_CHANNEL")"
+
+set +x
+SNAP_SHA512="$(echo "$SNAP_METADATA" | jq '.download_sha512' -r)"
+SNAP_DOWNLOAD_URL="$(echo "$SNAP_METADATA" | jq '.download_url' -r)"
+SNAP_LAST_UPDATED="$(echo "$SNAP_METADATA" | jq '.last_updated' -r)"
+SNAP_REVISION="$(echo "$SNAP_METADATA" | jq '.revision' -r)"
+SNAP_VERSION="$(echo "$SNAP_METADATA" | jq '.version' -r)"
+set -x
+
+echo "Downloading $SNAP_NAME, version $SNAP_VERSION, revision $SNAP_REVISION (last updated: $SNAP_LAST_UPDATED)..."
+curl --location "$SNAP_DOWNLOAD_URL" --output "$SNAP_NAME.snap"
+sha512sum -c <(echo "$SNAP_SHA512 $SNAP_NAME.snap")
+
+mkdir -p "$SNAP_INSTALL_LOCATION/$SNAP_NAME"
+unsquashfs -d "$SNAP_INSTALL_LOCATION/$SNAP_NAME/current" "$SNAP_NAME.snap"
+rm "$SNAP_NAME.snap"
+
+echo "$SNAP_NAME version $SNAP_VERSION has correctly been uploaded and installed." \ No newline at end of file
diff --git a/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py b/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py
new file mode 100644
index 0000000000..07bc007a31
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import sys
+
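+# Expected input: the l10n changesets JSON maps locale codes to metadata, e.g.
+# (illustrative shape only): {"de": {...}, "pt-BR": {...}, "ja-JP-mac": {...}}.
+# Only the keys are used; "ja-JP-mac" is dropped below because it is the
+# macOS-specific Japanese locale and does not apply to Linux builds.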
+l10n_changesets_json_path = sys.argv[1]
+with open(l10n_changesets_json_path) as f:
+ locales = json.load(f).keys()
+linux_locales = [l for l in locales if l != "ja-JP-mac"]
+
+print("\n".join(sorted(linux_locales)))
diff --git a/taskcluster/docker/firefox-snap/firefox.desktop b/taskcluster/docker/firefox-snap/firefox.desktop
new file mode 100644
index 0000000000..73e36e8d83
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/firefox.desktop
@@ -0,0 +1,219 @@
+[Desktop Entry]
+Version=1.0
+Name=Firefox Web Browser
+Name[ar]=متصفح الويب فَيَرفُكْس
+Name[ast]=Restolador web Firefox
+Name[bn]=ফায়ারফক্স ওয়েব ব্রাউজার
+Name[ca]=Navegador web Firefox
+Name[cs]=Firefox Webový prohlížeč
+Name[da]=Firefox - internetbrowser
+Name[el]=Περιηγητής Firefox
+Name[es]=Navegador web Firefox
+Name[et]=Firefoxi veebibrauser
+Name[fa]=مرورگر اینترنتی Firefox
+Name[fi]=Firefox-selain
+Name[fr]=Navigateur Web Firefox
+Name[gl]=Navegador web Firefox
+Name[he]=דפדפן האינטרנט Firefox
+Name[hr]=Firefox web preglednik
+Name[hu]=Firefox webböngésző
+Name[it]=Firefox Browser Web
+Name[ja]=Firefox ウェブ・ブラウザ
+Name[ko]=Firefox 웹 브라우저
+Name[ku]=Geroka torê Firefox
+Name[lt]=Firefox interneto naršyklė
+Name[nb]=Firefox Nettleser
+Name[nl]=Firefox webbrowser
+Name[nn]=Firefox Nettlesar
+Name[no]=Firefox Nettleser
+Name[pl]=Przeglądarka WWW Firefox
+Name[pt]=Firefox Navegador Web
+Name[pt_BR]=Navegador Web Firefox
+Name[ro]=Firefox – Navigator Internet
+Name[ru]=Веб-браузер Firefox
+Name[sk]=Firefox - internetový prehliadač
+Name[sl]=Firefox spletni brskalnik
+Name[sv]=Firefox webbläsare
+Name[tr]=Firefox Web Tarayıcısı
+Name[ug]=Firefox توركۆرگۈ
+Name[uk]=Веб-браузер Firefox
+Name[vi]=Trình duyệt web Firefox
+Name[zh_CN]=Firefox 网络浏览器
+Name[zh_TW]=Firefox 網路瀏覽器
+Comment=Browse the World Wide Web
+Comment[ar]=تصفح الشبكة العنكبوتية العالمية
+Comment[ast]=Restola pela Rede
+Comment[bn]=ইন্টারনেট ব্রাউজ করুন
+Comment[ca]=Navegueu per la web
+Comment[cs]=Prohlížení stránek World Wide Webu
+Comment[da]=Surf på internettet
+Comment[de]=Im Internet surfen
+Comment[el]=Μπορείτε να περιηγηθείτε στο διαδίκτυο (Web)
+Comment[es]=Navegue por la web
+Comment[et]=Lehitse veebi
+Comment[fa]=صفحات شبکه جهانی اینترنت را مرور نمایید
+Comment[fi]=Selaa Internetin WWW-sivuja
+Comment[fr]=Naviguer sur le Web
+Comment[gl]=Navegar pola rede
+Comment[he]=גלישה ברחבי האינטרנט
+Comment[hr]=Pretražite web
+Comment[hu]=A világháló böngészése
+Comment[it]=Esplora il web
+Comment[ja]=ウェブを閲覧します
+Comment[ko]=웹을 돌아 다닙니다
+Comment[ku]=Li torê bigere
+Comment[lt]=Naršykite internete
+Comment[nb]=Surf på nettet
+Comment[nl]=Verken het internet
+Comment[nn]=Surf på nettet
+Comment[no]=Surf på nettet
+Comment[pl]=Przeglądanie stron WWW
+Comment[pt]=Navegue na Internet
+Comment[pt_BR]=Navegue na Internet
+Comment[ro]=Navigați pe Internet
+Comment[ru]=Доступ в Интернет
+Comment[sk]=Prehliadanie internetu
+Comment[sl]=Brskajte po spletu
+Comment[sv]=Surfa på webben
+Comment[tr]=İnternet'te Gezinin
+Comment[ug]=دۇنيادىكى توربەتلەرنى كۆرگىلى بولىدۇ
+Comment[uk]=Перегляд сторінок Інтернету
+Comment[vi]=Để duyệt các trang web
+Comment[zh_CN]=浏览互联网
+Comment[zh_TW]=瀏覽網際網路
+GenericName=Web Browser
+GenericName[ar]=متصفح ويب
+GenericName[ast]=Restolador Web
+GenericName[bn]=ওয়েব ব্রাউজার
+GenericName[ca]=Navegador web
+GenericName[cs]=Webový prohlížeč
+GenericName[da]=Webbrowser
+GenericName[el]=Περιηγητής διαδικτύου
+GenericName[es]=Navegador web
+GenericName[et]=Veebibrauser
+GenericName[fa]=مرورگر اینترنتی
+GenericName[fi]=WWW-selain
+GenericName[fr]=Navigateur Web
+GenericName[gl]=Navegador Web
+GenericName[he]=דפדפן אינטרנט
+GenericName[hr]=Web preglednik
+GenericName[hu]=Webböngésző
+GenericName[it]=Browser web
+GenericName[ja]=ウェブ・ブラウザ
+GenericName[ko]=웹 브라우저
+GenericName[ku]=Geroka torê
+GenericName[lt]=Interneto naršyklė
+GenericName[nb]=Nettleser
+GenericName[nl]=Webbrowser
+GenericName[nn]=Nettlesar
+GenericName[no]=Nettleser
+GenericName[pl]=Przeglądarka WWW
+GenericName[pt]=Navegador Web
+GenericName[pt_BR]=Navegador Web
+GenericName[ro]=Navigator Internet
+GenericName[ru]=Веб-браузер
+GenericName[sk]=Internetový prehliadač
+GenericName[sl]=Spletni brskalnik
+GenericName[sv]=Webbläsare
+GenericName[tr]=Web Tarayıcı
+GenericName[ug]=توركۆرگۈ
+GenericName[uk]=Веб-браузер
+GenericName[vi]=Trình duyệt Web
+GenericName[zh_CN]=网络浏览器
+GenericName[zh_TW]=網路瀏覽器
+Keywords=Internet;WWW;Browser;Web;Explorer
+Keywords[ar]=انترنت;إنترنت;متصفح;ويب;وب
+Keywords[ast]=Internet;WWW;Restolador;Web;Esplorador
+Keywords[ca]=Internet;WWW;Navegador;Web;Explorador;Explorer
+Keywords[cs]=Internet;WWW;Prohlížeč;Web;Explorer
+Keywords[da]=Internet;Internettet;WWW;Browser;Browse;Web;Surf;Nettet
+Keywords[de]=Internet;WWW;Browser;Web;Explorer;Webseite;Site;surfen;online;browsen
+Keywords[el]=Internet;WWW;Browser;Web;Explorer;Διαδίκτυο;Περιηγητής;Firefox;Φιρεφοχ;Ιντερνετ
+Keywords[es]=Explorador;Internet;WWW
+Keywords[fi]=Internet;WWW;Browser;Web;Explorer;selain;Internet-selain;internetselain;verkkoselain;netti;surffaa
+Keywords[fr]=Internet;WWW;Browser;Web;Explorer;Fureteur;Surfer;Navigateur
+Keywords[he]=דפדפן;אינטרנט;רשת;אתרים;אתר;פיירפוקס;מוזילה;
+Keywords[hr]=Internet;WWW;preglednik;Web
+Keywords[hu]=Internet;WWW;Böngésző;Web;Háló;Net;Explorer
+Keywords[it]=Internet;WWW;Browser;Web;Navigatore
+Keywords[is]=Internet;WWW;Vafri;Vefur;Netvafri;Flakk
+Keywords[ja]=Internet;WWW;Web;インターネット;ブラウザ;ウェブ;エクスプローラ
+Keywords[nb]=Internett;WWW;Nettleser;Explorer;Web;Browser;Nettside
+Keywords[nl]=Internet;WWW;Browser;Web;Explorer;Verkenner;Website;Surfen;Online
+Keywords[pt]=Internet;WWW;Browser;Web;Explorador;Navegador
+Keywords[pt_BR]=Internet;WWW;Browser;Web;Explorador;Navegador
+Keywords[ru]=Internet;WWW;Browser;Web;Explorer;интернет;браузер;веб;файрфокс;огнелис
+Keywords[sk]=Internet;WWW;Prehliadač;Web;Explorer
+Keywords[sl]=Internet;WWW;Browser;Web;Explorer;Brskalnik;Splet
+Keywords[tr]=İnternet;WWW;Tarayıcı;Web;Gezgin;Web sitesi;Site;sörf;çevrimiçi;tara
+Keywords[uk]=Internet;WWW;Browser;Web;Explorer;Інтернет;мережа;переглядач;оглядач;браузер;веб;файрфокс;вогнелис;перегляд
+Keywords[vi]=Internet;WWW;Browser;Web;Explorer;Trình duyệt;Trang web
+Keywords[zh_CN]=Internet;WWW;Browser;Web;Explorer;网页;浏览;上网;火狐;Firefox;ff;互联网;网站;
+Keywords[zh_TW]=Internet;WWW;Browser;Web;Explorer;網際網路;網路;瀏覽器;上網;網頁;火狐
+Exec=firefox %u
+Terminal=false
+X-MultipleArgs=false
+Type=Application
+Icon=/browser/chrome/icons/default/default128.png
+Categories=GNOME;GTK;Network;WebBrowser;
+MimeType=text/html;text/xml;application/xhtml+xml;application/xml;application/rss+xml;application/rdf+xml;image/gif;image/jpeg;image/png;x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/ftp;x-scheme-handler/chrome;video/webm;application/x-xpinstall;
+StartupNotify=true
+Actions=NewWindow;NewPrivateWindow;
+
+[Desktop Action NewWindow]
+Name=Open a New Window
+Name[ar]=افتح نافذة جديدة
+Name[ast]=Abrir una ventana nueva
+Name[bn]=Abrir una ventana nueva
+Name[ca]=Obre una finestra nova
+Name[cs]=Otevřít nové okno
+Name[da]=Åbn et nyt vindue
+Name[de]=Ein neues Fenster öffnen
+Name[el]=Άνοιγμα νέου παραθύρου
+Name[es]=Abrir una ventana nueva
+Name[fi]=Avaa uusi ikkuna
+Name[fr]=Ouvrir une nouvelle fenêtre
+Name[gl]=Abrir unha nova xanela
+Name[he]=פתיחת חלון חדש
+Name[hr]=Otvori novi prozor
+Name[hu]=Új ablak nyitása
+Name[it]=Apri una nuova finestra
+Name[ja]=新しいウィンドウを開く
+Name[ko]=새 창 열기
+Name[ku]=Paceyeke nû veke
+Name[lt]=Atverti naują langą
+Name[nb]=Åpne et nytt vindu
+Name[nl]=Nieuw venster openen
+Name[pt]=Abrir nova janela
+Name[pt_BR]=Abrir nova janela
+Name[ro]=Deschide o fereastră nouă
+Name[ru]=Новое окно
+Name[sk]=Otvoriť nové okno
+Name[sl]=Odpri novo okno
+Name[sv]=Öppna ett nytt fönster
+Name[tr]=Yeni pencere aç
+Name[ug]=يېڭى كۆزنەك ئېچىش
+Name[uk]=Відкрити нове вікно
+Name[vi]=Mở cửa sổ mới
+Name[zh_CN]=新建窗口
+Name[zh_TW]=開啟新視窗
+Exec=firefox -new-window
+
+[Desktop Action NewPrivateWindow]
+Name=Open a New Private Window
+Name[ar]=افتح نافذة جديدة للتصفح الخاص
+Name[ca]=Obre una finestra nova en mode d'incògnit
+Name[de]=Ein neues privates Fenster öffnen
+Name[es]=Abrir una ventana privada nueva
+Name[fi]=Avaa uusi yksityinen ikkuna
+Name[fr]=Ouvrir une nouvelle fenêtre de navigation privée
+Name[he]=פתיחת חלון גלישה פרטית חדש
+Name[hu]=Új privát ablak nyitása
+Name[it]=Apri una nuova finestra anonima
+Name[nb]=Åpne et nytt privat vindu
+Name[ru]=Новое приватное окно
+Name[sl]=Odpri novo okno zasebnega brskanja
+Name[tr]=Yeni bir pencere aç
+Name[uk]=Відкрити нове вікно у потайливому режимі
+Name[zh_TW]=開啟新隱私瀏覽視窗
+Exec=firefox -private-window
diff --git a/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in b/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in
new file mode 100644
index 0000000000..18dfc64089
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in
@@ -0,0 +1,92 @@
+name: firefox
+version: @VERSION@-@BUILD_NUMBER@
+summary: Mozilla Firefox web browser
+description: Firefox is a powerful, extensible web browser with support for modern web application technologies.
+confinement: strict
+grade: stable
+base: core20
+compression: lzo
+
+apps:
+ firefox:
+ command: firefox
+ command-chain: [tmpdir]
+ desktop: distribution/firefox.desktop
+ extensions: [gnome-3-38]
+ environment:
+ HOME: "$SNAP_USER_COMMON"
+ GTK_USE_PORTAL: 1
+ slots:
+ - dbus-daemon
+ - mpris
+ plugs:
+ - avahi-observe
+ - browser-sandbox
+ - camera
+ - cups-control
+ - gsettings
+ - hardware-observe
+ - home
+ - joystick
+ - network
+ - network-observe
+ - opengl
+ - pulseaudio
+ - removable-media
+ - screen-inhibit-control
+ - system-packages-doc
+ - u2f-devices
+ - unity7
+ - upower-observe
+
+plugs:
+ browser-sandbox:
+ interface: browser-support
+ allow-sandbox: true
+ etc-firefox-policies:
+ interface: system-files
+ read: [/etc/firefox/policies]
+
+layout:
+ /usr/share/libdrm:
+ bind: $SNAP/gnome-platform/usr/share/libdrm
+
+parts:
+ firefox:
+ plugin: dump
+ source: source
+ stage-packages:
+ - libxt6
+ - libdbus-glib-1-2
+ - libasound2
+ - libpci3
+ - libpulse0
+ - libgl1-mesa-dri
+ - libgl1-mesa-glx
+ - libmirclient9
+ - desktop-file-utils
+ - ffmpeg
+ - libc-bin
+ - locales-all
+ - libcurl3-gnutls
+ prime:
+ - -usr/lib/*/libharfbuzz*
+ - -usr/lib/*/*pango*
+
+ # Find files provided by the base and platform snap and ensure they aren't
+ # duplicated in this snap
+ cleanup:
+ after: [firefox]
+ plugin: nil
+ build-snaps: [core20, gnome-3-38-2004]
+ override-prime: |
+ set -eux
+ for snap in "core20" "gnome-3-38-2004"; do
+ cd "/snap/$snap/current" && find . -type f,l -exec rm -f "$SNAPCRAFT_PRIME/{}" \;
+ done
+
+slots:
+ dbus-daemon:
+ interface: dbus
+ bus: session
+ name: org.mozilla.firefox
diff --git a/taskcluster/docker/firefox-snap/policies.json b/taskcluster/docker/firefox-snap/policies.json
new file mode 100644
index 0000000000..f36622021f
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/policies.json
@@ -0,0 +1,5 @@
+{
+ "policies": {
+ "DisableAppUpdate": true
+ }
+}
diff --git a/taskcluster/docker/firefox-snap/runme.sh b/taskcluster/docker/firefox-snap/runme.sh
new file mode 100755
index 0000000000..308640a8c8
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/runme.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+
+set -xe
+
+# Thunderbird Snap builds will set this to "thunderbird"
+: PRODUCT "${PRODUCT:=firefox}"
+
+# Required env variables
+test "$VERSION"
+test "$BUILD_NUMBER"
+test "$CANDIDATES_DIR"
+test "$L10N_CHANGESETS"
+
+# Optional env variables
+: WORKSPACE "${WORKSPACE:=/home/worker/workspace}"
+: ARTIFACTS_DIR "${ARTIFACTS_DIR:=/home/worker/artifacts}"
+: PUSH_TO_CHANNEL ""
+
+SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+TARGET="target.snap"
+TARGET_FULL_PATH="$ARTIFACTS_DIR/$TARGET"
+SOURCE_DEST="${WORKSPACE}/source"
+
+mkdir -p "$ARTIFACTS_DIR"
+rm -rf "$SOURCE_DEST" && mkdir -p "$SOURCE_DEST"
+
+CURL="curl --location --retry 10 --retry-delay 10"
+
+# Download and extract en-US linux64 binary
+$CURL -o "${WORKSPACE}/${PRODUCT}.tar.bz2" \
+ "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/en-US/${PRODUCT}-${VERSION}.tar.bz2"
+tar -C "$SOURCE_DEST" -xf "${WORKSPACE}/${PRODUCT}.tar.bz2" --strip-components=1
+
+DISTRIBUTION_DIR="$SOURCE_DEST/distribution"
+if [[ "$PRODUCT" == "firefox" ]]; then
+ # Get Ubuntu configuration
+ PARTNER_CONFIG_DIR="$WORKSPACE/partner_config"
+ git clone https://github.com/mozilla-partners/canonical.git "$PARTNER_CONFIG_DIR"
+ mv "$PARTNER_CONFIG_DIR/desktop/ubuntu/distribution" "$DISTRIBUTION_DIR"
+else
+ mkdir -p "$DISTRIBUTION_DIR"
+fi
+
+cp -v "$SCRIPT_DIRECTORY/${PRODUCT}.desktop" "$DISTRIBUTION_DIR"
+
+# Add a group policy file to disable app updates, as those are handled by snapd
+cp -v "$SCRIPT_DIRECTORY/policies.json" "$DISTRIBUTION_DIR"
+
+# Use list of locales to fetch L10N XPIs
+$CURL -o "${WORKSPACE}/l10n_changesets.json" "$L10N_CHANGESETS"
+locales=$(python3 "$SCRIPT_DIRECTORY/extract_locales_from_l10n_json.py" "${WORKSPACE}/l10n_changesets.json")
+
+mkdir -p "$DISTRIBUTION_DIR/extensions"
+for locale in $locales; do
+ $CURL -o "$SOURCE_DEST/distribution/extensions/langpack-${locale}@${PRODUCT}.mozilla.org.xpi" \
+ "$CANDIDATES_DIR/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/xpi/${locale}.xpi"
+done
+
+# In addition to the packages downloaded below, snapcraft fetches deb packages from ubuntu.com
+# when a snap is built. They may bump packages there and remove the old ones. Updating the
+# package database allows snapcraft to find the latest packages.
+# For more context, see bug 1448239.
+apt-get update
+
+# Extract gtk30.mo from Ubuntu language packs
+apt download language-pack-gnome-*-base
+for i in *.deb; do
+ # shellcheck disable=SC2086
+ dpkg-deb --fsys-tarfile $i | tar xv -C "$SOURCE_DEST" --wildcards "./usr/share/locale-langpack/*/LC_MESSAGES/gtk30.mo" || true
+done
+
+# Add wrapper script to set TMPDIR appropriate for the snap
+cp -v "$SCRIPT_DIRECTORY/tmpdir" "$SOURCE_DEST"
+
+# Generate snapcraft manifest
+sed -e "s/@VERSION@/${VERSION}/g" -e "s/@BUILD_NUMBER@/${BUILD_NUMBER}/g" ${PRODUCT}.snapcraft.yaml.in > "${WORKSPACE}/snapcraft.yaml"
+cd "${WORKSPACE}"
+
+# Make sure snapcraft knows we're building amd64, even though we may not be on this arch.
+export SNAP_ARCH='amd64'
+
+snapcraft
+
+mv -- *.snap "$TARGET_FULL_PATH"
+
+cd "$ARTIFACTS_DIR"
+
+# Generate checksums file
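+# Each line of the checksums file is "<hash> <hash type> <size in bytes> <file name>".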
+size=$(stat --printf="%s" "$TARGET_FULL_PATH")
+sha=$(sha512sum "$TARGET_FULL_PATH" | awk '{print $1}')
+echo "$sha sha512 $size $TARGET" > "$TARGET.checksums"
+
+echo "Generating signing manifest"
+hash=$(sha512sum "$TARGET.checksums" | awk '{print $1}')
+
+cat << EOF > signing_manifest.json
+[{"file_to_sign": "$TARGET.checksums", "hash": "$hash"}]
+EOF
+
+# For posterity
+find . -ls
+cat "$TARGET.checksums"
+cat signing_manifest.json
diff --git a/taskcluster/docker/firefox-snap/tmpdir b/taskcluster/docker/firefox-snap/tmpdir
new file mode 100755
index 0000000000..e7e60801fa
--- /dev/null
+++ b/taskcluster/docker/firefox-snap/tmpdir
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Set TMPDIR to be under the user's default Downloads dir
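+# (Presumably so that temporary files Firefox hands off to other applications live
+# somewhere reachable from outside the snap's private /tmp.)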
+export TMPDIR=$(xdg-user-dir DOWNLOAD)/firefox.tmp
+
+exec "$@"
diff --git a/taskcluster/docker/funsize-update-generator/Dockerfile b/taskcluster/docker/funsize-update-generator/Dockerfile
new file mode 100644
index 0000000000..bbd4f518df
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/Dockerfile
@@ -0,0 +1,48 @@
+FROM debian:11
+
+# Required software
+ENV DEBIAN_FRONTEND noninteractive
+RUN apt-get update -q && \
+ apt-get install -yyq --no-install-recommends \
+ bzip2 \
+ ca-certificates \
+ curl \
+ gcc \
+ jq \
+ libdpkg-perl \
+ libgetopt-simple-perl \
+ liblzma-dev \
+ locales \
+ python3 \
+ libpython3-dev \
+ python3-dev \
+ python3-pip \
+ xz-utils
+RUN useradd -d /home/worker -s /bin/bash -m worker
+COPY requirements.txt /
+
+RUN locale-gen en_CA.UTF-8
+ENV LANG en_CA.UTF-8
+ENV LANGUAGE en_CA.UTF-8
+ENV LANG_ALL en_CA.UTF-8
+ENV LC_ALL en_CA.UTF-8
+
+RUN ["pip", "install", "-r", "/requirements.txt"]
+
+# scripts
+RUN mkdir /home/worker/bin
+COPY scripts/* /home/worker/bin/
+
+COPY runme.sh /runme.sh
+RUN chmod 755 /home/worker/bin/* /*.sh
+RUN mkdir /home/worker/keys
+COPY *.pubkey /home/worker/keys/
+
+ENV HOME /home/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+
+USER worker
+
+CMD ["/runme.sh"]
diff --git a/taskcluster/docker/funsize-update-generator/Makefile b/taskcluster/docker/funsize-update-generator/Makefile
new file mode 100644
index 0000000000..6b67f0ed90
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/Makefile
@@ -0,0 +1,9 @@
+IMAGE_NAME = funsize-update-generator
+
+build:
+ docker build -t $(IMAGE_NAME) --no-cache --rm .
+
+update_pubkeys:
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
diff --git a/taskcluster/docker/funsize-update-generator/README b/taskcluster/docker/funsize-update-generator/README
new file mode 100644
index 0000000000..5e9507be71
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/README
@@ -0,0 +1,7 @@
+
+To run this locally for testing/development purposes:
+
+1. Find a funsize-generating task ID
+2. docker run -t -e TASKCLUSTER_ROOT_URL="https://firefox-ci-tc.services.mozilla.com" -e SIGNING_CERT='nightly' -e MAR_CHANNEL_ID='firefox-mozilla-central' -e TASK_ID="${TASK_ID}" -e EXTRA_PARAMS="--arch=x86_64" funsize-update-generator /runme.sh
+
+The TASK_ID should be a recent "partials" Task.
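+
+The Docker image itself can be built locally by running "make build" in this
+directory (see the Makefile), which produces the funsize-update-generator image
+used in the command above.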
diff --git a/taskcluster/docker/funsize-update-generator/dep1.pubkey b/taskcluster/docker/funsize-update-generator/dep1.pubkey
new file mode 100644
index 0000000000..927b2cc947
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/dep1.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA8Y6AS+xwKoXZl0X5qOKr
+0I00xC4UN+IMjA1LIQoZ2GBkiqQF3q8v2nWTFE0+47+3NtP0l8tvsQY+LSYR4Fek
+v2Vx4m/CAMKmWzW6Vtlj80y6rQ04V19l41bZXvCIBW5fm9sAvPgc7CngkcLySNqk
+8vf57cUEpOmbsjSOCmK0j8hh03I1eWogpbAVEchSm1xN2sUJaVTvz5j8BfE6Vm0i
+nN7V0zF+AOxzvntZIpfUqMZbHRiMkGn4l9rjia1Rz0qUc9RNCJkNocyKtQ2N2wnN
+FjHpmK9x2V71cS1JQGhgLegrswPCAWY1lTmiLk9LweqGoVL0rqR4LCkb0VCaeSRe
+6bUEYcU1ZQedE80zGKB3AfoC5br1shYY0xjmyRSCQ8m8WE60HzXhL8wczKrn5yoJ
+iF6BxFwcYsvrWBPgIYVZLcqjODfR/M62o8yIfTC7yBcIdycJ0sWhB47dHAFxv1kc
+wv8Ik9ftvDyupE8kwcl58fNOXz93j7IxMry/ey27NyYpESPOUNcjT8TP26FdGebg
+4iJx0/LaYmaNUdchfBBlaYqGdH6ZGK0OeVxzHstGuG0gebm/igYcpaFxiQzvWijX
+MIAU56s4g+yj7pSzT5/s9r8Gv+YhsNHKm4hnwLZaITV0lLMT5h/OZGseQTPMBnAR
+hK3CIfcqG0I23hdwI29ZuUMCAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/nightly.pubkey b/taskcluster/docker/funsize-update-generator/nightly.pubkey
new file mode 100644
index 0000000000..e51049844c
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/nightly.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAth151NGY8PBzn0bii9Yc
+AjYHZDwP9Lj1c3owG0zLqW2kPcdp86QTAcoYunHGYFFakNG3tooZhzwkMjZ1OrXc
+ERjD6AuVSGIBdsKtKP4vLtMjDUteFN4K2+rveozcnYFZuTWEajGu8uoYsv4QgdEA
+nTBC39j0J33xlfUR+XKuxzhxNrFX+fRFWuLDJrPziMcVA/mzf0gXlhtEsfV0HYyg
+yWpHdIWww+llysD1QOQAHk94Ss8c/4BFXFxlwlLeNlB1ZqLm1LsNy0jUy9EHeO3C
+H6eqmiFEbpdjlrkJdgR1NcTzeY/Qf/nhWH6BAZrSapQycF7OSLU+rFWMQUElSPLc
+NVl7oNAAfSYLTvRjPGi+mJK3wGFQw1EpwQl+elE1oj4+sHvIVpDrLb6btpxfr1cZ
+pR4Di/hkOIymxEDWvtUhOxUXnYbDKQSDcAHKM/xR3sdIAiVtVuL4hyBwlAqkQc2j
+H+SmnCbazgnq5+dN4y5DjoOgbZQ/koE3s3bUzzMeIxaul9v4gMtGROw3PQ3OZcP0
+lgjPRhY+NeTnWMo2nGb4/eS6Cn2qFLfbEQjsj6pJJBNKfvK/gm1jXb3PgXXdf8+d
+2xTPOX8QNpSK7C0w4vYlvSpYZlsx2cznEOV6LDqP0QHUnmd/k1xWRRGiQ7gtT+BV
+Fn0h7JyTGmEdFu6l4OhS8hMCAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/release.pubkey b/taskcluster/docker/funsize-update-generator/release.pubkey
new file mode 100644
index 0000000000..ec1103d828
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/release.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxCHbY+fP3dvaP9XVbmK6
+i4rbqo72INEWgDSYbr/DIYfCSzHC9H8pU8dyjt+Nd8OtoUZtBD1N9fP7SlrvPZSI
+ZSW4k0e9Ky5aV3Uy+ivamSvYszkhqdeP2y7MBu73XHKYONR9PnKa+ovmREwSEI+h
+1e0ebm8zvF7Ndwx0mOeZkDu9SDkDGg4aj2xrJyBBOuGVjuctMZ6l1davANI5xiJ0
+GBEU3tR1gJs1T4vLBis5mEFn9y4kgyw/HrxmRYGnZL4fLb2fTI+pNW0Twu3KWwwi
+LgLkkVrNWiHSk7YWqxjcg5IA3pQETQ17paTHoB5Mnkvuh6MkDXvRG5VgAHZAigr6
+fJMsasOUaBeos/cD1LDQEIObpetlxc0Fiu/lvUts0755otkhI+yv35+wUa6GJrsE
+CsT7c/LaFtQXg06aGXbMLDn0bE/e+nw9KWT/rE1iYXMFkzrqoTeYJ+v7/fD/ywU8
+m8l4CZmXxzd/RogMrM3xl+j4ucAAltDQyL4yLySaIT05w5U8z2zJDEXFvpFDSRfF
+K3kjLwGub7wNwaQDuh/msIUdavu4g+GNikCXAJ8AssLuYatyHoltd2tf+EIIDW3U
+zzLpymnLo3cAz3IPfXyqVB+mcLcpqbHjl3hWms6l1wGtz6S4WqdrWs/KfzS5EyDK
+r63xn1Rg/XFmR57EsFEXAZ8CAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/requirements.in b/taskcluster/docker/funsize-update-generator/requirements.in
new file mode 100644
index 0000000000..3360c71690
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/requirements.in
@@ -0,0 +1,7 @@
+aiohttp
+awscli
+mar
+redo
+requests
+scriptworker
+sh
diff --git a/taskcluster/docker/funsize-update-generator/requirements.txt b/taskcluster/docker/funsize-update-generator/requirements.txt
new file mode 100644
index 0000000000..c4ef0cfd0d
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/requirements.txt
@@ -0,0 +1,663 @@
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
+#
+aiohttp==3.8.3 \
+ --hash=sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8 \
+ --hash=sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142 \
+ --hash=sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18 \
+ --hash=sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34 \
+ --hash=sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a \
+ --hash=sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033 \
+ --hash=sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06 \
+ --hash=sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4 \
+ --hash=sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d \
+ --hash=sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b \
+ --hash=sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc \
+ --hash=sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091 \
+ --hash=sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d \
+ --hash=sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85 \
+ --hash=sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb \
+ --hash=sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937 \
+ --hash=sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf \
+ --hash=sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1 \
+ --hash=sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b \
+ --hash=sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d \
+ --hash=sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269 \
+ --hash=sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da \
+ --hash=sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346 \
+ --hash=sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494 \
+ --hash=sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697 \
+ --hash=sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4 \
+ --hash=sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585 \
+ --hash=sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c \
+ --hash=sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da \
+ --hash=sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad \
+ --hash=sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2 \
+ --hash=sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6 \
+ --hash=sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c \
+ --hash=sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849 \
+ --hash=sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa \
+ --hash=sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b \
+ --hash=sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb \
+ --hash=sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7 \
+ --hash=sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715 \
+ --hash=sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76 \
+ --hash=sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d \
+ --hash=sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276 \
+ --hash=sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6 \
+ --hash=sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37 \
+ --hash=sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb \
+ --hash=sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d \
+ --hash=sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c \
+ --hash=sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446 \
+ --hash=sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008 \
+ --hash=sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342 \
+ --hash=sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d \
+ --hash=sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7 \
+ --hash=sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061 \
+ --hash=sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba \
+ --hash=sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7 \
+ --hash=sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290 \
+ --hash=sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0 \
+ --hash=sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d \
+ --hash=sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8 \
+ --hash=sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f \
+ --hash=sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48 \
+ --hash=sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502 \
+ --hash=sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62 \
+ --hash=sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9 \
+ --hash=sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403 \
+ --hash=sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77 \
+ --hash=sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476 \
+ --hash=sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e \
+ --hash=sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96 \
+ --hash=sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5 \
+ --hash=sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784 \
+ --hash=sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091 \
+ --hash=sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b \
+ --hash=sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97 \
+ --hash=sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a \
+ --hash=sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2 \
+ --hash=sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9 \
+ --hash=sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d \
+ --hash=sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73 \
+ --hash=sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017 \
+ --hash=sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363 \
+ --hash=sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c \
+ --hash=sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d \
+ --hash=sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618 \
+ --hash=sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491 \
+ --hash=sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b \
+ --hash=sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca
+ # via
+ # -r requirements.in
+ # scriptworker
+ # taskcluster
+aiomemoizettl==0.0.3 \
+ --hash=sha256:07a6becac60f6cd2604b9f2b73bcd9a50079a0b7b55e2a4e45b1eec5a3ea9659 \
+ --hash=sha256:0a80d2dc765e545263f515363b6700ec8cf86fa3968b529f56390b28e34f743d
+ # via scriptworker
+aiosignal==1.3.1 \
+ --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \
+ --hash=sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17
+ # via aiohttp
+arrow==1.2.3 \
+ --hash=sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1 \
+ --hash=sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2
+ # via scriptworker
+asn1crypto==1.5.1 \
+ --hash=sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c \
+ --hash=sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67
+ # via mar
+async-timeout==4.0.2 \
+ --hash=sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15 \
+ --hash=sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c
+ # via
+ # aiohttp
+ # taskcluster
+attrs==22.2.0 \
+ --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
+ --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
+ # via
+ # aiohttp
+ # jsonschema
+awscli==1.27.46 \
+ --hash=sha256:276cef3cc89e6c1888eb327d4a2d7702105a49271b1e3e93472bcd9f02beb24c \
+ --hash=sha256:a9b64b091db2edd13a611b4c4ec47fc2e7c67b3efe5ea840466f2138a9634c7b
+ # via -r requirements.in
+botocore==1.29.46 \
+ --hash=sha256:78bf25933e35eb6354a9e80fe156f86dce4d346a92afe364dfce25c17ab0639f \
+ --hash=sha256:dbac2fde265f13beb9191ec3ff63b90b515e9ed63875edc3afbd72c5f585e48b
+ # via
+ # awscli
+ # s3transfer
+certifi==2022.12.7 \
+ --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
+ --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+ # via requests
+cffi==1.15.1 \
+ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
+ --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
+ --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
+ --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
+ --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
+ --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
+ --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
+ --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
+ --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
+ --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
+ --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
+ --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
+ --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
+ --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
+ --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
+ --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
+ --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
+ --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
+ --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
+ --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
+ --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
+ --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
+ --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
+ --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
+ --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
+ --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
+ --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
+ --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
+ --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
+ --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
+ --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
+ --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
+ --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
+ --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
+ --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
+ --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
+ --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
+ --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
+ --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
+ --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
+ --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
+ --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
+ --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
+ --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
+ --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
+ --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
+ --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
+ --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
+ --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
+ --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
+ --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
+ --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
+ --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
+ --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
+ --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
+ --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
+ --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
+ --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
+ --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
+ --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
+ --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
+ --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
+ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
+ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
+ # via cryptography
+charset-normalizer==2.1.1 \
+ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
+ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+ # via
+ # aiohttp
+ # requests
+click==8.1.3 \
+ --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
+ --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
+ # via mar
+colorama==0.4.4 \
+ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \
+ --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2
+ # via awscli
+construct==2.10.68 \
+ --hash=sha256:7b2a3fd8e5f597a5aa1d614c3bd516fa065db01704c72a1efaaeec6ef23d8b45
+ # via mar
+cryptography==39.0.0 \
+ --hash=sha256:1a6915075c6d3a5e1215eab5d99bcec0da26036ff2102a1038401d6ef5bef25b \
+ --hash=sha256:1ee1fd0de9851ff32dbbb9362a4d833b579b4a6cc96883e8e6d2ff2a6bc7104f \
+ --hash=sha256:407cec680e811b4fc829de966f88a7c62a596faa250fc1a4b520a0355b9bc190 \
+ --hash=sha256:50386acb40fbabbceeb2986332f0287f50f29ccf1497bae31cf5c3e7b4f4b34f \
+ --hash=sha256:6f97109336df5c178ee7c9c711b264c502b905c2d2a29ace99ed761533a3460f \
+ --hash=sha256:754978da4d0457e7ca176f58c57b1f9de6556591c19b25b8bcce3c77d314f5eb \
+ --hash=sha256:76c24dd4fd196a80f9f2f5405a778a8ca132f16b10af113474005635fe7e066c \
+ --hash=sha256:7dacfdeee048814563eaaec7c4743c8aea529fe3dd53127313a792f0dadc1773 \
+ --hash=sha256:80ee674c08aaef194bc4627b7f2956e5ba7ef29c3cc3ca488cf15854838a8f72 \
+ --hash=sha256:844ad4d7c3850081dffba91cdd91950038ee4ac525c575509a42d3fc806b83c8 \
+ --hash=sha256:875aea1039d78557c7c6b4db2fe0e9d2413439f4676310a5f269dd342ca7a717 \
+ --hash=sha256:887cbc1ea60786e534b00ba8b04d1095f4272d380ebd5f7a7eb4cc274710fad9 \
+ --hash=sha256:ad04f413436b0781f20c52a661660f1e23bcd89a0e9bb1d6d20822d048cf2856 \
+ --hash=sha256:bae6c7f4a36a25291b619ad064a30a07110a805d08dc89984f4f441f6c1f3f96 \
+ --hash=sha256:c52a1a6f81e738d07f43dab57831c29e57d21c81a942f4602fac7ee21b27f288 \
+ --hash=sha256:e0a05aee6a82d944f9b4edd6a001178787d1546ec7c6223ee9a848a7ade92e39 \
+ --hash=sha256:e324de6972b151f99dc078defe8fb1b0a82c6498e37bff335f5bc6b1e3ab5a1e \
+ --hash=sha256:e5d71c5d5bd5b5c3eebcf7c5c2bb332d62ec68921a8c593bea8c394911a005ce \
+ --hash=sha256:f3ed2d864a2fa1666e749fe52fb8e23d8e06b8012e8bd8147c73797c506e86f1 \
+ --hash=sha256:f671c1bb0d6088e94d61d80c606d65baacc0d374e67bf895148883461cd848de \
+ --hash=sha256:f6c0db08d81ead9576c4d94bbb27aed8d7a430fa27890f39084c2d0e2ec6b0df \
+ --hash=sha256:f964c7dcf7802d133e8dbd1565914fa0194f9d683d82411989889ecd701e8adf \
+ --hash=sha256:fec8b932f51ae245121c4671b4bbc030880f363354b2f0e0bd1366017d891458
+ # via
+ # mar
+ # pyjwt
+ # scriptworker
+dictdiffer==0.9.0 \
+ --hash=sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578 \
+ --hash=sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595
+ # via scriptworker
+docutils==0.16 \
+ --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
+ --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
+ # via awscli
+frozenlist==1.3.3 \
+ --hash=sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c \
+ --hash=sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f \
+ --hash=sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a \
+ --hash=sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784 \
+ --hash=sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27 \
+ --hash=sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d \
+ --hash=sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3 \
+ --hash=sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678 \
+ --hash=sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a \
+ --hash=sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483 \
+ --hash=sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8 \
+ --hash=sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf \
+ --hash=sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99 \
+ --hash=sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c \
+ --hash=sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48 \
+ --hash=sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5 \
+ --hash=sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56 \
+ --hash=sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e \
+ --hash=sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1 \
+ --hash=sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401 \
+ --hash=sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4 \
+ --hash=sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e \
+ --hash=sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649 \
+ --hash=sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a \
+ --hash=sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d \
+ --hash=sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0 \
+ --hash=sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6 \
+ --hash=sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d \
+ --hash=sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b \
+ --hash=sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6 \
+ --hash=sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf \
+ --hash=sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef \
+ --hash=sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7 \
+ --hash=sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842 \
+ --hash=sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba \
+ --hash=sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420 \
+ --hash=sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b \
+ --hash=sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d \
+ --hash=sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332 \
+ --hash=sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936 \
+ --hash=sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816 \
+ --hash=sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91 \
+ --hash=sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420 \
+ --hash=sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448 \
+ --hash=sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411 \
+ --hash=sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4 \
+ --hash=sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32 \
+ --hash=sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b \
+ --hash=sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0 \
+ --hash=sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530 \
+ --hash=sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669 \
+ --hash=sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7 \
+ --hash=sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1 \
+ --hash=sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5 \
+ --hash=sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce \
+ --hash=sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4 \
+ --hash=sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e \
+ --hash=sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2 \
+ --hash=sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d \
+ --hash=sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9 \
+ --hash=sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642 \
+ --hash=sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0 \
+ --hash=sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703 \
+ --hash=sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb \
+ --hash=sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1 \
+ --hash=sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13 \
+ --hash=sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab \
+ --hash=sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38 \
+ --hash=sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb \
+ --hash=sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb \
+ --hash=sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81 \
+ --hash=sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8 \
+ --hash=sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd \
+ --hash=sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4
+ # via
+ # aiohttp
+ # aiosignal
+github3-py==3.2.0 \
+ --hash=sha256:09b72be1497d346b0968cde8360a0d6af79dc206d0149a63cd3ec86c65c377cc \
+ --hash=sha256:a9016e40609c6f5cb9954dd188d08257dafd09c4da8c0e830a033fca00054b0d
+ # via scriptworker
+idna==3.4 \
+ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
+ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+ # via
+ # requests
+ # yarl
+immutabledict==2.2.3 \
+ --hash=sha256:0e1e8a3f2b3ff062daa19795f947e9ec7a58add269d44e34d3ab4319e1343853 \
+ --hash=sha256:a7b078ebcc4a58ddc73b55f808b26e7c8c2d5183fad325615112689e1a63e714
+ # via scriptworker
+jmespath==1.0.1 \
+ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
+ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
+ # via botocore
+json-e==4.5.0 \
+ --hash=sha256:618a94aecc8b8bc7733d6cd0ee7b676e45675566625a38958aa8b30379d9758f \
+ --hash=sha256:e733ce77b4acbbc2c48211057f8cb5af45999e6be4ce0f07585c5580df45826e
+ # via scriptworker
+jsonschema==4.17.3 \
+ --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
+ --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
+ # via scriptworker
+mar==3.2.0 \
+ --hash=sha256:602f64a99fb2db578a3d76c9f1ac3efd29bd5f8f0ae8568759fb205162b017d0 \
+ --hash=sha256:dd032cf72b65c62fb4e58e04a347076fda41f84265b41dd248098cc11e396c6e
+ # via -r requirements.in
+mohawk==1.1.0 \
+ --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \
+ --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723
+ # via taskcluster
+multidict==6.0.4 \
+ --hash=sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9 \
+ --hash=sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8 \
+ --hash=sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03 \
+ --hash=sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710 \
+ --hash=sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161 \
+ --hash=sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664 \
+ --hash=sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569 \
+ --hash=sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067 \
+ --hash=sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313 \
+ --hash=sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706 \
+ --hash=sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2 \
+ --hash=sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636 \
+ --hash=sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49 \
+ --hash=sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93 \
+ --hash=sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603 \
+ --hash=sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0 \
+ --hash=sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60 \
+ --hash=sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4 \
+ --hash=sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e \
+ --hash=sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1 \
+ --hash=sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60 \
+ --hash=sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951 \
+ --hash=sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc \
+ --hash=sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe \
+ --hash=sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95 \
+ --hash=sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d \
+ --hash=sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8 \
+ --hash=sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed \
+ --hash=sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2 \
+ --hash=sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775 \
+ --hash=sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87 \
+ --hash=sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c \
+ --hash=sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2 \
+ --hash=sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98 \
+ --hash=sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3 \
+ --hash=sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe \
+ --hash=sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78 \
+ --hash=sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660 \
+ --hash=sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176 \
+ --hash=sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e \
+ --hash=sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988 \
+ --hash=sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c \
+ --hash=sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c \
+ --hash=sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0 \
+ --hash=sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449 \
+ --hash=sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f \
+ --hash=sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde \
+ --hash=sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5 \
+ --hash=sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d \
+ --hash=sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac \
+ --hash=sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a \
+ --hash=sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9 \
+ --hash=sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca \
+ --hash=sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11 \
+ --hash=sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35 \
+ --hash=sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063 \
+ --hash=sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b \
+ --hash=sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982 \
+ --hash=sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258 \
+ --hash=sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1 \
+ --hash=sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52 \
+ --hash=sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480 \
+ --hash=sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7 \
+ --hash=sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461 \
+ --hash=sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d \
+ --hash=sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc \
+ --hash=sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779 \
+ --hash=sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a \
+ --hash=sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547 \
+ --hash=sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0 \
+ --hash=sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171 \
+ --hash=sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf \
+ --hash=sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d \
+ --hash=sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba
+ # via
+ # aiohttp
+ # yarl
+pyasn1==0.4.8 \
+ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
+ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
+ # via rsa
+pycparser==2.21 \
+ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
+ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
+ # via cffi
+pyjwt[crypto]==2.6.0 \
+ --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
+ --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
+ # via github3-py
+pyrsistent==0.19.3 \
+ --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \
+ --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \
+ --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \
+ --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \
+ --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \
+ --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \
+ --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \
+ --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \
+ --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \
+ --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \
+ --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \
+ --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \
+ --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \
+ --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \
+ --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \
+ --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \
+ --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \
+ --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \
+ --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \
+ --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \
+ --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \
+ --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \
+ --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \
+ --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \
+ --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \
+ --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \
+ --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c
+ # via jsonschema
+python-dateutil==2.8.2 \
+ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
+ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
+ # via
+ # arrow
+ # botocore
+ # github3-py
+ # taskcluster
+pyyaml==5.4.1 \
+ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
+ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
+ --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
+ --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
+ --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
+ --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
+ --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
+ --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
+ --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
+ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
+ --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \
+ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
+ --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
+ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
+ --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
+ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
+ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
+ --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
+ --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
+ --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
+ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
+ --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
+ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
+ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
+ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
+ --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
+ --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
+ --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
+ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
+ # via
+ # awscli
+ # scriptworker
+redo==2.0.4 \
+ --hash=sha256:81066955041c853b0e6491eb65a0877dce45131c4cfa3d42d923fc2aa8f7a043 \
+ --hash=sha256:c76e4c23ab2f8840261736a851323cd98493710e7a9d36a1058535dca501f293
+ # via -r requirements.in
+requests==2.28.1 \
+ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
+ --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+ # via
+ # -r requirements.in
+ # github3-py
+ # taskcluster
+rsa==4.7.2 \
+ --hash=sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2 \
+ --hash=sha256:9d689e6ca1b3038bc82bf8d23e944b6b6037bc02301a574935b2dd946e0353b9
+ # via awscli
+s3transfer==0.6.0 \
+ --hash=sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd \
+ --hash=sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947
+ # via awscli
+scriptworker==46.0.0 \
+ --hash=sha256:94efa759e539f36079abad40db074e9b3c1945602716f7facdd82fe5b53aa6a9 \
+ --hash=sha256:e0b0b88c9d722c06d7a5bf19eab4f6f282613b36d171ba950b5501d8ad198207
+ # via -r requirements.in
+sh==1.14.3 \
+ --hash=sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7
+ # via -r requirements.in
+six==1.16.0 \
+ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+ # via
+ # mar
+ # mohawk
+ # python-dateutil
+slugid==2.0.0 \
+ --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
+ --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
+ # via taskcluster
+taskcluster==46.1.1 \
+ --hash=sha256:1f49339b725a465d2ef4c9fb8bbe5ecaaf449707e5353fd6a5a453c776d057b6 \
+ --hash=sha256:222c2511c9dfca4cc70ee64082c2b940e2e8513ae9df3adb7afedacac4ebc97d
+ # via scriptworker
+taskcluster-urls==13.0.1 \
+ --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
+ --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
+ --hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b
+ # via taskcluster
+uritemplate==4.1.1 \
+ --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
+ --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
+ # via github3-py
+urllib3==1.26.13 \
+ --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \
+ --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8
+ # via
+ # botocore
+ # requests
+yarl==1.8.2 \
+ --hash=sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87 \
+ --hash=sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89 \
+ --hash=sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a \
+ --hash=sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08 \
+ --hash=sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996 \
+ --hash=sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077 \
+ --hash=sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901 \
+ --hash=sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e \
+ --hash=sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee \
+ --hash=sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574 \
+ --hash=sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165 \
+ --hash=sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634 \
+ --hash=sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229 \
+ --hash=sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b \
+ --hash=sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f \
+ --hash=sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7 \
+ --hash=sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf \
+ --hash=sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89 \
+ --hash=sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0 \
+ --hash=sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1 \
+ --hash=sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe \
+ --hash=sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf \
+ --hash=sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76 \
+ --hash=sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951 \
+ --hash=sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863 \
+ --hash=sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06 \
+ --hash=sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562 \
+ --hash=sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6 \
+ --hash=sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c \
+ --hash=sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e \
+ --hash=sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1 \
+ --hash=sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3 \
+ --hash=sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3 \
+ --hash=sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778 \
+ --hash=sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8 \
+ --hash=sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2 \
+ --hash=sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b \
+ --hash=sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d \
+ --hash=sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f \
+ --hash=sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c \
+ --hash=sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581 \
+ --hash=sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918 \
+ --hash=sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c \
+ --hash=sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e \
+ --hash=sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220 \
+ --hash=sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37 \
+ --hash=sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739 \
+ --hash=sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77 \
+ --hash=sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6 \
+ --hash=sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42 \
+ --hash=sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946 \
+ --hash=sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5 \
+ --hash=sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d \
+ --hash=sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146 \
+ --hash=sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a \
+ --hash=sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83 \
+ --hash=sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef \
+ --hash=sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80 \
+ --hash=sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588 \
+ --hash=sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5 \
+ --hash=sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2 \
+ --hash=sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef \
+ --hash=sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826 \
+ --hash=sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05 \
+ --hash=sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516 \
+ --hash=sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0 \
+ --hash=sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4 \
+ --hash=sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2 \
+ --hash=sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0 \
+ --hash=sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd \
+ --hash=sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8 \
+ --hash=sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b \
+ --hash=sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1 \
+ --hash=sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c
+ # via aiohttp
diff --git a/taskcluster/docker/funsize-update-generator/runme.sh b/taskcluster/docker/funsize-update-generator/runme.sh
new file mode 100644
index 0000000000..62f888b995
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/runme.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+set -xe
+
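+# Fail fast (set -e is active) if the required environment variables are unset.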
+test "$TASK_ID"
+test "$SIGNING_CERT"
+
+ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+# Strip trailing / if present
+TASKCLUSTER_ROOT_URL="${TASKCLUSTER_ROOT_URL%/}"
+export TASKCLUSTER_ROOT_URL
+
+# Duplicate the functionality of taskcluster-lib-urls, but in bash.
+queue_base="${TASKCLUSTER_ROOT_URL%/}/api/queue/v1"
+
+curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+
+# auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/
+# -> bucket of tc-gp-private-1d-us-east-1, path of releng/mbsdiff-cache/
+# Trailing slash is important, due to prefix permissions in S3.
+S3_BUCKET_AND_PATH=$(jq -r '.scopes[] | select(contains ("auth:aws-s3"))' /home/worker/task.json | awk -F: '{print $4}')
+
+# Will be empty if there's no scope for AWS S3.
+if [ -n "${S3_BUCKET_AND_PATH}" ] && getent hosts taskcluster
+then
+  # Split into bucket and path prefix; fail early if either part is empty.
+ S3_PATH=${S3_BUCKET_AND_PATH#*/}
+ AWS_BUCKET_NAME=${S3_BUCKET_AND_PATH%/${S3_PATH}*}
+ test "${S3_PATH}"
+ test "${AWS_BUCKET_NAME}"
+
+ set +x # Don't echo these.
+ secret_url="${TASKCLUSTER_PROXY_URL}/api/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
+ AUTH=$(curl "${secret_url}")
+ AWS_ACCESS_KEY_ID=$(echo "${AUTH}" | jq -r '.credentials.accessKeyId')
+ AWS_SECRET_ACCESS_KEY=$(echo "${AUTH}" | jq -r '.credentials.secretAccessKey')
+ AWS_SESSION_TOKEN=$(echo "${AUTH}" | jq -r '.credentials.sessionToken')
+ export AWS_ACCESS_KEY_ID
+ export AWS_SECRET_ACCESS_KEY
+ export AWS_SESSION_TOKEN
+ AUTH=
+
+ if [ -n "$AWS_ACCESS_KEY_ID" ] && [ -n "$AWS_SECRET_ACCESS_KEY" ]; then
+ # Pass the full bucket/path prefix, as the script just appends local files.
+ export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -S ${S3_BUCKET_AND_PATH}"
+ fi
+ set -x
+else
+ # disable caching
+ export MBSDIFF_HOOK=
+fi
+
+# EXTRA_PARAMS is optional
+# shellcheck disable=SC2086
+python3 /home/worker/bin/funsize.py \
+ --artifacts-dir "$ARTIFACTS_DIR" \
+ --task-definition /home/worker/task.json \
+ --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \
+ $EXTRA_PARAMS
diff --git a/taskcluster/docker/funsize-update-generator/scripts/funsize.py b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
new file mode 100644
index 0000000000..84fd2fbd0b
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -0,0 +1,471 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import asyncio
+import configparser
+import json
+import logging
+import os
+import shutil
+import tempfile
+import time
+from contextlib import AsyncExitStack
+from distutils.util import strtobool
+from pathlib import Path
+
+import aiohttp
+from mardor.reader import MarReader
+from mardor.signing import get_keysize
+from scriptworker.utils import get_hash, retry_async
+
+log = logging.getLogger(__name__)
+
+
+ROOT_URL = os.environ.get(
+ "TASKCLUSTER_ROOT_URL", "https://firefox-ci-tc.services.mozilla.com"
+)
+QUEUE_PREFIX = f"{ROOT_URL}/api/queue/"
+ALLOWED_URL_PREFIXES = (
+ "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
+ "http://download.cdn.mozilla.net/pub/firefox/nightly/",
+ "http://ftp.mozilla.org/",
+ "http://download.mozilla.org/",
+ "https://archive.mozilla.org/",
+ "http://archive.mozilla.org/",
+ QUEUE_PREFIX,
+)
+STAGING_URL_PREFIXES = (
+ "http://ftp.stage.mozaws.net/",
+ "https://ftp.stage.mozaws.net/",
+)
+
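+# Per-architecture BCJ (branch/call/jump) filter flags, exported to
+# make_incremental_update.sh via the BCJ_OPTIONS environment variable.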
+BCJ_OPTIONS = {
+ "x86": ["--x86"],
+ "x86_64": ["--x86"],
+ "aarch64": [],
+ # macOS Universal Builds
+ "macos-x86_64-aarch64": [],
+}
+
+
+def verify_signature(mar, cert):
+ log.info("Checking %s signature", mar)
+ with open(mar, "rb") as mar_fh:
+ m = MarReader(mar_fh)
+ if not m.verify(verify_key=cert):
+            raise ValueError(
+                "MAR Signature invalid: {} ({}) against {}".format(
+                    mar, m.signature_type, cert
+                )
+            )
+
+
+def process_arguments():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--artifacts-dir", required=True)
+ parser.add_argument("--signing-cert", type=argparse.FileType("rb"), required=True)
+ parser.add_argument("--task-definition", required=True, type=argparse.FileType("r"))
+ parser.add_argument(
+ "--allow-staging-prefixes",
+ action="store_true",
+ default=strtobool(os.environ.get("FUNSIZE_ALLOW_STAGING_PREFIXES", "false")),
+ help="Allow files from staging buckets.",
+ )
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ dest="log_level",
+ action="store_const",
+ const=logging.INFO,
+ default=logging.DEBUG,
+ )
+ parser.add_argument(
+ "--arch",
+ type=str,
+ required=True,
+ choices=BCJ_OPTIONS.keys(),
+ help="The archtecture you are building.",
+ )
+ return parser.parse_args()
+
+
+def validate_mar_channel_id(mar, channel_ids):
+ log.info("Checking %s for MAR_CHANNEL_ID %s", mar, channel_ids)
+ # We may get a string with a list representation, or a single entry string.
+ channel_ids = set(channel_ids.split(","))
+
+    with open(mar, "rb") as mar_fh:
+        product_info = MarReader(mar_fh).productinfo
+ if not isinstance(product_info, tuple):
+ raise ValueError(
+ "Malformed product information in mar: {}".format(product_info)
+ )
+
+ found_channel_ids = set(product_info[1].split(","))
+
+ if not found_channel_ids.issubset(channel_ids):
+ raise ValueError(
+ "MAR_CHANNEL_ID mismatch, {} not in {}".format(product_info[1], channel_ids)
+ )
+
+ log.info("%s channel %s in %s", mar, product_info[1], channel_ids)
+
+
+async def retry_download(*args, semaphore=None, **kwargs): # noqa: E999
+ """Retry download() calls."""
+ async with AsyncExitStack() as stack:
+ if semaphore:
+ await stack.enter_async_context(semaphore)
+ await retry_async(
+ download,
+ retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
+ args=args,
+ kwargs=kwargs,
+ )
+
+
+def verify_allowed_url(mar, allowed_url_prefixes):
+ if not any(mar.startswith(prefix) for prefix in allowed_url_prefixes):
+ raise ValueError(
+ "{mar} is not in allowed URL prefixes: {p}".format(
+ mar=mar, p=allowed_url_prefixes
+ )
+ )
+
+
+async def download(url, dest, mode=None): # noqa: E999
+ log.info("Downloading %s to %s", url, dest)
+ chunk_size = 4096
+ bytes_downloaded = 0
+ async with aiohttp.ClientSession(raise_for_status=True) as session:
+ start = time.time()
+ async with session.get(url, timeout=120) as resp:
+ # Additional early logging for download timeouts.
+ log.debug("Fetching from url %s", resp.url)
+ for history in resp.history:
+ log.debug("Redirection history: %s", history.url)
+ log.debug("Headers for %s: %s", resp.url, resp.headers)
+ if "Content-Length" in resp.headers:
+ log.debug(
+ "Content-Length expected for %s: %s",
+ url,
+ resp.headers["Content-Length"],
+ )
+ log_interval = chunk_size * 1024
+ with open(dest, "wb") as fd:
+ while True:
+ chunk = await resp.content.read(chunk_size)
+ if not chunk:
+ break
+ fd.write(chunk)
+ bytes_downloaded += len(chunk)
+ log_interval -= len(chunk)
+ if log_interval <= 0:
+ log.debug("Bytes downloaded for %s: %d", url, bytes_downloaded)
+ log_interval = chunk_size * 1024
+ end = time.time()
+ log.info(
+ "Downloaded %s, %s bytes in %s seconds: sha256:%s",
+ url,
+ bytes_downloaded,
+ int(end - start),
+ get_hash(dest, hash_alg="sha256"),
+ )
+ if mode:
+ log.info("chmod %o %s", mode, dest)
+ os.chmod(dest, mode)
+
+
+async def download_buildsystem_bits(partials_config, downloads, tools_dir):
+ """Download external tools needed to make partials."""
+
+ # We're making the assumption that the "to" mar is the same for all,
+ # as that's the way this task is currently used.
+ to_url = extract_download_urls(partials_config, mar_type="to").pop()
+
+ repo = get_option(
+ downloads[to_url]["extracted_path"],
+ filename="platform.ini",
+ section="Build",
+ option="SourceRepository",
+ )
+ revision = get_option(
+ downloads[to_url]["extracted_path"],
+ filename="platform.ini",
+ section="Build",
+ option="SourceStamp",
+ )
+
+ urls = {
+ "make_incremental_update.sh": f"{repo}/raw-file/{revision}/tools/"
+ "update-packaging/make_incremental_update.sh",
+ "common.sh": f"{repo}/raw-file/{revision}/tools/update-packaging/common.sh",
+ "mar": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/"
+ "latest-mozilla-central/mar-tools/linux64/mar",
+ "mbsdiff": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/"
+ "latest-mozilla-central/mar-tools/linux64/mbsdiff",
+ }
+ for filename, url in urls.items():
+ filename = tools_dir / filename
+ await retry_download(url, dest=filename, mode=0o755)
+
+
+def find_file(directory, filename):
+ log.debug("Searching for %s in %s", filename, directory)
+ return next(Path(directory).rglob(filename))
+
+
+def get_option(directory, filename, section, option):
+ log.info("Extracting [%s]: %s from %s/**/%s", section, option, directory, filename)
+ f = find_file(directory, filename)
+ config = configparser.ConfigParser()
+ config.read(f)
+ rv = config.get(section, option)
+ log.info("Found %s", rv)
+ return rv
+
+
+def extract_download_urls(partials_config, mar_type):
+ """Extract a set of urls to download from the task configuration.
+
+ mar_type should be one of "from", "to"
+ """
+ return {definition[f"{mar_type}_mar"] for definition in partials_config}
+
+
+async def download_and_verify_mars(partials_config, allowed_url_prefixes, signing_cert):
+ """Download, check signature, channel ID and unpack MAR files."""
+ # Separate these categories so we can opt to perform checks on only 'to' downloads.
+ from_urls = extract_download_urls(partials_config, mar_type="from")
+ to_urls = extract_download_urls(partials_config, mar_type="to")
+ tasks = list()
+ downloads = dict()
+
+ semaphore = asyncio.Semaphore(2) # Magic 2 to reduce network timeout errors.
+ for url in from_urls.union(to_urls):
+ verify_allowed_url(url, allowed_url_prefixes)
+ downloads[url] = {
+ "download_path": Path(tempfile.mkdtemp()) / Path(url).name,
+ }
+ tasks.append(
+ retry_download(url, downloads[url]["download_path"], semaphore=semaphore)
+ )
+
+ await asyncio.gather(*tasks)
+
+ for url in downloads:
+ # Verify signature, but not from an artifact as we don't
+ # depend on the signing task
+ if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION") and not url.startswith(
+ QUEUE_PREFIX
+ ):
+ verify_signature(downloads[url]["download_path"], signing_cert)
+
+ # Only validate the target channel ID, as we update from beta->release
+ if url in to_urls:
+ validate_mar_channel_id(
+ downloads[url]["download_path"], os.environ["MAR_CHANNEL_ID"]
+ )
+
+ downloads[url]["extracted_path"] = tempfile.mkdtemp()
+ with open(downloads[url]["download_path"], "rb") as mar_fh:
+ log.info(
+ "Unpacking %s into %s",
+ downloads[url]["download_path"],
+ downloads[url]["extracted_path"],
+ )
+ m = MarReader(mar_fh)
+ m.extract(downloads[url]["extracted_path"])
+
+ return downloads
+
+
+async def run_command(cmd, cwd="/", env=None, label=None, silent=False):
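+    """Run a shell command, streaming its stdout/stderr to the log unless silent=True."""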
+ log.info("Running: %s", cmd)
+ if not env:
+ env = dict()
+ process = await asyncio.create_subprocess_shell(
+ cmd,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE,
+ cwd=cwd,
+ env=env,
+ )
+ if label:
+ label = "{}: ".format(label)
+ else:
+ label = ""
+
+ async def read_output(stream, label, printcmd):
+ while True:
+ line = await stream.readline()
+ if line == b"":
+ break
+ printcmd("%s%s", label, line.decode("utf-8").rstrip())
+
+ if silent:
+ await process.wait()
+ else:
+ await asyncio.gather(
+ read_output(process.stdout, label, log.info),
+ read_output(process.stderr, label, log.warning),
+ )
+ await process.wait()
+
+
+async def generate_partial(from_dir, to_dir, dest_mar, mar_data, tools_dir, arch):
+ log.info("Generating partial %s", dest_mar)
+ env = os.environ.copy()
+ env["LC_ALL"] = "C"
+ env["MAR"] = tools_dir / "mar"
+ env["MBSDIFF"] = tools_dir / "mbsdiff"
+ if arch:
+ env["BCJ_OPTIONS"] = " ".join(BCJ_OPTIONS[arch])
+ env["MOZ_PRODUCT_VERSION"] = mar_data["version"]
+ env["MAR_CHANNEL_ID"] = mar_data["MAR_CHANNEL_ID"]
+ env["BRANCH"] = mar_data["branch"]
+
+ make_incremental_update = tools_dir / "make_incremental_update.sh"
+ cmd = f"{make_incremental_update} {dest_mar} {from_dir} {to_dir}"
+
+ await run_command(cmd, cwd=dest_mar.parent, env=env, label=dest_mar.name)
+ validate_mar_channel_id(dest_mar, mar_data["MAR_CHANNEL_ID"])
+
+
+async def manage_partial(
+ partial_def, artifacts_dir, tools_dir, downloads, semaphore, arch=None
+):
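+    """Generate one partial MAR described by partial_def and return its manifest entry."""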
+ from_url = partial_def["from_mar"]
+ to_url = partial_def["to_mar"]
+ from_path = downloads[from_url]["extracted_path"]
+ to_path = downloads[to_url]["extracted_path"]
+
+ mar_data = {
+ "MAR_CHANNEL_ID": os.environ["MAR_CHANNEL_ID"],
+ "version": get_option(
+ to_path, filename="application.ini", section="App", option="Version"
+ ),
+ "appName": get_option(
+ from_path, filename="application.ini", section="App", option="Name"
+ ),
+ # Use Gecko repo and rev from platform.ini, not application.ini
+ "repo": get_option(
+ to_path, filename="platform.ini", section="Build", option="SourceRepository"
+ ),
+ "revision": get_option(
+ to_path, filename="platform.ini", section="Build", option="SourceStamp"
+ ),
+ "locale": partial_def["locale"],
+ "from_mar": partial_def["from_mar"],
+ "from_size": os.path.getsize(downloads[from_url]["download_path"]),
+ "from_hash": get_hash(downloads[from_url]["download_path"], hash_alg="sha512"),
+ "from_buildid": get_option(
+ from_path, filename="application.ini", section="App", option="BuildID"
+ ),
+ "to_mar": partial_def["to_mar"],
+ "to_size": os.path.getsize(downloads[to_url]["download_path"]),
+ "to_hash": get_hash(downloads[to_url]["download_path"], hash_alg="sha512"),
+ "to_buildid": get_option(
+ to_path, filename="application.ini", section="App", option="BuildID"
+ ),
+ "mar": partial_def["dest_mar"],
+ }
+    # If "branch" is not set explicitly, fall back to the repository name.
+ mar_data["branch"] = partial_def.get("branch", Path(mar_data["repo"]).name)
+
+ for field in (
+ "update_number",
+ "previousVersion",
+ "previousBuildNumber",
+ "toVersion",
+ "toBuildNumber",
+ ):
+ if field in partial_def:
+ mar_data[field] = partial_def[field]
+
+ dest_mar = Path(artifacts_dir) / mar_data["mar"]
+
+ async with semaphore:
+ await generate_partial(from_path, to_path, dest_mar, mar_data, tools_dir, arch)
+
+ mar_data["size"] = os.path.getsize(dest_mar)
+ mar_data["hash"] = get_hash(dest_mar, hash_alg="sha512")
+ return mar_data
+
+
+async def async_main(args, signing_cert):
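+    """Download and verify MARs, fetch tooling, build all partials concurrently, and return the manifest."""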
+ tasks = []
+
+ allowed_url_prefixes = list(ALLOWED_URL_PREFIXES)
+ if args.allow_staging_prefixes:
+ allowed_url_prefixes += STAGING_URL_PREFIXES
+
+ task = json.load(args.task_definition)
+
+ downloads = await download_and_verify_mars(
+ task["extra"]["funsize"]["partials"], allowed_url_prefixes, signing_cert
+ )
+
+ tools_dir = Path(tempfile.mkdtemp())
+ await download_buildsystem_bits(
+ partials_config=task["extra"]["funsize"]["partials"],
+ downloads=downloads,
+ tools_dir=tools_dir,
+ )
+
+ # May want to consider os.cpu_count() if we ever run on osx/win.
+ # sched_getaffinity is the list of cores we can run on, not the total.
+ semaphore = asyncio.Semaphore(len(os.sched_getaffinity(0)))
+ for definition in task["extra"]["funsize"]["partials"]:
+ tasks.append(
+ asyncio.ensure_future(
+ retry_async(
+ manage_partial,
+ retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
+ kwargs=dict(
+ partial_def=definition,
+ artifacts_dir=args.artifacts_dir,
+ tools_dir=tools_dir,
+ arch=args.arch,
+ downloads=downloads,
+ semaphore=semaphore,
+ ),
+ )
+ )
+ )
+ manifest = await asyncio.gather(*tasks)
+
+ for url in downloads:
+ downloads[url]["download_path"].unlink()
+ shutil.rmtree(downloads[url]["extracted_path"])
+ shutil.rmtree(tools_dir)
+
+ return manifest
+
+
+def main():
+ args = process_arguments()
+
+ logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
+ log.setLevel(args.log_level)
+
+ signing_cert = args.signing_cert.read()
+ assert get_keysize(signing_cert) == 4096
+
+ artifacts_dir = Path(args.artifacts_dir)
+ if not artifacts_dir.exists():
+ artifacts_dir.mkdir()
+
+ loop = asyncio.get_event_loop()
+ manifest = loop.run_until_complete(async_main(args, signing_cert))
+ loop.close()
+
+ manifest_file = artifacts_dir / "manifest.json"
+ with open(manifest_file, "w") as fp:
+ json.dump(manifest, fp, indent=2, sort_keys=True)
+
+ log.debug("{}".format(json.dumps(manifest, indent=2, sort_keys=True)))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
new file mode 100755
index 0000000000..965d938247
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
@@ -0,0 +1,157 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool contains helper functions used to enable the funsize partial-update patch cache.
+# Author: Mihai Tabara
+#
+
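+# Globals populated from the command-line options parsed below.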
+HOOK=
+AWS_BUCKET_NAME=
+LOCAL_CACHE_DIR=
+
+# Don't cache files smaller than this, as it's slower with S3
+# Bug 1437473
+CACHE_THRESHOLD=500000
+
+S3_CACHE_HITS=0
+S3_CACHE_MISSES=0
+
+getsha512(){
+ openssl sha512 "${1}" | awk '{print $2}'
+}
+
+print_usage(){
+ echo "$(basename "$0") [-S S3-BUCKET-NAME] [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
+ echo "Script that saves/retrieves from cache presumptive patches as args"
+ echo ""
+ echo "-A SERVER-URL - host where to send the files"
+ echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
+ echo "-g pre hook - tests whether patch already in cache"
+ echo "-u post hook - upload patch to cache for future use"
+ echo ""
+ echo "PATH-FROM-URL : path on disk for source file"
+ echo "PATH-TO-URL : path on disk for destination file"
+ echo "PATH-PATCH : path on disk for patch between source and destination"
+}
+
+upload_patch(){
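+  # $1/$2: the source/destination files whose hashes key the cache; $3: the patch to store.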
+ if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+ then
+ return 0
+ fi
+ sha_from=$(getsha512 "$1")
+ sha_to=$(getsha512 "$2")
+ patch_path="$3"
+ patch_filename="$(basename "$3")"
+
+ # save to local cache first
+ if [ -n "$LOCAL_CACHE_DIR" ]; then
+ local_cmd="mkdir -p "$LOCAL_CACHE_DIR/$sha_from""
+ if $local_cmd >&2; then
+ cp -avf "${patch_path}" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
+ echo "${patch_path} saved on local cache."
+ fi
+ fi
+
+ if [ -n "${AWS_BUCKET_NAME}" ]; then
+ BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${patch_filename}"
+ if aws s3 cp "${patch_path}" "${BUCKET_PATH}"; then
+ echo "${patch_path} saved on s://${AWS_BUCKET_NAME}"
+ return 0
+ fi
+ echo "${patch_path} failed to be uploaded to s3://${AWS_BUCKET_NAME}"
+ return 1
+ fi
+ return 0
+}
+
+get_patch(){
+  # $1 and $2 are the full on-disk paths of the source and destination files.
+ if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+ then
+ return 1
+ fi
+ sha_from=$(getsha512 "$1")
+ sha_to=$(getsha512 "$2")
+ destination_file="$3"
+ s3_filename="$(basename "$3")"
+
+ # Try to retrieve from local cache first.
+ if [ -n "$LOCAL_CACHE_DIR" ]; then
+ if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
+ cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
+ echo "Successful retrieved ${destination_file} from local cache."
+ return 0
+ fi
+ fi
+ # If not in the local cache, we might find it remotely.
+
+ if [ -n "${AWS_BUCKET_NAME}" ]; then
+ BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${s3_filename}"
+ if aws s3 ls "${BUCKET_PATH}"; then
+ ((S3_CACHE_HITS++))
+ echo "s3 cache hit for ${s3_filename} (${S3_CACHE_HITS} total hits)"
+ if aws s3 cp "${BUCKET_PATH}" "${destination_file}"; then
+ echo "Successful retrieved ${destination_file} from s3://${AWS_BUCKET_NAME}"
+ return 0
+ else
+ echo "Failed to retrieve ${destination_file} from s3://${AWS_BUCKET_NAME}"
+ return 1
+ fi
+ # Not found, fall through to default error
+ else
+ ((S3_CACHE_MISSES++))
+ echo "s3 cache miss for ${s3_filename} (${S3_CACHE_MISSES} total misses)"
+ fi
+ fi
+ return 1
+}
+
+OPTIND=1
+
+while getopts ":S:c:gu" option; do
+ case $option in
+ S)
+ # This will probably be bucketname/path/prefix but we can use it either way
+ AWS_BUCKET_NAME="$OPTARG"
+ # Ensure trailing slash is there.
+ if [[ ! $AWS_BUCKET_NAME =~ .*/$ ]]; then
+ AWS_BUCKET_NAME="${AWS_BUCKET_NAME}/"
+ fi
+ ;;
+ c)
+ LOCAL_CACHE_DIR="$OPTARG"
+ ;;
+ g)
+ HOOK="PRE"
+ ;;
+ u)
+ HOOK="POST"
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ print_usage
+ exit 1
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ print_usage
+ exit 1
+ ;;
+ *)
+ echo "Unimplemented option: -$OPTARG" >&2
+ print_usage
+ exit 1
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+if [ "$HOOK" == "PRE" ]; then
+ get_patch "$1" "$2" "$3"
+elif [ "$HOOK" == "POST" ]; then
+ upload_patch "$1" "$2" "$3"
+fi
diff --git a/taskcluster/docker/gdb-test/Dockerfile b/taskcluster/docker/gdb-test/Dockerfile
new file mode 100644
index 0000000000..d00504cdd1
--- /dev/null
+++ b/taskcluster/docker/gdb-test/Dockerfile
@@ -0,0 +1,15 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+RUN apt-get update && \
+    apt-get install -y gdb
diff --git a/taskcluster/docker/github-sync/Dockerfile b/taskcluster/docker/github-sync/Dockerfile
new file mode 100644
index 0000000000..bff131ad7b
--- /dev/null
+++ b/taskcluster/docker/github-sync/Dockerfile
@@ -0,0 +1,14 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Kartikaya Gupta <kgupta@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/.ghsync
+
+ADD prepare.sh /setup/prepare-docker.sh
+ADD requirements.txt /setup/requirements.txt
+RUN /bin/bash /setup/prepare-docker.sh /setup/requirements.txt && rm -R /setup
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/github-sync/prepare.sh b/taskcluster/docker/github-sync/prepare.sh
new file mode 100755
index 0000000000..8efdec2e27
--- /dev/null
+++ b/taskcluster/docker/github-sync/prepare.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+set -o errexit
+set -o nounset
+set -o pipefail
+set -o xtrace
+
+test "$(whoami)" == 'root'
+
+# Install stuff we need
+apt-get -y update
+apt-get install -y \
+ cmake \
+ curl \
+ gcc \
+ git \
+ g++ \
+ libffi-dev \
+ libgit2-dev \
+ libssl-dev \
+ python3 \
+ python3-dev \
+ python3-pip \
+ python3-setuptools
+
+# Python packages
+pip3 install -r "$1"
diff --git a/taskcluster/docker/github-sync/requirements.txt b/taskcluster/docker/github-sync/requirements.txt
new file mode 100644
index 0000000000..13fd42681d
--- /dev/null
+++ b/taskcluster/docker/github-sync/requirements.txt
@@ -0,0 +1,3 @@
+requests == 2.21.0
+pygit2 == 1.11.1
+python-hglib == 2.6.1
diff --git a/taskcluster/docker/image_builder/Dockerfile b/taskcluster/docker/image_builder/Dockerfile
new file mode 100644
index 0000000000..ad38a48c49
--- /dev/null
+++ b/taskcluster/docker/image_builder/Dockerfile
@@ -0,0 +1,99 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
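+# Build a static skopeo binary; it is copied into the final image alongside the kaniko executor.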
+FROM golang:1.14 as skopeo
+
+WORKDIR /go/src/
+RUN ["git", "clone", "--no-checkout", "--depth=1", "--branch=v1.1.1", "https://github.com/containers/skopeo", "."]
+RUN ["git", "checkout", "67abbb3cefbdc876447583d5ea45e76bf441eba7"]
+ENV GO111MODULE=on CGO_ENABLED=0
+RUN ["go", "build", \
+ "-mod=vendor", "-o", "out/skopeo", \
+ "-tags", "exclude_graphdriver_devicemapper exclude_graphdriver_btrfs containers_image_openpgp", \
+ # Set unixTempDirForBigFiles so skopeo will extract in a directory hidden by kaniko
+ # We create the directory below.
+ "-ldflags", " -X github.com/containers/image/v5/internal/tmpdir.unixTempDirForBigFiles=/workspace/tmp -X github.com/containers/image/v5/signature.systemDefaultPolicyPath=/kaniko/containers/policy.json -extldflags \"-static\" -w -s", \
+ "./cmd/skopeo"]
+
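+# Build the kaniko executor, which performs the actual container image builds.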
+FROM golang:1.14 as kaniko
+WORKDIR /go/src/
+RUN ["git", "clone", "--no-checkout", "--depth=1", "--branch=v1.0.0", "https://github.com/GoogleContainerTools/kaniko", "."]
+RUN ["git", "checkout", "146ec6a9cd6f87b4a12e8119ded575d5edca35ac"]
+RUN ["make"]
+
+# Build the `build-image` command as a static binary using musl
+# The setup is loosely based on a stripped down version of
+# https://github.com/emk/rust-musl-builder/blob/master/Dockerfile
+FROM debian:buster as build-image
+
+COPY apt.conf /etc/apt/apt.conf.d/99taskcluster
+
+RUN apt-get update && \
+ apt-get install \
+ build-essential \
+ ca-certificates \
+ curl \
+ musl-dev \
+ musl-tools \
+ && \
+ useradd rust --user-group --create-home --shell /bin/bash
+
+# Run all further code as user `rust`, and create our working directories
+# as the appropriate user.
+USER rust
+
+# Set up our path with all our binary directories, including those for the
+# musl-gcc toolchain and for our Rust toolchain.
+ENV PATH=/home/rust/.cargo/bin:$PATH
+
+# The Rust toolchain to use when building our image. Set by `hooks/build`.
+ENV TOOLCHAIN=1.42.0 \
+ TARGET=x86_64-unknown-linux-musl
+
+# Install our Rust toolchain and the `musl` target. We patch the
+# command-line we pass to the installer so that it won't attempt to
+# interact with the user or fool around with TTYs. We also set the default
+# `--target` to musl so that our users don't need to keep overriding it
+# manually.
+RUN curl https://sh.rustup.rs -sSf | \
+ sh -s -- -y \
+ --profile minimal \
+ --default-toolchain $TOOLCHAIN \
+ --target $TARGET
+
+# Expect our source code to live in /home/rust/src. We'll run the build as
+# user `rust`, which will be uid 1000, gid 1000 outside the container.
+RUN mkdir -p /home/rust/src
+WORKDIR /home/rust/src
+# Add our source code.
+ADD --chown=rust:rust build-image/ ./
+
+# --out-dir is not yet stable
+ENV RUSTC_BOOTSTRAP=1
+# Build our application.
+RUN ["cargo", "build", "--target", "x86_64-unknown-linux-musl", "--out-dir=bin", "--release", "-Zunstable-options"]
+
+FROM scratch as empty
+
+FROM scratch
+
+COPY --from=skopeo /go/src/out/skopeo /kaniko-bootstrap/skopeo
+COPY --from=kaniko /go/src/out/executor /kaniko-bootstrap/executor
+COPY --from=build-image \
+ /home/rust/src/bin/build-image \
+ /kaniko-bootstrap/build-image
+
+ADD https://mkcert.org/generate/ /kaniko-bootstrap/ssl/certs/ca-certificats.crt
+ENV SSL_CERT_DIR=/kaniko/ssl/certs
+
+ADD policy.json /kaniko-bootstrap/containers/policy.json
+
+ENV HOME /root
+ENV USER /root
+WORKDIR /workspace
+
+ENV PATH /usr/local/bin:/kaniko
+
+VOLUME /workspace
+ENTRYPOINT ["/kaniko-bootstrap/build-image"]
diff --git a/taskcluster/docker/image_builder/VERSION b/taskcluster/docker/image_builder/VERSION
new file mode 100644
index 0000000000..fcdb2e109f
--- /dev/null
+++ b/taskcluster/docker/image_builder/VERSION
@@ -0,0 +1 @@
+4.0.0
diff --git a/taskcluster/docker/image_builder/apt.conf b/taskcluster/docker/image_builder/apt.conf
new file mode 100644
index 0000000000..84c0cf10ef
--- /dev/null
+++ b/taskcluster/docker/image_builder/apt.conf
@@ -0,0 +1,5 @@
+quiet "true";
+APT::Get::Assume-Yes "true";
+APT::Install-Recommends "false";
+Acquire::Check-Valid-Until "false";
+Acquire::Retries "5";
diff --git a/taskcluster/docker/image_builder/build-image/Cargo.lock b/taskcluster/docker/image_builder/build-image/Cargo.lock
new file mode 100644
index 0000000000..7e07920591
--- /dev/null
+++ b/taskcluster/docker/image_builder/build-image/Cargo.lock
@@ -0,0 +1,1085 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+[[package]]
+name = "anyhow"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "autocfg"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "base64"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "base64"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "bitflags"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "build-image"
+version = "0.1.0"
+dependencies = [
+ "anyhow 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)",
+ "envy 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "reqwest 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "bytes"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "loom 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "dtoa"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "either"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "envy"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "fs_extra"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "fuchsia-zircon"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "fuchsia-zircon-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "futures-channel"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "futures-io"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "futures-task"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "futures-util"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-io 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-macro 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-task 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro-nested 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "generator"
+version = "0.6.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "glob"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "h2"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "indexmap 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "http"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "http-body"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "httparse"
+version = "1.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "hyper"
+version = "0.13.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-channel 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "iovec"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "itertools"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "jobserver"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "js-sys"
+version = "0.3.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "libc"
+version = "0.2.71"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "log"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "loom"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "generator 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "memchr"
+version = "2.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "mime"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "mio"
+version = "0.6.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "net2"
+version = "0.2.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "hermit-abi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "pin-project"
+version = "0.4.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "pin-project-internal 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "0.4.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "proc-macro-hack"
+version = "0.5.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "proc-macro-nested"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.1.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "reqwest"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base64 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "encoding_rs 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-futures 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki-roots 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winreg 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "ring"
+version = "0.16.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rustls"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "scoped-tls"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "sct"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "serde"
+version = "1.0.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "dtoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "socket2"
+version = "0.3.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "syn"
+version = "1.0.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "time"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tinyvec"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "tokio"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tower-service"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "try-lock"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "tinyvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "untrusted"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "url"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "version_check"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "want"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-macro 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+ "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-macro-support 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-backend 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "web-sys"
+version = "0.3.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "webpki"
+version = "0.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winreg"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "zstd"
+version = "0.5.3+zstd.1.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "2.0.5+zstd.1.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+ "zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "1.4.17+zstd.1.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)",
+ "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"checksum anyhow 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)" = "85bb70cc08ec97ca5450e6eba421deeea5f172c0fc61f78b5357b2a8e8be195f"
+"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
+"checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7"
+"checksum base64 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
+"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+"checksum bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
+"checksum bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "118cf036fbb97d0816e3c34b2d7a1e8cfc60f68fcf63d550ddbe9bd5f59c213b"
+"checksum cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)" = "77c1f1d60091c1b73e2b1f4560ab419204b178e625fa945ded7b660becd2bd46"
+"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+"checksum dtoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b"
+"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
+"checksum encoding_rs 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "e8ac63f94732332f44fe654443c46f6375d1939684c17b0afb6cb56b0456e171"
+"checksum envy 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f938a4abd5b75fe3737902dbc2e79ca142cc1526827a9e40b829a086758531a9"
+"checksum fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
+"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
+"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+"checksum futures-channel 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f366ad74c28cca6ba456d95e6422883cfb4b252a83bed929c83abfdbbf2967d5"
+"checksum futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "59f5fff90fd5d971f936ad674802482ba441b6f09ba5e15fd8b39145582ca399"
+"checksum futures-io 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "de27142b013a8e869c14957e6d2edeef89e97c289e69d042ee3a49acd8b51789"
+"checksum futures-macro 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d0b5a30a4328ab5473878237c447333c093297bded83a4983d10f4deea240d39"
+"checksum futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3f2032893cb734c7a05d85ce0cc8b8c4075278e93b24b66f9de99d6eb0fa8acc"
+"checksum futures-task 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bdb66b5f09e22019b1ab0830f7785bcea8e7a42148683f99214f73f8ec21a626"
+"checksum futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "8764574ff08b701a084482c3c7031349104b07ac897393010494beaa18ce32c6"
+"checksum generator 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "add72f17bb81521258fcc8a7a3245b1e184e916bfbe34f0ea89558f440df5c68"
+"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
+"checksum h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "79b7246d7e4b979c03fa093da39cfb3617a96bbeee6310af63991668d7e843ff"
+"checksum hermit-abi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b9586eedd4ce6b3c498bc3b4dd92fc9f11166aa908a914071953768066c67909"
+"checksum http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9"
+"checksum http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b"
+"checksum httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9"
+"checksum hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a6e7655b9594024ad0ee439f3b5a7299369dc2a3f459b47c696f9ff676f9aa1f"
+"checksum hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08"
+"checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
+"checksum indexmap 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c398b2b113b55809ceb9ee3e753fcbac793f1956663f3c36549c1346015c2afe"
+"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
+"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
+"checksum itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
+"checksum jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
+"checksum js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)" = "c4b9172132a62451e56142bff9afc91c8e4a4500aa5b847da36815b63bfda916"
+"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+"checksum libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)" = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49"
+"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7"
+"checksum loom 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecc775857611e1df29abba5c41355cdf540e7e9d4acfdf0f355eefee82330b7"
+"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
+"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
+"checksum mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
+"checksum mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212"
+"checksum mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)" = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
+"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
+"checksum net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)" = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7"
+"checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
+"checksum once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0b631f7e854af39a1739f401cf34a8a013dfe09eac4fa4dba91e9768bd28168d"
+"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+"checksum pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "12e3a6cdbfe94a5e4572812a0201f8c0ed98c1c452c7b8563ce2276988ef9c17"
+"checksum pin-project-internal 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "6a0ffd45cf79d88737d7cc85bfd5d2894bee1139b356e616fe85dc389c61aaf7"
+"checksum pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "282adbf10f2698a7a77f8e983a74b2d18176c19a7fd32a45446139ae7b02b715"
+"checksum pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+"checksum proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7e0456befd48169b9f13ef0f0ad46d492cf9d2dbb918bcf38e01eed4ce3ec5e4"
+"checksum proc-macro-nested 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a"
+"checksum proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)" = "beae6331a816b1f65d04c45b078fd8e6c93e8071771f41b8163255bbd8d7c8fa"
+"checksum quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
+"checksum reqwest 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)" = "3b82c9238b305f26f53443e3a4bc8528d64b8d0bee408ec949eb7bf5635ec680"
+"checksum ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)" = "952cd6b98c85bbc30efa1ba5783b8abf12fec8b3287ffa52605b9432313e34e4"
+"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+"checksum rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1"
+"checksum ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+"checksum scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "332ffa32bf586782a3efaeb58f127980944bbc8c4d6913a86107ac2a5ab24b28"
+"checksum sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c"
+"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
+"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+"checksum serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3"
+"checksum serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e"
+"checksum serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)" = "3433e879a558dde8b5e8feb2a04899cf34fdde1fafb894687e52105fc1162ac3"
+"checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97"
+"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
+"checksum socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918"
+"checksum spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+"checksum syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)" = "e8d5d96e8cbb005d6959f119f773bfaebb5684296108fb32600c00cde305b2cd"
+"checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
+"checksum tinyvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed"
+"checksum tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d099fa27b9702bed751524694adbe393e18b36b204da91eb1cbbbbb4a5ee2d58"
+"checksum tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "15cb62a0d2770787abc96e99c1cd98fcf17f94959f3af63ca85bdfb203f051b4"
+"checksum tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499"
+"checksum tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860"
+"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
+"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
+"checksum unicode-normalization 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977"
+"checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
+"checksum untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+"checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb"
+"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
+"checksum want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
+"checksum wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "6a634620115e4a229108b71bde263bb4220c483b3f07f5ba514ee8d15064c4c2"
+"checksum wasm-bindgen-backend 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "3e53963b583d18a5aa3aaae4b4c1cb535218246131ba22a71f05b518098571df"
+"checksum wasm-bindgen-futures 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dba48d66049d2a6cc8488702e7259ab7afc9043ad0dc5448444f46f2a453b362"
+"checksum wasm-bindgen-macro 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "3fcfd5ef6eec85623b4c6e844293d4516470d8f19cd72d0d12246017eb9060b8"
+"checksum wasm-bindgen-macro-support 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "9adff9ee0e94b926ca81b57f57f86d5545cdcb1d259e21ec9bdd95b901754c75"
+"checksum wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "7f7b90ea6c632dd06fd765d44542e234d5e63d9bb917ecd64d79778a13bd79ae"
+"checksum web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)" = "863539788676619aac1a23e2df3655e96b32b0e05eb72ca34ba045ad573c625d"
+"checksum webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ab146130f5f790d45f82aeeb09e55a256573373ec64409fc19a6fb82fb1032ae"
+"checksum webpki-roots 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8eff4b7516a57307f9349c64bf34caa34b940b66fed4b2fb3136cb7386e5739"
+"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+"checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+"checksum winreg 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69"
+"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+"checksum zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8"
+"checksum zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055"
+"checksum zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b"
diff --git a/taskcluster/docker/image_builder/build-image/Cargo.toml b/taskcluster/docker/image_builder/build-image/Cargo.toml
new file mode 100644
index 0000000000..be42fe1e34
--- /dev/null
+++ b/taskcluster/docker/image_builder/build-image/Cargo.toml
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+[package]
+name = "build-image"
+version = "0.1.0"
+authors = ["Tom Prince <tom.prince@twistedmatrix.com>"]
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+reqwest = { version = "0.10.4", features = ["rustls-tls", "blocking"], default-features = false }
+zstd = "0.5.1"
+url = "2.1.1"
+anyhow = "1.0.27"
+serde = { version = "1.0.105", features = ["derive"]}
+serde_json = "1.0.50"
+envy = "0.4.1"
+fs_extra = "1.1"
+
+[workspace]
diff --git a/taskcluster/docker/image_builder/build-image/src/config.rs b/taskcluster/docker/image_builder/build-image/src/config.rs
new file mode 100644
index 0000000000..94c1d55a10
--- /dev/null
+++ b/taskcluster/docker/image_builder/build-image/src/config.rs
@@ -0,0 +1,112 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use anyhow::Result;
+use serde::de::Error;
+use serde::Deserialize;
+use std::collections::HashMap;
+
+fn default_image_name() -> String {
+ "mozilla.org/taskgraph/default-image:latest".into()
+}
+fn default_zstd_level() -> i32 {
+ 3
+}
+
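+/// Deserialize a value that is provided as a JSON string (used for DOCKER_BUILD_ARGS).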
+fn from_json<'de, D, T>(deserializer: D) -> Result<T, D::Error>
+where
+ D: serde::de::Deserializer<'de>,
+ T: serde::de::DeserializeOwned,
+{
+ let value: String = serde::Deserialize::deserialize(deserializer)?;
+ serde_json::from_str(&value).map_err(|err| {
+ D::Error::invalid_value(serde::de::Unexpected::Str(&value), &&*err.to_string())
+ })
+}
+
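+/// Build configuration, deserialized from environment variables by envy.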
+#[derive(Deserialize, Debug, PartialEq, Eq)]
+pub struct Config {
+ pub context_task_id: String,
+ pub context_path: String,
+ pub parent_task_id: Option<String>,
+ #[serde(default = "default_image_name")]
+ pub image_name: String,
+ #[serde(default = "default_zstd_level")]
+ pub docker_image_zstd_level: i32,
+ #[serde(default)]
+ pub debug: bool,
+ #[serde(default, deserialize_with = "from_json")]
+ pub docker_build_args: HashMap<String, String>,
+}
+
+impl Config {
+ pub fn from_env() -> Result<Config> {
+ Ok(envy::from_env()?)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use anyhow::Result;
+
+ #[test]
+ fn test() -> Result<()> {
+ let env: Vec<(String, String)> = vec![
+ ("CONTEXT_TASK_ID".into(), "xGRRgzG6QlCCwsFsyuqm0Q".into()),
+ (
+ "CONTEXT_PATH".into(),
+ "public/docker-contexts/image.tar.gz".into(),
+ ),
+ ];
+ let config: super::Config = envy::from_iter(env.into_iter())?;
+ assert_eq!(
+ config,
+ super::Config {
+ context_task_id: "xGRRgzG6QlCCwsFsyuqm0Q".into(),
+ context_path: "public/docker-contexts/image.tar.gz".into(),
+ parent_task_id: None,
+ image_name: "mozilla.org/taskgraph/default-image:latest".into(),
+ docker_image_zstd_level: 3,
+ debug: false,
+ docker_build_args: Default::default()
+ }
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn test_docker_build_args() -> Result<()> {
+ let env: Vec<(String, String)> = vec![
+ ("CONTEXT_TASK_ID".into(), "xGRRgzG6QlCCwsFsyuqm0Q".into()),
+ (
+ "CONTEXT_PATH".into(),
+ "public/docker-contexts/image.tar.gz".into(),
+ ),
+ (
+ "DOCKER_BUILD_ARGS".into(),
+                serde_json::json!({
+ "test": "Value",
+ })
+ .to_string(),
+ ),
+ ];
+ let config: super::Config = envy::from_iter(env.into_iter())?;
+ assert_eq!(
+ config,
+ super::Config {
+ context_task_id: "xGRRgzG6QlCCwsFsyuqm0Q".into(),
+ context_path: "public/docker-contexts/image.tar.gz".into(),
+ parent_task_id: None,
+ image_name: "mozilla.org/taskgraph/default-image:latest".into(),
+ docker_image_zstd_level: 3,
+ debug: false,
+ docker_build_args: [("test".to_string(), "Value".to_string())]
+ .iter()
+ .cloned()
+ .collect(),
+ }
+ );
+ Ok(())
+ }
+}
diff --git a/taskcluster/docker/image_builder/build-image/src/main.rs b/taskcluster/docker/image_builder/build-image/src/main.rs
new file mode 100644
index 0000000000..997617c84e
--- /dev/null
+++ b/taskcluster/docker/image_builder/build-image/src/main.rs
@@ -0,0 +1,182 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#![forbid(unsafe_code)]
+
+use std::collections::HashMap;
+use std::path::Path;
+use std::process::Command;
+
+use anyhow::{ensure, Context, Result};
+use fs_extra::dir::{move_dir, CopyOptions};
+use serde::Deserialize;
+
+mod config;
+mod taskcluster;
+
+use config::Config;
+
+fn log_step(msg: &str) {
+ println!("[build-image] {}", msg);
+}
+
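+/// Inspect a docker-archive tarball with skopeo and return its digest.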
+fn read_image_digest(path: &str) -> Result<String> {
+ let output = Command::new("/kaniko/skopeo")
+ .arg("inspect")
+ .arg(format!("docker-archive:{}", path))
+ .stdout(std::process::Stdio::piped())
+ .spawn()?
+ .wait_with_output()?;
+ ensure!(output.status.success(), "Could not inspect parent image.");
+
+ #[derive(Deserialize, Debug)]
+ #[serde(rename_all = "PascalCase")]
+ struct ImageInfo {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ name: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ tag: Option<String>,
+ digest: String,
+ // ...
+ }
+
+ let image_info: ImageInfo = serde_json::from_slice(&output.stdout)
+        .with_context(|| format!("Could not parse image info from {:?}", path))?;
+ Ok(image_info.digest)
+}
+
+fn download_parent_image(
+ cluster: &taskcluster::TaskCluster,
+ task_id: &str,
+ dest: &str,
+) -> Result<String> {
+ zstd::stream::copy_decode(
+ cluster.stream_artifact(&task_id, "public/image.tar.zst")?,
+ std::fs::File::create(dest)?,
+ )
+ .context("Could not download parent image.")?;
+
+ read_image_digest(dest)
+}
+
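+/// Run the kaniko executor over the downloaded context; --no-push and --tarPath
+/// write the resulting image to a local tarball instead of pushing to a registry.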
+fn build_image(
+ context_path: &str,
+ dest: &str,
+ debug: bool,
+ build_args: HashMap<String, String>,
+) -> Result<()> {
+ let mut command = Command::new("/kaniko/executor");
+ command
+ .stderr(std::process::Stdio::inherit())
+ .args(&["--context", &format!("tar://{}", context_path)])
+ .args(&["--destination", "image"])
+ .args(&["--dockerfile", "Dockerfile"])
+ .arg("--no-push")
+ .args(&["--cache-dir", "/workspace/cache"])
+ .arg("--single-snapshot")
+ // FIXME: Generating reproducible layers currently causes OOM.
+ // .arg("--reproducible")
+ .arg("--whitelist-var-run=false")
+ .args(&["--tarPath", dest]);
+ if debug {
+ command.args(&["-v", "debug"]);
+ }
+ for (key, value) in build_args {
+ command.args(&["--build-arg", &format!("{}={}", key, value)]);
+ }
+ let status = command.status()?;
+ ensure!(status.success(), "Could not build image.");
+ Ok(())
+}
+
+fn repack_image(source: &str, dest: &str, image_name: &str) -> Result<()> {
+ let status = Command::new("/kaniko/skopeo")
+ .arg("copy")
+ .arg(format!("docker-archive:{}", source))
+ .arg(format!("docker-archive:{}:{}", dest, image_name))
+ .stderr(std::process::Stdio::inherit())
+ .status()?;
+ ensure!(status.success(), "Could not repack image.");
+ Ok(())
+}
+
+fn main() -> Result<()> {
+ // Kaniko expects everything to be in /kaniko, so if not running from there, move
+ // everything there.
+ if let Some(path) = std::env::current_exe()?.parent() {
+ if path != Path::new("/kaniko") {
+ let mut options = CopyOptions::new();
+ options.copy_inside = true;
+ move_dir(path, "/kaniko", &options)?;
+ }
+ }
+
+ let config = Config::from_env().context("Could not parse environment variables.")?;
+
+ let cluster = taskcluster::TaskCluster::from_env()?;
+
+ let mut build_args = config.docker_build_args;
+
+ build_args.insert("TASKCLUSTER_ROOT_URL".into(), cluster.root_url());
+
+ log_step("Downloading context.");
+
+ std::io::copy(
+ &mut cluster.stream_artifact(&config.context_task_id, &config.context_path)?,
+ &mut std::fs::File::create("/workspace/context.tar.gz")?,
+ )
+ .context("Could not download image context.")?;
+
+ if let Some(parent_task_id) = config.parent_task_id {
+ log_step("Downloading image.");
+ let digest = download_parent_image(&cluster, &parent_task_id, "/workspace/parent.tar")?;
+
+ log_step(&format!("Parent image digest {}", &digest));
+ std::fs::create_dir_all("/workspace/cache")?;
+ std::fs::rename(
+ "/workspace/parent.tar",
+ format!("/workspace/cache/{}", digest),
+ )?;
+
+ build_args.insert(
+ "DOCKER_IMAGE_PARENT".into(),
+ format!("parent:latest@{}", digest),
+ );
+ }
+
+ log_step("Building image.");
+ build_image(
+ "/workspace/context.tar.gz",
+ "/workspace/image-pre.tar",
+ config.debug,
+ build_args,
+ )?;
+ log_step("Repacking image.");
+ repack_image(
+ "/workspace/image-pre.tar",
+ "/workspace/image.tar",
+ &config.image_name,
+ )?;
+
+ log_step("Compressing image.");
+ compress_file(
+ "/workspace/image.tar",
+ "/workspace/image.tar.zst",
+ config.docker_image_zstd_level,
+ )?;
+
+ Ok(())
+}
+
+fn compress_file(
+ source: impl AsRef<std::path::Path>,
+ dest: impl AsRef<std::path::Path>,
+ zstd_level: i32,
+) -> Result<()> {
+ Ok(zstd::stream::copy_encode(
+ std::fs::File::open(source)?,
+ std::fs::File::create(dest)?,
+ zstd_level,
+ )?)
+}
diff --git a/taskcluster/docker/image_builder/build-image/src/taskcluster.rs b/taskcluster/docker/image_builder/build-image/src/taskcluster.rs
new file mode 100644
index 0000000000..3b39d669f0
--- /dev/null
+++ b/taskcluster/docker/image_builder/build-image/src/taskcluster.rs
@@ -0,0 +1,55 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use anyhow::{Context, Result};
+
+pub struct TaskCluster {
+ root_url: url::Url,
+ client: reqwest::blocking::Client,
+}
+
+impl TaskCluster {
+ pub fn from_env() -> Result<Self> {
+ std::env::var("TASKCLUSTER_ROOT_URL")
+ .context("TASKCLUSTER_ROOT_URL not set.")
+ .and_then(|var| var.parse().context("Couldn't parse TASKCLUSTER_ROOT_URL."))
+ .map(|root_url| TaskCluster {
+ root_url,
+ client: reqwest::blocking::Client::new(),
+ })
+ }
+
+ /// Return the root URL as suitable for passing to other processes.
+ ///
+ /// In particular, any trailing slashes are removed.
+ pub fn root_url(&self) -> String {
+ self.root_url.as_str().trim_end_matches("/").to_string()
+ }
+
+ pub fn task_artifact_url(&self, task_id: &str, path: &str) -> url::Url {
+ let mut url = self.root_url.clone();
+ url.set_path(&format!("api/queue/v1/task/{}/artifacts/{}", task_id, path));
+ url
+ }
+
+ pub fn stream_artifact(&self, task_id: &str, path: &str) -> Result<impl std::io::Read> {
+ let url = self.task_artifact_url(task_id, path);
+ Ok(self.client.get(url).send()?.error_for_status()?)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ #[test]
+ fn test_url() {
+ let cluster = super::TaskCluster {
+ root_url: url::Url::parse("http://taskcluster.example").unwrap(),
+ client: reqwest::blocking::Client::new(),
+ };
+ assert_eq!(
+ cluster.task_artifact_url("QzDLgP4YRwanIvgPt6ClfA","public/docker-contexts/decision.tar.gz"),
+ url::Url::parse("http://taskcluster.example/api/queue/v1/task/QzDLgP4YRwanIvgPt6ClfA/artifacts/public/docker-contexts/decision.tar.gz").unwrap(),
+ );
+ }
+}
diff --git a/taskcluster/docker/image_builder/policy.json b/taskcluster/docker/image_builder/policy.json
new file mode 100644
index 0000000000..c9a9b225cf
--- /dev/null
+++ b/taskcluster/docker/image_builder/policy.json
@@ -0,0 +1,11 @@
+{
+ "default": [{ "type": "reject" }],
+ "transports": {
+ "docker-archive": {
+ "": [{ "type": "insecureAcceptAnything" }]
+ },
+ "dir": {
+ "": [{ "type": "insecureAcceptAnything" }]
+ }
+ }
+}
diff --git a/taskcluster/docker/index-task/.eslintrc.js b/taskcluster/docker/index-task/.eslintrc.js
new file mode 100644
index 0000000000..ff84d9cb69
--- /dev/null
+++ b/taskcluster/docker/index-task/.eslintrc.js
@@ -0,0 +1,11 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+module.exports = {
+ env: {
+ node: true,
+ },
+};
diff --git a/taskcluster/docker/index-task/Dockerfile b/taskcluster/docker/index-task/Dockerfile
new file mode 100644
index 0000000000..493eda4e20
--- /dev/null
+++ b/taskcluster/docker/index-task/Dockerfile
@@ -0,0 +1,11 @@
+FROM node:10-alpine
+
+ENV NODE_ENV production
+RUN mkdir /app
+ADD insert-indexes.js /app/
+ADD package.json /app/
+ADD yarn.lock /app/
+WORKDIR /app
+RUN yarn --frozen-lockfile && yarn cache clean
+
+ENTRYPOINT ["node"]
diff --git a/taskcluster/docker/index-task/README b/taskcluster/docker/index-task/README
new file mode 100644
index 0000000000..9ec00e7897
--- /dev/null
+++ b/taskcluster/docker/index-task/README
@@ -0,0 +1,36 @@
+Index-Image
+===========
+
+This image is designed to be used for indexing other tasks. It takes a task
+definition as follows:
+```js
+{
+ ...,
+ scopes: [
+ 'index:insert-task:my-index.namespace',
+ 'index:insert-task:...',
+ ],
+ payload: {
+ image: '...',
+ env: {
+ TARGET_TASKID: '<taskId-to-be-indexed>',
+ },
+ command: [
+ 'insert-indexes.js',
+ 'my-index.namespace.one',
+ 'my-index.namespace.two',
+ '....',
+ ],
+ features: {
+ taskclusterProxy: true,
+ },
+ maxRunTime: 600,
+ },
+}
+```
+
+As can be seen, the `taskId` to be indexed is given by the environment variable
+`TARGET_TASKID`, and the `command` arguments specify the namespaces that it
+must be indexed under. It is **important** to also include scopes of the form
+`index:insert-task:<...>` for all namespaces `<...>` given as `command`
+arguments.
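+
+For local experimentation the same wiring can be exercised with a plain
+`docker run`. The sketch below is illustrative only: the image tag and all
+values are placeholders. Note that `insert-indexes.js` also requires an
+`INDEX_RANK` environment variable and resolves its root URL from
+`TASKCLUSTER_PROXY_URL` or `TASKCLUSTER_ROOT_URL`.
+```sh
+docker run \
+  -e TARGET_TASKID=QzDLgP4YRwanIvgPt6ClfA \
+  -e INDEX_RANK=0 \
+  -e TASKCLUSTER_ROOT_URL=https://firefox-ci-tc.services.mozilla.com \
+  index-task \
+  insert-indexes.js my-index.namespace.one my-index.namespace.two
+```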
diff --git a/taskcluster/docker/index-task/insert-indexes.js b/taskcluster/docker/index-task/insert-indexes.js
new file mode 100644
index 0000000000..39ff45fd9c
--- /dev/null
+++ b/taskcluster/docker/index-task/insert-indexes.js
@@ -0,0 +1,73 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+let taskcluster = require("taskcluster-client");
+
+// Create instance of index client
+let index = new taskcluster.Index({
+ delayFactor: 750, // Good solid delay for background process
+ retries: 8, // A few extra retries for robustness
+ rootUrl:
+ process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
+});
+
+// Create queue instance for fetching taskId
+let queue = new taskcluster.Queue({
+ delayFactor: 750, // Good solid delay for background process
+ retries: 8, // A few extra retries for robustness
+ rootUrl:
+ process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
+});
+
+// Load input
+let taskId = process.env.TARGET_TASKID;
+let rank = parseInt(process.env.INDEX_RANK, 10);
+let namespaces = process.argv.slice(2);
+
+// Validate input
+if (!taskId) {
+ console.log("Expected target task as environment variable: TARGET_TASKID");
+ process.exit(1);
+}
+
+if (isNaN(rank)) {
+ console.log("Expected index rank as environment variable: INDEX_RANK");
+ process.exit(1);
+}
+
+// Fetch task definition to get expiration and then insert into index
+queue
+ .task(taskId)
+ .then(task => task.expires)
+ .then(expires => {
+ return Promise.all(
+ namespaces.map(namespace => {
+ console.log(
+ "Inserting %s into index (rank %d) under: %s",
+ taskId,
+ rank,
+ namespace
+ );
+ return index.insertTask(namespace, {
+ taskId,
+ rank,
+ data: {},
+ expires,
+ });
+ })
+ );
+ })
+ .then(() => {
+ console.log("indexing successfully completed.");
+ process.exit(0);
+ })
+ .catch(err => {
+ console.log("Error:\n%s", err);
+ if (err.stack) {
+ console.log("Stack:\n%s", err.stack);
+ }
+ console.log("Properties:\n%j", err);
+ throw err;
+ })
+ .catch(() => process.exit(1));
diff --git a/taskcluster/docker/index-task/package.json b/taskcluster/docker/index-task/package.json
new file mode 100644
index 0000000000..14c035123c
--- /dev/null
+++ b/taskcluster/docker/index-task/package.json
@@ -0,0 +1,12 @@
+{
+ "private": true,
+ "scripts": {
+ "start": "node index.js"
+ },
+ "dependencies": {
+ "taskcluster-client": "^12.2.0"
+ },
+ "engines": {
+ "node": "10"
+ }
+}
diff --git a/taskcluster/docker/index-task/yarn.lock b/taskcluster/docker/index-task/yarn.lock
new file mode 100644
index 0000000000..326936d8a6
--- /dev/null
+++ b/taskcluster/docker/index-task/yarn.lock
@@ -0,0 +1,326 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+amqplib@^0.5.1:
+ version "0.5.3"
+ resolved "https://registry.yarnpkg.com/amqplib/-/amqplib-0.5.3.tgz#7ccfc85d12ee7cd3c6dc861bb07f0648ec3d7193"
+ integrity sha512-ZOdUhMxcF+u62rPI+hMtU1NBXSDFQ3eCJJrenamtdQ7YYwh7RZJHOIM1gonVbZ5PyVdYH4xqBPje9OYqk7fnqw==
+ dependencies:
+ bitsyntax "~0.1.0"
+ bluebird "^3.5.2"
+ buffer-more-ints "~1.0.0"
+ readable-stream "1.x >=1.1.9"
+ safe-buffer "~5.1.2"
+ url-parse "~1.4.3"
+
+asap@~2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
+ integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=
+
+asynckit@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+ integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
+
+bitsyntax@~0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/bitsyntax/-/bitsyntax-0.1.0.tgz#b0c59acef03505de5a2ed62a2f763c56ae1d6205"
+ integrity sha512-ikAdCnrloKmFOugAfxWws89/fPc+nw0OOG1IzIE72uSOg/A3cYptKCjSUhDTuj7fhsJtzkzlv7l3b8PzRHLN0Q==
+ dependencies:
+ buffer-more-ints "~1.0.0"
+ debug "~2.6.9"
+ safe-buffer "~5.1.2"
+
+bluebird@^3.5.2:
+ version "3.5.3"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.3.tgz#7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7"
+ integrity sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw==
+
+boom@4.x.x:
+ version "4.3.1"
+ resolved "https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31"
+ integrity sha1-T4owBctKfjiJ90kDD9JbluAdLjE=
+ dependencies:
+ hoek "4.x.x"
+
+boom@5.x.x:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02"
+ integrity sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==
+ dependencies:
+ hoek "4.x.x"
+
+buffer-more-ints@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/buffer-more-ints/-/buffer-more-ints-1.0.0.tgz#ef4f8e2dddbad429ed3828a9c55d44f05c611422"
+ integrity sha512-EMetuGFz5SLsT0QTnXzINh4Ksr+oo4i+UGTXEshiGCQWnsgSs7ZhJ8fzlwQ+OzEMs0MpDAMr1hxnblp5a4vcHg==
+
+combined-stream@^1.0.6:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.7.tgz#2d1d24317afb8abe95d6d2c0b07b57813539d828"
+ integrity sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==
+ dependencies:
+ delayed-stream "~1.0.0"
+
+component-emitter@^1.2.0:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
+ integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=
+
+cookiejar@^2.1.0:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.2.tgz#dd8a235530752f988f9a0844f3fc589e3111125c"
+ integrity sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA==
+
+core-util-is@~1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
+ integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
+
+cryptiles@3.x.x:
+ version "3.1.4"
+ resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.4.tgz#769a68c95612b56faadfcebf57ac86479cbe8322"
+ integrity sha512-8I1sgZHfVwcSOY6mSGpVU3lw/GSIZvusg8dD2+OGehCJpOhQRLNcH0qb9upQnOH4XhgxxFJSg6E2kx95deb1Tw==
+ dependencies:
+ boom "5.x.x"
+
+debug@^3.1.0:
+ version "3.2.6"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
+ integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
+ dependencies:
+ ms "^2.1.1"
+
+debug@~2.6.9:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+delayed-stream@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+ integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
+
+extend@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
+ integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
+
+form-data@^2.3.1:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
+ integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==
+ dependencies:
+ asynckit "^0.4.0"
+ combined-stream "^1.0.6"
+ mime-types "^2.1.12"
+
+formidable@^1.2.0:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.1.tgz#70fb7ca0290ee6ff961090415f4b3df3d2082659"
+ integrity sha512-Fs9VRguL0gqGHkXS5GQiMCr1VhZBxz0JnJs4JmMp/2jL18Fmbzvv7vOFRU+U8TBkHEE/CX1qDXzJplVULgsLeg==
+
+hawk@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038"
+ integrity sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==
+ dependencies:
+ boom "4.x.x"
+ cryptiles "3.x.x"
+ hoek "4.x.x"
+ sntp "2.x.x"
+
+hoek@4.x.x:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb"
+ integrity sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==
+
+inherits@~2.0.1, inherits@~2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
+ integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
+
+isarray@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+ integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
+
+isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
+
+lodash@^4.17.4:
+ version "4.17.11"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
+ integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
+
+methods@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+ integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
+
+mime-db@~1.37.0:
+ version "1.37.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.37.0.tgz#0b6a0ce6fdbe9576e25f1f2d2fde8830dc0ad0d8"
+ integrity sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg==
+
+mime-types@^2.1.12:
+ version "2.1.21"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.21.tgz#28995aa1ecb770742fe6ae7e58f9181c744b3f96"
+ integrity sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==
+ dependencies:
+ mime-db "~1.37.0"
+
+mime@^1.4.1:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+ integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+ integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
+
+ms@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
+ integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
+
+process-nextick-args@~2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
+ integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==
+
+promise@^8.0.1:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/promise/-/promise-8.0.2.tgz#9dcd0672192c589477d56891271bdc27547ae9f0"
+ integrity sha512-EIyzM39FpVOMbqgzEHhxdrEhtOSDOtjMZQ0M6iVfCE+kWNgCkAyOdnuCWqfmflylftfadU6FkiMgHZA2kUzwRw==
+ dependencies:
+ asap "~2.0.6"
+
+qs@^6.5.1:
+ version "6.6.0"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.6.0.tgz#a99c0f69a8d26bf7ef012f871cdabb0aee4424c2"
+ integrity sha512-KIJqT9jQJDQx5h5uAVPimw6yVg2SekOKu959OCtktD3FjzbpvaPr8i4zzg07DOMz+igA4W/aNM7OV8H37pFYfA==
+
+querystringify@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.0.tgz#7ded8dfbf7879dcc60d0a644ac6754b283ad17ef"
+ integrity sha512-sluvZZ1YiTLD5jsqZcDmFyV2EwToyXZBfpoVOmktMmW+VEnhgakFHnasVph65fOjGPTWN0Nw3+XQaSeMayr0kg==
+
+"readable-stream@1.x >=1.1.9":
+ version "1.1.14"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9"
+ integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk=
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.1"
+ isarray "0.0.1"
+ string_decoder "~0.10.x"
+
+readable-stream@^2.3.5:
+ version "2.3.6"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
+ integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
+requires-port@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+ integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=
+
+safe-buffer@~5.1.0, safe-buffer@~5.1.1, safe-buffer@~5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
+slugid@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/slugid/-/slugid-1.1.0.tgz#e09f00899c09f5a7058edc36dd49f046fd50a82a"
+ integrity sha1-4J8AiZwJ9acFjtw23UnwRv1QqCo=
+ dependencies:
+ uuid "^2.0.1"
+
+sntp@2.x.x:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.1.0.tgz#2c6cec14fedc2222739caf9b5c3d85d1cc5a2cc8"
+ integrity sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==
+ dependencies:
+ hoek "4.x.x"
+
+string_decoder@~0.10.x:
+ version "0.10.31"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
+ integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=
+
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
+superagent@~3.8.1:
+ version "3.8.3"
+ resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.8.3.tgz#460ea0dbdb7d5b11bc4f78deba565f86a178e128"
+ integrity sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==
+ dependencies:
+ component-emitter "^1.2.0"
+ cookiejar "^2.1.0"
+ debug "^3.1.0"
+ extend "^3.0.0"
+ form-data "^2.3.1"
+ formidable "^1.2.0"
+ methods "^1.1.1"
+ mime "^1.4.1"
+ qs "^6.5.1"
+ readable-stream "^2.3.5"
+
+taskcluster-client@^12.2.0:
+ version "12.2.0"
+ resolved "https://registry.yarnpkg.com/taskcluster-client/-/taskcluster-client-12.2.0.tgz#423aee3b17566d14f8ad23e4e47532265a74fb89"
+ integrity sha512-2Fu5ICS2663kC2t8ymJYzRDnipj3DsCK//b+H/83RjJvC6cWZ0akKzq0ySvPlNA6ic2UcL4I03bJTCJYBX1dqg==
+ dependencies:
+ amqplib "^0.5.1"
+ debug "^3.1.0"
+ hawk "^6.0.2"
+ lodash "^4.17.4"
+ promise "^8.0.1"
+ slugid "^1.1.0"
+ superagent "~3.8.1"
+ taskcluster-lib-urls "^10.0.0"
+
+taskcluster-lib-urls@^10.0.0:
+ version "10.1.1"
+ resolved "https://registry.yarnpkg.com/taskcluster-lib-urls/-/taskcluster-lib-urls-10.1.1.tgz#67d5b9449b947e5234eafdd15c46267dde29bf74"
+ integrity sha512-tdrK++rCX73FMXk/cXwS6RLTjA3pX8hJlxg1ECLs3L3llCOPMNhQ4wi6lb6yMgHc/s5on/Edj6AlAH7gkxzgPg==
+
+url-parse@~1.4.3:
+ version "1.4.4"
+ resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.4.tgz#cac1556e95faa0303691fec5cf9d5a1bc34648f8"
+ integrity sha512-/92DTTorg4JjktLNLe6GPS2/RvAd/RGr6LuktmWSMLEOa6rjnlrFXNgSbSmkNvCoL2T028A0a1JaJLzRMlFoHg==
+ dependencies:
+ querystringify "^2.0.0"
+ requires-port "^1.0.0"
+
+util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+ integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
+
+uuid@^2.0.1:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a"
+ integrity sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho=
diff --git a/taskcluster/docker/lint/Dockerfile b/taskcluster/docker/lint/Dockerfile
new file mode 100644
index 0000000000..e34d9730d7
--- /dev/null
+++ b/taskcluster/docker/lint/Dockerfile
@@ -0,0 +1,36 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Andrew Halberstadt <ahalberstadt@mozilla.com>
+
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
+
+# We do want to install recommended packages.
+RUN sed -i /APT::Install-Recommends/d /etc/apt/apt.conf.d/99taskcluster
+
+RUN mkdir /build
+# %include python/mozbuild/mozbuild/action/tooltool.py
+ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py
+
+# %include taskcluster/docker/recipes/install-node.sh
+ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /build/install-node.sh
+
+ADD system-setup.sh /tmp/system-setup.sh
+# %include tools/lint/eslint/manifest.tt
+ADD topsrcdir/tools/lint/eslint/manifest.tt /tmp/eslint.tt
+# %include tools/lint/eslint/eslint-plugin-mozilla/manifest.tt
+ADD topsrcdir/tools/lint/eslint/eslint-plugin-mozilla/manifest.tt /tmp/eslint-plugin-mozilla.tt
+# %include tools/lint/spell/codespell_requirements.txt
+ADD topsrcdir/tools/lint/spell/codespell_requirements.txt /tmp/codespell_requirements.txt
+# %include tools/lint/tox/tox_requirements.txt
+ADD topsrcdir/tools/lint/tox/tox_requirements.txt /tmp/tox_requirements.txt
+RUN bash /tmp/system-setup.sh
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
+
+# Set variables normally configured at login by the shell's parent process;
+# these are taken from the GNU su manual.
+ENV LANG en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/lint/system-setup.sh b/taskcluster/docker/lint/system-setup.sh
new file mode 100644
index 0000000000..18da4b7ade
--- /dev/null
+++ b/taskcluster/docker/lint/system-setup.sh
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash
+
+set -ve
+
+test "$(whoami)" == 'root'
+
+mkdir -p /setup
+cd /setup
+
+apt_packages=()
+apt_packages+=('curl')
+apt_packages+=('iproute2')
+apt_packages+=('locales')
+apt_packages+=('graphviz')
+apt_packages+=('python3-pip')
+apt_packages+=('python-is-python3')
+apt_packages+=('shellcheck')
+apt_packages+=('sudo')
+apt_packages+=('wget')
+apt_packages+=('unzip')
+apt_packages+=('tar')
+apt_packages+=('zstd')
+
+apt-get update
+apt-get install "${apt_packages[@]}"
+
+# Without this we get spurious "LC_ALL: cannot change locale (en_US.UTF-8)" errors,
+# and python scripts raise UnicodeEncodeError when trying to print unicode characters.
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
+su -c 'git config --global user.email "worker@mozilla.test"' worker
+su -c 'git config --global user.name "worker"' worker
+
+tooltool_fetch() {
+ cat >manifest.tt
+ /build/tooltool.py fetch
+ rm manifest.tt
+}
+
+cd /build
+
+###
+# ESLint Setup
+###
+
+# install node
+# shellcheck disable=SC1091
+. install-node.sh
+
+npm install -g yarn@1.22.18
+
+/build/tooltool.py fetch -m /tmp/eslint.tt
+mv /build/node_modules /build/node_modules_eslint
+/build/tooltool.py fetch -m /tmp/eslint-plugin-mozilla.tt
+mv /build/node_modules /build/node_modules_eslint-plugin-mozilla
+
+###
+# fzf setup
+###
+
+tooltool_fetch <<EOF
+[
+ {
+ "size": 1161860,
+ "digest": "3246470715e1ddf4c7e5136fdddd2ca269928c2de3074a98233faef189efd88fc9b28ddbe68642a31cf647a97f630941d764187006c5115e6f357d49322ef58d",
+ "algorithm": "sha512",
+ "filename": "fzf-0.20.0-linux_amd64.tgz",
+ "unpack": true
+ }
+]
+EOF
+mv fzf /usr/local/bin
+
+###
+# codespell Setup
+###
+
+cd /setup
+
+pip3 install --require-hashes -r /tmp/codespell_requirements.txt
+
+###
+# tox Setup
+###
+
+cd /setup
+
+pip3 install --require-hashes -r /tmp/tox_requirements.txt
+
+cd /
+rm -rf /setup
diff --git a/taskcluster/docker/partner-repack/Dockerfile b/taskcluster/docker/partner-repack/Dockerfile
new file mode 100644
index 0000000000..aae9d893da
--- /dev/null
+++ b/taskcluster/docker/partner-repack/Dockerfile
@@ -0,0 +1,20 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Ben Hearsum <bhearsum@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+
+RUN dpkg --add-architecture amd64
+
+RUN apt-get update && \
+ apt-get install \
+ bzip2 \
+ curl \
+ git \
+ gzip \
+ openssh-client \
+ python2 \
+ unzip \
+ zip
+
+COPY known_hosts /etc/ssh/ssh_known_hosts
diff --git a/taskcluster/docker/partner-repack/known_hosts b/taskcluster/docker/partner-repack/known_hosts
new file mode 100644
index 0000000000..f4c560e0e7
--- /dev/null
+++ b/taskcluster/docker/partner-repack/known_hosts
@@ -0,0 +1,3 @@
+github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl
+github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=
+github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
diff --git a/taskcluster/docker/periodic-updates/.eslintrc.js b/taskcluster/docker/periodic-updates/.eslintrc.js
new file mode 100644
index 0000000000..9828adaa40
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/.eslintrc.js
@@ -0,0 +1,70 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+module.exports = {
+ globals: {
+ // JS files in this folder are commonly xpcshell scripts where |arguments|
+ // is defined in the global scope.
+ arguments: false,
+ },
+ rules: {
+ // Enforce return statements in callbacks of array methods.
+ "array-callback-return": "error",
+
+ // Verify calls of super() in constructors.
+ "constructor-super": "error",
+
+ // Require default case in switch statements.
+ "default-case": "error",
+
+ // Disallow use of alert(), confirm(), and prompt().
+ "no-alert": "error",
+
+ // Disallow likely erroneous `switch` scoped lexical declarations in
+ // case/default clauses.
+ "no-case-declarations": "error",
+
+ // Disallow use of the console API.
+ "no-console": "error",
+
+ // Disallow constant expressions in conditions (except for loops).
+ "no-constant-condition": ["error", { checkLoops: false }],
+
+ // Disallow extending of native objects.
+ "no-extend-native": "error",
+
+ // Disallow case statement fallthrough without explicit `// falls through`
+ // annotation.
+ "no-fallthrough": "error",
+
+ // No reassigning native JS objects or read only globals.
+ "no-global-assign": "error",
+
+ // Disallow use of assignment in return statement.
+ "no-return-assign": ["error", "always"],
+
+ // Disallow template literal placeholder syntax in regular strings.
+ "no-template-curly-in-string": "error",
+
+ // Disallow use of this/super before calling super() in constructors.
+ "no-this-before-super": "error",
+
+ // Disallow unmodified loop conditions.
+ "no-unmodified-loop-condition": "error",
+
+ // No expressions where a statement is expected
+ "no-unused-expressions": "error",
+
+ // Disallow unnecessary escape usage in strings and regular expressions.
+ "no-useless-escape": "error",
+
+ // Require "use strict" to be defined globally in the script.
+ strict: ["error", "global"],
+
+ // Disallow Yoda conditions.
+ yoda: ["error", "never"],
+ },
+};
diff --git a/taskcluster/docker/periodic-updates/Dockerfile b/taskcluster/docker/periodic-updates/Dockerfile
new file mode 100644
index 0000000000..24cabe02b5
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/Dockerfile
@@ -0,0 +1,11 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Julien Cristau <jcristau@mozilla.com>
+
+ADD setup.sh /setup/setup.sh
+
+RUN cd /setup && ./setup.sh
+
+COPY runme.sh /
+COPY scripts/* /home/worker/scripts/
+
+CMD ["/runme.sh"]
diff --git a/taskcluster/docker/periodic-updates/README.md b/taskcluster/docker/periodic-updates/README.md
new file mode 100644
index 0000000000..d21c0c3656
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/README.md
@@ -0,0 +1,96 @@
+
+==Periodic File Updates==
+
+This Docker image examines the in-tree files for HSTS preload data, HPKP
+pinning, and blocklisting, and produces a diff for each file that needs
+updating.
+
+If given a Conduit API token, it will also use the Arcanist client to submit
+the commits for review.
+
+
+==Quick Start==
+
+```sh
+docker build -t hsts-local --no-cache --rm .
+
+docker run -e DO_HSTS=1 -e DO_HPKP=1 -e PRODUCT="firefox" -e BRANCH="mozilla-central" -e USE_MOZILLA_CENTRAL=1 hsts-local
+```
+
+HSTS checks will only be run if the `DO_HSTS` environment variable is set,
+and likewise for `DO_HPKP` and the HPKP checks. Environment variables are used
+rather than command-line arguments to make constructing Taskcluster tasks
+easier.
+
+To prevent a full build when landing with Phabricator, set the `DONTBUILD`
+environment variable.
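+
+`runme.sh` supports a few more toggles in the same way (`DO_REMOTE_SETTINGS`,
+`DO_SUFFIX_LIST`), so a fuller invocation might look like the following; all
+values are illustrative:
+
+```sh
+docker run \
+  -e DO_HSTS=1 \
+  -e DO_HPKP=1 \
+  -e DO_REMOTE_SETTINGS=1 \
+  -e DO_SUFFIX_LIST=1 \
+  -e DONTBUILD=1 \
+  -e PRODUCT="firefox" \
+  -e BRANCH="mozilla-central" \
+  -e USE_MOZILLA_CENTRAL=1 \
+  hsts-local
+```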
+
+==Background==
+
+These scripts have been moved from
+`https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and
+`security/manager/tools/` in the main repos.
+
+==HSTS Checks==
+
+`scripts/getHSTSPreloadList.js` will examine the current contents of
+nsSTSPreloadList.inc from whichever `BRANCH` is specified, add in the mandatory
+hosts and those from the Chromium source, and check them all to see whether
+their SSL configuration is valid and whether they have the
+Strict-Transport-Security header set with an appropriate `max-age`.
+
+This JavaScript has been modified to use async calls to improve performance.
+
+==HPKP Checks==
+
+`scripts/genHPKPStaticPins.js` will ensure that the list of pinned public keys
+is up to date.
+
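+Both scripts are plain xpcshell scripts, so outside of the container they can
+also be run against a local build, as described in their file headers; the
+paths below are illustrative:
+
+```sh
+# Regenerate the HSTS preload list (writes a new nsSTSPreloadList.inc to the
+# current working directory).
+[path to]/run-mozilla.sh [path to]/xpcshell \
+  [path to]/getHSTSPreloadList.js [absolute path to]/nsSTSPreloadList.inc
+
+# Regenerate the static HPKP pins.
+[path to]/run-mozilla.sh [path to]/xpcshell \
+  [path to]/genHPKPStaticPins.js \
+  [absolute path to]/PreloadedHPKPins.json \
+  [absolute path to]/StaticHPKPins.h
+```
+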
+==Example Taskcluster Task==
+
+https://firefox-ci-tc.services.mozilla.com/tasks/create/
+
+```yaml
+provisionerId: aws-provisioner-v1
+workerType: gecko-1-b-linux
+retries: 0
+created: '2018-02-07T14:45:57.347Z'
+deadline: '2018-02-07T17:45:57.348Z'
+expires: '2019-02-07T17:45:57.348Z'
+scopes: []
+payload:
+ image: srfraser/hsts1
+ maxRunTime: 1800
+ artifacts:
+ public/build/nsSTSPreloadList.diff:
+ path: /home/worker/artifacts/nsSTSPreloadList.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ public/build/StaticHPKPins.h.diff:
+ path: /home/worker/artifacts/StaticHPKPins.h.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ public/build/blocklist.diff:
+ path: /home/worker/artifacts/blocklist.diff
+ expires: '2019-02-07T13:57:35.448Z'
+ type: file
+ env:
+ DO_HSTS: 1
+ DO_HPKP: 1
+ PRODUCT: firefox
+ BRANCH: mozilla-central
+ USE_MOZILLA_CENTRAL: 1
+ REVIEWERS: catlee
+metadata:
+ name: Periodic updates testing
+ description: Produce diffs for HSTS and HPKP in-tree files.
+ owner: sfraser@mozilla.com
+ source: 'https://firefox-ci-tc.services.mozilla.com/tasks/create'
+tags: {}
+extra:
+ treeherder:
+ jobKind: test
+ machine:
+ platform: linux64
+ tier: 1
+ symbol: 'hsts'
+
+```
diff --git a/taskcluster/docker/periodic-updates/runme.sh b/taskcluster/docker/periodic-updates/runme.sh
new file mode 100755
index 0000000000..368963aff6
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/runme.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+set -xe
+
+# Things to be set by task definition.
+# --pinset --hsts --hpkp
+# -b branch
+# --use-mozilla-central
+# -p firefox
+# Artifact directory
+# Artifact names.
+
+
+test "${BRANCH}"
+test "${PRODUCT}"
+
+PARAMS=""
+
+if [ -n "${USE_MOZILLA_CENTRAL}" ]
+then
+ PARAMS="${PARAMS} --use-mozilla-central"
+fi
+
+# TODO change these, so that they're run if the artifact location is specified?
+if [ -n "${DO_HSTS}" ]
+then
+ PARAMS="${PARAMS} --hsts"
+fi
+
+if [ -n "${DO_HPKP}" ]
+then
+ PARAMS="${PARAMS} --hpkp"
+fi
+
+if [ -n "${DO_REMOTE_SETTINGS}" ]
+then
+ PARAMS="${PARAMS} --remote-settings"
+fi
+
+if [ -n "${DO_SUFFIX_LIST}" ]
+then
+ PARAMS="${PARAMS} --suffix-list"
+fi
+
+if [ -n "${DONTBUILD}" ]
+then
+ PARAMS="${PARAMS} -d"
+fi
+
+
+export ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+# Duplicate the functionality of taskcluster-lib-urls, but in bash.
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+
+# Get Arcanist API token
+
+if [ -n "${TASK_ID}" ]
+then
+ curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+ ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
+fi
+if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
+then
+ set +x # Don't echo these
+ secrets_url="${TASKCLUSTER_PROXY_URL}/api/secrets/v1/secret/${ARC_SECRET}"
+ SECRET=$(curl "${secrets_url}")
+ TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
+elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
+then
+ TOKEN="${ARC_TOKEN}"
+fi
+
+if [ -n "${TOKEN}" ]
+then
+ cat >"${HOME}/.arcrc" <<END
+{
+ "hosts": {
+ "https://phabricator.services.mozilla.com/api/": {
+ "token": "${TOKEN}"
+ }
+ }
+}
+END
+ set -x
+ chmod 600 "${HOME}/.arcrc"
+fi
+
+export HGPLAIN=1
+
+# shellcheck disable=SC2086
+/home/worker/scripts/periodic_file_updates.sh -p "${PRODUCT}" -b "${BRANCH}" -a ${PARAMS}
diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
new file mode 100644
index 0000000000..af297374b1
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
@@ -0,0 +1,674 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// How to run this file:
+// 1. [obtain firefox source code]
+// 2. [build/obtain firefox binaries]
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \
+// [path to]/genHPKPStaticpins.js \
+// [absolute path to]/PreloadedHPKPins.json \
+// [absolute path to]/StaticHPKPins.h
+"use strict";
+
+if (arguments.length != 2) {
+ throw new Error(
+ "Usage: genHPKPStaticPins.js " +
+ "<absolute path to PreloadedHPKPins.json> " +
+ "<absolute path to StaticHPKPins.h>"
+ );
+}
+
+var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
+var { FileUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/FileUtils.sys.mjs"
+);
+
+var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService(
+ Ci.nsIX509CertDB
+);
+
+const SHA256_PREFIX = "sha256/";
+const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";
+
+// Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable)
+const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14;
+
+const FILE_HEADER =
+ "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
+ " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
+ " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
+ "\n" +
+ "/*****************************************************************************/\n" +
+ "/* This is an automatically generated file. If you're not */\n" +
+ "/* PublicKeyPinningService.cpp, you shouldn't be #including it. */\n" +
+ "/*****************************************************************************/\n" +
+ "#include <stdint.h>" +
+ "\n";
+
+const DOMAINHEADER =
+ "/* Domainlist */\n" +
+ "struct TransportSecurityPreload {\n" +
+ " // See bug 1338873 about making these fields const.\n" +
+ " const char* mHost;\n" +
+ " bool mIncludeSubdomains;\n" +
+ " bool mTestMode;\n" +
+ " bool mIsMoz;\n" +
+ " int32_t mId;\n" +
+ " const StaticFingerprints* pinset;\n" +
+ "};\n\n";
+
+const PINSETDEF =
+ "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" +
+ "struct StaticFingerprints {\n" +
+ " // See bug 1338873 about making these fields const.\n" +
+ " size_t size;\n" +
+ " const char* const* data;\n" +
+ "};\n\n";
+
+// Command-line arguments
+var gStaticPins = parseJson(arguments[0]);
+
+// Open the output file.
+var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+file.initWithPath(arguments[1]);
+var gFileOutputStream = FileUtils.openSafeFileOutputStream(file);
+
+function writeString(string) {
+ gFileOutputStream.write(string, string.length);
+}
+
+function readFileToString(filename) {
+ let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+ file.initWithPath(filename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ stream.init(file, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+function stripComments(buf) {
+ let lines = buf.split("\n");
+ let entryRegex = /^\s*\/\//;
+ let data = "";
+ for (let i = 0; i < lines.length; ++i) {
+ let match = entryRegex.exec(lines[i]);
+ if (!match) {
+ data = data + lines[i];
+ }
+ }
+ return data;
+}
+
+function download(filename) {
+ let req = new XMLHttpRequest();
+ req.open("GET", filename, false); // doing the request synchronously
+ try {
+ req.send();
+ } catch (e) {
+ throw new Error(`ERROR: problem downloading '${filename}': ${e}`);
+ }
+
+ if (req.status != 200) {
+ throw new Error(
+ "ERROR: problem downloading '" + filename + "': status " + req.status
+ );
+ }
+
+ let resultDecoded;
+ try {
+ resultDecoded = atob(req.responseText);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not decode data as base64 from '" + filename + "': " + e
+ );
+ }
+ return resultDecoded;
+}
+
+function downloadAsJson(filename) {
+ // we have to filter out '//' comments, while not mangling the json
+ let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
+ let data = null;
+ try {
+ data = JSON.parse(result);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not parse data from '" + filename + "': " + e
+ );
+ }
+ return data;
+}
+
+// Returns a Subject Public Key Digest from the given pem, if it exists.
+function getSKDFromPem(pem) {
+ let cert = gCertDB.constructX509FromBase64(pem, pem.length);
+ return cert.sha256SubjectPublicKeyInfoDigest;
+}
+
+/**
+ * Hashes |input| using the SHA-256 algorithm in the following manner:
+ * btoa(sha256(atob(input)))
+ *
+ * @param {string} input Base64 string to decode and return the hash of.
+ * @returns {string} Base64 encoded SHA-256 hash.
+ */
+function sha256Base64(input) {
+ let decodedValue;
+ try {
+ decodedValue = atob(input);
+ } catch (e) {
+ throw new Error(`ERROR: could not decode as base64: '${input}': ${e}`);
+ }
+
+ // Convert |decodedValue| to an array so that it can be hashed by the
+ // nsICryptoHash instance below.
+ // In most cases across the code base, convertToByteArray() of
+ // nsIScriptableUnicodeConverter is used to do this, but the method doesn't
+ // seem to work here.
+ let data = [];
+ for (let i = 0; i < decodedValue.length; i++) {
+ data[i] = decodedValue.charCodeAt(i);
+ }
+
+ let hasher = Cc["@mozilla.org/security/hash;1"].createInstance(
+ Ci.nsICryptoHash
+ );
+ hasher.init(hasher.SHA256);
+ hasher.update(data, data.length);
+
+ // true is passed so that the hasher returns a Base64 encoded string.
+ return hasher.finish(true);
+}
+
+// Downloads the static certs file and tries to map Google Chrome nicknames
+// to Mozilla nicknames, as well as storing any hashes for pins for which we
+// don't have root PEMs. Each entry consists of a line containing the name of
+// the pin followed either by a hash in the format "sha256/" + base64(hash),
+// a PEM encoded public key, or a PEM encoded certificate.
+// For certificates that we have in our database,
+// return a map of Google's nickname to ours. For ones that aren't return a
+// map of Google's nickname to SHA-256 values. This code is modeled after agl's
+// https://github.com/agl/transport-security-state-generate, which doesn't
+// live in the Chromium repo because go is not an official language in
+// Chromium.
+// For all of the entries in this file:
+// - If the entry has a hash format, find the Mozilla pin name (cert nickname)
+// and stick the hash into certSKDToName
+// - If the entry has a PEM format, parse the PEM, find the Mozilla pin name
+// and stick the hash in certSKDToName
+// We MUST be able to find a corresponding cert nickname for the Chrome names,
+// otherwise we skip all pinsets referring to that Chrome name.
+function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) {
+ // Prefixes that we care about.
+ const BEGIN_CERT = "-----BEGIN CERTIFICATE-----";
+ const END_CERT = "-----END CERTIFICATE-----";
+ const BEGIN_PUB_KEY = "-----BEGIN PUBLIC KEY-----";
+ const END_PUB_KEY = "-----END PUBLIC KEY-----";
+
+ // Parsing states.
+ const PRE_NAME = 0;
+ const POST_NAME = 1;
+ const IN_CERT = 2;
+ const IN_PUB_KEY = 3;
+ let state = PRE_NAME;
+
+ let lines = download(filename).split("\n");
+ let pemCert = "";
+ let pemPubKey = "";
+ let hash = "";
+ let chromeNameToHash = {};
+ let chromeNameToMozName = {};
+ let chromeName;
+ for (let line of lines) {
+ // Skip comments and newlines.
+ if (!line.length || line[0] == "#") {
+ continue;
+ }
+ switch (state) {
+ case PRE_NAME:
+ chromeName = line;
+ state = POST_NAME;
+ break;
+ case POST_NAME:
+ if (line.startsWith(SHA256_PREFIX)) {
+ hash = line.substring(SHA256_PREFIX.length);
+ chromeNameToHash[chromeName] = hash;
+ certNameToSKD[chromeName] = hash;
+ certSKDToName[hash] = chromeName;
+ state = PRE_NAME;
+ } else if (line.startsWith(BEGIN_CERT)) {
+ state = IN_CERT;
+ } else if (line.startsWith(BEGIN_PUB_KEY)) {
+ state = IN_PUB_KEY;
+ } else if (
+ chromeName == "PinsListTimestamp" &&
+ line.match(/^[0-9]+$/)
+ ) {
+ // If the name of this entry is "PinsListTimestamp", this line should
+ // be the pins list timestamp. It should consist solely of digits.
+ // Ignore it and expect other entries to come.
+ state = PRE_NAME;
+ } else {
+ throw new Error(
+ "ERROR: couldn't parse Chrome certificate file line: " + line
+ );
+ }
+ break;
+ case IN_CERT:
+ if (line.startsWith(END_CERT)) {
+ state = PRE_NAME;
+ hash = getSKDFromPem(pemCert);
+ pemCert = "";
+ let mozName;
+ if (hash in certSKDToName) {
+ mozName = certSKDToName[hash];
+ } else {
+ // Not one of our built-in certs. Prefix the name with
+ // GOOGLE_PIN_.
+ mozName = GOOGLE_PIN_PREFIX + chromeName;
+ dump(
+ "Can't find hash in builtin certs for Chrome nickname " +
+ chromeName +
+ ", inserting " +
+ mozName +
+ "\n"
+ );
+ certSKDToName[hash] = mozName;
+ certNameToSKD[mozName] = hash;
+ }
+ chromeNameToMozName[chromeName] = mozName;
+ } else {
+ pemCert += line;
+ }
+ break;
+ case IN_PUB_KEY:
+ if (line.startsWith(END_PUB_KEY)) {
+ state = PRE_NAME;
+ hash = sha256Base64(pemPubKey);
+ pemPubKey = "";
+ chromeNameToHash[chromeName] = hash;
+ certNameToSKD[chromeName] = hash;
+ certSKDToName[hash] = chromeName;
+ } else {
+ pemPubKey += line;
+ }
+ break;
+ default:
+ throw new Error(
+ "ERROR: couldn't parse Chrome certificate file " + line
+ );
+ }
+ }
+ return [chromeNameToHash, chromeNameToMozName];
+}
+
+// We can only import pinsets from chrome if for every name in the pinset:
+// - We have a hash from Chrome's static certificate file
+// - We have a builtin cert
+// If the pinset meets these requirements, we store a map array of pinset
+// objects:
+// {
+// pinset_name : {
+// // Array of names with entries in certNameToSKD
+// sha256_hashes: []
+// }
+// }
+// and an array of imported pinset entries:
+// { name: string, include_subdomains: boolean, test_mode: boolean,
+// pins: pinset_name }
+function downloadAndParseChromePins(
+ filename,
+ chromeNameToHash,
+ chromeNameToMozName,
+ certNameToSKD,
+ certSKDToName
+) {
+ let chromePreloads = downloadAsJson(filename);
+ let chromePins = chromePreloads.pinsets;
+ let chromeImportedPinsets = {};
+ let chromeImportedEntries = [];
+
+ chromePins.forEach(function (pin) {
+ let valid = true;
+ let pinset = { name: pin.name, sha256_hashes: [] };
+ // Translate the Chrome pinset format to ours
+ pin.static_spki_hashes.forEach(function (name) {
+ if (name in chromeNameToHash) {
+ let hash = chromeNameToHash[name];
+ pinset.sha256_hashes.push(certSKDToName[hash]);
+
+ // We should have already added hashes for all of these when we
+ // imported the certificate file.
+ if (!certNameToSKD[name]) {
+ throw new Error("ERROR: No hash for name: " + name);
+ }
+ } else if (name in chromeNameToMozName) {
+ pinset.sha256_hashes.push(chromeNameToMozName[name]);
+ } else {
+ dump(
+ "Skipping Chrome pinset " +
+ pinset.name +
+ ", couldn't find " +
+ "builtin " +
+ name +
+ " from cert file\n"
+ );
+ valid = false;
+ }
+ });
+ if (valid) {
+ chromeImportedPinsets[pinset.name] = pinset;
+ }
+ });
+
+ // Grab the domain entry lists. Chrome's entry format is similar to
+ // ours, except theirs includes a HSTS mode.
+ const cData = gStaticPins.chromium_data;
+ let entries = chromePreloads.entries;
+ entries.forEach(function (entry) {
+ // HSTS entry only
+ if (!entry.pins) {
+ return;
+ }
+ let pinsetName = cData.substitute_pinsets[entry.pins];
+ if (!pinsetName) {
+ pinsetName = entry.pins;
+ }
+
+ // We trim the entry name here to avoid breaking hostname comparisons in the
+ // HPKP implementation.
+ entry.name = entry.name.trim();
+
+ let isProductionDomain = cData.production_domains.includes(entry.name);
+ let isProductionPinset = cData.production_pinsets.includes(pinsetName);
+ let excludeDomain = cData.exclude_domains.includes(entry.name);
+ let isTestMode = !isProductionPinset && !isProductionDomain;
+ if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) {
+ chromeImportedEntries.push({
+ name: entry.name,
+ include_subdomains: entry.include_subdomains,
+ test_mode: isTestMode,
+ is_moz: false,
+ pins: pinsetName,
+ });
+ }
+ });
+ return [chromeImportedPinsets, chromeImportedEntries];
+}
+
+// Returns a pair of maps [certNameToSKD, certSKDToName] between cert
+// nicknames and digests of the SPKInfo for the mozilla trust store
+function loadNSSCertinfo(extraCertificates) {
+ let allCerts = gCertDB.getCerts();
+ let certNameToSKD = {};
+ let certSKDToName = {};
+ for (let cert of allCerts) {
+ if (!cert.isBuiltInRoot) {
+ continue;
+ }
+ let name = cert.displayName;
+ let SKD = cert.sha256SubjectPublicKeyInfoDigest;
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+
+ for (let cert of extraCertificates) {
+ let name = cert.commonName;
+ let SKD = cert.sha256SubjectPublicKeyInfoDigest;
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+
+ {
+ // This is the pinning test certificate. The key hash identifies the
+ // default RSA key from pykey.
+ let name = "End Entity Test Cert";
+ let SKD = "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8=";
+ certNameToSKD[name] = SKD;
+ certSKDToName[SKD] = name;
+ }
+ return [certNameToSKD, certSKDToName];
+}
+
+function parseJson(filename) {
+ let json = stripComments(readFileToString(filename));
+ return JSON.parse(json);
+}
+
+function nameToAlias(certName) {
+ // change the name to a string valid as a c identifier
+ // remove non-ascii characters
+ certName = certName.replace(/[^[:ascii:]]/g, "_");
+ // replace non word characters
+ certName = certName.replace(/[^A-Za-z0-9]/g, "_");
+
+ return "k" + certName + "Fingerprint";
+}
+
+function compareByName(a, b) {
+ return a.name.localeCompare(b.name);
+}
+
+function genExpirationTime() {
+ let now = new Date();
+ let nowMillis = now.getTime();
+ let expirationMillis = nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000;
+ let expirationMicros = expirationMillis * 1000;
+ return (
+ "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" +
+ expirationMicros +
+ ");\n"
+ );
+}
+
+function writeFullPinset(certNameToSKD, certSKDToName, pinset) {
+ if (!pinset.sha256_hashes || !pinset.sha256_hashes.length) {
+ throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`);
+ }
+ writeFingerprints(
+ certNameToSKD,
+ certSKDToName,
+ pinset.name,
+ pinset.sha256_hashes
+ );
+}
+
+function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) {
+ let varPrefix = "kPinset_" + name;
+ writeString("static const char* const " + varPrefix + "_Data[] = {\n");
+ let SKDList = [];
+ for (let certName of hashes) {
+ if (!(certName in certNameToSKD)) {
+ throw new Error(`ERROR: Can't find '${certName}' in certNameToSKD`);
+ }
+ SKDList.push(certNameToSKD[certName]);
+ }
+ for (let skd of SKDList.sort()) {
+ writeString(" " + nameToAlias(certSKDToName[skd]) + ",\n");
+ }
+ if (!hashes.length) {
+ // ANSI C requires that an initialiser list be non-empty.
+ writeString(" 0\n");
+ }
+ writeString("};\n");
+ writeString(
+ "static const StaticFingerprints " +
+ varPrefix +
+ " = {\n " +
+ "sizeof(" +
+ varPrefix +
+ "_Data) / sizeof(const char*),\n " +
+ varPrefix +
+ "_Data\n};\n\n"
+ );
+}
+
+function writeEntry(entry) {
+ let printVal = ` { "${entry.name}", `;
+ if (entry.include_subdomains) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ // Default to test mode if not specified.
+ let testMode = true;
+ if (entry.hasOwnProperty("test_mode")) {
+ testMode = entry.test_mode;
+ }
+ if (testMode) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ if (
+ entry.is_moz ||
+ (entry.pins.includes("mozilla") && entry.pins != "mozilla_test")
+ ) {
+ printVal += "true, ";
+ } else {
+ printVal += "false, ";
+ }
+ if ("id" in entry) {
+ if (entry.id >= 256) {
+ throw new Error("ERROR: Not enough buckets in histogram");
+ }
+ if (entry.id >= 0) {
+ printVal += entry.id + ", ";
+ }
+ } else {
+ printVal += "-1, ";
+ }
+ printVal += "&kPinset_" + entry.pins;
+ printVal += " },\n";
+ writeString(printVal);
+}
+
+function writeDomainList(chromeImportedEntries) {
+ writeString("/* Sort hostnames for binary search. */\n");
+ writeString(
+ "static const TransportSecurityPreload " +
+ "kPublicKeyPinningPreloadList[] = {\n"
+ );
+ let count = 0;
+ let mozillaDomains = {};
+ gStaticPins.entries.forEach(function (entry) {
+ mozillaDomains[entry.name] = true;
+ });
+ // For any domain for which we have set pins, exclude them from
+ // chromeImportedEntries.
+ for (let i = chromeImportedEntries.length - 1; i >= 0; i--) {
+ if (mozillaDomains[chromeImportedEntries[i].name]) {
+ dump(
+ "Skipping duplicate pinset for domain " +
+ JSON.stringify(chromeImportedEntries[i], undefined, 2) +
+ "\n"
+ );
+ chromeImportedEntries.splice(i, 1);
+ }
+ }
+ let sortedEntries = gStaticPins.entries;
+ sortedEntries.push.apply(sortedEntries, chromeImportedEntries);
+ for (let entry of sortedEntries.sort(compareByName)) {
+ count++;
+ writeEntry(entry);
+ }
+ writeString("};\n");
+
+ writeString("\n// Pinning Preload List Length = " + count + ";\n");
+ writeString("\nstatic const int32_t kUnknownId = -1;\n");
+}
+
+function writeFile(
+ certNameToSKD,
+ certSKDToName,
+ chromeImportedPinsets,
+ chromeImportedEntries
+) {
+ // Compute used pins from both Chrome's and our pinsets, so we can output
+ // them later.
+ let usedFingerprints = {};
+ let mozillaPins = {};
+ gStaticPins.pinsets.forEach(function (pinset) {
+ mozillaPins[pinset.name] = true;
+ pinset.sha256_hashes.forEach(function (name) {
+ usedFingerprints[name] = true;
+ });
+ });
+ for (let key in chromeImportedPinsets) {
+ let pinset = chromeImportedPinsets[key];
+ pinset.sha256_hashes.forEach(function (name) {
+ usedFingerprints[name] = true;
+ });
+ }
+
+ writeString(FILE_HEADER);
+
+ // Write actual fingerprints.
+ Object.keys(usedFingerprints)
+ .sort()
+ .forEach(function (certName) {
+ if (certName) {
+ writeString("/* " + certName + " */\n");
+ writeString("static const char " + nameToAlias(certName) + "[] =\n");
+ writeString(' "' + certNameToSKD[certName] + '";\n');
+ writeString("\n");
+ }
+ });
+
+ // Write the pinsets
+ writeString(PINSETDEF);
+ writeString("/* PreloadedHPKPins.json pinsets */\n");
+ gStaticPins.pinsets.sort(compareByName).forEach(function (pinset) {
+ writeFullPinset(certNameToSKD, certSKDToName, pinset);
+ });
+ writeString("/* Chrome static pinsets */\n");
+ for (let key in chromeImportedPinsets) {
+ if (mozillaPins[key]) {
+ dump("Skipping duplicate pinset " + key + "\n");
+ } else {
+ dump("Writing pinset " + key + "\n");
+ writeFullPinset(certNameToSKD, certSKDToName, chromeImportedPinsets[key]);
+ }
+ }
+
+ // Write the domainlist entries.
+ writeString(DOMAINHEADER);
+ writeDomainList(chromeImportedEntries);
+ writeString("\n");
+ writeString(genExpirationTime());
+}
+
+function loadExtraCertificates(certStringList) {
+ let constructedCerts = [];
+ for (let certString of certStringList) {
+ constructedCerts.push(gCertDB.constructX509FromBase64(certString));
+ }
+ return constructedCerts;
+}
+
+var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates);
+var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates);
+var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts(
+ gStaticPins.chromium_data.cert_file_url,
+ certNameToSKD,
+ certSKDToName
+);
+var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins(
+ gStaticPins.chromium_data.json_file_url,
+ chromeNameToHash,
+ chromeNameToMozName,
+ certNameToSKD,
+ certSKDToName
+);
+
+writeFile(
+ certNameToSKD,
+ certSKDToName,
+ chromeImportedPinsets,
+ chromeImportedEntries
+);
+
+FileUtils.closeSafeFileOutputStream(gFileOutputStream);
diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
new file mode 100644
index 0000000000..aeaa29bc2d
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
@@ -0,0 +1,557 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+"use strict";
+
+// How to run this file:
+// 1. [obtain firefox source code]
+// 2. [build/obtain firefox binaries]
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadList.js [absolute path to]/nsSTSPreloadList.inc`
+// Note: Running this file outputs a new nsSTSPreloadList.inc in the current
+// working directory.
+
+var gSSService = Cc["@mozilla.org/ssservice;1"].getService(
+ Ci.nsISiteSecurityService
+);
+
+const { FileUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/FileUtils.sys.mjs"
+);
+
+const SOURCE =
+ "https://chromium.googlesource.com/chromium/src/+/refs/heads/main/net/http/transport_security_state_static.json?format=TEXT";
+const TOOL_SOURCE =
+ "https://hg.mozilla.org/mozilla-central/file/default/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js";
+const OUTPUT = "nsSTSPreloadList.inc";
+const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18;
+const MAX_CONCURRENT_REQUESTS = 500;
+const MAX_RETRIES = 1;
+const REQUEST_TIMEOUT = 30 * 1000;
+const ERROR_NONE = "no error";
+const ERROR_CONNECTING_TO_HOST = "could not connect to host";
+const ERROR_NO_HSTS_HEADER = "did not receive HSTS header";
+const ERROR_MAX_AGE_TOO_LOW = "max-age too low: ";
+const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*****************************************************************************/
+/* This is an automatically generated file. If you're not */
+/* nsSiteSecurityService.cpp, you shouldn't be #including it. */
+/*****************************************************************************/
+
+#include <stdint.h>
+`;
+
+const GPERF_DELIM = "%%\n";
+
+function download() {
+ let req = new XMLHttpRequest();
+ req.open("GET", SOURCE, false); // doing the request synchronously
+ try {
+ req.send();
+ } catch (e) {
+ throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`);
+ }
+
+ if (req.status != 200) {
+ throw new Error(
+ "ERROR: problem downloading '" + SOURCE + "': status " + req.status
+ );
+ }
+
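+  // Note: the gitiles "?format=TEXT" endpoint serves the file base64-encoded,
+  // which is why the response body is run through atob() below.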
+ let resultDecoded;
+ try {
+ resultDecoded = atob(req.responseText);
+ } catch (e) {
+ throw new Error(
+ "ERROR: could not decode data as base64 from '" + SOURCE + "': " + e
+ );
+ }
+
+ // we have to filter out '//' comments, while not mangling the json
+ let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
+ let data = null;
+ try {
+ data = JSON.parse(result);
+ } catch (e) {
+ throw new Error(`ERROR: could not parse data from '${SOURCE}': ${e}`);
+ }
+ return data;
+}
+
+function getHosts(rawdata) {
+ let hosts = [];
+
+ if (!rawdata || !rawdata.entries) {
+ throw new Error(
+ "ERROR: source data not formatted correctly: 'entries' not found"
+ );
+ }
+
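+  // For reference, a raw entry in the Chromium JSON looks roughly like this
+  // (illustrative values, not taken from the real list):
+  //   { "name": "example.com", "policy": "public-suffix",
+  //     "mode": "force-https", "include_subdomains": true }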
+ for (let entry of rawdata.entries) {
+ if (entry.mode && entry.mode == "force-https") {
+ if (entry.name) {
+ // We trim the entry name here to avoid malformed URI exceptions when we
+ // later try to connect to the domain.
+ entry.name = entry.name.trim();
+ entry.retries = MAX_RETRIES;
+ // We prefer the camelCase variable to the JSON's snake case version
+ entry.includeSubdomains = entry.include_subdomains;
+ hosts.push(entry);
+ } else {
+ throw new Error("ERROR: entry not formatted correctly: no name found");
+ }
+ }
+ }
+
+ return hosts;
+}
+
+function processStsHeader(host, header, status, securityInfo) {
+ let maxAge = {
+ value: 0,
+ };
+ let includeSubdomains = {
+ value: false,
+ };
+ let error = ERROR_NONE;
+ if (
+ header != null &&
+ securityInfo != null &&
+ securityInfo.overridableErrorCategory ==
+ Ci.nsITransportSecurityInfo.ERROR_UNSET
+ ) {
+ try {
+ let uri = Services.io.newURI("https://" + host.name);
+ gSSService.processHeader(uri, header, {}, maxAge, includeSubdomains);
+ } catch (e) {
+ dump(
+ "ERROR: could not process header '" +
+ header +
+ "' from " +
+ host.name +
+ ": " +
+ e +
+ "\n"
+ );
+ error = e;
+ }
+ } else if (status == 0) {
+ error = ERROR_CONNECTING_TO_HOST;
+ } else {
+ error = ERROR_NO_HSTS_HEADER;
+ }
+
+ if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) {
+ error = ERROR_MAX_AGE_TOO_LOW;
+ }
+
+ return {
+ name: host.name,
+ maxAge: maxAge.value,
+ includeSubdomains: includeSubdomains.value,
+ error,
+ retries: host.retries - 1,
+ forceInclude: host.forceInclude,
+ };
+}
+
+// RedirectAndAuthStopper prevents redirects and HTTP authentication
+function RedirectAndAuthStopper() {}
+
+RedirectAndAuthStopper.prototype = {
+ // nsIChannelEventSink
+ asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
+ throw Components.Exception("", Cr.NS_ERROR_ENTITY_CHANGED);
+ },
+
+ // nsIAuthPrompt2
+ promptAuth(channel, level, authInfo) {
+ return false;
+ },
+
+ asyncPromptAuth(channel, callback, context, level, authInfo) {
+ throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED);
+ },
+
+ getInterface(iid) {
+ return this.QueryInterface(iid);
+ },
+
+ QueryInterface: ChromeUtils.generateQI([
+ "nsIChannelEventSink",
+ "nsIAuthPrompt2",
+ ]),
+};
+
+function fetchstatus(host) {
+ return new Promise((resolve, reject) => {
+ let xhr = new XMLHttpRequest();
+ let uri = "https://" + host.name + "/";
+
+ xhr.open("head", uri, true);
+ xhr.setRequestHeader("X-Automated-Tool", TOOL_SOURCE);
+ xhr.timeout = REQUEST_TIMEOUT;
+
+ let errorHandler = () => {
+ dump("ERROR: exception making request to " + host.name + "\n");
+ resolve(
+ processStsHeader(
+ host,
+ null,
+ xhr.status,
+ xhr.channel && xhr.channel.securityInfo
+ )
+ );
+ };
+
+ xhr.onerror = errorHandler;
+ xhr.ontimeout = errorHandler;
+ xhr.onabort = errorHandler;
+
+ xhr.onload = () => {
+ let header = xhr.getResponseHeader("strict-transport-security");
+ resolve(
+ processStsHeader(host, header, xhr.status, xhr.channel.securityInfo)
+ );
+ };
+
+ xhr.channel.notificationCallbacks = new RedirectAndAuthStopper();
+ xhr.send();
+ });
+}
+
+async function getHSTSStatus(host) {
+ do {
+ host = await fetchstatus(host);
+ } while (shouldRetry(host));
+ return host;
+}
+
+function compareHSTSStatus(a, b) {
+ if (a.name > b.name) {
+ return 1;
+ }
+ if (a.name < b.name) {
+ return -1;
+ }
+ return 0;
+}
+
+function writeTo(string, fos) {
+ fos.write(string, string.length);
+}
+
+// Determines and returns a string representing a declaration of when this
+// preload list should no longer be used.
+// This is the current time plus MINIMUM_REQUIRED_MAX_AGE.
+function getExpirationTimeString() {
+ let now = new Date();
+ let nowMillis = now.getTime();
+ // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds
+ let expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000;
+ let expirationMicros = expirationMillis * 1000;
+ return (
+ "const PRTime gPreloadListExpirationTime = INT64_C(" +
+ expirationMicros +
+ ");\n"
+ );
+}
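+// Worked example (hypothetical "now"): with MINIMUM_REQUIRED_MAX_AGE of
+// 10886400 seconds (18 weeks), a nowMillis of 1700000000000 becomes
+// 1710886400000 ms, emitted as INT64_C(1710886400000000) microseconds.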
+
+function shouldRetry(response) {
+ return (
+ response.error != ERROR_NO_HSTS_HEADER &&
+ response.error != ERROR_MAX_AGE_TOO_LOW &&
+ response.error != ERROR_NONE &&
+ response.retries > 0
+ );
+}
+
+// Copied from browser/components/migration/MigrationUtils.sys.mjs
+function spinResolve(promise) {
+ if (!(promise instanceof Promise)) {
+ return promise;
+ }
+ let done = false;
+ let result = null;
+ let error = null;
+ promise
+ .catch(e => {
+ error = e;
+ })
+ .then(r => {
+ result = r;
+ done = true;
+ });
+
+ Services.tm.spinEventLoopUntil(
+ "getHSTSPreloadList.js:spinResolve",
+ () => done
+ );
+ if (error) {
+ throw error;
+ } else {
+ return result;
+ }
+}
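+// Usage sketch: `let result = spinResolve(someAsyncWork());` blocks by spinning
+// the event loop until the promise settles, which is what lets this otherwise
+// synchronous xpcshell script consume async APIs (see the main() call at the
+// bottom of this file).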
+
+async function probeHSTSStatuses(inHosts) {
+ let totalLength = inHosts.length;
+ dump("Examining " + totalLength + " hosts.\n");
+
+ // Make requests in batches of MAX_CONCURRENT_REQUESTS. Otherwise, we have
+ // too many in-flight requests and the time it takes to process them causes
+ // them all to time out.
+ let allResults = [];
+ while (inHosts.length) {
+ let promises = [];
+ for (let i = 0; i < MAX_CONCURRENT_REQUESTS && inHosts.length; i++) {
+ let host = inHosts.shift();
+ promises.push(getHSTSStatus(host));
+ }
+ let results = await Promise.all(promises);
+ let progress = (
+ (100 * (totalLength - inHosts.length)) /
+ totalLength
+ ).toFixed(2);
+ dump(progress + "% done\n");
+ allResults = allResults.concat(results);
+ }
+
+ dump("HSTS Probe received " + allResults.length + " statuses.\n");
+ return allResults;
+}
+
+function readCurrentList(filename) {
+ var currentHosts = {};
+ var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
+ file.initWithPath(filename);
+ var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsILineInputStream
+ );
+ fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF);
+ var line = {};
+
+ // While we generate entries matching the latest version format,
+ // we still need to be able to read entries in the previous version formats
+ // for bootstrapping a latest version preload list from a previous version
+ // preload list. Hence these regexes.
+ const entryRegexes = [
+ /([^,]+), (0|1)/, // v3
+ / {2}\/\* "([^"]*)", (true|false) \*\//, // v2
+ / {2}{ "([^"]*)", (true|false) },/, // v1
+ ];
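+  // Illustrative lines each regex is meant to match (hypothetical host):
+  //   v3:  example.com, 1
+  //   v2:    /* "example.com", true */
+  //   v1:    { "example.com", true },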
+
+ while (fis.readLine(line)) {
+ let match;
+ entryRegexes.find(r => {
+ match = r.exec(line.value);
+ return match;
+ });
+ if (match) {
+ currentHosts[match[1]] = match[2] == "1" || match[2] == "true";
+ }
+ }
+ return currentHosts;
+}
+
+function combineLists(newHosts, currentHosts) {
+ let newHostsSet = new Set();
+
+ for (let newHost of newHosts) {
+ newHostsSet.add(newHost.name);
+ }
+
+ for (let currentHost in currentHosts) {
+ if (!newHostsSet.has(currentHost)) {
+ newHosts.push({ name: currentHost, retries: MAX_RETRIES });
+ }
+ }
+}
+
+const TEST_ENTRIES = [
+ {
+ name: "includesubdomains.preloaded.test",
+ includeSubdomains: true,
+ },
+ {
+ name: "includesubdomains2.preloaded.test",
+ includeSubdomains: true,
+ },
+ {
+ name: "noincludesubdomains.preloaded.test",
+ includeSubdomains: false,
+ },
+];
+
+function deleteTestHosts(currentHosts) {
+ for (let testEntry of TEST_ENTRIES) {
+ delete currentHosts[testEntry.name];
+ }
+}
+
+function getTestHosts() {
+ let hosts = [];
+ for (let testEntry of TEST_ENTRIES) {
+ hosts.push({
+ name: testEntry.name,
+ maxAge: MINIMUM_REQUIRED_MAX_AGE,
+ includeSubdomains: testEntry.includeSubdomains,
+ error: ERROR_NONE,
+ // This deliberately doesn't have a value for `retries` (because we should
+ // never attempt to connect to this host).
+ forceInclude: true,
+ });
+ }
+ return hosts;
+}
+
+async function insertHosts(inoutHostList, inAddedHosts) {
+ for (let host of inAddedHosts) {
+ inoutHostList.push(host);
+ }
+}
+
+function filterForcedInclusions(inHosts, outNotForced, outForced) {
+ // Apply our filters (based on policy today) to determine which entries
+ // will be included without being checked (forced); the others will be
+ // checked using active probing.
+ for (let host of inHosts) {
+ if (
+ host.policy == "google" ||
+ host.policy == "public-suffix" ||
+ host.policy == "public-suffix-requested"
+ ) {
+ host.forceInclude = true;
+ host.error = ERROR_NONE;
+ outForced.push(host);
+ } else {
+ outNotForced.push(host);
+ }
+ }
+}
+
+function output(statuses) {
+ dump("INFO: Writing output to " + OUTPUT + "\n");
+ try {
+ let file = new FileUtils.File(
+ PathUtils.join(Services.dirsvc.get("CurWorkD", Ci.nsIFile).path, OUTPUT)
+ );
+ let fos = FileUtils.openSafeFileOutputStream(file);
+ writeTo(HEADER, fos);
+ writeTo(getExpirationTimeString(), fos);
+
+ writeTo(GPERF_DELIM, fos);
+
+ for (let status of statuses) {
+ let includeSubdomains = status.includeSubdomains ? 1 : 0;
+ writeTo(status.name + ", " + includeSubdomains + "\n", fos);
+ }
+
+ writeTo(GPERF_DELIM, fos);
+ FileUtils.closeSafeFileOutputStream(fos);
+ dump("finished writing output file\n");
+ } catch (e) {
+ dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n");
+ throw e;
+ }
+}
+
+function errorToString(status) {
+ return status.error == ERROR_MAX_AGE_TOO_LOW
+ ? status.error + status.maxAge
+ : status.error;
+}
+
+async function main(args) {
+ if (args.length != 1) {
+ throw new Error(
+ "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>"
+ );
+ }
+
+ // get the current preload list
+ let currentHosts = readCurrentList(args[0]);
+ // delete any hosts we use in tests so we don't actually connect to them
+ deleteTestHosts(currentHosts);
+ // disable the current preload list so it won't interfere with requests we make
+ Services.prefs.setBoolPref(
+ "network.stricttransportsecurity.preloadlist",
+ false
+ );
+ // download and parse the raw json file from the Chromium source
+ let rawdata = download();
+ // get just the hosts with mode: "force-https"
+ let hosts = getHosts(rawdata);
+ // add hosts in the current list to the new list (avoiding duplicates)
+ combineLists(hosts, currentHosts);
+
+ // Don't contact hosts that are forced to be included anyway
+ let hostsToContact = [];
+ let forcedHosts = [];
+ filterForcedInclusions(hosts, hostsToContact, forcedHosts);
+
+ // Initialize the final status list
+ let hstsStatuses = [];
+ // Add the hosts we use in tests
+ dump("Adding test hosts\n");
+ insertHosts(hstsStatuses, getTestHosts());
+ // Add in the hosts that are forced
+ dump("Adding forced hosts\n");
+ insertHosts(hstsStatuses, forcedHosts);
+
+ let total = await probeHSTSStatuses(hostsToContact)
+ .then(function (probedStatuses) {
+ return hstsStatuses.concat(probedStatuses);
+ })
+ .then(function (statuses) {
+ return statuses.sort(compareHSTSStatus);
+ })
+ .then(function (statuses) {
+ for (let status of statuses) {
+ // If we've encountered an error for this entry (other than the site not
+ // sending an HSTS header), be safe and don't remove it from the list
+ // (given that it was already on the list).
+ if (
+ !status.forceInclude &&
+ status.error != ERROR_NONE &&
+ status.error != ERROR_NO_HSTS_HEADER &&
+ status.error != ERROR_MAX_AGE_TOO_LOW &&
+ status.name in currentHosts
+ ) {
+ // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+ status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+ status.includeSubdomains = currentHosts[status.name];
+ }
+ }
+ return statuses;
+ })
+ .then(function (statuses) {
+ // Filter out entries we aren't including.
+ var includedStatuses = statuses.filter(function (status) {
+ if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+ // dump("INFO: " + status.name + " NOT ON the preload list\n");
+ return false;
+ }
+
+ // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+ if (status.forceInclude && status.error != ERROR_NONE) {
+ dump(
+ status.name +
+ ": " +
+ errorToString(status) +
+ " (error ignored - included regardless)\n"
+ );
+ }
+ return true;
+ });
+ return includedStatuses;
+ });
+
+ // Write the output file
+ output(total);
+
+ dump("HSTS probing all done\n");
+}
+
+// arguments is a global within xpcshell
+spinResolve(main(arguments));
diff --git a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
new file mode 100755
index 0000000000..b88ee476da
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh
@@ -0,0 +1,618 @@
+#!/bin/bash
+
+set -ex
+
+function usage {
+ cat <<EOF
+
+Usage: $(basename "$0") -h # Displays this usage/help text
+Usage: $(basename "$0") -x # lists exit codes
+Usage: $(basename "$0") [-p product]
+ [-r existing_repo_dir]
+ # Use mozilla-central builds to check HSTS & HPKP
+ [--use-mozilla-central]
+ # Use archive.m.o instead of the taskcluster index to get xpcshell
+ [--use-ftp-builds]
+ # One (or more) of the following actions must be specified.
+ --hsts | --hpkp | --remote-settings | --suffix-list
+ -b branch
+
+EOF
+}
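+# Illustrative invocation (one of several valid flag combinations):
+#   ./periodic_file_updates.sh --hsts --hpkp -b mozilla-central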
+
+PRODUCT="firefox"
+BRANCH=""
+PLATFORM_EXT="tar.bz2"
+UNPACK_CMD="tar jxf"
+CLOSED_TREE=false
+DONTBUILD=false
+APPROVAL=false
+COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>'
+REPODIR=''
+HGHOST="hg.mozilla.org"
+STAGEHOST="archive.mozilla.org"
+WGET="wget -nv"
+UNTAR="tar -zxf"
+DIFF="$(command -v diff) -u"
+BASEDIR="${HOME}"
+
+SCRIPTDIR="$(realpath "$(dirname "$0")")"
+HG="$(command -v hg)"
+DATADIR="${BASEDIR}/data"
+mkdir -p "${DATADIR}"
+
+USE_MC=false
+USE_TC=true
+JQ="$(command -v jq)"
+
+DO_HSTS=false
+HSTS_PRELOAD_SCRIPT="${SCRIPTDIR}/getHSTSPreloadList.js"
+HSTS_PRELOAD_ERRORS="nsSTSPreloadList.errors"
+HSTS_PRELOAD_INC_OLD="${DATADIR}/nsSTSPreloadList.inc"
+HSTS_PRELOAD_INC_NEW="${BASEDIR}/${PRODUCT}/nsSTSPreloadList.inc"
+HSTS_UPDATED=false
+
+DO_HPKP=false
+HPKP_PRELOAD_SCRIPT="${SCRIPTDIR}/genHPKPStaticPins.js"
+HPKP_PRELOAD_ERRORS="StaticHPKPins.errors"
+HPKP_PRELOAD_JSON="${DATADIR}/PreloadedHPKPins.json"
+HPKP_PRELOAD_INC="StaticHPKPins.h"
+HPKP_PRELOAD_INPUT="${DATADIR}/${HPKP_PRELOAD_INC}"
+HPKP_PRELOAD_OUTPUT="${DATADIR}/${HPKP_PRELOAD_INC}.out"
+HPKP_UPDATED=false
+
+DO_REMOTE_SETTINGS=false
+REMOTE_SETTINGS_SERVER=''
+REMOTE_SETTINGS_INPUT="${DATADIR}/remote-settings.in"
+REMOTE_SETTINGS_OUTPUT="${DATADIR}/remote-settings.out"
+REMOTE_SETTINGS_DIR="/services/settings/dumps"
+REMOTE_SETTINGS_UPDATED=false
+
+DO_SUFFIX_LIST=false
+GITHUB_SUFFIX_URL="https://raw.githubusercontent.com/publicsuffix/list/master/public_suffix_list.dat"
+GITHUB_SUFFIX_LOCAL="public_suffix_list.dat"
+HG_SUFFIX_LOCAL="effective_tld_names.dat"
+HG_SUFFIX_PATH="/netwerk/dns/${HG_SUFFIX_LOCAL}"
+SUFFIX_LIST_UPDATED=false
+
+ARTIFACTS_DIR="${ARTIFACTS_DIR:-.}"
+# Defaults
+HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}"
+HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}"
+REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}"
+SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}"
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+index_base="$TASKCLUSTER_ROOT_URL/api/index/v1"
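+# For example (illustrative root URL), with
+# TASKCLUSTER_ROOT_URL=https://firefox-ci-tc.services.mozilla.com this yields
+# queue_base=https://firefox-ci-tc.services.mozilla.com/api/queue/v1 and
+# index_base=https://firefox-ci-tc.services.mozilla.com/api/index/v1.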
+
+# Cleanup common artifacts.
+function preflight_cleanup {
+ cd "${BASEDIR}"
+ rm -rf "${PRODUCT}" tests "${BROWSER_ARCHIVE}" "${TESTS_ARCHIVE}"
+}
+
+function download_shared_artifacts_from_ftp {
+ cd "${BASEDIR}"
+
+ # Download everything we need to run js with xpcshell
+ echo "INFO: Downloading all the necessary pieces from ${STAGEHOST}..."
+ ARTIFACT_DIR="nightly/latest-${REPODIR}"
+ if [ "${USE_MC}" == "true" ]; then
+ ARTIFACT_DIR="nightly/latest-mozilla-central"
+ fi
+
+ BROWSER_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${BROWSER_ARCHIVE}"
+ TESTS_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${TESTS_ARCHIVE}"
+
+ echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+ ${WGET} "${BROWSER_ARCHIVE_URL}"
+ echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+ ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+function download_shared_artifacts_from_tc {
+ cd "${BASEDIR}"
+ TASKID_FILE="taskId.json"
+
+ # Download everything we need to run js with xpcshell
+ echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
+ TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.shippable.latest.${PRODUCT}.linux64-opt"
+ if [ "${USE_MC}" == "true" ]; then
+ TASKID_URL="$index_base/task/gecko.v2.mozilla-central.shippable.latest.${PRODUCT}.linux64-opt"
+ fi
+ ${WGET} -O ${TASKID_FILE} "${TASKID_URL}"
+ INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
+ if [ -z "${INDEX_TASK_ID}" ]; then
+ echo "Failed to look up taskId at ${TASKID_URL}"
+ exit 22
+ else
+ echo "INFO: Got taskId of $INDEX_TASK_ID"
+ fi
+
+ TASKSTATUS_FILE="taskstatus.json"
+ STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status"
+ ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
+ LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
+ echo "INFO: Examining run number ${LAST_RUN_INDEX}"
+
+ BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
+ echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+ ${WGET} "${BROWSER_ARCHIVE_URL}"
+
+ TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
+ echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+ ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+function unpack_artifacts {
+ cd "${BASEDIR}"
+ if [ ! -f "${BROWSER_ARCHIVE}" ]; then
+ echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2
+ exit 31
+ fi
+ if [ ! -f "${TESTS_ARCHIVE}" ]; then
+ echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2
+ exit 32
+ fi
+ # Unpack the browser and move xpcshell in place for updating the preload list.
+ echo "INFO: Unpacking resources..."
+ ${UNPACK_CMD} "${BROWSER_ARCHIVE}"
+ mkdir -p tests
+ cd tests
+ ${UNTAR} "../${TESTS_ARCHIVE}"
+ cd "${BASEDIR}"
+ cp tests/bin/xpcshell "${PRODUCT}"
+}
+
+# Downloads the current in-tree HSTS (HTTP Strict Transport Security) files.
+# Runs a simple xpcshell script to generate up-to-date HSTS information.
+# Compares the new HSTS output with the old to determine whether we need to update.
+function compare_hsts_files {
+ cd "${BASEDIR}"
+
+ HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC_OLD}")"
+
+ echo "INFO: Downloading existing include file..."
+ rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC_OLD}"
+ echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}"
+ ${WGET} -O "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_HG}"
+
+ if [ ! -f "${HSTS_PRELOAD_INC_OLD}" ]; then
+ echo "Downloaded file '${HSTS_PRELOAD_INC_OLD}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2
+ exit 41
+ fi
+
+ # Run the script to get an updated preload list.
+ echo "INFO: Generating new HSTS preload list..."
+ cd "${BASEDIR}/${PRODUCT}"
+ if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC_OLD}"; then
+ echo "HSTS preload list generation failed" >&2
+ exit 43
+ fi
+
+ # The created files should be non-empty.
+ echo "INFO: Checking whether new HSTS preload list is valid..."
+ if [ ! -s "${HSTS_PRELOAD_INC_NEW}" ]; then
+ echo "New HSTS preload list ${HSTS_PRELOAD_INC_NEW} is empty. That's less good." >&2
+ exit 42
+ fi
+ cd "${BASEDIR}"
+
+ # Check for differences
+ echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}"
+ ${DIFF} "${HSTS_PRELOAD_INC_OLD}" "${HSTS_PRELOAD_INC_NEW}" | tee "${HSTS_DIFF_ARTIFACT}"
+ if [ -s "${HSTS_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+# Downloads the current in-tree HPKP (HTTP public key pinning) files.
+# Runs a simple xpcshell script to generate up-to-date HPKP information.
+# Compares the new HPKP output with the old to determine whether we need to update.
+function compare_hpkp_files {
+ cd "${BASEDIR}"
+ HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")"
+
+ HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+ rm -f "${HPKP_PRELOAD_OUTPUT}"
+ ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}"
+ ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}"
+
+ # Run the script to get an updated preload list.
+ echo "INFO: Generating new HPKP preload list..."
+ cd "${BASEDIR}/${PRODUCT}"
+ if ! LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}"; then
+ echo "HPKP preload list generation failed" >&2
+ exit 54
+ fi
+
+ # The created files should be non-empty.
+ echo "INFO: Checking whether new HPKP preload list is valid..."
+
+ if [ ! -s "${HPKP_PRELOAD_OUTPUT}" ]; then
+ echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2
+ exit 52
+ fi
+ if ! grep kPreloadPKPinsExpirationTime "${HPKP_PRELOAD_OUTPUT}"; then
+ echo "${HPKP_PRELOAD_OUTPUT} is missing an expiration time. Truncated?" >&2
+ exit 53
+ fi
+ cd "${BASEDIR}"
+
+ echo "INFO: diffing old/new HPKP preload lists..."
+ ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}"
+ if [ -s "${HPKP_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+function is_valid_xml {
+ xmlfile=$1
+ XMLLINT=$(command -v xmllint 2>/dev/null | head -n1)
+
+ if [ ! -x "${XMLLINT}" ]; then
+ echo "ERROR: xmllint not found in PATH"
+ exit 60
+ fi
+ ${XMLLINT} --nonet --noout "${xmlfile}"
+}
+
+# Downloads the public suffix list
+function compare_suffix_lists {
+ HG_SUFFIX_URL="${HGREPO}/raw-file/default/${HG_SUFFIX_PATH}"
+ cd "${BASEDIR}"
+
+ echo "INFO: ${WGET} -O ${GITHUB_SUFFIX_LOCAL} ${GITHUB_SUFFIX_URL}"
+ rm -f "${GITHUB_SUFFIX_LOCAL}"
+ ${WGET} -O "${GITHUB_SUFFIX_LOCAL}" "${GITHUB_SUFFIX_URL}"
+
+ echo "INFO: ${WGET} -O ${HG_SUFFIX_LOCAL} ${HG_SUFFIX_URL}"
+ rm -f "${HG_SUFFIX_LOCAL}"
+ ${WGET} -O "${HG_SUFFIX_LOCAL}" "${HG_SUFFIX_URL}"
+
+  echo "INFO: diffing in-tree suffix list against the public suffix list from GitHub..."
+ ${DIFF} ${GITHUB_SUFFIX_LOCAL} ${HG_SUFFIX_LOCAL} | tee "${SUFFIX_LIST_DIFF_ARTIFACT}"
+ if [ -s "${SUFFIX_LIST_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+function compare_remote_settings_files {
+ REMOTE_SETTINGS_SERVER="https://firefox.settings.services.mozilla.com/v1"
+
+ # 1. List remote settings collections from server.
+ echo "INFO: fetch remote settings list from server"
+ ${WGET} -qO- "${REMOTE_SETTINGS_SERVER}/buckets/monitor/collections/changes/records" |\
+ ${JQ} -r '.data[] | .bucket+"/"+.collection+"/"+(.last_modified|tostring)' |\
+ # 2. For each entry ${bucket, collection, last_modified}
+ while IFS="/" read -r bucket collection last_modified; do
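+    # Each record read here has roughly this shape (illustrative values):
+    #   bucket=blocklists collection=addons-bloomfilters last_modified=1700000000000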
+
+ # 3. Download the dump from HG into REMOTE_SETTINGS_INPUT folder
+ hg_dump_url="${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${bucket}/${collection}.json"
+ local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+ mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}"
+ ${WGET} -qO "$local_location_input" "$hg_dump_url"
+ if [ $? -eq 8 ]; then
+ # We don't keep any dump for this collection, skip it.
+ # Try to clean up in case no collection in this bucket has dump.
+ rmdir "$REMOTE_SETTINGS_INPUT/${bucket}" --ignore-fail-on-non-empty
+ continue
+ fi
+
+ # 4. Download server version into REMOTE_SETTINGS_OUTPUT folder
+ remote_records_url="$REMOTE_SETTINGS_SERVER/buckets/${bucket}/collections/${collection}/changeset?_expected=${last_modified}"
+ local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+ mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}"
+ ${WGET} -qO- "$remote_records_url" | ${JQ} '{"data": .changes, "timestamp": .timestamp}' > "${local_location_output}"
+
+ # 5. Download attachments if needed.
+ if [ "${bucket}" = "blocklists" ] && [ "${collection}" = "addons-bloomfilters" ]; then
+ # Find the attachment with the most recent generation_time, like _updateMLBF in Blocklist.jsm.
+ # The server should return one "bloomfilter-base" record, but in case it returns multiple,
+ # return the most recent one. The server may send multiple entries if we ever decide to use
+ # the "filter_expression" feature of Remote Settings to send different records to specific
+ # channels. In that case this code should be updated to recognize the filter expression,
+ # but until we do, simply select the most recent record - can't go wrong with that.
+ # Note that "attachment_type" and "generation_time" are specific to addons-bloomfilters.
+ update_remote_settings_attachment "${bucket}" "${collection}" addons-mlbf.bin \
+ 'map(select(.attachment_type == "bloomfilter-base")) | sort_by(.generation_time) | last'
+ fi
+ # Here is an example to download an attachment with record identifier "ID":
+ # update_remote_settings_attachment "${bucket}" "${collection}" ID '.[] | select(.id == "ID")'
+ # NOTE: The downloaded data is not validated. xpcshell should be used for that.
+ done
+
+ echo "INFO: diffing old/new remote settings dumps..."
+ ${DIFF} -r "${REMOTE_SETTINGS_INPUT}" "${REMOTE_SETTINGS_OUTPUT}" > "${REMOTE_SETTINGS_DIFF_ARTIFACT}"
+ if [ -s "${REMOTE_SETTINGS_DIFF_ARTIFACT}" ]
+ then
+ return 0
+ fi
+ return 1
+}
+
+# Helper for compare_remote_settings_files to download attachments from remote settings.
+# The format and location is documented at:
+# https://firefox-source-docs.mozilla.org/services/common/services/RemoteSettings.html#packaging-attachments
+function update_remote_settings_attachment() {
+ local bucket=$1
+ local collection=$2
+ local attachment_id=$3
+ # $4 is a jq filter on the arrays that should return one record with the attachment
+ local jq_attachment_selector=".data | map(select(.attachment)) | $4"
+
+ # These paths match _readAttachmentDump in services/settings/Attachments.jsm.
+ local path_to_attachment="${bucket}/${collection}/${attachment_id}"
+ local path_to_meta="${bucket}/${collection}/${attachment_id}.meta.json"
+ local old_meta="$REMOTE_SETTINGS_INPUT/${path_to_meta}"
+ local new_meta="$REMOTE_SETTINGS_OUTPUT/${path_to_meta}"
+
+ # Those files should have been created by compare_remote_settings_files before the function call.
+ local local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json"
+ local local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json"
+
+ # Compute the metadata based on already-downloaded records.
+ mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}/${collection}"
+ ${JQ} -cj <"$local_location_input" "${jq_attachment_selector}" > "${old_meta}"
+ mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}"
+ ${JQ} -cj <"$local_location_output" "${jq_attachment_selector}" > "${new_meta}"
+
+ if cmp --silent "${old_meta}" "${new_meta}" ; then
+ # Metadata not changed, don't bother downloading the attachments themselves.
+ return
+ fi
+ # Metadata changed. Download attachments.
+
+ echo "INFO: Downloading updated remote settings dump: ${bucket}/${collection}/${attachment_id}"
+
+  # Overwrite old_meta with the actual file from the repo. The content should be equivalent,
+  # but can have minor differences (e.g. different line endings) if the checked-in file was not
+  # generated by this script (e.g. manually checked in).
+ ${WGET} -qO "${old_meta}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_meta}"
+
+ ${WGET} -qO "${REMOTE_SETTINGS_INPUT}/${path_to_attachment}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_attachment}"
+
+ if [ -z "${ATTACHMENT_BASE_URL}" ] ; then
+ ATTACHMENT_BASE_URL=$(${WGET} -qO- "${REMOTE_SETTINGS_SERVER}" | ${JQ} -r .capabilities.attachments.base_url)
+ fi
+ attachment_path_from_meta=$(${JQ} -r < "${new_meta}" .attachment.location)
+ ${WGET} -qO "${REMOTE_SETTINGS_OUTPUT}/${path_to_attachment}" "${ATTACHMENT_BASE_URL}${attachment_path_from_meta}"
+}
+
+# Clones an hg repo
+function clone_repo {
+ cd "${BASEDIR}"
+ if [ ! -d "${REPODIR}" ]; then
+ ${HG} robustcheckout --sharebase /tmp/hg-store -b default "${HGREPO}" "${REPODIR}"
+ fi
+
+ ${HG} -R "${REPODIR}" pull
+ ${HG} -R "${REPODIR}" update -C default
+}
+
+# Copies the new HSTS files into place (the commit happens at the end of the script).
+function stage_hsts_files {
+ cd "${BASEDIR}"
+ cp -f "${HSTS_PRELOAD_INC_NEW}" "${REPODIR}/security/manager/ssl/"
+}
+
+function stage_hpkp_files {
+ cd "${BASEDIR}"
+ cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}"
+}
+
+function stage_remote_settings_files {
+ cd "${BASEDIR}"
+ cp -a "${REMOTE_SETTINGS_OUTPUT}"/* "${REPODIR}${REMOTE_SETTINGS_DIR}"
+}
+
+function stage_tld_suffix_files {
+ cd "${BASEDIR}"
+ cp -a "${GITHUB_SUFFIX_LOCAL}" "${REPODIR}/${HG_SUFFIX_PATH}"
+}
+
+# Push all pending commits to Phabricator
+function push_repo {
+ cd "${REPODIR}"
+ if [ ! -r "${HOME}/.arcrc" ]
+ then
+ return 1
+ fi
+ if ! ARC=$(command -v arc)
+ then
+ return 1
+ fi
+ if [ -z "${REVIEWERS}" ]
+ then
+ return 1
+ fi
+ # Clean up older review requests
+ # Turn Needs Review D624: No bug, Automated HSTS ...
+ # into D624
+ for diff in $($ARC list | grep "Needs Review" | grep -E "${BRANCH} repo-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }')
+ do
+ echo "Removing old request $diff"
+ # There is no 'arc abandon', see bug 1452082
+ echo '{"transactions": [{"type":"abandon", "value": true}], "objectIdentifier": "'"${diff}"'"}' | $ARC call-conduit -- differential.revision.edit
+ done
+
+ $ARC diff --verbatim --reviewers "${REVIEWERS}"
+}
+
+
+
+# Main
+
+# Parse our command-line options.
+while [ $# -gt 0 ]; do
+ case "$1" in
+ -h) usage; exit 0 ;;
+ -p) PRODUCT="$2"; shift ;;
+ -b) BRANCH="$2"; shift ;;
+ -n) DRY_RUN=true ;;
+ -c) CLOSED_TREE=true ;;
+ -d) DONTBUILD=true ;;
+ -a) APPROVAL=true ;;
+ --pinset) DO_PRELOAD_PINSET=true ;;
+ --hsts) DO_HSTS=true ;;
+ --hpkp) DO_HPKP=true ;;
+ --remote-settings) DO_REMOTE_SETTINGS=true ;;
+ --suffix-list) DO_SUFFIX_LIST=true ;;
+ -r) REPODIR="$2"; shift ;;
+ --use-mozilla-central) USE_MC=true ;;
+ --use-ftp-builds) USE_TC=false ;;
+ -*) usage
+ exit 11 ;;
+ *) break ;; # terminate while loop
+ esac
+ shift
+done
+
+# Must supply a code branch to work with.
+if [ "${BRANCH}" == "" ]; then
+ echo "Error: You must specify a branch with -b branchname." >&2
+ usage
+ exit 12
+fi
+
+# Must choose at least one update action.
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_REMOTE_SETTINGS" == "false" ] && [ "$DO_SUFFIX_LIST" == "false" ]
+then
+ echo "Error: you must specify at least one action from: --hsts, --hpkp, --remote-settings, or --suffix-list" >&2
+ usage
+ exit 13
+fi
+
+# per-product constants
+case "${PRODUCT}" in
+ thunderbird)
+ COMMIT_AUTHOR="tbirdbld <tbirdbld@thunderbird.net>"
+ ;;
+ firefox)
+ ;;
+ *)
+ echo "Error: Invalid product specified"
+ usage
+ exit 14
+ ;;
+esac
+
+if [ "${REPODIR}" == "" ]; then
+ REPODIR="$(basename "${BRANCH}")"
+fi
+
+case "${BRANCH}" in
+ mozilla-central|comm-central|try )
+ HGREPO="https://${HGHOST}/${BRANCH}"
+ ;;
+ mozilla-*|comm-* )
+ HGREPO="https://${HGHOST}/releases/${BRANCH}"
+ ;;
+ * )
+ HGREPO="https://${HGHOST}/projects/${BRANCH}"
+ ;;
+esac
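+# e.g. (illustrative) -b mozilla-central -> https://hg.mozilla.org/mozilla-central,
+#      -b mozilla-esr115 -> https://hg.mozilla.org/releases/mozilla-esr115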
+
+BROWSER_ARCHIVE="target.${PLATFORM_EXT}"
+TESTS_ARCHIVE="target.common.tests.tar.gz"
+
+preflight_cleanup
+if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ]
+then
+ if [ "${USE_TC}" == "true" ]; then
+ download_shared_artifacts_from_tc
+ else
+ download_shared_artifacts_from_ftp
+ fi
+ unpack_artifacts
+fi
+
+if [ "${DO_HSTS}" == "true" ]; then
+ if compare_hsts_files
+ then
+ HSTS_UPDATED=true
+ fi
+fi
+if [ "${DO_HPKP}" == "true" ]; then
+ if compare_hpkp_files
+ then
+ HPKP_UPDATED=true
+ fi
+fi
+if [ "${DO_REMOTE_SETTINGS}" == "true" ]; then
+ if compare_remote_settings_files
+ then
+ REMOTE_SETTINGS_UPDATED=true
+ fi
+fi
+if [ "${DO_SUFFIX_LIST}" == "true" ]; then
+ if compare_suffix_lists
+ then
+ SUFFIX_LIST_UPDATED=true
+ fi
+fi
+
+
+if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${REMOTE_SETTINGS_UPDATED}" == "false" ] && [ "${SUFFIX_LIST_UPDATED}" == "false" ]; then
+ echo "INFO: no updates required. Exiting."
+ exit 0
+else
+ if [ "${DRY_RUN}" == "true" ]; then
+ echo "INFO: Updates are available, not updating hg in dry-run mode."
+ exit 2
+ fi
+fi
+
+clone_repo
+
+COMMIT_MESSAGE="No Bug, ${BRANCH} repo-update"
+if [ "${HSTS_UPDATED}" == "true" ]
+then
+ stage_hsts_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} HSTS"
+fi
+
+if [ "${HPKP_UPDATED}" == "true" ]
+then
+ stage_hpkp_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} HPKP"
+fi
+
+if [ "${REMOTE_SETTINGS_UPDATED}" == "true" ]
+then
+ stage_remote_settings_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} remote-settings"
+fi
+
+if [ "${SUFFIX_LIST_UPDATED}" == "true" ]
+then
+ stage_tld_suffix_files
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} tld-suffixes"
+fi
+
+
+if [ ${DONTBUILD} == true ]; then
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+fi
+if [ ${CLOSED_TREE} == true ]; then
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+fi
+if [ ${APPROVAL} == true ]; then
+ COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=repo-update"
+fi
+
+
+if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}"
+then
+ push_repo
+fi
+
+echo "All done"
diff --git a/taskcluster/docker/periodic-updates/setup.sh b/taskcluster/docker/periodic-updates/setup.sh
new file mode 100755
index 0000000000..d05c60883c
--- /dev/null
+++ b/taskcluster/docker/periodic-updates/setup.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -ve
+
+apt-get update -q
+apt-get install \
+ arcanist \
+ curl \
+ jq \
+ libasound2 \
+ libdbus-glib-1-2 \
+ libgtk-3-0 \
+ libx11-xcb1 \
+ libxml2-utils \
+ libxt6 \
+ libxtst6 \
+ shellcheck \
+ unzip \
+ bzip2 \
+ wget
+
+rm -rf /setup
diff --git a/taskcluster/docker/push-to-try/Dockerfile b/taskcluster/docker/push-to-try/Dockerfile
new file mode 100644
index 0000000000..4725c7e48b
--- /dev/null
+++ b/taskcluster/docker/push-to-try/Dockerfile
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+FROM $DOCKER_IMAGE_PARENT
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+RUN apt-get update && \
+ apt-get install \
+ openssh-client
+
+COPY known_hosts /etc/ssh/ssh_known_hosts
+COPY hgrc /etc/mercurial/hgrc.d/push-to-try.rc
+
+RUN hg clone -r 19a77a5d7b2a90ba225cf5015afd1ac44787abec https://hg.mozilla.org/hgcustom/version-control-tools /builds/worker/.mozbuild/version-control-tools
+
+# Create directory for scripts to put an hg config in, for configuring ssh
+RUN mkdir /builds/worker/.config/hg
+RUN chown -R worker:worker /builds/worker/.config/hg
diff --git a/taskcluster/docker/push-to-try/hgrc b/taskcluster/docker/push-to-try/hgrc
new file mode 100644
index 0000000000..d1a0318499
--- /dev/null
+++ b/taskcluster/docker/push-to-try/hgrc
@@ -0,0 +1,2 @@
+[extensions]
+push-to-try = ~/.mozbuild/version-control-tools/hgext/push-to-try
diff --git a/taskcluster/docker/push-to-try/known_hosts b/taskcluster/docker/push-to-try/known_hosts
new file mode 100644
index 0000000000..1dc2e39e70
--- /dev/null
+++ b/taskcluster/docker/push-to-try/known_hosts
@@ -0,0 +1,2 @@
+hg.mozilla.org ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDEsS2fK+TVkHl4QvvOHB6R5xxngsSYJR+pA4+xDhw4mZT9tgCRU9BBG3LazSLp6PUxnpfok78475/tx6Z8QwbTyUTmLElZ9Z9eJzjaGz/olHzQSWv0VB3kT+VZt0LK7pEuaG+Ph/qwxbtUZZOApYLEvu8uctDlS66doofxZylbsgl1kpRQ5HNu+/DgVo9K9dyMOm9OLoy4tXHSE5pofn4tKYdFRa2lt6OVtIP5/hKNb2i0+JmgM8C3bJTPvzJ4C8p2h83ro29XPUkNAfWrgD5CmAPPqHFXyefDCfdefcvI8B8Za9v4j4LynBDZHsGfII+wIfzyLIxy9K6Op6nqDZgCciBRdgxh4uZQINEhB/JJP03Pxo42ExdG28oU3aL8kRRTORT5ehFtImFfr9QESHaUnbVzBbU5DmOB5voYDMle3RgyY+RXJ7+4OxjLRnJvGks9QCn8QrIvabs/PTCnenI8+yDhMlLUkWTiR4JK8vDBYB2Rm++EmVsN9WjllfDNg3Aj1aYe8XiBD4tS+lg7Ur4rJL8X20H4yMvq56sQ0qfH8PCIQGyGL725E7Yuwj/MHvou5xrPM/Lqo/MtX5T2njrzkeaBmI/zFJaLwbphdrwmrzepbcim7OYJFF2pz8u56KDPD1pUQ7C1gEIAx/4mHiDOGCYooSvyfD+JRdjkZUZMiQ==
+hg.mozilla.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGuRsL+/OrvIekv3iBST1zdVBLBuh/DMIj+ZN72N9a0g
diff --git a/taskcluster/docker/recipes/common.sh b/taskcluster/docker/recipes/common.sh
new file mode 100644
index 0000000000..0182154495
--- /dev/null
+++ b/taskcluster/docker/recipes/common.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+tooltool_fetch() {
+ cat >manifest.tt
+ python3 /setup/tooltool.py fetch
+ rm manifest.tt
+}
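+# Usage sketch (hypothetical manifest contents): callers pipe a tooltool
+# manifest into the function on stdin, e.g.
+#   tooltool_fetch <<'EOF'
+#   [{"size": 1234, "digest": "abc...", "algorithm": "sha512", "filename": "example.tar.xz"}]
+#   EOF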
diff --git a/taskcluster/docker/recipes/debian-test-system-setup.sh b/taskcluster/docker/recipes/debian-test-system-setup.sh
new file mode 100644
index 0000000000..a0023220a9
--- /dev/null
+++ b/taskcluster/docker/recipes/debian-test-system-setup.sh
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+
+set -ve
+
+test "$(whoami)" == 'root'
+
+mkdir -p /setup
+cd /setup
+
+apt_packages=()
+
+apt_packages+=('autoconf2.13')
+apt_packages+=('bluez-cups')
+apt_packages+=('build-essential')
+apt_packages+=('ccache')
+apt_packages+=('curl')
+apt_packages+=('fonts-kacst')
+apt_packages+=('fonts-kacst-one')
+apt_packages+=('fonts-liberation')
+apt_packages+=('fonts-stix')
+apt_packages+=('fonts-unfonts-core')
+apt_packages+=('fonts-unfonts-extra')
+apt_packages+=('fonts-vlgothic')
+apt_packages+=('g++-multilib')
+apt_packages+=('gcc-multilib')
+apt_packages+=('gir1.2-gnomebluetooth-1.0')
+apt_packages+=('gnome-keyring')
+apt_packages+=('libasound2-dev')
+apt_packages+=('libcanberra-pulse')
+apt_packages+=('libcurl4-openssl-dev')
+apt_packages+=('libdbus-1-dev')
+apt_packages+=('libdbus-glib-1-dev')
+apt_packages+=('libgconf2-dev')
+apt_packages+=('libiw-dev')
+apt_packages+=('libnotify-dev')
+apt_packages+=('libpulse-dev')
+apt_packages+=('libsox-fmt-alsa')
+apt_packages+=('libxt-dev')
+apt_packages+=('libxxf86vm1')
+apt_packages+=('llvm')
+apt_packages+=('llvm-dev')
+apt_packages+=('llvm-runtime')
+apt_packages+=('locales')
+apt_packages+=('locales-all')
+apt_packages+=('net-tools')
+apt_packages+=('openssh-client')
+apt_packages+=('qemu-kvm')
+apt_packages+=('rlwrap')
+apt_packages+=('screen')
+apt_packages+=('software-properties-common')
+apt_packages+=('sudo')
+apt_packages+=('tar')
+apt_packages+=('ttf-dejavu')
+apt_packages+=('unzip')
+apt_packages+=('uuid')
+apt_packages+=('wget')
+apt_packages+=('xdg-user-dirs')
+apt_packages+=('xvfb')
+apt_packages+=('zip')
+
+# use Ubuntu's Python-2.7 (2.7.3 on Precise)
+apt_packages+=('python-dev')
+apt_packages+=('python-pip')
+
+apt-get update
+apt-get install "${apt_packages[@]}"
+
+dpkg-reconfigure locales
+
+# pip 19.3 is causing errors building the docker image, pin to 19.2.3 for now.
+# See https://github.com/pypa/pip/issues/7206
+pip install --upgrade pip==19.2.3
+hash -r
+pip install virtualenv==15.2.0
+
+# clean up
+apt-get autoremove
+
+# We don't need no docs!
+rm -rf /usr/share/help /usr/share/doc /usr/share/man
+
+cd /
+rm -rf /setup ~/.ccache ~/.cache ~/.npm
+rm -f "$0"
diff --git a/taskcluster/docker/recipes/dot-config/pip/pip.conf b/taskcluster/docker/recipes/dot-config/pip/pip.conf
new file mode 100644
index 0000000000..73c2b2a52c
--- /dev/null
+++ b/taskcluster/docker/recipes/dot-config/pip/pip.conf
@@ -0,0 +1,2 @@
+[global]
+disable-pip-version-check = true
diff --git a/taskcluster/docker/recipes/hgrc b/taskcluster/docker/recipes/hgrc
new file mode 100755
index 0000000000..f6a2f6643c
--- /dev/null
+++ b/taskcluster/docker/recipes/hgrc
@@ -0,0 +1,33 @@
+# By default the progress bar starts after 3s and updates every 0.1s. We
+# change this so it starts after 1.0s and updates every 1.0s.
+# We also tell progress to assume a TTY is present so updates are printed
+# even if there is no known TTY.
+[progress]
+delay = 1.0
+refresh = 1.0
+assume-tty = true
+
+[extensions]
+share =
+sparse =
+robustcheckout = /usr/local/mercurial/robustcheckout.py
+
+[hostsecurity]
+# When running a modern Python, Mercurial will default to TLS 1.1+.
+# When running on a legacy Python, Mercurial will default to TLS 1.0+.
+# There is no good reason we shouldn't be running a modern Python
+# capable of speaking TLS 1.2. And the only Mercurial servers we care
+# about should be running TLS 1.2. So make TLS 1.2 the minimum.
+minimumprotocol = tls1.2
+
+# Settings to make 1-click loaners more useful.
+[extensions]
+histedit =
+rebase =
+
+[diff]
+git = 1
+showfunc = 1
+
+[pager]
+pager = LESS=FRSXQ less
diff --git a/taskcluster/docker/recipes/install-node.sh b/taskcluster/docker/recipes/install-node.sh
new file mode 100644
index 0000000000..daeba2edc9
--- /dev/null
+++ b/taskcluster/docker/recipes/install-node.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script installs Node v16.
+# XXX For now, this should match the version installed in
+# taskcluster/scripts/misc/repack-node.sh. Later we'll get the ESLint builder
+# to use the linux64-node toolchain directly.
+
+wget -O node.xz --progress=dot:mega https://nodejs.org/dist/v16.19.0/node-v16.19.0-linux-x64.tar.xz
+echo 'c88b52497ab38a3ddf526e5b46a41270320409109c3f74171b241132984fd08f' node.xz | sha256sum -c
+tar -C /usr/local -xJ --strip-components 1 < node.xz
+node -v # verify
+npm -v
diff --git a/taskcluster/docker/recipes/setup_packages.sh b/taskcluster/docker/recipes/setup_packages.sh
new file mode 100755
index 0000000000..48dbf493f3
--- /dev/null
+++ b/taskcluster/docker/recipes/setup_packages.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+TASKCLUSTER_ROOT_URL=$1
+shift
+
+# duplicate the functionality of taskcluster-lib-urls, but in bash..
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
+
+
+for task in "$@"; do
+ echo "adding package source $queue_base/task/$task/artifacts/public/build/"
+ echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ apt/" > "/etc/apt/sources.list.d/99$task.list"
+done
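+# Illustrative example: `setup_packages.sh https://firefox-ci-tc.services.mozilla.com TASK123`
+# would write /etc/apt/sources.list.d/99TASK123.list containing:
+#   deb [trusted=yes] https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/TASK123/artifacts/public/build/ apt/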
diff --git a/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh b/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh
new file mode 100644
index 0000000000..cb934529bf
--- /dev/null
+++ b/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash
+
+set -ve
+
+test "$(whoami)" == 'root'
+
+# We do want to install recommended packages.
+sed -i /APT::Install-Recommends/d /etc/apt/apt.conf.d/99taskcluster
+
+# To speed up docker image build times and reduce the amount of network/disk
+# I/O, build a list of packages to be installed and install them in one go.
+apt_packages=()
+
+apt_packages+=('autoconf2.13')
+apt_packages+=('bluez-cups')
+apt_packages+=('build-essential')
+apt_packages+=('ca-certificates')
+apt_packages+=('ccache')
+apt_packages+=('compiz')
+apt_packages+=('curl')
+apt_packages+=('fontconfig')
+apt_packages+=('fonts-kacst')
+apt_packages+=('fonts-kacst-one')
+apt_packages+=('fonts-liberation')
+apt_packages+=('fonts-stix')
+apt_packages+=('fonts-unfonts-core')
+apt_packages+=('fonts-unfonts-extra')
+apt_packages+=('fonts-vlgothic')
+apt_packages+=('g++-multilib')
+apt_packages+=('gcc-multilib')
+apt_packages+=('gir1.2-gnomebluetooth-1.0')
+apt_packages+=('git')
+apt_packages+=('gnome-icon-theme')
+apt_packages+=('gstreamer1.0-gtk3')
+apt_packages+=('gstreamer1.0-plugins-base')
+apt_packages+=('gstreamer1.0-plugins-good')
+apt_packages+=('gstreamer1.0-tools')
+apt_packages+=('gstreamer1.0-pulseaudio')
+apt_packages+=('language-pack-en-base')
+apt_packages+=('libc6-dbg')
+apt_packages+=('libasound2-dev')
+apt_packages+=('libavcodec-extra57')
+apt_packages+=('libcanberra-gtk3-module')
+apt_packages+=('libcanberra-pulse')
+apt_packages+=('libcurl4-openssl-dev')
+apt_packages+=('libdbus-1-dev')
+apt_packages+=('libdbus-glib-1-dev')
+apt_packages+=('libfreetype6')
+apt_packages+=('libgconf2-dev')
+apt_packages+=('libgl1-mesa-dri')
+apt_packages+=('libgl1-mesa-glx')
+apt_packages+=('libgstreamer-plugins-base1.0-dev')
+apt_packages+=('libgstreamer1.0-dev')
+apt_packages+=('libgtk-3-0')
+apt_packages+=('libiw-dev')
+apt_packages+=('libx11-xcb1')
+apt_packages+=('libxcb1')
+apt_packages+=('libxcb-render0')
+apt_packages+=('libxcb-shm0')
+apt_packages+=('libxcb-glx0')
+apt_packages+=('libxcb-shape0')
+apt_packages+=('libnotify-dev')
+apt_packages+=('libpulse0')
+apt_packages+=('libpulse-dev')
+apt_packages+=('libxt-dev')
+apt_packages+=('libxxf86vm1')
+apt_packages+=('llvm')
+apt_packages+=('llvm-dev')
+apt_packages+=('llvm-runtime')
+apt_packages+=('mesa-common-dev')
+apt_packages+=('net-tools')
+apt_packages+=('pulseaudio')
+apt_packages+=('pulseaudio-module-bluetooth')
+apt_packages+=('pulseaudio-module-gconf')
+apt_packages+=('python-dev')
+apt_packages+=('python-pip')
+apt_packages+=('python3-pip')
+apt_packages+=('qemu-kvm')
+apt_packages+=('rlwrap')
+apt_packages+=('screen')
+apt_packages+=('software-properties-common')
+apt_packages+=('sudo')
+apt_packages+=('ttf-dejavu')
+apt_packages+=('ubuntu-desktop')
+apt_packages+=('unzip')
+apt_packages+=('uuid')
+apt_packages+=('wget')
+apt_packages+=('xvfb')
+apt_packages+=('xwit')
+apt_packages+=('yasm')
+apt_packages+=('zip')
+
+# xvinfo for test-linux.sh to monitor Xvfb startup
+apt_packages+=('x11-utils')
+
+# Bug 1232407 - this allows the user to start vnc
+apt_packages+=('x11vnc')
+
+# Bug 1176031 - need `xset` to disable screensavers
+apt_packages+=('x11-xserver-utils')
+
+# Build a list of packages to install from the multiverse repo.
+apt_packages+=('ubuntu-restricted-extras')
+
+# APT update takes a long time on Ubuntu. Run it at the last possible moment.
+apt-get update
+
+# Also force the cleanup after installation of packages to reduce image size.
+apt-get install --allow-downgrades "${apt_packages[@]}"
+
+# Enable i386 packages
+dpkg --add-architecture i386
+apt-get update
+
+# Make sure we have libraries for 32-bit tests
+apt_packages=()
+apt_packages+=('libavcodec-extra57:i386')
+apt_packages+=('libdbus-glib-1-2:i386')
+apt_packages+=('libpulse0:i386')
+apt_packages+=('libxt6:i386')
+apt_packages+=('libxtst6:i386')
+apt_packages+=('libsecret-1-0:i386')
+apt_packages+=('libgtk-3-0:i386')
+apt_packages+=('libx11-xcb1:i386')
+apt_packages+=('libxcb1:i386')
+apt_packages+=('libasound2:i386')
+
+apt-get install --allow-downgrades "${apt_packages[@]}"
+rm -rf /var/lib/apt/lists/*
+
+# Build a list of packages to purge from the image.
+apt_packages=()
+apt_packages+=('*cheese*')
+apt_packages+=('example-content')
+apt_packages+=('git')
+apt_packages+=('gnome-calendar')
+apt_packages+=('gnome-initial-setup')
+apt_packages+=('gnome-mahjongg')
+apt_packages+=('gnome-mines')
+apt_packages+=('gnome-sudoku')
+apt_packages+=('libx11-doc')
+apt_packages+=('manpages-dev')
+apt_packages+=('orca')
+apt_packages+=('rhythmbox')
+apt_packages+=('thunderbird')
+apt_packages+=('ubuntu-release-upgrader*')
+apt_packages+=('update-manager-core')
+apt_packages+=('update-manager')
+apt_packages+=('yelp')
+
+# Purge unnecessary packages
+apt-get purge "${apt_packages[@]}"
+
+# We don't need no docs!
+rm -rf /usr/share/help /usr/share/doc /usr/share/man
+
+# Remove all locale files other than en_US.UTF-8
+rm -rf /usr/share/locale/ /usr/share/locale-langpack/ /usr/share/locales/
+
+# Further cleanup
+apt-get autoremove
+
+rm -f "$0"
diff --git a/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh b/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh
new file mode 100644
index 0000000000..e22b21f51d
--- /dev/null
+++ b/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -ve
+
+test "$(whoami)" == 'root'
+
+cd /setup
+
+# Install tooltool and node now that dependencies are in place.
+. /setup/common.sh
+. /setup/install-node.sh
+
+# Upgrade pip and install virtualenv to specified versions.
+pip install --upgrade pip==19.2.3
+hash -r
+pip install virtualenv==15.2.0
+
+pip3 install -r /setup/psutil_requirements.txt
+pip install -r /setup/psutil_requirements.txt
+
+# Cleanup
+cd /
+rm -rf /setup ~/.ccache ~/.cache ~/.npm
+rm -f "$0"
diff --git a/taskcluster/docker/recipes/xvfb.sh b/taskcluster/docker/recipes/xvfb.sh
new file mode 100644
index 0000000000..6e0e79f7d4
--- /dev/null
+++ b/taskcluster/docker/recipes/xvfb.sh
@@ -0,0 +1,75 @@
+#! /bin/bash -x
+
+set -x
+
+fail() {
+ echo # make sure error message is on a new line
+ echo "[xvfb.sh:error]" "${@}"
+ exit 1
+}
+
+cleanup_xvfb() {
+  # When this script is called with START_VNC or TASKCLUSTER_INTERACTIVE set,
+  # we make sure we do not kill Xvfb so you do not lose your connection.
+ local xvfb_pid=`pidof Xvfb`
+ local vnc=${START_VNC:-false}
+ local interactive=${TASKCLUSTER_INTERACTIVE:-false}
+ if [ -n "$xvfb_pid" ] && [[ $vnc == false ]] && [[ $interactive == false ]] ; then
+ kill $xvfb_pid || true
+ screen -XS xvfb quit || true
+ fi
+}
+
+# Attempt to start xvfb in a screen session with the given resolution and display
+# number. Up to 5 attempts will be made to start xvfb with a short delay
+# between retries
+try_xvfb() {
+ screen -dmS xvfb Xvfb :$2 -nolisten tcp -screen 0 $1 \
+ > ~/artifacts/xvfb/xvfb.log 2>&1
+ export DISPLAY=:$2
+
+  # Only error code 255 matters, because it signifies that no
+  # display could be opened. As long as we can open the display,
+  # tests should work. We'll retry a few times with a sleep before
+  # failing.
+ local retry_count=0
+ local max_retries=5
+ xvfb_test=0
+ until [ $retry_count -gt $max_retries ]; do
+ xvinfo || xvfb_test=$?
+ if [ $xvfb_test != 255 ]; then
+ retry_count=$(($max_retries + 1))
+ else
+ retry_count=$(($retry_count + 1))
+ echo "Failed to start Xvfb, retry: $retry_count"
+ sleep 2
+ fi
+ done
+ if [ $xvfb_test == 255 ]; then
+ return 1
+ else
+ return 0
+ fi
+}
+
+start_xvfb() {
+ set +e
+ mkdir -p ~/artifacts/xvfb
+ local retry_count=0
+ local max_retries=2
+ local success=1
+ until [ $retry_count -gt $max_retries ]; do
+ try_xvfb $1 $2
+ success=$?
+ if [ $success -eq 0 ]; then
+ retry_count=$(($max_retries + 1))
+ else
+ retry_count=$(($retry_count + 1))
+ sleep 10
+ fi
+ done
+ set -e
+ if [ $success -eq 1 ]; then
+ fail "Could not start xvfb after ${max_retries} attempts"
+ fi
+}
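+# Typical call site (illustrative resolution and display number):
+#   . xvfb.sh && start_xvfb '1600x1200x24' 0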
diff --git a/taskcluster/docker/sentry/Dockerfile b/taskcluster/docker/sentry/Dockerfile
new file mode 100644
index 0000000000..2f606c5529
--- /dev/null
+++ b/taskcluster/docker/sentry/Dockerfile
@@ -0,0 +1,11 @@
+FROM $DOCKER_IMAGE_PARENT
+LABEL maintainer="Mitchell Hentges mhentges@mozilla.com"
+
+VOLUME /builds/worker/checkouts
+
+ADD prepare.sh /setup/prepare-docker.sh
+ADD submit_sentry_release.sh /usr/bin/submit_sentry_release.sh
+RUN /bin/bash /setup/prepare-docker.sh && rm -R /setup
+
+# Set a default command useful for debugging
+CMD ["/bin/sh", "--login"]
diff --git a/taskcluster/docker/sentry/prepare.sh b/taskcluster/docker/sentry/prepare.sh
new file mode 100755
index 0000000000..7cce3e1955
--- /dev/null
+++ b/taskcluster/docker/sentry/prepare.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+set -o errexit
+set -o nounset
+set -o xtrace
+
+apt-get -y update
+
+# Install:
+# * curl to fetch sentry-cli
+# * jq to parse hgmo pushlog
+apt-get install -y \
+ curl \
+ jq
+
+# Install sentry-cli to publish releases
+curl -L https://github.com/getsentry/sentry-cli/releases/download/1.63.1/sentry-cli-Linux-x86_64 -o /usr/bin/sentry-cli
+chmod +x /usr/bin/sentry-cli
diff --git a/taskcluster/docker/sentry/submit_sentry_release.sh b/taskcluster/docker/sentry/submit_sentry_release.sh
new file mode 100644
index 0000000000..136e06a58a
--- /dev/null
+++ b/taskcluster/docker/sentry/submit_sentry_release.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+set -o nounset
+set -o pipefail
+
+run() {
+ revisions=$(curl "$HG_PUSHLOG_URL" | jq -c -r ".pushes[].changesets | @sh" | tr -d \') || return 1
+ sentry_api_key=$(curl "http://taskcluster/secrets/v1/secret/$SENTRY_SECRET" | jq -r ".secret.sentryToken") || return 1
+ for revision in $revisions; do
+ SENTRY_AUTH_TOKEN=$sentry_api_key SENTRY_ORG=mozilla sentry-cli --url https://sentry.io/ releases --project mach new "hg-rev-$revision" || return 1
+ done
+}
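+# For reference, the pushlog JSON consumed above is shaped roughly like
+# (illustrative values): {"pushes": {"12345": {"changesets": ["abc123", "def456"]}}},
+# so `.pushes[].changesets | @sh` yields a space-separated, quoted revision list
+# and `tr -d \'` strips the quotes.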
+
+with_backoff() {
+ failures=0
+ while ! "$@"; do
+ failures=$(( failures + 1 ))
+ if (( failures >= 5 )); then
+ echo "[with_backoff] Unable to succeed after 5 tries, failing the job."
+ return 1
+ else
+ seconds=$(( 2 ** (failures - 1) ))
+ echo "[with_backoff] Retrying in $seconds second(s)"
+ sleep $seconds
+ fi
+ done
+}
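+# Retry schedule: sleeps of 1s, 2s, 4s and 8s between attempts, giving up after
+# the fifth consecutive failure.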
+
+with_backoff run
diff --git a/taskcluster/docker/static-analysis-build/Dockerfile b/taskcluster/docker/static-analysis-build/Dockerfile
new file mode 100644
index 0000000000..e53ff4fa2e
--- /dev/null
+++ b/taskcluster/docker/static-analysis-build/Dockerfile
@@ -0,0 +1,61 @@
+FROM $DOCKER_IMAGE_PARENT
+LABEL maintainer="Andi-Bogdan Postelnicu <andi@mozilla.com>"
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+RUN apt-get update && \
+ apt-get install \
+ autoconf2.13 \
+ automake \
+ bison \
+ bzip2 \
+ cmake \
+ flex \
+ curl \
+ libsqlite3-dev \
+ file \
+ gawk \
+ gcc-multilib \
+ gnupg \
+ jq \
+ libc6-dev \
+ libstdc++-*-dev \
+ libmpfr-dev \
+ nasm \
+ opam \
+ patch \
+ p7zip-full \
+ procps \
+ python3-dev \
+ rsync \
+ screen \
+ tar \
+ unzip \
+ uuid \
+ valgrind \
+ wget \
+ zip \
+ zlib1g-dev \
+ x11-utils \
+ xvfb \
+ linux-libc-dev \
+ libdbus-glib-1-dev \
+ libfontconfig1-dev \
+ libfreetype6-dev \
+ libgconf2-dev \
+ libgmp-dev \
+ libgtk-3-dev \
+ libpango1.0-dev \
+ libpulse-dev \
+ libx11-xcb-dev \
+ libxt-dev \
+ lib32z1 \
+ patchelf
diff --git a/taskcluster/docker/system-symbols-linux-scraper/Dockerfile b/taskcluster/docker/system-symbols-linux-scraper/Dockerfile
new file mode 100644
index 0000000000..1b2c340ff0
--- /dev/null
+++ b/taskcluster/docker/system-symbols-linux-scraper/Dockerfile
@@ -0,0 +1,28 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Gabriele Svelto <gsvelto@mozilla.com>
+
+RUN mkdir -p /builds
+RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
+
+# We need to declare all potential cache paths as volumes. Also,
+# making high-I/O paths volumes increases I/O throughput because of
+# AUFS slowness.
+VOLUME /builds/worker/checkouts
+
+RUN apt-get update && \
+ apt-get install --no-install-recommends -y \
+ binutils build-essential cpio curl elfutils jq libxml2-utils \
+ python3-pip rpm2cpio squashfs-tools unzip wget zip && \
+ apt-get autoremove -y && rm -rf /var/lib/apt/lists/*
+
+COPY run.sh /builds/worker/run.sh
+
+COPY SHA256SUMS.txt /builds/worker/
+
+RUN chown -R worker:worker /builds/worker
+
+RUN pip3 install crashstats-tools==1.3.0
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/system-symbols-linux-scraper/SHA256SUMS.txt b/taskcluster/docker/system-symbols-linux-scraper/SHA256SUMS.txt
new file mode 100644
index 0000000000..46a456d21a
--- /dev/null
+++ b/taskcluster/docker/system-symbols-linux-scraper/SHA256SUMS.txt
@@ -0,0 +1,6 @@
+b4ba66e218aa6f27e19bfbc48142f86f3fddb64b6540fd5beb30bad11f73a409 arch/SHA256SUMS.zip
+6083ab8cb46a5be2a4d9bc51dce1443375f71d9536c325da39a14ba68c1de81a debian/SHA256SUMS.zip
+64003a50133900e5d01b29e37022b3029652788a0e094b67330e5e32f1bf5e12 fedora/SHA256SUMS.zip
+00e3e54788b2d7867535eb25bc9ce41541dc6299dca0a8ce8d0d4e5b49839ac3 mint/SHA256SUMS.zip
+c4d02cc24f2a50b0caea20dd5fd0cd6bc960d0a619e8a355f7fec14a766e7966 opensuse/SHA256SUMS.zip
+aed77c596f5652f4ecdc48b37aaa1c2b0b516c7e74ccc0cf183e7d7de101b6d7 ubuntu/SHA256SUMS.zip
diff --git a/taskcluster/docker/system-symbols-linux-scraper/run.sh b/taskcluster/docker/system-symbols-linux-scraper/run.sh
new file mode 100755
index 0000000000..d7837d112e
--- /dev/null
+++ b/taskcluster/docker/system-symbols-linux-scraper/run.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+
+set -ex
+
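+# Usage: run.sh <distro>, where <distro> matches a directory in the
+# symbol-scrapers repository (e.g. debian, fedora, ubuntu).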
+SHA256SUMS=SHA256SUMS.zip
+
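+# Fetch this task's definition from the Taskcluster queue and return its
+# "latest" index route (the one without a pushdate), with the leading
+# "index." prefix stripped.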
+function get_route()
+{
+ local task_url=${TASKCLUSTER_ROOT_URL}/api/queue/v1/task/${TASK_ID}
+ local payload
+ payload=$(curl -sSL "${task_url}")
+
+ local route
+ route=$(echo "${payload}" | jq -r '.routes[] | select(contains("latest")) | select(contains("pushdate") | not) ' | sed -e 's/^index\.//')
+ echo "${route}"
+}
+
+function get_sha256sum_url()
+{
+ local route
+ route=$(get_route)
+ local sha256sums_url=${TASKCLUSTER_ROOT_URL}/api/index/v1/task/${route}/artifacts/public/build/${SHA256SUMS}
+ echo "${sha256sums_url}"
+}
+
+function has_sha256sums_on_index()
+{
+ local url
+ url=$(get_sha256sum_url)
+ curl -sSL --head --fail -o /dev/null "${url}"
+}
+
+function download_verify_extract_sha256sums()
+{
+ local url=$1
+
+ if [ ! -f "/builds/worker/SHA256SUMS.txt" ]; then
+ echo "Missing checksums, aborting."
+ exit 1
+ fi
+
+ curl -sSL "${url}" -o ${SHA256SUMS}
+
+    # We verify SHA256SUMS when we bootstrap and thus download it from GitHub.
+    # The one downloaded from Taskcluster will have been updated by previous tasks.
+ if [ -n "${BOOTSTRAP_SHA256}" ]; then
+ echo "BOOTSTRAP_SHA256 was set, ensuring consistent set of SHA256SUMS"
+ local sha_check
+ sha256sum --quiet --status -c <(grep "${DISTRO}/" /builds/worker/SHA256SUMS.txt | sed -e "s/${DISTRO}\///")
+ sha_check=$?
+ if [ ${sha_check} -ne 0 ]; then
+ echo "Checksum verification failed, aborting."
+ exit 1
+ fi
+ fi
+
+ unzip ${SHA256SUMS} && rm ${SHA256SUMS}
+}
+
+DISTRO=$1
+
+sha256=https://mozilla.github.io/symbol-scrapers/${DISTRO}/${SHA256SUMS}
+if [ -z "${BOOTSTRAP_SHA256}" ]; then
+ if has_sha256sums_on_index; then
+ sha256=$(get_sha256sum_url)
+ fi
+fi
+
+mkdir -p /builds/worker/artifacts/
+
+pushd "${MOZ_FETCHES_DIR}/symbol-scrapers/${DISTRO}"
+ download_verify_extract_sha256sums "${sha256}"
+ DUMP_SYMS=${MOZ_FETCHES_DIR}/dump_syms/dump_syms /bin/bash script.sh
+ zip -r9 /builds/worker/artifacts/${SHA256SUMS} SHA256SUMS
+ cp wget*.log /builds/worker/artifacts/
+popd
+
+if [ ! -f "/builds/worker/artifacts/target.crashreporter-symbols.zip" ]; then
+ echo "No symbols zip produced, upload task will fail"
+fi
diff --git a/taskcluster/docker/system-symbols-mac/Dockerfile b/taskcluster/docker/system-symbols-mac/Dockerfile
new file mode 100644
index 0000000000..6b4c1fbec2
--- /dev/null
+++ b/taskcluster/docker/system-symbols-mac/Dockerfile
@@ -0,0 +1,37 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Gabriele Svelto <gsvelto@mozilla.com>
+
+RUN mkdir -p /builds
+RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
+
+# We need to declare all potential cache paths as volumes. Also,
+# making high-I/O paths volumes increases I/O throughput because of
+# AUFS slowness.
+VOLUME /builds/worker/checkouts
+
+RUN apt-get update && \
+ apt-get install --no-install-recommends -y python3-dev python3-pip python3-requests-futures python3-setuptools curl pax build-essential libxml2-dev libssl-dev zlib1g-dev libusb-dev cmake libbz2-dev libpng-dev wget zip liblzma-dev && \
+ rm -rf /var/lib/apt/lists/*
+
+RUN mkdir /opt/data-reposado/
+RUN chown -R worker.worker /opt/data-reposado/
+
+COPY setup.sh /setup/
+COPY requirements.txt /setup/
+# %include tools/crashreporter/system-symbols/mac
+
+RUN /bin/sh /setup/setup.sh
+
+ADD https://github.com/marco-c/breakpad-mac-update-symbols/raw/21221733edfbcac49d40e50fc219fab7d17437a0/lipo /usr/local/bin/
+RUN chmod +x /usr/local/bin/lipo
+
+COPY topsrcdir/tools/crashreporter/system-symbols/mac /builds/worker/mac-update-symbols
+# %include python/mozbuild/mozpack/macpkg.py
+COPY topsrcdir/python/mozbuild/mozpack/macpkg.py /builds/worker/mac-update-symbols/mozpack/
+WORKDIR /builds/worker/mac-update-symbols
+
+RUN chown -R worker:worker /builds/worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/system-symbols-mac/requirements.txt b/taskcluster/docker/system-symbols-mac/requirements.txt
new file mode 100644
index 0000000000..775756115c
--- /dev/null
+++ b/taskcluster/docker/system-symbols-mac/requirements.txt
@@ -0,0 +1,2 @@
+futures
+requests
diff --git a/taskcluster/docker/system-symbols-mac/setup.sh b/taskcluster/docker/system-symbols-mac/setup.sh
new file mode 100644
index 0000000000..c0c4e766ac
--- /dev/null
+++ b/taskcluster/docker/system-symbols-mac/setup.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+set -v -e -x
+
+ncpu=-j$(grep -c ^processor /proc/cpuinfo)
+
+WORK=/setup/
+cd $WORK
+git clone --depth=1 --single-branch -b system-symbols-mac https://github.com/gabrielesvelto/libdmg-hfsplus.git
+cd libdmg-hfsplus
+cmake .
+make "$ncpu" dmg-bin hfsplus
+# `make install` installs way too much stuff
+cp dmg/dmg hfs/hfsplus /builds/worker/bin
+strip /builds/worker/bin/dmg /builds/worker/bin/hfsplus
+
+pip3 install --no-cache-dir git+https://github.com/mozilla/reposado
+
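+# Answer repoutil --configure's prompts non-interactively: the local html path,
+# the metadata path, and a placeholder base URL.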
+python3 /usr/local/bin/repoutil --configure <<EOF
+/opt/data-reposado/html/
+/opt/data-reposado/metadata/
+http://example.com/
+EOF
+
+pip3 install --no-cache-dir -r /setup/requirements.txt
+
+cd /
+rm -rf /setup
diff --git a/taskcluster/docker/system-symbols-win/Dockerfile b/taskcluster/docker/system-symbols-win/Dockerfile
new file mode 100644
index 0000000000..93a91bf199
--- /dev/null
+++ b/taskcluster/docker/system-symbols-win/Dockerfile
@@ -0,0 +1,26 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Gabriele Svelto <gsvelto@mozilla.com>
+
+RUN mkdir -p /builds
+RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
+
+# We need to declare all potential cache paths as volumes. Also,
+# making high-I/O paths volumes increases I/O throughput because of
+# AUFS slowness.
+VOLUME /builds/worker/checkouts
+
+COPY requirements.txt /builds/worker/requirements.txt
+RUN apt-get update && \
+ apt-get install --no-install-recommends -y gcc python3-dev python3-pip python3-setuptools libffi-dev && \
+ rm -rf /var/lib/apt/lists/*
+RUN pip3 install pip==20.3.4 --upgrade
+RUN pip3 install --no-cache-dir --require-hashes -r /builds/worker/requirements.txt
+
+# %include tools/crashreporter/system-symbols/win
+COPY topsrcdir/tools/crashreporter/system-symbols/win /builds/worker
+
+RUN chown -R worker:worker /builds/worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/system-symbols-win/requirements.txt b/taskcluster/docker/system-symbols-win/requirements.txt
new file mode 100644
index 0000000000..d717b6efda
--- /dev/null
+++ b/taskcluster/docker/system-symbols-win/requirements.txt
@@ -0,0 +1,19 @@
+aiodns==3.0.0 --hash=sha256:2b19bc5f97e5c936638d28e665923c093d8af2bf3aa88d35c43417fa25d136a2
+aiofile==3.7.4 --hash=sha256:0e2a524e4714efda47ce8964b13d4da94cf553411f9f6da813df615a4cd73d95
+aiohttp==3.8.1 --hash=sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd
+aiosignal==1.2.0 --hash=sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a
+asyncio==3.4.3 --hash=sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d
+asynctest==0.13.0 --hash=sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676
+async-timeout==4.0.2 --hash=sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c
+attrs==21.4.0 --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4
+caio==0.9.0 --hash=sha256:399de8f266ede4e87ffa1c647c8bb6859df14039354b6fcf74a47d3d756fdc41
+cffi==1.15.0 --hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954
+chardet==4.0.0 --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5
+charset-normalizer==2.0.12 --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
+frozenlist==1.3.0 --hash=sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51
+idna==3.3 --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff
+multidict==6.0.2 --hash=sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013
+pycares==4.1.2 --hash=sha256:dc942692fca0e27081b7bb414bb971d34609c80df5e953f6d0c62ecc8019acd9
+pycparser==2.21 --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9
+typing_extensions==4.2.0 --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708
+yarl==1.7.2 --hash=sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794
diff --git a/taskcluster/docker/toolchain-build/Dockerfile b/taskcluster/docker/toolchain-build/Dockerfile
new file mode 100644
index 0000000000..6e56172ab2
--- /dev/null
+++ b/taskcluster/docker/toolchain-build/Dockerfile
@@ -0,0 +1,62 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+ARG DOCKER_IMAGE_PACKAGES
+ARG TASKCLUSTER_ROOT_URL
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+RUN apt-get update && \
+ apt-get install \
+ autoconf \
+ autoconf2.13 \
+ automake \
+ bison \
+ build-essential \
+ cabextract \
+ curl \
+ clang-13 \
+ cmake \
+ flex \
+ gawk \
+ gcc-multilib \
+ git \
+ gnupg \
+ jq \
+ libbz2-dev \
+ libexpat1-dev \
+ libffi-dev \
+ libncursesw5-dev \
+ libsqlite3-dev \
+ libssl-dev \
+ libtool \
+ libucl-dev \
+ libxml2-dev \
+ lld-13 \
+ msitools \
+ ninja-build \
+ openssh-client \
+ p7zip-full \
+ pkg-config \
+ procps \
+ python3-distutils-extra \
+ python3-requests \
+ python3-toml \
+ scons \
+ subversion \
+ tar \
+ unzip \
+ uuid \
+ uuid-dev \
+ wget \
+ zip \
+ zlib1g-dev
+
+# mmdebstrap is not available in jessie, but we only need it in the buster-based image.
+# We also want zstd when available.
+RUN apt-get install fakechroot fakeroot mmdebstrap zstd || true
diff --git a/taskcluster/docker/ubuntu1804-base/Dockerfile b/taskcluster/docker/ubuntu1804-base/Dockerfile
new file mode 100644
index 0000000000..3a54e58180
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-base/Dockerfile
@@ -0,0 +1,70 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+### Add worker user and setup its workspace.
+RUN mkdir /builds && \
+ groupadd -g 1000 worker && \
+ useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker && \
+ mkdir -p /builds/worker/workspace && \
+ chown -R worker:worker /builds
+
+# Declare default working folder
+WORKDIR /builds/worker
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+# Set variables normally configured at login by the shell's parent process; these
+# are taken from the GNU su manual.
+ENV HOME=/builds/worker \
+ SHELL=/bin/bash \
+ USER=worker \
+ LOGNAME=worker \
+ HOSTNAME=taskcluster-worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
+ apt-get update && \
+ apt-get dist-upgrade && \
+ apt-get install \
+ git \
+ less \
+ make \
+ mercurial \
+ patch \
+ python3.7 \
+ python3.7-dev \
+ python3.7-venv \
+ python3-distutils-extra \
+ python3-minimal \
+ python3-zstandard \
+ python3-psutil \
+ python3-venv \
+ vim-tiny \
+ xz-utils \
+ zstd
+
+RUN test $(readlink /usr/bin/python3) = python3.6 && ln -sf python3.7 /usr/bin/python3
+
+# %include testing/mozharness/external_tools/robustcheckout.py
+COPY topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
+
+# %include taskcluster/docker/recipes/hgrc
+COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc
+
+# Add pip configuration, among other things.
+# %include taskcluster/docker/recipes/dot-config
+COPY topsrcdir/taskcluster/docker/recipes/dot-config /builds/worker/.config
+
+# %include taskcluster/scripts/run-task
+COPY topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task
+
+# %include taskcluster/scripts/misc/fetch-content
+ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
diff --git a/taskcluster/docker/ubuntu1804-test-base/Dockerfile b/taskcluster/docker/ubuntu1804-test-base/Dockerfile
new file mode 100644
index 0000000000..a8556340e5
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test-base/Dockerfile
@@ -0,0 +1,24 @@
+# This docker image only sets up packages on top of the original Ubuntu image.
+# Everything else is set up in a child docker image derived from this one, such
+# that changes to our scripts don't trigger the rebuild of this base image,
+# possibly leading to breakage. (See bug 1503756 and bug 1678451 for examples
+# of such recent breakages)
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Edwin Takahashi <egao@mozilla.com>
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+# %include taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh
+ADD topsrcdir/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh /setup/system-setup.sh
+RUN bash /setup/system-setup.sh
+
+# gnome-keyring-daemon is configured to have the IPC_LOCK capability (to lock pages with secrets in
+# memory), but docker isn't run with that capability granted. So, if we were to try running
+# gnome-keyring-daemon without first clearing the capability, it would just exit with the message
+# "Operation not permitted". Luckily it doesn't actually require the capability.
+RUN setcap -r /usr/bin/gnome-keyring-daemon
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/ubuntu1804-test/Dockerfile b/taskcluster/docker/ubuntu1804-test/Dockerfile
new file mode 100644
index 0000000000..69385ca632
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/Dockerfile
@@ -0,0 +1,86 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Edwin Takahashi <egao@mozilla.com>
+
+# Create necessary directories
+RUN mkdir -p /artifacts
+
+# We need to declare all potential cache paths as volumes. Also,
+# making high-I/O paths volumes increases I/O throughput because of
+# AUFS slowness.
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+
+# In test.sh we accept START_VNC to start a vnc daemon.
+# Exposing this port allows it to work.
+EXPOSE 5900
+
+# %include python/mozbuild/mozbuild/action/tooltool.py
+ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
+
+# %include taskcluster/docker/recipes/common.sh
+ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh
+
+# %include taskcluster/docker/recipes/install-node.sh
+ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh
+
+# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
+# %include taskcluster/docker/recipes/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
+
+ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/worker/scripts/tooltool.py
+
+# %include build/psutil_requirements.txt
+ADD topsrcdir/build/psutil_requirements.txt /setup/psutil_requirements.txt
+# %include taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh
+ADD topsrcdir/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh /setup/system-setup.sh
+RUN bash /setup/system-setup.sh
+
+# %include taskcluster/scripts/tester/test-linux.sh
+ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /builds/worker/bin/test-linux.sh
+
+# Set variables normally configured at login by the shell's parent process; these
+# are taken from the GNU su manual.
+ENV LANG en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+ENV PATH $PATH:/builds/worker/bin
+
+# This helps avoid forgetting to set DISPLAY=:0 when running
+# tests outside of test.sh
+ENV DISPLAY :0
+
+# Add utilities and configuration
+COPY dot-files/config /builds/worker/.config
+
+# Disable apport (app crash reporter) to avoid stealing focus from test runs
+ADD apport /etc/default/apport
+
+# Disable font antialiasing for now to match releng's setup
+ADD fonts.conf /builds/worker/.fonts.conf
+
+# Bug 1345105 - Do not run periodical update checks and downloads
+ADD autostart/gnome-software-service.desktop /etc/xdg/autostart/
+
+# allow the worker user to access video devices
+RUN usermod -a -G video worker
+
+# Set execution and ownership privileges
+RUN chmod +x bin/*; chown -R worker:worker /builds/worker
+
+# Set up first-run experience for interactive mode
+ADD motd /etc/taskcluster-motd
+ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
+RUN chmod +x /bin/taskcluster-interactive-shell
+
+# Bug 1638183 - increase xserver maximum client count
+COPY dot-files/config/xorg/99-serverflags.conf /usr/share/X11/xorg.conf.d/
+
+# Ubuntu 18.04 (and other GNOME3-based systems) needs dbus for various
+# tests to function as expected. Use the entrypoint to initialize dbus as root.
+COPY dbus.sh /usr/local/bin/dbus.sh
+RUN chmod +x /usr/local/bin/dbus.sh
+ENTRYPOINT ["/usr/local/bin/dbus.sh"]
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/ubuntu1804-test/apport b/taskcluster/docker/ubuntu1804-test/apport
new file mode 100644
index 0000000000..42e5f8d3a1
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/apport
@@ -0,0 +1 @@
+enabled=0
diff --git a/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop b/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop
new file mode 100644
index 0000000000..c3b8a4c676
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop
@@ -0,0 +1,19 @@
+[Desktop Entry]
+Version=1.0
+X-Ubuntu-Gettext-Domain=deja-dup
+
+Name=Backup Monitor
+Comment=Schedules backups at regular intervals
+
+Icon=deja-dup
+TryExec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
+Exec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
+
+# Bug 984944/1240084 - It prevents taking screenshots
+X-GNOME-Autostart-Delay=false
+
+StartupNotify=false
+NoDisplay=true
+
+Type=Application
+Categories=System;Utility;Archiving;
diff --git a/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop b/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop
new file mode 100644
index 0000000000..b563cc306b
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop
@@ -0,0 +1,9 @@
+[Desktop Entry]
+Type=Application
+Name=GNOME Software
+Exec=/usr/bin/gnome-software --gapplication-service
+OnlyShowIn=GNOME;Unity;
+X-Ubuntu-Gettext-Domain=gnome-software
+
+# Bug 1345105 - Do not run periodical update checks and downloads
+X-GNOME-Autostart-enabled=false
diff --git a/taskcluster/docker/ubuntu1804-test/dbus.sh b/taskcluster/docker/ubuntu1804-test/dbus.sh
new file mode 100644
index 0000000000..f27ec664f1
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/dbus.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -e
+
+/etc/init.d/dbus start 2>&1
+
+exec "${@}"
diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs
new file mode 100644
index 0000000000..2db2718d24
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs
@@ -0,0 +1,15 @@
+# This file is written by xdg-user-dirs-update
+# If you want to change or add directories, just edit the line you're
+# interested in. All local changes will be retained on the next run
+# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped
+# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an
+# absolute path. No other format is supported.
+
+XDG_DESKTOP_DIR="$HOME/Desktop"
+XDG_DOWNLOAD_DIR="$HOME/Downloads"
+XDG_TEMPLATES_DIR="$HOME/Templates"
+XDG_PUBLICSHARE_DIR="$HOME/Public"
+XDG_DOCUMENTS_DIR="$HOME/Documents"
+XDG_MUSIC_DIR="$HOME/Music"
+XDG_PICTURES_DIR="$HOME/Pictures"
+XDG_VIDEOS_DIR="$HOME/Videos"
diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale
new file mode 100644
index 0000000000..7741b83a3e
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale
@@ -0,0 +1 @@
+en_US
diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf b/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf
new file mode 100644
index 0000000000..4c335f44e3
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf
@@ -0,0 +1,3 @@
+Section "ServerFlags"
+ Option "MaxClients" "2048"
+EndSection \ No newline at end of file
diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf b/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf
new file mode 100644
index 0000000000..4b2dab2a66
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf
@@ -0,0 +1 @@
+autospawn = no \ No newline at end of file
diff --git a/taskcluster/docker/ubuntu1804-test/fonts.conf b/taskcluster/docker/ubuntu1804-test/fonts.conf
new file mode 100644
index 0000000000..9784fcc981
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/fonts.conf
@@ -0,0 +1,5 @@
+<match target="font">
+ <edit name="antialias" mode="assign">
+ <bool>false</bool>
+ </edit>
+</match>
diff --git a/taskcluster/docker/ubuntu1804-test/motd b/taskcluster/docker/ubuntu1804-test/motd
new file mode 100644
index 0000000000..f958393cd8
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/motd
@@ -0,0 +1,6 @@
+Welcome to your taskcluster interactive shell! The regularly scheduled task
+has been paused to give you a chance to set up your debugging environment.
+
+For your convenience, the exact mozharness command needed for this task can
+be invoked using the 'run-mozharness' command.
+
diff --git a/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell b/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell
new file mode 100644
index 0000000000..c782c0ea96
--- /dev/null
+++ b/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+download() {
+ name=`basename $1`
+ url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1
+ if ! curl --fail --silent -o ./$name --retry 10 $url; then
+ fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}"
+ fi
+}
+
+cd $HOME/bin;
+download taskcluster/scripts/tester/run-wizard;
+chmod +x run-wizard;
+./run-wizard;
+
+SPAWN="$SHELL";
+if [ "$SHELL" = "bash" ]; then
+ SPAWN="bash -li";
+fi;
+
+cd $HOME;
+exec $SPAWN;
diff --git a/taskcluster/docker/update-verify/Dockerfile b/taskcluster/docker/update-verify/Dockerfile
new file mode 100644
index 0000000000..5cddeac1f0
--- /dev/null
+++ b/taskcluster/docker/update-verify/Dockerfile
@@ -0,0 +1,32 @@
+# This is an LTS! We should upgrade after the next LTS is released, unless
+# we've switched to the in-tree debian base images by then.
+FROM $DOCKER_IMAGE_PARENT
+
+MAINTAINER release@mozilla.com
+
+# We do want to install recommended packages.
+RUN sed -i /APT::Install-Recommends/d /etc/apt/apt.conf.d/99taskcluster
+
+RUN dpkg --add-architecture i386 && apt-get -q update \
+ # p7zip-full is for extracting Windows and OS X packages
+ # wget is for downloading update.xml, installers, and MARs
+ # libgtk-3-0 is required to run the Firefox updater
+ && apt-get -q --yes install p7zip-full wget libgtk-3-0 libgtk-3.0:i386 python3-cairo
+
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
+
+RUN mkdir /build
+ADD system-setup.sh /tmp/system-setup.sh
+RUN bash /tmp/system-setup.sh
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
+ENV PATH $PATH:/builds/worker/bin
+
+# Set variables normally configured at login by the shell's parent process; these
+# are taken from the GNU su manual.
+ENV LANG en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/update-verify/system-setup.sh b/taskcluster/docker/update-verify/system-setup.sh
new file mode 100644
index 0000000000..8a55f255f8
--- /dev/null
+++ b/taskcluster/docker/update-verify/system-setup.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+
+set -ve
+
+test "$(whoami)" == 'root'
+
+mkdir -p /setup
+cd /setup
+
+apt_packages=()
+apt_packages+=('curl')
+apt_packages+=('locales')
+apt_packages+=('python3-pip')
+apt_packages+=('python3-aiohttp')
+apt_packages+=('shellcheck')
+apt_packages+=('sudo')
+
+apt-get update
+apt-get install "${apt_packages[@]}"
+
+# Without this we get spurious "LC_ALL: cannot change locale (en_US.UTF-8)" errors,
+# and python scripts raise UnicodeEncodeError when trying to print unicode characters.
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
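+# Give the worker user a default git identity.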
+su -c 'git config --global user.email "worker@mozilla.test"' worker
+su -c 'git config --global user.name "worker"' worker
+
+rm -rf /setup
diff --git a/taskcluster/docker/updatebot/Dockerfile b/taskcluster/docker/updatebot/Dockerfile
new file mode 100644
index 0000000000..a58c9c07a8
--- /dev/null
+++ b/taskcluster/docker/updatebot/Dockerfile
@@ -0,0 +1,24 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Tom Ritter <tom@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+
+# %include taskcluster/docker/recipes/install-node.sh
+ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh
+
+COPY privileged-setup.sh /setup/privileged-setup.sh
+COPY updatebot-version.sh /setup/updatebot-version.sh
+COPY setup.sh /builds/worker/setup.sh
+COPY run.py /builds/worker/run.py
+COPY hgrc /etc/mercurial/hgrc.d/updatebot.rc
+
+RUN cd /setup && ./privileged-setup.sh
+
+ENV HOME /builds/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+ENV PYTHONUNBUFFERED 1
+ENV PATH "/builds/worker/go/bin:/builds/worker/fetches/rustc/bin:/builds/worker/fetches/cargo-vet:${PATH}"
+
+RUN cd /builds/worker && ./setup.sh
diff --git a/taskcluster/docker/updatebot/VERSION b/taskcluster/docker/updatebot/VERSION
new file mode 100644
index 0000000000..56a6051ca2
--- /dev/null
+++ b/taskcluster/docker/updatebot/VERSION
@@ -0,0 +1 @@
+1 \ No newline at end of file
diff --git a/taskcluster/docker/updatebot/arcanist_patch_size.patch b/taskcluster/docker/updatebot/arcanist_patch_size.patch
new file mode 100644
index 0000000000..7540c76fbd
--- /dev/null
+++ b/taskcluster/docker/updatebot/arcanist_patch_size.patch
@@ -0,0 +1,13 @@
+diff --git a/src/workflow/ArcanistDiffWorkflow.php b/src/workflow/ArcanistDiffWorkflow.php
+index 38aa4b62..c9b7e215 100644
+--- a/src/workflow/ArcanistDiffWorkflow.php
++++ b/src/workflow/ArcanistDiffWorkflow.php
+@@ -899,7 +899,7 @@ EOTEXT
+ throw new Exception(pht('Repository API is not supported.'));
+ }
+
+- $limit = 1024 * 1024 * 4;
++ $limit = 1024 * 1024 * 12;
+ foreach ($changes as $change) {
+ $size = 0;
+ foreach ($change->getHunks() as $hunk) {
diff --git a/taskcluster/docker/updatebot/arcanist_windows_stream.patch b/taskcluster/docker/updatebot/arcanist_windows_stream.patch
new file mode 100644
index 0000000000..953fbdce85
--- /dev/null
+++ b/taskcluster/docker/updatebot/arcanist_windows_stream.patch
@@ -0,0 +1,19 @@
+diff --git a/src/channel/PhutilSocketChannel.php b/src/channel/PhutilSocketChannel.php
+index 4bd2a47a..2ad3fd85 100644
+--- a/src/channel/PhutilSocketChannel.php
++++ b/src/channel/PhutilSocketChannel.php
+@@ -46,9 +46,11 @@ final class PhutilSocketChannel extends PhutilChannel {
+ if (!$socket) {
+ continue;
+ }
+- $ok = stream_set_blocking($socket, false);
+- if (!$ok) {
+- throw new Exception(pht('Failed to set socket nonblocking!'));
++ if (strtoupper(substr(PHP_OS, 0, 3)) !== 'WIN') {
++ $ok = stream_set_blocking($socket, false);
++ if (!$ok) {
++ throw new Exception(pht('Failed to set socket nonblocking!'));
++ }
+ }
+ }
+
diff --git a/taskcluster/docker/updatebot/hgrc b/taskcluster/docker/updatebot/hgrc
new file mode 100644
index 0000000000..7d1db24ae0
--- /dev/null
+++ b/taskcluster/docker/updatebot/hgrc
@@ -0,0 +1,7 @@
+[ui]
+ssh = ssh -i $HOME/id_rsa -l updatebot@mozilla.com -o UserKnownHostsFile=$HOME/ssh_known_hosts
+username = Updatebot <updatebot@mozilla.com>
+
+[extensions]
+strip =
+push-to-try = $HOME/.mozbuild/version-control-tools/hgext/push-to-try \ No newline at end of file
diff --git a/taskcluster/docker/updatebot/moz.build b/taskcluster/docker/updatebot/moz.build
new file mode 100644
index 0000000000..315dc32600
--- /dev/null
+++ b/taskcluster/docker/updatebot/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("**"):
+ BUG_COMPONENT = ("Developer Infrastructure", "Mach Vendor & Updatebot")
diff --git a/taskcluster/docker/updatebot/privileged-setup.sh b/taskcluster/docker/updatebot/privileged-setup.sh
new file mode 100755
index 0000000000..640b0826e2
--- /dev/null
+++ b/taskcluster/docker/updatebot/privileged-setup.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+. ./updatebot-version.sh # Get UPDATEBOT_REVISION
+
+# If you edit this, be sure to edit fetch/updatebot.yml
+export SQLPROXY_REVISION=fb1939ab92846761595833361c6b0b0ecd543861
+
+export DEBIAN_FRONTEND=noninteractive
+
+# Update apt-get lists
+apt-get update -y
+
+# Install dependencies
+apt-get install -y --no-install-recommends \
+ arcanist \
+ ca-certificates \
+ curl \
+ ed \
+ golang-go \
+ gcc \
+ libc6-dev \
+ python3-minimal \
+ python3-wheel \
+ python3-pip \
+ python3-venv \
+ python3-requests \
+ python3-requests-unixsocket \
+ python3-setuptools \
+ openssh-client \
+ rsync \
+ wget
+
+mkdir -p /builds/worker/.mozbuild
+chown -R worker:worker /builds/worker/
+export GOPATH=/builds/worker/go
+
+# nodejs 16 for pdfjs
+. install-node.sh
+
+# pdf.js setup
+# We want to avoid downloading a ton of packages all the time, so
+# we will preload the pdf.js repo (and packages) in the Docker image
+# and only update it at runtime. This means that the `./mach vendor`
+# behavior for pdf.js will also be kind of custom
+npm install -g gulp-cli
+cd /builds/worker/
+git clone https://github.com/mozilla/pdf.js.git
+cd /builds/worker/pdf.js
+npm ci --legacy-peer-deps
+
+# Seed a v8 repository because it's large, and doing an update will
+# be much faster than a new clone each time.
+cd /builds/worker/
+git clone https://github.com/v8/v8.git
+
+# Build Google's Cloud SQL Proxy from source
+cd /builds/worker/
+mkdir cloud_sql_proxy
+cd cloud_sql_proxy
+go mod init cloud_sql_proxy
+go get github.com/GoogleCloudPlatform/cloudsql-proxy/cmd/cloud_sql_proxy@$SQLPROXY_REVISION
+
+# Check out source code
+cd /builds/worker/
+git clone https://github.com/mozilla-services/updatebot.git
+cd updatebot
+git checkout "$UPDATEBOT_REVISION"
+
+# Set up dependencies
+cd /builds/worker/
+chown -R worker:worker .
+chown -R worker:worker .*
+
+python3 -m pip install -U pip
+python3 -m pip install poetry==1.2.2
+
+rm -rf /setup
diff --git a/taskcluster/docker/updatebot/run.py b/taskcluster/docker/updatebot/run.py
new file mode 100755
index 0000000000..ad9bcb8788
--- /dev/null
+++ b/taskcluster/docker/updatebot/run.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+sys.path.append("/builds/worker/checkouts/gecko/third_party/python")
+sys.path.append(".")
+
+import base64
+import os
+import platform
+import signal
+import stat
+import subprocess
+
+import requests
+
+import taskcluster
+
+# Bump this number when you need to cause a commit for the job to re-run: 21
+
+if len(sys.argv) < 3:
+ print("Usage:", sys.argv[0], "gecko-dev-path updatebot-path [moz-fetches-dir]")
+ sys.exit(1)
+
+GECKO_DEV_PATH = sys.argv[1].replace("/", os.path.sep)
+UPDATEBOT_PATH = sys.argv[2].replace("/", os.path.sep)
+
+# Only needed on Windows
+if len(sys.argv) > 3:
+ FETCHES_PATH = sys.argv[3].replace("/", os.path.sep)
+else:
+ FETCHES_PATH = None
+
+HOME_PATH = os.path.expanduser("~")
+
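+# Run in "prod" mode only when triggered from mozilla-central; any other
+# repository (e.g. a try push) uses "dev" mode.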
+OPERATING_MODE = (
+ "prod"
+ if os.environ.get("GECKO_HEAD_REPOSITORY", "")
+ == "https://hg.mozilla.org/mozilla-central"
+ else "dev"
+)
+
+DEV_PHAB_URL = "https://phabricator-dev.allizom.org/"
+PROD_PHAB_URL = "https://phabricator.services.mozilla.com/"
+
+phabricator_url = DEV_PHAB_URL if OPERATING_MODE == "dev" else PROD_PHAB_URL
+
+
+def log(*args):
+ print(*args)
+
+
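+# Fetch a named Updatebot secret, either through the in-task secrets proxy
+# (when TASK_ID is set) or via the Taskcluster client using credentials from
+# the environment.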
+def get_secret(name):
+ secret = None
+ if "TASK_ID" in os.environ:
+ secrets_url = (
+ "http://taskcluster/secrets/v1/secret/project/updatebot/"
+ + ("3" if OPERATING_MODE == "prod" else "2")
+ + "/"
+ + name
+ )
+ res = requests.get(secrets_url)
+ res.raise_for_status()
+ secret = res.json()
+ else:
+ secrets = taskcluster.Secrets(taskcluster.optionsFromEnvironment())
+ secret = secrets.get("project/updatebot/" + OPERATING_MODE + "/" + name)
+ secret = secret["secret"] if "secret" in secret else None
+ secret = secret["value"] if "value" in secret else None
+ return secret
+
+
+# Get TC Secrets =======================================
+log("Operating mode is ", OPERATING_MODE)
+log("Getting secrets...")
+bugzilla_api_key = get_secret("bugzilla-api-key")
+phabricator_token = get_secret("phabricator-token")
+try_sshkey = get_secret("try-sshkey")
+database_config = get_secret("database-password")
+sentry_url = get_secret("sentry-url")
+sql_proxy_config = get_secret("sql-proxy-config")
+
+# Update Updatebot =======================================
+if OPERATING_MODE == "dev":
+ """
+    If we are in development mode, we will update from GitHub.
+    (This command will probably only work if we check out a branch, FWIW.)
+
+ This allows us to iterate faster by committing to github and
+ re-running the cron job on Taskcluster, without rebuilding the
+ Docker image.
+
+ However, this mechanism is bypassing the security feature we
+ have in-tree, where upstream out-of-tree code is fixed at a known
+ revision and cannot be changed without a commit to m-c.
+
+ Therefore, we only do this in dev mode when running on try.
+ """
+
+ os.chdir(UPDATEBOT_PATH)
+ log("Performing git repo update...")
+ command = ["git", "symbolic-ref", "-q", "HEAD"]
+
+ r = subprocess.run(command)
+ if r.returncode == 0:
+ # This indicates we are on a branch, and not a specific revision
+ subprocess.check_call(["git", "pull", "origin"])
+
+# Set Up SSH & Phabricator ==============================
+os.chdir(HOME_PATH)
+log("Setting up ssh and phab keys...")
+with open("id_rsa", "w") as sshkey:
+ sshkey.write(try_sshkey)
+os.chmod("id_rsa", stat.S_IRUSR | stat.S_IWUSR)
+
+arc_filename = ".arcrc"
+if platform.system() == "Windows":
+ arc_path = os.path.join(FETCHES_PATH, "..", "AppData", "Roaming")
+ os.makedirs(arc_path, exist_ok=True)
+ os.chdir(arc_path)
+ log("Writing %s to %s" % (arc_filename, arc_path))
+else:
+ os.chdir(HOME_PATH)
+
+arcrc = open(arc_filename, "w")
+towrite = """
+{
+ "hosts": {
+ "PHAB_URL_HERE": {
+ "token": "TOKENHERE"
+ }
+ }
+}
+""".replace(
+ "TOKENHERE", phabricator_token
+).replace(
+ "PHAB_URL_HERE", phabricator_url + "api/"
+)
+arcrc.write(towrite)
+arcrc.close()
+os.chmod(arc_filename, stat.S_IRUSR | stat.S_IWUSR)
+
+# Set up the Cloud SQL Proxy =============================
+os.chdir(HOME_PATH)
+log("Setting up cloud_sql_proxy...")
+with open("sql-proxy-key", "w") as proxy_key_file:
+ proxy_key_file.write(
+ base64.b64decode(sql_proxy_config["key-value"]).decode("utf-8")
+ )
+
+instance_name = sql_proxy_config["instance-name"]
+if platform.system() == "Linux":
+ sql_proxy_command = "/builds/worker/go/bin/cloud_sql_proxy"
+else:
+ sql_proxy_command = os.path.join(UPDATEBOT_PATH, "..", "cloud_sql_proxy.exe")
+
+sql_proxy_command += (
+ " -instances=" + instance_name + "=tcp:3306 -credential_file=sql-proxy-key"
+)
+sql_proxy_args = {
+ "stdout": subprocess.PIPE,
+ "stderr": subprocess.PIPE,
+ "shell": True,
+ "start_new_session": True,
+}
+
+if platform.system() == "Windows":
+ si = subprocess.STARTUPINFO()
+ si.dwFlags = subprocess.CREATE_NEW_PROCESS_GROUP
+
+ sql_proxy_args["startupinfo"] = si
+
+sql_proxy = subprocess.Popen((sql_proxy_command), **sql_proxy_args)
+
+try:
+ (stdout, stderr) = sql_proxy.communicate(input=None, timeout=2)
+ log("sql proxy stdout:", stdout.decode("utf-8"))
+ log("sql proxy stderr:", stderr.decode("utf-8"))
+except subprocess.TimeoutExpired:
+ log("no sqlproxy output in 2 seconds, this means it probably didn't error.")
+ log("sqlproxy pid:", sql_proxy.pid)
+
+database_config["host"] = "127.0.0.1"
+
+# Vendor =================================================
+log("Getting Updatebot ready...")
+os.chdir(UPDATEBOT_PATH)
+localconfig = {
+ "General": {
+ "env": OPERATING_MODE,
+ "gecko-path": GECKO_DEV_PATH,
+ },
+ "Logging": {
+ "local": True,
+ "sentry": True,
+ "sentry_config": {"url": sentry_url, "debug": False},
+ },
+ "Database": database_config,
+ "Bugzilla": {
+ "apikey": bugzilla_api_key,
+ },
+ "Taskcluster": {
+ "url_treeherder": "https://treeherder.mozilla.org/",
+ "url_taskcluster": "http://taskcluster/",
+ },
+}
+
+log("Writing local config file")
+config = open("localconfig.py", "w")
+config.write("localconfig = " + str(localconfig))
+config.close()
+
+log("Running updatebot")
+# On Windows, Updatebot is run by windows-setup.sh
+if platform.system() == "Linux":
+ subprocess.check_call(["python3", "-m", "poetry", "run", "./automation.py"])
+
+ # Clean up ===============================================
+ log("Killing cloud_sql_proxy")
+ os.kill(sql_proxy.pid, signal.SIGTERM)
diff --git a/taskcluster/docker/updatebot/setup.sh b/taskcluster/docker/updatebot/setup.sh
new file mode 100755
index 0000000000..e7c6fe03e0
--- /dev/null
+++ b/taskcluster/docker/updatebot/setup.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+# Copy the system known_hosts to the home directory so we have uniformity with Windows
+# and the ssh command will find them in the same place.
+cp /etc/ssh/ssh_known_hosts "$HOME/ssh_known_hosts"
+
+# If poetry is not run as the worker user here, it won't work when run as that user later.
+cd /builds/worker/updatebot
+/usr/local/bin/poetry install --no-ansi
diff --git a/taskcluster/docker/updatebot/updatebot-version.sh b/taskcluster/docker/updatebot/updatebot-version.sh
new file mode 100644
index 0000000000..2dd61412ce
--- /dev/null
+++ b/taskcluster/docker/updatebot/updatebot-version.sh
@@ -0,0 +1,2 @@
+export UPDATEBOT_REVISION=dee7a25bb71daf3356d146ab9582720d4a490264
+
diff --git a/taskcluster/docker/updatebot/windows-php.ini b/taskcluster/docker/updatebot/windows-php.ini
new file mode 100644
index 0000000000..8ccaa05ce4
--- /dev/null
+++ b/taskcluster/docker/updatebot/windows-php.ini
@@ -0,0 +1,130 @@
+[PHP]
+
+engine = On
+short_open_tag = Off
+precision = 14
+output_buffering = 4096
+zlib.output_compression = Off
+implicit_flush = Off
+unserialize_callback_func =
+serialize_precision = -1
+disable_functions =
+disable_classes =
+zend.enable_gc = On
+zend.exception_ignore_args = Off
+expose_php = On
+max_execution_time = 30
+max_input_time = 60
+memory_limit = 128M
+error_reporting = E_ALL
+display_errors = On
+display_startup_errors = On
+log_errors = On
+log_errors_max_len = 1024
+ignore_repeated_errors = Off
+ignore_repeated_source = Off
+report_memleaks = On
+variables_order = "GPCS"
+request_order = "GP"
+register_argc_argv = Off
+auto_globals_jit = On
+post_max_size = 8M
+auto_prepend_file =
+auto_append_file =
+default_mimetype = "text/html"
+default_charset = "UTF-8"
+doc_root =
+user_dir =
+extension_dir = "ext"
+enable_dl = Off
+file_uploads = On
+upload_max_filesize = 2M
+max_file_uploads = 20
+allow_url_fopen = On
+allow_url_include = Off
+default_socket_timeout = 60
+extension=curl
+
+[CLI Server]
+cli_server.color = On
+
+[Pdo_mysql]
+pdo_mysql.default_socket=
+
+[mail function]
+SMTP = localhost
+smtp_port = 25
+mail.add_x_header = Off
+
+[ODBC]
+odbc.allow_persistent = On
+odbc.check_persistent = On
+odbc.max_persistent = -1
+odbc.max_links = -1
+odbc.defaultlrl = 4096
+odbc.defaultbinmode = 1
+
+[MySQLi]
+mysqli.max_persistent = -1
+mysqli.allow_persistent = On
+mysqli.max_links = -1
+mysqli.default_port = 3306
+mysqli.default_socket =
+mysqli.default_host =
+mysqli.default_user =
+mysqli.default_pw =
+mysqli.reconnect = Off
+
+[mysqlnd]
+mysqlnd.collect_statistics = On
+mysqlnd.collect_memory_statistics = On
+
+[PostgreSQL]
+pgsql.allow_persistent = On
+pgsql.auto_reset_persistent = Off
+pgsql.max_persistent = -1
+pgsql.max_links = -1
+pgsql.ignore_notice = 0
+pgsql.log_notice = 0
+
+[bcmath]
+bcmath.scale = 0
+
+[Session]
+session.save_handler = files
+session.use_strict_mode = 0
+session.use_cookies = 1
+session.use_only_cookies = 1
+session.name = PHPSESSID
+session.auto_start = 0
+session.cookie_lifetime = 0
+session.cookie_path = /
+session.cookie_domain =
+session.cookie_httponly =
+session.cookie_samesite =
+session.serialize_handler = php
+session.gc_probability = 1
+session.gc_divisor = 1000
+session.gc_maxlifetime = 1440
+session.referer_check =
+session.cache_limiter = nocache
+session.cache_expire = 180
+session.use_trans_sid = 0
+session.sid_length = 26
+session.trans_sid_tags = "a=href,area=href,frame=src,form="
+session.sid_bits_per_character = 5
+
+[Assertion]
+zend.assertions = 1
+
+[Tidy]
+tidy.clean_output = Off
+
+[soap]
+soap.wsdl_cache_enabled=1
+soap.wsdl_cache_dir="/tmp"
+soap.wsdl_cache_ttl=86400
+soap.wsdl_cache_limit = 5
+
+[ldap]
+ldap.max_links = -1
diff --git a/taskcluster/docker/updatebot/windows-setup.sh b/taskcluster/docker/updatebot/windows-setup.sh
new file mode 100644
index 0000000000..c04d0a6fbf
--- /dev/null
+++ b/taskcluster/docker/updatebot/windows-setup.sh
@@ -0,0 +1,135 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+set -vex
+
+. ./taskcluster/docker/updatebot/updatebot-version.sh # Get UPDATEBOT_REVISION
+
+HOME=$(python3 -c "import os;print(os.path.expanduser('~'))")
+export HOME
+GECKO_PATH="$PWD"
+UPDATEBOT_PATH="$MOZ_FETCHES_DIR/updatebot"
+
+# MOZ_FETCHES_DIR is in Z:/ format. When we update the PATH we need to use
+# /z/ format. Fortunately, we can translate them like so:
+cd "$MOZ_FETCHES_DIR"
+MOZ_FETCHES_PATH="$PWD"
+
+#########################################################
+# Install dependencies
+
+# Move depot_tools
+cd "$MOZ_FETCHES_DIR"
+mv depot_tools.git depot_tools
+
+
+# Generating a new version of the preloaded depot_tools download can be done by:
+# 1) Running the task, uncommenting the variable assignment below, uncommenting the
+# _GENERATE_DEPOT_TOOLS_BINARIES_ section in taskcluster/ci/updatebot/kind.yml,
+# and ensuring that an angle update will actually take place (so it downloads the depot_tools)
+# 2) Downloading and sanity-checking the depot_tools-preloaded-binaries-GIT_HASH-DATE.zip artifact
+# 3) Adding it to tooltool
+# 4) Updating the updatebot manifest
+# Note that even for the same git revision the downloaded tools can change, so they are tagged
+# with both the git hash and the date it was generated
+
+# export GENERATE_DEPOT_TOOLS_BINARIES=1
+
+if test -n "$GENERATE_DEPOT_TOOLS_BINARIES"; then
+ cp -r depot_tools depot_tools-from-git
+fi
+
+# Git is at /c/Program Files/Git/cmd/git.exe
+# It's found from this script (confusingly), but its directory is not in PATH, so we need to add it
+export PATH="/c/Program Files/Git/cmd:$PATH"
+
+# php & arcanist
+if [ -n "$TOOLTOOL_MANIFEST" ]; then
+ . "$GECKO_PATH/taskcluster/scripts/misc/tooltool-download.sh"
+fi
+
+cp "$MOZ_FETCHES_DIR/vcruntime140.dll" "$MOZ_FETCHES_DIR/php-win"
+cp "$GECKO_PATH/taskcluster/docker/updatebot/windows-php.ini" "$MOZ_FETCHES_DIR/php-win/php.ini"
+
+cd "$MOZ_FETCHES_DIR/arcanist"
+patch -p1 < "$GECKO_PATH/taskcluster/docker/updatebot/arcanist_windows_stream.patch"
+patch -p1 < "$GECKO_PATH/taskcluster/docker/updatebot/arcanist_patch_size.patch"
+cd "$MOZ_FETCHES_DIR"
+
+export PATH="$MOZ_FETCHES_PATH/php-win:$PATH"
+export PATH="$MOZ_FETCHES_PATH/arcanist/bin:$PATH"
+
+# get Updatebot
+cd "$MOZ_FETCHES_DIR"
+git clone https://github.com/mozilla-services/updatebot.git
+cd updatebot
+git checkout "$UPDATEBOT_REVISION"
+
+# base python needs
+python3 -m pip install --no-warn-script-location --user -U pip
+python3 -m pip install --no-warn-script-location --user poetry wheel requests setuptools
+
+# updatebot dependencies
+cd "$UPDATEBOT_PATH"
+python3 -m poetry install
+
+# taskcluster secrets and writing out localconfig
+cd "$GECKO_PATH"
+python3 ./taskcluster/docker/updatebot/run.py "$GECKO_PATH" "$UPDATEBOT_PATH" "$MOZ_FETCHES_PATH"
+
+# mercurial configuration
+cp "$GECKO_PATH/taskcluster/docker/updatebot/hgrc" "$HOME/.hgrc"
+# Windows is not happy with $HOME in the hgrc so we need to do a hack to replace it
+# with the actual value
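+# (Feeding the file to sh as an unquoted here-document makes the shell expand $HOME in place.)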
+( echo "cat <<EOF" ; cat "$HOME/.hgrc" ) | sh > tmp
+mv tmp "$HOME/.hgrc"
+
+# ssh known hosts
+cp "$GECKO_PATH/taskcluster/docker/push-to-try/known_hosts" "$HOME/ssh_known_hosts"
+
+#########################################################
+# Run it
+export PYTHONIOENCODING=utf8
+export PYTHONUNBUFFERED=1
+
+cd "$UPDATEBOT_PATH"
+python3 -m poetry run python3 ./automation.py
+
+#########################################################
+if test -n "$GENERATE_DEPOT_TOOLS_BINARIES"; then
+ # Artifacts
+
+ cd "$MOZ_FETCHES_PATH"
+ mv depot_tools depot_tools-from-tc
+
+ # Clean out unneeded files
+ # Need to use cmd because for some reason rm from bash throws 'Access Denied'
+ cmd '/c for /d /r %i in (*__pycache__) do rmdir /s /q %i'
+ rm -rf depot_tools-from-git/.git || true
+
+ # Delete the files that are already in git
+ find depot_tools-from-git -mindepth 1 -maxdepth 1 | sed s/depot_tools-from-git/depot_tools-from-tc/ | while read -r d; do rm -rf "$d"; done
+
+ # Make the artifact
+ rm -rf depot_tools-preloaded-binaries #remove it if it existed (i.e. we probably have one from tooltool already)
+ mv depot_tools-from-tc depot_tools-preloaded-binaries
+
+ # zip can't add symbolic links, and exits with an error code. || true avoids a script crash
+ zip -r depot_tools-preloaded-binaries.zip depot_tools-preloaded-binaries/ || true
+
+ # Convoluted way to get the git hash, because we don't have a .git directory
+ # Adding extra print statements just in case we need to debug it
+ GIT_HASH=$(grep depot_tools -A 1 "$GECKO_PATH/taskcluster/ci/fetch/updatebot.yml" | tee /dev/tty | grep revision | tee /dev/tty | awk -F': *' '{print $2}' | tee /dev/tty)
+ DATE=$(date -I)
+ mv depot_tools-preloaded-binaries.zip "depot_tools-preloaded-binaries-$GIT_HASH-$DATE.zip"
+
+ # Put the artifact into the directory we will look for it
+ mkdir -p "$GECKO_PATH/obj-build/depot_tools" || true
+ mv "depot_tools-preloaded-binaries-$GIT_HASH-$DATE.zip" "$GECKO_PATH/obj-build/depot_tools"
+fi
+
+#########################################################
+echo "Killing SQL Proxy"
+taskkill -f -im cloud_sql_proxy.exe || true
diff --git a/taskcluster/docker/valgrind-build/Dockerfile b/taskcluster/docker/valgrind-build/Dockerfile
new file mode 100644
index 0000000000..a585b5771c
--- /dev/null
+++ b/taskcluster/docker/valgrind-build/Dockerfile
@@ -0,0 +1,66 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+# We could try to be smart and install all the -dbg packages corresponding to
+# the installed packages, but, not all of them are actually for libraries used
+# by Firefox, leading to a larger docker image. Moreover, some of the -dbg
+# packages for unnecessary libraries pull other packages through dependencies,
+# that make for even larger docker images.
+RUN apt-get update && apt-get install \
+ dbus-x11 \
+ libgtk-3-bin \
+ libdbus-1-3-dbgsym \
+ libatk-bridge2.0-0-dbgsym \
+ libatk1.0-0-dbgsym \
+ libcairo2-dbgsym \
+ libdbus-glib-1-2-dbgsym \
+ libdrm2-dbgsym \
+ libffi7-dbgsym \
+ libfontconfig1-dbgsym \
+ libglx-mesa0-dbgsym \
+ libglapi-mesa-dbgsym \
+ libglib2.0-0-dbgsym \
+ libgdk-pixbuf-2.0-0-dbgsym \
+ libgtk-3-0-dbgsym \
+ libice6-dbgsym \
+ libicu67-dbgsym \
+ libpango-1.0-0-dbgsym \
+ libpcre3-dbg \
+ libpixman-1-0-dbgsym \
+ libsm6-dbgsym \
+ libvorbis0a-dbgsym \
+ libwayland-client0-dbgsym \
+ libwayland-cursor0-dbgsym \
+ libx11-6-dbgsym \
+ libx11-xcb1-dbgsym \
+ libxau6-dbgsym \
+ libxcb-glx0-dbgsym \
+ libxcb-render0-dbgsym \
+ libxcb-shm0-dbgsym \
+ libxcb1-dbgsym \
+ libxcomposite1-dbgsym \
+ libxcursor1-dbgsym \
+ libxdamage1-dbgsym \
+ libxdmcp6-dbg \
+ libxext6-dbg \
+ libxfixes3-dbgsym \
+ libxi6-dbgsym \
+ libxinerama1-dbgsym \
+ libxrandr2-dbgsym \
+ libxrender1-dbgsym \
+ libxt6-dbgsym \
+ libxxf86vm1-dbg \
+ valgrind-dbgsym \
+ xvfb \
+ zlib1g-dbgsym
+
+# %include taskcluster/docker/recipes/xvfb.sh
+COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
diff --git a/taskcluster/docker/webrender/Dockerfile b/taskcluster/docker/webrender/Dockerfile
new file mode 100644
index 0000000000..73b876cf78
--- /dev/null
+++ b/taskcluster/docker/webrender/Dockerfile
@@ -0,0 +1,16 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Kartikaya Gupta <kgupta@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+# %include gfx/wr/ci-scripts/docker-image/setup.sh
+ADD topsrcdir/gfx/wr/ci-scripts/docker-image/setup.sh /tmp/wr-setup.sh
+RUN /bin/bash /tmp/wr-setup.sh && rm /tmp/wr-setup.sh
+
+# We need this to install cargo-vendor as part of the wrench-deps task
+RUN apt-get install libssl-dev
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]