diff options
Diffstat (limited to 'taskcluster/docker')
152 files changed, 10634 insertions, 0 deletions
diff --git a/taskcluster/docker/REGISTRY b/taskcluster/docker/REGISTRY new file mode 100644 index 0000000000..cb1e1bb482 --- /dev/null +++ b/taskcluster/docker/REGISTRY @@ -0,0 +1 @@ +taskcluster diff --git a/taskcluster/docker/android-build/Dockerfile b/taskcluster/docker/android-build/Dockerfile new file mode 100644 index 0000000000..b1cd9d40d0 --- /dev/null +++ b/taskcluster/docker/android-build/Dockerfile @@ -0,0 +1,58 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Nick Alexander <nalexander@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +# rsync is required for l10n single locale repacks. less, screen, and +# vim, help debugging interactive tasks in Task Cluster. +# git and openssh-client are used to upload GeckoView javadoc to Github. +RUN apt-get update && \ + apt-get install \ + autoconf2.13 \ + build-essential \ + base-files \ + ccache \ + cmake \ + curl \ + file \ + gnupg \ + jq \ + less \ + openssh-client \ + procps \ + python-cryptography \ + python-dev \ + python-pip \ + python-setuptools \ + python-virtualenv \ + python3-dev \ + rsync \ + screen \ + sudo \ + tar \ + unzip \ + uuid \ + valgrind \ + vim \ + wget \ + yasm \ + zip + +# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb. +# %include taskcluster/docker/recipes/xvfb.sh +COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh + +# %include taskcluster/docker/recipes/prepare_openjdk.sh +COPY topsrcdir/taskcluster/docker/recipes/prepare_openjdk.sh /tmp/prepare_openjdk.sh +RUN /tmp/prepare_openjdk.sh && rm /tmp/prepare_openjdk.sh + +# Observe missing --no-install-recommends, in order to install glib2.0/gconf/etc. +RUN apt-get install \ + libgconf-2-4 \ + libgnome2-0 \ + openjdk-8-jdk-headless + +# Back to the workdir, matching desktop-build. 
+WORKDIR /builds/worker diff --git a/taskcluster/docker/android-build/README.md b/taskcluster/docker/android-build/README.md new file mode 100644 index 0000000000..6096b08368 --- /dev/null +++ b/taskcluster/docker/android-build/README.md @@ -0,0 +1,2 @@ +This is a docker script for fetching Android Gradle dependenices for +use in Mozilla's build clusters. diff --git a/taskcluster/docker/android-build/VERSION b/taskcluster/docker/android-build/VERSION new file mode 100644 index 0000000000..4e379d2bfe --- /dev/null +++ b/taskcluster/docker/android-build/VERSION @@ -0,0 +1 @@ +0.0.2 diff --git a/taskcluster/docker/condprof/Dockerfile b/taskcluster/docker/condprof/Dockerfile new file mode 100644 index 0000000000..b14522501c --- /dev/null +++ b/taskcluster/docker/condprof/Dockerfile @@ -0,0 +1,13 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Tarek Ziade <tarek@mozilla.com> + +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/tooltool-cache +VOLUME /builds/worker/workspace + +RUN apt-get -y update && \ + apt-get install -y python3.7 + +CMD ["/bin/bash", "--login"] + diff --git a/taskcluster/docker/custom-v8/Dockerfile b/taskcluster/docker/custom-v8/Dockerfile new file mode 100644 index 0000000000..bb3e8340bc --- /dev/null +++ b/taskcluster/docker/custom-v8/Dockerfile @@ -0,0 +1,21 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Joel Maher <jmaher@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV XZ_OPT=-T0 + +RUN apt-get update && \ + apt-get install \ + bzip2 \ + curl \ + libbz2-dev \ + libglib2.0-dev \ + libtinfo5 \ + pkg-config \ + tar \ + unzip \ + wget \ + zip diff --git a/taskcluster/docker/debian-base/Dockerfile b/taskcluster/docker/debian-base/Dockerfile new file mode 100644 index 0000000000..9c4d59190e --- /dev/null +++ b/taskcluster/docker/debian-base/Dockerfile @@ -0,0 +1,67 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +### Add 
worker user and setup its workspace. +RUN mkdir /builds && \ + groupadd -g 1000 worker && \ + useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker && \ + mkdir -p /builds/worker/workspace && \ + chown -R worker:worker /builds + +# Declare default working folder +WORKDIR /builds/worker + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +# Set variable normally configured at login, by the shells parent process, these +# are taken from GNU su manual +ENV HOME=/builds/worker \ + SHELL=/bin/bash \ + USER=worker \ + LOGNAME=worker \ + HOSTNAME=taskcluster-worker + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] + +ARG TASKCLUSTER_ROOT_URL +ARG DOCKER_IMAGE_PACKAGES +RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \ + apt-get update && \ + apt-get dist-upgrade && \ + apt-get install \ + git \ + less \ + make \ + mercurial \ + patch \ + python \ + python3 \ + python3-distutils-extra \ + python3-minimal \ + python-zstandard \ + python3-zstandard \ + python-psutil \ + python3-psutil \ + vim-tiny \ + xz-utils + +# %include testing/mozharness/external_tools/robustcheckout.py +COPY topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# Add pip configuration, among other things. 
+# %include taskcluster/docker/recipes/dot-config +COPY topsrcdir/taskcluster/docker/recipes/dot-config /builds/worker/.config + +# %include taskcluster/scripts/run-task +COPY topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task + +# %include taskcluster/scripts/misc/fetch-content +ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content + +RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/* diff --git a/taskcluster/docker/debian-build/Dockerfile b/taskcluster/docker/debian-build/Dockerfile new file mode 100644 index 0000000000..29c6280765 --- /dev/null +++ b/taskcluster/docker/debian-build/Dockerfile @@ -0,0 +1,94 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV XZ_OPT=-T0 + +ARG TASKCLUSTER_ROOT_URL +ARG DOCKER_IMAGE_PACKAGES +RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES + +# %ARG ARCH +RUN dpkg --add-architecture $ARCH + +# Ideally, we wouldn't need gcc-multilib and the extra linux-libc-dev, +# but the latter is required to make the former installable, and the former +# because of bug 1409276. +# We exclude /usr/share/doc/*/changelog.Debian* files because they might differ +# between i386 and amd64 variants of the packages we build on automation +# because of dates stored in them by debchange (and both might end up installed +# in some cases). +# On Debian 7, pkg-config was not multiarch-aware, so we rely on the $ARCH +# pkg-config being valid on the host. On more recent versions of Debian, we can +# use pkg-config, which is multiarch-aware, but we also need dpkg-dev because +# of https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=916772. +# +# We also need libstdc++ development files, but the exact version of libstdc++ +# that we need varies between debian versions. 
+RUN apt-get update && \ + apt-get dist-upgrade && \ + echo path-exclude=/usr/share/doc/*/changelog.Debian* > /etc/dpkg/dpkg.cfg.d/excludes && \ + if grep -q ^8\\. /etc/debian_version; then \ + PKG_CONFIG=pkg-config:$ARCH; \ + else \ + PKG_CONFIG="pkg-config dpkg-dev"; \ + fi && \ + if grep -q ^8\\. /etc/debian_version; then \ + LIBSTDCXX=libstdc++-4.9-dev; \ + elif grep -q ^9\\. /etc/debian_version; then \ + LIBSTDCXX=libstdc++-6-dev; \ + elif grep -q ^10\\. /etc/debian_version; then \ + libstdcxx=libstdc++-8-dev; \ + LIBSTDCXX="$libstdcxx $libstdcxx:$ARCH"; \ + fi && \ + apt-get install \ + autoconf2.13 \ + automake \ + bzip2 \ + curl \ + file \ + gawk \ + gcc-multilib \ + gnupg \ + jq \ + libucl1 \ + p7zip-full \ + procps \ + python-dev \ + python-pip \ + python-setuptools \ + python-virtualenv \ + python3-dev \ + rsync \ + screen \ + tar \ + unzip \ + uuid \ + valgrind \ + wget \ + x11-utils \ + xvfb \ + yasm \ + zip \ + linux-libc-dev \ + linux-libc-dev:$ARCH \ + $PKG_CONFIG \ + $LIBSTDCXX \ + libdbus-glib-1-dev:$ARCH \ + libdrm-dev:$ARCH \ + libfontconfig1-dev:$ARCH \ + libfreetype6-dev:$ARCH \ + libgconf2-dev:$ARCH \ + libgtk-3-dev:$ARCH \ + libgtk2.0-dev:$ARCH \ + libpango1.0-dev:$ARCH \ + libpulse-dev:$ARCH \ + libx11-xcb-dev:$ARCH \ + libxt-dev:$ARCH + +# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb +# %include taskcluster/docker/recipes/xvfb.sh +COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh diff --git a/taskcluster/docker/debian-packages/Dockerfile b/taskcluster/docker/debian-packages/Dockerfile new file mode 100644 index 0000000000..51e83541d6 --- /dev/null +++ b/taskcluster/docker/debian-packages/Dockerfile @@ -0,0 +1,10 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +RUN apt-get install --install-recommends \ + apt-utils \ + aptitude \ + build-essential \ + devscripts \ + fakeroot \ + git diff --git a/taskcluster/docker/debian-raw/Dockerfile 
b/taskcluster/docker/debian-raw/Dockerfile new file mode 100644 index 0000000000..304bcb0fbe --- /dev/null +++ b/taskcluster/docker/debian-raw/Dockerfile @@ -0,0 +1,33 @@ +# %ARG BASE_IMAGE +FROM $BASE_IMAGE +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +ENV DEBIAN_FRONTEND=noninteractive + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] + +COPY setup_packages.sh /usr/local/sbin/ + +# %ARG DIST +# %ARG SNAPSHOT +# Set apt sources list to a snapshot. +RUN for s in debian_$DIST debian_$DIST-updates debian_$DIST-backports debian-security_$DIST/updates; do \ + if [ $s = "debian_jessie-backports" ]; then \ + echo "deb http://snapshot.debian.org/archive/debian-archive/20190328T105444Z/debian/ ${s#*_} main"; \ + else \ + echo "deb http://snapshot.debian.org/archive/${s%_*}/$SNAPSHOT/ ${s#*_} main"; \ + fi; \ + done > /etc/apt/sources.list && \ + ( echo 'quiet "true";'; \ + echo 'APT::Get::Assume-Yes "true";'; \ + echo 'APT::Install-Recommends "false";'; \ + echo 'Acquire::Check-Valid-Until "false";'; \ + echo 'Acquire::Retries "5";'; \ + ) > /etc/apt/apt.conf.d/99taskcluster + +RUN apt-get update && \ + apt-get dist-upgrade && \ + apt-get install \ + apt-transport-https \ + ca-certificates diff --git a/taskcluster/docker/debian-raw/setup_packages.sh b/taskcluster/docker/debian-raw/setup_packages.sh new file mode 100755 index 0000000000..2f61283528 --- /dev/null +++ b/taskcluster/docker/debian-raw/setup_packages.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +TASKCLUSTER_ROOT_URL=$1 +shift + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. 
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1" + + +for task in "$@"; do + echo "adding package source $queue_base/task/$task/artifacts/public/build/" + echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list" +done diff --git a/taskcluster/docker/debian10-test-iris/Dockerfile b/taskcluster/docker/debian10-test-iris/Dockerfile new file mode 100644 index 0000000000..ef8ec5df77 --- /dev/null +++ b/taskcluster/docker/debian10-test-iris/Dockerfile @@ -0,0 +1,28 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Wes Kocher <wkocher@mozilla.com> + +RUN mkdir -p /builds +RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +# We need to declare all potentially cache volumes as caches. Also, +# making high I/O paths volumes increase I/O throughput because of +# AUFS slowness. +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/tooltool-cache +VOLUME /builds/worker/workspace + +# %include taskcluster/docker/debian10-test-iris/install_iris_deps.sh +ADD topsrcdir/taskcluster/docker/debian10-test-iris/install_iris_deps.sh /setup/install_iris_deps.sh +RUN bash /setup/install_iris_deps.sh + +# Set up first-run experience for interactive mode +ADD motd /etc/taskcluster-motd +ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell +RUN chmod +x /bin/taskcluster-interactive-shell + +RUN chown -R worker:worker /builds/worker + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/debian10-test-iris/install_iris_deps.sh b/taskcluster/docker/debian10-test-iris/install_iris_deps.sh new file mode 100755 index 0000000000..c1b7152c06 --- /dev/null +++ b/taskcluster/docker/debian10-test-iris/install_iris_deps.sh @@ -0,0 +1,90 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script installs and configures everything the iris +# testing suite requires. +#!/usr/bin/env bash + +set -ve + +apt_packages=() + +apt_packages+=('apt-utils') +apt_packages+=('autoconf') +apt_packages+=('autoconf-archive') +apt_packages+=('automake') +apt_packages+=('fluxbox') +apt_packages+=('libcairo2-dev') +apt_packages+=('libgtk2.0-dev') +apt_packages+=('libicu-dev') +apt_packages+=('libjpeg62-turbo-dev') +apt_packages+=('libopencv-contrib-dev') +apt_packages+=('libopencv-dev') +apt_packages+=('libopencv-objdetect-dev') +apt_packages+=('libopencv-superres-dev') +apt_packages+=('libopencv-videostab-dev') +apt_packages+=('libpango1.0-dev') +apt_packages+=('libpng-dev') +apt_packages+=('libpng16-16') +apt_packages+=('libtiff5-dev') +apt_packages+=('libtool') +apt_packages+=('p7zip-full') +apt_packages+=('pkg-config') +apt_packages+=('python3.7-tk') +apt_packages+=('python3.7-dev') +apt_packages+=('python3-pip') +apt_packages+=('scrot') +apt_packages+=('wmctrl') +apt_packages+=('xdotool') +apt_packages+=('xsel') +apt_packages+=('zlib1g-dev') + +apt-get update +# This allows packages to be installed without human interaction +export DEBIAN_FRONTEND=noninteractive +apt-get install -y -f "${apt_packages[@]}" + +python3.7 -m pip install pipenv +python3.7 -m pip install psutil +python3.7 -m pip install zstandard + +mkdir -p /setup +cd /setup + +wget http://www.leptonica.org/source/leptonica-1.76.0.tar.gz +tar xopf leptonica-1.76.0.tar.gz +cd leptonica-1.76.0 +./configure && make && make install + +cd /setup +wget https://github.com/tesseract-ocr/tesseract/archive/4.0.0.tar.gz +tar xopf 4.0.0.tar.gz +cd tesseract-4.0.0 +./autogen.sh &&\ +./configure --enable-debug &&\ +LDFLAGS="-L/usr/local/lib" CFLAGS="-I/usr/local/include" make &&\ +make install &&\ +make install -langs &&\ +ldconfig + +cd /setup +wget 
https://github.com/tesseract-ocr/tessdata/archive/4.0.0.zip +unzip 4.0.0.zip +cd tessdata-4.0.0 +ls /usr/local/share/tessdata/ +mv ./* /usr/local/share/tessdata/ + + +cd / +rm -rf /setup +rm -rf ~/.ccache + +ls ~/.cache + +rm -rf ~/.npm + +apt-get clean +apt-get autoclean +rm -f "$0" diff --git a/taskcluster/docker/debian10-test-iris/motd b/taskcluster/docker/debian10-test-iris/motd new file mode 100644 index 0000000000..35017be2a2 --- /dev/null +++ b/taskcluster/docker/debian10-test-iris/motd @@ -0,0 +1,5 @@ +Welcome to your taskcluster interactive shell! The regularly scheduled task +has been paused to give you a chance to set up your debugging environment. + +For your convenience, the exact mozharness command needed for this task can +be invoked using the 'run-mozharness' command. diff --git a/taskcluster/docker/debian10-test-iris/taskcluster-interactive-shell b/taskcluster/docker/debian10-test-iris/taskcluster-interactive-shell new file mode 100644 index 0000000000..c782c0ea96 --- /dev/null +++ b/taskcluster/docker/debian10-test-iris/taskcluster-interactive-shell @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +download() { + name=`basename $1` + url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1 + if ! 
curl --fail --silent -o ./$name --retry 10 $url; then + fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}" + fi +} + +cd $HOME/bin; +download taskcluster/scripts/tester/run-wizard; +chmod +x run-wizard; +./run-wizard; + +SPAWN="$SHELL"; +if [ "$SHELL" = "bash" ]; then + SPAWN="bash -li"; +fi; + +cd $HOME; +exec $SPAWN; diff --git a/taskcluster/docker/debian10-test/Dockerfile b/taskcluster/docker/debian10-test/Dockerfile new file mode 100644 index 0000000000..fd114aec95 --- /dev/null +++ b/taskcluster/docker/debian10-test/Dockerfile @@ -0,0 +1,106 @@ +FROM debian:buster +MAINTAINER Edwin Takahashi <egao@mozilla.com> + +RUN mkdir -p /builds +RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +# We need to declare all potentially cache volumes as caches. Also, +# making high I/O paths volumes increase I/O throughput because of +# AUFS slowness. +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/tooltool-cache +VOLUME /builds/worker/workspace + +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/common.sh +ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh + +# %include taskcluster/docker/recipes/install-node.sh +ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh + +# %include taskcluster/docker/recipes/debian-test-system-setup.sh +ADD 
topsrcdir/taskcluster/docker/recipes/debian-test-system-setup.sh /setup/system-setup.sh +RUN bash /setup/system-setup.sh + +# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb +# %include taskcluster/docker/recipes/xvfb.sh +ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task + +# %include taskcluster/scripts/misc/fetch-content +ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content + +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/worker/scripts/tooltool.py + +# Locale related setup for debian:buster +RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment +RUN echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen +RUN echo "LANG=en_US.UTF-8" > /etc/locale.conf +RUN locale-gen en_US.UTF-8 + +# Set variable normally configured at login, by the shells parent process, these +# are taken from GNU su manual +ENV HOME /builds/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker +ENV HOSTNAME taskcluster-worker +ENV LANG en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 + +# Add utilities and configuration +COPY dot-files/config /builds/worker/.config +COPY dot-files/pulse /builds/worker/.pulse +RUN chmod +x bin/* + +# allow the worker user to access video devices +RUN usermod -a -G video worker + +RUN mkdir -p artifacts + +ENV PATH $PATH:/builds/worker/bin + +# In test.sh we accept START_VNC to start a vnc daemon. +# Exposing this port allows it to work. 
+EXPOSE 5900 + +# This helps not forgetting setting DISPLAY=:0 when running +# tests outside of test.sh +ENV DISPLAY :0 + +# Disable apport (app crash reporter) to avoid stealing focus from test runs +ADD apport /etc/default/apport + +# Disable font antialiasing for now to match releng's setup +ADD fonts.conf /builds/worker/.fonts.conf + +# Set up first-run experience for interactive mode +ADD motd /etc/taskcluster-motd +ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell +RUN chmod +x /bin/taskcluster-interactive-shell + +RUN chown -R worker:worker /builds/worker + +# gnome-keyring-daemon is configured to have the IPC_LOCK capability (to lock pages with secrets in +# memory), but docker isn't run with that capability granted. So, if we were to try running +# gnome-keyring-daemon without first clearing the capability, it would just exit with the message +# "Operation not permitted". Luckily it doesn't actually require the capability. +RUN setcap -r /usr/bin/gnome-keyring-daemon + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/debian10-test/apport b/taskcluster/docker/debian10-test/apport new file mode 100644 index 0000000000..42e5f8d3a1 --- /dev/null +++ b/taskcluster/docker/debian10-test/apport @@ -0,0 +1 @@ +enabled=0 diff --git a/taskcluster/docker/debian10-test/dot-files/config/pip/pip.conf b/taskcluster/docker/debian10-test/dot-files/config/pip/pip.conf new file mode 100644 index 0000000000..73c2b2a52c --- /dev/null +++ b/taskcluster/docker/debian10-test/dot-files/config/pip/pip.conf @@ -0,0 +1,2 @@ +[global] +disable-pip-version-check = true diff --git a/taskcluster/docker/debian10-test/dot-files/config/user-dirs.dirs b/taskcluster/docker/debian10-test/dot-files/config/user-dirs.dirs new file mode 100644 index 0000000000..2db2718d24 --- /dev/null +++ b/taskcluster/docker/debian10-test/dot-files/config/user-dirs.dirs @@ -0,0 +1,15 @@ +# This file is written by xdg-user-dirs-update +# 
If you want to change or add directories, just edit the line you're +# interested in. All local changes will be retained on the next run +# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped +# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an +# absolute path. No other format is supported. + +XDG_DESKTOP_DIR="$HOME/Desktop" +XDG_DOWNLOAD_DIR="$HOME/Downloads" +XDG_TEMPLATES_DIR="$HOME/Templates" +XDG_PUBLICSHARE_DIR="$HOME/Public" +XDG_DOCUMENTS_DIR="$HOME/Documents" +XDG_MUSIC_DIR="$HOME/Music" +XDG_PICTURES_DIR="$HOME/Pictures" +XDG_VIDEOS_DIR="$HOME/Videos" diff --git a/taskcluster/docker/debian10-test/dot-files/config/user-dirs.locale b/taskcluster/docker/debian10-test/dot-files/config/user-dirs.locale new file mode 100644 index 0000000000..7741b83a3e --- /dev/null +++ b/taskcluster/docker/debian10-test/dot-files/config/user-dirs.locale @@ -0,0 +1 @@ +en_US diff --git a/taskcluster/docker/debian10-test/dot-files/pulse/client.conf b/taskcluster/docker/debian10-test/dot-files/pulse/client.conf new file mode 100644 index 0000000000..4b2dab2a66 --- /dev/null +++ b/taskcluster/docker/debian10-test/dot-files/pulse/client.conf @@ -0,0 +1 @@ +autospawn = no
\ No newline at end of file diff --git a/taskcluster/docker/debian10-test/fonts.conf b/taskcluster/docker/debian10-test/fonts.conf new file mode 100644 index 0000000000..9784fcc981 --- /dev/null +++ b/taskcluster/docker/debian10-test/fonts.conf @@ -0,0 +1,5 @@ +<match target="font"> + <edit name="antialias" mode="assign"> + <bool>false</bool> + </edit> +</match> diff --git a/taskcluster/docker/debian10-test/motd b/taskcluster/docker/debian10-test/motd new file mode 100644 index 0000000000..f958393cd8 --- /dev/null +++ b/taskcluster/docker/debian10-test/motd @@ -0,0 +1,6 @@ +Welcome to your taskcluster interactive shell! The regularly scheduled task +has been paused to give you a chance to set up your debugging environment. + +For your convenience, the exact mozharness command needed for this task can +be invoked using the 'run-mozharness' command. + diff --git a/taskcluster/docker/debian10-test/taskcluster-interactive-shell b/taskcluster/docker/debian10-test/taskcluster-interactive-shell new file mode 100644 index 0000000000..c782c0ea96 --- /dev/null +++ b/taskcluster/docker/debian10-test/taskcluster-interactive-shell @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +download() { + name=`basename $1` + url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1 + if ! 
curl --fail --silent -o ./$name --retry 10 $url; then + fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}" + fi +} + +cd $HOME/bin; +download taskcluster/scripts/tester/run-wizard; +chmod +x run-wizard; +./run-wizard; + +SPAWN="$SHELL"; +if [ "$SHELL" = "bash" ]; then + SPAWN="bash -li"; +fi; + +cd $HOME; +exec $SPAWN; diff --git a/taskcluster/docker/debian8-mozjs-rust-build/Dockerfile b/taskcluster/docker/debian8-mozjs-rust-build/Dockerfile new file mode 100644 index 0000000000..4fcdd795d6 --- /dev/null +++ b/taskcluster/docker/debian8-mozjs-rust-build/Dockerfile @@ -0,0 +1,12 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ARG TASKCLUSTER_ROOT_URL +ARG DOCKER_IMAGE_PACKAGES +RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \ + apt-get update && \ + apt-get install cmake diff --git a/taskcluster/docker/decision/Dockerfile b/taskcluster/docker/decision/Dockerfile new file mode 100644 index 0000000000..916ad77781 --- /dev/null +++ b/taskcluster/docker/decision/Dockerfile @@ -0,0 +1,35 @@ +FROM ubuntu:16.04 +MAINTAINER Dustin Mitchell <dustin@mozilla.com> + +# Add worker user +RUN mkdir /builds && \ + useradd -d /builds/worker -s /bin/bash -m worker && \ + mkdir /builds/worker/artifacts && \ + chown worker:worker /builds/worker/artifacts + +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /tmp/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh 
/tmp/install-mercurial.sh + +ADD system-setup.sh /tmp/system-setup.sh +RUN bash /tmp/system-setup.sh + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task + +ADD comm-task-env /builds/worker/bin/comm-task-env + +ENV PATH=/builds/worker/bin:$PATH \ + SHELL=/bin/bash \ + HOME=/builds/worker + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/decision/HASH b/taskcluster/docker/decision/HASH new file mode 100644 index 0000000000..8729f9625d --- /dev/null +++ b/taskcluster/docker/decision/HASH @@ -0,0 +1 @@ +sha256:4039fd878e5700b326d4a636e28c595c053fbcb53909c1db84ad1f513cf644ef diff --git a/taskcluster/docker/decision/README.md b/taskcluster/docker/decision/README.md new file mode 100644 index 0000000000..4490880be8 --- /dev/null +++ b/taskcluster/docker/decision/README.md @@ -0,0 +1,5 @@ +# Decision Tasks + +The decision image is a "boostrapping" image for the in tree logic it +deals with cloning gecko and the related utilities for providing an +environment where we can run gecko. diff --git a/taskcluster/docker/decision/VERSION b/taskcluster/docker/decision/VERSION new file mode 100644 index 0000000000..b1b25a5ffa --- /dev/null +++ b/taskcluster/docker/decision/VERSION @@ -0,0 +1 @@ +2.2.2 diff --git a/taskcluster/docker/decision/comm-task-env b/taskcluster/docker/decision/comm-task-env new file mode 100755 index 0000000000..65481497ae --- /dev/null +++ b/taskcluster/docker/decision/comm-task-env @@ -0,0 +1,199 @@ +#!/usr/bin/python3 -u +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Thunderbird build environment prep for run-task, +for use with comm-central derived repositories. 
+ +This script is meant to run prior to run-task on repositories like +comm-central that need to check out a copy of a mozilla repository +in order to build. +See bug 1491371 for background on why this is necessary. + +A project will have a file named ".gecko_rev.yml" in it's root. See the +constant "GECKO_REV_CONF" if you want to change that. To download it, the +script uses the project repository URL and the revision number. +Those are defined in the environment variables: +COMM_HEAD_REPOSITORY +COMM_HEAD_REV + +.gecko_rev.yml has a structure like (for comm-central): +``` +GECKO_BASE_REPOSITORY: https://hg.mozilla.org/mozilla-unified +GECKO_HEAD_REPOSITORY: https://hg.mozilla.org/mozilla-central +GECKO_HEAD_REF: default +``` +or for branches: +``` +GECKO_BASE_REPOSITORY: https://hg.mozilla.org/mozilla-unified +GECKO_HEAD_REPOSITORY: https://hg.mozilla.org/releases/mozilla-beta +GECKO_HEAD_REF: THUNDERBIRD_60_VERBRANCH +GECKO_HEAD_REV: 6a830d12f15493a70b1192022c9985eba2139910 + +Note about GECKO_HEAD_REV and GECKO_HEAD_REF: +GECKO_HEAD_REF is a branch name or "default". +GECKO_HEAD_REV is a revision hash. +``` +""" + +import sys + +import os +import socket +import time +from datetime import datetime +from pprint import pformat + +import urllib.error +import urllib.request + +import yaml + +if sys.version_info[0:2] < (3, 5): + print('run-task-wrapper requires Python 3.5+') + sys.exit(1) + +GECKO_REV_CONF = ".gecko_rev.yml" +DEBUG = bool(os.environ.get("RTW_DEBUG", False)) + + +def print_message(msg, prefix=__file__, level=""): + """ + Print messages. + :param object msg: message to print, usually a string, but not always + :param str prefix: message prefix + :param str level: message level (DEBUG, ERROR, INFO) + """ + if not isinstance(msg, str): + msg = pformat(msg) + now = datetime.utcnow().isoformat() + # slice microseconds to 3 decimals. + now = now[:-3] if now[-7:-6] == '.' 
else now + if level: + sys.stdout.write('[{prefix} {now}Z] {level}: {msg}\n'.format( + prefix=prefix, now=now, level=level, msg=msg)) + else: + sys.stdout.write('[{prefix} {now}Z] {msg}\n'.format( + prefix=prefix, now=now, msg=msg)) + sys.stdout.flush() + + +def error_exit(msg): + """Print the error message and exit with error.""" + print_message(msg, level="ERROR") + if DEBUG: + raise Exception(msg) + + sys.exit(1) + + +def print_debug(msg): + """Prints a message with DEBUG prefix if DEBUG is enabled + with the environment variable "RTW_DEBUG". + """ + if DEBUG: + print_message(msg, level="DEBUG") + + +def check_environ(): + """Check that the necessary environment variables to find the + comm- repository are defined. (Set in .taskcluster.yml) + :return: tuple(str, str) + """ + print_debug("Checking environment variables...") + project_head_repo = os.environ.get("COMM_HEAD_REPOSITORY", None) + project_head_rev = os.environ.get("COMM_HEAD_REV", None) + + if project_head_repo is None or project_head_rev is None: + error_exit("Environment NOT Ok:\n\tHead: {}\n\tRev: {}\n").format( + project_head_repo, project_head_rev) + + print_debug("Environment Ok:\n\tHead: {}\n\tRev: {}\n".format( + project_head_repo, project_head_rev)) + return project_head_repo, project_head_rev + + +def download_url(url, retry=1): + """Downloads the given URL. Naively retries (when asked) upon failure + :param url: str + :param retry: int + :return: str + """ + # Use 1-based counting for display and calculation purposes. + for i in range(1, retry+1): + try: + print_message('Fetching {}. 
Attempt {} of {}.'.format( + url, i, retry)) + with urllib.request.urlopen(url, timeout=10) as response: + data = response.read().decode("utf-8") + return data + except (urllib.error.URLError, socket.timeout) as exc: + print_message('Unable to retrieve {}'.format(url)) + if isinstance(exc, urllib.error.URLError): + print_message(exc.reason) + else: # socket.timeout + print_message('Connection timed out.') + + if i < retry: # No more retries + wait_time = i * 5 # fail #1: sleep 5s. #2, sleep 10s + print_message('Retrying in {} seconds.'.format(wait_time)) + time.sleep(wait_time) + + error_exit('No more retry attempts! Aborting.') + + +def fetch_gecko_conf(project_head_repo, project_revision): + """Downloads .gecko_rev.yml from the project repository + :param project_head_repo: str + :param project_revision: str + :return: dict + """ + gecko_conf_url = '/'.join( + [project_head_repo, 'raw-file', project_revision, GECKO_REV_CONF]) + + gecko_conf_yml = download_url(gecko_conf_url, retry=5) + + try: + gecko_conf = yaml.safe_load(gecko_conf_yml) + return gecko_conf + except yaml.YAMLError as exc: + err_txt = ["Error processing Gecko YAML configuration."] + if hasattr(exc, "problem_mark"): + mark = exc.problem_mark # pylint: disable=no-member + err_txt.append("Error position: line {}, column {}".format( + mark.line + 1, mark.column + 1)) + error_exit('\n'.join(err_txt)) + + +def update_environment(gecko_conf): + """Adds the new variables defined in gecko_conf to the + running environment. 
+ :param gecko_conf: dict + """ + print_message("Updating environment with:") + print_message(gecko_conf) + os.environ.update(gecko_conf) + + print_debug("New environment:") + print_debug(os.environ) + + +def exec_run_task(args): + """Executes run-task with a modified environment.""" + print_message("Executing: {}".format(pformat(args))) + os.execv(args[0], args[0:]) + + +def main(): + """Main function.""" + args = sys.argv[1:] # Remaining args starting with run-task + + project_head_repo, project_revision = check_environ() + gecko_conf = fetch_gecko_conf(project_head_repo, project_revision) + update_environment(gecko_conf) + exec_run_task(args) + + +if __name__ == "__main__": + main() diff --git a/taskcluster/docker/decision/system-setup.sh b/taskcluster/docker/decision/system-setup.sh new file mode 100644 index 0000000000..f90052565f --- /dev/null +++ b/taskcluster/docker/decision/system-setup.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +set -v -e + +test "$(whoami)" == 'root' + +apt-get update +apt-get install -y --force-yes --no-install-recommends \ + ca-certificates \ + python \ + sudo \ + python3 \ + python3-yaml \ + python-psutil \ + python3-psutil + +BUILD=/root/build +mkdir "$BUILD" + +tooltool_fetch() { + cat >manifest.tt + python2.7 /tmp/tooltool.py fetch + rm manifest.tt +} + +cd $BUILD +# shellcheck disable=SC1091 +. 
/tmp/install-mercurial.sh + +cd / +rm -rf $BUILD +apt-get clean +apt-get autoclean +rm -rf /var/lib/apt/lists/ +rm "$0" diff --git a/taskcluster/docker/diffoscope/Dockerfile b/taskcluster/docker/diffoscope/Dockerfile new file mode 100644 index 0000000000..20d79c016f --- /dev/null +++ b/taskcluster/docker/diffoscope/Dockerfile @@ -0,0 +1,30 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV LANG=en_US.UTF-8 + +RUN apt-get install \ + binutils-multiarch \ + bzip2 \ + curl \ + enjarify \ + diffoscope/buster-backports \ + jsbeautifier \ + libc++abi1 \ + locales \ + default-jdk-headless \ + python3-progressbar \ + unzip \ + zip \ + && \ + sed -i '/en_US.UTF-8/s/^# *//' /etc/locale.gen && \ + locale-gen + +COPY get_and_diffoscope /builds/worker/bin/get_and_diffoscope +COPY readelf /builds/worker/bin/readelf + +RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/* diff --git a/taskcluster/docker/diffoscope/get_and_diffoscope b/taskcluster/docker/diffoscope/get_and_diffoscope new file mode 100644 index 0000000000..7409313275 --- /dev/null +++ b/taskcluster/docker/diffoscope/get_and_diffoscope @@ -0,0 +1,140 @@ +#!/bin/bash + +set -e +set -x + +cd /builds/worker + +mkdir a b + +# /builds/worker/bin contains wrapper binaries to divert what diffoscope +# needs to use, so it needs to appear first. +export PATH=/builds/worker/bin:$PATH + +# Until https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=879010 is +# implemented, it's better to first manually extract the data. +# Plus dmg files are not supported yet. 
+ +case "$ORIG_URL" in +*.zip|*.apk) + curl -L "$ORIG_URL" > a.zip + curl -L "$NEW_URL" > b.zip + unzip -d a a.zip + unzip -d b b.zip + ;; +*.tar.bz2) + curl -L "$ORIG_URL" | tar -C a -jxf - + curl -L "$NEW_URL" | tar -C b -jxf - + ;; +*.tar.gz) + curl -L "$ORIG_URL" | tar -C a -zxf - + curl -L "$NEW_URL" | tar -C b -zxf - + ;; +*.dmg) + for tool in lipo otool; do + ln -s $MOZ_FETCHES_DIR/cctools/bin/x86_64-apple-darwin*-$tool bin/$tool + done + curl -L "$ORIG_URL" > a.dmg + curl -L "$NEW_URL" > b.dmg + for i in a b; do + $MOZ_FETCHES_DIR/dmg/dmg extract $i.dmg $i.hfs + $MOZ_FETCHES_DIR/dmg/hfsplus $i.hfs extractall / $i + done + ;; +*) + ARTIFACT=$(basename "${ORIG_URL}") + curl -L "$ORIG_URL" > "a/${ARTIFACT}" + curl -L "$NEW_URL" > "b/${ARTIFACT}" +esac + +case "$ORIG_URL" in +*/target.apk) + OMNIJAR=assets/omni.ja + ;; +*) + OMNIJAR=omni.ja + ;; +esac + +report_error() { + # We "parse" the diff output, so we look at the lines that contain a "tee", like: + # ├── firefox + # │ ├── libxul.so + # │ │ ├── readelf --wide --notes {} + # We ignore lines like the last one, to only report file names. And we ignore + # lines for directories such as the first one, but still look at them to report + # full paths. + python3 <<-EOF + TEE = '├──' + paths = set() + path = [] + with open("$1.txt") as fh: + for l in fh: + if TEE not in l: + continue + fields = l.split() + # We rely on the number of │ to figure out at what level the file + # name applies. + if fields[-2:-1] == [TEE]: + path[len(fields) - 2:] = [fields[-1]] + else: + # Align path length to match the number of │ + path.append(None) + path_ = [p for p in path if p] + full_path = '/'.join(path_) + parent_path = '/'.join(path_[:-1]) + if parent_path in paths: + paths.remove(parent_path) + if full_path: + paths.add(full_path) + + for p in sorted(paths): + print('TEST-UNEXPECTED-FAIL | {} differs. 
See the $1.html or $1.txt artifact'.format(p)) + EOF +} + +# Builds are 99% of the time differing in some small ways, so it's not +# really useful to report a failure (at least not until we actually +# care about the builds being 100% identical). +POST=true + +fail() { + exit 1 +} + +for option; do + case "$option" in + --unpack) + CURDIR=$PWD + for dir in a b; do + # Need to run mach python from inside the gecko source. + # See bug #1533642. + (cd $GECKO_PATH && ./mach python --no-virtualenv toolkit/mozapps/installer/unpack.py --omnijar $OMNIJAR $CURDIR/$dir) + done + ;; + --fail) + POST="fail" + ;; + *) + echo "Unsupported option: $option" >&2 + exit 1 + esac +done + +if [ -n "$PRE_DIFF" ]; then + eval $PRE_DIFF +fi + +if diffoscope \ + --html diff.html \ + --text diff.txt \ + --progress \ + $DIFFOSCOPE_ARGS \ + a b +then + # Ok + : +else + report_error diff + $POST +fi diff --git a/taskcluster/docker/diffoscope/readelf b/taskcluster/docker/diffoscope/readelf new file mode 100644 index 0000000000..6b864171d7 --- /dev/null +++ b/taskcluster/docker/diffoscope/readelf @@ -0,0 +1,13 @@ +#!/bin/sh + +case "$1 $2" in +"--wide --symbols") + # When called with --wide --symbols, we remove the first column (which + # is essentially a line number that is not very useful), and then sort, + # which will order symbols by address, making a diff more useful. + /usr/bin/readelf "$@" | awk -F: '{print $2}' | sort + ;; +*) + exec /usr/bin/readelf "$@" + ;; +esac diff --git a/taskcluster/docker/fetch/Dockerfile b/taskcluster/docker/fetch/Dockerfile new file mode 100644 index 0000000000..88d18acab1 --- /dev/null +++ b/taskcluster/docker/fetch/Dockerfile @@ -0,0 +1,33 @@ +FROM $DOCKER_IMAGE_PARENT + +### Add worker user and setup its workspace. 
+RUN mkdir /builds && \ + groupadd -g 1000 worker && \ + useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker && \ + mkdir -p /builds/worker/workspace && \ + chown -R worker:worker /builds + +# Declare default working folder +WORKDIR /builds/worker + +ARG TASKCLUSTER_ROOT_URL +ARG DOCKER_IMAGE_PACKAGES +RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \ + apt-get update && \ + apt-get install \ + gnupg \ + bzip2 \ + git \ + openssh-client \ + python3-requests \ + python3-zstandard \ + unzip + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task + +# %include taskcluster/scripts/misc/fetch-content +ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content + +# %include taskcluster/scripts/misc/fetch-chromium.py +ADD topsrcdir/taskcluster/scripts/misc/fetch-chromium.py /builds/worker/bin/fetch-chromium.py diff --git a/taskcluster/docker/firefox-flatpak/Dockerfile b/taskcluster/docker/firefox-flatpak/Dockerfile new file mode 100644 index 0000000000..5492cd8d51 --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/Dockerfile @@ -0,0 +1,15 @@ +FROM freedesktopsdk/flatpak:20.08-x86_64 +MAINTAINER release@mozilla.com + +RUN mkdir /scripts/ +WORKDIR /scripts/ +# Copy everything in the docker/firefox-flatpak folder but the Dockerfile +# +# XXX The following pattern is neither a regex nor a glob one. It's +# documented at https://golang.org/pkg/path/filepath/#Match. There's no +# way of explicitly filtering out "Dockerfile". If one day, someone needs +# to add a file starting with "D", then we must revisit the pattern below. 
+COPY [^D]* /scripts/ + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/firefox-flatpak/default-preferences.js b/taskcluster/docker/firefox-flatpak/default-preferences.js new file mode 100644 index 0000000000..95663d03db --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/default-preferences.js @@ -0,0 +1,11 @@ +/*global pref*/ +/*eslint no-undef: "error"*/ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +pref("intl.locale.requested", ""); +pref("app.update.auto", false); +pref("app.update.enabled", false); +pref("app.update.autoInstallEnabled", false); +pref("browser.shell.checkDefaultBrowser", false); +pref("spellchecker.dictionary_path", "/usr/share/hunspell"); diff --git a/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py b/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py new file mode 100644 index 0000000000..59ed1896a9 --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/extract_locales_from_l10n_json.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +""" +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ +""" + + +from __future__ import absolute_import, print_function + +import sys +import json + +l10n_changesets_json_path = sys.argv[1] +with open(l10n_changesets_json_path) as f: + locales = json.load(f).keys() +linux_locales = [l for l in locales if l != "ja-JP-mac"] + +print("\n".join(sorted(linux_locales))) diff --git a/taskcluster/docker/firefox-flatpak/launch-script.sh b/taskcluster/docker/firefox-flatpak/launch-script.sh new file mode 100644 index 0000000000..98279e71eb --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/launch-script.sh @@ -0,0 +1,3 @@ +#!/bin/bash +export TMPDIR=$XDG_CACHE_HOME/tmp +exec /app/lib/firefox/firefox "$@" diff --git a/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in new file mode 100644 index 0000000000..d49abf98d9 --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.appdata.xml.in @@ -0,0 +1,43 @@ +<?xml version="1.0" encoding="UTF-8"?> +<application> + <id type="desktop">org.mozilla.firefox</id> + <launchable type="desktop-id">org.mozilla.firefox.desktop</launchable> + <name>Firefox</name> + <summary>Mozilla Firefox Web Browser</summary> + <metadata_license>CC0-1.0</metadata_license> + <project_license>MPL-2.0</project_license> + <description> + <p>The Firefox standalone app for Linux is here</p> + <p>Get uncompromised access to the web by blending integrated privacy features and extensions with incredible performance.</p> + <p>Features:</p> + <ul> + <li>A powerful engine that’s built for rapidfire performance no matter how many tabs you’re switching between</li> + <li>Automatic privacy protection to safeguard your personal data against 2000+ online trackers</li> + <li>Faster page loading by blocking disruptive ads and scripts that hijack your browsing experience</li> + </ul> + <p>One account to sync them all</p> + <p>Seamlessly access passwords, bookmarks and more from all your devices. 
Plus, use the Send Tabs feature to instantly share open tabs between desktop, mobile and tablet.</p> + <p>Browse for good</p> + <p>We never track, collect or store your personal data. The Firefox Browser is made by Mozilla, the non-profit champions of a healthy internet. Mozilla also tackles issues like privacy, misinformation and trolling by investing in fellowships, campaigns and new technologies designed to make the internet a healthier place that’s open to everyone.</p> + </description> + <releases> + <release version="$VERSION" date="$DATE"/> + </releases> + <keywords> + <keyword>mozilla</keyword> + <keyword>internet</keyword> + <keyword>web</keyword> + </keywords> + <content_rating type="oars-1.1" /> + <url type="homepage">https://www.mozilla.org/firefox/</url> + <url type="donation">https://donate.mozilla.org/</url> + <url type="bugtracker">https://bugzilla.mozilla.org/</url> + <url type="help">https://support.mozilla.org/</url> + <url type="translate">https://wiki.mozilla.org/L10n:Starting_a_localization</url> + + <screenshots> + <screenshot type="default">https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image1.png</screenshot> + <screenshot>https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image2.png</screenshot> + <screenshot>https://raw.githubusercontent.com/mozilla-releng/scriptworker-scripts/master/pushflatpakscript/media/screenshots/image3.png</screenshot> + </screenshots> +</application> diff --git a/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop new file mode 100644 index 0000000000..a9fea7a702 --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/org.mozilla.firefox.desktop @@ -0,0 +1,274 @@ +[Desktop Entry] +Version=1.0 +Name=Firefox +GenericName=Web Browser +GenericName[ca]=Navegador web +GenericName[cs]=Webový prohlížeč 
+GenericName[es]=Navegador web +GenericName[fa]=مرورگر اینترنتی +GenericName[fi]=WWW-selain +GenericName[fr]=Navigateur Web +GenericName[hu]=Webböngésző +GenericName[it]=Browser Web +GenericName[ja]=ウェブ・ブラウザ +GenericName[ko]=웹 브라우저 +GenericName[nb]=Nettleser +GenericName[nl]=Webbrowser +GenericName[nn]=Nettlesar +GenericName[no]=Nettleser +GenericName[pl]=Przeglądarka WWW +GenericName[pt]=Navegador Web +GenericName[pt_BR]=Navegador Web +GenericName[sk]=Internetový prehliadač +GenericName[sv]=Webbläsare +Comment=Browse the Web +Comment[ca]=Navegueu per el web +Comment[cs]=Prohlížení stránek World Wide Webu +Comment[de]=Im Internet surfen +Comment[es]=Navegue por la web +Comment[fa]=صفحات شبکه جهانی اینترنت را مرور نمایید +Comment[fi]=Selaa Internetin WWW-sivuja +Comment[fr]=Navigue sur Internet +Comment[hu]=A világháló böngészése +Comment[it]=Esplora il web +Comment[ja]=ウェブを閲覧します +Comment[ko]=웹을 돌아 다닙니다 +Comment[nb]=Surf på nettet +Comment[nl]=Verken het internet +Comment[nn]=Surf på nettet +Comment[no]=Surf på nettet +Comment[pl]=Przeglądanie stron WWW +Comment[pt]=Navegue na Internet +Comment[pt_BR]=Navegue na Internet +Comment[sk]=Prehliadanie internetu +Comment[sv]=Surfa på webben +Exec=firefox %u +Icon=org.mozilla.firefox +Terminal=false +Type=Application +MimeType=text/html;text/xml;application/xhtml+xml;application/vnd.mozilla.xul+xml;text/mml;x-scheme-handler/http;x-scheme-handler/https; +StartupNotify=true +Categories=Network;WebBrowser; +Keywords=web;browser;internet; +Actions=new-window;new-private-window;profile-manager-window; + +[Desktop Action new-window] +Name=Open a New Window +Name[ach]=Dirica manyen +Name[af]=Nuwe venster +Name[an]=Nueva finestra +Name[ar]=نافذة جديدة +Name[as]=নতুন উইন্ডো +Name[ast]=Ventana nueva +Name[az]=Yeni Pəncərə +Name[be]=Новае акно +Name[bg]=Нов прозорец +Name[bn-BD]=নতুন উইন্ডো (N) +Name[bn-IN]=নতুন উইন্ডো +Name[br]=Prenestr nevez +Name[brx]=गोदान उइन्ड'(N) +Name[bs]=Novi prozor +Name[ca]=Finestra nova +Name[cak]=K'ak'a' 
tzuwäch +Name[cs]=Nové okno +Name[cy]=Ffenestr Newydd +Name[da]=Nyt vindue +Name[de]=Neues Fenster +Name[dsb]=Nowe wokno +Name[el]=Νέο παράθυρο +Name[en-GB]=New Window +Name[en-US]=New Window +Name[en-ZA]=New Window +Name[eo]=Nova fenestro +Name[es-AR]=Nueva ventana +Name[es-CL]=Nueva ventana +Name[es-ES]=Nueva ventana +Name[es-MX]=Nueva ventana +Name[et]=Uus aken +Name[eu]=Leiho berria +Name[fa]=پنجره جدید +Name[ff]=Henorde Hesere +Name[fi]=Uusi ikkuna +Name[fr]=Nouvelle fenêtre +Name[fy-NL]=Nij finster +Name[ga-IE]=Fuinneog Nua +Name[gd]=Uinneag ùr +Name[gl]=Nova xanela +Name[gn]=Ovetã pyahu +Name[gu-IN]=નવી વિન્ડો +Name[he]=חלון חדש +Name[hi-IN]=नया विंडो +Name[hr]=Novi prozor +Name[hsb]=Nowe wokno +Name[hu]=Új ablak +Name[hy-AM]=Նոր Պատուհան +Name[id]=Jendela Baru +Name[is]=Nýr gluggi +Name[it]=Nuova finestra +Name[ja]=新しいウィンドウ +Name[ja-JP-mac]=新規ウインドウ +Name[ka]=ახალი ფანჯარა +Name[kk]=Жаңа терезе +Name[km]=បង្អួចថ្មី +Name[kn]=ಹೊಸ ಕಿಟಕಿ +Name[ko]=새 창 +Name[kok]=नवें जनेल +Name[ks]=نئئ وِنڈو +Name[lij]=Neuvo barcon +Name[lo]=ຫນ້າຕ່າງໃຫມ່ +Name[lt]=Naujas langas +Name[ltg]=Jauns lūgs +Name[lv]=Jauns logs +Name[mai]=नव विंडो +Name[mk]=Нов прозорец +Name[ml]=പുതിയ ജാലകം +Name[mr]=नवीन पटल +Name[ms]=Tetingkap Baru +Name[my]=ဝင်းဒိုးအသစ် +Name[nb-NO]=Nytt vindu +Name[ne-NP]=नयाँ सञ्झ्याल +Name[nl]=Nieuw venster +Name[nn-NO]=Nytt vindauge +Name[or]=ନୂତନ ୱିଣ୍ଡୋ +Name[pa-IN]=ਨਵੀਂ ਵਿੰਡੋ +Name[pl]=Nowe okno +Name[pt-BR]=Nova janela +Name[pt-PT]=Nova janela +Name[rm]=Nova fanestra +Name[ro]=Fereastră nouă +Name[ru]=Новое окно +Name[sat]=नावा विंडो (N) +Name[si]=නව කවුළුවක් +Name[sk]=Nové okno +Name[sl]=Novo okno +Name[son]=Zanfun taaga +Name[sq]=Dritare e Re +Name[sr]=Нови прозор +Name[sv-SE]=Nytt fönster +Name[ta]=புதிய சாளரம் +Name[te]=కొత్త విండో +Name[th]=หน้าต่างใหม่ +Name[tr]=Yeni pencere +Name[tsz]=Eraatarakua jimpani +Name[uk]=Нове вікно +Name[ur]=نیا دریچہ +Name[uz]=Yangi oyna +Name[vi]=Cửa sổ mới +Name[wo]=Palanteer bu bees +Name[xh]=Ifestile entsha 
+Name[zh-CN]=新建窗口 +Name[zh-TW]=開新視窗 +Exec=firefox --new-window %u + +[Desktop Action new-private-window] +Name=Open a New Private Window +Name[ach]=Dirica manyen me mung +Name[af]=Nuwe privaatvenster +Name[an]=Nueva finestra privada +Name[ar]=نافذة خاصة جديدة +Name[as]=নতুন ব্যক্তিগত উইন্ডো +Name[ast]=Ventana privada nueva +Name[az]=Yeni Məxfi Pəncərə +Name[be]=Новае акно адасаблення +Name[bg]=Нов прозорец за поверително сърфиране +Name[bn-BD]=নতুন ব্যক্তিগত উইন্ডো +Name[bn-IN]=নতুন ব্যক্তিগত উইন্ডো +Name[br]=Prenestr merdeiñ prevez nevez +Name[brx]=गोदान प्राइभेट उइन्ड' +Name[bs]=Novi privatni prozor +Name[ca]=Finestra privada nova +Name[cak]=K'ak'a' ichinan tzuwäch +Name[cs]=Nové anonymní okno +Name[cy]=Ffenestr Breifat Newydd +Name[da]=Nyt privat vindue +Name[de]=Neues privates Fenster +Name[dsb]=Nowe priwatne wokno +Name[el]=Νέο παράθυρο ιδιωτικής περιήγησης +Name[en-GB]=New Private Window +Name[en-US]=New Private Window +Name[en-ZA]=New Private Window +Name[eo]=Nova privata fenestro +Name[es-AR]=Nueva ventana privada +Name[es-CL]=Nueva ventana privada +Name[es-ES]=Nueva ventana privada +Name[es-MX]=Nueva ventana privada +Name[et]=Uus privaatne aken +Name[eu]=Leiho pribatu berria +Name[fa]=پنجره ناشناس جدید +Name[ff]=Henorde Suturo Hesere +Name[fi]=Uusi yksityinen ikkuna +Name[fr]=Nouvelle fenêtre de navigation privée +Name[fy-NL]=Nij priveefinster +Name[ga-IE]=Fuinneog Nua Phríobháideach +Name[gd]=Uinneag phrìobhaideach ùr +Name[gl]=Nova xanela privada +Name[gn]=Ovetã ñemi pyahu +Name[gu-IN]=નવી ખાનગી વિન્ડો +Name[he]=חלון פרטי חדש +Name[hi-IN]=नयी निजी विंडो +Name[hr]=Novi privatni prozor +Name[hsb]=Nowe priwatne wokno +Name[hu]=Új privát ablak +Name[hy-AM]=Սկսել Գաղտնի դիտարկում +Name[id]=Jendela Mode Pribadi Baru +Name[is]=Nýr huliðsgluggi +Name[it]=Nuova finestra anonima +Name[ja]=新しいプライベートウィンドウ +Name[ja-JP-mac]=新規プライベートウインドウ +Name[ka]=ახალი პირადი ფანჯარა +Name[kk]=Жаңа жекелік терезе +Name[km]=បង្អួចឯកជនថ្មី +Name[kn]=ಹೊಸ ಖಾಸಗಿ ಕಿಟಕಿ +Name[ko]=새 사생활 보호 
모드 +Name[kok]=नवो खाजगी विंडो +Name[ks]=نْو پرایوٹ وینڈو& +Name[lij]=Neuvo barcon privou +Name[lo]=ເປີດຫນ້າຕ່າງສວນຕົວຂື້ນມາໃຫມ່ +Name[lt]=Naujas privataus naršymo langas +Name[ltg]=Jauns privatais lūgs +Name[lv]=Jauns privātais logs +Name[mai]=नया निज विंडो (W) +Name[mk]=Нов приватен прозорец +Name[ml]=പുതിയ സ്വകാര്യ ജാലകം +Name[mr]=नवीन वैयक्तिक पटल +Name[ms]=Tetingkap Persendirian Baharu +Name[my]=New Private Window +Name[nb-NO]=Nytt privat vindu +Name[ne-NP]=नयाँ निजी सञ्झ्याल +Name[nl]=Nieuw privévenster +Name[nn-NO]=Nytt privat vindauge +Name[or]=ନୂତନ ବ୍ୟକ୍ତିଗତ ୱିଣ୍ଡୋ +Name[pa-IN]=ਨਵੀਂ ਪ੍ਰਾਈਵੇਟ ਵਿੰਡੋ +Name[pl]=Nowe okno prywatne +Name[pt-BR]=Nova janela privativa +Name[pt-PT]=Nova janela privada +Name[rm]=Nova fanestra privata +Name[ro]=Fereastră privată nouă +Name[ru]=Новое приватное окно +Name[sat]=नावा निजेराक् विंडो (W ) +Name[si]=නව පුද්ගලික කවුළුව (W) +Name[sk]=Nové okno v režime Súkromné prehliadanie +Name[sl]=Novo zasebno okno +Name[son]=Sutura zanfun taaga +Name[sq]=Dritare e Re Private +Name[sr]=Нови приватан прозор +Name[sv-SE]=Nytt privat fönster +Name[ta]=புதிய தனிப்பட்ட சாளரம் +Name[te]=కొత్త ఆంతరంగిక విండో +Name[th]=หน้าต่างส่วนตัวใหม่ +Name[tr]=Yeni gizli pencere +Name[tsz]=Juchiiti eraatarakua jimpani +Name[uk]=Приватне вікно +Name[ur]=نیا نجی دریچہ +Name[uz]=Yangi maxfiy oyna +Name[vi]=Cửa sổ riêng tư mới +Name[wo]=Panlanteeru biir bu bees +Name[xh]=Ifestile yangasese entsha +Name[zh-CN]=新建隐私浏览窗口 +Name[zh-TW]=新增隱私視窗 +Exec=firefox --private-window %u + +[Desktop Action profile-manager-window] +Name=Open the Profile Manager +Name[cs]=Správa profilů +Exec=firefox --ProfileManager diff --git a/taskcluster/docker/firefox-flatpak/policies.json b/taskcluster/docker/firefox-flatpak/policies.json new file mode 100644 index 0000000000..2645f20c0c --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/policies.json @@ -0,0 +1,6 @@ +{ + "policies": { + "DisableAppUpdate": true, + "DontCheckDefaultBrowser": true + } +} diff --git 
a/taskcluster/docker/firefox-flatpak/runme.sh b/taskcluster/docker/firefox-flatpak/runme.sh new file mode 100755 index 0000000000..9dd9a60f4b --- /dev/null +++ b/taskcluster/docker/firefox-flatpak/runme.sh @@ -0,0 +1,171 @@ +#!/bin/bash +set -xe + +# Future products supporting Flatpaks will set this accordingly +: PRODUCT "${PRODUCT:=firefox}" + +# Required env variables + +test "$VERSION" +test "$BUILD_NUMBER" +test "$CANDIDATES_DIR" +test "$L10N_CHANGESETS" +test "$FLATPAK_BRANCH" + +# Optional env variables +: WORKSPACE "${WORKSPACE:=/home/worker/workspace}" +: ARTIFACTS_DIR "${ARTIFACTS_DIR:=/home/worker/artifacts}" + +pwd + +# XXX: this is used to populate the datetime in org.mozilla.firefox.appdata.xml +DATE=$(date +%Y-%m-%d) +export DATE + +SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +TARGET_TAR_XZ_FULL_PATH="$ARTIFACTS_DIR/target.flatpak.tar.xz" +SOURCE_DEST="${WORKSPACE}/source" +FREEDESKTOP_VERSION="20.08" +FIREFOX_BASEAPP_CHANNEL="20.08" + + +# XXX: these commands are temporarily, there's an upcoming fix in the upstream Docker image +# that we work on top of, from `freedesktopsdk`, that will make these two lines go away eventually +mkdir -p /root /tmp /var/tmp +mkdir -p "$ARTIFACTS_DIR" +rm -rf "$SOURCE_DEST" && mkdir -p "$SOURCE_DEST" + +# XXX ensure we have a clean slate in the local flatpak repo +rm -rf ~/.local/share/flatpak/ + + +CURL="curl --location --retry 10 --retry-delay 10" + +# Download en-US linux64 binary +$CURL -o "${WORKSPACE}/firefox.tar.bz2" \ + "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/en-US/firefox-${VERSION}.tar.bz2" + +# Use list of locales to fetch L10N XPIs +$CURL -o "${WORKSPACE}/l10n_changesets.json" "$L10N_CHANGESETS" +locales=$(python3 "$SCRIPT_DIRECTORY/extract_locales_from_l10n_json.py" "${WORKSPACE}/l10n_changesets.json") + +DISTRIBUTION_DIR="$SOURCE_DEST/distribution" +if [[ "$PRODUCT" == "firefox" ]]; then + # Get Flatpak configuration + 
PARTNER_CONFIG_DIR="$WORKSPACE/partner_config" + git clone https://github.com/mozilla-partners/flatpak.git "$PARTNER_CONFIG_DIR" + mv "$PARTNER_CONFIG_DIR/desktop/flatpak/distribution" "$DISTRIBUTION_DIR" +else + mkdir -p "$DISTRIBUTION_DIR" +fi + +mkdir -p "$DISTRIBUTION_DIR/extensions" +for locale in $locales; do + $CURL -o "$DISTRIBUTION_DIR/extensions/langpack-${locale}@firefox.mozilla.org.xpi" \ + "$CANDIDATES_DIR/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/xpi/${locale}.xpi" +done + +envsubst < "$SCRIPT_DIRECTORY/org.mozilla.firefox.appdata.xml.in" > "${WORKSPACE}/org.mozilla.firefox.appdata.xml" +cp -v "$SCRIPT_DIRECTORY/org.mozilla.firefox.desktop" "$WORKSPACE" +# Add a group policy file to disable app updates, as those are handled by Flathub +cp -v "$SCRIPT_DIRECTORY/policies.json" "$WORKSPACE" +cp -v "$SCRIPT_DIRECTORY/default-preferences.js" "$WORKSPACE" +cp -v "$SCRIPT_DIRECTORY/launch-script.sh" "$WORKSPACE" +cd "${WORKSPACE}" + +flatpak remote-add --user --if-not-exists --from flathub https://dl.flathub.org/repo/flathub.flatpakrepo +# XXX: added --user to `flatpak install` to avoid ambiguity +flatpak install --user -y flathub org.mozilla.firefox.BaseApp//${FIREFOX_BASEAPP_CHANNEL} --no-deps + +# XXX: these commands are temporary; there's an upcoming fix in the upstream Docker image +# that we work on top of, from `freedesktopsdk`, that will make these two lines go away eventually +mkdir -p build +cp -r ~/.local/share/flatpak/app/org.mozilla.firefox.BaseApp/current/active/files build/files + +ARCH=$(flatpak --default-arch) +cat <<EOF > build/metadata +[Application] +name=org.mozilla.firefox +runtime=org.freedesktop.Platform/${ARCH}/${FREEDESKTOP_VERSION} +sdk=org.freedesktop.Sdk/${ARCH}/${FREEDESKTOP_VERSION} +base=app/org.mozilla.firefox.BaseApp/${ARCH}/${FIREFOX_BASEAPP_CHANNEL} +[Extension org.mozilla.firefox.Locale] +directory=share/runtime/langpack +autodelete=true +locale-subset=true + +[Extension org.freedesktop.Platform.ffmpeg-full] 
+directory=lib/ffmpeg +add-ld-path=. +no-autodownload=true +version=${FREEDESKTOP_VERSION} +EOF + +cat <<EOF > build/metadata.locale +[Runtime] +name=org.mozilla.firefox.Locale + +[ExtensionOf] +ref=app/org.mozilla.firefox/${ARCH}/${FLATPAK_BRANCH} +EOF + +appdir=build/files +install -d "${appdir}/lib/" +(cd "${appdir}/lib/" && tar jxf "${WORKSPACE}/firefox.tar.bz2") +install -D -m644 -t "${appdir}/share/appdata" org.mozilla.firefox.appdata.xml +install -D -m644 -t "${appdir}/share/applications" org.mozilla.firefox.desktop +for size in 16 32 48 64 128; do + install -D -m644 "${appdir}/lib/firefox/browser/chrome/icons/default/default${size}.png" "${appdir}/share/icons/hicolor/${size}x${size}/apps/org.mozilla.firefox.png" +done +mkdir -p "${appdir}/lib/ffmpeg" + +appstream-compose --prefix="${appdir}" --origin=flatpak --basename=org.mozilla.firefox org.mozilla.firefox +appstream-util mirror-screenshots "${appdir}"/share/app-info/xmls/org.mozilla.firefox.xml.gz "https://dl.flathub.org/repo/screenshots/org.mozilla.firefox-${FLATPAK_BRANCH}" build/screenshots "build/screenshots/org.mozilla.firefox-${FLATPAK_BRANCH}" + +# XXX: we used to `install -D` before which automatically created the components +# of target, now we need to manually do this since we're symlinking +mkdir -p "${appdir}/lib/firefox/distribution/extensions" +# XXX: we put the langpacks in /app/share/locale/$LANG_CODE and symlink that +# directory to where Firefox looks them up; this way only subset configured +# on user system is downloaded vs all locales +for locale in $locales; do + install -D -m644 -t "${appdir}/share/runtime/langpack/${locale:0:2}/" "${DISTRIBUTION_DIR}/extensions/langpack-${locale}@firefox.mozilla.org.xpi" + ln -sf "/app/share/runtime/langpack/${locale:0:2}/langpack-${locale}@firefox.mozilla.org.xpi" "${appdir}/lib/firefox/distribution/extensions/langpack-${locale}@firefox.mozilla.org.xpi" +done +install -D -m644 -t "${appdir}/lib/firefox/distribution" 
"$DISTRIBUTION_DIR/distribution.ini" +install -D -m644 -t "${appdir}/lib/firefox/distribution" policies.json +install -D -m644 -t "${appdir}/lib/firefox/browser/defaults/preferences" default-preferences.js +install -D -m755 launch-script.sh "${appdir}/bin/firefox" + +flatpak build-finish build \ + --share=ipc \ + --share=network \ + --socket=pulseaudio \ + --socket=x11 \ + --socket=pcsc \ + --require-version=0.11.1 \ + --persist=.mozilla \ + --filesystem=xdg-download:rw \ + --device=all \ + --talk-name=org.freedesktop.FileManager1 \ + --system-talk-name=org.freedesktop.NetworkManager \ + --talk-name=org.a11y.Bus \ + --talk-name=org.gnome.SessionManager \ + --talk-name=org.freedesktop.ScreenSaver \ + --talk-name="org.gtk.vfs.*" \ + --talk-name=org.freedesktop.Notifications \ + --own-name="org.mpris.MediaPlayer2.firefox.*" \ + --command=firefox + +flatpak build-export --disable-sandbox --no-update-summary --exclude='/share/runtime/langpack/*/*' repo build "$FLATPAK_BRANCH" +flatpak build-export --disable-sandbox --no-update-summary --metadata=metadata.locale --files=files/share/runtime/langpack repo build "$FLATPAK_BRANCH" +ostree commit --repo=repo --canonical-permissions --branch=screenshots/x86_64 build/screenshots +flatpak build-update-repo --generate-static-deltas repo +tar cvfJ flatpak.tar.xz repo + +mv -- flatpak.tar.xz "$TARGET_TAR_XZ_FULL_PATH" + +# XXX: if we ever wanted to go back to building flatpak bundles, we can revert this command; useful for testing individual artifacts, not publishable +# flatpak build-bundle "$WORKSPACE"/repo org.mozilla.firefox.flatpak org.mozilla.firefox +# TARGET_FULL_PATH="$ARTIFACTS_DIR/target.flatpak" +# mv -- *.flatpak "$TARGET_FULL_PATH" diff --git a/taskcluster/docker/firefox-snap/Dockerfile b/taskcluster/docker/firefox-snap/Dockerfile new file mode 100644 index 0000000000..5ab993b3b6 --- /dev/null +++ b/taskcluster/docker/firefox-snap/Dockerfile @@ -0,0 +1,64 @@ +## The below code is from 
snapcraft/docker/stable.Dockerfile +## The modifications done are part of the documentation for enabling core18 snaps. +## https://snapcraft.io/docs/t/creating-docker-images-for-snapcraft/11739 + +FROM ubuntu:bionic + +ENV LANG='en_US.UTF-8' \ + LANGUAGE='en_US:en' \ + LC_ALL='en_US.UTF-8' \ + PATH="/snap/bin:$PATH" \ + SNAP='/snap/snapcraft/current' \ + SNAP_NAME='snapcraft' \ + SNAP_ARCH='amd64' \ + TERM='dumb' + +# Grab dependencies. snapd is now required per https://github.com/snapcore/snapcraft/pull/3210 +RUN apt-get update && \ + apt-get dist-upgrade --yes && \ + apt-get install --yes \ + curl \ + jq \ + squashfs-tools \ + locales \ + bzip2 \ + curl \ + gcc \ + git \ + python3 \ + locales \ + snapd \ + sudo \ + && \ + apt-get clean && \ +locale-gen "$LANG" + +COPY download_and_install_snap.sh . + +# Grab the core snap (for backwards compatibility) +RUN bash download_and_install_snap.sh 'core' +# Grab the core18 snap (which snapcraft uses as a base) +RUN bash download_and_install_snap.sh 'core18' +RUN bash download_and_install_snap.sh 'gnome-3-34-1804' +RUN bash download_and_install_snap.sh 'gnome-3-34-1804-sdk' +RUN bash download_and_install_snap.sh 'snapcraft' + +# Create a snapcraft runner (TODO: move version detection to the core of snapcraft). +RUN mkdir -p /snap/bin +RUN echo "#!/bin/sh" > /snap/bin/snapcraft +RUN snap_version="$(awk '/^version:/{print $2}' /snap/snapcraft/current/meta/snap.yaml)" && echo "export SNAP_VERSION=\"$snap_version\"" >> /snap/bin/snapcraft +RUN echo 'exec "$SNAP/usr/bin/python3" "$SNAP/bin/snapcraft" "$@"' >> /snap/bin/snapcraft +RUN chmod +x /snap/bin/snapcraft + +RUN mkdir /scripts/ +WORKDIR /scripts/ +# Copy everything in the docker/firefox-snap folder but the Dockerfile +# +# XXX The following pattern is neither a regex nor a glob one. It's +# documented at https://golang.org/pkg/path/filepath/#Match. There's no +# way of explicitly filtering out "Dockerfile". 
If one day, someone needs +# to add a file starting with "D", then we must revisit the pattern below. +COPY [^D]* /scripts/ + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/firefox-snap/Makefile b/taskcluster/docker/firefox-snap/Makefile new file mode 100644 index 0000000000..d71dc70881 --- /dev/null +++ b/taskcluster/docker/firefox-snap/Makefile @@ -0,0 +1,12 @@ +DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set) +IMAGE_NAME = firefox-snapcraft +FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME) + +build: + docker build -t $(FULL_IMAGE_NAME) --no-cache --rm . + +push: + docker push $(FULL_IMAGE_NAME):latest + +pull: + docker pull $(FULL_IMAGE_NAME):latest diff --git a/taskcluster/docker/firefox-snap/download_and_install_snap.sh b/taskcluster/docker/firefox-snap/download_and_install_snap.sh new file mode 100644 index 0000000000..7def88b2aa --- /dev/null +++ b/taskcluster/docker/firefox-snap/download_and_install_snap.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -ex + +SNAP_NAME="$1" +SNAP_CHANNEL="${2:-stable}" +SNAP_INSTALL_LOCATION="${3:-/snap}" + +SNAP_METADATA="$(curl --header 'X-Ubuntu-Series: 16' "https://api.snapcraft.io/api/v1/snaps/details/$SNAP_NAME?channel=$SNAP_CHANNEL")" + +set +x +SNAP_SHA512="$(echo "$SNAP_METADATA" | jq '.download_sha512' -r)" +SNAP_DOWNLOAD_URL="$(echo "$SNAP_METADATA" | jq '.download_url' -r)" +SNAP_LAST_UPDATED="$(echo "$SNAP_METADATA" | jq '.last_updated' -r)" +SNAP_REVISION="$(echo "$SNAP_METADATA" | jq '.revision' -r)" +SNAP_VERSION="$(echo "$SNAP_METADATA" | jq '.version' -r)" +set -x + +echo "Downloading $SNAP_NAME, version $SNAP_VERSION, revision $SNAP_REVISION (last updated: $SNAP_LAST_UPDATED)..." 
+curl --location "$SNAP_DOWNLOAD_URL" --output "$SNAP_NAME.snap" +sha512sum -c <(echo "$SNAP_SHA512 $SNAP_NAME.snap") + +mkdir -p "$SNAP_INSTALL_LOCATION/$SNAP_NAME" +unsquashfs -d "$SNAP_INSTALL_LOCATION/$SNAP_NAME/current" "$SNAP_NAME.snap" +rm "$SNAP_NAME.snap" + +echo "$SNAP_NAME version $SNAP_VERSION has correctly been uploaded and installed."
\ No newline at end of file diff --git a/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py b/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py new file mode 100644 index 0000000000..50da17a859 --- /dev/null +++ b/taskcluster/docker/firefox-snap/extract_locales_from_l10n_json.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +from __future__ import absolute_import, print_function + +import sys +import json + +l10n_changesets_json_path = sys.argv[1] +with open(l10n_changesets_json_path) as f: + locales = json.load(f).keys() +linux_locales = [l for l in locales if l != "ja-JP-mac"] + +print("\n".join(sorted(linux_locales))) diff --git a/taskcluster/docker/firefox-snap/firefox.desktop b/taskcluster/docker/firefox-snap/firefox.desktop new file mode 100644 index 0000000000..73e36e8d83 --- /dev/null +++ b/taskcluster/docker/firefox-snap/firefox.desktop @@ -0,0 +1,219 @@ +[Desktop Entry] +Version=1.0 +Name=Firefox Web Browser +Name[ar]=متصفح الويب فَيَرفُكْس +Name[ast]=Restolador web Firefox +Name[bn]=ফায়ারফক্স ওয়েব ব্রাউজার +Name[ca]=Navegador web Firefox +Name[cs]=Firefox Webový prohlížeč +Name[da]=Firefox - internetbrowser +Name[el]=Περιηγητής Firefox +Name[es]=Navegador web Firefox +Name[et]=Firefoxi veebibrauser +Name[fa]=مرورگر اینترنتی Firefox +Name[fi]=Firefox-selain +Name[fr]=Navigateur Web Firefox +Name[gl]=Navegador web Firefox +Name[he]=דפדפן האינטרנט Firefox +Name[hr]=Firefox web preglednik +Name[hu]=Firefox webböngésző +Name[it]=Firefox Browser Web +Name[ja]=Firefox ウェブ・ブラウザ +Name[ko]=Firefox 웹 브라우저 +Name[ku]=Geroka torê Firefox +Name[lt]=Firefox interneto naršyklė +Name[nb]=Firefox Nettleser +Name[nl]=Firefox webbrowser +Name[nn]=Firefox Nettlesar +Name[no]=Firefox Nettleser +Name[pl]=Przeglądarka WWW Firefox +Name[pt]=Firefox 
Navegador Web +Name[pt_BR]=Navegador Web Firefox +Name[ro]=Firefox – Navigator Internet +Name[ru]=Веб-браузер Firefox +Name[sk]=Firefox - internetový prehliadač +Name[sl]=Firefox spletni brskalnik +Name[sv]=Firefox webbläsare +Name[tr]=Firefox Web Tarayıcısı +Name[ug]=Firefox توركۆرگۈ +Name[uk]=Веб-браузер Firefox +Name[vi]=Trình duyệt web Firefox +Name[zh_CN]=Firefox 网络浏览器 +Name[zh_TW]=Firefox 網路瀏覽器 +Comment=Browse the World Wide Web +Comment[ar]=تصفح الشبكة العنكبوتية العالمية +Comment[ast]=Restola pela Rede +Comment[bn]=ইন্টারনেট ব্রাউজ করুন +Comment[ca]=Navegueu per la web +Comment[cs]=Prohlížení stránek World Wide Webu +Comment[da]=Surf på internettet +Comment[de]=Im Internet surfen +Comment[el]=Μπορείτε να περιηγηθείτε στο διαδίκτυο (Web) +Comment[es]=Navegue por la web +Comment[et]=Lehitse veebi +Comment[fa]=صفحات شبکه جهانی اینترنت را مرور نمایید +Comment[fi]=Selaa Internetin WWW-sivuja +Comment[fr]=Naviguer sur le Web +Comment[gl]=Navegar pola rede +Comment[he]=גלישה ברחבי האינטרנט +Comment[hr]=Pretražite web +Comment[hu]=A világháló böngészése +Comment[it]=Esplora il web +Comment[ja]=ウェブを閲覧します +Comment[ko]=웹을 돌아 다닙니다 +Comment[ku]=Li torê bigere +Comment[lt]=Naršykite internete +Comment[nb]=Surf på nettet +Comment[nl]=Verken het internet +Comment[nn]=Surf på nettet +Comment[no]=Surf på nettet +Comment[pl]=Przeglądanie stron WWW +Comment[pt]=Navegue na Internet +Comment[pt_BR]=Navegue na Internet +Comment[ro]=Navigați pe Internet +Comment[ru]=Доступ в Интернет +Comment[sk]=Prehliadanie internetu +Comment[sl]=Brskajte po spletu +Comment[sv]=Surfa på webben +Comment[tr]=İnternet'te Gezinin +Comment[ug]=دۇنيادىكى توربەتلەرنى كۆرگىلى بولىدۇ +Comment[uk]=Перегляд сторінок Інтернету +Comment[vi]=Để duyệt các trang web +Comment[zh_CN]=浏览互联网 +Comment[zh_TW]=瀏覽網際網路 +GenericName=Web Browser +GenericName[ar]=متصفح ويب +GenericName[ast]=Restolador Web +GenericName[bn]=ওয়েব ব্রাউজার +GenericName[ca]=Navegador web +GenericName[cs]=Webový prohlížeč 
+GenericName[da]=Webbrowser +GenericName[el]=Περιηγητής διαδικτύου +GenericName[es]=Navegador web +GenericName[et]=Veebibrauser +GenericName[fa]=مرورگر اینترنتی +GenericName[fi]=WWW-selain +GenericName[fr]=Navigateur Web +GenericName[gl]=Navegador Web +GenericName[he]=דפדפן אינטרנט +GenericName[hr]=Web preglednik +GenericName[hu]=Webböngésző +GenericName[it]=Browser web +GenericName[ja]=ウェブ・ブラウザ +GenericName[ko]=웹 브라우저 +GenericName[ku]=Geroka torê +GenericName[lt]=Interneto naršyklė +GenericName[nb]=Nettleser +GenericName[nl]=Webbrowser +GenericName[nn]=Nettlesar +GenericName[no]=Nettleser +GenericName[pl]=Przeglądarka WWW +GenericName[pt]=Navegador Web +GenericName[pt_BR]=Navegador Web +GenericName[ro]=Navigator Internet +GenericName[ru]=Веб-браузер +GenericName[sk]=Internetový prehliadač +GenericName[sl]=Spletni brskalnik +GenericName[sv]=Webbläsare +GenericName[tr]=Web Tarayıcı +GenericName[ug]=توركۆرگۈ +GenericName[uk]=Веб-браузер +GenericName[vi]=Trình duyệt Web +GenericName[zh_CN]=网络浏览器 +GenericName[zh_TW]=網路瀏覽器 +Keywords=Internet;WWW;Browser;Web;Explorer +Keywords[ar]=انترنت;إنترنت;متصفح;ويب;وب +Keywords[ast]=Internet;WWW;Restolador;Web;Esplorador +Keywords[ca]=Internet;WWW;Navegador;Web;Explorador;Explorer +Keywords[cs]=Internet;WWW;Prohlížeč;Web;Explorer +Keywords[da]=Internet;Internettet;WWW;Browser;Browse;Web;Surf;Nettet +Keywords[de]=Internet;WWW;Browser;Web;Explorer;Webseite;Site;surfen;online;browsen +Keywords[el]=Internet;WWW;Browser;Web;Explorer;Διαδίκτυο;Περιηγητής;Firefox;Φιρεφοχ;Ιντερνετ +Keywords[es]=Explorador;Internet;WWW +Keywords[fi]=Internet;WWW;Browser;Web;Explorer;selain;Internet-selain;internetselain;verkkoselain;netti;surffaa +Keywords[fr]=Internet;WWW;Browser;Web;Explorer;Fureteur;Surfer;Navigateur +Keywords[he]=דפדפן;אינטרנט;רשת;אתרים;אתר;פיירפוקס;מוזילה; +Keywords[hr]=Internet;WWW;preglednik;Web +Keywords[hu]=Internet;WWW;Böngésző;Web;Háló;Net;Explorer +Keywords[it]=Internet;WWW;Browser;Web;Navigatore 
+Keywords[is]=Internet;WWW;Vafri;Vefur;Netvafri;Flakk +Keywords[ja]=Internet;WWW;Web;インターネット;ブラウザ;ウェブ;エクスプローラ +Keywords[nb]=Internett;WWW;Nettleser;Explorer;Web;Browser;Nettside +Keywords[nl]=Internet;WWW;Browser;Web;Explorer;Verkenner;Website;Surfen;Online +Keywords[pt]=Internet;WWW;Browser;Web;Explorador;Navegador +Keywords[pt_BR]=Internet;WWW;Browser;Web;Explorador;Navegador +Keywords[ru]=Internet;WWW;Browser;Web;Explorer;интернет;браузер;веб;файрфокс;огнелис +Keywords[sk]=Internet;WWW;Prehliadač;Web;Explorer +Keywords[sl]=Internet;WWW;Browser;Web;Explorer;Brskalnik;Splet +Keywords[tr]=İnternet;WWW;Tarayıcı;Web;Gezgin;Web sitesi;Site;sörf;çevrimiçi;tara +Keywords[uk]=Internet;WWW;Browser;Web;Explorer;Інтернет;мережа;переглядач;оглядач;браузер;веб;файрфокс;вогнелис;перегляд +Keywords[vi]=Internet;WWW;Browser;Web;Explorer;Trình duyệt;Trang web +Keywords[zh_CN]=Internet;WWW;Browser;Web;Explorer;网页;浏览;上网;火狐;Firefox;ff;互联网;网站; +Keywords[zh_TW]=Internet;WWW;Browser;Web;Explorer;網際網路;網路;瀏覽器;上網;網頁;火狐 +Exec=firefox %u +Terminal=false +X-MultipleArgs=false +Type=Application +Icon=/browser/chrome/icons/default/default128.png +Categories=GNOME;GTK;Network;WebBrowser; +MimeType=text/html;text/xml;application/xhtml+xml;application/xml;application/rss+xml;application/rdf+xml;image/gif;image/jpeg;image/png;x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/ftp;x-scheme-handler/chrome;video/webm;application/x-xpinstall; +StartupNotify=true +Actions=NewWindow;NewPrivateWindow; + +[Desktop Action NewWindow] +Name=Open a New Window +Name[ar]=افتح نافذة جديدة +Name[ast]=Abrir una ventana nueva +Name[bn]=Abrir una ventana nueva +Name[ca]=Obre una finestra nova +Name[cs]=Otevřít nové okno +Name[da]=Åbn et nyt vindue +Name[de]=Ein neues Fenster öffnen +Name[el]=Άνοιγμα νέου παραθύρου +Name[es]=Abrir una ventana nueva +Name[fi]=Avaa uusi ikkuna +Name[fr]=Ouvrir une nouvelle fenêtre +Name[gl]=Abrir unha nova xanela +Name[he]=פתיחת חלון חדש +Name[hr]=Otvori novi prozor 
+Name[hu]=Új ablak nyitása +Name[it]=Apri una nuova finestra +Name[ja]=新しいウィンドウを開く +Name[ko]=새 창 열기 +Name[ku]=Paceyeke nû veke +Name[lt]=Atverti naują langą +Name[nb]=Åpne et nytt vindu +Name[nl]=Nieuw venster openen +Name[pt]=Abrir nova janela +Name[pt_BR]=Abrir nova janela +Name[ro]=Deschide o fereastră nouă +Name[ru]=Новое окно +Name[sk]=Otvoriť nové okno +Name[sl]=Odpri novo okno +Name[sv]=Öppna ett nytt fönster +Name[tr]=Yeni pencere aç +Name[ug]=يېڭى كۆزنەك ئېچىش +Name[uk]=Відкрити нове вікно +Name[vi]=Mở cửa sổ mới +Name[zh_CN]=新建窗口 +Name[zh_TW]=開啟新視窗 +Exec=firefox -new-window + +[Desktop Action NewPrivateWindow] +Name=Open a New Private Window +Name[ar]=افتح نافذة جديدة للتصفح الخاص +Name[ca]=Obre una finestra nova en mode d'incògnit +Name[de]=Ein neues privates Fenster öffnen +Name[es]=Abrir una ventana privada nueva +Name[fi]=Avaa uusi yksityinen ikkuna +Name[fr]=Ouvrir une nouvelle fenêtre de navigation privée +Name[he]=פתיחת חלון גלישה פרטית חדש +Name[hu]=Új privát ablak nyitása +Name[it]=Apri una nuova finestra anonima +Name[nb]=Åpne et nytt privat vindu +Name[ru]=Новое приватное окно +Name[sl]=Odpri novo okno zasebnega brskanja +Name[tr]=Yeni bir pencere aç +Name[uk]=Відкрити нове вікно у потайливому режимі +Name[zh_TW]=開啟新隱私瀏覽視窗 +Exec=firefox -private-window diff --git a/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in b/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in new file mode 100644 index 0000000000..5ed18ef519 --- /dev/null +++ b/taskcluster/docker/firefox-snap/firefox.snapcraft.yaml.in @@ -0,0 +1,82 @@ +name: firefox +version: @VERSION@-@BUILD_NUMBER@ +summary: Mozilla Firefox web browser +description: Firefox is a powerful, extensible web browser with support for modern web application technologies. 
+confinement: strict +grade: stable +base: core18 + +apps: + firefox: + command: firefox + command-chain: [tmpdir] + desktop: distribution/firefox.desktop + extensions: [gnome-3-34] + environment: + DISABLE_WAYLAND: 1 + HOME: "$SNAP_USER_COMMON" + GTK_USE_PORTAL: 1 + slots: + - dbus-daemon + - mpris + plugs: + - avahi-observe + - browser-sandbox + - camera + - cups-control + - gsettings + - home + - joystick + - network + - network-observe + - opengl + - pulseaudio + - removable-media + - screen-inhibit-control + - u2f-devices + - unity7 + - upower-observe + +plugs: + browser-sandbox: + interface: browser-support + allow-sandbox: true + +parts: + firefox: + plugin: dump + source: source + stage-packages: + - libxt6 + - libdbus-glib-1-2 + - libasound2 + - libpulse0 + - libgl1-mesa-dri + - libgl1-mesa-glx + - libmirclient9 + - desktop-file-utils + - ffmpeg + - libc-bin + - locales-all + - libcurl3-gnutls + prime: + - -usr/lib/*/libharfbuzz* + - -usr/lib/*/*pango* + + # Find files provided by the base and platform snap and ensure they aren't + # duplicated in this snap + cleanup: + after: [firefox] + plugin: nil + build-snaps: [core18, gnome-3-34-1804] + override-prime: | + set -eux + for snap in "core18" "gnome-3-34-1804"; do + cd "/snap/$snap/current" && find . 
#!/bin/bash
# Build a product Snap package from a release-candidate build and emit
# signing metadata. Runs inside the firefox-snap docker image.

set -xe

# ":" is the shell no-op command; the "${VAR:=default}" expansion inside its
# arguments assigns a default value without executing anything else.
# Thunderbird Snap builds will set this to "thunderbird"
: PRODUCT "${PRODUCT:=firefox}"

# Required env variables -- "test" fails (and set -e aborts) if any is empty.
test "$VERSION"
test "$BUILD_NUMBER"
test "$CANDIDATES_DIR"
test "$L10N_CHANGESETS"

# Optional env variables
: WORKSPACE "${WORKSPACE:=/home/worker/workspace}"
: ARTIFACTS_DIR "${ARTIFACTS_DIR:=/home/worker/artifacts}"
: PUSH_TO_CHANNEL ""

# Absolute directory containing this script and its helper files.
SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

TARGET="target.snap"
TARGET_FULL_PATH="$ARTIFACTS_DIR/$TARGET"
SOURCE_DEST="${WORKSPACE}/source"

mkdir -p "$ARTIFACTS_DIR"
rm -rf "$SOURCE_DEST" && mkdir -p "$SOURCE_DEST"

# Shared curl options: follow redirects, retry transient failures.
CURL="curl --location --retry 10 --retry-delay 10"

# Download and extract en-US linux64 binary
$CURL -o "${WORKSPACE}/${PRODUCT}.tar.bz2" \
    "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/en-US/${PRODUCT}-${VERSION}.tar.bz2"
tar -C "$SOURCE_DEST" -xf "${WORKSPACE}/${PRODUCT}.tar.bz2" --strip-components=1

DISTRIBUTION_DIR="$SOURCE_DEST/distribution"
if [[ "$PRODUCT" == "firefox" ]]; then
    # Get Ubuntu configuration (partner distribution files for Canonical)
    PARTNER_CONFIG_DIR="$WORKSPACE/partner_config"
    git clone https://github.com/mozilla-partners/canonical.git "$PARTNER_CONFIG_DIR"
    mv "$PARTNER_CONFIG_DIR/desktop/ubuntu/distribution" "$DISTRIBUTION_DIR"
else
    mkdir -p "$DISTRIBUTION_DIR"
fi

cp -v "$SCRIPT_DIRECTORY/${PRODUCT}.desktop" "$DISTRIBUTION_DIR"

# Add a group policy file to disable app updates, as those are handled by snapd
cp -v "$SCRIPT_DIRECTORY/policies.json" "$DISTRIBUTION_DIR"

# Use list of locales to fetch L10N XPIs
$CURL -o "${WORKSPACE}/l10n_changesets.json" "$L10N_CHANGESETS"
locales=$(python3 "$SCRIPT_DIRECTORY/extract_locales_from_l10n_json.py" "${WORKSPACE}/l10n_changesets.json")

mkdir -p "$DISTRIBUTION_DIR/extensions"
for locale in $locales; do
    $CURL -o "$SOURCE_DEST/distribution/extensions/langpack-${locale}@${PRODUCT}.mozilla.org.xpi" \
        "$CANDIDATES_DIR/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/xpi/${locale}.xpi"
done

# Extract gtk30.mo from Ubuntu language packs.
# "|| true": not every language pack ships gtk30.mo, and a miss is fine.
apt download language-pack-gnome-*-base
for i in *.deb; do
    # shellcheck disable=SC2086
    dpkg-deb --fsys-tarfile $i | tar xv -C "$SOURCE_DEST" --wildcards "./usr/share/locale-langpack/*/LC_MESSAGES/gtk30.mo" || true
done

# Add wrapper script to set TMPDIR appropriate for the snap
cp -v "$SCRIPT_DIRECTORY/tmpdir" "$SOURCE_DEST"

# Generate snapcraft manifest by substituting version/build into the template.
# NOTE(review): reads ${PRODUCT}.snapcraft.yaml.in from the current working
# directory (presumably the image's WORKDIR /scripts/) -- confirm callers
# never invoke this script from elsewhere.
sed -e "s/@VERSION@/${VERSION}/g" -e "s/@BUILD_NUMBER@/${BUILD_NUMBER}/g" ${PRODUCT}.snapcraft.yaml.in > "${WORKSPACE}/snapcraft.yaml"
cd "${WORKSPACE}"

# Make sure snapcraft knows we're building amd64, even though we may not be on this arch.
export SNAP_ARCH='amd64'

# When a snap is built, snapcraft fetches deb packages from ubuntu.com. They may bump packages
# there and remove the old ones. Updating the database allows snapcraft to find the latest packages.
# For more context, see 1448239
apt-get update

snapcraft

# "--" guards mv against a snap filename that begins with a dash.
mv -- *.snap "$TARGET_FULL_PATH"

cd "$ARTIFACTS_DIR"

# Generate checksums file in the form: "<sha512> sha512 <byte size> <filename>"
size=$(stat --printf="%s" "$TARGET_FULL_PATH")
sha=$(sha512sum "$TARGET_FULL_PATH" | awk '{print $1}')
echo "$sha sha512 $size $TARGET" > "$TARGET.checksums"

echo "Generating signing manifest"
hash=$(sha512sum "$TARGET.checksums" | awk '{print $1}')

cat << EOF > signing_manifest.json
[{"file_to_sign": "$TARGET.checksums", "hash": "$hash"}]
EOF

# For posterity: log the produced artifacts for debugging
find . -ls
cat "$TARGET.checksums"
cat signing_manifest.json
dependencies increasing the size of an image +# drastically. Install it like this saves us almost 200M. +RUN bash -c "curl -L https://bootstrap.pypa.io/get-pip.py | python3.8" + +RUN ["pip", "install", "-r", "/requirements.txt"] + +# scripts +RUN mkdir /home/worker/bin +COPY scripts/* /home/worker/bin/ + +COPY runme.sh /runme.sh +RUN chmod 755 /home/worker/bin/* /*.sh +RUN mkdir /home/worker/keys +COPY *.pubkey /home/worker/keys/ + +ENV HOME /home/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker + +USER worker + +CMD ["/runme.sh"] diff --git a/taskcluster/docker/funsize-update-generator/Makefile b/taskcluster/docker/funsize-update-generator/Makefile new file mode 100644 index 0000000000..6b67f0ed90 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/Makefile @@ -0,0 +1,9 @@ +IMAGE_NAME = funsize-update-generator + +build: + docker build -t $(IMAGE_NAME) --no-cache --rm . + +update_pubkeys: + curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey + curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey + curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey diff --git a/taskcluster/docker/funsize-update-generator/README b/taskcluster/docker/funsize-update-generator/README new file mode 100644 index 0000000000..5e9507be71 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/README @@ -0,0 +1,7 @@ + +To run this locally for testing/development purposes: + +1. Find a funsize generating task ID +2. 
docker run -t -e TASKCLUSTER_ROOT_URL="https://firefox-ci-tc.services.mozilla.com" -e SIGNING_CERT='nightly' -e MAR_CHANNEL_ID='firefox-mozilla-central' -e TASK_ID="${TASK_ID}" -e EXTRA_PARAMS="--arch=x86_64" funsize-update-generator /runme.sh + +The TASK_ID should be a recent "partials" Task. diff --git a/taskcluster/docker/funsize-update-generator/dep1.pubkey b/taskcluster/docker/funsize-update-generator/dep1.pubkey new file mode 100644 index 0000000000..927b2cc947 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/dep1.pubkey @@ -0,0 +1,14 @@ +-----BEGIN PUBLIC KEY----- +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA8Y6AS+xwKoXZl0X5qOKr +0I00xC4UN+IMjA1LIQoZ2GBkiqQF3q8v2nWTFE0+47+3NtP0l8tvsQY+LSYR4Fek +v2Vx4m/CAMKmWzW6Vtlj80y6rQ04V19l41bZXvCIBW5fm9sAvPgc7CngkcLySNqk +8vf57cUEpOmbsjSOCmK0j8hh03I1eWogpbAVEchSm1xN2sUJaVTvz5j8BfE6Vm0i +nN7V0zF+AOxzvntZIpfUqMZbHRiMkGn4l9rjia1Rz0qUc9RNCJkNocyKtQ2N2wnN +FjHpmK9x2V71cS1JQGhgLegrswPCAWY1lTmiLk9LweqGoVL0rqR4LCkb0VCaeSRe +6bUEYcU1ZQedE80zGKB3AfoC5br1shYY0xjmyRSCQ8m8WE60HzXhL8wczKrn5yoJ +iF6BxFwcYsvrWBPgIYVZLcqjODfR/M62o8yIfTC7yBcIdycJ0sWhB47dHAFxv1kc +wv8Ik9ftvDyupE8kwcl58fNOXz93j7IxMry/ey27NyYpESPOUNcjT8TP26FdGebg +4iJx0/LaYmaNUdchfBBlaYqGdH6ZGK0OeVxzHstGuG0gebm/igYcpaFxiQzvWijX +MIAU56s4g+yj7pSzT5/s9r8Gv+YhsNHKm4hnwLZaITV0lLMT5h/OZGseQTPMBnAR +hK3CIfcqG0I23hdwI29ZuUMCAwEAAQ== +-----END PUBLIC KEY----- diff --git a/taskcluster/docker/funsize-update-generator/nightly.pubkey b/taskcluster/docker/funsize-update-generator/nightly.pubkey new file mode 100644 index 0000000000..e51049844c --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/nightly.pubkey @@ -0,0 +1,14 @@ +-----BEGIN PUBLIC KEY----- +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAth151NGY8PBzn0bii9Yc +AjYHZDwP9Lj1c3owG0zLqW2kPcdp86QTAcoYunHGYFFakNG3tooZhzwkMjZ1OrXc +ERjD6AuVSGIBdsKtKP4vLtMjDUteFN4K2+rveozcnYFZuTWEajGu8uoYsv4QgdEA +nTBC39j0J33xlfUR+XKuxzhxNrFX+fRFWuLDJrPziMcVA/mzf0gXlhtEsfV0HYyg 
+yWpHdIWww+llysD1QOQAHk94Ss8c/4BFXFxlwlLeNlB1ZqLm1LsNy0jUy9EHeO3C +H6eqmiFEbpdjlrkJdgR1NcTzeY/Qf/nhWH6BAZrSapQycF7OSLU+rFWMQUElSPLc +NVl7oNAAfSYLTvRjPGi+mJK3wGFQw1EpwQl+elE1oj4+sHvIVpDrLb6btpxfr1cZ +pR4Di/hkOIymxEDWvtUhOxUXnYbDKQSDcAHKM/xR3sdIAiVtVuL4hyBwlAqkQc2j +H+SmnCbazgnq5+dN4y5DjoOgbZQ/koE3s3bUzzMeIxaul9v4gMtGROw3PQ3OZcP0 +lgjPRhY+NeTnWMo2nGb4/eS6Cn2qFLfbEQjsj6pJJBNKfvK/gm1jXb3PgXXdf8+d +2xTPOX8QNpSK7C0w4vYlvSpYZlsx2cznEOV6LDqP0QHUnmd/k1xWRRGiQ7gtT+BV +Fn0h7JyTGmEdFu6l4OhS8hMCAwEAAQ== +-----END PUBLIC KEY----- diff --git a/taskcluster/docker/funsize-update-generator/release.pubkey b/taskcluster/docker/funsize-update-generator/release.pubkey new file mode 100644 index 0000000000..ec1103d828 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/release.pubkey @@ -0,0 +1,14 @@ +-----BEGIN PUBLIC KEY----- +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxCHbY+fP3dvaP9XVbmK6 +i4rbqo72INEWgDSYbr/DIYfCSzHC9H8pU8dyjt+Nd8OtoUZtBD1N9fP7SlrvPZSI +ZSW4k0e9Ky5aV3Uy+ivamSvYszkhqdeP2y7MBu73XHKYONR9PnKa+ovmREwSEI+h +1e0ebm8zvF7Ndwx0mOeZkDu9SDkDGg4aj2xrJyBBOuGVjuctMZ6l1davANI5xiJ0 +GBEU3tR1gJs1T4vLBis5mEFn9y4kgyw/HrxmRYGnZL4fLb2fTI+pNW0Twu3KWwwi +LgLkkVrNWiHSk7YWqxjcg5IA3pQETQ17paTHoB5Mnkvuh6MkDXvRG5VgAHZAigr6 +fJMsasOUaBeos/cD1LDQEIObpetlxc0Fiu/lvUts0755otkhI+yv35+wUa6GJrsE +CsT7c/LaFtQXg06aGXbMLDn0bE/e+nw9KWT/rE1iYXMFkzrqoTeYJ+v7/fD/ywU8 +m8l4CZmXxzd/RogMrM3xl+j4ucAAltDQyL4yLySaIT05w5U8z2zJDEXFvpFDSRfF +K3kjLwGub7wNwaQDuh/msIUdavu4g+GNikCXAJ8AssLuYatyHoltd2tf+EIIDW3U +zzLpymnLo3cAz3IPfXyqVB+mcLcpqbHjl3hWms6l1wGtz6S4WqdrWs/KfzS5EyDK +r63xn1Rg/XFmR57EsFEXAZ8CAwEAAQ== +-----END PUBLIC KEY----- diff --git a/taskcluster/docker/funsize-update-generator/requirements.in b/taskcluster/docker/funsize-update-generator/requirements.in new file mode 100644 index 0000000000..3360c71690 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/requirements.in @@ -0,0 +1,7 @@ +aiohttp +awscli +mar +redo +requests +scriptworker +sh diff --git 
a/taskcluster/docker/funsize-update-generator/requirements.txt b/taskcluster/docker/funsize-update-generator/requirements.txt new file mode 100644 index 0000000000..bbc462f6e5 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/requirements.txt @@ -0,0 +1,338 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes requirements.in +# +aiohttp==3.6.2 \ + --hash=sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \ + --hash=sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \ + --hash=sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \ + --hash=sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \ + --hash=sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \ + --hash=sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \ + --hash=sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \ + --hash=sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \ + --hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \ + --hash=sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \ + --hash=sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \ + --hash=sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965 + # via + # -r requirements.in + # scriptworker + # taskcluster +aiomemoizettl==0.0.3 \ + --hash=sha256:07a6becac60f6cd2604b9f2b73bcd9a50079a0b7b55e2a4e45b1eec5a3ea9659 \ + --hash=sha256:0a80d2dc765e545263f515363b6700ec8cf86fa3968b529f56390b28e34f743d + # via scriptworker +arrow==0.15.5 \ + --hash=sha256:5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b \ + --hash=sha256:70729bcc831da496ca3cb4b7e89472c8e2d27d398908155e0796179f6d2d41ee + # via scriptworker +asn1crypto==1.3.0 \ + --hash=sha256:5a215cb8dc12f892244e3a113fe05397ee23c5c4ca7a69cd6e69811755efc42d \ + 
--hash=sha256:831d2710d3274c8a74befdddaf9f17fcbf6e350534565074818722d6d615b315 + # via mar +async-timeout==3.0.1 \ + --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ + --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 + # via + # aiohttp + # taskcluster +attrs==19.3.0 \ + --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ + --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 + # via + # aiohttp + # jsonschema +awscli==1.18.13 \ + --hash=sha256:197a1168a2c979752bb290b09cbbf8bf836507de1b99a19431ca9b0cde8a5f81 \ + --hash=sha256:d74c5b097fe9288147c6045974eca9dc369055aa6ccce05fb551b0b95e813213 + # via -r requirements.in +backports.lzma==0.0.14 \ + --hash=sha256:16d8b68e4d3cd4e6c9ddb059850452946da3914c8a8e197a7f2b0954559f2df4 + # via mar +botocore==1.15.13 \ + --hash=sha256:265bf5e902e2f0854f12032304f2559f5d042e03c10a59e4f3ec43353be5b88f \ + --hash=sha256:2fce3cc8fae3bbf89984f3065d9eecf0a906b96bee491a4fb54ad7ec7f4261dc + # via + # awscli + # s3transfer +certifi==2019.11.28 \ + --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ + --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f + # via requests +cffi==1.14.0 \ + --hash=sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff \ + --hash=sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b \ + --hash=sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac \ + --hash=sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0 \ + --hash=sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384 \ + --hash=sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26 \ + --hash=sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6 \ + --hash=sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b \ + 
--hash=sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e \ + --hash=sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd \ + --hash=sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2 \ + --hash=sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66 \ + --hash=sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc \ + --hash=sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8 \ + --hash=sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55 \ + --hash=sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4 \ + --hash=sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5 \ + --hash=sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d \ + --hash=sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78 \ + --hash=sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa \ + --hash=sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793 \ + --hash=sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f \ + --hash=sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a \ + --hash=sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f \ + --hash=sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30 \ + --hash=sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f \ + --hash=sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3 \ + --hash=sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c + # via cryptography +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 + # via + # aiohttp + # requests +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + 
--hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 + # via mar +colorama==0.4.3 \ + --hash=sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff \ + --hash=sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1 + # via awscli +construct==2.10.56 \ + --hash=sha256:97ba13edcd98546f10f7555af41c8ce7ae9d8221525ec4062c03f9adbf940661 + # via mar +cryptography==2.8 \ + --hash=sha256:02079a6addc7b5140ba0825f542c0869ff4df9a69c360e339ecead5baefa843c \ + --hash=sha256:1df22371fbf2004c6f64e927668734070a8953362cd8370ddd336774d6743595 \ + --hash=sha256:369d2346db5934345787451504853ad9d342d7f721ae82d098083e1f49a582ad \ + --hash=sha256:3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651 \ + --hash=sha256:44ff04138935882fef7c686878e1c8fd80a723161ad6a98da31e14b7553170c2 \ + --hash=sha256:4b1030728872c59687badcca1e225a9103440e467c17d6d1730ab3d2d64bfeff \ + --hash=sha256:58363dbd966afb4f89b3b11dfb8ff200058fbc3b947507675c19ceb46104b48d \ + --hash=sha256:6ec280fb24d27e3d97aa731e16207d58bd8ae94ef6eab97249a2afe4ba643d42 \ + --hash=sha256:7270a6c29199adc1297776937a05b59720e8a782531f1f122f2eb8467f9aab4d \ + --hash=sha256:73fd30c57fa2d0a1d7a49c561c40c2f79c7d6c374cc7750e9ac7c99176f6428e \ + --hash=sha256:7f09806ed4fbea8f51585231ba742b58cbcfbfe823ea197d8c89a5e433c7e912 \ + --hash=sha256:90df0cc93e1f8d2fba8365fb59a858f51a11a394d64dbf3ef844f783844cc793 \ + --hash=sha256:971221ed40f058f5662a604bd1ae6e4521d84e6cad0b7b170564cc34169c8f13 \ + --hash=sha256:a518c153a2b5ed6b8cc03f7ae79d5ffad7315ad4569b2d5333a13c38d64bd8d7 \ + --hash=sha256:b0de590a8b0979649ebeef8bb9f54394d3a41f66c5584fff4220901739b6b2f0 \ + --hash=sha256:b43f53f29816ba1db8525f006fa6f49292e9b029554b3eb56a189a70f2a40879 \ + --hash=sha256:d31402aad60ed889c7e57934a03477b572a03af7794fa8fb1780f21ea8f6551f \ + --hash=sha256:de96157ec73458a7f14e3d26f17f8128c959084931e8997b9e655a39c8fde9f9 \ + --hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 
\ + --hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \ + --hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8 + # via + # jwcrypto + # mar + # scriptworker +dictdiffer==0.8.1 \ + --hash=sha256:1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2 \ + --hash=sha256:d79d9a39e459fe33497c858470ca0d2e93cb96621751de06d631856adfd9c390 + # via scriptworker +docutils==0.15.2 \ + --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \ + --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \ + --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 + # via + # awscli + # botocore +github3.py==1.3.0 \ + --hash=sha256:15a115c18f7bfcf934dfef7ab103844eb9f620c586bad65967708926da47cbda \ + --hash=sha256:50833b5da35546b8cced0e8d7ff4c50a9afc2c8e46cc4d07dc4b66d26467c708 + # via scriptworker +idna-ssl==1.1.0 \ + --hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c + # via aiohttp +idna==2.9 \ + --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ + --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa + # via + # idna-ssl + # requests + # yarl +immutabledict==0.2.0 \ + --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \ + --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a + # via scriptworker +importlib-metadata==1.5.0 \ + --hash=sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302 \ + --hash=sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b + # via jsonschema +jmespath==0.9.5 \ + --hash=sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec \ + --hash=sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9 + # via botocore +json-e==3.0.2 \ + --hash=sha256:35dc96ce04f0caece560dc18266af7f0ffb780955273ffa20814be65447cad49 + # via scriptworker 
+jsonschema==3.2.0 \ + --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ + --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a + # via scriptworker +jwcrypto==0.7 \ + --hash=sha256:618ded1d25d3f806a1ab05cee42633a5a2787af33fca8d8f539b0aa1478b3728 \ + --hash=sha256:adbe1f6266cde35d40d5de6d1419612b3bd4c869b9332c88c9d7a9163d305100 + # via github3.py +mar==3.1.0 \ + --hash=sha256:1939df482f2d3f6221405da00d6286d77d2dd60d372a0fd37532a8f00544f64f \ + --hash=sha256:5d2904a063f0da625e37515fa9eb340082e69fa1c00fdbeb82d28b7ff3c51e28 + # via -r requirements.in +mohawk==1.1.0 \ + --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \ + --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723 + # via taskcluster +multidict==4.7.5 \ + --hash=sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1 \ + --hash=sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35 \ + --hash=sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928 \ + --hash=sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969 \ + --hash=sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e \ + --hash=sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78 \ + --hash=sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1 \ + --hash=sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136 \ + --hash=sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8 \ + --hash=sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2 \ + --hash=sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e \ + --hash=sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4 \ + --hash=sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5 \ + --hash=sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd \ + 
--hash=sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab \ + --hash=sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20 \ + --hash=sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3 + # via + # aiohttp + # yarl +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via rsa +pycparser==2.19 \ + --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 + # via cffi +pyrsistent==0.15.7 \ + --hash=sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280 + # via jsonschema +python-dateutil==2.8.1 \ + --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ + --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a + # via + # arrow + # botocore + # github3.py +pyyaml==5.3 \ + --hash=sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6 \ + --hash=sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf \ + --hash=sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5 \ + --hash=sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e \ + --hash=sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811 \ + --hash=sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e \ + --hash=sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d \ + --hash=sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20 \ + --hash=sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689 \ + --hash=sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994 \ + --hash=sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615 + # via + # awscli + # scriptworker +redo==2.0.3 \ + --hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \ + 
--hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8 + # via -r requirements.in +requests==2.23.0 \ + --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ + --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 + # via + # -r requirements.in + # github3.py + # taskcluster +rsa==3.4.2 \ + --hash=sha256:25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5 \ + --hash=sha256:43f682fea81c452c98d09fc316aae12de6d30c4b5c84226642cf8f8fd1c93abd + # via awscli +s3transfer==0.3.3 \ + --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \ + --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db + # via awscli +scriptworker==32.0.3 \ + --hash=sha256:a4d428a2c9db1b0dbabb86c0badf850e919ed83ffaba701185370ebe3dd5f7aa \ + --hash=sha256:d60976a94ba86d0c4b9f23536ce3cb026281c726d00da12b6546a8dd80cb4d1b + # via -r requirements.in +sh==1.12.14 \ + --hash=sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb \ + --hash=sha256:b52bf5833ed01c7b5c5fb73a7f71b3d98d48e9b9b8764236237bdc7ecae850fc + # via -r requirements.in +six==1.14.0 \ + --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ + --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c + # via + # cryptography + # jsonschema + # mohawk + # pyrsistent + # python-dateutil + # taskcluster +slugid==2.0.0 \ + --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \ + --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c + # via taskcluster +taskcluster-urls==12.1.0 \ + --hash=sha256:1dc740c32c7beb31e11ed7ccf9da2d47a504acdb3170c8900649433b0fd16fb2 \ + --hash=sha256:4a62c776aeba6d45044789a8845ec4d8521bc1bb6ebfc86d79ee759bcdd4f2f7 + # via taskcluster +taskcluster==25.4.0 \ + --hash=sha256:017e626ec7211cc250f6c59ac4fbeb3ca7d7ecf0a253eb4f21a694bc5d02ae36 \ + 
--hash=sha256:0a8fc965441ab60aafb83bcf2b670afd3f8817b13067561e084b4d2528f1e75c \ + --hash=sha256:a9d10a794a065ea17a11fa8ee4861e57ea702362f70d00dc97d5a4f685a70d4f + # via scriptworker +typing-extensions==3.7.4.1 \ + --hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \ + --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ + --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 + # via aiohttp +uritemplate==3.0.1 \ + --hash=sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f \ + --hash=sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae + # via github3.py +urllib3==1.25.8 \ + --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ + --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc + # via + # botocore + # requests +yarl==1.4.2 \ + --hash=sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce \ + --hash=sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6 \ + --hash=sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce \ + --hash=sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae \ + --hash=sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d \ + --hash=sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f \ + --hash=sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b \ + --hash=sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b \ + --hash=sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb \ + --hash=sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462 \ + --hash=sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea \ + --hash=sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70 \ + --hash=sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1 \ + 
--hash=sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a \ + --hash=sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b \ + --hash=sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080 \ + --hash=sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2 + # via aiohttp +zipp==3.1.0 \ + --hash=sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b \ + --hash=sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96 + # via importlib-metadata + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. +# setuptools diff --git a/taskcluster/docker/funsize-update-generator/runme.sh b/taskcluster/docker/funsize-update-generator/runme.sh new file mode 100644 index 0000000000..99d80c09e2 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/runme.sh @@ -0,0 +1,61 @@ +#!/bin/sh + +set -xe + +test "$TASK_ID" +test "$SIGNING_CERT" + +ARTIFACTS_DIR="/home/worker/artifacts" +mkdir -p "$ARTIFACTS_DIR" + +# Strip trailing / if present +TASKCLUSTER_ROOT_URL="${TASKCLUSTER_ROOT_URL%/}" +export TASKCLUSTER_ROOT_URL + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. +queue_base="${TASKCLUSTER_ROOT_URL%/}/api/queue/v1" + +curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID" + +# auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/ +# -> bucket of tc-gp-private-1d-us-east-1, path of releng/mbsdiff-cache/ +# Trailing slash is important, due to prefix permissions in S3. +S3_BUCKET_AND_PATH=$(jq -r '.scopes[] | select(contains ("auth:aws-s3"))' /home/worker/task.json | awk -F: '{print $4}') + +# Will be empty if there's no scope for AWS S3. +if [ -n "${S3_BUCKET_AND_PATH}" ] && getent hosts taskcluster +then + # Does this parse as we expect? 
+ S3_PATH=${S3_BUCKET_AND_PATH#*/} + AWS_BUCKET_NAME=${S3_BUCKET_AND_PATH%/${S3_PATH}*} + test "${S3_PATH}" + test "${AWS_BUCKET_NAME}" + + set +x # Don't echo these. + secret_url="${TASKCLUSTER_PROXY_URL}/api/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}" + AUTH=$(curl "${secret_url}") + AWS_ACCESS_KEY_ID=$(echo "${AUTH}" | jq -r '.credentials.accessKeyId') + AWS_SECRET_ACCESS_KEY=$(echo "${AUTH}" | jq -r '.credentials.secretAccessKey') + AWS_SESSION_TOKEN=$(echo "${AUTH}" | jq -r '.credentials.sessionToken') + export AWS_ACCESS_KEY_ID + export AWS_SECRET_ACCESS_KEY + export AWS_SESSION_TOKEN + AUTH= + + if [ -n "$AWS_ACCESS_KEY_ID" ] && [ -n "$AWS_SECRET_ACCESS_KEY" ]; then + # Pass the full bucket/path prefix, as the script just appends local files. + export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -S ${S3_BUCKET_AND_PATH}" + fi + set -x +else + # disable caching + export MBSDIFF_HOOK= +fi + +# EXTRA_PARAMS is optional +# shellcheck disable=SC2086 +python3.8 /home/worker/bin/funsize.py \ + --artifacts-dir "$ARTIFACTS_DIR" \ + --task-definition /home/worker/task.json \ + --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \ + $EXTRA_PARAMS diff --git a/taskcluster/docker/funsize-update-generator/scripts/funsize.py b/taskcluster/docker/funsize-update-generator/scripts/funsize.py new file mode 100644 index 0000000000..ab35b78806 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py @@ -0,0 +1,472 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, division, print_function + +import argparse +import asyncio +import configparser +import json +import logging +import os +import shutil +import tempfile +import time +from distutils.util import strtobool +from contextlib import AsyncExitStack +from pathlib import Path + +import aiohttp +from mardor.reader import MarReader +from mardor.signing import get_keysize +from scriptworker.utils import retry_async, get_hash + +log = logging.getLogger(__name__) + + +ROOT_URL = os.environ.get( + "TASKCLUSTER_ROOT_URL", "https://firefox-ci-tc.services.mozilla.com" +) +QUEUE_PREFIX = f"{ROOT_URL}/api/queue/" +ALLOWED_URL_PREFIXES = ( + "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/", + "http://download.cdn.mozilla.net/pub/firefox/nightly/", + "http://ftp.mozilla.org/", + "http://download.mozilla.org/", + "https://archive.mozilla.org/", + "http://archive.mozilla.org/", + QUEUE_PREFIX, +) +STAGING_URL_PREFIXES = ( + "http://ftp.stage.mozaws.net/", + "https://ftp.stage.mozaws.net/", +) + +BCJ_OPTIONS = { + "x86": ["--x86"], + "x86_64": ["--x86"], + "aarch64": [], + # macOS Universal Builds + "macos-x86_64-aarch64": [], +} + + +def verify_signature(mar, cert): + log.info("Checking %s signature", mar) + with open(mar, "rb") as mar_fh: + m = MarReader(mar_fh) + if not m.verify(verify_key=cert): + raise ValueError( + "MAR Signature invalid: %s (%s) against %s", mar, m.signature_type, cert + ) + + +def process_arguments(): + parser = argparse.ArgumentParser() + parser.add_argument("--artifacts-dir", required=True) + parser.add_argument("--signing-cert", type=argparse.FileType("rb"), required=True) + parser.add_argument("--task-definition", required=True, type=argparse.FileType("r")) + parser.add_argument( + "--allow-staging-prefixes", + action="store_true", + default=strtobool(os.environ.get("FUNSIZE_ALLOW_STAGING_PREFIXES", "false")), + help="Allow files from staging buckets.", + ) + parser.add_argument( + "-q", + "--quiet", 
+ dest="log_level", + action="store_const", + const=logging.INFO, + default=logging.DEBUG, + ) + parser.add_argument( + "--arch", + type=str, + required=True, + choices=BCJ_OPTIONS.keys(), + help="The archtecture you are building.", + ) + return parser.parse_args() + + +def validate_mar_channel_id(mar, channel_ids): + log.info("Checking %s for MAR_CHANNEL_ID %s", mar, channel_ids) + # We may get a string with a list representation, or a single entry string. + channel_ids = set(channel_ids.split(",")) + + product_info = MarReader(open(mar, "rb")).productinfo + if not isinstance(product_info, tuple): + raise ValueError( + "Malformed product information in mar: {}".format(product_info) + ) + + found_channel_ids = set(product_info[1].split(",")) + + if not found_channel_ids.issubset(channel_ids): + raise ValueError( + "MAR_CHANNEL_ID mismatch, {} not in {}".format(product_info[1], channel_ids) + ) + + log.info("%s channel %s in %s", mar, product_info[1], channel_ids) + + +async def retry_download(*args, semaphore=None, **kwargs): # noqa: E999 + """Retry download() calls.""" + async with AsyncExitStack() as stack: + if semaphore: + await stack.enter_async_context(semaphore) + await retry_async( + download, + retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError), + args=args, + kwargs=kwargs, + ) + + +def verify_allowed_url(mar, allowed_url_prefixes): + if not any(mar.startswith(prefix) for prefix in allowed_url_prefixes): + raise ValueError( + "{mar} is not in allowed URL prefixes: {p}".format( + mar=mar, p=allowed_url_prefixes + ) + ) + + +async def download(url, dest, mode=None): # noqa: E999 + log.info("Downloading %s to %s", url, dest) + chunk_size = 4096 + bytes_downloaded = 0 + async with aiohttp.ClientSession(raise_for_status=True) as session: + start = time.time() + async with session.get(url, timeout=120) as resp: + # Additional early logging for download timeouts. 
+ log.debug("Fetching from url %s", resp.url) + for history in resp.history: + log.debug("Redirection history: %s", history.url) + log.debug("Headers for %s: %s", resp.url, resp.headers) + if "Content-Length" in resp.headers: + log.debug( + "Content-Length expected for %s: %s", + url, + resp.headers["Content-Length"], + ) + log_interval = chunk_size * 1024 + with open(dest, "wb") as fd: + while True: + chunk = await resp.content.read(chunk_size) + if not chunk: + break + fd.write(chunk) + bytes_downloaded += len(chunk) + log_interval -= len(chunk) + if log_interval <= 0: + log.debug("Bytes downloaded for %s: %d", url, bytes_downloaded) + log_interval = chunk_size * 1024 + end = time.time() + log.info( + "Downloaded %s, %s bytes in %s seconds", + url, + bytes_downloaded, + int(end - start), + ) + if mode: + log.info("chmod %o %s", mode, dest) + os.chmod(dest, mode) + + +async def download_buildsystem_bits(partials_config, downloads, tools_dir): + """Download external tools needed to make partials.""" + + # We're making the assumption that the "to" mar is the same for all, + # as that's the way this task is currently used. 
+ to_url = extract_download_urls(partials_config, mar_type="to").pop() + + repo = get_option( + downloads[to_url]["extracted_path"], + filename="platform.ini", + section="Build", + option="SourceRepository", + ) + revision = get_option( + downloads[to_url]["extracted_path"], + filename="platform.ini", + section="Build", + option="SourceStamp", + ) + + urls = { + "make_incremental_update.sh": f"{repo}/raw-file/{revision}/tools/" + "update-packaging/make_incremental_update.sh", + "common.sh": f"{repo}/raw-file/{revision}/tools/update-packaging/common.sh", + "mar": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/" + "latest-mozilla-central/mar-tools/linux64/mar", + "mbsdiff": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/" + "latest-mozilla-central/mar-tools/linux64/mbsdiff", + } + for filename, url in urls.items(): + filename = tools_dir / filename + await retry_download(url, dest=filename, mode=0o755) + + +def find_file(directory, filename): + log.debug("Searching for %s in %s", filename, directory) + return next(Path(directory).rglob(filename)) + + +def get_option(directory, filename, section, option): + log.info("Extracting [%s]: %s from %s/**/%s", section, option, directory, filename) + f = find_file(directory, filename) + config = configparser.ConfigParser() + config.read(f) + rv = config.get(section, option) + log.info("Found %s", rv) + return rv + + +def extract_download_urls(partials_config, mar_type): + """Extract a set of urls to download from the task configuration. + + mar_type should be one of "from", "to" + """ + return {definition[f"{mar_type}_mar"] for definition in partials_config} + + +async def download_and_verify_mars(partials_config, allowed_url_prefixes, signing_cert): + """Download, check signature, channel ID and unpack MAR files.""" + # Separate these categories so we can opt to perform checks on only 'to' downloads. 
+ from_urls = extract_download_urls(partials_config, mar_type="from") + to_urls = extract_download_urls(partials_config, mar_type="to") + tasks = list() + downloads = dict() + + semaphore = asyncio.Semaphore(2) # Magic 2 to reduce network timeout errors. + for url in from_urls.union(to_urls): + verify_allowed_url(url, allowed_url_prefixes) + downloads[url] = { + "download_path": Path(tempfile.mkdtemp()) / Path(url).name, + } + tasks.append( + retry_download(url, downloads[url]["download_path"], semaphore=semaphore) + ) + + await asyncio.gather(*tasks) + + for url in downloads: + # Verify signature, but not from an artifact as we don't + # depend on the signing task + if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION") and not url.startswith( + QUEUE_PREFIX + ): + verify_signature(downloads[url]["download_path"], signing_cert) + + # Only validate the target channel ID, as we update from beta->release + if url in to_urls: + validate_mar_channel_id( + downloads[url]["download_path"], os.environ["MAR_CHANNEL_ID"] + ) + + downloads[url]["extracted_path"] = tempfile.mkdtemp() + with open(downloads[url]["download_path"], "rb") as mar_fh: + log.info( + "Unpacking %s into %s", + downloads[url]["download_path"], + downloads[url]["extracted_path"], + ) + m = MarReader(mar_fh) + m.extract(downloads[url]["extracted_path"]) + + return downloads + + +async def run_command(cmd, cwd="/", env=None, label=None, silent=False): + log.info("Running: %s", cmd) + if not env: + env = dict() + process = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + cwd=cwd, + env=env, + ) + if label: + label = "{}: ".format(label) + else: + label = "" + + async def read_output(stream, label, printcmd): + while True: + line = await stream.readline() + if line == b"": + break + printcmd("%s%s", label, line.decode("utf-8").rstrip()) + + if silent: + await process.wait() + else: + await asyncio.gather( + read_output(process.stdout, label, 
log.info), + read_output(process.stderr, label, log.warn), + ) + await process.wait() + + +async def generate_partial(from_dir, to_dir, dest_mar, mar_data, tools_dir, arch): + log.info("Generating partial %s", dest_mar) + env = os.environ.copy() + env["LC_ALL"] = "C" + env["MAR"] = tools_dir / "mar" + env["MBSDIFF"] = tools_dir / "mbsdiff" + if arch: + env["BCJ_OPTIONS"] = " ".join(BCJ_OPTIONS[arch]) + env["MOZ_PRODUCT_VERSION"] = mar_data["version"] + env["MAR_CHANNEL_ID"] = mar_data["MAR_CHANNEL_ID"] + env["BRANCH"] = mar_data["branch"] + + make_incremental_update = tools_dir / "make_incremental_update.sh" + cmd = f"{make_incremental_update} {dest_mar} {from_dir} {to_dir}" + + await run_command(cmd, cwd=dest_mar.parent, env=env, label=dest_mar.name) + validate_mar_channel_id(dest_mar, mar_data["MAR_CHANNEL_ID"]) + + +async def manage_partial( + partial_def, artifacts_dir, tools_dir, downloads, semaphore, arch=None +): + from_url = partial_def["from_mar"] + to_url = partial_def["to_mar"] + from_path = downloads[from_url]["extracted_path"] + to_path = downloads[to_url]["extracted_path"] + + mar_data = { + "MAR_CHANNEL_ID": os.environ["MAR_CHANNEL_ID"], + "version": get_option( + to_path, filename="application.ini", section="App", option="Version" + ), + "appName": get_option( + from_path, filename="application.ini", section="App", option="Name" + ), + # Use Gecko repo and rev from platform.ini, not application.ini + "repo": get_option( + to_path, filename="platform.ini", section="Build", option="SourceRepository" + ), + "revision": get_option( + to_path, filename="platform.ini", section="Build", option="SourceStamp" + ), + "locale": partial_def["locale"], + "from_mar": partial_def["from_mar"], + "from_size": os.path.getsize(downloads[from_url]["download_path"]), + "from_hash": get_hash(downloads[from_url]["download_path"], hash_alg="sha512"), + "from_buildid": get_option( + from_path, filename="application.ini", section="App", option="BuildID" + ), + "to_mar": 
partial_def["to_mar"], + "to_size": os.path.getsize(downloads[to_url]["download_path"]), + "to_hash": get_hash(downloads[to_url]["download_path"], hash_alg="sha512"), + "to_buildid": get_option( + to_path, filename="application.ini", section="App", option="BuildID" + ), + "mar": partial_def["dest_mar"], + } + # if branch not set explicitly use repo-name + mar_data["branch"] = partial_def.get("branch", Path(mar_data["repo"]).name) + + for field in ( + "update_number", + "previousVersion", + "previousBuildNumber", + "toVersion", + "toBuildNumber", + ): + if field in partial_def: + mar_data[field] = partial_def[field] + + dest_mar = Path(artifacts_dir) / mar_data["mar"] + + async with semaphore: + await generate_partial(from_path, to_path, dest_mar, mar_data, tools_dir, arch) + + mar_data["size"] = os.path.getsize(dest_mar) + mar_data["hash"] = get_hash(dest_mar, hash_alg="sha512") + return mar_data + + +async def async_main(args, signing_cert): + tasks = [] + + allowed_url_prefixes = list(ALLOWED_URL_PREFIXES) + if args.allow_staging_prefixes: + allowed_url_prefixes += STAGING_URL_PREFIXES + + task = json.load(args.task_definition) + + downloads = await download_and_verify_mars( + task["extra"]["funsize"]["partials"], allowed_url_prefixes, signing_cert + ) + + tools_dir = Path(tempfile.mkdtemp()) + await download_buildsystem_bits( + partials_config=task["extra"]["funsize"]["partials"], + downloads=downloads, + tools_dir=tools_dir, + ) + + # May want to consider os.cpu_count() if we ever run on osx/win. + # sched_getaffinity is the list of cores we can run on, not the total. 
+ semaphore = asyncio.Semaphore(len(os.sched_getaffinity(0))) + for definition in task["extra"]["funsize"]["partials"]: + tasks.append( + asyncio.ensure_future( + retry_async( + manage_partial, + retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError), + kwargs=dict( + partial_def=definition, + artifacts_dir=args.artifacts_dir, + tools_dir=tools_dir, + arch=args.arch, + downloads=downloads, + semaphore=semaphore, + ), + ) + ) + ) + manifest = await asyncio.gather(*tasks) + + for url in downloads: + downloads[url]["download_path"].unlink() + shutil.rmtree(downloads[url]["extracted_path"]) + shutil.rmtree(tools_dir) + + return manifest + + +def main(): + args = process_arguments() + + logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s") + log.setLevel(args.log_level) + + signing_cert = args.signing_cert.read() + assert get_keysize(signing_cert) == 4096 + + artifacts_dir = Path(args.artifacts_dir) + if not artifacts_dir.exists(): + artifacts_dir.mkdir() + + loop = asyncio.get_event_loop() + manifest = loop.run_until_complete(async_main(args, signing_cert)) + loop.close() + + manifest_file = artifacts_dir / "manifest.json" + with open(manifest_file, "w") as fp: + json.dump(manifest, fp, indent=2, sort_keys=True) + + log.debug("{}".format(json.dumps(manifest, indent=2, sort_keys=True))) + + +if __name__ == "__main__": + main() diff --git a/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh new file mode 100755 index 0000000000..965d938247 --- /dev/null +++ b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh @@ -0,0 +1,157 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+#
+# This tool contains functions that are to be used to handle/enable funsize
+# Author: Mihai Tabara
+#
+
+# Mode flag: "PRE" looks a patch up in the cache, "POST" stores one (-g / -u).
+HOOK=
+AWS_BUCKET_NAME=
+LOCAL_CACHE_DIR=
+
+# Don't cache files smaller than this, as it's slower with S3
+# Bug 1437473
+CACHE_THRESHOLD=500000
+
+S3_CACHE_HITS=0
+S3_CACHE_MISSES=0
+
+# Print the sha512 digest of the file given as $1.
+getsha512(){
+    openssl sha512 "${1}" | awk '{print $2}'
+}
+
+print_usage(){
+    echo "$(basename "$0") [-S S3-BUCKET-NAME] [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
+    echo "Script that saves/retrieves from cache presumptive patches as args"
+    echo ""
+    echo "-S S3-BUCKET-NAME s3 bucket (and optional path prefix) where to send the files"
+    echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
+    echo "-g pre hook - tests whether patch already in cache"
+    echo "-u post hook - upload patch to cache for future use"
+    echo ""
+    echo "PATH-FROM-URL : path on disk for source file"
+    echo "PATH-TO-URL : path on disk for destination file"
+    echo "PATH-PATCH : path on disk for patch between source and destination"
+}
+
+# Store a generated patch ($3) in the local and/or s3 cache, keyed on the
+# sha512 digests of the source ($1) and destination ($2) files.
+upload_patch(){
+    if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+    then
+        return 0
+    fi
+    sha_from=$(getsha512 "$1")
+    sha_to=$(getsha512 "$2")
+    patch_path="$3"
+    patch_filename="$(basename "$3")"
+
+    # save to local cache first
+    if [ -n "$LOCAL_CACHE_DIR" ]; then
+        # Run mkdir directly with a quoted expansion; building the command
+        # in an unquoted string word-split on paths containing spaces.
+        if mkdir -p "$LOCAL_CACHE_DIR/$sha_from" >&2; then
+            cp -avf "${patch_path}" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
+            echo "${patch_path} saved on local cache."
+        fi
+    fi
+
+    if [ -n "${AWS_BUCKET_NAME}" ]; then
+        BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${patch_filename}"
+        if aws s3 cp "${patch_path}" "${BUCKET_PATH}"; then
+            echo "${patch_path} saved on s3://${AWS_BUCKET_NAME}"
+            return 0
+        fi
+        echo "${patch_path} failed to be uploaded to s3://${AWS_BUCKET_NAME}"
+        return 1
+    fi
+    return 0
+}
+
+# Retrieve a previously-cached patch into $3, keyed on the sha512 digests of
+# the source ($1) and destination ($2) files. Returns non-zero on cache miss.
+get_patch(){
+    # $1 and $2 are the /path/to/filename
+    if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+    then
+        return 1
+    fi
+    sha_from=$(getsha512 "$1")
+    sha_to=$(getsha512 "$2")
+    destination_file="$3"
+    s3_filename="$(basename "$3")"
+
+    # Try to retrieve from local cache first.
+    if [ -n "$LOCAL_CACHE_DIR" ]; then
+        if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
+            cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
+            echo "Successfully retrieved ${destination_file} from local cache."
+            return 0
+        fi
+    fi
+    # If not in the local cache, we might find it remotely.
+
+    if [ -n "${AWS_BUCKET_NAME}" ]; then
+        BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${s3_filename}"
+        if aws s3 ls "${BUCKET_PATH}"; then
+            ((S3_CACHE_HITS++))
+            echo "s3 cache hit for ${s3_filename} (${S3_CACHE_HITS} total hits)"
+            if aws s3 cp "${BUCKET_PATH}" "${destination_file}"; then
+                echo "Successfully retrieved ${destination_file} from s3://${AWS_BUCKET_NAME}"
+                return 0
+            else
+                echo "Failed to retrieve ${destination_file} from s3://${AWS_BUCKET_NAME}"
+                return 1
+            fi
+        # Not found, fall through to default error
+        else
+            ((S3_CACHE_MISSES++))
+            echo "s3 cache miss for ${s3_filename} (${S3_CACHE_MISSES} total misses)"
+        fi
+    fi
+    return 1
+}
+
+OPTIND=1
+
+while getopts ":S:c:gu" option; do
+    case $option in
+        S)
+            # This will probably be bucketname/path/prefix but we can use it either way
+            AWS_BUCKET_NAME="$OPTARG"
+            # Ensure trailing slash is there.
+            if [[ ! $AWS_BUCKET_NAME =~ .*/$ ]]; then
+                AWS_BUCKET_NAME="${AWS_BUCKET_NAME}/"
+            fi
+            ;;
+        c)
+            LOCAL_CACHE_DIR="$OPTARG"
+            ;;
+        g)
+            HOOK="PRE"
+            ;;
+        u)
+            HOOK="POST"
+            ;;
+        \?)
+            echo "Invalid option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+        :)
+            echo "Option -$OPTARG requires an argument." >&2
+            print_usage
+            exit 1
+            ;;
+        *)
+            echo "Unimplemented option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+    esac
+done
+shift $((OPTIND-1))
+
+if [ "$HOOK" == "PRE" ]; then
+    get_patch "$1" "$2" "$3"
+elif [ "$HOOK" == "POST" ]; then
+    upload_patch "$1" "$2" "$3"
+fi
diff --git a/taskcluster/docker/gdb-test/Dockerfile b/taskcluster/docker/gdb-test/Dockerfile
new file mode 100644
index 0000000000..d00504cdd1
--- /dev/null
+++ b/taskcluster/docker/gdb-test/Dockerfile
@@ -0,0 +1,15 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+ENV XZ_OPT=-T0
+
+ARG TASKCLUSTER_ROOT_URL
+ARG DOCKER_IMAGE_PACKAGES
+RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
+
+RUN apt-get update && \
+    apt-get install gdb
diff --git a/taskcluster/docker/github-sync/Dockerfile b/taskcluster/docker/github-sync/Dockerfile
new file mode 100644
index 0000000000..bff131ad7b
--- /dev/null
+++ b/taskcluster/docker/github-sync/Dockerfile
@@ -0,0 +1,14 @@
+FROM $DOCKER_IMAGE_PARENT
+MAINTAINER Kartikaya Gupta <kgupta@mozilla.com>
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/.ghsync
+
+ADD prepare.sh /setup/prepare-docker.sh
+ADD requirements.txt /setup/requirements.txt
+RUN /bin/bash /setup/prepare-docker.sh /setup/requirements.txt && rm -R /setup
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/github-sync/prepare.sh b/taskcluster/docker/github-sync/prepare.sh
new file mode 100755
index
0000000000..8efdec2e27 --- /dev/null +++ b/taskcluster/docker/github-sync/prepare.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +set -o errexit +set -o nounset +set -o pipefail +set -o xtrace + +test "$(whoami)" == 'root' + +# Install stuff we need +apt-get -y update +apt-get install -y \ + cmake \ + curl \ + gcc \ + git \ + g++ \ + libffi-dev \ + libgit2-dev \ + libssl-dev \ + python3 \ + python3-dev \ + python3-pip \ + python3-setuptools + +# Python packages +pip3 install -r "$1" diff --git a/taskcluster/docker/github-sync/requirements.txt b/taskcluster/docker/github-sync/requirements.txt new file mode 100644 index 0000000000..4f6ed0a620 --- /dev/null +++ b/taskcluster/docker/github-sync/requirements.txt @@ -0,0 +1,3 @@ +requests == 2.21.0 +pygit2 == 0.27.0 # this requires libgit2 v0.27.* which can be gotten via `brew install libgit2` +python-hglib == 2.6.1 diff --git a/taskcluster/docker/image_builder/Dockerfile b/taskcluster/docker/image_builder/Dockerfile new file mode 100644 index 0000000000..ad38a48c49 --- /dev/null +++ b/taskcluster/docker/image_builder/Dockerfile @@ -0,0 +1,99 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+FROM golang:1.14 as skopeo
+
+WORKDIR /go/src/
+RUN ["git", "clone", "--no-checkout", "--depth=1", "--branch=v1.1.1", "https://github.com/containers/skopeo", "."]
+RUN ["git", "checkout", "67abbb3cefbdc876447583d5ea45e76bf441eba7"]
+ENV GO111MODULE=on CGO_ENABLED=0
+RUN ["go", "build", \
+    "-mod=vendor", "-o", "out/skopeo", \
+    "-tags", "exclude_graphdriver_devicemapper exclude_graphdriver_btrfs containers_image_openpgp", \
+    # Set unixTempDirForBigFiles so skopeo will extract in a directory hidden by kaniko
+    # We create the directory below.
+    "-ldflags", " -X github.com/containers/image/v5/internal/tmpdir.unixTempDirForBigFiles=/workspace/tmp -X github.com/containers/image/v5/signature.systemDefaultPolicyPath=/kaniko/containers/policy.json -extldflags \"-static\" -w -s", \
+    "./cmd/skopeo"]
+
+FROM golang:1.14 as kaniko
+WORKDIR /go/src/
+RUN ["git", "clone", "--no-checkout", "--depth=1", "--branch=v1.0.0", "https://github.com/GoogleContainerTools/kaniko", "."]
+RUN ["git", "checkout", "146ec6a9cd6f87b4a12e8119ded575d5edca35ac"]
+RUN ["make"]
+
+# Build the `build-image` command as a static binary using musl
+# The setup is loosely based on a stripped down version of
+# https://github.com/emk/rust-musl-builder/blob/master/Dockerfile
+FROM debian:buster as build-image
+
+COPY apt.conf /etc/apt/apt.conf.d/99taskcluster
+
+RUN apt-get update && \
+    apt-get install \
+      build-essential \
+      ca-certificates \
+      curl \
+      musl-dev \
+      musl-tools \
+      && \
+    useradd rust --user-group --create-home --shell /bin/bash
+
+# Run all further code as user `rust`, and create our working directories
+# as the appropriate user.
+USER rust
+
+# Set up our path with all our binary directories, including those for the
+# musl-gcc toolchain and for our Rust toolchain.
+ENV PATH=/home/rust/.cargo/bin:$PATH
+
+# The Rust toolchain to use when building our image. Set by `hooks/build`.
+ENV TOOLCHAIN=1.42.0 \
+    TARGET=x86_64-unknown-linux-musl
+
+# Install our Rust toolchain and the `musl` target. We patch the
+# command-line we pass to the installer so that it won't attempt to
+# interact with the user or fool around with TTYs. We also set the default
+# `--target` to musl so that our users don't need to keep overriding it
+# manually.
+RUN curl https://sh.rustup.rs -sSf | \
+    sh -s -- -y \
+    --profile minimal \
+    --default-toolchain $TOOLCHAIN \
+    --target $TARGET
+
+# Expect our source code to live in /home/rust/src. We'll run the build as
+# user `rust`, which will be uid 1000, gid 1000 outside the container.
+RUN mkdir -p /home/rust/src
+WORKDIR /home/rust/src
+# Add our source code.
+ADD --chown=rust:rust build-image/ ./
+
+# --out-dir is not yet stable
+ENV RUSTC_BOOTSTRAP=1
+# Build our application.
+RUN ["cargo", "build", "--target", "x86_64-unknown-linux-musl", "--out-dir=bin", "--release", "-Zunstable-options"]
+
+FROM scratch as empty
+
+FROM scratch
+
+COPY --from=skopeo /go/src/out/skopeo /kaniko-bootstrap/skopeo
+COPY --from=kaniko /go/src/out/executor /kaniko-bootstrap/executor
+COPY --from=build-image \
+    /home/rust/src/bin/build-image \
+    /kaniko-bootstrap/build-image
+
+ADD https://mkcert.org/generate/ /kaniko-bootstrap/ssl/certs/ca-certificates.crt
+ENV SSL_CERT_DIR=/kaniko/ssl/certs
+
+ADD policy.json /kaniko-bootstrap/containers/policy.json
+
+ENV HOME /root
+ENV USER /root
+WORKDIR /workspace
+
+ENV PATH /usr/local/bin:/kaniko
+
+VOLUME /workspace
+ENTRYPOINT ["/kaniko-bootstrap/build-image"]
diff --git a/taskcluster/docker/image_builder/VERSION b/taskcluster/docker/image_builder/VERSION
new file mode 100644
index 0000000000..fcdb2e109f
--- /dev/null
+++ b/taskcluster/docker/image_builder/VERSION
@@ -0,0 +1 @@
+4.0.0
diff --git a/taskcluster/docker/image_builder/apt.conf b/taskcluster/docker/image_builder/apt.conf
new file mode 100644
index 0000000000..84c0cf10ef
--- /dev/null
+++ b/taskcluster/docker/image_builder/apt.conf
@@ -0,0 +1,5 @@ +quiet "true"; +APT::Get::Assume-Yes "true"; +APT::Install-Recommends "false"; +Acquire::Check-Valid-Until "false"; +Acquire::Retries "5"; diff --git a/taskcluster/docker/image_builder/build-image/Cargo.lock b/taskcluster/docker/image_builder/build-image/Cargo.lock new file mode 100644 index 0000000000..7e07920591 --- /dev/null +++ b/taskcluster/docker/image_builder/build-image/Cargo.lock @@ -0,0 +1,1085 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "anyhow" +version = "1.0.31" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "autocfg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "base64" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "build-image" +version = "0.1.0" +dependencies = [ + "anyhow 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", + "envy 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "reqwest 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bumpalo" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "bytes" +version = "0.5.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "loom 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cc" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "dtoa" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "either" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "encoding_rs" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "envy" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fs_extra" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "fuchsia-zircon" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures-channel" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures-core 0.3.5 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "futures-core" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures-io" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures-macro" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "futures-sink" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures-task" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "futures-util" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-io 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-macro 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-task 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-nested 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "generator" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "glob" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "h2" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hermit-abi" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "http" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + 
"bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "http-body" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "httparse" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "hyper" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = 
"hyper-rustls" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "idna" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "indexmap" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itertools" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itoa" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "jobserver" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ 
+ "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "js-sys" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "log" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "loom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "generator 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", + "scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "matches" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "memchr" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "mime_guess" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.6.0 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "mio" +version = "0.6.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "miow" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "net2" +version = "0.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "hermit-abi 0.1.14 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "once_cell" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "pin-project" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "pin-project-internal 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pin-project-internal" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "proc-macro-hack" +version = "0.5.16" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "proc-macro-nested" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "proc-macro2" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "quote" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "redox_syscall" +version = "0.1.56" 
+source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "reqwest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "base64 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "encoding_rs 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", + "mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", + 
"wasm-bindgen-futures 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki-roots 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winreg 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ring" +version = "0.16.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rustls" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)", + "sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "scoped-tls" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "sct" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_derive" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_json" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_urlencoded" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "dtoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "slab" 
+version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "socket2" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "syn" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tinyvec" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "tokio" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tokio-rustls" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tokio-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tower-service" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "try-lock" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "tinyvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "url" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "version_check" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.64" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "web-sys" +version = "0.3.41" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "webpki" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "webpki-roots" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winreg" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 
(registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zstd" +version = "0.5.3+zstd.1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zstd-safe" +version = "2.0.5+zstd.1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", + "zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "zstd-sys" +version = "1.4.17+zstd.1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)", + "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum anyhow 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)" = "85bb70cc08ec97ca5450e6eba421deeea5f172c0fc61f78b5357b2a8e8be195f" +"checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" +"checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" +"checksum base64 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" +"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +"checksum bumpalo 3.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820" +"checksum bytes 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "118cf036fbb97d0816e3c34b2d7a1e8cfc60f68fcf63d550ddbe9bd5f59c213b" +"checksum cc 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)" = "77c1f1d60091c1b73e2b1f4560ab419204b178e625fa945ded7b660becd2bd46" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +"checksum dtoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b" +"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +"checksum encoding_rs 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "e8ac63f94732332f44fe654443c46f6375d1939684c17b0afb6cb56b0456e171" +"checksum envy 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f938a4abd5b75fe3737902dbc2e79ca142cc1526827a9e40b829a086758531a9" +"checksum fnv 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" +"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +"checksum futures-channel 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f366ad74c28cca6ba456d95e6422883cfb4b252a83bed929c83abfdbbf2967d5" +"checksum futures-core 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"59f5fff90fd5d971f936ad674802482ba441b6f09ba5e15fd8b39145582ca399" +"checksum futures-io 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "de27142b013a8e869c14957e6d2edeef89e97c289e69d042ee3a49acd8b51789" +"checksum futures-macro 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d0b5a30a4328ab5473878237c447333c093297bded83a4983d10f4deea240d39" +"checksum futures-sink 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3f2032893cb734c7a05d85ce0cc8b8c4075278e93b24b66f9de99d6eb0fa8acc" +"checksum futures-task 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bdb66b5f09e22019b1ab0830f7785bcea8e7a42148683f99214f73f8ec21a626" +"checksum futures-util 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "8764574ff08b701a084482c3c7031349104b07ac897393010494beaa18ce32c6" +"checksum generator 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "add72f17bb81521258fcc8a7a3245b1e184e916bfbe34f0ea89558f440df5c68" +"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" +"checksum h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "79b7246d7e4b979c03fa093da39cfb3617a96bbeee6310af63991668d7e843ff" +"checksum hermit-abi 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b9586eedd4ce6b3c498bc3b4dd92fc9f11166aa908a914071953768066c67909" +"checksum http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9" +"checksum http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" +"checksum httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" +"checksum hyper 0.13.6 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a6e7655b9594024ad0ee439f3b5a7299369dc2a3f459b47c696f9ff676f9aa1f" +"checksum hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08" +"checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" +"checksum indexmap 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c398b2b113b55809ceb9ee3e753fcbac793f1956663f3c36549c1346015c2afe" +"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +"checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" +"checksum itoa 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6" +"checksum jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" +"checksum js-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)" = "c4b9172132a62451e56142bff9afc91c8e4a4500aa5b847da36815b63bfda916" +"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +"checksum libc 0.2.71 (registry+https://github.com/rust-lang/crates.io-index)" = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49" +"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" +"checksum loom 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"4ecc775857611e1df29abba5c41355cdf540e7e9d4acfdf0f355eefee82330b7" +"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" +"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" +"checksum mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +"checksum mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" +"checksum mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)" = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430" +"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" +"checksum net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)" = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7" +"checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +"checksum once_cell 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0b631f7e854af39a1739f401cf34a8a013dfe09eac4fa4dba91e9768bd28168d" +"checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +"checksum pin-project 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "12e3a6cdbfe94a5e4572812a0201f8c0ed98c1c452c7b8563ce2276988ef9c17" +"checksum pin-project-internal 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "6a0ffd45cf79d88737d7cc85bfd5d2894bee1139b356e616fe85dc389c61aaf7" +"checksum pin-project-lite 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = 
"282adbf10f2698a7a77f8e983a74b2d18176c19a7fd32a45446139ae7b02b715" +"checksum pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +"checksum proc-macro-hack 0.5.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7e0456befd48169b9f13ef0f0ad46d492cf9d2dbb918bcf38e01eed4ce3ec5e4" +"checksum proc-macro-nested 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a" +"checksum proc-macro2 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)" = "beae6331a816b1f65d04c45b078fd8e6c93e8071771f41b8163255bbd8d7c8fa" +"checksum quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" +"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" +"checksum reqwest 0.10.6 (registry+https://github.com/rust-lang/crates.io-index)" = "3b82c9238b305f26f53443e3a4bc8528d64b8d0bee408ec949eb7bf5635ec680" +"checksum ring 0.16.15 (registry+https://github.com/rust-lang/crates.io-index)" = "952cd6b98c85bbc30efa1ba5783b8abf12fec8b3287ffa52605b9432313e34e4" +"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +"checksum rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1" +"checksum ryu 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +"checksum scoped-tls 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "332ffa32bf586782a3efaeb58f127980944bbc8c4d6913a86107ac2a5ab24b28" +"checksum sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" +"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +"checksum serde 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3" +"checksum serde_derive 1.0.114 (registry+https://github.com/rust-lang/crates.io-index)" = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e" +"checksum serde_json 1.0.56 (registry+https://github.com/rust-lang/crates.io-index)" = "3433e879a558dde8b5e8feb2a04899cf34fdde1fafb894687e52105fc1162ac3" +"checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" +"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" +"checksum socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918" +"checksum spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +"checksum syn 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)" = "e8d5d96e8cbb005d6959f119f773bfaebb5684296108fb32600c00cde305b2cd" +"checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +"checksum tinyvec 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "53953d2d3a5ad81d9f844a32f14ebb121f50b650cd59d0ee2a07cf13c617efed" +"checksum tokio 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d099fa27b9702bed751524694adbe393e18b36b204da91eb1cbbbbb4a5ee2d58" +"checksum tokio-rustls 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "15cb62a0d2770787abc96e99c1cd98fcf17f94959f3af63ca85bdfb203f051b4" +"checksum tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" +"checksum tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" +"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" +"checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +"checksum unicode-normalization 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6fb19cf769fa8c6a80a162df694621ebeb4dafb606470b2b2fce0be40a98a977" +"checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" +"checksum untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +"checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" +"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +"checksum want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +"checksum wasm-bindgen 0.2.64 
(registry+https://github.com/rust-lang/crates.io-index)" = "6a634620115e4a229108b71bde263bb4220c483b3f07f5ba514ee8d15064c4c2" +"checksum wasm-bindgen-backend 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "3e53963b583d18a5aa3aaae4b4c1cb535218246131ba22a71f05b518098571df" +"checksum wasm-bindgen-futures 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dba48d66049d2a6cc8488702e7259ab7afc9043ad0dc5448444f46f2a453b362" +"checksum wasm-bindgen-macro 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "3fcfd5ef6eec85623b4c6e844293d4516470d8f19cd72d0d12246017eb9060b8" +"checksum wasm-bindgen-macro-support 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "9adff9ee0e94b926ca81b57f57f86d5545cdcb1d259e21ec9bdd95b901754c75" +"checksum wasm-bindgen-shared 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "7f7b90ea6c632dd06fd765d44542e234d5e63d9bb917ecd64d79778a13bd79ae" +"checksum web-sys 0.3.41 (registry+https://github.com/rust-lang/crates.io-index)" = "863539788676619aac1a23e2df3655e96b32b0e05eb72ca34ba045ad573c625d" +"checksum webpki 0.21.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ab146130f5f790d45f82aeeb09e55a256573373ec64409fc19a6fb82fb1032ae" +"checksum webpki-roots 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8eff4b7516a57307f9349c64bf34caa34b940b66fed4b2fb3136cb7386e5739" +"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" +"checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +"checksum winreg 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" +"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +"checksum zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8" +"checksum zstd-safe 2.0.5+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055" +"checksum zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b" diff --git a/taskcluster/docker/image_builder/build-image/Cargo.toml b/taskcluster/docker/image_builder/build-image/Cargo.toml new file mode 100644 index 0000000000..be42fe1e34 --- /dev/null +++ b/taskcluster/docker/image_builder/build-image/Cargo.toml @@ -0,0 +1,23 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +[package] +name = "build-image" +version = "0.1.0" +authors = ["Tom Prince <tom.prince@twistedmatrix.com>"] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +reqwest = { version= "0.10.4", features = ["rustls-tls", "blocking"], default-features = false} +zstd = "0.5.1" +url = "2.1.1" +anyhow = "1.0.27" +serde = { version = "1.0.105", features = ["derive"]} +serde_json = "1.0.50" +envy = "0.4.1" +fs_extra = "1.1" + +[workspace] diff --git a/taskcluster/docker/image_builder/build-image/src/config.rs b/taskcluster/docker/image_builder/build-image/src/config.rs new file mode 100644 index 0000000000..94c1d55a10 --- /dev/null +++ b/taskcluster/docker/image_builder/build-image/src/config.rs @@ -0,0 +1,112 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +use anyhow::Result; +use serde::de::Error; +use serde::Deserialize; +use std::collections::HashMap; + +fn default_image_name() -> String { + "mozilla.org/taskgraph/default-image:latest".into() +} +fn default_zstd_level() -> i32 { + 3 +} + +fn from_json<'de, D, T>(deserializer: D) -> Result<T, D::Error> +where + D: serde::de::Deserializer<'de>, + T: serde::de::DeserializeOwned, +{ + let value: String = serde::Deserialize::deserialize(deserializer)?; + serde_json::from_str(&value).map_err(|err| { + D::Error::invalid_value(serde::de::Unexpected::Str(&value), &&*err.to_string()) + }) +} + +#[derive(Deserialize, Debug, PartialEq, Eq)] +pub struct Config { + pub context_task_id: String, + pub context_path: String, + pub parent_task_id: Option<String>, + #[serde(default = "default_image_name")] + pub image_name: String, + #[serde(default = "default_zstd_level")] + pub docker_image_zstd_level: i32, + #[serde(default)] + pub debug: bool, + #[serde(default, deserialize_with = "from_json")] + pub docker_build_args: HashMap<String, String>, +} + +impl Config { + pub fn from_env() -> Result<Config> { + Ok(envy::from_env()?) 
+ } +} + +#[cfg(test)] +mod test { + use anyhow::Result; + + #[test] + fn test() -> Result<()> { + let env: Vec<(String, String)> = vec![ + ("CONTEXT_TASK_ID".into(), "xGRRgzG6QlCCwsFsyuqm0Q".into()), + ( + "CONTEXT_PATH".into(), + "public/docker-contexts/image.tar.gz".into(), + ), + ]; + let config: super::Config = envy::from_iter(env.into_iter())?; + assert_eq!( + config, + super::Config { + context_task_id: "xGRRgzG6QlCCwsFsyuqm0Q".into(), + context_path: "public/docker-contexts/image.tar.gz".into(), + parent_task_id: None, + image_name: "mozilla.org/taskgraph/default-image:latest".into(), + docker_image_zstd_level: 3, + debug: false, + docker_build_args: Default::default() + } + ); + Ok(()) + } + + #[test] + fn test_docker_build_args() -> Result<()> { + let env: Vec<(String, String)> = vec![ + ("CONTEXT_TASK_ID".into(), "xGRRgzG6QlCCwsFsyuqm0Q".into()), + ( + "CONTEXT_PATH".into(), + "public/docker-contexts/image.tar.gz".into(), + ), + ( + "DOCKER_BUILD_ARGS".into(), + serde_json::json! ({ + "test": "Value", + }) + .to_string(), + ), + ]; + let config: super::Config = envy::from_iter(env.into_iter())?; + assert_eq!( + config, + super::Config { + context_task_id: "xGRRgzG6QlCCwsFsyuqm0Q".into(), + context_path: "public/docker-contexts/image.tar.gz".into(), + parent_task_id: None, + image_name: "mozilla.org/taskgraph/default-image:latest".into(), + docker_image_zstd_level: 3, + debug: false, + docker_build_args: [("test".to_string(), "Value".to_string())] + .iter() + .cloned() + .collect(), + } + ); + Ok(()) + } +} diff --git a/taskcluster/docker/image_builder/build-image/src/main.rs b/taskcluster/docker/image_builder/build-image/src/main.rs new file mode 100644 index 0000000000..997617c84e --- /dev/null +++ b/taskcluster/docker/image_builder/build-image/src/main.rs @@ -0,0 +1,182 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. 
If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. + +#![forbid(unsafe_code)] + +use std::collections::HashMap; +use std::path::Path; +use std::process::Command; + +use anyhow::{ensure, Context, Result}; +use fs_extra::dir::{move_dir, CopyOptions}; +use serde::Deserialize; + +mod config; +mod taskcluster; + +use config::Config; + +fn log_step(msg: &str) { + println!("[build-image] {}", msg); +} + +fn read_image_digest(path: &str) -> Result<String> { + let output = Command::new("/kaniko/skopeo") + .arg("inspect") + .arg(format!("docker-archive:{}", path)) + .stdout(std::process::Stdio::piped()) + .spawn()? + .wait_with_output()?; + ensure!(output.status.success(), "Could not inspect parent image."); + + #[derive(Deserialize, Debug)] + #[serde(rename_all = "PascalCase")] + struct ImageInfo { + #[serde(skip_serializing_if = "Option::is_none")] + name: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + tag: Option<String>, + digest: String, + // ... 
+    }
+
+    let image_info: ImageInfo = serde_json::from_slice(&output.stdout)
+        .with_context(|| format!("Could not parse image info from {:?}", path))?;
+    Ok(image_info.digest)
+}
+
+fn download_parent_image(
+    cluster: &taskcluster::TaskCluster,
+    task_id: &str,
+    dest: &str,
+) -> Result<String> {
+    zstd::stream::copy_decode(
+        cluster.stream_artifact(&task_id, "public/image.tar.zst")?,
+        std::fs::File::create(dest)?,
+    )
+    .context("Could not download parent image.")?;
+
+    read_image_digest(dest)
+}
+
+fn build_image(
+    context_path: &str,
+    dest: &str,
+    debug: bool,
+    build_args: HashMap<String, String>,
+) -> Result<()> {
+    let mut command = Command::new("/kaniko/executor");
+    command
+        .stderr(std::process::Stdio::inherit())
+        .args(&["--context", &format!("tar://{}", context_path)])
+        .args(&["--destination", "image"])
+        .args(&["--dockerfile", "Dockerfile"])
+        .arg("--no-push")
+        .args(&["--cache-dir", "/workspace/cache"])
+        .arg("--single-snapshot")
+        // FIXME: Generating reproducible layers currently causes OOM.
+        // .arg("--reproducible")
+        .arg("--whitelist-var-run=false")
+        .args(&["--tarPath", dest]);
+    if debug {
+        command.args(&["-v", "debug"]);
+    }
+    for (key, value) in build_args {
+        command.args(&["--build-arg", &format!("{}={}", key, value)]);
+    }
+    let status = command.status()?;
+    ensure!(status.success(), "Could not build image.");
+    Ok(())
+}
+
+fn repack_image(source: &str, dest: &str, image_name: &str) -> Result<()> {
+    let status = Command::new("/kaniko/skopeo")
+        .arg("copy")
+        .arg(format!("docker-archive:{}", source))
+        .arg(format!("docker-archive:{}:{}", dest, image_name))
+        .stderr(std::process::Stdio::inherit())
+        .status()?;
+    ensure!(status.success(), "Could not repack image.");
+    Ok(())
+}
+
+fn main() -> Result<()> {
+    // Kaniko expects everything to be in /kaniko, so if not running from there, move
+    // everything there.
+ if let Some(path) = std::env::current_exe()?.parent() { + if path != Path::new("/kaniko") { + let mut options = CopyOptions::new(); + options.copy_inside = true; + move_dir(path, "/kaniko", &options)?; + } + } + + let config = Config::from_env().context("Could not parse environment variables.")?; + + let cluster = taskcluster::TaskCluster::from_env()?; + + let mut build_args = config.docker_build_args; + + build_args.insert("TASKCLUSTER_ROOT_URL".into(), cluster.root_url()); + + log_step("Downloading context."); + + std::io::copy( + &mut cluster.stream_artifact(&config.context_task_id, &config.context_path)?, + &mut std::fs::File::create("/workspace/context.tar.gz")?, + ) + .context("Could not download image context.")?; + + if let Some(parent_task_id) = config.parent_task_id { + log_step("Downloading image."); + let digest = download_parent_image(&cluster, &parent_task_id, "/workspace/parent.tar")?; + + log_step(&format!("Parent image digest {}", &digest)); + std::fs::create_dir_all("/workspace/cache")?; + std::fs::rename( + "/workspace/parent.tar", + format!("/workspace/cache/{}", digest), + )?; + + build_args.insert( + "DOCKER_IMAGE_PARENT".into(), + format!("parent:latest@{}", digest), + ); + } + + log_step("Building image."); + build_image( + "/workspace/context.tar.gz", + "/workspace/image-pre.tar", + config.debug, + build_args, + )?; + log_step("Repacking image."); + repack_image( + "/workspace/image-pre.tar", + "/workspace/image.tar", + &config.image_name, + )?; + + log_step("Compressing image."); + compress_file( + "/workspace/image.tar", + "/workspace/image.tar.zst", + config.docker_image_zstd_level, + )?; + + Ok(()) +} + +fn compress_file( + source: impl AsRef<std::path::Path>, + dest: impl AsRef<std::path::Path>, + zstd_level: i32, +) -> Result<()> { + Ok(zstd::stream::copy_encode( + std::fs::File::open(source)?, + std::fs::File::create(dest)?, + zstd_level, + )?) 
+} diff --git a/taskcluster/docker/image_builder/build-image/src/taskcluster.rs b/taskcluster/docker/image_builder/build-image/src/taskcluster.rs new file mode 100644 index 0000000000..3b39d669f0 --- /dev/null +++ b/taskcluster/docker/image_builder/build-image/src/taskcluster.rs @@ -0,0 +1,55 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. + +use anyhow::{Context, Result}; + +pub struct TaskCluster { + root_url: url::Url, + client: reqwest::blocking::Client, +} + +impl TaskCluster { + pub fn from_env() -> Result<Self> { + std::env::var("TASKCLUSTER_ROOT_URL") + .context("TASKCLUSTER_ROOT_URL not set.") + .and_then(|var| var.parse().context("Couldn't parse TASKCLUSTER_ROOT_URL.")) + .map(|root_url| TaskCluster { + root_url, + client: reqwest::blocking::Client::new(), + }) + } + + /// Return the root URL as suitable for passing to other processes. + /// + /// In particular, any trailing slashes are removed. + pub fn root_url(&self) -> String { + self.root_url.as_str().trim_end_matches("/").to_string() + } + + pub fn task_artifact_url(&self, task_id: &str, path: &str) -> url::Url { + let mut url = self.root_url.clone(); + url.set_path(&format!("api/queue/v1/task/{}/artifacts/{}", task_id, path)); + url + } + + pub fn stream_artifact(&self, task_id: &str, path: &str) -> Result<impl std::io::Read> { + let url = self.task_artifact_url(task_id, path); + Ok(self.client.get(url).send()?.error_for_status()?) 
+ } +} + +#[cfg(test)] +mod test { + #[test] + fn test_url() { + let cluster = super::TaskCluster { + root_url: url::Url::parse("http://taskcluster.example").unwrap(), + client: reqwest::blocking::Client::new(), + }; + assert_eq!( + cluster.task_artifact_url("QzDLgP4YRwanIvgPt6ClfA","public/docker-contexts/decision.tar.gz"), + url::Url::parse("http://taskcluster.example/api/queue/v1/task/QzDLgP4YRwanIvgPt6ClfA/artifacts/public/docker-contexts/decision.tar.gz").unwrap(), + ); + } +} diff --git a/taskcluster/docker/image_builder/policy.json b/taskcluster/docker/image_builder/policy.json new file mode 100644 index 0000000000..c709def1ec --- /dev/null +++ b/taskcluster/docker/image_builder/policy.json @@ -0,0 +1,11 @@ +{ + "default": [{"type": "reject"}], + "transports": { + "docker-archive": { + "": [{"type": "insecureAcceptAnything"}] + }, + "dir": { + "": [{"type": "insecureAcceptAnything"}] + } + } +} diff --git a/taskcluster/docker/index-task/.eslintrc.js b/taskcluster/docker/index-task/.eslintrc.js new file mode 100644 index 0000000000..ff84d9cb69 --- /dev/null +++ b/taskcluster/docker/index-task/.eslintrc.js @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +"use strict"; + +module.exports = { + env: { + node: true, + }, +}; diff --git a/taskcluster/docker/index-task/Dockerfile b/taskcluster/docker/index-task/Dockerfile new file mode 100644 index 0000000000..493eda4e20 --- /dev/null +++ b/taskcluster/docker/index-task/Dockerfile @@ -0,0 +1,11 @@ +FROM node:10-alpine + +ENV NODE_ENV production +RUN mkdir /app +ADD insert-indexes.js /app/ +ADD package.json /app/ +ADD yarn.lock /app/ +WORKDIR /app +RUN yarn --frozen-lockfile && yarn cache clean + +ENTRYPOINT ["node"] diff --git a/taskcluster/docker/index-task/README b/taskcluster/docker/index-task/README new file mode 100644 index 0000000000..9ec00e7897 --- /dev/null +++ b/taskcluster/docker/index-task/README @@ -0,0 +1,36 @@ +Index-Image +=========== + +This image is designed to be used for indexing other tasks. It takes a task +definition as follows: +```js +{ + ..., + scopes: [ + 'index:insert-task:my-index.namespace', + 'index:insert-task:...', + ], + payload: { + image: '...', + env: { + TARGET_TASKID: '<taskId-to-be-indexed>', + }, + command: [ + 'insert-indexes.js', + 'my-index.namespace.one', + 'my-index.namespace.two', + '....', + ], + features: { + taskclusterProxy: true, + }, + maxRunTime: 600, + }, +} +``` + +As can be seen the `taskId` to be indexed is given by the environment variable +`TARGET_TASKID` and the `command` arguments specify namespaces that it must +be indexed under. It is **important** to also include scopes of the form +`index:insert-task:<...>` for all namespaces `<...>` given as `command` +arguments. diff --git a/taskcluster/docker/index-task/insert-indexes.js b/taskcluster/docker/index-task/insert-indexes.js new file mode 100644 index 0000000000..39ff45fd9c --- /dev/null +++ b/taskcluster/docker/index-task/insert-indexes.js @@ -0,0 +1,73 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +let taskcluster = require("taskcluster-client"); + +// Create instance of index client +let index = new taskcluster.Index({ + delayFactor: 750, // Good solid delay for background process + retries: 8, // A few extra retries for robustness + rootUrl: + process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL, +}); + +// Create queue instance for fetching taskId +let queue = new taskcluster.Queue({ + delayFactor: 750, // Good solid delay for background process + retries: 8, // A few extra retries for robustness + rootUrl: + process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL, +}); + +// Load input +let taskId = process.env.TARGET_TASKID; +let rank = parseInt(process.env.INDEX_RANK, 10); +let namespaces = process.argv.slice(2); + +// Validate input +if (!taskId) { + console.log("Expected target task as environment variable: TARGET_TASKID"); + process.exit(1); +} + +if (isNaN(rank)) { + console.log("Expected index rank as environment variable: INDEX_RANK"); + process.exit(1); +} + +// Fetch task definition to get expiration and then insert into index +queue + .task(taskId) + .then(task => task.expires) + .then(expires => { + return Promise.all( + namespaces.map(namespace => { + console.log( + "Inserting %s into index (rank %d) under: %s", + taskId, + rank, + namespace + ); + return index.insertTask(namespace, { + taskId, + rank, + data: {}, + expires, + }); + }) + ); + }) + .then(() => { + console.log("indexing successfully completed."); + process.exit(0); + }) + .catch(err => { + console.log("Error:\n%s", err); + if (err.stack) { + console.log("Stack:\n%s", err.stack); + } + console.log("Properties:\n%j", err); + throw err; + }) + .catch(() => process.exit(1)); diff --git a/taskcluster/docker/index-task/package.json b/taskcluster/docker/index-task/package.json new file mode 100644 index 0000000000..14c035123c --- 
/dev/null +++ b/taskcluster/docker/index-task/package.json @@ -0,0 +1,12 @@ +{ + "private": true, + "scripts": { + "start": "node index.js" + }, + "dependencies": { + "taskcluster-client": "^12.2.0" + }, + "engines": { + "node": "10" + } +} diff --git a/taskcluster/docker/index-task/yarn.lock b/taskcluster/docker/index-task/yarn.lock new file mode 100644 index 0000000000..326936d8a6 --- /dev/null +++ b/taskcluster/docker/index-task/yarn.lock @@ -0,0 +1,326 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +amqplib@^0.5.1: + version "0.5.3" + resolved "https://registry.yarnpkg.com/amqplib/-/amqplib-0.5.3.tgz#7ccfc85d12ee7cd3c6dc861bb07f0648ec3d7193" + integrity sha512-ZOdUhMxcF+u62rPI+hMtU1NBXSDFQ3eCJJrenamtdQ7YYwh7RZJHOIM1gonVbZ5PyVdYH4xqBPje9OYqk7fnqw== + dependencies: + bitsyntax "~0.1.0" + bluebird "^3.5.2" + buffer-more-ints "~1.0.0" + readable-stream "1.x >=1.1.9" + safe-buffer "~5.1.2" + url-parse "~1.4.3" + +asap@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +bitsyntax@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/bitsyntax/-/bitsyntax-0.1.0.tgz#b0c59acef03505de5a2ed62a2f763c56ae1d6205" + integrity sha512-ikAdCnrloKmFOugAfxWws89/fPc+nw0OOG1IzIE72uSOg/A3cYptKCjSUhDTuj7fhsJtzkzlv7l3b8PzRHLN0Q== + dependencies: + buffer-more-ints "~1.0.0" + debug "~2.6.9" + safe-buffer "~5.1.2" + +bluebird@^3.5.2: + version "3.5.3" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.3.tgz#7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7" + integrity sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw== + +boom@4.x.x: + version "4.3.1" + resolved 
"https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31" + integrity sha1-T4owBctKfjiJ90kDD9JbluAdLjE= + dependencies: + hoek "4.x.x" + +boom@5.x.x: + version "5.2.0" + resolved "https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02" + integrity sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw== + dependencies: + hoek "4.x.x" + +buffer-more-ints@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/buffer-more-ints/-/buffer-more-ints-1.0.0.tgz#ef4f8e2dddbad429ed3828a9c55d44f05c611422" + integrity sha512-EMetuGFz5SLsT0QTnXzINh4Ksr+oo4i+UGTXEshiGCQWnsgSs7ZhJ8fzlwQ+OzEMs0MpDAMr1hxnblp5a4vcHg== + +combined-stream@^1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.7.tgz#2d1d24317afb8abe95d6d2c0b07b57813539d828" + integrity sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w== + dependencies: + delayed-stream "~1.0.0" + +component-emitter@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" + integrity sha1-E3kY1teCg/ffemt8WmPhQOaUJeY= + +cookiejar@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.2.tgz#dd8a235530752f988f9a0844f3fc589e3111125c" + integrity sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA== + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +cryptiles@3.x.x: + version "3.1.4" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.4.tgz#769a68c95612b56faadfcebf57ac86479cbe8322" + integrity 
sha512-8I1sgZHfVwcSOY6mSGpVU3lw/GSIZvusg8dD2+OGehCJpOhQRLNcH0qb9upQnOH4XhgxxFJSg6E2kx95deb1Tw== + dependencies: + boom "5.x.x" + +debug@^3.1.0: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +debug@~2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +form-data@^2.3.1: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +formidable@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.1.tgz#70fb7ca0290ee6ff961090415f4b3df3d2082659" + integrity sha512-Fs9VRguL0gqGHkXS5GQiMCr1VhZBxz0JnJs4JmMp/2jL18Fmbzvv7vOFRU+U8TBkHEE/CX1qDXzJplVULgsLeg== + +hawk@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038" + integrity 
sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ== + dependencies: + boom "4.x.x" + cryptiles "3.x.x" + hoek "4.x.x" + sntp "2.x.x" + +hoek@4.x.x: + version "4.2.1" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb" + integrity sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA== + +inherits@~2.0.1, inherits@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +lodash@^4.17.4: + version "4.17.11" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d" + integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg== + +methods@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + +mime-db@~1.37.0: + version "1.37.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.37.0.tgz#0b6a0ce6fdbe9576e25f1f2d2fde8830dc0ad0d8" + integrity sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg== + +mime-types@^2.1.12: + version "2.1.21" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.21.tgz#28995aa1ecb770742fe6ae7e58f9181c744b3f96" + integrity 
sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg== + dependencies: + mime-db "~1.37.0" + +mime@^1.4.1: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + +process-nextick-args@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" + integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw== + +promise@^8.0.1: + version "8.0.2" + resolved "https://registry.yarnpkg.com/promise/-/promise-8.0.2.tgz#9dcd0672192c589477d56891271bdc27547ae9f0" + integrity sha512-EIyzM39FpVOMbqgzEHhxdrEhtOSDOtjMZQ0M6iVfCE+kWNgCkAyOdnuCWqfmflylftfadU6FkiMgHZA2kUzwRw== + dependencies: + asap "~2.0.6" + +qs@^6.5.1: + version "6.6.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.6.0.tgz#a99c0f69a8d26bf7ef012f871cdabb0aee4424c2" + integrity sha512-KIJqT9jQJDQx5h5uAVPimw6yVg2SekOKu959OCtktD3FjzbpvaPr8i4zzg07DOMz+igA4W/aNM7OV8H37pFYfA== + +querystringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.0.tgz#7ded8dfbf7879dcc60d0a644ac6754b283ad17ef" + integrity sha512-sluvZZ1YiTLD5jsqZcDmFyV2EwToyXZBfpoVOmktMmW+VEnhgakFHnasVph65fOjGPTWN0Nw3+XQaSeMayr0kg== + +"readable-stream@1.x >=1.1.9": + version "1.1.14" + resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" + integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= + +safe-buffer@~5.1.0, safe-buffer@~5.1.1, safe-buffer@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +slugid@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/slugid/-/slugid-1.1.0.tgz#e09f00899c09f5a7058edc36dd49f046fd50a82a" + integrity sha1-4J8AiZwJ9acFjtw23UnwRv1QqCo= + dependencies: + uuid "^2.0.1" + +sntp@2.x.x: + version "2.1.0" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.1.0.tgz#2c6cec14fedc2222739caf9b5c3d85d1cc5a2cc8" + integrity sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg== + dependencies: + hoek "4.x.x" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= + +string_decoder@~1.1.1: 
+ version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +superagent@~3.8.1: + version "3.8.3" + resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.8.3.tgz#460ea0dbdb7d5b11bc4f78deba565f86a178e128" + integrity sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA== + dependencies: + component-emitter "^1.2.0" + cookiejar "^2.1.0" + debug "^3.1.0" + extend "^3.0.0" + form-data "^2.3.1" + formidable "^1.2.0" + methods "^1.1.1" + mime "^1.4.1" + qs "^6.5.1" + readable-stream "^2.3.5" + +taskcluster-client@^12.2.0: + version "12.2.0" + resolved "https://registry.yarnpkg.com/taskcluster-client/-/taskcluster-client-12.2.0.tgz#423aee3b17566d14f8ad23e4e47532265a74fb89" + integrity sha512-2Fu5ICS2663kC2t8ymJYzRDnipj3DsCK//b+H/83RjJvC6cWZ0akKzq0ySvPlNA6ic2UcL4I03bJTCJYBX1dqg== + dependencies: + amqplib "^0.5.1" + debug "^3.1.0" + hawk "^6.0.2" + lodash "^4.17.4" + promise "^8.0.1" + slugid "^1.1.0" + superagent "~3.8.1" + taskcluster-lib-urls "^10.0.0" + +taskcluster-lib-urls@^10.0.0: + version "10.1.1" + resolved "https://registry.yarnpkg.com/taskcluster-lib-urls/-/taskcluster-lib-urls-10.1.1.tgz#67d5b9449b947e5234eafdd15c46267dde29bf74" + integrity sha512-tdrK++rCX73FMXk/cXwS6RLTjA3pX8hJlxg1ECLs3L3llCOPMNhQ4wi6lb6yMgHc/s5on/Edj6AlAH7gkxzgPg== + +url-parse@~1.4.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.4.tgz#cac1556e95faa0303691fec5cf9d5a1bc34648f8" + integrity sha512-/92DTTorg4JjktLNLe6GPS2/RvAd/RGr6LuktmWSMLEOa6rjnlrFXNgSbSmkNvCoL2T028A0a1JaJLzRMlFoHg== + dependencies: + querystringify "^2.0.0" + requires-port "^1.0.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +uuid@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" + integrity sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho= diff --git a/taskcluster/docker/lint/Dockerfile b/taskcluster/docker/lint/Dockerfile new file mode 100644 index 0000000000..20475b73ea --- /dev/null +++ b/taskcluster/docker/lint/Dockerfile @@ -0,0 +1,52 @@ +FROM ubuntu:18.04 +MAINTAINER Andrew Halberstadt <ahalberstadt@mozilla.com> + +RUN mkdir /builds +RUN useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts + +RUN mkdir /build +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/install-node.sh +ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /build/install-node.sh + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /build/install-mercurial.sh +ADD system-setup.sh /tmp/system-setup.sh +# %include tools/lint/eslint/manifest.tt +ADD topsrcdir/tools/lint/eslint/manifest.tt /tmp/eslint.tt +# %include tools/lint/eslint/eslint-plugin-mozilla/manifest.tt +ADD topsrcdir/tools/lint/eslint/eslint-plugin-mozilla/manifest.tt /tmp/eslint-plugin-mozilla.tt +# %include tools/lint/spell/codespell_requirements.txt +ADD topsrcdir/tools/lint/spell/codespell_requirements.txt /tmp/codespell_requirements.txt +# 
%include tools/lint/tox/tox_requirements.txt +ADD topsrcdir/tools/lint/tox/tox_requirements.txt /tmp/tox_requirements.txt +RUN bash /tmp/system-setup.sh + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task +RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/* + +# Set variable normally configured at login, by the shells parent process, these +# are taken from GNU su manual +ENV HOME /builds/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker +ENV HOSTNAME taskcluster-worker +ENV LANG en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/lint/system-setup.sh b/taskcluster/docker/lint/system-setup.sh new file mode 100644 index 0000000000..fe1551c810 --- /dev/null +++ b/taskcluster/docker/lint/system-setup.sh @@ -0,0 +1,119 @@ +#!/usr/bin/env bash +# This allows packages to be installed without human interaction +export DEBIAN_FRONTEND=noninteractive + +set -ve + +test "$(whoami)" == 'root' + +mkdir -p /setup +cd /setup + +apt_packages=() +apt_packages+=('curl') +apt_packages+=('iproute2') +apt_packages+=('locales') +apt_packages+=('git') +apt_packages+=('graphviz') +apt_packages+=('python') +apt_packages+=('python-pip') +apt_packages+=('python3') +apt_packages+=('python3-pip') +apt_packages+=('shellcheck') +apt_packages+=('sudo') +apt_packages+=('wget') +apt_packages+=('xz-utils') + +apt-get update +apt-get install -y "${apt_packages[@]}" + +# Without this we get spurious "LC_ALL: cannot change locale (en_US.UTF-8)" errors, +# and python scripts raise UnicodeEncodeError when trying to print unicode characters. 
+locale-gen en_US.UTF-8 +dpkg-reconfigure locales + +su -c 'git config --global user.email "worker@mozilla.test"' worker +su -c 'git config --global user.name "worker"' worker + +tooltool_fetch() { + cat >manifest.tt + /build/tooltool.py fetch + rm manifest.tt +} + +cd /build +# shellcheck disable=SC1091 +. install-mercurial.sh + +### +# zstandard +### +pip install zstandard==0.13.0 +pip3 install zstandard==0.13.0 + +### +# ESLint Setup +### + +# install node +# shellcheck disable=SC1091 +. install-node.sh + +npm install -g jsdoc@3.5.5 +npm install -g yarn@1.9.4 + +/build/tooltool.py fetch -m /tmp/eslint.tt +mv /build/node_modules /build/node_modules_eslint +/build/tooltool.py fetch -m /tmp/eslint-plugin-mozilla.tt +mv /build/node_modules /build/node_modules_eslint-plugin-mozilla + +### +# fzf setup +### + +tooltool_fetch <<EOF +[ + { + "size": 1161860, + "digest": "3246470715e1ddf4c7e5136fdddd2ca269928c2de3074a98233faef189efd88fc9b28ddbe68642a31cf647a97f630941d764187006c5115e6f357d49322ef58d", + "algorithm": "sha512", + "filename": "fzf-0.20.0-linux_amd64.tgz", + "unpack": true + } +] +EOF +mv fzf /usr/local/bin + +### +# codespell Setup +### + +cd /setup + +pip3 install --require-hashes -r /tmp/codespell_requirements.txt + +### +# tox Setup +### + +cd /setup + +pip3 install --require-hashes -r /tmp/tox_requirements.txt + +### +# rustfmt and clippy +### + +cd /setup +export RUSTUP_HOME=/build/rust +export CARGO_HOME="$RUSTUP_HOME" +mkdir -p "$CARGO_HOME" +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +"$RUSTUP_HOME"/bin/rustup component add rustfmt +"$RUSTUP_HOME"/bin/rustup component add clippy +"$RUSTUP_HOME"/bin/rustc --version +"$RUSTUP_HOME"/bin/rustfmt --version +"$CARGO_HOME"/bin/cargo clippy --version + +cd / +rm -rf /setup diff --git a/taskcluster/docker/mingw32-build/Dockerfile b/taskcluster/docker/mingw32-build/Dockerfile new file mode 100644 index 0000000000..b6a615de78 --- /dev/null +++ 
b/taskcluster/docker/mingw32-build/Dockerfile @@ -0,0 +1,43 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV XZ_OPT=-T0 + +RUN dpkg --add-architecture i386 + +# Ideally, we wouldn't need gcc-multilib and the extra linux-libc-dev, +# but the latter is required to make the former installable, and the former +# because of bug 1409276. +RUN apt-get update && \ + apt-get install \ + autoconf2.13 \ + bison \ + bzip2 \ + flex \ + curl \ + file \ + gawk \ + g++-multilib \ + gnupg \ + jq \ + libucl1 \ + libxml2 \ + patch \ + python3-dev \ + p7zip-full \ + scons \ + tar \ + unzip \ + uuid \ + wget \ + x11-utils \ + xvfb \ + yasm \ + zip \ + zlib1g-dev \ + libfreetype6-dev:i386 \ + libx11-dev:i386 diff --git a/taskcluster/docker/partner-repack/Dockerfile b/taskcluster/docker/partner-repack/Dockerfile new file mode 100644 index 0000000000..045ac096f4 --- /dev/null +++ b/taskcluster/docker/partner-repack/Dockerfile @@ -0,0 +1,24 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Ben Hearsum <bhearsum@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace + +RUN dpkg --add-architecture amd64 + +RUN apt-get update && \ + apt-get install \ + bzip2 \ + curl \ + git \ + gzip \ + openssh-client \ + unzip \ + zip && \ + apt-get clean + +# Add wrapper scripts for xvfb for repackage.sh, and interactive sessions +# %include taskcluster/docker/recipes/xvfb.sh +COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh + +COPY known_hosts /etc/ssh/ssh_known_hosts diff --git a/taskcluster/docker/partner-repack/known_hosts b/taskcluster/docker/partner-repack/known_hosts new file mode 100644 index 0000000000..73b210c5aa --- /dev/null +++ b/taskcluster/docker/partner-repack/known_hosts @@ -0,0 +1,2 @@ +|1|l1+RDluaBh2vgAVjzSQP5cMsazU=|zA0AvSBB4c3SJ6OM1H0pjrTY/BQ= ssh-rsa 
AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== +|1|A4q4hHD/XV6BPf9bom/6780dCwk=|gm+K1qkEPDUMm1axYNTOkbI1BQ4= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== diff --git a/taskcluster/docker/periodic-updates/.eslintrc.js b/taskcluster/docker/periodic-updates/.eslintrc.js new file mode 100644 index 0000000000..b00eb26544 --- /dev/null +++ b/taskcluster/docker/periodic-updates/.eslintrc.js @@ -0,0 +1,80 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +"use strict"; + +module.exports = { + globals: { + // JS files in this folder are commonly xpcshell scripts where |arguments| + // is defined in the global scope. + arguments: false, + }, + rules: { + // Enforce return statements in callbacks of array methods. + "array-callback-return": "error", + + // Verify calls of super() in constructors. + "constructor-super": "error", + + // Require default case in switch statements. + "default-case": "error", + + // Disallow use of alert(), confirm(), and prompt(). + "no-alert": "error", + + // Disallow likely erroneous `switch` scoped lexical declarations in + // case/default clauses. + "no-case-declarations": "error", + + // Disallow use of the console API. 
+ "no-console": "error", + + // Disallow constant expressions in conditions (except for loops). + "no-constant-condition": ["error", { checkLoops: false }], + + // Disallow extending of native objects. + "no-extend-native": "error", + + // Disallow case statement fallthrough without explicit `// falls through` + // annotation. + "no-fallthrough": "error", + + // No reassigning native JS objects or read only globals. + "no-global-assign": "error", + + // Disallow use of assignment in return statement. + "no-return-assign": ["error", "always"], + + // Disallow template literal placeholder syntax in regular strings. + "no-template-curly-in-string": "error", + + // Disallow use of this/super before calling super() in constructors. + "no-this-before-super": "error", + + // Disallow unmodified loop conditions. + "no-unmodified-loop-condition": "error", + + // No expressions where a statement is expected + "no-unused-expressions": "error", + + // Disallow unnecessary escape usage in strings and regular expressions. + "no-useless-escape": "error", + + // Require "use strict" to be defined globally in the script. + strict: ["error", "global"], + + // Enforce valid JSDoc comments. + "valid-jsdoc": [ + "error", + { + requireParamDescription: false, + requireReturn: false, + requireReturnDescription: false, + }, + ], + + // Disallow Yoda conditions. 
+ yoda: ["error", "never"], + }, +}; diff --git a/taskcluster/docker/periodic-updates/Dockerfile b/taskcluster/docker/periodic-updates/Dockerfile new file mode 100644 index 0000000000..1b8203593b --- /dev/null +++ b/taskcluster/docker/periodic-updates/Dockerfile @@ -0,0 +1,31 @@ +FROM ubuntu:bionic +MAINTAINER Simon Fraser <sfraser@mozilla.com> + +# Required software +ENV DEBIAN_FRONTEND noninteractive + +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh + +ADD setup.sh /setup/setup.sh + +RUN cd /setup && ./setup.sh + +COPY runme.sh / +COPY scripts/* /home/worker/scripts/ + +ENV HOME /home/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker + +CMD ["/runme.sh"] diff --git a/taskcluster/docker/periodic-updates/README.md b/taskcluster/docker/periodic-updates/README.md new file mode 100644 index 0000000000..3136b69f37 --- /dev/null +++ b/taskcluster/docker/periodic-updates/README.md @@ -0,0 +1,96 @@ + +==Periodic File Updates== + +This docker image examines the in-tree files for HSTS preload data, HPKP pinning and blocklisting, and +will produce a diff for each necessary to update the in-tree files. + +If given a conduit API token, it will also use the arcanist client to submit the commits for review. + + +==Quick Start== + +```sh +docker build -t hsts-local --no-cache --rm . 
+ +docker run -e DO_HSTS=1 -e DO_HPKP=1 -e PRODUCT="firefox" -e BRANCH="mozilla-central" -e USE_MOZILLA_CENTRAL=1 hsts-local +``` + +HSTS checks will only be run if the `DO_HSTS` environment variable is set. +Likewise for `DO_HPKP` and the HPKP checks. Environment variables are used +rather than command line arguments to make constructing taskcluster tasks +easier. + +To prevent a full build when landing with Phabricator, set the `DONTBUILD` +environment variable. + +==Background== + +These scripts have been moved from +`https://hg.mozilla.org/build/tools/scripts/periodic_file_updates/` and +`security/manager/tools/` in the main repos. + +==HSTS Checks== + +`scripts/getHSTSPreloadList.js` will examine the current contents of +nsSTSPreloadList.inc from whichever `BRANCH` is specified, add in the mandatory +hosts, and those from the Chromium source, and check them all to see if their +SSL configuration is valid, and whether or not they have the +Strict-Transport-Security header set with an appropriate `max-age`. + +This javascript has been modified to use async calls to improve performance. + +==HPKP Checks== + +`scripts/genHPKPStaticPins.js` will ensure the list of pinned public keys are +up to date. 
+ +==Example Taskcluster Task== + +https://firefox-ci-tc.services.mozilla.com/tasks/create/ + +```yaml +provisionerId: aws-provisioner-v1 +workerType: gecko-1-b-linux +retries: 0 +created: '2018-02-07T14:45:57.347Z' +deadline: '2018-02-07T17:45:57.348Z' +expires: '2019-02-07T17:45:57.348Z' +scopes: [] +payload: + image: srfraser/hsts1 + maxRunTime: 1800 + artifacts: + public/build/nsSTSPreloadList.diff: + path: /home/worker/artifacts/nsSTSPreloadList.diff + expires: '2019-02-07T13:57:35.448Z' + type: file + public/build/StaticHPKPins.h.diff: + path: /home/worker/artifacts/StaticHPKPins.h.diff + expires: '2019-02-07T13:57:35.448Z' + type: file + public/build/blocklist.diff: + path: /home/worker/artifacts/blocklist.diff + expires: '2019-02-07T13:57:35.448Z' + type: file + env: + DO_HSTS: 1 + DO_HPKP: 1 + PRODUCT: firefox + BRANCH: mozilla-central + USE_MOZILLA_CENTRAL: 1 + REVIEWERS: catlee +metadata: + name: Periodic updates testing + description: Produce diffs for HSTS and HPKP in-tree files. + owner: sfraser@mozilla.com + source: 'https://firefox-ci-tc.services.mozilla.com/tasks/create' +tags: {} +extra: + treeherder: + jobKind: test + machine: + platform: linux64 + tier: 1 + symbol: 'hsts' + +``` diff --git a/taskcluster/docker/periodic-updates/runme.sh b/taskcluster/docker/periodic-updates/runme.sh new file mode 100755 index 0000000000..368963aff6 --- /dev/null +++ b/taskcluster/docker/periodic-updates/runme.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +set -xe + +# Things to be set by task definition. +# --pinset --hsts --hpkp +# -b branch +# --use-mozilla-central +# -p firefox +# Artifact directory +# Artifact names. + + +test "${BRANCH}" +test "${PRODUCT}" + +PARAMS="" + +if [ -n "${USE_MOZILLA_CENTRAL}" ] +then + PARAMS="${PARAMS} --use-mozilla-central" +fi + +# TODO change these, so that they're run if the artifact location is specified? 
+if [ -n "${DO_HSTS}" ] +then + PARAMS="${PARAMS} --hsts" +fi + +if [ -n "${DO_HPKP}" ] +then + PARAMS="${PARAMS} --hpkp" +fi + +if [ -n "${DO_REMOTE_SETTINGS}" ] +then + PARAMS="${PARAMS} --remote-settings" +fi + +if [ -n "${DO_SUFFIX_LIST}" ] +then + PARAMS="${PARAMS} --suffix-list" +fi + +if [ -n "${DONTBUILD}" ] +then + PARAMS="${PARAMS} -d" +fi + + +export ARTIFACTS_DIR="/home/worker/artifacts" +mkdir -p "$ARTIFACTS_DIR" + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. +queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1" + +# Get Arcanist API token + +if [ -n "${TASK_ID}" ] +then + curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID" + ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}') +fi +if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster +then + set +x # Don't echo these + secrets_url="${TASKCLUSTER_PROXY_URL}/api/secrets/v1/secret/${ARC_SECRET}" + SECRET=$(curl "${secrets_url}") + TOKEN=$(echo "${SECRET}" | jq -r '.secret.token') +elif [ -n "${ARC_TOKEN}" ] # Allow for local testing. +then + TOKEN="${ARC_TOKEN}" +fi + +if [ -n "${TOKEN}" ] +then + cat >"${HOME}/.arcrc" <<END +{ + "hosts": { + "https://phabricator.services.mozilla.com/api/": { + "token": "${TOKEN}" + } + } +} +END + set -x + chmod 600 "${HOME}/.arcrc" +fi + +export HGPLAIN=1 + +# shellcheck disable=SC2086 +/home/worker/scripts/periodic_file_updates.sh -p "${PRODUCT}" -b "${BRANCH}" -a ${PARAMS} diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js new file mode 100644 index 0000000000..c34497ab0e --- /dev/null +++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js @@ -0,0 +1,665 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// How to run this file: +// 1. [obtain firefox source code] +// 2. [build/obtain firefox binaries] +// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \ +// [path to]/genHPKPStaticpins.js \ +// [absolute path to]/PreloadedHPKPins.json \ +// [absolute path to]/StaticHPKPins.h +"use strict"; + +if (arguments.length != 2) { + throw new Error( + "Usage: genHPKPStaticPins.js " + + "<absolute path to PreloadedHPKPins.json> " + + "<absolute path to StaticHPKPins.h>" + ); +} + +var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm"); +var { FileUtils } = ChromeUtils.import("resource://gre/modules/FileUtils.jsm"); +var { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm"); + +var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService( + Ci.nsIX509CertDB +); + +const SHA256_PREFIX = "sha256/"; +const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_"; + +// Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable) +const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14; + +const FILE_HEADER = + "/* This Source Code Form is subject to the terms of the Mozilla Public\n" + + " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" + + " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" + + "\n" + + "/*****************************************************************************/\n" + + "/* This is an automatically generated file. If you're not */\n" + + "/* PublicKeyPinningService.cpp, you shouldn't be #including it. 
*/\n" + + "/*****************************************************************************/\n" + + "#include <stdint.h>" + + "\n"; + +const DOMAINHEADER = + "/* Domainlist */\n" + + "struct TransportSecurityPreload {\n" + + " // See bug 1338873 about making these fields const.\n" + + " const char* mHost;\n" + + " bool mIncludeSubdomains;\n" + + " bool mTestMode;\n" + + " bool mIsMoz;\n" + + " int32_t mId;\n" + + " const StaticFingerprints* pinset;\n" + + "};\n\n"; + +const PINSETDEF = + "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" + + "struct StaticFingerprints {\n" + + " // See bug 1338873 about making these fields const.\n" + + " size_t size;\n" + + " const char* const* data;\n" + + "};\n\n"; + +// Command-line arguments +var gStaticPins = parseJson(arguments[0]); + +// Open the output file. +var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); +file.initWithPath(arguments[1]); +var gFileOutputStream = FileUtils.openSafeFileOutputStream(file); + +function writeString(string) { + gFileOutputStream.write(string, string.length); +} + +function readFileToString(filename) { + let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); + file.initWithPath(filename); + let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsIFileInputStream + ); + stream.init(file, -1, 0, 0); + let buf = NetUtil.readInputStreamToString(stream, stream.available()); + return buf; +} + +function stripComments(buf) { + let lines = buf.split("\n"); + let entryRegex = /^\s*\/\//; + let data = ""; + for (let i = 0; i < lines.length; ++i) { + let match = entryRegex.exec(lines[i]); + if (!match) { + data = data + lines[i]; + } + } + return data; +} + +function download(filename) { + let req = new XMLHttpRequest(); + req.open("GET", filename, false); // doing the request synchronously + try { + req.send(); + } catch (e) { + throw new Error(`ERROR: problem downloading '${filename}': ${e}`); + } + + if 
(req.status != 200) { + throw new Error( + "ERROR: problem downloading '" + filename + "': status " + req.status + ); + } + + let resultDecoded; + try { + resultDecoded = atob(req.responseText); + } catch (e) { + throw new Error( + "ERROR: could not decode data as base64 from '" + filename + "': " + e + ); + } + return resultDecoded; +} + +function downloadAsJson(filename) { + // we have to filter out '//' comments, while not mangling the json + let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, ""); + let data = null; + try { + data = JSON.parse(result); + } catch (e) { + throw new Error( + "ERROR: could not parse data from '" + filename + "': " + e + ); + } + return data; +} + +// Returns a Subject Public Key Digest from the given pem, if it exists. +function getSKDFromPem(pem) { + let cert = gCertDB.constructX509FromBase64(pem, pem.length); + return cert.sha256SubjectPublicKeyInfoDigest; +} + +/** + * Hashes |input| using the SHA-256 algorithm in the following manner: + * btoa(sha256(atob(input))) + * + * @argument {String} input Base64 string to decode and return the hash of. + * @returns {String} Base64 encoded SHA-256 hash. + */ +function sha256Base64(input) { + let decodedValue; + try { + decodedValue = atob(input); + } catch (e) { + throw new Error(`ERROR: could not decode as base64: '${input}': ${e}`); + } + + // Convert |decodedValue| to an array so that it can be hashed by the + // nsICryptoHash instance below. + // In most cases across the code base, convertToByteArray() of + // nsIScriptableUnicodeConverter is used to do this, but the method doesn't + // seem to work here. + let data = []; + for (let i = 0; i < decodedValue.length; i++) { + data[i] = decodedValue.charCodeAt(i); + } + + let hasher = Cc["@mozilla.org/security/hash;1"].createInstance( + Ci.nsICryptoHash + ); + hasher.init(hasher.SHA256); + hasher.update(data, data.length); + + // true is passed so that the hasher returns a Base64 encoded string. 
+ return hasher.finish(true); +} + +// Downloads the static certs file and tries to map Google Chrome nicknames +// to Mozilla nicknames, as well as storing any hashes for pins for which we +// don't have root PEMs. Each entry consists of a line containing the name of +// the pin followed either by a hash in the format "sha256/" + base64(hash), +// a PEM encoded public key, or a PEM encoded certificate. +// For certificates that we have in our database, +// return a map of Google's nickname to ours. For ones that aren't return a +// map of Google's nickname to SHA-256 values. This code is modeled after agl's +// https://github.com/agl/transport-security-state-generate, which doesn't +// live in the Chromium repo because go is not an official language in +// Chromium. +// For all of the entries in this file: +// - If the entry has a hash format, find the Mozilla pin name (cert nickname) +// and stick the hash into certSKDToName +// - If the entry has a PEM format, parse the PEM, find the Mozilla pin name +// and stick the hash in certSKDToName +// We MUST be able to find a corresponding cert nickname for the Chrome names, +// otherwise we skip all pinsets referring to that Chrome name. +function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) { + // Prefixes that we care about. + const BEGIN_CERT = "-----BEGIN CERTIFICATE-----"; + const END_CERT = "-----END CERTIFICATE-----"; + const BEGIN_PUB_KEY = "-----BEGIN PUBLIC KEY-----"; + const END_PUB_KEY = "-----END PUBLIC KEY-----"; + + // Parsing states. + const PRE_NAME = 0; + const POST_NAME = 1; + const IN_CERT = 2; + const IN_PUB_KEY = 3; + let state = PRE_NAME; + + let lines = download(filename).split("\n"); + let pemCert = ""; + let pemPubKey = ""; + let hash = ""; + let chromeNameToHash = {}; + let chromeNameToMozName = {}; + let chromeName; + for (let line of lines) { + // Skip comments and newlines. 
+ if (line.length == 0 || line[0] == "#") { + continue; + } + switch (state) { + case PRE_NAME: + chromeName = line; + state = POST_NAME; + break; + case POST_NAME: + if (line.startsWith(SHA256_PREFIX)) { + hash = line.substring(SHA256_PREFIX.length); + chromeNameToHash[chromeName] = hash; + certNameToSKD[chromeName] = hash; + certSKDToName[hash] = chromeName; + state = PRE_NAME; + } else if (line.startsWith(BEGIN_CERT)) { + state = IN_CERT; + } else if (line.startsWith(BEGIN_PUB_KEY)) { + state = IN_PUB_KEY; + } else { + throw new Error( + "ERROR: couldn't parse Chrome certificate file line: " + line + ); + } + break; + case IN_CERT: + if (line.startsWith(END_CERT)) { + state = PRE_NAME; + hash = getSKDFromPem(pemCert); + pemCert = ""; + let mozName; + if (hash in certSKDToName) { + mozName = certSKDToName[hash]; + } else { + // Not one of our built-in certs. Prefix the name with + // GOOGLE_PIN_. + mozName = GOOGLE_PIN_PREFIX + chromeName; + dump( + "Can't find hash in builtin certs for Chrome nickname " + + chromeName + + ", inserting " + + mozName + + "\n" + ); + certSKDToName[hash] = mozName; + certNameToSKD[mozName] = hash; + } + chromeNameToMozName[chromeName] = mozName; + } else { + pemCert += line; + } + break; + case IN_PUB_KEY: + if (line.startsWith(END_PUB_KEY)) { + state = PRE_NAME; + hash = sha256Base64(pemPubKey); + pemPubKey = ""; + chromeNameToHash[chromeName] = hash; + certNameToSKD[chromeName] = hash; + certSKDToName[hash] = chromeName; + } else { + pemPubKey += line; + } + break; + default: + throw new Error( + "ERROR: couldn't parse Chrome certificate file " + line + ); + } + } + return [chromeNameToHash, chromeNameToMozName]; +} + +// We can only import pinsets from chrome if for every name in the pinset: +// - We have a hash from Chrome's static certificate file +// - We have a builtin cert +// If the pinset meets these requirements, we store a map array of pinset +// objects: +// { +// pinset_name : { +// // Array of names with entries in 
certNameToSKD +// sha256_hashes: [] +// } +// } +// and an array of imported pinset entries: +// { name: string, include_subdomains: boolean, test_mode: boolean, +// pins: pinset_name } +function downloadAndParseChromePins( + filename, + chromeNameToHash, + chromeNameToMozName, + certNameToSKD, + certSKDToName +) { + let chromePreloads = downloadAsJson(filename); + let chromePins = chromePreloads.pinsets; + let chromeImportedPinsets = {}; + let chromeImportedEntries = []; + + chromePins.forEach(function(pin) { + let valid = true; + let pinset = { name: pin.name, sha256_hashes: [] }; + // Translate the Chrome pinset format to ours + pin.static_spki_hashes.forEach(function(name) { + if (name in chromeNameToHash) { + let hash = chromeNameToHash[name]; + pinset.sha256_hashes.push(certSKDToName[hash]); + + // We should have already added hashes for all of these when we + // imported the certificate file. + if (!certNameToSKD[name]) { + throw new Error("ERROR: No hash for name: " + name); + } + } else if (name in chromeNameToMozName) { + pinset.sha256_hashes.push(chromeNameToMozName[name]); + } else { + dump( + "Skipping Chrome pinset " + + pinset.name + + ", couldn't find " + + "builtin " + + name + + " from cert file\n" + ); + valid = false; + } + }); + if (valid) { + chromeImportedPinsets[pinset.name] = pinset; + } + }); + + // Grab the domain entry lists. Chrome's entry format is similar to + // ours, except theirs includes a HSTS mode. + const cData = gStaticPins.chromium_data; + let entries = chromePreloads.entries; + entries.forEach(function(entry) { + // HSTS entry only + if (!entry.pins) { + return; + } + let pinsetName = cData.substitute_pinsets[entry.pins]; + if (!pinsetName) { + pinsetName = entry.pins; + } + + // We trim the entry name here to avoid breaking hostname comparisons in the + // HPKP implementation. 
+ entry.name = entry.name.trim(); + + let isProductionDomain = cData.production_domains.includes(entry.name); + let isProductionPinset = cData.production_pinsets.includes(pinsetName); + let excludeDomain = cData.exclude_domains.includes(entry.name); + let isTestMode = !isProductionPinset && !isProductionDomain; + if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) { + chromeImportedEntries.push({ + name: entry.name, + include_subdomains: entry.include_subdomains, + test_mode: isTestMode, + is_moz: false, + pins: pinsetName, + }); + } + }); + return [chromeImportedPinsets, chromeImportedEntries]; +} + +// Returns a pair of maps [certNameToSKD, certSKDToName] between cert +// nicknames and digests of the SPKInfo for the mozilla trust store +function loadNSSCertinfo(extraCertificates) { + let allCerts = gCertDB.getCerts(); + let certNameToSKD = {}; + let certSKDToName = {}; + for (let cert of allCerts) { + if (!cert.isBuiltInRoot) { + continue; + } + let name = cert.displayName; + let SKD = cert.sha256SubjectPublicKeyInfoDigest; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + + for (let cert of extraCertificates) { + let name = cert.commonName; + let SKD = cert.sha256SubjectPublicKeyInfoDigest; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + + { + // This is the pinning test certificate. The key hash identifies the + // default RSA key from pykey. 
+ let name = "End Entity Test Cert"; + let SKD = "VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8="; + certNameToSKD[name] = SKD; + certSKDToName[SKD] = name; + } + return [certNameToSKD, certSKDToName]; +} + +function parseJson(filename) { + let json = stripComments(readFileToString(filename)); + return JSON.parse(json); +} + +function nameToAlias(certName) { + // change the name to a string valid as a c identifier + // remove non-ascii characters + certName = certName.replace(/[^[:ascii:]]/g, "_"); + // replace non word characters + certName = certName.replace(/[^A-Za-z0-9]/g, "_"); + + return "k" + certName + "Fingerprint"; +} + +function compareByName(a, b) { + return a.name.localeCompare(b.name); +} + +function genExpirationTime() { + let now = new Date(); + let nowMillis = now.getTime(); + let expirationMillis = nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000; + let expirationMicros = expirationMillis * 1000; + return ( + "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" + + expirationMicros + + ");\n" + ); +} + +function writeFullPinset(certNameToSKD, certSKDToName, pinset) { + if (!pinset.sha256_hashes || pinset.sha256_hashes.length == 0) { + throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`); + } + writeFingerprints( + certNameToSKD, + certSKDToName, + pinset.name, + pinset.sha256_hashes + ); +} + +function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) { + let varPrefix = "kPinset_" + name; + writeString("static const char* const " + varPrefix + "_Data[] = {\n"); + let SKDList = []; + for (let certName of hashes) { + if (!(certName in certNameToSKD)) { + throw new Error(`ERROR: Can't find '${certName}' in certNameToSKD`); + } + SKDList.push(certNameToSKD[certName]); + } + for (let skd of SKDList.sort()) { + writeString(" " + nameToAlias(certSKDToName[skd]) + ",\n"); + } + if (hashes.length == 0) { + // ANSI C requires that an initialiser list be non-empty. 
+ writeString(" 0\n"); + } + writeString("};\n"); + writeString( + "static const StaticFingerprints " + + varPrefix + + " = {\n " + + "sizeof(" + + varPrefix + + "_Data) / sizeof(const char*),\n " + + varPrefix + + "_Data\n};\n\n" + ); +} + +function writeEntry(entry) { + let printVal = ` { "${entry.name}", `; + if (entry.include_subdomains) { + printVal += "true, "; + } else { + printVal += "false, "; + } + // Default to test mode if not specified. + let testMode = true; + if (entry.hasOwnProperty("test_mode")) { + testMode = entry.test_mode; + } + if (testMode) { + printVal += "true, "; + } else { + printVal += "false, "; + } + if ( + entry.is_moz || + (entry.pins.includes("mozilla") && entry.pins != "mozilla_test") + ) { + printVal += "true, "; + } else { + printVal += "false, "; + } + if ("id" in entry) { + if (entry.id >= 256) { + throw new Error("ERROR: Not enough buckets in histogram"); + } + if (entry.id >= 0) { + printVal += entry.id + ", "; + } + } else { + printVal += "-1, "; + } + printVal += "&kPinset_" + entry.pins; + printVal += " },\n"; + writeString(printVal); +} + +function writeDomainList(chromeImportedEntries) { + writeString("/* Sort hostnames for binary search. */\n"); + writeString( + "static const TransportSecurityPreload " + + "kPublicKeyPinningPreloadList[] = {\n" + ); + let count = 0; + let mozillaDomains = {}; + gStaticPins.entries.forEach(function(entry) { + mozillaDomains[entry.name] = true; + }); + // For any domain for which we have set pins, exclude them from + // chromeImportedEntries. 
+ for (let i = chromeImportedEntries.length - 1; i >= 0; i--) { + if (mozillaDomains[chromeImportedEntries[i].name]) { + dump( + "Skipping duplicate pinset for domain " + + JSON.stringify(chromeImportedEntries[i], undefined, 2) + + "\n" + ); + chromeImportedEntries.splice(i, 1); + } + } + let sortedEntries = gStaticPins.entries; + sortedEntries.push.apply(sortedEntries, chromeImportedEntries); + for (let entry of sortedEntries.sort(compareByName)) { + count++; + writeEntry(entry); + } + writeString("};\n"); + + writeString("\n// Pinning Preload List Length = " + count + ";\n"); + writeString("\nstatic const int32_t kUnknownId = -1;\n"); +} + +function writeFile( + certNameToSKD, + certSKDToName, + chromeImportedPinsets, + chromeImportedEntries +) { + // Compute used pins from both Chrome's and our pinsets, so we can output + // them later. + let usedFingerprints = {}; + let mozillaPins = {}; + gStaticPins.pinsets.forEach(function(pinset) { + mozillaPins[pinset.name] = true; + pinset.sha256_hashes.forEach(function(name) { + usedFingerprints[name] = true; + }); + }); + for (let key in chromeImportedPinsets) { + let pinset = chromeImportedPinsets[key]; + pinset.sha256_hashes.forEach(function(name) { + usedFingerprints[name] = true; + }); + } + + writeString(FILE_HEADER); + + // Write actual fingerprints. 
+ Object.keys(usedFingerprints) + .sort() + .forEach(function(certName) { + if (certName) { + writeString("/* " + certName + " */\n"); + writeString("static const char " + nameToAlias(certName) + "[] =\n"); + writeString(' "' + certNameToSKD[certName] + '";\n'); + writeString("\n"); + } + }); + + // Write the pinsets + writeString(PINSETDEF); + writeString("/* PreloadedHPKPins.json pinsets */\n"); + gStaticPins.pinsets.sort(compareByName).forEach(function(pinset) { + writeFullPinset(certNameToSKD, certSKDToName, pinset); + }); + writeString("/* Chrome static pinsets */\n"); + for (let key in chromeImportedPinsets) { + if (mozillaPins[key]) { + dump("Skipping duplicate pinset " + key + "\n"); + } else { + dump("Writing pinset " + key + "\n"); + writeFullPinset(certNameToSKD, certSKDToName, chromeImportedPinsets[key]); + } + } + + // Write the domainlist entries. + writeString(DOMAINHEADER); + writeDomainList(chromeImportedEntries); + writeString("\n"); + writeString(genExpirationTime()); +} + +function loadExtraCertificates(certStringList) { + let constructedCerts = []; + for (let certString of certStringList) { + constructedCerts.push(gCertDB.constructX509FromBase64(certString)); + } + return constructedCerts; +} + +var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates); +var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates); +var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts( + gStaticPins.chromium_data.cert_file_url, + certNameToSKD, + certSKDToName +); +var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins( + gStaticPins.chromium_data.json_file_url, + chromeNameToHash, + chromeNameToMozName, + certNameToSKD, + certSKDToName +); + +writeFile( + certNameToSKD, + certSKDToName, + chromeImportedPinsets, + chromeImportedEntries +); + +FileUtils.closeSafeFileOutputStream(gFileOutputStream); diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js 
b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js new file mode 100644 index 0000000000..1edfaf0bc0 --- /dev/null +++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js @@ -0,0 +1,571 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +"use strict"; + +// How to run this file: +// 1. [obtain firefox source code] +// 2. [build/obtain firefox binaries] +// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadlist.js [absolute path to]/nsSTSPreloadlist.inc' +// Note: Running this file outputs a new nsSTSPreloadlist.inc in the current +// working directory. + +/* +var Cc = Components.classes; +var Ci = Components.interfaces; +var Cu = Components.utils; +var Cr = Components.results; +*/ +var gSSService = Cc["@mozilla.org/ssservice;1"].getService( + Ci.nsISiteSecurityService +); + +const { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm"); +const { FileUtils } = ChromeUtils.import( + "resource://gre/modules/FileUtils.jsm" +); + +const SOURCE = + "https://chromium.googlesource.com/chromium/src/net/+/master/http/transport_security_state_static.json?format=TEXT"; +const TOOL_SOURCE = + "https://hg.mozilla.org/mozilla-central/file/default/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js"; +const OUTPUT = "nsSTSPreloadList.inc"; +const ERROR_OUTPUT = "nsSTSPreloadList.errors"; +const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18; +const MAX_CONCURRENT_REQUESTS = 500; +const MAX_RETRIES = 1; +const REQUEST_TIMEOUT = 30 * 1000; +const ERROR_NONE = "no error"; +const ERROR_CONNECTING_TO_HOST = "could not connect to host"; +const ERROR_NO_HSTS_HEADER = "did not receive HSTS header"; +const ERROR_MAX_AGE_TOO_LOW = "max-age too low: "; +const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public + * License, 
v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/*****************************************************************************/ +/* This is an automatically generated file. If you're not */ +/* nsSiteSecurityService.cpp, you shouldn't be #including it. */ +/*****************************************************************************/ + +#include <stdint.h> +`; + +const GPERF_DELIM = "%%\n"; + +function download() { + let req = new XMLHttpRequest(); + req.open("GET", SOURCE, false); // doing the request synchronously + try { + req.send(); + } catch (e) { + throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`); + } + + if (req.status != 200) { + throw new Error( + "ERROR: problem downloading '" + SOURCE + "': status " + req.status + ); + } + + let resultDecoded; + try { + resultDecoded = atob(req.responseText); + } catch (e) { + throw new Error( + "ERROR: could not decode data as base64 from '" + SOURCE + "': " + e + ); + } + + // we have to filter out '//' comments, while not mangling the json + let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, ""); + let data = null; + try { + data = JSON.parse(result); + } catch (e) { + throw new Error(`ERROR: could not parse data from '${SOURCE}': ${e}`); + } + return data; +} + +function getHosts(rawdata) { + let hosts = []; + + if (!rawdata || !rawdata.entries) { + throw new Error( + "ERROR: source data not formatted correctly: 'entries' not found" + ); + } + + for (let entry of rawdata.entries) { + if (entry.mode && entry.mode == "force-https") { + if (entry.name) { + // We trim the entry name here to avoid malformed URI exceptions when we + // later try to connect to the domain. 
+ entry.name = entry.name.trim(); + entry.retries = MAX_RETRIES; + // We prefer the camelCase variable to the JSON's snake case version + entry.includeSubdomains = entry.include_subdomains; + hosts.push(entry); + } else { + throw new Error("ERROR: entry not formatted correctly: no name found"); + } + } + } + + return hosts; +} + +function processStsHeader(host, header, status, securityInfo) { + let maxAge = { + value: 0, + }; + let includeSubdomains = { + value: false, + }; + let error = ERROR_NONE; + if (header != null && securityInfo != null) { + try { + let uri = Services.io.newURI("https://" + host.name); + let secInfo = securityInfo.QueryInterface(Ci.nsITransportSecurityInfo); + gSSService.processHeader( + Ci.nsISiteSecurityService.HEADER_HSTS, + uri, + header, + secInfo, + 0, + Ci.nsISiteSecurityService.SOURCE_PRELOAD_LIST, + {}, + maxAge, + includeSubdomains + ); + } catch (e) { + dump( + "ERROR: could not process header '" + + header + + "' from " + + host.name + + ": " + + e + + "\n" + ); + error = e; + } + } else if (status == 0) { + error = ERROR_CONNECTING_TO_HOST; + } else { + error = ERROR_NO_HSTS_HEADER; + } + + if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) { + error = ERROR_MAX_AGE_TOO_LOW; + } + + return { + name: host.name, + maxAge: maxAge.value, + includeSubdomains: includeSubdomains.value, + error, + retries: host.retries - 1, + forceInclude: host.forceInclude, + }; +} + +// RedirectAndAuthStopper prevents redirects and HTTP authentication +function RedirectAndAuthStopper() {} + +RedirectAndAuthStopper.prototype = { + // nsIChannelEventSink + asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) { + throw Components.Exception("", Cr.NS_ERROR_ENTITY_CHANGED); + }, + + // nsIAuthPrompt2 + promptAuth(channel, level, authInfo) { + return false; + }, + + asyncPromptAuth(channel, callback, context, level, authInfo) { + throw Components.Exception("", Cr.NS_ERROR_NOT_IMPLEMENTED); + }, + + getInterface(iid) { + return 
this.QueryInterface(iid); + }, + + QueryInterface: ChromeUtils.generateQI([ + "nsIChannelEventSink", + "nsIAuthPrompt2", + ]), +}; + +function fetchstatus(host) { + return new Promise((resolve, reject) => { + let xhr = new XMLHttpRequest(); + let uri = "https://" + host.name + "/"; + + xhr.open("head", uri, true); + xhr.setRequestHeader("X-Automated-Tool", TOOL_SOURCE); + xhr.timeout = REQUEST_TIMEOUT; + + let errorHandler = () => { + dump("ERROR: exception making request to " + host.name + "\n"); + resolve( + processStsHeader( + host, + null, + xhr.status, + xhr.channel && xhr.channel.securityInfo + ) + ); + }; + + xhr.onerror = errorHandler; + xhr.ontimeout = errorHandler; + xhr.onabort = errorHandler; + + xhr.onload = () => { + let header = xhr.getResponseHeader("strict-transport-security"); + resolve( + processStsHeader(host, header, xhr.status, xhr.channel.securityInfo) + ); + }; + + xhr.channel.notificationCallbacks = new RedirectAndAuthStopper(); + xhr.send(); + }); +} + +async function getHSTSStatus(host) { + do { + host = await fetchstatus(host); + } while (shouldRetry(host)); + return host; +} + +function compareHSTSStatus(a, b) { + if (a.name > b.name) { + return 1; + } + if (a.name < b.name) { + return -1; + } + return 0; +} + +function writeTo(string, fos) { + fos.write(string, string.length); +} + +// Determines and returns a string representing a declaration of when this +// preload list should no longer be used. +// This is the current time plus MINIMUM_REQUIRED_MAX_AGE. 
+function getExpirationTimeString() { + let now = new Date(); + let nowMillis = now.getTime(); + // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds + let expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000; + let expirationMicros = expirationMillis * 1000; + return ( + "const PRTime gPreloadListExpirationTime = INT64_C(" + + expirationMicros + + ");\n" + ); +} + +function shouldRetry(response) { + return ( + response.error != ERROR_NO_HSTS_HEADER && + response.error != ERROR_MAX_AGE_TOO_LOW && + response.error != ERROR_NONE && + response.retries > 0 + ); +} + +// Copied from browser/components/migration/MigrationUtils.jsm +function spinResolve(promise) { + if (!(promise instanceof Promise)) { + return promise; + } + let done = false; + let result = null; + let error = null; + promise + .catch(e => { + error = e; + }) + .then(r => { + result = r; + done = true; + }); + + Services.tm.spinEventLoopUntil(() => done); + if (error) { + throw error; + } else { + return result; + } +} + +async function probeHSTSStatuses(inHosts) { + let totalLength = inHosts.length; + dump("Examining " + totalLength + " hosts.\n"); + + // Debug/testing on a small number of hosts + // while (inHosts.length > 40000) { + + // Make requests in batches of 250. Otherwise, we have too many in-flight + // requests and the time it takes to process them causes them all to time out. 
+ let allResults = []; + while (inHosts.length > 0) { + let promises = []; + for (let i = 0; i < 250 && inHosts.length > 0; i++) { + let host = inHosts.shift(); + promises.push(getHSTSStatus(host)); + } + let results = await Promise.all(promises); + let progress = ( + (100 * (totalLength - inHosts.length)) / + totalLength + ).toFixed(2); + dump(progress + "% done\n"); + allResults = allResults.concat(results); + } + + dump("HSTS Probe received " + allResults.length + " statuses.\n"); + return allResults; +} + +function readCurrentList(filename) { + var currentHosts = {}; + var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); + file.initWithPath(filename); + var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsILineInputStream + ); + fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF); + var line = {}; + + // While we generate entries matching the latest version format, + // we still need to be able to read entries in the previous version formats + // for bootstrapping a latest version preload list from a previous version + // preload list. Hence these regexes. 
+ const entryRegexes = [ + /([^,]+), (0|1)/, // v3 + / {2}\/\* "([^"]*)", (true|false) \*\//, // v2 + / {2}{ "([^"]*)", (true|false) },/, // v1 + ]; + + while (fis.readLine(line)) { + let match; + entryRegexes.find(r => { + match = r.exec(line.value); + return match; + }); + if (match) { + currentHosts[match[1]] = match[2] == "1" || match[2] == "true"; + } + } + return currentHosts; +} + +function combineLists(newHosts, currentHosts) { + let newHostsSet = new Set(); + + for (let newHost of newHosts) { + newHostsSet.add(newHost.name); + } + + for (let currentHost in currentHosts) { + if (!newHostsSet.has(currentHost)) { + newHosts.push({ name: currentHost, retries: MAX_RETRIES }); + } + } +} + +const TEST_ENTRIES = [ + { + name: "includesubdomains.preloaded.test", + includeSubdomains: true, + }, + { + name: "includesubdomains2.preloaded.test", + includeSubdomains: true, + }, + { + name: "noincludesubdomains.preloaded.test", + includeSubdomains: false, + }, +]; + +function deleteTestHosts(currentHosts) { + for (let testEntry of TEST_ENTRIES) { + delete currentHosts[testEntry.name]; + } +} + +function getTestHosts() { + let hosts = []; + for (let testEntry of TEST_ENTRIES) { + hosts.push({ + name: testEntry.name, + maxAge: MINIMUM_REQUIRED_MAX_AGE, + includeSubdomains: testEntry.includeSubdomains, + error: ERROR_NONE, + // This deliberately doesn't have a value for `retries` (because we should + // never attempt to connect to this host). + forceInclude: true, + }); + } + return hosts; +} + +async function insertHosts(inoutHostList, inAddedHosts) { + for (let host of inAddedHosts) { + inoutHostList.push(host); + } +} + +function filterForcedInclusions(inHosts, outNotForced, outForced) { + // Apply our filters (based on policy today) to determine which entries + // will be included without being checked (forced); the others will be + // checked using active probing. 
+ for (let host of inHosts) { + if ( + host.policy == "google" || + host.policy == "public-suffix" || + host.policy == "public-suffix-requested" + ) { + host.forceInclude = true; + host.error = ERROR_NONE; + outForced.push(host); + } else { + outNotForced.push(host); + } + } +} + +function output(statuses) { + dump("INFO: Writing output to " + OUTPUT + "\n"); + try { + var { FileUtils } = ChromeUtils.import( + "resource://gre/modules/FileUtils.jsm" + ); + + let file = FileUtils.getFile("CurWorkD", [OUTPUT]); + let fos = FileUtils.openSafeFileOutputStream(file); + writeTo(HEADER, fos); + writeTo(getExpirationTimeString(), fos); + + writeTo(GPERF_DELIM, fos); + + for (let status of statuses) { + let includeSubdomains = status.includeSubdomains ? 1 : 0; + writeTo(status.name + ", " + includeSubdomains + "\n", fos); + } + + writeTo(GPERF_DELIM, fos); + FileUtils.closeSafeFileOutputStream(fos); + dump("finished writing output file\n"); + } catch (e) { + dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n"); + } +} + +function errorToString(status) { + return status.error == ERROR_MAX_AGE_TOO_LOW + ? 
status.error + status.maxAge + : status.error; +} + +async function main(args) { + if (args.length != 1) { + throw new Error( + "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>" + ); + } + + // get the current preload list + let currentHosts = readCurrentList(args[0]); + // delete any hosts we use in tests so we don't actually connect to them + deleteTestHosts(currentHosts); + // disable the current preload list so it won't interfere with requests we make + Services.prefs.setBoolPref( + "network.stricttransportsecurity.preloadlist", + false + ); + // download and parse the raw json file from the Chromium source + let rawdata = download(); + // get just the hosts with mode: "force-https" + let hosts = getHosts(rawdata); + // add hosts in the current list to the new list (avoiding duplicates) + combineLists(hosts, currentHosts); + + // Don't contact hosts that are forced to be included anyway + let hostsToContact = []; + let forcedHosts = []; + filterForcedInclusions(hosts, hostsToContact, forcedHosts); + + // Initialize the final status list + let hstsStatuses = []; + // Add the hosts we use in tests + dump("Adding test hosts\n"); + insertHosts(hstsStatuses, getTestHosts()); + // Add in the hosts that are forced + dump("Adding forced hosts\n"); + insertHosts(hstsStatuses, forcedHosts); + + let total = await probeHSTSStatuses(hostsToContact) + .then(function(probedStatuses) { + return hstsStatuses.concat(probedStatuses); + }) + .then(function(statuses) { + return statuses.sort(compareHSTSStatus); + }) + .then(function(statuses) { + for (let status of statuses) { + // If we've encountered an error for this entry (other than the site not + // sending an HSTS header), be safe and don't remove it from the list + // (given that it was already on the list). 
+ if ( + !status.forceInclude && + status.error != ERROR_NONE && + status.error != ERROR_NO_HSTS_HEADER && + status.error != ERROR_MAX_AGE_TOO_LOW && + status.name in currentHosts + ) { + // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n"); + status.maxAge = MINIMUM_REQUIRED_MAX_AGE; + status.includeSubdomains = currentHosts[status.name]; + } + } + return statuses; + }) + .then(function(statuses) { + // Filter out entries we aren't including. + var includedStatuses = statuses.filter(function(status) { + if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) { + // dump("INFO: " + status.name + " NOT ON the preload list\n"); + return false; + } + + // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n"); + if (status.forceInclude && status.error != ERROR_NONE) { + dump( + status.name + + ": " + + errorToString(status) + + " (error ignored - included regardless)\n" + ); + } + return true; + }); + return includedStatuses; + }); + + // Write the output file + output(total); + + dump("HSTS probing all done\n"); +} + +// arguments is a global within xpcshell +spinResolve(main(arguments)); diff --git a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh new file mode 100755 index 0000000000..7764777c1a --- /dev/null +++ b/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh @@ -0,0 +1,611 @@ +#!/bin/bash + +set -ex + +function usage { + cat <<EOF + +Usage: $(basename "$0") -h # Displays this usage/help text +Usage: $(basename "$0") -x # lists exit codes +Usage: $(basename "$0") [-p product] + [-r existing_repo_dir] + # Use mozilla-central builds to check HSTS & HPKP + [--use-mozilla-central] + # Use archive.m.o instead of the taskcluster index to get xpcshell + [--use-ftp-builds] + # One (or more) of the following actions must be specified. 
+ --hsts | --hpkp | --remote-settings | --suffix-list + -b branch + +EOF +} + +PRODUCT="firefox" +BRANCH="" +PLATFORM_EXT="tar.bz2" +UNPACK_CMD="tar jxf" +CLOSED_TREE=false +DONTBUILD=false +APPROVAL=false +COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>' +REPODIR='' +HGHOST="hg.mozilla.org" +STAGEHOST="archive.mozilla.org" +WGET="wget -nv" +UNTAR="tar -zxf" +DIFF="$(command -v diff) -u" +BASEDIR="${HOME}" + +SCRIPTDIR="$(realpath "$(dirname "$0")")" +HG="$(command -v hg)" +DATADIR="${BASEDIR}/data" +mkdir -p "${DATADIR}" + +USE_MC=false +USE_TC=true +JQ="$(command -v jq)" + +DO_HSTS=false +HSTS_PRELOAD_SCRIPT="${SCRIPTDIR}/getHSTSPreloadList.js" +HSTS_PRELOAD_ERRORS="nsSTSPreloadList.errors" +HSTS_PRELOAD_INC="${DATADIR}/nsSTSPreloadList.inc" +HSTS_UPDATED=false + +DO_HPKP=false +HPKP_PRELOAD_SCRIPT="${SCRIPTDIR}/genHPKPStaticPins.js" +HPKP_PRELOAD_ERRORS="StaticHPKPins.errors" +HPKP_PRELOAD_JSON="${DATADIR}/PreloadedHPKPins.json" +HPKP_PRELOAD_INC="StaticHPKPins.h" +HPKP_PRELOAD_INPUT="${DATADIR}/${HPKP_PRELOAD_INC}" +HPKP_PRELOAD_OUTPUT="${DATADIR}/${HPKP_PRELOAD_INC}.out" +HPKP_UPDATED=false + +DO_REMOTE_SETTINGS=false +REMOTE_SETTINGS_SERVER='' +REMOTE_SETTINGS_INPUT="${DATADIR}/remote-settings.in" +REMOTE_SETTINGS_OUTPUT="${DATADIR}/remote-settings.out" +REMOTE_SETTINGS_DIR="/services/settings/dumps" +REMOTE_SETTINGS_UPDATED=false + +DO_SUFFIX_LIST=false +GITHUB_SUFFIX_URL="https://raw.githubusercontent.com/publicsuffix/list/master/public_suffix_list.dat" +GITHUB_SUFFIX_LOCAL="public_suffix_list.dat" +HG_SUFFIX_LOCAL="effective_tld_names.dat" +HG_SUFFIX_PATH="/netwerk/dns/${HG_SUFFIX_LOCAL}" +SUFFIX_LIST_UPDATED=false + +ARTIFACTS_DIR="${ARTIFACTS_DIR:-.}" +# Defaults +HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}" +HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}" +REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}" 
+SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}" + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. +queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1" +index_base="$TASKCLUSTER_ROOT_URL/api/index/v1" + +# Cleanup common artifacts. +function preflight_cleanup { + cd "${BASEDIR}" + rm -rf "${PRODUCT}" tests "${BROWSER_ARCHIVE}" "${TESTS_ARCHIVE}" +} + +function download_shared_artifacts_from_ftp { + cd "${BASEDIR}" + + # Download everything we need to run js with xpcshell + echo "INFO: Downloading all the necessary pieces from ${STAGEHOST}..." + ARTIFACT_DIR="nightly/latest-${REPODIR}" + if [ "${USE_MC}" == "true" ]; then + ARTIFACT_DIR="nightly/latest-mozilla-central" + fi + + BROWSER_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${BROWSER_ARCHIVE}" + TESTS_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${TESTS_ARCHIVE}" + + echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}" + ${WGET} "${BROWSER_ARCHIVE_URL}" + echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}" + ${WGET} "${TESTS_ARCHIVE_URL}" +} + +function download_shared_artifacts_from_tc { + cd "${BASEDIR}" + TASKID_FILE="taskId.json" + + # Download everything we need to run js with xpcshell + echo "INFO: Downloading all the necessary pieces from the taskcluster index..." 
+ TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.shippable.latest.${PRODUCT}.linux64-opt" + if [ "${USE_MC}" == "true" ]; then + TASKID_URL="$index_base/task/gecko.v2.mozilla-central.shippable.latest.${PRODUCT}.linux64-opt" + fi + ${WGET} -O ${TASKID_FILE} "${TASKID_URL}" + INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})" + if [ -z "${INDEX_TASK_ID}" ]; then + echo "Failed to look up taskId at ${TASKID_URL}" + exit 22 + else + echo "INFO: Got taskId of $INDEX_TASK_ID" + fi + + TASKSTATUS_FILE="taskstatus.json" + STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status" + ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}" + LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1)) + echo "INFO: Examining run number ${LAST_RUN_INDEX}" + + BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}" + echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}" + ${WGET} "${BROWSER_ARCHIVE_URL}" + + TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}" + echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}" + ${WGET} "${TESTS_ARCHIVE_URL}" +} + +function unpack_artifacts { + cd "${BASEDIR}" + if [ ! -f "${BROWSER_ARCHIVE}" ]; then + echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2 + exit 31 + fi + if [ ! -f "${TESTS_ARCHIVE}" ]; then + echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2 + exit 32 + fi + # Unpack the browser and move xpcshell in place for updating the preload list. + echo "INFO: Unpacking resources..." + ${UNPACK_CMD} "${BROWSER_ARCHIVE}" + mkdir -p tests + cd tests + ${UNTAR} "../${TESTS_ARCHIVE}" + cd "${BASEDIR}" + cp tests/bin/xpcshell "${PRODUCT}" +} + +# Downloads the current in-tree HSTS (HTTP Strict Transport Security) files. +# Runs a simple xpcshell script to generate up-to-date HSTS information. +# Compares the new HSTS output with the old to determine whether we need to update. 
+function compare_hsts_files { + cd "${BASEDIR}" + + HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC}")" + + echo "INFO: Downloading existing include file..." + rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC}" + echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}" + ${WGET} -O "${HSTS_PRELOAD_INC}" "${HSTS_PRELOAD_INC_HG}" + + if [ ! -f "${HSTS_PRELOAD_INC}" ]; then + echo "Downloaded file '${HSTS_PRELOAD_INC}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2 + exit 41 + fi + + # Run the script to get an updated preload list. + echo "INFO: Generating new HSTS preload list..." + cd "${BASEDIR}/${PRODUCT}" + LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC}" + + # The created files should be non-empty. + echo "INFO: Checking whether new HSTS preload list is valid..." + if [ ! -s "${HSTS_PRELOAD_INC}" ]; then + echo "New HSTS preload list ${HSTS_PRELOAD_INC} is empty. That's less good." >&2 + exit 42 + fi + cd "${BASEDIR}" + + # Check for differences + echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}" + ${DIFF} "${BASEDIR}/${PRODUCT}/$(basename "${HSTS_PRELOAD_INC}")" "${HSTS_PRELOAD_INC}" | tee "${HSTS_DIFF_ARTIFACT}" + if [ -s "${HSTS_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +# Downloads the current in-tree HPKP (HTTP public key pinning) files. +# Runs a simple xpcshell script to generate up-to-date HPKP information. +# Compares the new HPKP output with the old to determine whether we need to update. 
+function compare_hpkp_files { + cd "${BASEDIR}" + HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")" + + HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}" + + rm -f "${HPKP_PRELOAD_OUTPUT}" + ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}" + ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}" + + # Run the script to get an updated preload list. + echo "INFO: Generating new HPKP preload list..." + cd "${BASEDIR}/${PRODUCT}" + LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}" + + # The created files should be non-empty. + echo "INFO: Checking whether new HPKP preload list is valid..." + + if [ ! -s "${HPKP_PRELOAD_OUTPUT}" ]; then + echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2 + exit 52 + fi + if ! grep kPreloadPKPinsExpirationTime "${HPKP_PRELOAD_OUTPUT}"; then + echo "${HPKP_PRELOAD_OUTPUT} is missing an expiration time. Truncated?" >&2 + exit 53 + fi + cd "${BASEDIR}" + + echo "INFO: diffing old/new HPKP preload lists..." + ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}" + if [ -s "${HPKP_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +function is_valid_xml { + xmlfile=$1 + XMLLINT=$(command -v xmllint 2>/dev/null | head -n1) + + if [ ! 
-x "${XMLLINT}" ]; then + echo "ERROR: xmllint not found in PATH" + exit 60 + fi + ${XMLLINT} --nonet --noout "${xmlfile}" +} + +# Downloads the public suffix list +function compare_suffix_lists { + HG_SUFFIX_URL="${HGREPO}/raw-file/default/${HG_SUFFIX_PATH}" + cd "${BASEDIR}" + + echo "INFO: ${WGET} -O ${GITHUB_SUFFIX_LOCAL} ${GITHUB_SUFFIX_URL}" + rm -f "${GITHUB_SUFFIX_LOCAL}" + ${WGET} -O "${GITHUB_SUFFIX_LOCAL}" "${GITHUB_SUFFIX_URL}" + + echo "INFO: ${WGET} -O ${HG_SUFFIX_LOCAL} ${HG_SUFFIX_URL}" + rm -f "${HG_SUFFIX_LOCAL}" + ${WGET} -O "${HG_SUFFIX_LOCAL}" "${HG_SUFFIX_URL}" + + echo "INFO: diffing in-tree suffix list against the suffix list from AMO..." + ${DIFF} ${GITHUB_SUFFIX_LOCAL} ${HG_SUFFIX_LOCAL} | tee "${SUFFIX_LIST_DIFF_ARTIFACT}" + if [ -s "${SUFFIX_LIST_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +function compare_remote_settings_files { + REMOTE_SETTINGS_SERVER="https://firefox.settings.services.mozilla.com/v1" + + # 1. List remote settings collections from server. + echo "INFO: fetch remote settings list from server" + ${WGET} -qO- "${REMOTE_SETTINGS_SERVER}/buckets/monitor/collections/changes/records" |\ + ${JQ} -r '.data[] | .bucket+"/"+.collection' |\ + # 2. For each entry ${bucket, collection} + while IFS="/" read -r bucket collection; do + + # 3. Download the dump from HG into REMOTE_SETTINGS_INPUT folder + hg_dump_url="${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${bucket}/${collection}.json" + local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json" + mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}" + ${WGET} -qO "$local_location_input" "$hg_dump_url" + if [ $? -eq 8 ]; then + # We don't keep any dump for this collection, skip it. + # Try to clean up in case no collection in this bucket has dump. + rmdir "$REMOTE_SETTINGS_INPUT/${bucket}" --ignore-fail-on-non-empty + continue + fi + + # 4. 
Download server version into REMOTE_SETTINGS_OUTPUT folder + remote_records_url="$REMOTE_SETTINGS_SERVER/buckets/${bucket}/collections/${collection}/records" + local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json" + mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}" + ${WGET} -qO- "$remote_records_url" | ${JQ} . > "${local_location_output}" + + # 5. Download attachments if needed. + if [ "${bucket}" = "blocklists" ] && [ "${collection}" = "addons-bloomfilters" ]; then + # Find the attachment with the most recent generation_time, like _updateMLBF in Blocklist.jsm. + # The server should return one "bloomfilter-base" record, but in case it returns multiple, + # return the most recent one. The server may send multiple entries if we ever decide to use + # the "filter_expression" feature of Remote Settings to send different records to specific + # channels. In that case this code should be updated to recognize the filter expression, + # but until we do, simply select the most recent record - can't go wrong with that. + # Note that "attachment_type" and "generation_time" are specific to addons-bloomfilters. + update_remote_settings_attachment "${bucket}" "${collection}" addons-mlbf.bin \ + 'map(select(.attachment_type == "bloomfilter-base")) | sort_by(.generation_time) | last' + fi + # Here is an example to download an attachment with record identifier "ID": + # update_remote_settings_attachment "${bucket}" "${collection}" ID '.[] | select(.id == "ID")' + # NOTE: The downloaded data is not validated. xpcshell should be used for that. + done + + echo "INFO: diffing old/new remote settings dumps..." + ${DIFF} -r "${REMOTE_SETTINGS_INPUT}" "${REMOTE_SETTINGS_OUTPUT}" > "${REMOTE_SETTINGS_DIFF_ARTIFACT}" + if [ -s "${REMOTE_SETTINGS_DIFF_ARTIFACT}" ] + then + return 0 + fi + return 1 +} + +# Helper for compare_remote_settings_files to download attachments from remote settings. 
+# The format and location is documented at: +# https://firefox-source-docs.mozilla.org/services/common/services/RemoteSettings.html#packaging-attachments +function update_remote_settings_attachment() { + local bucket=$1 + local collection=$2 + local attachment_id=$3 + # $4 is a jq filter on the arrays that should return one record with the attachment + local jq_attachment_selector=".data | map(select(.attachment)) | $4" + + # These paths match _readAttachmentDump in services/settings/Attachments.jsm. + local path_to_attachment="${bucket}/${collection}/${attachment_id}" + local path_to_meta="${bucket}/${collection}/${attachment_id}.meta.json" + local old_meta="$REMOTE_SETTINGS_INPUT/${path_to_meta}" + local new_meta="$REMOTE_SETTINGS_OUTPUT/${path_to_meta}" + + # Those files should have been created by compare_remote_settings_files before the function call. + local local_location_input="$REMOTE_SETTINGS_INPUT/${bucket}/${collection}.json" + local local_location_output="$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}.json" + + # Compute the metadata based on already-downloaded records. + mkdir -p "$REMOTE_SETTINGS_INPUT/${bucket}/${collection}" + ${JQ} -cj <"$local_location_input" "${jq_attachment_selector}" > "${old_meta}" + mkdir -p "$REMOTE_SETTINGS_OUTPUT/${bucket}/${collection}" + ${JQ} -cj <"$local_location_output" "${jq_attachment_selector}" > "${new_meta}" + + if cmp --silent "${old_meta}" "${new_meta}" ; then + # Metadata not changed, don't bother downloading the attachments themselves. + return + fi + # Metadata changed. Download attachments. + + echo "INFO: Downloading updated remote settings dump: ${bucket}/${collection}/${attachment_id}" + + # Overwrited old_meta with the actual file from the repo. The content should be equivalent, + # but can have minor differences (e.g. different line endings) if the checked in file was not + # generated by this script (e.g. manually checked in). 
+ ${WGET} -qO "${old_meta}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_meta}" + + ${WGET} -qO "${REMOTE_SETTINGS_INPUT}/${path_to_attachment}" "${HGREPO}/raw-file/default${REMOTE_SETTINGS_DIR}/${path_to_attachment}" + + if [ -z "${ATTACHMENT_BASE_URL}" ] ; then + ATTACHMENT_BASE_URL=$(${WGET} -qO- "${REMOTE_SETTINGS_SERVER}" | ${JQ} -r .capabilities.attachments.base_url) + fi + attachment_path_from_meta=$(${JQ} -r < "${new_meta}" .attachment.location) + ${WGET} -qO "${REMOTE_SETTINGS_OUTPUT}/${path_to_attachment}" "${ATTACHMENT_BASE_URL}${attachment_path_from_meta}" +} + +# Clones an hg repo +function clone_repo { + cd "${BASEDIR}" + if [ ! -d "${REPODIR}" ]; then + ${HG} robustcheckout --sharebase /tmp/hg-store -b default "${HGREPO}" "${REPODIR}" + fi + + ${HG} -R "${REPODIR}" pull + ${HG} -R "${REPODIR}" update -C default +} + +# Copies new HSTS files in place, and commits them. +function stage_hsts_files { + cd "${BASEDIR}" + cp -f "${BASEDIR}/${PRODUCT}/$(basename "${HSTS_PRELOAD_INC}")" "${REPODIR}/security/manager/ssl/" +} + +function stage_hpkp_files { + cd "${BASEDIR}" + cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}" +} + +function stage_remote_settings_files { + cd "${BASEDIR}" + cp -a "${REMOTE_SETTINGS_OUTPUT}"/* "${REPODIR}${REMOTE_SETTINGS_DIR}" +} + +function stage_tld_suffix_files { + cd "${BASEDIR}" + cp -a "${GITHUB_SUFFIX_LOCAL}" "${REPODIR}/${HG_SUFFIX_PATH}" +} + +# Push all pending commits to Phabricator +function push_repo { + cd "${REPODIR}" + if [ ! -r "${HOME}/.arcrc" ] + then + return 1 + fi + if ! ARC=$(command -v arc) + then + return 1 + fi + if [ -z "${REVIEWERS}" ] + then + return 1 + fi + # Clean up older review requests + # Turn Needs Review D624: No bug, Automated HSTS ... 
+ # into D624 + for diff in $($ARC list | grep "Needs Review" | grep -E "${BRANCH} repo-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }') + do + echo "Removing old request $diff" + # There is no 'arc abandon', see bug 1452082 + echo '{"transactions": [{"type":"abandon"}], "objectIdentifier": "'"${diff}"'"}' | arc call-conduit differential.revision.edit + done + + $ARC diff --verbatim --reviewers "${REVIEWERS}" +} + + + +# Main + +# Parse our command-line options. +while [ $# -gt 0 ]; do + case "$1" in + -h) usage; exit 0 ;; + -p) PRODUCT="$2"; shift ;; + -b) BRANCH="$2"; shift ;; + -n) DRY_RUN=true ;; + -c) CLOSED_TREE=true ;; + -d) DONTBUILD=true ;; + -a) APPROVAL=true ;; + --pinset) DO_PRELOAD_PINSET=true ;; + --hsts) DO_HSTS=true ;; + --hpkp) DO_HPKP=true ;; + --remote-settings) DO_REMOTE_SETTINGS=true ;; + --suffix-list) DO_SUFFIX_LIST=true ;; + -r) REPODIR="$2"; shift ;; + --use-mozilla-central) USE_MC=true ;; + --use-ftp-builds) USE_TC=false ;; + -*) usage + exit 11 ;; + *) break ;; # terminate while loop + esac + shift +done + +# Must supply a code branch to work with. +if [ "${BRANCH}" == "" ]; then + echo "Error: You must specify a branch with -b branchname." >&2 + usage + exit 12 +fi + +# Must choose at least one update action. 
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_REMOTE_SETTINGS" == "false" ] && [ "$DO_SUFFIX_LIST" == "false" ] +then + echo "Error: you must specify at least one action from: --hsts, --hpkp, --remote-settings, or --suffix-list" >&2 + usage + exit 13 +fi + +# per-product constants +case "${PRODUCT}" in + thunderbird) + COMMIT_AUTHOR="tbirdbld <tbirdbld@thunderbird.net>" + ;; + firefox) + ;; + *) + echo "Error: Invalid product specified" + usage + exit 14 + ;; +esac + +if [ "${REPODIR}" == "" ]; then + REPODIR="$(basename "${BRANCH}")" +fi + +case "${BRANCH}" in + mozilla-central|comm-central ) + HGREPO="https://${HGHOST}/${BRANCH}" + ;; + mozilla-*|comm-* ) + HGREPO="https://${HGHOST}/releases/${BRANCH}" + ;; + * ) + HGREPO="https://${HGHOST}/projects/${BRANCH}" + ;; +esac + +BROWSER_ARCHIVE="target.${PLATFORM_EXT}" +TESTS_ARCHIVE="target.common.tests.tar.gz" + +preflight_cleanup +if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ] +then + if [ "${USE_TC}" == "true" ]; then + download_shared_artifacts_from_tc + else + download_shared_artifacts_from_ftp + fi + unpack_artifacts +fi + +if [ "${DO_HSTS}" == "true" ]; then + if compare_hsts_files + then + HSTS_UPDATED=true + fi +fi +if [ "${DO_HPKP}" == "true" ]; then + if compare_hpkp_files + then + HPKP_UPDATED=true + fi +fi +if [ "${DO_REMOTE_SETTINGS}" == "true" ]; then + if compare_remote_settings_files + then + REMOTE_SETTINGS_UPDATED=true + fi +fi +if [ "${DO_SUFFIX_LIST}" == "true" ]; then + if compare_suffix_lists + then + SUFFIX_LIST_UPDATED=true + fi +fi + + +if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${REMOTE_SETTINGS_UPDATED}" == "false" ] && [ "${SUFFIX_LIST_UPDATED}" == "false" ]; then + echo "INFO: no updates required. Exiting." + exit 0 +else + if [ "${DRY_RUN}" == "true" ]; then + echo "INFO: Updates are available, not updating hg in dry-run mode." 
+ exit 2 + fi +fi + +clone_repo + +COMMIT_MESSAGE="No Bug, ${BRANCH} repo-update" +if [ "${HSTS_UPDATED}" == "true" ] +then + stage_hsts_files + COMMIT_MESSAGE="${COMMIT_MESSAGE} HSTS" +fi + +if [ "${HPKP_UPDATED}" == "true" ] +then + stage_hpkp_files + COMMIT_MESSAGE="${COMMIT_MESSAGE} HPKP" +fi + +if [ "${REMOTE_SETTINGS_UPDATED}" == "true" ] +then + stage_remote_settings_files + COMMIT_MESSAGE="${COMMIT_MESSAGE} remote-settings" +fi + +if [ "${SUFFIX_LIST_UPDATED}" == "true" ] +then + stage_tld_suffix_files + COMMIT_MESSAGE="${COMMIT_MESSAGE} tld-suffixes" +fi + + +if [ ${DONTBUILD} == true ]; then + COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)" +fi +if [ ${CLOSED_TREE} == true ]; then + COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE" +fi +if [ ${APPROVAL} == true ]; then + COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=repo-update" +fi + + +if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}" +then + push_repo +fi + +echo "All done" diff --git a/taskcluster/docker/periodic-updates/setup.sh b/taskcluster/docker/periodic-updates/setup.sh new file mode 100755 index 0000000000..334720b188 --- /dev/null +++ b/taskcluster/docker/periodic-updates/setup.sh @@ -0,0 +1,38 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -ve + +tooltool_fetch() { + cat >manifest.tt + python2.7 /setup/tooltool.py fetch + rm manifest.tt +} + +useradd -d /home/worker -s /bin/bash -m worker + +apt-get update -q +apt-get install -y --no-install-recommends \ + arcanist \ + bzip2 \ + ca-certificates \ + curl \ + jq \ + libdbus-glib-1-2 \ + libgtk-3-0 \ + libx11-xcb1 \ + libxml2-utils \ + libxt6 \ + python \ + python3 \ + shellcheck \ + unzip \ + wget \ + +apt-get clean + +. 
install-mercurial.sh + +rm -rf /setup diff --git a/taskcluster/docker/push-to-try/Dockerfile b/taskcluster/docker/push-to-try/Dockerfile new file mode 100644 index 0000000000..4725c7e48b --- /dev/null +++ b/taskcluster/docker/push-to-try/Dockerfile @@ -0,0 +1,22 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +FROM $DOCKER_IMAGE_PARENT + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +RUN apt-get update && \ + apt-get install \ + openssh-client + +COPY known_hosts /etc/ssh/ssh_known_hosts +COPY hgrc /etc/mercurial/hgrc.d/push-to-try.rc + +RUN hg clone -r 19a77a5d7b2a90ba225cf5015afd1ac44787abec https://hg.mozilla.org/hgcustom/version-control-tools /builds/worker/.mozbuild/version-control-tools + +# Create directory for scripts to put an hg config in, for configuring ssh +RUN mkdir /builds/worker/.config/hg +RUN chown -R worker:worker /builds/worker/.config/hg diff --git a/taskcluster/docker/push-to-try/hgrc b/taskcluster/docker/push-to-try/hgrc new file mode 100644 index 0000000000..d1a0318499 --- /dev/null +++ b/taskcluster/docker/push-to-try/hgrc @@ -0,0 +1,2 @@ +[extensions] +push-to-try = ~/.mozbuild/version-control-tools/hgext/push-to-try diff --git a/taskcluster/docker/push-to-try/known_hosts b/taskcluster/docker/push-to-try/known_hosts new file mode 100644 index 0000000000..1dc2e39e70 --- /dev/null +++ b/taskcluster/docker/push-to-try/known_hosts @@ -0,0 +1,2 @@ +hg.mozilla.org ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQDEsS2fK+TVkHl4QvvOHB6R5xxngsSYJR+pA4+xDhw4mZT9tgCRU9BBG3LazSLp6PUxnpfok78475/tx6Z8QwbTyUTmLElZ9Z9eJzjaGz/olHzQSWv0VB3kT+VZt0LK7pEuaG+Ph/qwxbtUZZOApYLEvu8uctDlS66doofxZylbsgl1kpRQ5HNu+/DgVo9K9dyMOm9OLoy4tXHSE5pofn4tKYdFRa2lt6OVtIP5/hKNb2i0+JmgM8C3bJTPvzJ4C8p2h83ro29XPUkNAfWrgD5CmAPPqHFXyefDCfdefcvI8B8Za9v4j4LynBDZHsGfII+wIfzyLIxy9K6Op6nqDZgCciBRdgxh4uZQINEhB/JJP03Pxo42ExdG28oU3aL8kRRTORT5ehFtImFfr9QESHaUnbVzBbU5DmOB5voYDMle3RgyY+RXJ7+4OxjLRnJvGks9QCn8QrIvabs/PTCnenI8+yDhMlLUkWTiR4JK8vDBYB2Rm++EmVsN9WjllfDNg3Aj1aYe8XiBD4tS+lg7Ur4rJL8X20H4yMvq56sQ0qfH8PCIQGyGL725E7Yuwj/MHvou5xrPM/Lqo/MtX5T2njrzkeaBmI/zFJaLwbphdrwmrzepbcim7OYJFF2pz8u56KDPD1pUQ7C1gEIAx/4mHiDOGCYooSvyfD+JRdjkZUZMiQ== +hg.mozilla.org ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGuRsL+/OrvIekv3iBST1zdVBLBuh/DMIj+ZN72N9a0g diff --git a/taskcluster/docker/python-dependency-update/Dockerfile b/taskcluster/docker/python-dependency-update/Dockerfile new file mode 100644 index 0000000000..cb4ac2a381 --- /dev/null +++ b/taskcluster/docker/python-dependency-update/Dockerfile @@ -0,0 +1,33 @@ +FROM ubuntu:bionic +MAINTAINER Ben Hearsum <bhearsum@mozilla.com> + +# Required software +ENV DEBIAN_FRONTEND noninteractive + +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh + +ADD setup.sh /setup/setup.sh + +RUN cd /setup && ./setup.sh + +COPY runme.sh / +COPY scripts/* /home/worker/scripts/ + +ENV HOME /home/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker 
+ENV LC_ALL C.UTF-8 +ENV LANG C.UTF-8 + +CMD ["/runme.sh"] diff --git a/taskcluster/docker/python-dependency-update/README.md b/taskcluster/docker/python-dependency-update/README.md new file mode 100644 index 0000000000..ab560623ee --- /dev/null +++ b/taskcluster/docker/python-dependency-update/README.md @@ -0,0 +1,15 @@ + +==Python Dependency Updates== + +This docker image contains the necessary dependencies and scripts to update +in-tree requirement.txt produced by `pip-compile --generate-hashes`, produce a +diff, and submit it to Phabricator. + + +==Quick Start== + +```sh +docker build -t python-dependency-update --no-cache --rm . + +docker run -e PYTHON3="1" -e BRANCH="mozilla-central" -e REQUIREMENTS_FILE="taskcluster/docker/funsize-update-generator/requirements.in" python-dependency-update +``` diff --git a/taskcluster/docker/python-dependency-update/runme.sh b/taskcluster/docker/python-dependency-update/runme.sh new file mode 100755 index 0000000000..395ec0cd64 --- /dev/null +++ b/taskcluster/docker/python-dependency-update/runme.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +set -xe + +# Things to be set by task definition. +# -b branch +# -f requirements_file +# -3 use python3 + + +test "${BRANCH}" +test "${REQUIREMENTS_FILE}" + +PIP_ARG="-2" +if [ -n "${PYTHON3}" ]; then + PIP_ARG="-3" +fi + +export ARTIFACTS_DIR="/home/worker/artifacts" +mkdir -p "$ARTIFACTS_DIR" + +# duplicate the functionality of taskcluster-lib-urls, but in bash.. 
+queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1" + +# Get Arcanist API token + +if [ -n "${TASK_ID}" ] +then + curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID" + ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}') +fi +if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster +then + set +x # Don't echo these + secrets_url="${TASKCLUSTER_PROXY_URL}/api/secrets/v1/secret/${ARC_SECRET}" + SECRET=$(curl "${secrets_url}") + TOKEN=$(echo "${SECRET}" | jq -r '.secret.token') +elif [ -n "${ARC_TOKEN}" ] # Allow for local testing. +then + TOKEN="${ARC_TOKEN}" +fi + +if [ -n "${TOKEN}" ] +then + cat >"${HOME}/.arcrc" <<END +{ + "hosts": { + "https://phabricator.services.mozilla.com/api/": { + "token": "${TOKEN}" + } + } +} +END + set -x + chmod 600 "${HOME}/.arcrc" +fi + +export HGPLAIN=1 + +# shellcheck disable=SC2086 +/home/worker/scripts/update_pipfiles.sh -b "${BRANCH}" -f "${REQUIREMENTS_FILE}" ${PIP_ARG} diff --git a/taskcluster/docker/python-dependency-update/scripts/update_pipfiles.sh b/taskcluster/docker/python-dependency-update/scripts/update_pipfiles.sh new file mode 100755 index 0000000000..c6f9b10701 --- /dev/null +++ b/taskcluster/docker/python-dependency-update/scripts/update_pipfiles.sh @@ -0,0 +1,136 @@ +#!/bin/bash + +set -e + +function usage { + cat <<EOF + +Usage: $(basename "$0") -h # Displays this usage/help text +Usage: $(basename "$0") -x # lists exit codes +Usage: $(basename "$0") -b branch -r REQUIREMENTS_FILE [-2] [-3] + +EOF +} + +BRANCH="" +PIP="" +COMMIT_AUTHOR='ffxbld <ffxbld@mozilla.com>' +REPODIR='' +HGHOST="hg.mozilla.org" +BASEDIR="${HOME}" +REQUIREMENTS_FILE="" + +HG="$(command -v hg)" + +# Clones an hg repo +function clone_repo { + cd "${BASEDIR}" + if [ ! 
-d "${REPODIR}" ]; then + CLONE_CMD="${HG} clone ${HGREPO} ${REPODIR}" + ${CLONE_CMD} + fi + + ${HG} -R "${REPODIR}" pull + ${HG} -R "${REPODIR}" update -C default +} + +# Push all pending commits to Phabricator +function push_repo { + cd "${REPODIR}" + if [ ! -r "${HOME}/.arcrc" ] + then + return 1 + fi + if ! ARC=$(command -v arc) + then + return 1 + fi + if [ -z "${REVIEWERS}" ] + then + return 1 + fi + # Clean up older review requests + # Turn Needs Review D624: No bug, Automated HSTS ... + # into D624 + for diff in $($ARC list | grep "Needs Review" | grep "${REQUIREMENTS_FILE} pip-update" | awk 'match($0, /D[0-9]+[^: ]/) { print substr($0, RSTART, RLENGTH) }') + do + echo "Removing old request $diff" + # There is no 'arc abandon', see bug 1452082 + echo '{"transactions": [{"type":"abandon"}], "objectIdentifier": "'"${diff}"'"}' | arc call-conduit differential.revision.edit + done + + $ARC diff --verbatim --reviewers "${REVIEWERS}" +} + +function update_requirements { + pushd "${REPODIR}/${1}" + pip-compile --generate-hashes "${2}" + popd +} + +# Main + +# Parse our command-line options. +while [ $# -gt 0 ]; do + case "$1" in + -h) usage; exit 0 ;; + -b) BRANCH="$2"; shift ;; + -r) REPODIR="$2"; shift ;; + -2) PIP="pip" ;; + -3) PIP="pip3" ;; + -f) REQUIREMENTS_FILE="$2"; shift ;; + -*) usage + exit 11 ;; + *) break ;; # terminate while loop + esac + shift +done + +# Must supply a code branch to work with. +if [ "${PIP}" == "" ]; then + echo "Error: You must specify a python version with -2 or -3" >&2 + usage + exit 12 +fi + +# Must supply a code branch to work with. +if [ "${BRANCH}" == "" ]; then + echo "Error: You must specify a branch with -b branchname." 
>&2 + usage + exit 13 +fi + +if [ "${REPODIR}" == "" ]; then + REPODIR="${BASEDIR}/$(basename "${BRANCH}")" +fi + +if [ "${BRANCH}" == "mozilla-central" ]; then + HGREPO="https://${HGHOST}/${BRANCH}" +elif [[ "${BRANCH}" == mozilla-* ]]; then + HGREPO="https://${HGHOST}/releases/${BRANCH}" +else + HGREPO="https://${HGHOST}/projects/${BRANCH}" +fi + +clone_repo + +${PIP} install pip-tools + +requirements_basefile="$(basename "${REQUIREMENTS_FILE}")" +requirements_dir="$(dirname "${REQUIREMENTS_FILE}")" +update_requirements "${requirements_dir}" "${requirements_basefile}" +requirements_newfile="${requirements_basefile%%.in}.txt" +DIFF_ARTIFACT="${ARTIFACTS_DIR}/${requirements_newfile}.diff" + +echo "INFO: diffing old/new ${requirements_newfile} into ${DIFF_ARTIFACT}" +${HG} -R "${REPODIR}" diff "${BASEDIR}/${BRANCH}/${requirements_dir}/${requirements_newfile}" | tee "${DIFF_ARTIFACT}" + +COMMIT_MESSAGE="No Bug, ${requirements_dir}/${requirements_newfile} pip-update." + +if ${HG} -R "${REPODIR}" commit -u "${COMMIT_AUTHOR}" -m "${COMMIT_MESSAGE}" +then + ${HG} -R "${REPODIR}" out + push_repo +fi + +echo "All done" diff --git a/taskcluster/docker/python-dependency-update/setup.sh b/taskcluster/docker/python-dependency-update/setup.sh new file mode 100755 index 0000000000..99d59ed7a9 --- /dev/null +++ b/taskcluster/docker/python-dependency-update/setup.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +set -ve + +tooltool_fetch() { + cat >manifest.tt + python2.7 /setup/tooltool.py fetch + rm manifest.tt +} + +useradd -d /home/worker -s /bin/bash -m worker + +apt-get update -q +apt-get install -y --no-install-recommends \ + arcanist \ + curl \ + gcc \ + jq \ + libdpkg-perl \ + liblzma-dev \ + python \ + python-dev \ + python-pip \ + python3.8 \ + python3.8-dev \ + python3-setuptools \ + python3-wheel \ + python3-pip + +apt-get clean + +. install-mercurial.sh + +rm -rf /setup diff --git a/taskcluster/docker/recipes/common.sh b/taskcluster/docker/recipes/common.sh new file mode 100644 index 0000000000..dbf0503887 --- /dev/null +++ b/taskcluster/docker/recipes/common.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +tooltool_fetch() { + cat >manifest.tt + python2.7 /setup/tooltool.py fetch + rm manifest.tt +} diff --git a/taskcluster/docker/recipes/debian-build-system-setup.sh b/taskcluster/docker/recipes/debian-build-system-setup.sh new file mode 100644 index 0000000000..de4e04cee7 --- /dev/null +++ b/taskcluster/docker/recipes/debian-build-system-setup.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +cd /setup || exit + +. /setup/common.sh +. 
/setup/install-mercurial.sh + +rm -rf /setup diff --git a/taskcluster/docker/recipes/debian-test-system-setup.sh b/taskcluster/docker/recipes/debian-test-system-setup.sh new file mode 100644 index 0000000000..410c0e874d --- /dev/null +++ b/taskcluster/docker/recipes/debian-test-system-setup.sh @@ -0,0 +1,93 @@ +#!/usr/bin/env bash + +set -ve + +test "$(whoami)" == 'root' + +mkdir -p /setup +cd /setup + +apt_packages=() + +apt_packages+=('autoconf2.13') +apt_packages+=('bluez-cups') +apt_packages+=('build-essential') +apt_packages+=('ca-certificates') +apt_packages+=('ccache') +apt_packages+=('curl') +apt_packages+=('fonts-kacst') +apt_packages+=('fonts-kacst-one') +apt_packages+=('fonts-liberation') +apt_packages+=('fonts-stix') +apt_packages+=('fonts-unfonts-core') +apt_packages+=('fonts-unfonts-extra') +apt_packages+=('fonts-vlgothic') +apt_packages+=('g++-multilib') +apt_packages+=('gcc-multilib') +apt_packages+=('gir1.2-gnomebluetooth-1.0') +apt_packages+=('git') +apt_packages+=('gnome-keyring') +apt_packages+=('libasound2-dev') +apt_packages+=('libcanberra-pulse') +apt_packages+=('libcurl4-openssl-dev') +apt_packages+=('libdbus-1-dev') +apt_packages+=('libdbus-glib-1-dev') +apt_packages+=('libgconf2-dev') +apt_packages+=('libgtk2.0-dev') +apt_packages+=('libiw-dev') +apt_packages+=('libnotify-dev') +apt_packages+=('libpulse-dev') +apt_packages+=('libsox-fmt-alsa') +apt_packages+=('libxt-dev') +apt_packages+=('libxxf86vm1') +apt_packages+=('llvm') +apt_packages+=('llvm-dev') +apt_packages+=('llvm-runtime') +apt_packages+=('locales') +apt_packages+=('locales-all') +apt_packages+=('net-tools') +apt_packages+=('qemu-kvm') +apt_packages+=('rlwrap') +apt_packages+=('screen') +apt_packages+=('software-properties-common') +apt_packages+=('sudo') +apt_packages+=('tar') +apt_packages+=('ttf-dejavu') +apt_packages+=('unzip') +apt_packages+=('uuid') +apt_packages+=('wget') +apt_packages+=('xvfb') +apt_packages+=('yasm') +apt_packages+=('zip') + +# use Ubuntu's Python-2.7 
(2.7.3 on Precise) +apt_packages+=('python-dev') +apt_packages+=('python-pip') + +apt-get update +# This allows packages to be installed without human interaction +export DEBIAN_FRONTEND=noninteractive +apt-get install -y -f "${apt_packages[@]}" + +dpkg-reconfigure locales + +. /setup/common.sh +. /setup/install-mercurial.sh + +# pip 19.3 is causing errors building the docker image, pin to 19.2.3 for now. +# See https://github.com/pypa/pip/issues/7206 +pip install --upgrade pip==19.2.3 +hash -r +pip install virtualenv==15.2.0 + +# clean up +apt-get -y autoremove + +# We don't need no docs! +rm -rf /usr/share/help /usr/share/doc /usr/share/man + +cd / +rm -rf /setup ~/.ccache ~/.cache ~/.npm +apt-get clean +apt-get autoclean +rm -f "$0" diff --git a/taskcluster/docker/recipes/dot-config/pip/pip.conf b/taskcluster/docker/recipes/dot-config/pip/pip.conf new file mode 100644 index 0000000000..73c2b2a52c --- /dev/null +++ b/taskcluster/docker/recipes/dot-config/pip/pip.conf @@ -0,0 +1,2 @@ +[global] +disable-pip-version-check = true diff --git a/taskcluster/docker/recipes/hgrc b/taskcluster/docker/recipes/hgrc new file mode 100755 index 0000000000..f6a2f6643c --- /dev/null +++ b/taskcluster/docker/recipes/hgrc @@ -0,0 +1,33 @@ +# By default the progress bar starts after 3s and updates every 0.1s. We +# change this so it shows and updates every 1.0s. +# We also tell progress to assume a TTY is present so updates are printed +# even if there is no known TTY. +[progress] +delay = 1.0 +refresh = 1.0 +assume-tty = true + +[extensions] +share = +sparse = +robustcheckout = /usr/local/mercurial/robustcheckout.py + +[hostsecurity] +# When running a modern Python, Mercurial will default to TLS 1.1+. +# When running on a legacy Python, Mercurial will default to TLS 1.0+. +# There is no good reason we shouldn't be running a modern Python +# capable of speaking TLS 1.2. And the only Mercurial servers we care +# about should be running TLS 1.2. So make TLS 1.2 the minimum. 
+minimumprotocol = tls1.2 + +# Settings to make 1-click loaners more useful. +[extensions] +histedit = +rebase = + +[diff] +git = 1 +showfunc = 1 + +[pager] +pager = LESS=FRSXQ less diff --git a/taskcluster/docker/recipes/install-mercurial.sh b/taskcluster/docker/recipes/install-mercurial.sh new file mode 100755 index 0000000000..2e394c6775 --- /dev/null +++ b/taskcluster/docker/recipes/install-mercurial.sh @@ -0,0 +1,134 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script installs and configures Mercurial. + +set -e + +# Detect OS. +if [ -f /etc/lsb-release ]; then + # Disabled so linting works on Mac + # shellcheck disable=SC1091 + . /etc/lsb-release + + if [ "${DISTRIB_ID}" = "Ubuntu" ] && [[ "${DISTRIB_RELEASE}" = "20.04" ]]; then + HG_APT=1 + elif [ "${DISTRIB_ID}" = "Ubuntu" ] && [[ "${DISTRIB_RELEASE}" = "16.04" || "${DISTRIB_RELEASE}" = "17.10" || "${DISTRIB_RELEASE}" = "18.04" ]] + then + HG_DEB=1 + HG_DIGEST=54a215232a340139707a968b58943c2903a8297f0da32f96622d1acab47de6013a5b96d2ca4ba241b1fee142b4098a6cdd236b308a1657c31f42807d7385d327 + HG_SIZE=278440 + HG_FILENAME=mercurial_4.8.1_amd64.deb + + HG_COMMON_DIGEST=5577fec8d0f6643d17751b3f6be76b0c2bb888ae1920a8b085245e05110e3d5cfe1c4e9d51e334ab0dd0865fe553c63c704e72852e00b71eb668980cb6b33fa4 + HG_COMMON_SIZE=2439436 + HG_COMMON_FILENAME=mercurial-common_4.8.1_all.deb + elif [ "${DISTRIB_ID}" = "Ubuntu" ] && [ "${DISTRIB_RELEASE}" = "12.04" ] + then + echo "Ubuntu 12.04 not supported" + exit 1 + fi + + CERT_PATH=/etc/ssl/certs/ca-certificates.crt + +elif [ -f /etc/os-release ]; then + # Disabled so linting works on Mac + # shellcheck disable=SC1091 + . 
/etc/os-release + + if [ "${ID}" = "debian" ]; then + if [ -f /usr/bin/pip2 ]; then + PIP_PATH=/usr/bin/pip2 + elif [ -f /usr/bin/pip ]; then + # Versions of debian that don't have pip2 have pip pointing to the python2 version. + PIP_PATH=/usr/bin/pip + else + echo "We currently require Python 2.7 and pip to run Mercurial" + exit 1 + fi + else + echo "Unsupported debian-like system with ID '${ID}' and VERSION_ID '${VERSION_ID}'" + exit 1 + fi + + CERT_PATH=/etc/ssl/certs/ca-certificates.crt + +elif [ -f /etc/centos-release ]; then + CENTOS_VERSION="$(rpm -q --queryformat '%{VERSION}' centos-release)" + if [ "${CENTOS_VERSION}" = "6" ]; then + if [ -f /usr/bin/pip2.7 ]; then + PIP_PATH=/usr/bin/pip2.7 + else + echo "We currently require Python 2.7 and /usr/bin/pip2.7 to run Mercurial" + exit 1 + fi + else + echo "Unsupported CentOS version: ${CENTOS_VERSION}" + exit 1 + fi + + CERT_PATH=/etc/ssl/certs/ca-bundle.crt +fi + +if [ -n "${HG_APT}" ]; then +apt-get install -y mercurial +elif [ -n "${HG_DEB}" ]; then +tooltool_fetch <<EOF +[ + { + "size": ${HG_SIZE}, + "digest": "${HG_DIGEST}", + "algorithm": "sha512", + "filename": "${HG_FILENAME}" + }, + { + "size": ${HG_COMMON_SIZE}, + "digest": "${HG_COMMON_DIGEST}", + "algorithm": "sha512", + "filename": "${HG_COMMON_FILENAME}" + } +] +EOF + + dpkg -i --auto-deconfigure ${HG_COMMON_FILENAME} ${HG_FILENAME} +elif [ -n "${HG_RPM}" ]; then +tooltool_fetch <<EOF +[ + { + "size": ${HG_SIZE}, + "digest": "${HG_DIGEST}", + "algorithm": "sha512", + "filename": "${HG_FILENAME}" + } +] +EOF + + rpm -i ${HG_FILENAME} +elif [ -n "${PIP_PATH}" ]; then +tooltool_fetch <<EOF +[ + { + "size": 6869733, + "digest": "a4485c22f9bb0bb752bf42941f613cb3542c66cbec5d7d49be2090ac544f5dca0f476e4535a56e3f4f4f5fc02fb12739e6d1c7b407264fc2ba4b19b0230b9f93", + "algorithm": "sha512", + "filename": "mercurial-4.8.1.tar.gz" + } +] +EOF + + ${PIP_PATH} install mercurial-4.8.1.tar.gz +else + echo "Do not know how to install Mercurial on this OS" + exit 1 
+fi + +chmod 644 /usr/local/mercurial/robustcheckout.py + +cat >/etc/mercurial/hgrc.d/cacerts.rc <<EOF +[web] +cacerts = ${CERT_PATH} +EOF + +chmod 644 /etc/mercurial/hgrc.d/cacerts.rc diff --git a/taskcluster/docker/recipes/install-node.sh b/taskcluster/docker/recipes/install-node.sh new file mode 100644 index 0000000000..5b480a603f --- /dev/null +++ b/taskcluster/docker/recipes/install-node.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script installs Node v10. +# XXX For now, this should match the version installed in +# taskcluster/scripts/misc/repack-node.sh. Later we'll get the ESLint builder +# to use the linux64-node toolchain directly. + +wget -O node.xz --progress=dot:mega https://nodejs.org/dist/v10.23.1/node-v10.23.1-linux-x64.tar.xz + +echo '207e5ec77ca655ba6fcde922d6b329acbb09898b0bd793ccfcce6c27a36fdff0 node.xz' | sha256sum -c +tar -C /usr/local -xJ --strip-components 1 < node.xz +node -v # verify +npm -v diff --git a/taskcluster/docker/recipes/prepare_openjdk.sh b/taskcluster/docker/recipes/prepare_openjdk.sh new file mode 100755 index 0000000000..828c8dc697 --- /dev/null +++ b/taskcluster/docker/recipes/prepare_openjdk.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set -x + +# Debian 10 doesn't have openjdk-8, so add the Debian 9 repository, which contains it. +if grep -q ^10\\. 
/etc/debian_version; then + sed s/buster/stretch/ /etc/apt/sources.list | tee /etc/apt/sources.list.d/stretch.list + apt-get update +fi diff --git a/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh b/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh new file mode 100644 index 0000000000..03ed14be83 --- /dev/null +++ b/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh @@ -0,0 +1,167 @@ +#!/usr/bin/env bash + +set -ve + +test "$(whoami)" == 'root' + +# To speed up docker image build times as well as number of network/disk I/O +# build a list of packages to be installed nad call it in one go. +apt_packages=() + +apt_packages+=('autoconf2.13') +apt_packages+=('bluez-cups') +apt_packages+=('build-essential') +apt_packages+=('ca-certificates') +apt_packages+=('ccache') +apt_packages+=('compiz') +apt_packages+=('curl') +apt_packages+=('fontconfig') +apt_packages+=('fonts-kacst') +apt_packages+=('fonts-kacst-one') +apt_packages+=('fonts-liberation') +apt_packages+=('fonts-stix') +apt_packages+=('fonts-unfonts-core') +apt_packages+=('fonts-unfonts-extra') +apt_packages+=('fonts-vlgothic') +apt_packages+=('g++-multilib') +apt_packages+=('gcc-multilib') +apt_packages+=('gir1.2-gnomebluetooth-1.0') +apt_packages+=('git') +apt_packages+=('gnome-icon-theme') +apt_packages+=('gstreamer1.0-gtk3') +apt_packages+=('gstreamer1.0-plugins-base') +apt_packages+=('gstreamer1.0-plugins-good') +apt_packages+=('gstreamer1.0-tools') +apt_packages+=('gstreamer1.0-pulseaudio') +apt_packages+=('language-pack-en-base') +apt_packages+=('libc6-dbg') +apt_packages+=('libasound2-dev') +apt_packages+=('libavcodec-extra57') +apt_packages+=('libcanberra-gtk3-module') +apt_packages+=('libcanberra-pulse') +apt_packages+=('libcurl4-openssl-dev') +apt_packages+=('libdbus-1-dev') +apt_packages+=('libdbus-glib-1-dev') +apt_packages+=('libfreetype6') +apt_packages+=('libgconf2-dev') +apt_packages+=('libgl1-mesa-dri') +apt_packages+=('libgl1-mesa-glx') 
+apt_packages+=('libgstreamer-plugins-base1.0-dev') +apt_packages+=('libgstreamer1.0-dev') +apt_packages+=('libgtk2.0-dev') +apt_packages+=('libgtk-3-0') +apt_packages+=('libiw-dev') +apt_packages+=('libx11-xcb1') +apt_packages+=('libxcb1') +apt_packages+=('libxcb-render0') +apt_packages+=('libxcb-shm0') +apt_packages+=('libxcb-glx0') +apt_packages+=('libxcb-shape0') +apt_packages+=('libnotify-dev') +apt_packages+=('libpulse0') +apt_packages+=('libpulse-dev') +apt_packages+=('libxt-dev') +apt_packages+=('libxxf86vm1') +apt_packages+=('llvm') +apt_packages+=('llvm-dev') +apt_packages+=('llvm-runtime') +apt_packages+=('mesa-common-dev') +apt_packages+=('net-tools') +apt_packages+=('pulseaudio') +apt_packages+=('pulseaudio-module-bluetooth') +apt_packages+=('pulseaudio-module-gconf') +apt_packages+=('python-dev') +apt_packages+=('python-pip') +apt_packages+=('python3-pip') +apt_packages+=('qemu-kvm') +apt_packages+=('rlwrap') +apt_packages+=('screen') +apt_packages+=('software-properties-common') +apt_packages+=('sudo') +apt_packages+=('ttf-dejavu') +apt_packages+=('ubuntu-desktop') +apt_packages+=('unzip') +apt_packages+=('uuid') +apt_packages+=('wget') +apt_packages+=('xvfb') +apt_packages+=('xwit') +apt_packages+=('yasm') +apt_packages+=('zip') + +# xvinfo for test-linux.sh to monitor Xvfb startup +apt_packages+=('x11-utils') + +# Bug 1232407 - this allows the user to start vnc +apt_packages+=('x11vnc') + +# Bug 1176031 - need `xset` to disable screensavers +apt_packages+=('x11-xserver-utils') + +# Build a list of packages to install from the multiverse repo. +apt_packages+=('ubuntu-restricted-extras') + +# APT update takes very long on Ubuntu. Run it at the last possible minute. +apt-get update + +# This allows ubuntu-desktop to be installed without human interaction. +# Also force the cleanup after installation of packages to reduce image size. 
+export DEBIAN_FRONTEND=noninteractive +apt-get install -y --allow-downgrades -f "${apt_packages[@]}" + +# Enable i386 packages +dpkg --add-architecture i386 +apt-get update + +# Make sure we have libraries for 32-bit tests +apt_packages=() +apt_packages+=('libavcodec-extra57:i386') +apt_packages+=('libdbus-glib-1-2:i386') +apt_packages+=('libpulse0:i386') +apt_packages+=('libxt6:i386') +apt_packages+=('libxtst6:i386') +apt_packages+=('libsecret-1-0:i386') +apt_packages+=('libgtk2.0-0:i386') +apt_packages+=('libgtk-3-0:i386') +apt_packages+=('libx11-xcb1:i386') +apt_packages+=('libxcb1:i386') + +apt-get install -y --allow-downgrades -f "${apt_packages[@]}" +rm -rf /var/lib/apt/lists/* + +# Build a list of packages to purge from the image. +apt_packages=() +apt_packages+=('*cheese*') +apt_packages+=('example-content') +apt_packages+=('git') +apt_packages+=('gnome-calendar') +apt_packages+=('gnome-initial-setup') +apt_packages+=('gnome-mahjongg') +apt_packages+=('gnome-mines') +apt_packages+=('gnome-sudoku') +apt_packages+=('libx11-doc') +apt_packages+=('manpages-dev') +apt_packages+=('orca') +apt_packages+=('rhythmbox') +apt_packages+=('thunderbird') +apt_packages+=('ubuntu-release-upgrader*') +apt_packages+=('update-manager-core') +apt_packages+=('update-manager') +apt_packages+=('yelp') + +# Purge unnecessary packages +apt-get purge -y -f "${apt_packages[@]}" + +# Clear apt cache one last time +rm -rf /var/cache/apt/archives + +# We don't need no docs! 
+rm -rf /usr/share/help /usr/share/doc /usr/share/man + +# Remove all locale files other than en_US.UTF-8 +rm -rf /usr/share/locale/ /usr/share/locale-langpack/ /usr/share/locales/ + +# Further cleanup +apt-get -y autoremove +apt-get clean +apt-get autoclean +rm -f "$0" diff --git a/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh b/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh new file mode 100644 index 0000000000..d5b78985c2 --- /dev/null +++ b/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +set -ve + +test "$(whoami)" == 'root' + +cd /setup + +# Install tooltool, mercurial and node now that dependencies are in place. +. /setup/common.sh +. /setup/install-mercurial.sh +. /setup/install-node.sh + +# Upgrade pip and install virtualenv to specified versions. +pip install --upgrade pip==19.2.3 +hash -r +pip install virtualenv==15.2.0 + +pip install zstandard==0.13.0 +pip3 install zstandard==0.13.0 + +pip install psutil==5.7.0 +pip3 install psutil==5.7.0 + +# Cleanup +cd / +rm -rf /setup ~/.ccache ~/.cache ~/.npm +rm -f "$0" diff --git a/taskcluster/docker/recipes/xvfb.sh b/taskcluster/docker/recipes/xvfb.sh new file mode 100644 index 0000000000..6e0e79f7d4 --- /dev/null +++ b/taskcluster/docker/recipes/xvfb.sh @@ -0,0 +1,75 @@ +#! /bin/bash -x + +set -x + +fail() { + echo # make sure error message is on a new line + echo "[xvfb.sh:error]" "${@}" + exit 1 +} + +cleanup_xvfb() { + # When you call this script with START_VNC or TASKCLUSTER_INTERACTIVE + # we make sure we do not kill xvfb so you do not lose your connection + local xvfb_pid=`pidof Xvfb` + local vnc=${START_VNC:-false} + local interactive=${TASKCLUSTER_INTERACTIVE:-false} + if [ -n "$xvfb_pid" ] && [[ $vnc == false ]] && [[ $interactive == false ]] ; then + kill $xvfb_pid || true + screen -XS xvfb quit || true + fi +} + +# Attempt to start xvfb in a screen session with the given resolution and display +# number. 
Up to 5 attempts will be made to start xvfb with a short delay +# between retries +try_xvfb() { + screen -dmS xvfb Xvfb :$2 -nolisten tcp -screen 0 $1 \ + > ~/artifacts/xvfb/xvfb.log 2>&1 + export DISPLAY=:$2 + + # Only error code 255 matters, because it signifies that no + # display could be opened. As long as we can open the display + # tests should work. We'll retry a few times with a sleep before + # failing. + local retry_count=0 + local max_retries=5 + xvfb_test=0 + until [ $retry_count -gt $max_retries ]; do + xvinfo || xvfb_test=$? + if [ $xvfb_test != 255 ]; then + retry_count=$(($max_retries + 1)) + else + retry_count=$(($retry_count + 1)) + echo "Failed to start Xvfb, retry: $retry_count" + sleep 2 + fi + done + if [ $xvfb_test == 255 ]; then + return 1 + else + return 0 + fi +} + +start_xvfb() { + set +e + mkdir -p ~/artifacts/xvfb + local retry_count=0 + local max_retries=2 + local success=1 + until [ $retry_count -gt $max_retries ]; do + try_xvfb $1 $2 + success=$? + if [ $success -eq 0 ]; then + retry_count=$(($max_retries + 1)) + else + retry_count=$(($retry_count + 1)) + sleep 10 + fi + done + set -e + if [ $success -eq 1 ]; then + fail "Could not start xvfb after ${max_retries} attempts" + fi +} diff --git a/taskcluster/docker/static-analysis-build/Dockerfile b/taskcluster/docker/static-analysis-build/Dockerfile new file mode 100644 index 0000000000..3fcb1f65fc --- /dev/null +++ b/taskcluster/docker/static-analysis-build/Dockerfile @@ -0,0 +1,76 @@ +FROM $DOCKER_IMAGE_PARENT +LABEL maintainer="Andi-Bogdan Postelnicu <andi@mozilla.com>" + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV XZ_OPT=-T0 + +# %include taskcluster/docker/recipes/prepare_openjdk.sh +COPY topsrcdir/taskcluster/docker/recipes/prepare_openjdk.sh /tmp/prepare_openjdk.sh +RUN /tmp/prepare_openjdk.sh && rm /tmp/prepare_openjdk.sh + +ARG TASKCLUSTER_ROOT_URL +ARG DOCKER_IMAGE_PACKAGES +RUN 
/usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES + +RUN apt-get update && \ + apt-get install \ + autoconf2.13 \ + automake \ + bison \ + bzip2 \ + cmake \ + flex \ + curl \ + libsqlite3-dev \ + file \ + gawk \ + gcc-multilib \ + gnupg \ + jq \ + libc6-dev \ + libstdc++-8-dev \ + libmpfr-dev \ + nasm \ + openjdk-8-jdk-headless \ + pkg-config \ + patch \ + p7zip-full \ + procps \ + python-dev \ + python-pip \ + python-setuptools \ + python-virtualenv \ + python3-dev \ + rsync \ + screen \ + tar \ + unzip \ + uuid \ + valgrind \ + wget \ + yasm \ + zip \ + zlib1g-dev \ + x11-utils \ + xvfb \ + linux-libc-dev \ + libdbus-glib-1-dev \ + libfontconfig1-dev \ + libfreetype6-dev \ + libgconf2-dev \ + libgmp-dev \ + libgtk-3-dev \ + libgtk2.0-dev \ + libpango1.0-dev \ + libpulse-dev \ + libx11-xcb-dev \ + libxt-dev \ + lib32z1 \ + patchelf + +# Install opam 2 +RUN curl -sL https://github.com/ocaml/opam/releases/download/2.0.3/opam-2.0.3-x86_64-linux > /usr/bin/opam && \ + chmod +x /usr/bin/opam diff --git a/taskcluster/docker/system-symbols-mac/Dockerfile b/taskcluster/docker/system-symbols-mac/Dockerfile new file mode 100644 index 0000000000..21a47b594d --- /dev/null +++ b/taskcluster/docker/system-symbols-mac/Dockerfile @@ -0,0 +1,42 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Gabriele Svelto <gsvelto@mozilla.com> + +RUN mkdir -p /builds +RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +# We need to declare all potentially cache volumes as caches. Also, +# making high I/O paths volumes increase I/O throughput because of +# AUFS slowness. 
+VOLUME /builds/worker/checkouts + +RUN apt-get update && \ + apt-get install --no-install-recommends -y python3-dev python3-pip python3-requests-futures python3-setuptools curl pax build-essential libxml2-dev libssl-dev zlib1g-dev libusb-dev cmake libbz2-dev libpng-dev wget zip liblzma-dev && \ + rm -rf /var/lib/apt/lists/* + +RUN mkdir /opt/data-reposado/ +RUN chown -R worker.worker /opt/data-reposado/ + +COPY setup.sh /setup/ +COPY requirements.txt /setup/ +# %include tools/crashreporter/system-symbols/mac + +RUN /bin/sh /setup/setup.sh + +ADD https://github.com/marco-c/breakpad-mac-update-symbols/raw/21221733edfbcac49d40e50fc219fab7d17437a0/lipo /usr/local/bin/ +RUN chmod +x /usr/local/bin/lipo + +COPY topsrcdir/tools/crashreporter/system-symbols/mac /builds/worker/mac-update-symbols +WORKDIR /builds/worker/mac-update-symbols + +# Set up first-run experience for interactive mode +# %include taskcluster/docker/debian10-test/motd +COPY topsrcdir/taskcluster/docker/debian10-test/motd /etc/taskcluster-motd +# %include taskcluster/docker/debian10-test/taskcluster-interactive-shell +COPY topsrcdir/taskcluster/docker/debian10-test/taskcluster-interactive-shell /bin/taskcluster-interactive-shell +RUN chmod +x /bin/taskcluster-interactive-shell + +RUN chown -R worker:worker /builds/worker + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/system-symbols-mac/requirements.txt b/taskcluster/docker/system-symbols-mac/requirements.txt new file mode 100644 index 0000000000..775756115c --- /dev/null +++ b/taskcluster/docker/system-symbols-mac/requirements.txt @@ -0,0 +1,2 @@ +futures +requests diff --git a/taskcluster/docker/system-symbols-mac/setup.sh b/taskcluster/docker/system-symbols-mac/setup.sh new file mode 100644 index 0000000000..f2cefd6b44 --- /dev/null +++ b/taskcluster/docker/system-symbols-mac/setup.sh @@ -0,0 +1,47 @@ +#!/bin/sh +set -v -e -x + +ncpu=-j$(grep -c ^processor /proc/cpuinfo) + +WORK=/setup/ 
+cd $WORK +git clone --depth=1 --single-branch -b system-symbols-mac https://github.com/gabrielesvelto/xar xar +cd xar/xar +./autogen.sh --prefix=/builds/worker +make "$ncpu" && make install + +cd $WORK +git clone --depth=1 --single-branch -b system-symbols-mac https://github.com/gabrielesvelto/libdmg-hfsplus.git +cd libdmg-hfsplus +cmake . +make "$ncpu" dmg-bin hfsplus +# `make install` installs way too much stuff +cp dmg/dmg hfs/hfsplus /builds/worker/bin +strip /builds/worker/bin/dmg /builds/worker/bin/hfsplus + +cd $WORK +git clone --depth=1 --single-branch https://chromium.googlesource.com/chromium/tools/depot_tools.git +export PATH=$PATH:$PWD/depot_tools +mkdir breakpad +cd breakpad +fetch breakpad +cd src +touch README +./configure +make "$ncpu" src/tools/mac/dump_syms/dump_syms_mac +# `make install` is broken because there are two dump_syms binaries. +cp src/tools/mac/dump_syms/dump_syms_mac /builds/worker/bin +strip /builds/worker/bin/dump_syms_mac + +pip3 install --no-cache-dir git+https://github.com/gabrielesvelto/reposado + +python3 /usr/local/bin/repoutil --configure <<EOF +/opt/data-reposado/html/ +/opt/data-reposado/metadata/ +http://example.com/ +EOF + +pip3 install --no-cache-dir -r /setup/requirements.txt + +cd / +rm -rf /setup diff --git a/taskcluster/docker/system-symbols-win/Dockerfile b/taskcluster/docker/system-symbols-win/Dockerfile new file mode 100644 index 0000000000..1b301418c1 --- /dev/null +++ b/taskcluster/docker/system-symbols-win/Dockerfile @@ -0,0 +1,32 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Gabriele Svelto <gsvelto@mozilla.com> + +RUN mkdir -p /builds +RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +# We need to declare all potentially cache volumes as caches. Also, +# making high I/O paths volumes increase I/O throughput because of +# AUFS slowness. 
+VOLUME /builds/worker/checkouts + +COPY requirements.txt /builds/worker/requirements.txt +RUN apt-get update && \ + apt-get install --no-install-recommends -y gcc python3-dev python3-pip && \ + rm -rf /var/lib/apt/lists/* +RUN pip3 install --no-cache-dir --require-hashes -r /builds/worker/requirements.txt + +# %include tools/crashreporter/system-symbols/win +COPY topsrcdir/tools/crashreporter/system-symbols/win /builds/worker + +# Set up first-run experience for interactive mode +# %include taskcluster/docker/debian10-test/motd +COPY topsrcdir/taskcluster/docker/debian10-test/motd /etc/taskcluster-motd +# %include taskcluster/docker/debian10-test/taskcluster-interactive-shell +COPY topsrcdir/taskcluster/docker/debian10-test/taskcluster-interactive-shell /bin/taskcluster-interactive-shell +RUN chmod +x /bin/taskcluster-interactive-shell + +RUN chown -R worker:worker /builds/worker + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/system-symbols-win/requirements.txt b/taskcluster/docker/system-symbols-win/requirements.txt new file mode 100644 index 0000000000..d042627f66 --- /dev/null +++ b/taskcluster/docker/system-symbols-win/requirements.txt @@ -0,0 +1,13 @@ +aiodns==2.0.0 --hash=sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de +aiofile==1.5.2 --hash=sha256:312d50ed7e646a40ab2a5457fdf382870aca926f956921ab8c7ab72c3922f372 +aiohttp==3.6.2 --hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd +async-timeout==3.0.1 --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 +attrs==20.2.0 --hash=sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc +cffi==1.14.3 --hash=sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579 +chardet==3.0.4 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 +idna==2.10 --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 
+multidict==4.7.6 --hash=sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255 +pycares==3.1.1 --hash=sha256:0c5bd1f6f885a219d5e972788d6eef7b8043b55c3375a845e5399638436e0bba +pycparser==2.20 --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 +typing_extensions==3.7.4.3 --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 +yarl==1.6.2 --hash=sha256:c45b49b59a5724869899798e1bbd447ac486215269511d3b76b4c235a1b766b6 diff --git a/taskcluster/docker/toolchain-build/Dockerfile b/taskcluster/docker/toolchain-build/Dockerfile new file mode 100644 index 0000000000..e3660bb4b5 --- /dev/null +++ b/taskcluster/docker/toolchain-build/Dockerfile @@ -0,0 +1,53 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +ENV XZ_OPT=-T0 + +ARG DOCKER_IMAGE_PACKAGES +ARG TASKCLUSTER_ROOT_URL +RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES + +RUN apt-get update && \ + apt-get install \ + autoconf \ + autoconf2.13 \ + automake \ + bison \ + build-essential \ + curl \ + cmake \ + flex \ + gawk \ + gcc-multilib \ + git \ + gnupg \ + jq \ + libbz2-dev \ + libcurl4-openssl-dev \ + libssl-dev \ + libxml2-dev \ + libtool \ + libucl-dev \ + ninja-build \ + openssh-client \ + p7zip-full \ + pkg-config \ + procps \ + python-pip \ + python-setuptools \ + python-virtualenv \ + python3-distutils-extra \ + python3-requests \ + python3-pytoml \ + subversion \ + tar \ + unzip \ + uuid \ + uuid-dev \ + wget \ + zip \ + zlib1g-dev diff --git a/taskcluster/docker/ubuntu1804-test-base/Dockerfile b/taskcluster/docker/ubuntu1804-test-base/Dockerfile new file mode 100644 index 0000000000..ebef3d76df --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test-base/Dockerfile @@ -0,0 +1,22 @@ +# This docker image only sets up packages on top of the original Ubuntu image. 
+# Everything else is set up in a child docker image derived from this one, such +# that changes to our scripts don't trigger the rebuild of this base image, +# possibly leading to breakage. (See bug 1503756 and bug 1678451 for examples +# of such recent breakages) +FROM ubuntu:bionic-20200112 +MAINTAINER Edwin Takahashi <egao@mozilla.com> + +ADD apt-prefs /etc/apt/preferences.d/custom + +# %include taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh +ADD topsrcdir/taskcluster/docker/recipes/ubuntu1804-test-system-setup-base.sh /setup/system-setup.sh +RUN bash /setup/system-setup.sh + +# gnome-keyring-daemon is configured to have the IPC_LOCK capability (to lock pages with secrets in +# memory), but docker isn't run with that capability granted. So, if we were to try running +# gnome-keyring-daemon without first clearing the capability, it would just exit with the message +# "Operation not permitted". Luckily it doesn't actually require the capability. +RUN setcap -r /usr/bin/gnome-keyring-daemon + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/ubuntu1804-test-base/apt-prefs b/taskcluster/docker/ubuntu1804-test-base/apt-prefs new file mode 100644 index 0000000000..9a6882c7c2 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test-base/apt-prefs @@ -0,0 +1,18 @@ +# Make bionic-security and bionic take precedence over bionic-updates for +# libc packages to the point of forcing downgrades. This works around the +# update somehow turning a gst-launch fatal warning into a crash randomly.
+Package: libc-dev* +Pin: release a=bionic-security +Pin-Priority: 1001 + +Package: libc6* +Pin: release a=bionic-security +Pin-Priority: 1001 + +Package: libc-dev* +Pin: release a=bionic +Pin-Priority: 1001 + +Package: libc6* +Pin: release a=bionic +Pin-Priority: 1001 diff --git a/taskcluster/docker/ubuntu1804-test/Dockerfile b/taskcluster/docker/ubuntu1804-test/Dockerfile new file mode 100644 index 0000000000..7cef69787f --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/Dockerfile @@ -0,0 +1,106 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Edwin Takahashi <egao@mozilla.com> + +# Create necessary directories and worker user account +RUN mkdir -p /builds && mkdir -p artifacts +RUN id worker || useradd -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +# We need to declare all potentially cache volumes as caches. Also, +# making high I/O paths volumes increases I/O throughput because of +# AUFS slowness. +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/tooltool-cache +VOLUME /builds/worker/workspace + +# In test.sh we accept START_VNC to start a vnc daemon. +# Exposing this port allows it to work.
+EXPOSE 5900 + +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/common.sh +ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh + +# %include taskcluster/docker/recipes/install-node.sh +ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh + +# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb +# %include taskcluster/docker/recipes/xvfb.sh +ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task + +# %include taskcluster/scripts/misc/fetch-content +ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content + +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/worker/scripts/tooltool.py + +# %include taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh +ADD topsrcdir/taskcluster/docker/recipes/ubuntu1804-test-system-setup.sh /setup/system-setup.sh +RUN bash /setup/system-setup.sh + +# %include taskcluster/scripts/tester/test-linux.sh +ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /builds/worker/bin/test-linux.sh + +# Set variable normally configured at login, by the shells parent process, these +# are taken from GNU su manual +ENV HOME /builds/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker +ENV HOSTNAME taskcluster-worker 
+ENV LANG en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 +ENV PATH $PATH:/builds/worker/bin + +# This helps not forgetting setting DISPLAY=:0 when running +# tests outside of test.sh +ENV DISPLAY :0 + +# Add utilities and configuration +COPY dot-files/config /builds/worker/.config + +# Disable apport (app crash reporter) to avoid stealing focus from test runs +ADD apport /etc/default/apport + +# Disable font antialiasing for now to match releng's setup +ADD fonts.conf /builds/worker/.fonts.conf + +# Bug 1345105 - Do not run periodical update checks and downloads +ADD autostart/gnome-software-service.desktop /etc/xdg/autostart/ + +# allow the worker user to access video devices +RUN usermod -a -G video worker + +# Set execution and ownership privileges +RUN chmod +x bin/*; chown -R worker:worker /builds/worker + +# Set up first-run experience for interactive mode +ADD motd /etc/taskcluster-motd +ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell +RUN chmod +x /bin/taskcluster-interactive-shell + +# Bug 1638183 - increase xserver maximum client count +COPY dot-files/config/xorg/99-serverflags.conf /usr/share/X11/xorg.conf.d/ + +# Ubuntu 18.04 (or any other GNOME3 based systems) needs dbus to have various +# test function as expected. Use entrypoint to initialize dbus as root. 
+COPY dbus.sh /usr/local/bin/dbus.sh +RUN chmod +x /usr/local/bin/dbus.sh +ENTRYPOINT ["/usr/local/bin/dbus.sh"] + +# Set a default command useful for debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/ubuntu1804-test/apport b/taskcluster/docker/ubuntu1804-test/apport new file mode 100644 index 0000000000..42e5f8d3a1 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/apport @@ -0,0 +1 @@ +enabled=0 diff --git a/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop b/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop new file mode 100644 index 0000000000..c3b8a4c676 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/autostart/deja-dup-monitor.desktop @@ -0,0 +1,19 @@ +[Desktop Entry] +Version=1.0 +X-Ubuntu-Gettext-Domain=deja-dup + +Name=Backup Monitor +Comment=Schedules backups at regular intervals + +Icon=deja-dup +TryExec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor +Exec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor + +# Bug 984944/1240084 - It prevents taking screenshots +X-GNOME-Autostart-Delay=false + +StartupNotify=false +NoDisplay=true + +Type=Application +Categories=System;Utility;Archiving; diff --git a/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop b/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop new file mode 100644 index 0000000000..b563cc306b --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/autostart/gnome-software-service.desktop @@ -0,0 +1,9 @@ +[Desktop Entry] +Type=Application +Name=GNOME Software +Exec=/usr/bin/gnome-software --gapplication-service +OnlyShowIn=GNOME;Unity; +X-Ubuntu-Gettext-Domain=gnome-software + +# Bug 1345105 - Do not run periodical update checks and downloads +X-GNOME-Autostart-enabled=false diff --git a/taskcluster/docker/ubuntu1804-test/dbus.sh b/taskcluster/docker/ubuntu1804-test/dbus.sh new file mode 100644 index 0000000000..f27ec664f1 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dbus.sh 
@@ -0,0 +1,7 @@ +#!/bin/bash + +set -e + +/etc/init.d/dbus start 2>&1 + +exec "${@}" diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/pip/pip.conf b/taskcluster/docker/ubuntu1804-test/dot-files/config/pip/pip.conf new file mode 100644 index 0000000000..73c2b2a52c --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/pip/pip.conf @@ -0,0 +1,2 @@ +[global] +disable-pip-version-check = true diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs new file mode 100644 index 0000000000..2db2718d24 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.dirs @@ -0,0 +1,15 @@ +# This file is written by xdg-user-dirs-update +# If you want to change or add directories, just edit the line you're +# interested in. All local changes will be retained on the next run +# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped +# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an +# absolute path. No other format is supported. 
+ +XDG_DESKTOP_DIR="$HOME/Desktop" +XDG_DOWNLOAD_DIR="$HOME/Downloads" +XDG_TEMPLATES_DIR="$HOME/Templates" +XDG_PUBLICSHARE_DIR="$HOME/Public" +XDG_DOCUMENTS_DIR="$HOME/Documents" +XDG_MUSIC_DIR="$HOME/Music" +XDG_PICTURES_DIR="$HOME/Pictures" +XDG_VIDEOS_DIR="$HOME/Videos" diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale new file mode 100644 index 0000000000..7741b83a3e --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/user-dirs.locale @@ -0,0 +1 @@ +en_US diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf b/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf new file mode 100644 index 0000000000..4c335f44e3 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dot-files/config/xorg/99-serverflags.conf @@ -0,0 +1,3 @@ +Section "ServerFlags" + Option "MaxClients" "2048" +EndSection
\ No newline at end of file diff --git a/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf b/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf new file mode 100644 index 0000000000..4b2dab2a66 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/dot-files/pulse/client.conf @@ -0,0 +1 @@ +autospawn = no
\ No newline at end of file diff --git a/taskcluster/docker/ubuntu1804-test/fonts.conf b/taskcluster/docker/ubuntu1804-test/fonts.conf new file mode 100644 index 0000000000..9784fcc981 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/fonts.conf @@ -0,0 +1,5 @@ +<match target="font"> + <edit name="antialias" mode="assign"> + <bool>false</bool> + </edit> +</match> diff --git a/taskcluster/docker/ubuntu1804-test/motd b/taskcluster/docker/ubuntu1804-test/motd new file mode 100644 index 0000000000..f958393cd8 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/motd @@ -0,0 +1,6 @@ +Welcome to your taskcluster interactive shell! The regularly scheduled task +has been paused to give you a chance to set up your debugging environment. + +For your convenience, the exact mozharness command needed for this task can +be invoked using the 'run-mozharness' command. + diff --git a/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell b/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell new file mode 100644 index 0000000000..c782c0ea96 --- /dev/null +++ b/taskcluster/docker/ubuntu1804-test/taskcluster-interactive-shell @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +download() { + name=`basename $1` + url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1 + if ! curl --fail --silent -o ./$name --retry 10 $url; then + fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}" + fi +} + +cd $HOME/bin; +download taskcluster/scripts/tester/run-wizard; +chmod +x run-wizard; +./run-wizard; + +SPAWN="$SHELL"; +if [ "$SHELL" = "bash" ]; then + SPAWN="bash -li"; +fi; + +cd $HOME; +exec $SPAWN; diff --git a/taskcluster/docker/update-verify/Dockerfile b/taskcluster/docker/update-verify/Dockerfile new file mode 100644 index 0000000000..153c16b7ae --- /dev/null +++ b/taskcluster/docker/update-verify/Dockerfile @@ -0,0 +1,55 @@ +# This is an LTS! We should upgrade after the next LTS is released, unless +# we've switched to the in-tree debian base images by then. 
+FROM ubuntu:20.04 + +MAINTAINER release@mozilla.com + +RUN dpkg --add-architecture i386 && apt-get -q update \ + # p7zip-full is for extracting Windows and OS X packages + # wget is for downloading update.xml, installers, and MARs + # libgtk-3-0 and libgtk2.0-0 are required to run the Firefox updater + && apt-get -q --yes install p7zip-full wget libgtk-3-0 libgtk-3.0:i386 libgtk2.0-0 libgtk2.0-0:i386 \ + && apt-get clean + +RUN mkdir /builds +RUN groupadd -g 1000 worker +RUN useradd -u 1000 -g 1000 -d /builds/worker -s /bin/bash -m worker +WORKDIR /builds/worker + +VOLUME /builds/worker/.cache +VOLUME /builds/worker/checkouts + +RUN mkdir /build +# %include python/mozbuild/mozbuild/action/tooltool.py +ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py + +# %include testing/mozharness/external_tools/robustcheckout.py +ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py + +# %include taskcluster/docker/recipes/hgrc +COPY topsrcdir/taskcluster/docker/recipes/hgrc /etc/mercurial/hgrc.d/mozilla.rc + +# %include taskcluster/docker/recipes/install-mercurial.sh +ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /build/install-mercurial.sh +ADD system-setup.sh /tmp/system-setup.sh +RUN bash /tmp/system-setup.sh + +# %include taskcluster/scripts/run-task +ADD topsrcdir/taskcluster/scripts/run-task /builds/worker/bin/run-task +# %include taskcluster/scripts/misc/fetch-content +ADD topsrcdir/taskcluster/scripts/misc/fetch-content /builds/worker/bin/fetch-content +RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/* +ENV PATH $PATH:/builds/worker/bin + +# Set variable normally configured at login, by the shells parent process, these +# are taken from GNU su manual +ENV HOME /builds/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker +ENV HOSTNAME taskcluster-worker +ENV LANG en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 + +# Set a default command useful for 
debugging +CMD ["/bin/bash", "--login"] diff --git a/taskcluster/docker/update-verify/system-setup.sh b/taskcluster/docker/update-verify/system-setup.sh new file mode 100644 index 0000000000..69738d920a --- /dev/null +++ b/taskcluster/docker/update-verify/system-setup.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# This allows ubuntu-desktop to be installed without human interaction +export DEBIAN_FRONTEND=noninteractive + +set -ve + +test "$(whoami)" == 'root' + +mkdir -p /setup +cd /setup + +apt_packages=() +apt_packages+=('curl') +apt_packages+=('locales') +apt_packages+=('git') +apt_packages+=('python3') +apt_packages+=('python3-pip') +apt_packages+=('shellcheck') +apt_packages+=('sudo') +apt_packages+=('wget') +apt_packages+=('xz-utils') + +apt-get update +apt-get install -y "${apt_packages[@]}" + +# Without this we get spurious "LC_ALL: cannot change locale (en_US.UTF-8)" errors, +# and python scripts raise UnicodeEncodeError when trying to print unicode characters. +locale-gen en_US.UTF-8 +dpkg-reconfigure locales + +su -c 'git config --global user.email "worker@mozilla.test"' worker +su -c 'git config --global user.name "worker"' worker + +tooltool_fetch() { + cat >manifest.tt + /build/tooltool.py fetch + rm manifest.tt +} + +cd /build +# shellcheck disable=SC1091 +. 
install-mercurial.sh + +cd / +rm -rf /setup diff --git a/taskcluster/docker/updatebot/Dockerfile b/taskcluster/docker/updatebot/Dockerfile new file mode 100644 index 0000000000..4750b0b58d --- /dev/null +++ b/taskcluster/docker/updatebot/Dockerfile @@ -0,0 +1,20 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Tom Ritter <tom@mozilla.com> + +VOLUME /builds/worker/checkouts + +COPY privileged-setup.sh /setup/privileged-setup.sh +COPY setup.sh /builds/worker/setup.sh +COPY run.py /builds/worker/run.py +COPY hgrc /etc/mercurial/hgrc.d/updatebot.rc + +RUN cd /setup && ./privileged-setup.sh + +ENV HOME /builds/worker +ENV SHELL /bin/bash +ENV USER worker +ENV LOGNAME worker +ENV PYTHONUNBUFFERED 1 +ENV PATH "/builds/worker/go/bin:${PATH}" + +RUN cd /builds/worker && ./setup.sh diff --git a/taskcluster/docker/updatebot/VERSION b/taskcluster/docker/updatebot/VERSION new file mode 100644 index 0000000000..56a6051ca2 --- /dev/null +++ b/taskcluster/docker/updatebot/VERSION @@ -0,0 +1 @@ +1
\ No newline at end of file diff --git a/taskcluster/docker/updatebot/hgrc b/taskcluster/docker/updatebot/hgrc new file mode 100644 index 0000000000..1de0aa701d --- /dev/null +++ b/taskcluster/docker/updatebot/hgrc @@ -0,0 +1,3 @@ +[ui] +ssh = ssh -i /builds/worker/updatebot/id_rsa -l updatebot@mozilla.com +username = Updatebot <updatebot@mozilla.com> diff --git a/taskcluster/docker/updatebot/privileged-setup.sh b/taskcluster/docker/updatebot/privileged-setup.sh new file mode 100755 index 0000000000..d06dafc888 --- /dev/null +++ b/taskcluster/docker/updatebot/privileged-setup.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -vex + +export UPDATEBOT_REVISION=39a562bbae3098c6933e5078750eb067252543df +export SQLPROXY_REVISION=fb1939ab92846761595833361c6b0b0ecd543861 + +export DEBIAN_FRONTEND=noninteractive + +# Update apt-get lists +apt-get update -y + +# Install dependencies +apt-get install -y --no-install-recommends \ + arcanist \ + bzr \ + ca-certificates \ + curl \ + golang-go \ + gcc \ + libc6-dev \ + python-requests \ + python-requests-unixsocket \ + python3.5 \ + python3-minimal \ + python3-wheel \ + python3-pip \ + python3-venv \ + python3-requests \ + python3-requests-unixsocket \ + python3-setuptools \ + openssh-client \ + wget + +mkdir -p /builds/worker/.mozbuild +chown -R worker:worker /builds/worker/ + +export GOPATH=/builds/worker/go + +# Build Google's Cloud SQL Proxy from source +cd /builds/worker/ +mkdir cloud_sql_proxy +cd cloud_sql_proxy +go mod init . 
+go get github.com/GoogleCloudPlatform/cloudsql-proxy/cmd/cloud_sql_proxy@$SQLPROXY_REVISION + +# Check out source code +cd /builds/worker/ +git clone https://github.com/mozilla-services/updatebot.git +cd updatebot +git checkout $UPDATEBOT_REVISION + +# Set up dependencies +cd /builds/worker/ +chown -R worker:worker . +chown -R worker:worker .* + +python3 -m pip install poetry + +rm -rf /setup
\ No newline at end of file diff --git a/taskcluster/docker/updatebot/run.py b/taskcluster/docker/updatebot/run.py new file mode 100755 index 0000000000..e3555bacee --- /dev/null +++ b/taskcluster/docker/updatebot/run.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function + +import sys + +sys.path.append("/builds/worker/checkouts/gecko/third_party/python") +sys.path.append(".") + +import os +import stat +import base64 +import signal +import requests +import subprocess +import taskcluster + +# Bump this number when you need to cause a commit for the job to re-run: 17 + +OPERATING_MODE = ( + "prod" + if os.environ.get("GECKO_HEAD_REPOSITORY", "") + == "https://hg.mozilla.org/mozilla-central" + else "dev" +) + +GECKO_DEV_PATH = "/builds/worker/checkouts/gecko" +DEV_PHAB_URL = "https://phabricator-dev.allizom.org/" +PROD_PHAB_URL = "https://phabricator.services.mozilla.com/" + +phabricator_url = DEV_PHAB_URL if OPERATING_MODE == "dev" else PROD_PHAB_URL + + +def log(*args): + print(*args) + + +def get_secret(name): + secret = None + if "TASK_ID" in os.environ: + secrets_url = ( + "http://taskcluster/secrets/v1/secret/project/updatebot/" + + ("3" if OPERATING_MODE == "prod" else "2") + + "/" + + name + ) + res = requests.get(secrets_url) + res.raise_for_status() + secret = res.json() + else: + secrets = taskcluster.Secrets(taskcluster.optionsFromEnvironment()) + secret = secrets.get("project/updatebot/" + OPERATING_MODE + "/" + name) + secret = secret["secret"] if "secret" in secret else None + secret = secret["value"] if "value" in secret else None + return secret + + +# Get TC Secrets ======================================= +log("Operating mode is ", OPERATING_MODE) +log("Getting secrets...") +bugzilla_api_key = 
get_secret("bugzilla-api-key") +phabricator_token = get_secret("phabricator-token") +try_sshkey = get_secret("try-sshkey") +database_config = get_secret("database-password") +sentry_url = get_secret("sentry-url") +sql_proxy_config = get_secret("sql-proxy-config") + +os.chdir("/builds/worker/updatebot") + +# Update Updatebot ======================================= +if OPERATING_MODE == "dev": + """ + If we are in development mode, we will update from github. + (This command will probably only work if we checkout a branch FWIW.) + + This allows us to iterate faster by committing to github and + re-running the cron job on Taskcluster, without rebuilding the + Docker image. + + However, this mechanism is bypassing the security feature we + have in-tree, where upstream out-of-tree code is fixed at a known + revision and cannot be changed without a commit to m-c. + + Therefore, we only do this in dev mode when running on try. + """ + log("Performing git repo update...") + r = subprocess.run(["git", "symbolic-ref", "-q", "HEAD"]) + if r.returncode == 0: + # This indicates we are on a branch, and not a specific revision + subprocess.check_call(["git", "pull", "origin"]) + +# Set Up SSH & Phabricator ============================== +log("Setting up ssh and phab keys...") +with open("id_rsa", "w") as sshkey: + sshkey.write(try_sshkey) +os.chmod("id_rsa", stat.S_IRUSR | stat.S_IWUSR) + +arcrc = open("/builds/worker/.arcrc", "w") +towrite = """ +{ + "hosts": { + "PHAB_URL_HERE": { + "token": "TOKENHERE" + } + } +} +""".replace( + "TOKENHERE", phabricator_token +).replace( + "PHAB_URL_HERE", phabricator_url + "api/" +) +arcrc.write(towrite) +arcrc.close() +os.chmod("/builds/worker/.arcrc", stat.S_IRUSR | stat.S_IWUSR) + +# Set up the Cloud SQL Proxy ============================= +log("Setting up cloud_sql_proxy...") +os.chdir("/builds/worker/") +with open("sql-proxy-key", "w") as proxy_key_file: + proxy_key_file.write( + 
base64.b64decode(sql_proxy_config["key-value"]).decode("utf-8") + ) + +instance_name = sql_proxy_config["instance-name"] +sql_proxy_command = ( + "./go/bin/cloud_sql_proxy -instances=" + + instance_name + + "=tcp:3306 -credential_file=sql-proxy-key" +) +sql_proxy = subprocess.Popen( + sql_proxy_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True, + start_new_session=True, +) +try: + (stdout, stderr) = sql_proxy.communicate(input=None, timeout=2) + log("sql proxy stdout:", stdout.decode("utf-8")) + log("sql proxy stderr:", stderr.decode("utf-8")) +except subprocess.TimeoutExpired: + log("no sqlproxy output in 2 seconds, this means it probably didn't error.") + +database_config["host"] = "127.0.0.1" + +# Vendor ================================================= +log("Getting Updatebot ready...") +os.chdir("/builds/worker/updatebot") +localconfig = { + "General": { + "env": OPERATING_MODE, + "gecko-path": GECKO_DEV_PATH, + }, + "Logging": { + "local": True, + "sentry": True, + "sentry_config": {"url": sentry_url, "debug": False}, + }, + "Database": database_config, + "Bugzilla": { + "apikey": bugzilla_api_key, + }, + "Taskcluster": { + "url_treeherder": "https://treeherder.mozilla.org/", + "url_taskcluster": "http://taskcluster/", + }, +} + +log("Writing local config file") +config = open("localconfig.py", "w") +config.write("localconfig = " + str(localconfig)) +config.close() + +if OPERATING_MODE == "dev": + log("Rewriting $gecko/.arcconfig because we're pointing at phab dev") + subprocess.check_call( + [ + "sed", + "-i", + "s#" + PROD_PHAB_URL + "#" + DEV_PHAB_URL + "#", + os.path.join(GECKO_DEV_PATH, ".arcconfig"), + ] + ) + +log("Running updatebot") +subprocess.check_call(["poetry", "run", "./automation.py"]) + +# Clean up =============================================== +log("Killing cloud_sql_proxy") +os.killpg(sql_proxy.pid, signal.SIGTERM) diff --git a/taskcluster/docker/updatebot/setup.sh b/taskcluster/docker/updatebot/setup.sh new file 
mode 100755 index 0000000000..68734f3566 --- /dev/null +++ b/taskcluster/docker/updatebot/setup.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -vex + +whoami + +# If poetry is not run as worker, then it won't work when run as user later. +cd /builds/worker/updatebot +/usr/local/bin/poetry install diff --git a/taskcluster/docker/valgrind-build/Dockerfile b/taskcluster/docker/valgrind-build/Dockerfile new file mode 100644 index 0000000000..a2cbf50550 --- /dev/null +++ b/taskcluster/docker/valgrind-build/Dockerfile @@ -0,0 +1,56 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Mike Hommey <mhommey@mozilla.com> + +VOLUME /builds/worker/checkouts +VOLUME /builds/worker/workspace +VOLUME /builds/worker/tooltool-cache + +# We could try to be smart and install all the -dbg packages corresponding to +# the installed packages, but, not all of them are actually for libraries used +# by Firefox, leading to a larger docker image. Moreover, some of the -dbg +# packages for unnecessary libraries pull other packages through dependencies, +# that make for even larger docker images. 
+RUN apt-get install \ + dbus-1-dbg \ + libatk-bridge2.0-0-dbg \ + libatk1.0-dbg \ + libcairo2-dbg \ + libdbus-glib-1-2-dbg \ + libdrm2-dbg \ + libffi6-dbg \ + libfontconfig1-dbg \ + libgl1-mesa-glx-dbg \ + libglapi-mesa-dbg \ + libglib2.0-0-dbg \ + libgdk-pixbuf2.0-0-dbg \ + libgtk-3-0-dbg \ + libice6-dbg \ + libicu52-dbg \ + libpango1.0-0-dbg \ + libpcre3-dbg \ + libpixman-1-0-dbg \ + libsm6-dbg \ + libvorbis-dbg \ + libwayland-client0-dbg \ + libwayland-cursor0-dbg \ + libx11-6-dbg \ + libx11-xcb1-dbg \ + libxau6-dbg \ + libxcb-glx0-dbg \ + libxcb-render0-dbg \ + libxcb-shm0-dbg \ + libxcb1-dbg \ + libxcomposite1-dbg \ + libxcursor1-dbg \ + libxdamage1-dbg \ + libxdmcp6-dbg \ + libxext6-dbg \ + libxfixes3-dbg \ + libxi6-dbg \ + libxinerama1-dbg \ + libxrandr2-dbg \ + libxrender1-dbg \ + libxt6-dbg \ + libxxf86vm1-dbg \ + valgrind-dbg \ + zlib1g-dbg diff --git a/taskcluster/docker/visual-metrics/Dockerfile b/taskcluster/docker/visual-metrics/Dockerfile new file mode 100644 index 0000000000..dc6af587bc --- /dev/null +++ b/taskcluster/docker/visual-metrics/Dockerfile @@ -0,0 +1,28 @@ +FROM $DOCKER_IMAGE_PARENT +MAINTAINER Barret Rennie <barret@mozilla.com> + +RUN apt-get update && \ + apt-get install \ + ffmpeg \ + imagemagick \ + pyssim \ + python \ + python-pil \ + python3 \ + python3-pip + +WORKDIR /builds/worker + +# %include testing/mozharness/external_tools/performance-artifact-schema.json +ADD topsrcdir/testing/mozharness/external_tools/performance-artifact-schema.json /builds/worker/performance-artifact-schema.json + +COPY requirements.txt /builds/worker/requirements.txt +RUN pip3 install setuptools==42.0.2 +RUN pip3 install --require-hashes -r /builds/worker/requirements.txt && \ + rm /builds/worker/requirements.txt + +COPY similarity.py /builds/worker/bin/similarity.py +COPY run-visual-metrics.py /builds/worker/bin/run-visual-metrics.py +RUN chmod +x /builds/worker/bin/run-visual-metrics.py + +VOLUME /builds/worker/artifacts/ diff --git 
a/taskcluster/docker/visual-metrics/requirements.txt b/taskcluster/docker/visual-metrics/requirements.txt new file mode 100644 index 0000000000..6d4f3909c4 --- /dev/null +++ b/taskcluster/docker/visual-metrics/requirements.txt @@ -0,0 +1,20 @@ +# Direct dependencies +attrs==19.1.0 --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 +structlog==19.1.0 --hash=sha256:db441b81c65b0f104a7ce5d86c5432be099956b98b8a2c8be0b3fb3a7a0b1536 +voluptuous==0.11.5 --hash=sha256:303542b3fc07fb52ec3d7a1c614b329cdbee13a9d681935353d8ea56a7bfa9f1 +jsonschema==3.2.0 --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 +numpy==1.18.3 --hash=sha256:6725d2797c65598778409aba8cd67077bb089d5b7d3d87c2719b206dc84ec05e +scipy==1.4.1 --hash=sha256:00af72998a46c25bdb5824d2b729e7dabec0c765f9deb0b504f928591f5ff9d4 +matplotlib==3.0.3 --hash=sha256:4d4250bf508dd07cca3b43888097f873cadb66eec6ac63dbbfb798798ec07af2 +opencv-python==4.2.0.34 --hash=sha256:d87e506ab205799727f0efa34b3888949bf029a3ada5eb000ff632606370ca6e + +# Transitive dependencies +importlib_metadata==1.1.0 --hash=sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742 +more_itertools==8.0.0 --hash=sha256:a0ea684c39bc4315ba7aae406596ef191fd84f873d2d2751f84d64e81a7a2d45 +pyrsistent==0.15.6 --hash=sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b +six==1.12.0 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c +zipp==0.6.0 --hash=sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335 +cycler==0.10.0 --hash=sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d +kiwisolver==1.1.0 --hash=sha256:a0c0a9f06872330d0dd31b45607197caab3c22777600e88031bfe66799e70bb0 +pyparsing==2.4.7 --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b +python-dateutil==2.8.1 --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a diff --git 
a/taskcluster/docker/visual-metrics/run-visual-metrics.py b/taskcluster/docker/visual-metrics/run-visual-metrics.py new file mode 100755 index 0000000000..799207a8d6 --- /dev/null +++ b/taskcluster/docker/visual-metrics/run-visual-metrics.py @@ -0,0 +1,470 @@
#!/usr/bin/env python3
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

"""Instrument visualmetrics.py to run in parallel."""

import argparse
import json
import logging
import os
import statistics
import subprocess
import sys
import tarfile
import time
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from multiprocessing import cpu_count
from pathlib import Path

import attr
import structlog
from jsonschema import validate
from voluptuous import ALLOW_EXTRA, Required, Schema


#: The max run time for a command (5 minutes)
MAX_TIME = 300


#: The directory where artifacts from this job will be placed.
OUTPUT_DIR = Path("/", "builds", "worker", "artifacts")


#: A job to process through visualmetrics.py
@attr.s
class Job:
    #: The name of the test.
    test_name = attr.ib(type=str)

    #: A unique number for the job.
    count = attr.ib(type=int)

    #: The extra options for this job.
    #: NOTE(review): declared type=str, but main() below assigns a *list* of
    #: strings here (job["extra_options"] / jobs_json["extra_options"]); the
    #: attrs type metadata looks wrong — confirm and change to type=list.
    extra_options = attr.ib(type=str)

    #: json_path: The path to the ``browsertime.json`` file on disk.
    json_path = attr.ib(type=Path)

    #: video_path: The path of the video file on disk.
    video_path = attr.ib(type=Path)


#: The schema for validating jobs.
JOB_SCHEMA = Schema(
    {
        Required("jobs"): [
            {
                Required("test_name"): str,
                Required("browsertime_json_path"): str,
                Required("extra_options"): [str],
            }
        ],
        Required("application"): {Required("name"): str, "version": str},
        Required("extra_options"): [str],
    }
)

#: A partial schema for browsertime.json files.
BROWSERTIME_SCHEMA = Schema(
    [{Required("files"): {Required("video"): [str]}}], extra=ALLOW_EXTRA
)

#: Per-metric Perfherder alerting policy; looked up by metric name below.
SHOULD_ALERT = {
    "ContentfulSpeedIndex": True,
    "FirstVisualChange": True,
    "LastVisualChange": True,
    "PerceptualSpeedIndex": True,
    "SpeedIndex": True,
    "videoRecordingStart": False,
}

# Loaded at import time; the file is baked into the image by the Dockerfile's
# ADD of performance-artifact-schema.json.
with Path("/", "builds", "worker", "performance-artifact-schema.json").open() as f:
    PERFHERDER_SCHEMA = json.loads(f.read())


def run_command(log, cmd, job_count):
    """Run a command using subprocess.check_output

    NOTE(review): despite the docstring, this uses subprocess.Popen and a
    manual readline loop so output can be relabelled per job; the returned
    "standard output" is only the *last* non-empty line (``res``), which
    run_visual_metrics() expects to be the JSON results blob.

    Args:
        log: The structlog logger instance.
        cmd: the command to run as a list of strings.
        job_count: integer used to prefix log lines as ``[JOB-n]``.

    Returns:
        A tuple of the process' exit status and standard output.
    """
    log.info("Running command", cmd=cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    lines = []
    res = None
    start = time.time()
    # Poll-based read with a wall-clock budget of MAX_TIME seconds.
    while time.time() - start <= MAX_TIME:
        time.sleep(0.1)
        output = process.stdout.readline()
        if output == b"" and process.poll() is not None:
            break
        if output:
            res = output.strip()
            lines.append(res.decode("utf-8", "ignore"))
        else:
            # No output but the process is still alive; back off.
            time.sleep(5)

    if time.time() - start > MAX_TIME:
        # NOTE(review): the child process is not killed on timeout — it may
        # linger after we return; confirm whether a process.kill() is wanted.
        log.error(
            "TEST-UNEXPECTED-FAIL | Timed out waiting for response from command",
            cmd=cmd,
        )
        return 1, "Timed out"

    rc = process.poll()
    job_prefix = "[JOB-" + str(job_count) + "] "
    for line in lines:
        # Some output doesn't start with the levels because it comes
        # from FFMPEG rather than the script itself
        if line.startswith(("[INFO]", "[WARNING]", "[CRITICAL]", "[ERROR]")):
            splitline = line.split(" - ")
            level = splitline[0]
            line = " - ".join(splitline[1:])
        else:
            level = "[INFO]"

        newline = job_prefix + line
        if level.strip() in ("[ERROR]", "[CRITICAL]"):
            # Any error/critical line forces a non-zero return code.
            if rc == 0:
                rc = 1
            log.error("TEST-UNEXPECTED-FAIL | " + newline)
        elif level == "[WARNING]":
            log.warning(newline)
        else:
            log.info(newline)

    return rc, res


def append_result(log, suites, test_name, name, result, extra_options):
    """Appends a ``name`` metrics result in the ``test_name`` suite.

    Args:
        log: The structlog logger instance.
        suites: A mapping containing the suites.
        test_name: The name of the test.
        name: The name of the metrics.
        result: The value to append.
        extra_options: list of extra option strings attached to this job.
    """
    # Progress sub-metrics are intentionally dropped.
    if name.endswith("Progress"):
        return
    try:
        result = int(result)
    except ValueError:
        log.error("Could not convert value", name=name)
        log.error("%s" % result)
        result = 0

    orig_test_name = test_name
    if test_name in suites and suites[test_name]["extraOptions"] != extra_options:
        # Disambiguate suites that share a name but differ in options.
        # NOTE(review): no "-" separator between test_name and the joined
        # options (produces e.g. "testfoo" not "test-foo"), and iterating a
        # set makes the suffix order nondeterministic for >1 missing option —
        # confirm whether both are intended.
        missing = set(extra_options) - set(suites[test_name]["extraOptions"])
        test_name = test_name + "-".join(list(missing))

    subtests = suites.setdefault(
        test_name,
        {"name": orig_test_name, "subtests": {}, "extraOptions": extra_options},
    )["subtests"]

    if name not in subtests:
        subtests[name] = {
            "name": name,
            "replicates": [result],
            "lowerIsBetter": True,
            "unit": "ms",
            # NOTE(review): direct indexing raises KeyError for any metric
            # name not present in SHOULD_ALERT — verify visualmetrics.py can
            # never emit an unlisted metric, or use .get(name, ...).
            "shouldAlert": SHOULD_ALERT[name],
        }
    else:
        subtests[name]["replicates"].append(result)


def compute_median(subtest):
    """Adds in the subtest the ``value`` field, which is the average of all
    replicates.

    NOTE(review): the stored value is the *median* (statistics.median), not
    the average as the docstring says.

    Args:
        subtest: The subtest containing all replicates.

    Returns:
        The subtest.
    """
    if "replicates" not in subtest:
        return subtest
    subtest["value"] = statistics.median(subtest["replicates"])
    return subtest


def get_suite(suite):
    """Returns the suite with computed medians in its subtests.

    Args:
        suite: The suite to convert.

    Returns:
        The suite, with its ``subtests`` mapping flattened into a list.
    """
    suite["subtests"] = [
        compute_median(subtest) for subtest in suite["subtests"].values()
    ]
    return suite


def read_json(json_path, schema):
    """Read the given json file and verify against the provided schema.

    NOTE(review): logs via the module-global ``log``, which is only bound in
    the ``__main__`` block — calling this from an importer without that setup
    raises NameError; confirm this is only ever run as a script.

    Args:
        json_path: Path of json file to parse.
        schema: A callable to validate the JSON's schema.

    Returns:
        The contents of the file at ``json_path`` interpreted as JSON.

    Raises:
        Re-raises any read/parse/validation error after logging it.
    """
    try:
        with open(str(json_path), "r", encoding="utf-8", errors="ignore") as f:
            data = json.load(f)
    except Exception:
        log.error("Could not read JSON file", path=json_path, exc_info=True)
        raise

    log.info("Loaded JSON from file", path=json_path)

    try:
        schema(data)
    except Exception:
        log.error("JSON failed to validate", exc_info=True)
        raise

    return data


def main(log, args):
    """Run visualmetrics.py in parallel.

    Args:
        log: The structlog logger instance.
        args: The parsed arguments from the argument parser.

    Returns:
        The return code that the program will exit with.
    """
    fetch_dir = os.getenv("MOZ_FETCHES_DIR")
    if not fetch_dir:
        log.error("Expected MOZ_FETCHES_DIR environment variable.")
        return 1

    fetch_dir = Path(fetch_dir)

    visualmetrics_path = fetch_dir / "visualmetrics.py"
    if not visualmetrics_path.exists():
        log.error(
            "Could not locate visualmetrics.py", expected_path=str(visualmetrics_path)
        )
        return 1

    browsertime_results_path = fetch_dir / "browsertime-results.tgz"

    try:
        with tarfile.open(str(browsertime_results_path)) as tar:
            tar.extractall(path=str(fetch_dir))
    except Exception:
        log.error(
            "Could not read/extract browsertime results archive",
            path=browsertime_results_path,
            exc_info=True,
        )
        return 1
    log.info("Extracted browsertime results", path=browsertime_results_path)

    try:
        jobs_json_path = fetch_dir / "browsertime-results" / "jobs.json"
        jobs_json = read_json(jobs_json_path, JOB_SCHEMA)
    except Exception:
        log.error(
            "Could not open the jobs.json file", path=jobs_json_path, exc_info=True
        )
        return 1

    jobs = []
    count = 0

    # Expand each jobs.json entry into one Job per captured video.
    for job in jobs_json["jobs"]:
        browsertime_json_path = fetch_dir / job["browsertime_json_path"]

        try:
            browsertime_json = read_json(browsertime_json_path, BROWSERTIME_SCHEMA)
        except Exception:
            log.error(
                "Could not open a browsertime.json file",
                path=browsertime_json_path,
                exc_info=True,
            )
            return 1

        for site in browsertime_json:
            for video in site["files"]["video"]:
                count += 1
                jobs.append(
                    Job(
                        test_name=job["test_name"],
                        # Old-style `and/or` ternary: per-job options win,
                        # falling back to the global extra_options list.
                        extra_options=len(job["extra_options"]) > 0
                        and job["extra_options"]
                        or jobs_json["extra_options"],
                        json_path=browsertime_json_path,
                        video_path=browsertime_json_path.parent / video,
                        count=count,
                    )
                )

    failed_runs = 0
    suites = {}

    # One worker per CPU; executor.map preserves input order so zip(jobs, ...)
    # pairs each Job with its own result.
    with ProcessPoolExecutor(max_workers=cpu_count()) as executor:
        for job, result in zip(
            jobs,
            executor.map(
                partial(
                    run_visual_metrics,
                    visualmetrics_path=visualmetrics_path,
                    options=args.visual_metrics_options,
                ),
                jobs,
            ),
        ):
            returncode, res = result
            if returncode != 0:
                log.error(
                    "Failed to run visualmetrics.py",
                    video_path=job.video_path,
                    error=res,
                )
                failed_runs += 1
            else:
                for name, value in res.items():
                    append_result(
                        log, suites, job.test_name, name, value, job.extra_options
                    )

    suites = [get_suite(suite) for suite in suites.values()]

    perf_data = {
        "framework": {"name": "browsertime"},
        "application": jobs_json["application"],
        "type": "pageload",
        "suites": suites,
    }

    # TODO: Try to get the similarity for all possible tests, this means that we
    # will also get a comparison of recorded vs. live sites to check the on-going
    # quality of our recordings.
    # Bug 1674927 - Similarity metric is disabled until we figure out
    # why it had a huge increase in run time.

    # Validates the perf data complies with perfherder schema.
    # The perfherder schema uses jsonschema so we can't use voluptuous here.
    validate(perf_data, PERFHERDER_SCHEMA)

    raw_perf_data = json.dumps(perf_data)
    with Path(OUTPUT_DIR, "perfherder-data.json").open("w") as f:
        f.write(raw_perf_data)
    # Prints the data in logs for Perfherder to pick it up.
    log.info("PERFHERDER_DATA: %s" % raw_perf_data)

    # Lists the number of processed jobs, failures, and successes.
    with Path(OUTPUT_DIR, "summary.json").open("w") as f:
        json.dump(
            {
                "total_jobs": len(jobs),
                "successful_runs": len(jobs) - failed_runs,
                "failed_runs": failed_runs,
            },
            f,
        )

    # If there's one failure along the way, we want to return > 0
    # to trigger a red job in TC.
    return failed_runs


def run_visual_metrics(job, visualmetrics_path, options):
    """Run visualmetrics.py on the input job.

    NOTE(review): executed in ProcessPoolExecutor workers and uses the
    module-global ``log`` (bound only under ``__main__``) — this works under
    fork-based start methods that copy globals, but would NameError under
    spawn; confirm the runtime only ever forks.

    Returns:
        A returncode and a string containing the output of visualmetrics.py
    """
    cmd = [
        "/usr/bin/python",
        str(visualmetrics_path),
        "-vvv",
        "--logformat",
        "[%(levelname)s] - %(message)s",
        "--video",
        str(job.video_path),
    ]
    cmd.extend(options)
    rc, res = run_command(log, cmd, job.count)

    if rc == 0:
        # Python 3.5 requires a str object (not 3.6+)
        res = json.loads(res.decode("utf8"))

        # Ensure that none of these values are at 0 which
        # is indicative of a failling test
        monitored_tests = [
            "contentfulspeedindex",
            "lastvisualchange",
            "perceptualspeedindex",
            "speedindex",
        ]
        failed_tests = []
        for metric, val in res.items():
            if metric.lower() in monitored_tests and val == 0:
                failed_tests.append(metric)
        if failed_tests:
            log.error(
                "TEST-UNEXPECTED-FAIL | Some visual metrics have an erroneous value of 0."
            )
            log.info("Tests which failed: %s" % str(failed_tests))
            rc += 1

    return rc, res


if __name__ == "__main__":
    logging.basicConfig(format="%(levelname)s - %(message)s", level=logging.INFO)
    structlog.configure(
        processors=[
            structlog.processors.format_exc_info,
            structlog.dev.ConsoleRenderer(colors=False),
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    parser.add_argument(
        "visual_metrics_options",
        type=str,
        metavar="VISUAL-METRICS-OPTIONS",
        help="Options to pass to visualmetrics.py",
        nargs="*",
    )

    args = parser.parse_args()
    # Module-global logger relied upon by read_json()/run_visual_metrics().
    log = structlog.get_logger()

    try:
        sys.exit(main(log, args))
    except Exception as e:
        log.error("Unhandled exception: %s" % e, exc_info=True)
        sys.exit(1)
diff --git a/taskcluster/docker/visual-metrics/similarity.py b/taskcluster/docker/visual-metrics/similarity.py new file mode 100644 index 0000000000..e8ebca28f6 --- /dev/null +++ b/taskcluster/docker/visual-metrics/similarity.py @@ -0,0 +1,357 @@
#!/usr/bin/env python3
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# NOTE(review): only `import urllib` appears here, yet urllib.request is used
# below — bare `import urllib` does not import the submodule; this presumably
# works only because another import (cv2/matplotlib) pulls it in transitively.
# Confirm and switch to `import urllib.request`.
import cv2
import json
import numpy as np
import os
import pathlib
import shutil
import socket
import structlog
import tarfile
import tempfile
import urllib

from functools import wraps
from matplotlib import pyplot as plt
from scipy.stats import spearmanr


# Module-global logger; bound in calculate_similarity() before any helper runs.
log = None


# We add the `and` conditions to it later
# NOTE(review): this dict is mutated in place by _data_from_last_task() and
# _data_from_last_live_task() — the module is stateful and those two helpers
# clobber each other's filters if interleaved; confirm single-threaded use.
base_ad_query = {
    "from": "task",
    "limit": 1000,
    "where": {"and": []},
    "select": [
        "action.start_time",
        "run.name",
        "task.artifacts",
        "task.group.id",
        "task.id",
    ],
}


def socket_timeout(value=120):
    """Decorator for socket timeouts.

    Temporarily sets the process-wide default socket timeout around the
    wrapped call, restoring the previous value afterwards.
    """

    def _socket_timeout(func):
        @wraps(func)
        def __socket_timeout(*args, **kw):
            old = socket.getdefaulttimeout()
            socket.setdefaulttimeout(value)
            try:
                return func(*args, **kw)
            finally:
                socket.setdefaulttimeout(old)

        return __socket_timeout

    return _socket_timeout


def _open_data(file):
    # Returns an OpenCV VideoCapture for the given path-like argument.
    return cv2.VideoCapture(str(file))


@socket_timeout(120)
def _query_activedata(query_json):
    """Used to run queries on active data."""
    active_data_url = "http://activedata.allizom.org/query"

    req = urllib.request.Request(active_data_url)
    req.add_header("Content-Type", "application/json")
    jsondata = json.dumps(query_json)

    jsondataasbytes = jsondata.encode("utf-8")
    req.add_header("Content-Length", len(jsondataasbytes))

    log.info("Querying Active-data...")
    response = urllib.request.urlopen(req, jsondataasbytes)
    # NOTE(review): `{str(...)}` builds a one-element *set* literal, so this
    # logs e.g. "Status: {'200'}" — probably meant plain %-formatting.
    log.info("Status: %s" % {str(response.getcode())})

    # NOTE(review): blanket replace of ' with " corrupts any payload value
    # containing an apostrophe; fragile workaround — confirm the server can
    # really emit single-quoted pseudo-JSON.
    data = json.loads(response.read().decode("utf8").replace("'", '"'))["data"]
    return data


@socket_timeout(120)
def _download(url, loc):
    """Downloads from a url (with a timeout)."""
    log.info("Downloading %s" % url)
    try:
        urllib.request.urlretrieve(url, loc)
    except Exception as e:
        # Best-effort: failures are logged and reported via the return value.
        log.info(str(e))
        return False
    return True


def _get_frames(video):
    """Gets all frames from a video into a list."""
    allframes = []
    while video.isOpened():
        ret, frame = video.read()
        if ret:
            # Convert to gray to simplify the process
            allframes.append(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))
        else:
            video.release()
            break
    return allframes


def _get_browsertime_results(query):
    """Used to run an AD query and extract the browsertime results if they exist.

    Returns the path of a temp directory holding the extracted archive, or
    None on any failure (query, missing artifact, download, extraction).
    """
    failed = False
    try:
        data = _query_activedata(query)
    except Exception as e:
        log.info(str(e))
        failed = True
    if failed or not data:
        log.info("Couldn't get activedata data")
        return None

    # Find the newest browsertime task
    log.info("Found %s datums" % str(len(data["action.start_time"])))
    maxind = np.argmax([float(t) for t in data["action.start_time"]])
    artifacts = data["task.artifacts"][maxind]
    btime_artifact = None
    for art in artifacts:
        if "browsertime-results" in art["name"]:
            btime_artifact = art["url"]
            break
    if not btime_artifact:
        log.info("Can't find an older site test")
        return None

    log.info(
        "Comparing videos to TASK_GROUP=%s, TASK_ID=%s"
        % (data["task.group.id"][maxind], data["task.id"][maxind])
    )

    # Download the browsertime videos and untar them
    tmpdir = tempfile.mkdtemp()
    loc = os.path.join(tmpdir, "tmpfile.tgz")
    if not _download(btime_artifact, loc):
        log.info(
            "Failed to download browsertime-results artifact from %s" % btime_artifact
        )
        return None
    tmploc = tempfile.mkdtemp()
    try:
        with tarfile.open(str(loc)) as tar:
            tar.extractall(path=tmploc)
    except Exception:
        log.info(
            "Could not read/extract old browsertime results archive",
            path=loc,
            exc_info=True,
        )
        return None

    return tmploc


def _data_from_last_task(label):
    """Gets the data from the last PGO/OPT task with the same label.

    We look for both OPT and PGO tasks. The difference
    between them should be minimal. This method also provides
    a way to compare recordings from this task to another
    known task based on the TC_GROUP_ID environment variable.
    """
    label_opt = label.replace("/pgo", "/opt")
    label_pgo = label.replace("/opt", "/pgo")

    # Mutates the module-global query (see note on base_ad_query above).
    base_ad_query["where"]["and"] = [
        {"in": {"task.run.state": ["completed"]}},
        {"or": [{"eq": {"run.name": label_pgo}}, {"eq": {"run.name": label_opt}}]},
    ]

    task_group_id = os.getenv("TC_GROUP_ID", "")
    if task_group_id:
        base_ad_query["where"]["and"].append({"eq": {"task.group.id": task_group_id}})
    else:
        # NOTE(review): "today-week-week" looks like it means two weeks back in
        # ActiveData's date DSL — confirm against the ActiveData query docs.
        base_ad_query["where"]["and"].extend(
            [
                {"in": {"repo.branch.name": ["mozilla-central"]}},
                {"gte": {"action.start_time": {"date": "today-week-week"}}},
            ]
        )

    return _get_browsertime_results(base_ad_query)


def _data_from_last_live_task(label):
    """Gets the data from the last live site PGO task."""
    label_live = label.replace("/opt", "/pgo").replace("tp6m", "tp6m-live")

    base_ad_query["where"]["and"] = [
        {"in": {"repo.branch.name": ["mozilla-central"]}},
        {"gte": {"action.start_time": {"date": "today-week-week"}}},
        {"in": {"task.run.state": ["completed"]}},
        {"eq": {"run.name": label_live}},
    ]

    return _get_browsertime_results(base_ad_query)


def _get_similarity(old_videos_info, new_videos_info, output, prefix=""):
    """Calculates a similarity score for two groupings of videos.

    The technique works as follows:
        1. Get the last live site test.
        2. For each 15x15 video pairings, build a cross-correlation matrix:
            1. Get each of the videos and calculate their histograms
               across the full videos.
            2. Calculate the correlation coefficient between these two.
        3. Average the cross-correlation matrix to obtain the score.

    The 2D similarity score is the same, except that it builds a histogram
    from the final frame instead of the full video.

    Args:
        old_videos_info: List of {"data": VideoCapture, "path": str} entries
            for the reference videos.
        new_videos_info: Same shape, for this task's videos.
        output: Location to output videos with low similarity scores.
        prefix: Prefix a string to the output.
    Returns:
        Two similarity scores (3D, 2D) as a float.
    """
    nhists = []
    nhists2d = []

    old_videos = [entry["data"] for entry in old_videos_info]
    new_videos = [entry["data"] for entry in new_videos_info]

    total_vids = min(len(old_videos), len(new_videos))
    xcorr = np.zeros((total_vids, total_vids))
    xcorr2d = np.zeros((total_vids, total_vids))

    for i in range(total_vids):
        datao = np.asarray(_get_frames(old_videos[i]))

        # NOTE(review): plt.hist both computes and *plots* the histogram on
        # the current global figure, which is never cleared/closed here —
        # np.histogram would avoid the rendering cost and memory growth;
        # confirm nothing downstream relies on the figure.
        histo, _, _ = plt.hist(datao.flatten(), bins=255)
        histo2d, _, _ = plt.hist(datao[-1, :, :].flatten(), bins=255)

        for j in range(total_vids):
            if i == 0:
                # Only calculate the histograms once; it takes time
                datan = np.asarray(_get_frames(new_videos[j]))

                histn, _, _ = plt.hist(datan.flatten(), bins=255)
                histn2d, _, _ = plt.hist(datan[-1, :, :].flatten(), bins=255)

                nhists.append(histn)
                nhists2d.append(histn2d)
            else:
                histn = nhists[j]
                histn2d = nhists2d[j]

            rho, _ = spearmanr(histn, histo)
            rho2d, _ = spearmanr(histn2d, histo2d)

            xcorr[i, j] = rho
            xcorr2d[i, j] = rho2d

    similarity = np.mean(xcorr)
    similarity2d = np.mean(xcorr2d)

    log.info("Average 3D similarity: %s" % str(np.round(similarity, 5)))
    log.info("Average 2D similarity: %s" % str(np.round(similarity2d, 5)))

    if np.round(similarity, 1) <= 0.7 or np.round(similarity2d, 1) <= 0.7:
        # For low correlations, output the worst video pairing
        # so that we can visually see what the issue was
        minind = np.unravel_index(np.argmin(xcorr, axis=None), xcorr.shape)

        oldvid = old_videos_info[minind[0]]["path"]
        shutil.copyfile(oldvid, str(pathlib.Path(output, "%sold_video.mp4" % prefix)))

        newvid = new_videos_info[minind[1]]["path"]
        shutil.copyfile(newvid, str(pathlib.Path(output, "%snew_video.mp4" % prefix)))

    return np.round(similarity, 5), np.round(similarity2d, 5)


def calculate_similarity(jobs_json, fetch_dir, output):
    """Calculates the similarity score for this task.

    Here we use activedata to find the last live site that ran and
    to find the last task (with the same label) that ran. Those two
    tasks are then compared to the current one and 4 metrics are produced.

    For live sites, we only calculate 2 of these metrics, since the
    playback similarity is not applicable to it.

    Args:
        jobs_json: The jobs JSON that holds extra information.
        fetch_dir: The fetch directory that holds the new videos.
        output: The output directory.
    Returns:
        A dictionary containing up to 4 different metrics (their values default
        to None if a metric couldn't be calculated):
            PlaybackSimilarity: Similarity of the full playback to a live site test.
            PlaybackSimilarity2D: - // - (but for the final frame only)
            Similarity: Similarity of the tests video recording to its last run.
            Similarity2D: - // - (but for the final frame only)
    """
    global log
    log = structlog.get_logger()

    label = os.getenv("TC_LABEL", "")
    if not label:
        log.info("TC_LABEL is undefined, cannot calculate similarity metrics")
        return {}

    # Get all the newest videos from this task
    new_btime_videos = [
        {"data": _open_data(str(f)), "path": str(f)}
        for f in pathlib.Path(fetch_dir).rglob("*.mp4")
    ]
    log.info("Found %s new videos" % str(len(new_btime_videos)))

    # Get the similarity against the last task
    old_btime_res = _data_from_last_task(label)
    old_sim = old_sim2d = None
    if old_btime_res:
        old_btime_videos = [
            {"data": _open_data(str(f)), "path": str(f)}
            for f in pathlib.Path(old_btime_res).rglob("*.mp4")
        ]
        log.info("Found %s old videos" % str(len(old_btime_videos)))

        old_sim, old_sim2d = _get_similarity(old_btime_videos, new_btime_videos, output)
    else:
        log.info("Failed to find an older test task")

    # Compare recordings to their live site variant if it exists
    live_sim = live_sim2d = None
    if "live" not in jobs_json["extra_options"]:
        live_btime_res = _data_from_last_live_task(label)
        if live_btime_res:
            live_btime_videos = [
                {"data": _open_data(str(f)), "path": str(f)}
                for f in pathlib.Path(live_btime_res).rglob("*.mp4")
            ]
            log.info("Found %s live videos" % str(len(live_btime_videos)))

            live_sim, live_sim2d = _get_similarity(
                live_btime_videos, new_btime_videos, output, prefix="live_"
            )
        else:
            log.info("Failed to find a live site variant")

    return {
        "PlaybackSimilarity": live_sim,
        "PlaybackSimilarity2D": live_sim2d,
        "Similarity": old_sim,
        "Similarity2D": old_sim2d,
    }
diff --git a/taskcluster/docker/webrender/Dockerfile b/taskcluster/docker/webrender/Dockerfile new file mode 100644 index 0000000000..fdde5ad4e3 --- /dev/null +++ b/taskcluster/docker/webrender/Dockerfile @@ -0,0 +1,20 @@
# CI image for WebRender (wrench) builds.
FROM $DOCKER_IMAGE_PARENT
# NOTE(review): MAINTAINER is deprecated (hadolint DL4000); kept for
# consistency with the sibling images in this tree.
MAINTAINER Kartikaya Gupta <kgupta@mozilla.com>

VOLUME /builds/worker/checkouts
VOLUME /builds/worker/workspace
VOLUME /builds/worker/tooltool-cache

# %include taskcluster/docker/recipes/prepare_openjdk.sh
COPY topsrcdir/taskcluster/docker/recipes/prepare_openjdk.sh /tmp/prepare_openjdk.sh
RUN /tmp/prepare_openjdk.sh && rm /tmp/prepare_openjdk.sh

# %include gfx/wr/ci-scripts/docker-image/setup.sh
ADD topsrcdir/gfx/wr/ci-scripts/docker-image/setup.sh /tmp/wr-setup.sh
RUN /bin/bash /tmp/wr-setup.sh && rm /tmp/wr-setup.sh

# We need this to install cargo-vendor as part of the wrench-deps task
# NOTE(review): no `-y`/`apt-get update` visible — presumably the parent
# image's apt-get wrapper handles this; verify.
RUN apt-get install libssl-dev

# Set a default command useful for debugging
CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/wgpu/Dockerfile b/taskcluster/docker/wgpu/Dockerfile new file mode 100644 index 0000000000..d5b72eafb7 --- /dev/null +++ b/taskcluster/docker/wgpu/Dockerfile @@ -0,0 +1,15 @@
# CI image for wgpu builds; mirrors the webrender image above.
FROM $DOCKER_IMAGE_PARENT
MAINTAINER Dzmitry Malyshau <dmalyshau@mozilla.com>

VOLUME /builds/worker/checkouts
VOLUME /builds/worker/workspace
VOLUME /builds/worker/tooltool-cache

ADD system-setup.sh /setup/system-setup.sh
RUN bash /setup/system-setup.sh

# We need this to install cargo-vendor as part of the wgpu-deps task
RUN apt-get install libssl-dev

# Set a default command useful for debugging
CMD ["/bin/bash", "--login"]
diff --git a/taskcluster/docker/wgpu/system-setup.sh b/taskcluster/docker/wgpu/system-setup.sh new file mode 100644 index 0000000000..ca7ae0c3c6 --- /dev/null +++ b/taskcluster/docker/wgpu/system-setup.sh @@ -0,0 +1,11 @@
# System setup for the wgpu image; invoked via `bash` from the Dockerfile.
# NOTE(review): no shebang and no `set -e` — an individual command failure
# here does not abort the script on its own; confirm whether that's intended.
apt-get -y update
apt-get install -y \
    bzip2 \
    cmake \
    curl \
    gcc \
    git \
    g++ \
    libx11-dev \
    pkg-config \
    software-properties-common