author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:47:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:47:29 +0000
commit     0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d (patch)
tree       a31f07c9bcca9d56ce61e9a1ffd30ef350d513aa  /tools/update-verify
parent     Initial commit. (diff)
Adding upstream version 115.8.0esr. (upstream/115.8.0esr)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools/update-verify')
-rw-r--r--  tools/update-verify/README.md                                     |  118
-rw-r--r--  tools/update-verify/python/util/__init__.py                       |    3
-rw-r--r--  tools/update-verify/python/util/commands.py                       |   57
-rw-r--r--  tools/update-verify/release/common/cached_download.sh             |   40
-rw-r--r--  tools/update-verify/release/common/check_updates.sh               |  124
-rw-r--r--  tools/update-verify/release/common/download_builds.sh             |   36
-rw-r--r--  tools/update-verify/release/common/download_mars.sh               |  105
-rwxr-xr-x  tools/update-verify/release/common/installdmg.ex                  |   45
-rwxr-xr-x  tools/update-verify/release/common/unpack-diskimage.sh            |   95
-rwxr-xr-x  tools/update-verify/release/common/unpack.sh                      |  121
-rwxr-xr-x  tools/update-verify/release/compare-directories.py                |  273
-rwxr-xr-x  tools/update-verify/release/final-verification.sh                 |  519
-rwxr-xr-x  tools/update-verify/release/get-update-xml.sh                     |   36
-rw-r--r--  tools/update-verify/release/mar_certs/README                      |   29
-rw-r--r--  tools/update-verify/release/mar_certs/dep1.der                    |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/dep2.der                    |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/nightly_aurora_level3_primary.der    |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/nightly_aurora_level3_secondary.der  |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/release_primary.der         |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/release_secondary.der       |  bin 0 -> 1225 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/sha1/dep1.der               |  bin 0 -> 709 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/sha1/dep2.der               |  bin 0 -> 713 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/sha1/release_primary.der    |  bin 0 -> 709 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/sha1/release_secondary.der  |  bin 0 -> 713 bytes
-rw-r--r--  tools/update-verify/release/mar_certs/xpcshellCertificate.der     |  bin 0 -> 1189 bytes
-rw-r--r--  tools/update-verify/release/replace-updater-certs.py              |   41
-rwxr-xr-x  tools/update-verify/release/test-mar-url.sh                       |   46
-rwxr-xr-x  tools/update-verify/release/updates/verify.sh                     |  292
-rw-r--r--  tools/update-verify/scripts/async_download.py                     |  362
-rw-r--r--  tools/update-verify/scripts/chunked-verify.py                     |   68
-rwxr-xr-x  tools/update-verify/scripts/chunked-verify.sh                     |   72
31 files changed, 2482 insertions, 0 deletions
diff --git a/tools/update-verify/README.md b/tools/update-verify/README.md
new file mode 100644
index 0000000000..14eb2a5f9a
--- /dev/null
+++ b/tools/update-verify/README.md
@@ -0,0 +1,118 @@
+Mozilla Build Verification Scripts
+==================================
+
+Contents
+--------
+
+updates -> AUS and update verification
+
+l10n -> l10n vs. en-US verification
+
+common -> useful utility scripts
+
+Update Verification
+-------------------
+
+`verify.sh`
+
+> Does a low-level check of all advertised MAR files. Expects to have a
+> file named all-locales, but does not (yet) handle platform exceptions, so
+> these should be removed from the locales file.
+>
+> Prints errors on both STDOUT and STDERR; the intention is to run the
+> script with STDOUT redirected to an output log. If there is no output
+> on the console and the exit code is 0, then all tests passed; otherwise one
+> or more tests failed.
+>
+> Does the following:
+>
+> 1) download update.xml from AUS for a particular release
+> 2) download the partial and full mar advertised
+> 3) check that the partial and full match the advertised size and sha1sum
+> 4) download the latest release, and an older release
+> 5) apply the MAR to the older release, and compare the two releases.
+>
+> Step 5 is repeated for both the complete and partial MAR.
+>
+> Expects to have an updates.cfg file, describing all releases to try updating
+> from.
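+>
+> An illustrative config line (hypothetical values; each line assigns shell
+> variables such as `release`, `product`, `platform`, `build_id`, `locales`,
+> `channel`, `from` and `patch_types`):
+>
+> ```
+> release="115.0" product="Firefox" platform="Linux_x86_64-gcc3" build_id="<build id>" locales="de en-US" channel="release-localtest" from="/firefox/releases/115.0/linux-x86_64/%locale%/firefox-115.0.tar.bz2" patch_types="complete partial"
+> ```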
+
+Valid Platforms for AUS
+-----------------------
+- Linux_x86-gcc3
+- Darwin_Universal-gcc3
+- WINNT_x86-msvc
+- Darwin_ppc-gcc3
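+
+These identifiers fill the platform segment of an AUS update URL. An
+illustrative example (version, build ID and channel are placeholders):
+
+```
+https://aus5.mozilla.org/update/3/Firefox/<version>/<buildid>/Linux_x86-gcc3/en-US/<channel>/default/default/default/update.xml?force=1
+```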
+
+---
+Running it locally
+==================
+
+Requirements:
+-------------
+
+- [Docker](https://docs.docker.com/get-docker/)
+- [optional | Mac] zstd (`brew install zstd`)
+
+Docker Image
+------------
+
+1. [Ship-it](https://shipit.mozilla-releng.net/recent) holds the latest builds.
+1. Clicking on "Ship task" of the latest build will open the task group in
+Taskcluster.
+1. In the "Name contains" lookup box, search for `release-update-verify-firefox`
+and open an `update-verify` task.
+1. Make note of the `CHANNEL` under Payload, e.g. `beta-localtest`.
+1. Click "See more" under Task Details and open the `docker-image-update-verify`
+task.
+
+Download the image artifact from the *docker-image-update-verify* task and load
+it manually:
+```
+zstd -d image.tar.zst
+docker image load -i image.tar
+```
+
+**OR**
+
+Load docker image using mach and a task
+```
+# Replace TASK-ID with the ID of a docker-image-update-verify task
+./mach taskcluster-load-image --task-id=<TASK-ID>
+```
+
+Update Verify Config
+--------------------
+
+1. Open Taskcluster Task Group
+1. Search for `update-verify-config` and open the task
+1. Under Artifacts, download `update-verify.cfg` file
+
+Run Docker
+----------
+
+To run the container interactively:
+> Replace `<MOZ DIRECTORY>` with the gecko repository path on the local host <br />
+> Replace `<UVC PATH>` with the path to the `update-verify.cfg` file on the local host,
+> e.g. `~/Downloads/update-verify.cfg` <br />
+> Replace `<CHANNEL>` with the value from the `update-verify` task (see the Docker Image steps above)
+
+```
+docker run \
+ -it \
+ --rm \
+ -e CHANNEL=beta-localtest \
+ -e MOZ_FETCHES_DIR=/builds/worker/fetches \
+ -e MOZBUILD_STATE_PATH=/builds/worker/.mozbuild \
+ -v <UVC PATH>:/builds/worker/fetches/update-verify.cfg \
+ -v <MOZ DIRECTORY>:/builds/worker/checkouts/gecko \
+ -w /builds/worker/checkouts/gecko \
+ update-verify
+```
+> Note that `MOZ_FETCHES_DIR` here is different from what is used in production.
+
+`--total-chunks` should be set to the number of lines in `update-verify.cfg`, and `--this-chunk` selects which line to verify:
+```
+./tools/update-verify/scripts/chunked-verify.sh --total-chunks=228 --this-chunk=4
+```
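+
+For example (illustrative; inside the container the config is mounted at
+`$MOZ_FETCHES_DIR/update-verify.cfg`, matching the `docker run` command above):
+```
+./tools/update-verify/scripts/chunked-verify.sh \
+ --total-chunks=$(wc -l < "$MOZ_FETCHES_DIR/update-verify.cfg") \
+ --this-chunk=1
+```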
diff --git a/tools/update-verify/python/util/__init__.py b/tools/update-verify/python/util/__init__.py
new file mode 100644
index 0000000000..c580d191c1
--- /dev/null
+++ b/tools/update-verify/python/util/__init__.py
@@ -0,0 +1,3 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/tools/update-verify/python/util/commands.py b/tools/update-verify/python/util/commands.py
new file mode 100644
index 0000000000..e53464e6f8
--- /dev/null
+++ b/tools/update-verify/python/util/commands.py
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""Functions for running commands"""
+
+import logging
+import os
+import subprocess
+import time
+
+import six
+
+log = logging.getLogger(__name__)
+
+
+# timeout message, used in TRANSIENT_HG_ERRORS and in tests.
+TERMINATED_PROCESS_MSG = "timeout, process terminated"
+
+
+def log_cmd(cmd, **kwargs):
+ # cwd is special in that we always want it printed, even if it's not
+ # explicitly chosen
+ kwargs = kwargs.copy()
+ if "cwd" not in kwargs:
+ kwargs["cwd"] = os.getcwd()
+ log.info("command: START")
+ log.info("command: %s" % subprocess.list2cmdline(cmd))
+ for key, value in six.iteritems(kwargs):
+ log.info("command: %s: %s", key, str(value))
+
+
+def merge_env(env):
+ new_env = os.environ.copy()
+ new_env.update(env)
+ return new_env
+
+
+def run_cmd(cmd, **kwargs):
+ """Run cmd (a list of arguments). Raise subprocess.CalledProcessError if
+ the command exits with non-zero. If the command returns successfully,
+ return 0."""
+ log_cmd(cmd, **kwargs)
+ # We update this after logging because we don't want all of the inherited
+ # env vars muddling up the output
+ if "env" in kwargs:
+ kwargs["env"] = merge_env(kwargs["env"])
+ try:
+ t = time.monotonic()
+ log.info("command: output:")
+ return subprocess.check_call(cmd, **kwargs)
+ except subprocess.CalledProcessError:
+ log.info("command: ERROR", exc_info=True)
+ raise
+ finally:
+ elapsed = time.monotonic() - t
+ log.info("command: END (%.2fs elapsed)\n", elapsed)
diff --git a/tools/update-verify/release/common/cached_download.sh b/tools/update-verify/release/common/cached_download.sh
new file mode 100644
index 0000000000..7cb3c42f8d
--- /dev/null
+++ b/tools/update-verify/release/common/cached_download.sh
@@ -0,0 +1,40 @@
+# this library works like a wrapper around wget, to allow downloads to be cached
+# so that if later the same url is retrieved, the entry from the cache will be
+# returned.
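+#
+# Example usage (illustrative; assumes this file has been sourced and that the
+# $retry helper has been defined, as download_builds.sh does):
+#   cached_download downloads/target.mar "https://example.com/path/to/target.mar"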
+
+pushd `dirname $0` &>/dev/null
+cache_dir="$(pwd)/cache"
+popd &>/dev/null
+
+# Deletes all files in the cache directory
+# We don't support folders or .dot(hidden) files
+# By not deleting the cache directory, it allows us to use Docker tmpfs mounts,
+# which are the only workaround to poor mount r/w performance on MacOS
+# Reference: https://forums.docker.com/t/file-access-in-mounted-volumes-extremely-slow-cpu-bound/8076/288
+clear_cache () {
+ rm -rf "${cache_dir}"/*
+}
+
+# download method - you pass a filename to save the file under, and the url to call
+cached_download () {
+ local output_file="${1}"
+ local url="${2}"
+
+ if fgrep -x "${url}" "${cache_dir}/urls.list" >/dev/null; then
+ echo "Retrieving '${url}' from cache..."
+ local line_number="$(fgrep -nx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+ cp "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache" "${output_file}"
+ else
+ echo "Downloading '${url}' and placing in cache..."
+ rm -f "${output_file}"
+ $retry wget -O "${output_file}" --progress=dot:giga --server-response "${url}" 2>&1
+ local exit_code=$?
+ if [ "${exit_code}" == 0 ]; then
+ echo "${url}" >> "${cache_dir}/urls.list"
+ local line_number="$(fgrep -nx "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+ cp "${output_file}" "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache"
+ else
+ return "${exit_code}"
+ fi
+ fi
+}
diff --git a/tools/update-verify/release/common/check_updates.sh b/tools/update-verify/release/common/check_updates.sh
new file mode 100644
index 0000000000..6479f2f1f1
--- /dev/null
+++ b/tools/update-verify/release/common/check_updates.sh
@@ -0,0 +1,124 @@
+check_updates () {
+ # called with 10 args - platform, source package, target package, locale, old updater boolean,
+ # a path to the updater binary to use for the tests, a file to write diffs to, the update channel,
+ # the accepted mar channel IDs (update-settings.ini values), and a flag to indicate the target is dep-signed
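+ #
+ # Example invocation (illustrative values only):
+ #   check_updates Linux_x86_64-gcc3 downloads/firefox-114.0.tar.bz2 downloads/firefox-115.0.tar.bz2 \
+ #     en-US 0 /path/to/updater diffs.log beta-localtest firefox-mozilla-beta false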
+ update_platform=$1
+ source_package=$2
+ target_package=$3
+ locale=$4
+ use_old_updater=$5
+ updater=$6
+ diff_file=$7
+ channel=$8
+ mar_channel_IDs=$9
+ update_to_dep=${10}
+
+ # cleanup
+ rm -rf source/*
+ rm -rf target/*
+
+ unpack_build $update_platform source "$source_package" $locale '' $mar_channel_IDs
+ if [ "$?" != "0" ]; then
+ echo "FAILED: cannot unpack_build $update_platform source $source_package"
+ return 1
+ fi
+ unpack_build $update_platform target "$target_package" $locale
+ if [ "$?" != "0" ]; then
+ echo "FAILED: cannot unpack_build $update_platform target $target_package"
+ return 1
+ fi
+
+ case $update_platform in
+ Darwin_ppc-gcc | Darwin_Universal-gcc3 | Darwin_x86_64-gcc3 | Darwin_x86-gcc3-u-ppc-i386 | Darwin_x86-gcc3-u-i386-x86_64 | Darwin_x86_64-gcc3-u-i386-x86_64 | Darwin_aarch64-gcc3)
+ platform_dirname="*.app"
+ ;;
+ WINNT*)
+ platform_dirname="bin"
+ ;;
+ Linux_x86-gcc | Linux_x86-gcc3 | Linux_x86_64-gcc3)
+ platform_dirname=`echo $product | tr '[A-Z]' '[a-z]'`
+ ;;
+ esac
+
+ if [ -f update/update.status ]; then rm update/update.status; fi
+ if [ -f update/update.log ]; then rm update/update.log; fi
+
+ if [ -d source/$platform_dirname ]; then
+ if [ `uname | cut -c-5` == "MINGW" ]; then
+ # windows
+ # change /c/path/to/pwd to c:\\path\\to\\pwd
+ four_backslash_pwd=$(echo $PWD | sed -e 's,^/\([a-zA-Z]\)/,\1:/,' | sed -e 's,/,\\\\,g')
+ two_backslash_pwd=$(echo $PWD | sed -e 's,^/\([a-zA-Z]\)/,\1:/,' | sed -e 's,/,\\,g')
+ cwd="$two_backslash_pwd\\source\\$platform_dirname"
+ update_abspath="$two_backslash_pwd\\update"
+ else
+ # not windows
+ # use ls here, because mac uses *.app, and we need to expand it
+ cwd=$(ls -d $PWD/source/$platform_dirname)
+ update_abspath="$PWD/update"
+ fi
+
+ cd_dir=$(ls -d ${PWD}/source/${platform_dirname})
+ cd "${cd_dir}"
+ set -x
+ "$updater" "$update_abspath" "$cwd" "$cwd" 0
+ set +x
+ cd ../..
+ else
+ echo "TEST-UNEXPECTED-FAIL: no dir in source/$platform_dirname"
+ return 1
+ fi
+
+ cat update/update.log
+ update_status=`cat update/update.status`
+
+ if [ "$update_status" != "succeeded" ]
+ then
+ echo "TEST-UNEXPECTED-FAIL: update status was not successful: $update_status"
+ return 1
+ fi
+
+ # If we were testing an OS X mar on Linux, the unpack step copied the
+ # precomplete file from Contents/Resources to the root of the install
+ # to ensure the Linux updater binary could find it. However, only the
+ # precomplete file in Contents/Resources was updated, which means
+ # the copied version in the root of the install will usually have some
+ # differences between the source and target. To prevent this false
+ # positive from failing the tests, we simply remove it before diffing.
+ # The precomplete file in Contents/Resources is still diffed, so we
+ # don't lose any coverage by doing this.
+ cd `echo "source/$platform_dirname"`
+ if [[ -f "Contents/Resources/precomplete" && -f "precomplete" ]]
+ then
+ rm "precomplete"
+ fi
+ cd ../..
+ cd `echo "target/$platform_dirname"`
+ if [[ -f "Contents/Resources/precomplete" && -f "precomplete" ]]
+ then
+ rm "precomplete"
+ fi
+ cd ../..
+
+ # If we are testing an OSX mar to update from a production-signed/notarized
+ # build to a dep-signed one, ignore Contents/CodeResources which won't be
+ # present in the target, to avoid spurious failures
+ if ${update_to_dep}; then
+ ignore_coderesources=--ignore-missing=Contents/CodeResources
+ else
+ ignore_coderesources=
+ fi
+
+ ../compare-directories.py source/${platform_dirname} target/${platform_dirname} ${channel} ${ignore_coderesources} > "${diff_file}"
+ diffErr=$?
+ cat "${diff_file}"
+ if [ $diffErr == 2 ]
+ then
+ echo "TEST-UNEXPECTED-FAIL: differences found after update"
+ return 1
+ elif [ $diffErr != 0 ]
+ then
+ echo "TEST-UNEXPECTED-FAIL: unknown error from diff: $diffErr"
+ return 3
+ fi
+}
diff --git a/tools/update-verify/release/common/download_builds.sh b/tools/update-verify/release/common/download_builds.sh
new file mode 100644
index 0000000000..e279c808db
--- /dev/null
+++ b/tools/update-verify/release/common/download_builds.sh
@@ -0,0 +1,36 @@
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+retry="$MY_DIR/../../../../mach python -m redo.cmd -s 1 -a 3"
+
+download_builds() {
+ # cleanup
+ mkdir -p downloads/
+ rm -rf downloads/*
+
+ source_url="$1"
+ target_url="$2"
+
+ if [ -z "$source_url" ] || [ -z "$target_url" ]
+ then
+ "download_builds usage: <source_url> <target_url>"
+ exit 1
+ fi
+
+ for url in "$source_url" "$target_url"
+ do
+ source_file=`basename "$url"`
+ if [ -f "$source_file" ]; then rm "$source_file"; fi
+ cd downloads
+ if [ -f "$source_file" ]; then rm "$source_file"; fi
+ cached_download "${source_file}" "${url}"
+ status=$?
+ if [ $status != 0 ]; then
+ echo "TEST-UNEXPECTED-FAIL: Could not download source $source_file from $url"
+ echo "skipping.."
+ cd ../
+ return $status
+ fi
+ cd ../
+ done
+}
diff --git a/tools/update-verify/release/common/download_mars.sh b/tools/update-verify/release/common/download_mars.sh
new file mode 100644
index 0000000000..d2dab107d2
--- /dev/null
+++ b/tools/update-verify/release/common/download_mars.sh
@@ -0,0 +1,105 @@
+download_mars () {
+ update_url="$1"
+ only="$2"
+ test_only="$3"
+ to_build_id="$4"
+ to_app_version="$5"
+ to_display_version="$6"
+
+ max_tries=5
+ try=1
+ # retrying until we get offered an update
+ while [ "$try" -le "$max_tries" ]; do
+ echo "Using $update_url"
+ # retrying until AUS gives us any response at all
+ cached_download update.xml "${update_url}"
+
+ echo "Got this response:"
+ cat update.xml
+ # If the first line after <updates> is </updates> then we have an
+ # empty snippet. Otherwise we're done
+ if [ "$(grep -A1 '<updates>' update.xml | tail -1)" != "</updates>" ]; then
+ break;
+ fi
+ echo "Empty response, sleeping"
+ sleep 5
+ try=$(($try+1))
+ done
+
+ echo; echo; # padding
+
+ update_line=`fgrep "<update " update.xml`
+ grep_rv=$?
+ if [ 0 -ne $grep_rv ]; then
+ echo "TEST-UNEXPECTED-FAIL: no <update/> found for $update_url"
+ return 1
+ fi
+ command=`echo $update_line | sed -e 's/^.*<update //' -e 's:>.*$::' -e 's:\&amp;:\&:g'`
+ eval "export $command"
+
+ if [ ! -z "$to_build_id" -a "$buildID" != "$to_build_id" ]; then
+ echo "TEST-UNEXPECTED-FAIL: expected buildID $to_build_id does not match actual $buildID"
+ return 1
+ fi
+
+ if [ ! -z "$to_display_version" -a "$displayVersion" != "$to_display_version" ]; then
+ echo "TEST-UNEXPECTED-FAIL: expected displayVersion $to_display_version does not match actual $displayVersion"
+ return 1
+ fi
+
+ if [ ! -z "$to_app_version" -a "$appVersion" != "$to_app_version" ]; then
+ echo "TEST-UNEXPECTED-FAIL: expected appVersion $to_app_version does not match actual $appVersion"
+ return 1
+ fi
+
+ mkdir -p update/
+ if [ -z $only ]; then
+ only="partial complete"
+ fi
+ for patch_type in $only
+ do
+ line=`fgrep "patch type=\"$patch_type" update.xml`
+ grep_rv=$?
+
+ if [ 0 -ne $grep_rv ]; then
+ echo "TEST-UNEXPECTED-FAIL: no $patch_type update found for $update_url"
+ return 1
+ fi
+
+ command=`echo $line | sed -e 's/^.*<patch //' -e 's:/>.*$::' -e 's:\&amp;:\&:g'`
+ eval "export $command"
+
+ if [ "$test_only" == "1" ]
+ then
+ echo "Testing $URL"
+ curl -s -I -L $URL
+ return
+ else
+ cached_download "update/${patch_type}.mar" "${URL}"
+ fi
+ if [ "$?" != 0 ]; then
+ echo "Could not download $patch_type!"
+ echo "from: $URL"
+ fi
+ actual_size=`perl -e "printf \"%d\n\", (stat(\"update/$patch_type.mar\"))[7]"`
+ actual_hash=`openssl dgst -$hashFunction update/$patch_type.mar | sed -e 's/^.*= //'`
+
+ if [ $actual_size != $size ]; then
+ echo "TEST-UNEXPECTED-FAIL: $patch_type from $update_url wrong size"
+ echo "TEST-UNEXPECTED-FAIL: update.xml size: $size"
+ echo "TEST-UNEXPECTED-FAIL: actual size: $actual_size"
+ return 1
+ fi
+
+ if [ $actual_hash != $hashValue ]; then
+ echo "TEST-UNEXPECTED-FAIL: $patch_type from $update_url wrong hash"
+ echo "TEST-UNEXPECTED-FAIL: update.xml hash: $hashValue"
+ echo "TEST-UNEXPECTED-FAIL: actual hash: $actual_hash"
+ return 1
+ fi
+
+ cp update/$patch_type.mar update/update.mar
+ echo $actual_size > update/$patch_type.size
+
+ done
+}
diff --git a/tools/update-verify/release/common/installdmg.ex b/tools/update-verify/release/common/installdmg.ex
new file mode 100755
index 0000000000..08bcf9a201
--- /dev/null
+++ b/tools/update-verify/release/common/installdmg.ex
@@ -0,0 +1,45 @@
+#!/usr/bin/expect
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is Mozilla Corporation Code.
+#
+# The Initial Developer of the Original Code is
+# Clint Talbert.
+# Portions created by the Initial Developer are Copyright (C) 2007
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Armen Zambrano Gasparnian <armenzg@mozilla.com>
+# Axel Hecht <l10n@mozilla.com>
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+#send_user $argv
+spawn hdiutil attach -readonly -mountroot /tmp -private -noautoopen [lindex $argv 0]
+expect {
+"byte" {send "G"; exp_continue}
+"END" {send "\r"; exp_continue}
+"Y/N?" {send "Y\r"; exp_continue}
+}
diff --git a/tools/update-verify/release/common/unpack-diskimage.sh b/tools/update-verify/release/common/unpack-diskimage.sh
new file mode 100755
index 0000000000..b647a69c4d
--- /dev/null
+++ b/tools/update-verify/release/common/unpack-diskimage.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is the installdmg.sh script from taols utilities
+#
+# The Initial Developer of the Original Code is
+# Mozilla Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2009
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Chris AtLee <catlee@mozilla.com>
+# Robert Kaiser <kairo@kairo.at>
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+
+# Unpack a disk image to a specified target folder
+#
+# Usage: unpack-diskimage <image_file>
+# <mountpoint>
+# <target_path>
+
+DMG_PATH=$1
+MOUNTPOINT=$2
+TARGETPATH=$3
+LOGFILE=unpack.output
+
+# How long to wait before giving up waiting for the mount to finish
+TIMEOUT=90
+
+# If the mount point already exists, then the previous run may not have cleaned
+# up properly. We should try to unmount it and remove its directory.
+if [ -d $MOUNTPOINT ]; then
+ echo "$MOUNTPOINT already exists, trying to clean up"
+ hdiutil detach $MOUNTPOINT -force
+ rm -rdfv $MOUNTPOINT
+fi
+
+# Install an on-exit handler that will unmount and remove the '$MOUNTPOINT' directory
+trap "{ if [ -d $MOUNTPOINT ]; then hdiutil detach $MOUNTPOINT -force; rm -rdfv $MOUNTPOINT; fi; }" EXIT
+
+mkdir -p $MOUNTPOINT
+
+hdiutil attach -verbose -noautoopen -mountpoint $MOUNTPOINT "$DMG_PATH" &> $LOGFILE
+# Wait for files to show up
+# hdiutil uses a helper process, diskimages-helper, which hasn't always finished
+# its work by the time hdiutil exits. So we wait until something shows up in the
+# mount point directory.
+i=0
+while [ "$(echo $MOUNTPOINT/*)" == "$MOUNTPOINT/*" ]; do
+ if [ $i -gt $TIMEOUT ]; then
+ echo "No files found, exiting"
+ exit 1
+ fi
+ sleep 1
+ i=$(expr $i + 1)
+done
+# Now we can copy everything out of the $MOUNTPOINT directory into the target directory
+rsync -av $MOUNTPOINT/* $MOUNTPOINT/.DS_Store $MOUNTPOINT/.background $MOUNTPOINT/.VolumeIcon.icns $TARGETPATH/ > $LOGFILE
+# sometimes hdiutil fails with "Resource busy"
+hdiutil detach $MOUNTPOINT || { sleep 10; \
+ if [ -d $MOUNTPOINT ]; then hdiutil detach $MOUNTPOINT -force; fi; }
+i=0
+while [ "$(echo $MOUNTPOINT/*)" != "$MOUNTPOINT/*" ]; do
+ if [ $i -gt $TIMEOUT ]; then
+ echo "Cannot umount, exiting"
+ exit 1
+ fi
+ sleep 1
+ i=$(expr $i + 1)
+done
+rm -rdf $MOUNTPOINT
diff --git a/tools/update-verify/release/common/unpack.sh b/tools/update-verify/release/common/unpack.sh
new file mode 100755
index 0000000000..3249936493
--- /dev/null
+++ b/tools/update-verify/release/common/unpack.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+function cleanup() {
+ hdiutil detach ${DEV_NAME} ||
+ { sleep 5 && hdiutil detach ${DEV_NAME} -force; };
+ return $1;
+};
+
+unpack_build () {
+ unpack_platform="$1"
+ dir_name="$2"
+ pkg_file="$3"
+ locale=$4
+ unpack_jars=$5
+ update_settings_string=$6
+
+ if [ ! -f "$pkg_file" ]; then
+ return 1
+ fi
+ mkdir -p $dir_name
+ pushd $dir_name > /dev/null
+ case $unpack_platform in
+ # $unpack_platform is either
+ # - a balrog platform name (from testing/mozharness/scripts/release/update-verify-config-creator.py)
+ # - a simple platform name (from tools/update-verify/release/updates/verify.sh)
+ mac|Darwin_*)
+ os=`uname`
+ # How we unpack a dmg differs depending on which platform we're on.
+ if [[ "$os" == "Darwin" ]]
+ then
+ cd ../
+ echo "installing $pkg_file"
+ ../common/unpack-diskimage.sh "$pkg_file" mnt $dir_name
+ else
+ 7z x ../"$pkg_file" > /dev/null
+ if [ `ls -1 | wc -l` -ne 1 ]
+ then
+ echo "Couldn't find .app package"
+ return 1
+ fi
+ unpack_dir=$(ls -1)
+ unpack_dir=$(ls -d "${unpack_dir}")
+ mv "${unpack_dir}"/*.app .
+ rm -rf "${unpack_dir}"
+ appdir=$(ls -1)
+ appdir=$(ls -d *.app)
+ # The updater guesses the location of these files based on
+ # its own target architecture, not the mar. If we're not
+ # unpacking mac-on-mac, we need to copy them so it can find
+ # them. It's important to copy (and not move), because when
+ # we diff the installer vs updated build afterwards, the
+ # installer version will have them in their original place.
+ cp "${appdir}/Contents/Resources/update-settings.ini" "${appdir}/update-settings.ini"
+ cp "${appdir}/Contents/Resources/precomplete" "${appdir}/precomplete"
+ fi
+ update_settings_file="${appdir}/update-settings.ini"
+ ;;
+ win32|WINNT_*)
+ 7z x ../"$pkg_file" > /dev/null
+ if [ -d localized ]
+ then
+ mkdir bin/
+ cp -rp nonlocalized/* bin/
+ cp -rp localized/* bin/
+ rm -rf nonlocalized
+ rm -rf localized
+ if [ $(find optional/ | wc -l) -gt 1 ]
+ then
+ cp -rp optional/* bin/
+ rm -rf optional
+ fi
+ elif [ -d core ]
+ then
+ mkdir bin/
+ cp -rp core/* bin/
+ rm -rf core
+ else
+ for file in *.xpi
+ do
+ unzip -o $file > /dev/null
+ done
+ unzip -o ${locale}.xpi > /dev/null
+ fi
+ update_settings_file='bin/update-settings.ini'
+ ;;
+ linux|Linux_*)
+ if `echo $pkg_file | grep -q "tar.gz"`
+ then
+ tar xfz ../"$pkg_file" > /dev/null
+ elif `echo $pkg_file | grep -q "tar.bz2"`
+ then
+ tar xfj ../"$pkg_file" > /dev/null
+ else
+ echo "Unknown package type for file: $pkg_file"
+ exit 1
+ fi
+ update_settings_file=`echo $product | tr '[A-Z]' '[a-z]'`'/update-settings.ini'
+ ;;
+ *)
+ echo "Unknown platform to unpack: $unpack_platform"
+ exit 1
+ esac
+
+ if [ ! -z $unpack_jars ]; then
+ for f in `find . -name '*.jar' -o -name '*.ja'`; do
+ unzip -o "$f" -d "$f.dir" > /dev/null
+ done
+ fi
+
+ if [ ! -z $update_settings_string ]; then
+ echo "Modifying update-settings.ini"
+ cat "${update_settings_file}" | sed -e "s/^ACCEPTED_MAR_CHANNEL_IDS.*/ACCEPTED_MAR_CHANNEL_IDS=${update_settings_string}/" > "${update_settings_file}.new"
+ diff -u "${update_settings_file}" "${update_settings_file}.new"
+ echo " "
+ rm "${update_settings_file}"
+ mv "${update_settings_file}.new" "${update_settings_file}"
+ fi
+
+ popd > /dev/null
+
+}
diff --git a/tools/update-verify/release/compare-directories.py b/tools/update-verify/release/compare-directories.py
new file mode 100755
index 0000000000..a45e78d62f
--- /dev/null
+++ b/tools/update-verify/release/compare-directories.py
@@ -0,0 +1,273 @@
+#! /usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import difflib
+import hashlib
+import logging
+import os
+import sys
+
+""" Define the transformations needed to make source + update == target
+
+Required:
+The files list describes the files which a transform may be used on.
+The 'side' is one of ('source', 'target') and defines where each transform is applied
+The 'channel_prefix' list controls which channels a transform may be used for, where a value of
+'beta' means all of beta, beta-localtest, beta-cdntest, etc.
+
+One or more:
+A 'deletion' specifies a start of line to match on, removing the whole line
+A 'substitution' is a list of full string to match and its replacement
+"""
+TRANSFORMS = [
+ # channel-prefs.js
+ {
+ # preprocessor comments, eg //@line 6 "/builds/worker/workspace/...
+ # this can be removed once each channel has a watershed above 59.0b2 (from bug 1431342)
+ "files": [
+ "defaults/pref/channel-prefs.js",
+ "Contents/Resources/defaults/pref/channel-prefs.js",
+ ],
+ "channel_prefix": ["aurora", "beta", "release", "esr"],
+ "side": "source",
+ "deletion": '//@line 6 "',
+ },
+ {
+ # updates from a beta to an RC build, the latter specifies the release channel
+ "files": [
+ "defaults/pref/channel-prefs.js",
+ "Contents/Resources/defaults/pref/channel-prefs.js",
+ ],
+ "channel_prefix": ["beta"],
+ "side": "target",
+ "substitution": [
+ 'pref("app.update.channel", "release");\n',
+ 'pref("app.update.channel", "beta");\n',
+ ],
+ },
+ {
+ # updates from an RC to a beta build
+ "files": [
+ "defaults/pref/channel-prefs.js",
+ "Contents/Resources/defaults/pref/channel-prefs.js",
+ ],
+ "channel_prefix": ["beta"],
+ "side": "source",
+ "substitution": [
+ 'pref("app.update.channel", "release");\n',
+ 'pref("app.update.channel", "beta");\n',
+ ],
+ },
+ {
+ # Warning comments from bug 1576546
+ # When updating from a pre-70.0 build to 70.0+ this removes the new comments in
+ # the target side. In the 70.0+ --> 70.0+ case with a RC we won't need this, and
+ # the channel munging above will make channel-prefs.js identical, allowing the code
+ # to break before applying this transform.
+ "files": [
+ "defaults/pref/channel-prefs.js",
+ "Contents/Resources/defaults/pref/channel-prefs.js",
+ ],
+ "channel_prefix": ["aurora", "beta", "release", "esr"],
+ "side": "target",
+ "deletion": "//",
+ },
+ # update-settings.ini
+ {
+ # updates from a beta to an RC build, the latter specifies the release channel
+ # on mac, we actually have both files. The second location is the real
+ # one but we copy to the first to run the linux64 updater
+ "files": ["update-settings.ini", "Contents/Resources/update-settings.ini"],
+ "channel_prefix": ["beta"],
+ "side": "target",
+ "substitution": [
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release\n",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release\n",
+ ],
+ },
+ {
+ # updates from an RC to a beta build
+ # on mac, we only need to modify the legit file this time. unpack_build
+ # handles the copy for the updater in both source and target
+ "files": ["Contents/Resources/update-settings.ini"],
+ "channel_prefix": ["beta"],
+ "side": "source",
+ "substitution": [
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release\n",
+ "ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release\n",
+ ],
+ },
+]
+
+
+def walk_dir(path):
+ all_files = []
+ all_dirs = []
+
+ for root, dirs, files in os.walk(path):
+ all_dirs.extend([os.path.join(root, d) for d in dirs])
+ all_files.extend([os.path.join(root, f) for f in files])
+
+ # trim off directory prefix for easier comparison
+ all_dirs = [d[len(path) + 1 :] for d in all_dirs]
+ all_files = [f[len(path) + 1 :] for f in all_files]
+
+ return all_dirs, all_files
+
+
+def compare_listings(
+ source_list, target_list, label, source_dir, target_dir, ignore_missing=None
+):
+ obj1 = set(source_list)
+ obj2 = set(target_list)
+ difference_found = False
+ ignore_missing = ignore_missing or ()
+
+ left_diff = obj1 - obj2
+ if left_diff:
+ if left_diff - set(ignore_missing):
+ _log = logging.error
+ difference_found = True
+ else:
+ _log = logging.warning
+ _log("Ignoring missing files due to ignore_missing")
+
+ _log("{} only in {}:".format(label, source_dir))
+ for d in sorted(left_diff):
+ _log(" {}".format(d))
+
+ right_diff = obj2 - obj1
+ if right_diff:
+ logging.error("{} only in {}:".format(label, target_dir))
+ for d in sorted(right_diff):
+ logging.error(" {}".format(d))
+ difference_found = True
+
+ return difference_found
+
+
+def hash_file(filename):
+ h = hashlib.sha256()
+ with open(filename, "rb", buffering=0) as f:
+ for b in iter(lambda: f.read(128 * 1024), b""):
+ h.update(b)
+ return h.hexdigest()
+
+
+def compare_common_files(files, channel, source_dir, target_dir):
+ difference_found = False
+ for filename in files:
+ source_file = os.path.join(source_dir, filename)
+ target_file = os.path.join(target_dir, filename)
+
+ if os.stat(source_file).st_size != os.stat(target_file).st_size or hash_file(
+ source_file
+ ) != hash_file(target_file):
+ logging.info("Difference found in {}".format(filename))
+ file_contents = {
+ "source": open(source_file).readlines(),
+ "target": open(target_file).readlines(),
+ }
+
+ transforms = [
+ t
+ for t in TRANSFORMS
+ if filename in t["files"]
+ and channel.startswith(tuple(t["channel_prefix"]))
+ ]
+ logging.debug(
+ "Got {} transform(s) to consider for {}".format(
+ len(transforms), filename
+ )
+ )
+ for transform in transforms:
+ side = transform["side"]
+
+ if "deletion" in transform:
+ d = transform["deletion"]
+ logging.debug(
+ "Trying deleting lines starting {} from {}".format(d, side)
+ )
+ file_contents[side] = [
+ l for l in file_contents[side] if not l.startswith(d)
+ ]
+
+ if "substitution" in transform:
+ r = transform["substitution"]
+ logging.debug("Trying replacement for {} in {}".format(r, side))
+ file_contents[side] = [
+ l.replace(r[0], r[1]) for l in file_contents[side]
+ ]
+
+ if file_contents["source"] == file_contents["target"]:
+ logging.info("Transforms removed all differences")
+ break
+
+ if file_contents["source"] != file_contents["target"]:
+ difference_found = True
+ logging.error(
+ "{} still differs after transforms, residual diff:".format(filename)
+ )
+ for l in difflib.unified_diff(
+ file_contents["source"], file_contents["target"]
+ ):
+ logging.error(l.rstrip())
+
+ return difference_found
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ "Compare two directories recursively, with transformations for expected diffs"
+ )
+ parser.add_argument("source", help="Directory containing updated Firefox")
+ parser.add_argument("target", help="Directory containing expected Firefox")
+ parser.add_argument("channel", help="Update channel used")
+ parser.add_argument(
+ "--verbose", "-v", action="store_true", help="Enable verbose logging"
+ )
+ parser.add_argument(
+ "--ignore-missing",
+ action="append",
+ metavar="<path>",
+ help="Ignore absence of <path> in the target",
+ )
+
+ args = parser.parse_args()
+ level = logging.INFO
+ if args.verbose:
+ level = logging.DEBUG
+ logging.basicConfig(level=level, format="%(message)s", stream=sys.stdout)
+
+ source = args.source
+ target = args.target
+ if not os.path.exists(source) or not os.path.exists(target):
+ logging.error("Source and/or target directory doesn't exist")
+ sys.exit(3)
+
+ logging.info("Comparing {} with {}...".format(source, target))
+ source_dirs, source_files = walk_dir(source)
+ target_dirs, target_files = walk_dir(target)
+
+ dir_list_diff = compare_listings(
+ source_dirs, target_dirs, "Directories", source, target
+ )
+ file_list_diff = compare_listings(
+ source_files, target_files, "Files", source, target, args.ignore_missing
+ )
+ file_diff = compare_common_files(
+ set(source_files) & set(target_files), args.channel, source, target
+ )
+
+ if file_diff:
+ # Use status of 2 since python will use 1 if there is an error running the script
+ sys.exit(2)
+ elif dir_list_diff or file_list_diff:
+ # this has traditionally been a WARN, but we don't have files on one
+ # side anymore so let's FAIL
+ sys.exit(2)
+ else:
+ logging.info("No differences found")
diff --git a/tools/update-verify/release/final-verification.sh b/tools/update-verify/release/final-verification.sh
new file mode 100755
index 0000000000..879c64697f
--- /dev/null
+++ b/tools/update-verify/release/final-verification.sh
@@ -0,0 +1,519 @@
+#!/bin/bash
+
+function usage {
+ log "In the updates subdirectory of the directory this script is in,"
+ log "there are a bunch of config files. You should call this script,"
+ log "passing the names of one or more of those files as parameters"
+ log "to this script."
+ log ""
+ log "This will validate that the update.xml files all exist for the"
+ log "given config file, and that they report the correct file sizes"
+ log "for the associated mar files, and that the associated mar files"
+ log "are available on the update servers."
+ log ""
+ log "This script will spawn multiple curl processes to query the"
+ log "snippets (update.xml file downloads) and the download urls in"
+ log "parallel. The number of parallel curl processes can be managed"
+ log "with the -p MAX_PROCS option."
+ log ""
+ log "Only the first three bytes of the mar files are downloaded"
+ log "using curl -r 0-2 option to save time. GET requests are issued"
+ log "rather than HEAD requests, since Akamai (one of our CDN"
+ log "partners) caches GET and HEAD requests separately - therefore"
+ log "they can be out-of-sync, and it is important that we validate"
+ log "that the GET requests return the expected results."
+ log ""
+ log "Please note this script can run on linux and OS X. It has not"
+ log "been tested on Windows, but may also work. It can be run"
+ log "locally, and does not require access to the mozilla vpn or"
+ log "any other special network, since the update servers are"
+ log "available over the internet. However, it does require an"
+ log "up-to-date checkout of the tools repository, as the updates/"
+ log "subfolder changes over time, and reflects the currently"
+ log "available updates. It makes no changes to the update servers"
+ log "so there is no harm in running it. It simply generates a"
+ log "report. However, please try to avoid hammering the update"
+ log "servers aggressively, e.g. with thousands of parallel"
+ log "processes. For example, feel free to run the examples below,"
+ log "first making sure that your source code checkout is up-to-"
+ log "date on your own machine, to get the latest configs in the"
+ log "updates/ subdirectory."
+ log ""
+ log "Usage:"
+ log " $(basename "${0}") [-p MAX_PROCS] config1 [config2 config3 config4 ...]"
+ log " $(basename "${0}") -h"
+ log ""
+ log "Examples:"
+ log " 1. $(basename "${0}") -p 128 mozBeta-thunderbird-linux.cfg mozBeta-thunderbird-linux64.cfg"
+ log " 2. $(basename "${0}") mozBeta-thunderbird-linux64.cfg"
+}
+
+function log {
+ echo "$(date): ${1}"
+}
+
+# subprocesses don't log in real time, due to synchronisation
+# issues which can cause log entries to overwrite each other.
+# therefore this function outputs log entries written to
+# temporary files on disk, and then deletes them.
+function flush_logs {
+ ls -1rt "${TMPDIR}" | grep '^log\.' | while read LOG
+ do
+ cat "${TMPDIR}/${LOG}"
+ rm "${TMPDIR}/${LOG}"
+ done
+}
+
+# this function takes an update.xml url as an argument
+# and then logs a list of config files and their line
+# numbers, that led to this update.xml url being tested
+function show_cfg_file_entries {
+ local update_xml_url="${1}"
+ cat "${update_xml_urls}" | cut -f1 -d' ' | grep -Fn "${update_xml_url}" | sed 's/:.*//' | while read match_line_no
+ do
+ cfg_file="$(sed -n -e "${match_line_no}p" "${update_xml_urls}" | cut -f3 -d' ')"
+ cfg_line_no="$(sed -n -e "${match_line_no}p" "${update_xml_urls}" | cut -f4 -d' ')"
+ log " ${cfg_file} line ${cfg_line_no}: $(sed -n -e "${cfg_line_no}p" "${cfg_file}")"
+ done
+}
+
+# this function takes a mar url as an argument and then
+# logs information about which update.xml urls referenced
+# this mar url, and which config files referenced those
+# mar urls - so you have a full understanding of why this
+# mar url was ever tested
+function show_update_xml_entries {
+ local mar_url="${1}"
+ grep -Frl "${mar_url}" "${TMPDIR}" | grep '/update_xml_to_mar\.' | while read update_xml_to_mar
+ do
+ mar_size="$(cat "${update_xml_to_mar}" | cut -f2 -d' ')"
+ update_xml_url="$(cat "${update_xml_to_mar}" | cut -f3 -d' ')"
+ patch_type="$(cat "${update_xml_to_mar}" | cut -f4 -d' ')"
+ update_xml_actual_url="$(cat "${update_xml_to_mar}" | cut -f5 -d' ')"
+ log " ${update_xml_url}"
+ [ -n "${update_xml_actual_url}" ] && log " which redirected to: ${update_xml_actual_url}"
+ log " This contained an entry for:"
+ log " patch type: ${patch_type}"
+ log " mar size: ${mar_size}"
+ log " mar url: ${mar_url}"
+ log " The update.xml url above was retrieved because of the following cfg file entries:"
+ show_cfg_file_entries "${update_xml_url}" | sed 's/ / /'
+ done
+}
+
+echo -n "$(date): Command called:"
+for ((INDEX=0; INDEX<=$#; INDEX+=1))
+do
+ echo -n " '${!INDEX}'"
+done
+echo ''
+log "From directory: '$(pwd)'"
+log ''
+log "Parsing arguments..."
+
+# Max procs lowered in bug 894368 to try to avoid spurious failures
+MAX_PROCS=48
+BAD_ARG=0
+BAD_FILE=0
+while getopts p:h OPT
+do
+ case "${OPT}" in
+ p) MAX_PROCS="${OPTARG}";;
+ h) usage
+ exit;;
+ *) BAD_ARG=1;;
+ esac
+done
+shift "$((OPTIND - 1))"
+
+# invalid option specified
+[ "${BAD_ARG}" == 1 ] && exit 66
+
+log "Checking one or more config files have been specified..."
+if [ $# -lt 1 ]
+then
+ usage
+ log "ERROR: You must specify one or more config files"
+ exit 64
+fi
+
+log "Checking whether MAX_PROCS is a number..."
+if ! let x=MAX_PROCS 2>/dev/null
+then
+ usage
+ log "ERROR: MAX_PROCS must be a number (-p option); you specified '${MAX_PROCS}' - this is not a number."
+ exit 65
+fi
+
+# config files are in updates subdirectory below this script
+if ! cd "$(dirname "${0}")/updates" 2>/dev/null
+then
+ log "ERROR: Cannot cd into '$(dirname "${0}")/updates' from '$(pwd)'"
+ exit 68
+fi
+
+log "Checking specified config files (and downloading them if necessary):"
+log ''
+configs=()
+for file in "${@}"
+do
+ if [[ ${file} == http* ]]
+ then
+ log " Downloading config file '${file}'"
+ cfg=$(mktemp)
+ curl -fL --retry 5 --compressed "${file}" > "$cfg"
+ if [ "$?" != 0 ]; then
+ log "Error downloading config file '${file}'"
+ BAD_FILE=1
+ else
+ log " * '${file}' ok, downloaded to '${cfg}'"
+ configs+=($cfg)
+ fi
+ elif [ -f "${file}" ]
+ then
+ log " * '${file}' ok"
+ configs+=(${file})
+ else
+ log " * '${file}' missing"
+ BAD_FILE=1
+ fi
+done
+log ''
+
+# invalid config specified
+if [ "${BAD_FILE}" == 1 ]
+then
+ log "ERROR: Unable to download config file(s) or config files are missing from repo - see above"
+ exit 67
+fi
+
+log "All checks completed successfully."
+log ''
+log "Starting stopwatch..."
+log ''
+log "Please be aware output will now be buffered up, and only displayed after completion."
+log "Therefore do not be alarmed if you see no output for several minutes."
+log "See https://bugzilla.mozilla.org/show_bug.cgi?id=863602#c5 for details".
+log ''
+
+START_TIME="$(date +%s)"
+
+# Create a temporary directory for all temp files, that can easily be
+# deleted afterwards. See https://bugzilla.mozilla.org/show_bug.cgi?id=863602
+# to understand why we write everything in distinct temporary files rather
+# than writing to standard error/standard out or files shared across
+# processes.
+# Need to unset TMPDIR first since it affects mktemp behaviour on next line
+unset TMPDIR
+export TMPDIR="$(mktemp -d -t final_verification.XXXXXXXXXX)"
+
+# this temporary file will list all update urls that need to be checked, in this format:
+# <update url> <comma separated list of patch types> <cfg file that requests it> <line number of config file>
+# e.g.
+# https://aus4.mozilla.org/update/3/Firefox/18.0/20130104154748/Linux_x86_64-gcc3/zh-TW/releasetest/default/default/default/update.xml?force=1 complete moz20-firefox-linux64-major.cfg 3
+# https://aus4.mozilla.org/update/3/Firefox/18.0/20130104154748/Linux_x86_64-gcc3/zu/releasetest/default/default/default/update.xml?force=1 complete moz20-firefox-linux64.cfg 7
+# https://aus4.mozilla.org/update/3/Firefox/19.0/20130215130331/Linux_x86_64-gcc3/ach/releasetest/default/default/default/update.xml?force=1 complete,partial moz20-firefox-linux64-major.cfg 11
+# https://aus4.mozilla.org/update/3/Firefox/19.0/20130215130331/Linux_x86_64-gcc3/af/releasetest/default/default/default/update.xml?force=1 complete,partial moz20-firefox-linux64.cfg 17
+update_xml_urls="$(mktemp -t update_xml_urls.XXXXXXXXXX)"
+
+####################################################################################
+# And now a summary of all temp files that will get generated during this process...
+#
+# 1) mktemp -t failure.XXXXXXXXXX
+#
+# Each failure will generate a one line temp file, which is a space separated
+# output of the error code, and the instance data for the failure.
+# e.g.
+#
+# PATCH_TYPE_MISSING https://aus4.mozilla.org/update/3/Firefox/4.0b12/20110222205441/Linux_x86-gcc3/dummy-locale/releasetest/update.xml?force=1 complete https://aus4.mozilla.org/update/3/Firefox/4.0b12/20110222205441/Linux_x86-gcc3/dummy-locale/releasetest/default/default/default/update.xml?force=1
+#
+# 2) mktemp -t update_xml_to_mar.XXXXXXXXXX
+#
+# For each mar url referenced in an update.xml file, a temp file will be created to store the
+# association between update.xml url and mar url. This is later used (e.g. in function
+# show_update_xml_entries) to trace back the update.xml url(s) that led to a mar url being
+# tested. It is also used to keep a full list of mar urls to test.
+# e.g.
+#
+# <mar url> <mar size> <update.xml url> <patch type> <update.xml redirection url, if HTTP 302 returned>
+#
+# 3) mktemp -t log.XXXXXXXXXX
+#
+# For each log message logged by a subprocesses, we will create a temp log file with the
+# contents of the log message, since we cannot safely output the log message from the subprocess
+# and guarantee that it will be correctly output. By buffering log output in individual log files
+# we guarantee that log messages will not interfere with each other. We then flush them when all
+# forked subprocesses have completed.
+#
+# 4) mktemp -t mar_headers.XXXXXXXXXX
+#
+# We keep a copy of the mar url http headers retrieved in one file per mar url.
+#
+# 5) mktemp -t update.xml.headers.XXXXXXXXXX
+#
+# We keep a copy of the update.xml http headers retrieved in one file per update.xml url.
+#
+# 6) mktemp -t update.xml.XXXXXXXXXX
+#
+# We keep a copy of each update.xml file retrieved in individual files.
+####################################################################################
+
+
+# generate full list of update.xml urls, followed by patch types,
+# as defined in the specified config files - and write into "${update_xml_urls}" file
+aus_server="https://aus5.mozilla.org"
+for cfg_file in "${configs[@]}"
+do
+ line_no=0
+ sed -e 's/localtest/cdntest/' "${cfg_file}" | while read config_line
+ do
+ let line_no++
+ # to avoid contamination between iterations, reset variables
+ # each loop in case they are not declared
+ # aus_server is not "cleared" each iteration - to be consistent with previous behaviour of old
+ # final-verification.sh script - might be worth reviewing if we really want this behaviour
+ release="" product="" platform="" build_id="" locales="" channel="" from="" patch_types="complete"
+ eval "${config_line}"
+ for locale in ${locales}
+ do
+ echo "${aus_server}/update/3/$product/$release/$build_id/$platform/$locale/$channel/default/default/default/update.xml?force=1" "${patch_types// /,}" "${cfg_file}" "${line_no}"
+ done
+ done
+done > "${update_xml_urls}"
+
+# download update.xml files and grab the mar urls from downloaded file for each patch type required
+cat "${update_xml_urls}" | cut -f1-2 -d' ' | sort -u | xargs -n2 "-P${MAX_PROCS}" ../get-update-xml.sh
+if [ "$?" != 0 ]; then
+ flush_logs
+ log "Error generating update requests"
+ exit 70
+fi
+
+flush_logs
+
+# download http header for each mar url
+find "${TMPDIR}" -name 'update_xml_to_mar.*' -type f | xargs cat | cut -f1-2 -d' ' | sort -u | xargs -n2 "-P${MAX_PROCS}" ../test-mar-url.sh
+if [ "$?" != 0 ]; then
+ flush_logs
+ log "Error HEADing mar urls"
+ exit 71
+fi
+
+flush_logs
+
+log ''
+log 'Stopping stopwatch...'
+STOP_TIME="$(date +%s)"
+
+number_of_failures="$(find "${TMPDIR}" -name 'failure.*' -type f | wc -l | sed 's/ //g')"
+number_of_update_xml_urls="$(cat "${update_xml_urls}" | cut -f1 -d' ' | sort -u | wc -l | sed 's/ //g')"
+number_of_mar_urls="$(find "${TMPDIR}" -name "update_xml_to_mar.*" | xargs cat | cut -f1 -d' ' | sort -u | wc -l | sed 's/ //g')"
+
+if [ "${number_of_failures}" -eq 0 ]
+then
+ log
+ log "All tests passed successfully."
+ log
+ exit_code=0
+else
+ log ''
+ log '===================================='
+ [ "${number_of_failures}" -gt 1 ] && log "${number_of_failures} FAILURES" || log '1 FAILURE'
+ failure=0
+ ls -1tr "${TMPDIR}" | grep '^failure\.' | while read failure_file
+ do
+ while read failure_code entry1 entry2 entry3 entry4 entry5 entry6 entry7
+ do
+ log '===================================='
+ log ''
+ case "${failure_code}" in
+
+ UPDATE_XML_UNAVAILABLE)
+ update_xml_url="${entry1}"
+ update_xml="${entry2}"
+ update_xml_headers="${entry3}"
+ update_xml_debug="${entry4}"
+ update_xml_curl_exit_code="${entry5}"
+ log "FAILURE $((++failure)): Update xml file not available"
+ log ""
+ log " Download url: ${update_xml_url}"
+ log " Curl returned exit code: ${update_xml_curl_exit_code}"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_headers}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_debug}"
+ log ""
+ log " The returned update.xml file was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml}"
+ log ""
+ log " This url was tested because of the following cfg file entries:"
+ show_cfg_file_entries "${update_xml_url}"
+ log ""
+
+ ;;
+
+ UPDATE_XML_REDIRECT_FAILED)
+ update_xml_url="${entry1}"
+ update_xml_actual_url="${entry2}"
+ update_xml="${entry3}"
+ update_xml_headers="${entry4}"
+ update_xml_debug="${entry5}"
+ update_xml_curl_exit_code="${entry6}"
+ log "FAILURE $((++failure)): Update xml file not available at *redirected* location"
+ log ""
+ log " Download url: ${update_xml_url}"
+ log " Redirected to: ${update_xml_actual_url}"
+ log " It could not be downloaded from this url."
+ log " Curl returned exit code: ${update_xml_curl_exit_code}"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_headers}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_debug}"
+ log ""
+ log " The returned update.xml file was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml}"
+ log ""
+ log " This url was tested because of the following cfg file entries:"
+ show_cfg_file_entries "${update_xml_url}"
+ log ""
+ ;;
+
+ PATCH_TYPE_MISSING)
+ update_xml_url="${entry1}"
+ patch_type="${entry2}"
+ update_xml="${entry3}"
+ update_xml_headers="${entry4}"
+ update_xml_debug="${entry5}"
+ update_xml_actual_url="${entry6}"
+ log "FAILURE $((++failure)): Patch type '${patch_type}' not present in the downloaded update.xml file."
+ log ""
+ log " Update xml file downloaded from: ${update_xml_url}"
+ [ -n "${update_xml_actual_url}" ] && log " This redirected to the download url: ${update_xml_actual_url}"
+ log " Curl returned exit code: 0 (success)"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_headers}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml_debug}"
+ log ""
+ log " The returned update.xml file was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${update_xml}"
+ log ""
+ log " This url and patch type combination was tested due to the following cfg file entries:"
+ show_cfg_file_entries "${update_xml_url}"
+ log ""
+ ;;
+
+ NO_MAR_FILE)
+ mar_url="${entry1}"
+ mar_headers_file="${entry2}"
+ mar_headers_debug_file="${entry3}"
+ mar_file_curl_exit_code="${entry4}"
+ mar_actual_url="${entry5}"
+ log "FAILURE $((++failure)): Could not retrieve mar file"
+ log ""
+ log " Mar file url: ${mar_url}"
+ [ -n "${mar_actual_url}" ] && log " This redirected to: ${mar_actual_url}"
+ log " The mar file could not be downloaded from this location."
+ log " Curl returned exit code: ${mar_file_curl_exit_code}"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_file}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_debug_file}"
+ log ""
+ log " The mar download was tested because it was referenced in the following update xml file(s):"
+ show_update_xml_entries "${mar_url}"
+ log ""
+ ;;
+
+ MAR_FILE_WRONG_SIZE)
+ mar_url="${entry1}"
+ mar_required_size="${entry2}"
+ mar_actual_size="${entry3}"
+ mar_headers_file="${entry4}"
+ mar_headers_debug_file="${entry5}"
+ mar_file_curl_exit_code="${entry6}"
+ mar_actual_url="${entry7}"
+ log "FAILURE $((++failure)): Mar file is wrong size"
+ log ""
+ log " Mar file url: ${mar_url}"
+ [ -n "${mar_actual_url}" ] && log " This redirected to: ${mar_actual_url}"
+ log " The http header of the mar file url says that the mar file is ${mar_actual_size} bytes."
+ log " One or more of the following update.xml file(s) says that the file should be ${mar_required_size} bytes."
+ log ""
+ log " These are the update xml file(s) that referenced this mar:"
+ show_update_xml_entries "${mar_url}"
+ log ""
+ log " Curl returned exit code: ${mar_file_curl_exit_code}"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_file}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_debug_file}"
+ log ""
+ ;;
+
+ BAD_HTTP_RESPONSE_CODE_FOR_MAR)
+ mar_url="${entry1}"
+ mar_headers_file="${entry2}"
+ mar_headers_debug_file="${entry3}"
+ mar_file_curl_exit_code="${entry4}"
+ mar_actual_url="${entry5}"
+ http_response_code="$(sed -e "s/$(printf '\r')//" -n -e '/^HTTP\//p' "${mar_headers_file}" | tail -1)"
+ log "FAILURE $((++failure)): '${http_response_code}' for mar file"
+ log ""
+ log " Mar file url: ${mar_url}"
+ [ -n "${mar_actual_url}" ] && log " This redirected to: ${mar_actual_url}"
+ log ""
+ log " These are the update xml file(s) that referenced this mar:"
+ show_update_xml_entries "${mar_url}"
+ log ""
+ log " Curl returned exit code: ${mar_file_curl_exit_code}"
+ log ""
+ log " The HTTP headers were:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_file}"
+ log ""
+ log " The full curl debug output was:"
+ sed -e "s/$(printf '\r')//" -e "s/^/$(date): /" -e '$a\' "${mar_headers_debug_file}"
+ log ""
+ ;;
+
+ *)
+ log "ERROR: Unknown failure code - '${failure_code}'"
+ log "ERROR: This is a serious bug in this script."
+ log "ERROR: Only known failure codes are: UPDATE_XML_UNAVAILABLE, UPDATE_XML_REDIRECT_FAILED, PATCH_TYPE_MISSING, NO_MAR_FILE, MAR_FILE_WRONG_SIZE, BAD_HTTP_RESPONSE_CODE_FOR_MAR"
+ log ""
+ log "FAILURE $((++failure)): Data from failure is: ${entry1} ${entry2} ${entry3} ${entry4} ${entry5} ${entry6}"
+ log ""
+ ;;
+
+ esac
+ done < "${TMPDIR}/${failure_file}"
+ done
+ exit_code=1
+fi
+
+
+log ''
+log '===================================='
+log 'KEY STATS'
+log '===================================='
+log ''
+log "Config files scanned: ${#@}"
+log "Update xml files downloaded and parsed: ${number_of_update_xml_urls}"
+log "Unique mar urls found: ${number_of_mar_urls}"
+log "Failures: ${number_of_failures}"
+log "Parallel processes used (maximum limit): ${MAX_PROCS}"
+log "Execution time: $((STOP_TIME-START_TIME)) seconds"
+log ''
+
+rm -rf "${TMPDIR}"
+exit ${exit_code}
diff --git a/tools/update-verify/release/get-update-xml.sh b/tools/update-verify/release/get-update-xml.sh
new file mode 100755
index 0000000000..4c1fa724a8
--- /dev/null
+++ b/tools/update-verify/release/get-update-xml.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+update_xml_url="${1}"
+patch_types="${2}"
+update_xml="$(mktemp -t update.xml.XXXXXXXXXX)"
+update_xml_headers="$(mktemp -t update.xml.headers.XXXXXXXXXX)"
+update_xml_debug="$(mktemp -t update.xml.debug.XXXXXXXXXX)"
+curl --retry 50 --retry-max-time 300 -s --show-error -D "${update_xml_headers}" -L -v -H "Cache-Control: max-stale=0" "${update_xml_url}" > "${update_xml}" 2>"${update_xml_debug}"
+update_xml_curl_exit_code=$?
+if [ "${update_xml_curl_exit_code}" == 0 ]
+then
+ update_xml_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${update_xml_headers}" | tail -1)"
+ [ -n "${update_xml_actual_url}" ] && update_xml_url_with_redirects="${update_xml_url} => ${update_xml_actual_url}" || update_xml_url_with_redirects="${update_xml_url}"
+ echo "$(date): Downloaded update.xml file from ${update_xml_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+ for patch_type in ${patch_types//,/ }
+ do
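+        # Illustrative <patch> element the sed below matches (attribute order as assumed by the pattern; URL is hypothetical):
+        #   <patch type="complete" URL="https://download.example.com/firefox-115.0.complete.mar" size="12345678"/>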
+ mar_url_and_size="$(sed -e 's/\&amp;/\&/g' -n -e 's/.*<patch .*type="'"${patch_type}"'".* URL="\([^"]*\)".*size="\([^"]*\)".*/\1 \2/p' "${update_xml}" | tail -1)"
+ if [ -z "${mar_url_and_size}" ]
+ then
+ echo "$(date): FAILURE: No patch type '${patch_type}' found in update.xml from ${update_xml_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "PATCH_TYPE_MISSING ${update_xml_url} ${patch_type} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+ else
+ echo "$(date): Mar url and file size for patch type '${patch_type}' extracted from ${update_xml_url_with_redirects} (${mar_url_and_size})" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "${mar_url_and_size} ${update_xml_url} ${patch_type} ${update_xml_actual_url}" > "$(mktemp -t update_xml_to_mar.XXXXXXXXXX)"
+ fi
+ done
+else
+    # Extract any redirect target from the headers so the redirect-failure case below is actually reachable
+    update_xml_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${update_xml_headers}" | tail -1)"
+    if [ -z "${update_xml_actual_url}" ]
+    then
+ echo "$(date): FAILURE: Could not retrieve update.xml from ${update_xml_url} for patch type(s) '${patch_types}'" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "UPDATE_XML_UNAVAILABLE ${update_xml_url} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_curl_exit_code}" > "$(mktemp -t failure.XXXXXXXXXX)"
+ else
+ echo "$(date): FAILURE: update.xml from ${update_xml_url} redirected to ${update_xml_actual_url} but could not retrieve update.xml from here" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "UPDATE_XML_REDIRECT_FAILED ${update_xml_url} ${update_xml_actual_url} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_curl_exit_code}" > "$(mktemp -t failure.XXXXXXXXXX)"
+ fi
+fi
diff --git a/tools/update-verify/release/mar_certs/README b/tools/update-verify/release/mar_certs/README
new file mode 100644
index 0000000000..dd931ef1d3
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/README
@@ -0,0 +1,29 @@
+These certificates are imported from mozilla-central (https://hg.mozilla.org/mozilla-central/file/tip/toolkit/mozapps/update/updater)
+and used to support staging update verify jobs. These jobs end up replacing the certificates within the binaries
+(through a binary search and replace); the certificates must all be the same length for this to work correctly. If we recreate
+these certificates and the resulting public certificates are no longer the same length, the commonName may be
+changed to line them up again. https://github.com/google/der-ascii is a useful tool for doing this. For example:
+
+To convert the certificate to ascii:
+der2ascii -i dep1.der -o dep1.ascii
+
+Then use your favourite editor to change the commonName field. That block will look something like:
+ SEQUENCE {
+ SET {
+ SEQUENCE {
+ # commonName
+ OBJECT_IDENTIFIER { 2.5.4.3 }
+ PrintableString { "CI MAR signing key 1" }
+ }
+ }
+ }
+
+You can pad the PrintableString with spaces to increase the length of the cert (1 space = 1 byte).
+
+Then, convert back to der:
+ascii2der -i dep1.ascii -o newdep1.der
+
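+As a quick sanity check, you can confirm that the padded certificate ends up the same length as the
+original before using it (illustrative only; any byte-count tool works):
+
+wc -c dep1.der newdep1.der
+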
+The certificates in the sha1 subdirectory are from
+https://hg.mozilla.org/mozilla-central/file/0fcbe72581bc/toolkit/mozapps/update/updater
+which are the SHA-1 certs from before they were updated in Bug 1105689. They only include the release
+certs, since the nightly certs are a different length, and we only care about updates from old ESRs.
diff --git a/tools/update-verify/release/mar_certs/dep1.der b/tools/update-verify/release/mar_certs/dep1.der
new file mode 100644
index 0000000000..5320f41dfa
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/dep1.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/dep2.der b/tools/update-verify/release/mar_certs/dep2.der
new file mode 100644
index 0000000000..f3eb568425
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/dep2.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/nightly_aurora_level3_primary.der b/tools/update-verify/release/mar_certs/nightly_aurora_level3_primary.der
new file mode 100644
index 0000000000..44fd95dcff
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/nightly_aurora_level3_primary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/nightly_aurora_level3_secondary.der b/tools/update-verify/release/mar_certs/nightly_aurora_level3_secondary.der
new file mode 100644
index 0000000000..90f8e6e82c
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/nightly_aurora_level3_secondary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/release_primary.der b/tools/update-verify/release/mar_certs/release_primary.der
new file mode 100644
index 0000000000..1d94f88ad7
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/release_primary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/release_secondary.der b/tools/update-verify/release/mar_certs/release_secondary.der
new file mode 100644
index 0000000000..474706c4b7
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/release_secondary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/sha1/dep1.der b/tools/update-verify/release/mar_certs/sha1/dep1.der
new file mode 100644
index 0000000000..ec8ce6184d
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/sha1/dep1.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/sha1/dep2.der b/tools/update-verify/release/mar_certs/sha1/dep2.der
new file mode 100644
index 0000000000..4d0f244df2
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/sha1/dep2.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/sha1/release_primary.der b/tools/update-verify/release/mar_certs/sha1/release_primary.der
new file mode 100644
index 0000000000..11417c35e7
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/sha1/release_primary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/sha1/release_secondary.der b/tools/update-verify/release/mar_certs/sha1/release_secondary.der
new file mode 100644
index 0000000000..16a7ef6d91
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/sha1/release_secondary.der
Binary files differ
diff --git a/tools/update-verify/release/mar_certs/xpcshellCertificate.der b/tools/update-verify/release/mar_certs/xpcshellCertificate.der
new file mode 100644
index 0000000000..ea1fd47faa
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/xpcshellCertificate.der
Binary files differ
diff --git a/tools/update-verify/release/replace-updater-certs.py b/tools/update-verify/release/replace-updater-certs.py
new file mode 100644
index 0000000000..9e981fbfe0
--- /dev/null
+++ b/tools/update-verify/release/replace-updater-certs.py
@@ -0,0 +1,41 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
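+# Usage (illustrative; mirrors how release/updates/verify.sh invokes this script):
+#   python3 replace-updater-certs.py <cert_dir> <updater_in> <updater_out> release_primary.der dep1.der release_secondary.der dep2.der
+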
+import os.path
+import sys
+
+cert_dir = sys.argv[1]
+# Read the updater binary twice so an unmodified copy is kept for the size/content comparison at the end
+updater_data = open(sys.argv[2], "rb").read()
+new_updater = open(sys.argv[2], "rb").read()
+outfile = sys.argv[3]
+
+cert_pairs = sys.argv[4:]
+
+if (len(cert_pairs) % 2) != 0:
+ print("Certs must be provided in pairs")
+ sys.exit(1)
+
+for find_cert, replace_cert in zip(*[iter(cert_pairs)] * 2):
+ find = open(os.path.join(cert_dir, find_cert), "rb").read()
+ replace = open(os.path.join(cert_dir, replace_cert), "rb").read()
+ print("Looking for {}...".format(find_cert))
+ if find in new_updater:
+ print("Replacing {} with {}".format(find_cert, replace_cert))
+ new_updater = new_updater.replace(find, replace)
+ else:
+ print("Didn't find {}...".format(find_cert))
+
+if len(updater_data) != len(new_updater):
+ print(
+ "WARNING: new updater is not the same length as the old one (old: {}, new: {})".format(
+ len(updater_data), len(new_updater)
+ )
+ )
+
+if updater_data == new_updater:
+ print("WARNING: updater is unchanged")
+
+with open(outfile, "wb+") as f:
+ f.write(new_updater)
diff --git a/tools/update-verify/release/test-mar-url.sh b/tools/update-verify/release/test-mar-url.sh
new file mode 100755
index 0000000000..217c03d72a
--- /dev/null
+++ b/tools/update-verify/release/test-mar-url.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+mar_url="${1}"
+mar_required_size="${2}"
+
+mar_headers_file="$(mktemp -t mar_headers.XXXXXXXXXX)"
+mar_headers_debug_file="$(mktemp -t mar_headers_debug.XXXXXXXXXX)"
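+# Fetch only the first three bytes (-r 0-2): a 206 response still includes a Content-Range header whose
+# trailing value is the total file size, which is all we need to check the MAR without downloading it.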
+curl --retry 50 --retry-max-time 300 -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+mar_file_curl_exit_code=$?
+
+# Bug 894368 - HTTP 408's are not handled by the "curl --retry" mechanism; in this case retry in bash
+attempts=1
+while [ "$((++attempts))" -lt 50 ] && grep 'HTTP/1\.1 408 Request Timeout' "${mar_headers_file}" &>/dev/null
+do
+ sleep 1
+ curl --retry 50 --retry-max-time 300 -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+ mar_file_curl_exit_code=$?
+done
+
+# check file size matches what was written in update.xml
+# strip out dos line returns from header if they occur
+# note: below, using $(printf '\r') for Darwin compatibility, rather than simple '\r'
+# (i.e. shell interprets '\r' rather than sed interpreting '\r')
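+# e.g. a header of "Content-Range: bytes 0-2/85697043" yields mar_actual_size=85697043 (size shown is hypothetical)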
+mar_actual_size="$(sed -e "s/$(printf '\r')//" -n -e 's/^Content-Range: bytes 0-2\///ip' "${mar_headers_file}" | tail -1)"
+mar_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${mar_headers_file}" | tail -1)"
+# note: below, sed -n '/^HTTP\//p' acts as grep '^HTTP/', but requires less overhead since sed is already running
+http_response_code="$(sed -e "s/$(printf '\r')//" -n -e '/^HTTP\//p' "${mar_headers_file}" | tail -1)"
+
+[ -n "${mar_actual_url}" ] && mar_url_with_redirects="${mar_url} => ${mar_actual_url}" || mar_url_with_redirects="${mar_url}"
+
+if [ "${mar_actual_size}" == "${mar_required_size}" ]
+then
+ echo "$(date): Mar file ${mar_url_with_redirects} available with correct size (${mar_actual_size} bytes)" > "$(mktemp -t log.XXXXXXXXXX)"
+elif [ -z "${mar_actual_size}" ]
+then
+ echo "$(date): FAILURE: Could not retrieve http header for mar file from ${mar_url}" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "NO_MAR_FILE ${mar_url} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+ # If we get a response code (i.e. not an empty string), it better contain "206 Partial Content" or we should report on it.
+ # If response code is empty, this should be caught by a different block to this one (e.g. "could not retrieve http header").
+elif [ -n "${http_response_code}" ] && [ "${http_response_code}" == "${http_response_code/206 Partial Content/}" ]
+then
+ echo "$(date): FAILURE: received a '${http_response_code}' response for mar file from ${mar_url} (expected HTTP 206 Partial Content)" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "BAD_HTTP_RESPONSE_CODE_FOR_MAR ${mar_url} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+else
+ echo "$(date): FAILURE: Mar file incorrect size - should be ${mar_required_size} bytes, but is ${mar_actual_size} bytes - ${mar_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+ echo "MAR_FILE_WRONG_SIZE ${mar_url} ${mar_required_size} ${mar_actual_size} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+fi
diff --git a/tools/update-verify/release/updates/verify.sh b/tools/update-verify/release/updates/verify.sh
new file mode 100755
index 0000000000..3f8556b424
--- /dev/null
+++ b/tools/update-verify/release/updates/verify.sh
@@ -0,0 +1,292 @@
+#!/bin/bash
+#set -x
+
+. ../common/cached_download.sh
+. ../common/unpack.sh
+. ../common/download_mars.sh
+. ../common/download_builds.sh
+. ../common/check_updates.sh
+
+# Cache init is handled by the new async_download.py
+# clear_cache
+# create_cache
+
+ftp_server_to="http://stage.mozilla.org/pub/mozilla.org"
+ftp_server_from="http://stage.mozilla.org/pub/mozilla.org"
+aus_server="https://aus4.mozilla.org"
+to=""
+to_build_id=""
+to_app_version=""
+to_display_version=""
+override_certs=""
+diff_summary_log=${DIFF_SUMMARY_LOG:-"$PWD/diff-summary.log"}
+if [ -e ${diff_summary_log} ]; then
+ rm ${diff_summary_log}
+fi
+touch ${diff_summary_log}
+
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+retry="$MY_DIR/../../../../mach python -m redo.cmd -s 1 -a 3"
+cert_replacer="$MY_DIR/../replace-updater-certs.py"
+
+dep_overrides="nightly_aurora_level3_primary.der dep1.der nightly_aurora_level3_secondary.der dep2.der release_primary.der dep1.der release_secondary.der dep2.der sha1/release_primary.der sha1/dep1.der sha1/release_secondary.der sha1/dep2.der"
+nightly_overrides="dep1.der nightly_aurora_level3_primary.der dep2.der nightly_aurora_level3_secondary.der release_primary.der nightly_aurora_level3_primary.der release_secondary.der nightly_aurora_level3_secondary.der"
+release_overrides="dep1.der release_primary.der dep2.der release_secondary.der nightly_aurora_level3_primary.der release_primary.der nightly_aurora_level3_secondary.der release_secondary.der"
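+# Each *_overrides list is a flat sequence of find/replace certificate pairs,
+# consumed pairwise by replace-updater-certs.py further down.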
+
+runmode=0
+config_file="updates.cfg"
+UPDATE_ONLY=1
+TEST_ONLY=2
+MARS_ONLY=3
+COMPLETE=4
+
+usage()
+{
+ echo "Usage: verify.sh [OPTION] [CONFIG_FILE]"
+ echo " -u, --update-only only download update.xml"
+ echo " -t, --test-only only test that MARs exist"
+ echo " -m, --mars-only only test MARs"
+ echo " -c, --complete complete upgrade test"
+}
+
+if [ -z "$*" ]
+then
+ usage
+ exit 0
+fi
+
+pass_arg_count=0
+while [ "$#" -gt "$pass_arg_count" ]
+do
+ case "$1" in
+ -u | --update-only)
+ runmode=$UPDATE_ONLY
+ shift
+ ;;
+ -t | --test-only)
+ runmode=$TEST_ONLY
+ shift
+ ;;
+ -m | --mars-only)
+ runmode=$MARS_ONLY
+ shift
+ ;;
+ -c | --complete)
+ runmode=$COMPLETE
+ shift
+ ;;
+ *)
+ # Move the unrecognized arg to the end of the list
+ arg="$1"
+ shift
+ set -- "$@" "$arg"
+ pass_arg_count=`expr $pass_arg_count + 1`
+ esac
+done
+
+if [ -n "$arg" ]
+then
+ config_file=$arg
+ echo "Using config file $config_file"
+else
+ echo "Using default config file $config_file"
+fi
+
+if [ "$runmode" == "0" ]
+then
+ usage
+ exit 0
+fi
+
+while read entry
+do
+ # initialize all config variables
+ release=""
+ product=""
+ platform=""
+ build_id=""
+ locales=""
+ channel=""
+ from=""
+ patch_types="complete"
+ use_old_updater=0
+ mar_channel_IDs=""
+ updater_package=""
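+    # Illustrative updates.cfg entry (hypothetical values; a real entry is one line, eval'd to set the variables above):
+    #   release="115.0" product="firefox" platform="linux-x86_64" build_id="20230601000000" locales="de en-US" channel="release-localtest" patch_types="complete partial"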
+ eval $entry
+
+ # the arguments for updater changed in Gecko 34/SeaMonkey 2.31
+ major_version=`echo $release | cut -f1 -d.`
+ if [[ "$product" == "seamonkey" ]]; then
+ minor_version=`echo $release | cut -f2 -d.`
+ if [[ $major_version -le 2 && $minor_version -lt 31 ]]; then
+ use_old_updater=1
+ fi
+ elif [[ $major_version -lt 34 ]]; then
+ use_old_updater=1
+ fi
+
+ # Note: cross platform tests seem to work for everything except Mac-on-Windows.
+ # We probably don't care about this use case.
+ if [[ "$updater_package" == "" ]]; then
+ updater_package="$from"
+ fi
+
+ for locale in $locales
+ do
+ rm -f update/partial.size update/complete.size
+ for patch_type in $patch_types
+ do
+ update_path="${product}/${release}/${build_id}/${platform}/${locale}/${channel}/default/default/default"
+ if [ "$runmode" == "$MARS_ONLY" ] || [ "$runmode" == "$COMPLETE" ] ||
+ [ "$runmode" == "$TEST_ONLY" ]
+ then
+ if [ "$runmode" == "$TEST_ONLY" ]
+ then
+ download_mars "${aus_server}/update/3/${update_path}/default/update.xml?force=1" ${patch_type} 1 \
+ "${to_build_id}" "${to_app_version}" "${to_display_version}"
+ err=$?
+ else
+ download_mars "${aus_server}/update/3/${update_path}/update.xml?force=1" ${patch_type} 0 \
+ "${to_build_id}" "${to_app_version}" "${to_display_version}"
+ err=$?
+ fi
+ if [ "$err" != "0" ]; then
+ echo "TEST-UNEXPECTED-FAIL: [${release} ${locale} ${patch_type}] download_mars returned non-zero exit code: ${err}"
+ continue
+ fi
+ else
+ mkdir -p updates/${update_path}/complete
+ mkdir -p updates/${update_path}/partial
+        $retry wget -q -O updates/${update_path}/${patch_type}/update.xml "${aus_server}/update/3/${update_path}/update.xml?force=1"
+
+ fi
+ if [ "$runmode" == "$COMPLETE" ]
+ then
+ if [ -z "$from" ] || [ -z "$to" ]
+ then
+ continue
+ fi
+
+ updater_platform=""
+ updater_package_url=`echo "${ftp_server_from}${updater_package}" | sed "s/%locale%/${locale}/"`
+ updater_package_filename=`basename "$updater_package_url"`
+ case $updater_package_filename in
+ *dmg)
+ platform_dirname="*.app"
+ updater_bins="Contents/MacOS/updater.app/Contents/MacOS/updater Contents/MacOS/updater.app/Contents/MacOS/org.mozilla.updater"
+ updater_platform="mac"
+ ;;
+ *exe)
+ updater_package_url=`echo "${updater_package_url}" | sed "s/ja-JP-mac/ja/"`
+ platform_dirname="bin"
+ updater_bins="updater.exe"
+ updater_platform="win32"
+ ;;
+ *bz2)
+ updater_package_url=`echo "${updater_package_url}" | sed "s/ja-JP-mac/ja/"`
+ platform_dirname=`echo $product | tr '[A-Z]' '[a-z]'`
+ updater_bins="updater"
+ updater_platform="linux"
+ ;;
+ *)
+ echo "Couldn't detect updater platform"
+ exit 1
+ ;;
+ esac
+
+ rm -rf updater/*
+ cached_download "${updater_package_filename}" "${updater_package_url}"
+ unpack_build "$updater_platform" updater "$updater_package_filename" "$locale"
+
+ # Even on Windows, we want Unix-style paths for the updater, because of MSYS.
+ cwd=$(\ls -d $PWD/updater/$platform_dirname)
+ # Bug 1209376. Linux updater linked against other libraries in the installation directory
+ export LD_LIBRARY_PATH=$cwd
+ updater="null"
+ for updater_bin in $updater_bins; do
+ if [ -e "$cwd/$updater_bin" ]; then
+ echo "Found updater at $updater_bin"
+ updater="$cwd/$updater_bin"
+ break
+ fi
+ done
+
+ update_to_dep=false
+ if [ ! -z "$override_certs" ]; then
+ echo "Replacing certs in updater binary"
+ cp "${updater}" "${updater}.orig"
+ case ${override_certs} in
+ dep)
+ overrides=${dep_overrides}
+ update_to_dep=true
+ ;;
+ nightly)
+ overrides=${nightly_overrides}
+ ;;
+ release)
+ overrides=${release_overrides}
+ ;;
+ *)
+ echo "Unknown override cert - skipping"
+ ;;
+ esac
+ python3 "${cert_replacer}" "${MY_DIR}/../mar_certs" "${updater}.orig" "${updater}" ${overrides}
+ else
+ echo "override_certs is '${override_certs}', not replacing any certificates"
+ fi
+
+ if [ "$updater" == "null" ]; then
+ echo "Couldn't find updater binary"
+ continue
+ fi
+
+ from_path=`echo $from | sed "s/%locale%/${locale}/"`
+ to_path=`echo $to | sed "s/%locale%/${locale}/"`
+ download_builds "${ftp_server_from}${from_path}" "${ftp_server_to}${to_path}"
+ err=$?
+ if [ "$err" != "0" ]; then
+ echo "TEST-UNEXPECTED-FAIL: [$release $locale $patch_type] download_builds returned non-zero exit code: $err"
+ continue
+ fi
+ source_file=`basename "$from_path"`
+ target_file=`basename "$to_path"`
+ diff_file="results.diff"
+ if [ -e ${diff_file} ]; then
+ rm ${diff_file}
+ fi
+ check_updates "${platform}" "downloads/${source_file}" "downloads/${target_file}" ${locale} ${use_old_updater} ${updater} ${diff_file} ${channel} "${mar_channel_IDs}" ${update_to_dep}
+ err=$?
+ if [ "$err" == "0" ]; then
+ continue
+ elif [ "$err" == "1" ]; then
+ echo "TEST-UNEXPECTED-FAIL: [$release $locale $patch_type] check_updates returned failure for $platform downloads/$source_file vs. downloads/$target_file: $err"
+ elif [ "$err" == "2" ]; then
+ echo "WARN: [$release $locale $patch_type] check_updates returned warning for $platform downloads/$source_file vs. downloads/$target_file: $err"
+ else
+ echo "TEST-UNEXPECTED-FAIL: [$release $locale $patch_type] check_updates returned unknown error for $platform downloads/$source_file vs. downloads/$target_file: $err"
+ fi
+
+ if [ -s ${diff_file} ]; then
+ echo "Found diffs for ${patch_type} update from ${aus_server}/update/3/${update_path}/update.xml?force=1" >> ${diff_summary_log}
+ cat ${diff_file} >> ${diff_summary_log}
+ echo "" >> ${diff_summary_log}
+ fi
+ fi
+ done
+ if [ -f update/partial.size ] && [ -f update/complete.size ]; then
+ partial_size=`cat update/partial.size`
+ complete_size=`cat update/complete.size`
+ if [ $partial_size -gt $complete_size ]; then
+ echo "TEST-UNEXPECTED-FAIL: [$release $locale $patch_type] partial updates are larger than complete updates"
+ elif [ $partial_size -eq $complete_size ]; then
+ echo "WARN: [$release $locale $patch_type] partial updates are the same size as complete updates, this should only happen for major updates"
+ else
+ echo "SUCCESS: [$release $locale $patch_type] partial updates are smaller than complete updates, all is well in the universe"
+ fi
+ fi
+ done
+done < $config_file
+
+clear_cache
diff --git a/tools/update-verify/scripts/async_download.py b/tools/update-verify/scripts/async_download.py
new file mode 100644
index 0000000000..efedc8295f
--- /dev/null
+++ b/tools/update-verify/scripts/async_download.py
@@ -0,0 +1,362 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import asyncio
+import glob
+import logging
+import os
+import sys
+import xml.etree.ElementTree as ET
+from os import path
+
+import aiohttp
+
+logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")
+log = logging.getLogger(__name__)
+
+UV_CACHE_PATH = os.getenv(
+ "UV_CACHE_PATH", os.path.join(path.dirname(__file__), "../release/updates/cache/")
+)
+UV_PARALLEL_DOWNLOADS = int(os.getenv("UV_PARALLEL_DOWNLOADS", 20))  # env values arrive as strings
+
+FTP_SERVER_TO = os.getenv("ftp_server_to", "http://stage.mozilla.org/pub/mozilla.org")
+FTP_SERVER_FROM = os.getenv(
+ "ftp_server_from", "http://stage.mozilla.org/pub/mozilla.org"
+)
+AUS_SERVER = os.getenv("aus_server", "https://aus5.mozilla.org")
+
+
+def create_cache():
+ if not os.path.isdir(UV_CACHE_PATH):
+ os.mkdir(UV_CACHE_PATH)
+
+
+def remove_cache():
+ """
+    Removes all files in the cache folder.
+    Subdirectories and dotfiles (hidden files) are not supported.
+    Keeping the cache directory itself in place allows us to use Docker tmpfs mounts,
+    which are the only workaround to poor mount r/w performance on MacOS.
+ Bug Reference:
+ https://forums.docker.com/t/file-access-in-mounted-volumes-extremely-slow-cpu-bound/8076/288
+ """
+ files = glob.glob(f"{UV_CACHE_PATH}/*")
+ for f in files:
+ os.remove(f)
+
+
+def _cachepath(i, ext):
+ # Helper function: given an index, return a cache file path
+ return path.join(UV_CACHE_PATH, f"obj_{i:0>5}.{ext}")
+
+
+async def fetch_url(url, path, connector):
+ """
+ Fetch/download a file to a specific path
+
+ Parameters
+ ----------
+ url : str
+ URL to be fetched
+
+ path : str
+ Path to save binary
+
+ Returns
+ -------
+ dict
+ Request result. If error result['error'] is True
+ """
+
+ def _result(response, error=False):
+ data = {
+ "headers": dict(response.headers),
+ "status": response.status,
+ "reason": response.reason,
+ "_request_info": str(response._request_info),
+ "url": url,
+ "path": path,
+ "error": error,
+ }
+ return data
+
+ # Set connection timeout to 15 minutes
+ timeout = aiohttp.ClientTimeout(total=900)
+
+ try:
+ async with aiohttp.ClientSession(
+ connector=connector, connector_owner=False, timeout=timeout
+ ) as session:
+ log.info(f"Retrieving {url}")
+ async with session.get(
+ url, headers={"Cache-Control": "max-stale=0"}
+ ) as response:
+ # Any response code > 299 means something went wrong
+ if response.status > 299:
+ log.info(f"Failed to download {url} with status {response.status}")
+ return _result(response, True)
+
+ with open(path, "wb") as fd:
+ while True:
+ chunk = await response.content.read()
+ if not chunk:
+ break
+ fd.write(chunk)
+ result = _result(response)
+ log.info(f'Finished downloading {url}\n{result["headers"]}')
+ return result
+
+ except (
+ UnicodeDecodeError, # Data parsing
+ asyncio.TimeoutError, # Async timeout
+ aiohttp.ClientError, # aiohttp error
+ ) as e:
+ log.error("=============")
+ log.error(f"Error downloading {url}")
+ log.error(e)
+ log.error("=============")
+ return {"path": path, "url": url, "error": True}
+
+
+async def download_multi(targets, sourceFunc):
+ """
+ Download list of targets
+
+ Parameters
+ ----------
+ targets : list
+ List of urls to download
+
+ sourceFunc : str
+ Source function name (for filename)
+
+ Returns
+ -------
+    list
+        List of responses (headers)
+ """
+
+ targets = set(targets)
+ amount = len(targets)
+
+ connector = aiohttp.TCPConnector(
+        limit=UV_PARALLEL_DOWNLOADS,  # total simultaneous connections (aiohttp's "limit" is a global cap, not per host)
+ ttl_dns_cache=600, # Cache DNS for 10 mins
+ )
+
+ log.info(f"\nDownloading {amount} files ({UV_PARALLEL_DOWNLOADS} async limit)")
+
+ # Transform targets into {url, path} objects
+ payloads = [
+ {"url": url, "path": _cachepath(i, sourceFunc)}
+ for (i, url) in enumerate(targets)
+ ]
+
+ downloads = []
+
+ fetches = [fetch_url(t["url"], t["path"], connector) for t in payloads]
+
+ downloads.extend(await asyncio.gather(*fetches))
+ connector.close()
+
+ results = []
+ # Remove file if download failed
+ for fetch in downloads:
+ # If there's an error, try to remove the file, but keep going if file not present
+ if fetch["error"]:
+ try:
+ os.unlink(fetch.get("path", None))
+ except (TypeError, FileNotFoundError) as e:
+ log.info(f"Unable to cleanup error file: {e} continuing...")
+ continue
+
+ results.append(fetch)
+
+ return results
+
+
+async def download_builds(verifyConfig):
+ """
+ Given UpdateVerifyConfig, download and cache all necessary updater files
+    Includes the "to" build and the "from"/"updater_package" builds
+
+ Parameters
+ ----------
+ verifyConfig : UpdateVerifyConfig
+ Chunked config
+
+ Returns
+ -------
+ list : List of file paths and urls to each updater file
+ """
+
+ updaterUrls = set()
+ for release in verifyConfig.releases:
+ ftpServerFrom = release["ftp_server_from"]
+ ftpServerTo = release["ftp_server_to"]
+
+ for locale in release["locales"]:
+ toUri = verifyConfig.to
+ if toUri is not None and ftpServerTo is not None:
+ toUri = toUri.replace("%locale%", locale)
+ updaterUrls.add(f"{ftpServerTo}{toUri}")
+
+ for reference in ("updater_package", "from"):
+ uri = release.get(reference, None)
+ if uri is None:
+ continue
+ uri = uri.replace("%locale%", locale)
+ # /ja-JP-mac/ locale is replaced with /ja/ for updater packages
+ uri = uri.replace("ja-JP-mac", "ja")
+ updaterUrls.add(f"{ftpServerFrom}{uri}")
+
+ log.info(f"About to download {len(updaterUrls)} updater packages")
+
+ updaterResults = await download_multi(list(updaterUrls), "updater.async.cache")
+ return updaterResults
+
+
+def get_mar_urls_from_update(path):
+ """
+ Given an update.xml file, return MAR URLs
+
+ If update.xml doesn't have URLs, returns empty list
+
+ Parameters
+ ----------
+ path : str
+ Path to update.xml file
+
+ Returns
+ -------
+ list : List of URLs
+ """
+
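+    # The AUS response is assumed to have the shape <updates><update ...><patch URL="..." .../></update></updates>,
+    # so MAR URLs are read from the URL attribute of each update/patch element.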
+ result = []
+ root = ET.parse(path).getroot()
+ for patch in root.findall("update/patch"):
+ url = patch.get("URL")
+ if url:
+ result.append(url)
+ return result
+
+
+async def download_mars(updatePaths):
+ """
+ Given list of update.xml paths, download MARs for each
+
+ Parameters
+ ----------
+    updatePaths : list
+ List of paths to update.xml files
+ """
+
+ patchUrls = set()
+ for updatePath in updatePaths:
+ for url in get_mar_urls_from_update(updatePath):
+ patchUrls.add(url)
+
+ log.info(f"About to download {len(patchUrls)} MAR packages")
+ marResults = await download_multi(list(patchUrls), "mar.async.cache")
+ return marResults
+
+
+async def download_update_xml(verifyConfig):
+ """
+ Given UpdateVerifyConfig, download and cache all necessary update.xml files
+
+ Parameters
+ ----------
+ verifyConfig : UpdateVerifyConfig
+ Chunked config
+
+ Returns
+ -------
+ list : List of file paths and urls to each update.xml file
+ """
+
+ xmlUrls = set()
+ product = verifyConfig.product
+ urlTemplate = (
+ "{server}/update/3/{product}/{release}/{build}/{platform}/"
+ "{locale}/{channel}/default/default/default/update.xml?force=1"
+ )
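+    # Illustrative expansion of the template (hypothetical values):
+    #   https://aus5.mozilla.org/update/3/Firefox/115.0/20230601000000/Linux_x86_64-gcc3/en-US/release-localtest/default/default/default/update.xml?force=1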
+
+ for release in verifyConfig.releases:
+ for locale in release["locales"]:
+ xmlUrls.add(
+ urlTemplate.format(
+ server=AUS_SERVER,
+ product=product,
+ release=release["release"],
+ build=release["build_id"],
+ platform=release["platform"],
+ locale=locale,
+ channel=verifyConfig.channel,
+ )
+ )
+
+ log.info(f"About to download {len(xmlUrls)} update.xml files")
+ xmlResults = await download_multi(list(xmlUrls), "update.xml.async.cache")
+ return xmlResults
+
+
+async def _download_from_config(verifyConfig):
+ """
+ Given an UpdateVerifyConfig object, download all necessary files to cache
+
+ Parameters
+ ----------
+ verifyConfig : UpdateVerifyConfig
+ The config - already chunked
+ """
+ remove_cache()
+ create_cache()
+
+ downloadList = []
+ ##################
+ # Download files #
+ ##################
+ xmlFiles = await download_update_xml(verifyConfig)
+ downloadList.extend(xmlFiles)
+ downloadList += await download_mars(x["path"] for x in xmlFiles)
+ downloadList += await download_builds(verifyConfig)
+
+ #####################
+ # Create cache.list #
+ #####################
+ cacheLinks = []
+
+    # Record each file's URL in cacheLinks, then rename the file to its sequential cache path
+ for download in downloadList:
+ cacheLinks.append(download["url"])
+ fileIndex = len(cacheLinks)
+ os.rename(download["path"], _cachepath(fileIndex, "cache"))
+
+ cacheIndexPath = path.join(UV_CACHE_PATH, "urls.list")
+ with open(cacheIndexPath, "w") as cache:
+ cache.writelines(f"{l}\n" for l in cacheLinks)
+
+ # Log cache
+ log.info("Cache index urls.list contents:")
+ with open(cacheIndexPath, "r") as cache:
+ for ln, url in enumerate(cache.readlines()):
+ line = url.replace("\n", "")
+ log.info(f"Line {ln+1}: {line}")
+
+ return None
+
+
+def download_from_config(verifyConfig):
+ """
+ Given an UpdateVerifyConfig object, download all necessary files to cache
+ (sync function that calls the async one)
+
+ Parameters
+ ----------
+ verifyConfig : UpdateVerifyConfig
+ The config - already chunked
+ """
+ return asyncio.run(_download_from_config(verifyConfig))
diff --git a/tools/update-verify/scripts/chunked-verify.py b/tools/update-verify/scripts/chunked-verify.py
new file mode 100644
index 0000000000..8c4320d4cc
--- /dev/null
+++ b/tools/update-verify/scripts/chunked-verify.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import sys
+from os import path
+from tempfile import mkstemp
+
+sys.path.append(path.join(path.dirname(__file__), "../python"))
+logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")
+log = logging.getLogger(__name__)
+
+from async_download import download_from_config
+from mozrelease.update_verify import UpdateVerifyConfig
+from util.commands import run_cmd
+
+UPDATE_VERIFY_COMMAND = ["bash", "verify.sh", "-c"]
+UPDATE_VERIFY_DIR = path.join(path.dirname(__file__), "../release/updates")
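+# The final command is "bash verify.sh -c <generated config file>", run from UPDATE_VERIFY_DIR (see run_cmd below)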
+
+
+if __name__ == "__main__":
+ from argparse import ArgumentParser
+
+ parser = ArgumentParser("")
+
+ parser.set_defaults(
+ chunks=None,
+ thisChunk=None,
+ )
+ parser.add_argument("--verify-config", required=True, dest="verifyConfig")
+ parser.add_argument("--verify-channel", required=True, dest="verify_channel")
+ parser.add_argument("--chunks", required=True, dest="chunks", type=int)
+ parser.add_argument("--this-chunk", required=True, dest="thisChunk", type=int)
+ parser.add_argument("--diff-summary", required=True, type=str)
+
+ options = parser.parse_args()
+ assert options.chunks and options.thisChunk, "chunks and this-chunk are required"
+ assert path.isfile(options.verifyConfig), "Update verify config must exist!"
+ verifyConfigFile = options.verifyConfig
+
+ fd, configFile = mkstemp()
+ # Needs to be opened in "bytes" mode because we perform relative seeks on it
+ fh = os.fdopen(fd, "wb")
+ try:
+ verifyConfig = UpdateVerifyConfig()
+ verifyConfig.read(path.join(UPDATE_VERIFY_DIR, verifyConfigFile))
+ myVerifyConfig = verifyConfig.getChunk(options.chunks, options.thisChunk)
+ # override the channel if explicitly set
+ if options.verify_channel:
+ myVerifyConfig.channel = options.verify_channel
+ myVerifyConfig.write(fh)
+ fh.close()
+ run_cmd(["cat", configFile])
+
+ # Before verifying, we want to download and cache all required files
+ download_from_config(myVerifyConfig)
+
+ run_cmd(
+ UPDATE_VERIFY_COMMAND + [configFile],
+ cwd=UPDATE_VERIFY_DIR,
+ env={"DIFF_SUMMARY_LOG": path.abspath(options.diff_summary)},
+ )
+ finally:
+ if path.exists(configFile):
+ os.unlink(configFile)
diff --git a/tools/update-verify/scripts/chunked-verify.sh b/tools/update-verify/scripts/chunked-verify.sh
new file mode 100755
index 0000000000..ad6af19080
--- /dev/null
+++ b/tools/update-verify/scripts/chunked-verify.sh
@@ -0,0 +1,72 @@
+#!/bin/bash
+set -ex
+set -o pipefail
+# This ugly hack is a cross-platform (Linux/Mac/Windows+MSYS) way to get the
+# absolute path to the directory containing this script
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+SCRIPTS_DIR="$MY_DIR/.."
+PYTHON='./mach python'
+VERIFY_CONFIG="$MOZ_FETCHES_DIR/update-verify.cfg"
+
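+# Usage (illustrative): ./chunked-verify.sh --total-chunks=12 --this-chunk=3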
+while [ "$#" -gt 0 ]; do
+ case $1 in
+ # Parse total-chunks
+ --total-chunks=*) chunks="${1#*=}"; shift 1;;
+ --total-chunks) chunks="${2}"; shift 2;;
+
+ # Parse this-chunk
+ --this-chunk=*) thisChunk="${1#*=}"; shift 1;;
+ --this-chunk) thisChunk="${2}"; shift 2;;
+
+ # Stop if other parameters are sent
+ *) echo "Unknown parameter: ${1}"; exit 1;;
+ esac
+done
+
+# Validate parameters
+if [ -z "${chunks}" ]; then echo "Required parameter: --total-chunks"; exit 1; fi
+if [ -z "${thisChunk}" ]; then echo "Required parameter: --this-chunk"; exit 1; fi
+
+# release promotion
+if [ -n "$CHANNEL" ]; then
+ EXTRA_PARAMS="--verify-channel $CHANNEL"
+else
+ EXTRA_PARAMS=""
+fi
+$PYTHON $MY_DIR/chunked-verify.py --chunks $chunks --this-chunk $thisChunk \
+--verify-config $VERIFY_CONFIG --diff-summary $PWD/diff-summary.log $EXTRA_PARAMS \
+2>&1 | tee $SCRIPTS_DIR/../verify_log.txt
+
+print_failed_msg()
+{
+ echo "-------------------------"
+ echo "This run has failed, see the above log"
+ echo
+ return 1
+}
+
+print_warning_msg()
+{
+ echo "-------------------------"
+ echo "This run has warnings, see the above log"
+ echo
+ return 2
+}
+
+set +x
+
+echo "Scanning log for failures and warnings"
+echo "--------------------------------------"
+
+# Test for a failure, note we are set -e.
+# Grep returns 0 on a match and 1 on no match
+# Testing for failures first is important because it's OK to mark as failed
+# when there are failures+warnings, but not OK to mark as warnings in the same
+# situation.
+( ! grep 'TEST-UNEXPECTED-FAIL:' $SCRIPTS_DIR/../verify_log.txt ) || print_failed_msg
+( ! grep 'WARN:' $SCRIPTS_DIR/../verify_log.txt ) || print_warning_msg
+
+echo "-------------------------"
+echo "All is well"