summaryrefslogtreecommitdiffstats
path: root/testing/mozharness/scripts/release
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
commit36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree105e8c98ddea1c1e4784a60a5a6410fa416be2de /testing/mozharness/scripts/release
parentInitial commit. (diff)
downloadfirefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr.upstream/115.7.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/mozharness/scripts/release')
-rw-r--r--testing/mozharness/scripts/release/bouncer_check.py202
-rw-r--r--testing/mozharness/scripts/release/generate-checksums.py263
-rw-r--r--testing/mozharness/scripts/release/update-verify-config-creator.py642
3 files changed, 1107 insertions, 0 deletions
diff --git a/testing/mozharness/scripts/release/bouncer_check.py b/testing/mozharness/scripts/release/bouncer_check.py
new file mode 100644
index 0000000000..7a7e39b274
--- /dev/null
+++ b/testing/mozharness/scripts/release/bouncer_check.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" bouncer_check.py
+
+A script to check HTTP statuses of Bouncer products to be shipped.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.automation import EXIT_STATUS_DICT, TBPL_FAILURE
+
+BOUNCER_URL_PATTERN = "{bouncer_prefix}?product={product}&os={os}&lang={lang}"
+
+
class BouncerCheck(BaseScript):
    """Check the Bouncer (download.mozilla.org) entries for a release.

    For every configured product/platform/locale combination this script
    issues an HTTP HEAD request against Bouncer and verifies that the
    response is successful, that the final (post-redirect) URL uses
    https, and that it is served from one of the allowed CDN hosts.
    NOTE(review): "products", "partials" and "bouncer_prefix" are assumed
    to come from the required config file — confirm against the configs.
    """

    config_options = [
        [
            ["--version"],
            {
                "dest": "version",
                "help": "Version of release, eg: 39.0b5",
            },
        ],
        [
            ["--product-field"],
            {
                "dest": "product_field",
                "help": "Version field of release from product details, eg: LATEST_FIREFOX_VERSION", # NOQA: E501
            },
        ],
        [
            ["--products-url"],
            {
                "dest": "products_url",
                "help": "The URL of the current Firefox product versions",
                "type": str,
                "default": "https://product-details.mozilla.org/1.0/firefox_versions.json",
            },
        ],
        [
            ["--previous-version"],
            {
                "dest": "prev_versions",
                "action": "extend",
                "help": "Previous version(s)",
            },
        ],
        [
            ["--locale"],
            {
                "dest": "locales",
                # Intentionally limited for several reasons:
                # 1) faster to check
                # 2) do not need to deal with situation when a new locale
                # introduced and we do not have partials for it yet
                # 3) it mimics the old Sentry behaviour that worked for ages
                # 4) no need to handle ja-JP-mac
                "default": ["en-US", "de", "it", "zh-TW"],
                "action": "append",
                "help": "List of locales to check.",
            },
        ],
        [
            ["-j", "--parallelization"],
            {
                "dest": "parallelization",
                "default": 20,
                "type": int,
                "help": "Number of HTTP sessions running in parallel",
            },
        ],
    ]

    def __init__(self, require_config_file=True):
        """Set up the script with the default CDN host allow-list."""
        super(BouncerCheck, self).__init__(
            config_options=self.config_options,
            require_config_file=require_config_file,
            config={
                # Hosts that a Bouncer redirect is allowed to land on.
                "cdn_urls": [
                    "download-installer.cdn.mozilla.net",
                    "download.cdn.mozilla.net",
                    "download.mozilla.org",
                    "archive.mozilla.org",
                ],
            },
            all_actions=[
                "check-bouncer",
            ],
            default_actions=[
                "check-bouncer",
            ],
        )

    def _pre_config_lock(self, rw_config):
        """Resolve "version" from product details when --product-field is set.

        Fetches the Firefox versions JSON from --products-url and uses the
        field named by --product-field (e.g. LATEST_FIREFOX_VERSION) as the
        release version, overriding any --version value.
        """
        super(BouncerCheck, self)._pre_config_lock(rw_config)

        if "product_field" not in self.config:
            return

        firefox_versions = self.load_json_url(self.config["products_url"])

        if self.config["product_field"] not in firefox_versions:
            self.fatal("Unknown Firefox label: {}".format(self.config["product_field"]))
        self.config["version"] = firefox_versions[self.config["product_field"]]
        self.log("Set Firefox version {}".format(self.config["version"]))

    def check_url(self, session, url):
        """HEAD-check one Bouncer URL, retrying transient failures.

        Marks the whole run as failed (TBPL_FAILURE) when the request keeps
        returning an HTTP error after all retries, when the final redirect
        target is not https, or when its host is not in the cdn_urls
        allow-list.
        """
        from redo import retry
        from requests.exceptions import HTTPError

        try:
            from urllib.parse import urlparse
        except ImportError:
            # Python 2
            from urlparse import urlparse

        def do_check_url():
            self.log("Checking {}".format(url))
            # allow_redirects=True so r.url below is the final CDN URL.
            r = session.head(url, verify=True, timeout=10, allow_redirects=True)
            try:
                r.raise_for_status()
            except HTTPError:
                self.error("FAIL: {}, status: {}".format(url, r.status_code))
                # Re-raise so redo.retry can try again.
                raise

            final_url = urlparse(r.url)
            if final_url.scheme != "https":
                self.error("FAIL: URL scheme is not https: {}".format(r.url))
                self.return_code = EXIT_STATUS_DICT[TBPL_FAILURE]

            if final_url.netloc not in self.config["cdn_urls"]:
                self.error("FAIL: host not in allowed locations: {}".format(r.url))
                self.return_code = EXIT_STATUS_DICT[TBPL_FAILURE]

        try:
            retry(do_check_url, sleeptime=3, max_sleeptime=10, attempts=3)
        except HTTPError:
            # The error was already logged above.
            self.return_code = EXIT_STATUS_DICT[TBPL_FAILURE]
            return

    def get_urls(self):
        """Yield every Bouncer URL that needs to be checked.

        Covers each configured product/platform/locale combination, plus
        the partial-update products for every --previous-version given.
        """
        for product in self.config["products"].values():
            product_name = product["product-name"] % {"version": self.config["version"]}
            for bouncer_platform in product["platforms"]:
                for locale in self.config["locales"]:
                    url = BOUNCER_URL_PATTERN.format(
                        bouncer_prefix=self.config["bouncer_prefix"],
                        product=product_name,
                        os=bouncer_platform,
                        lang=locale,
                    )
                    yield url

        for product in self.config.get("partials", {}).values():
            for prev_version in self.config.get("prev_versions", []):
                product_name = product["product-name"] % {
                    "version": self.config["version"],
                    "prev_version": prev_version,
                }
                for bouncer_platform in product["platforms"]:
                    for locale in self.config["locales"]:
                        url = BOUNCER_URL_PATTERN.format(
                            bouncer_prefix=self.config["bouncer_prefix"],
                            product=product_name,
                            os=bouncer_platform,
                            lang=locale,
                        )
                        yield url

    def check_bouncer(self):
        """Run check_url concurrently over every URL from get_urls.

        A single requests.Session is shared by all workers, with its
        connection pool sized to match the thread pool (--parallelization).
        """
        import concurrent.futures as futures

        import requests

        session = requests.Session()
        http_adapter = requests.adapters.HTTPAdapter(
            pool_connections=self.config["parallelization"],
            pool_maxsize=self.config["parallelization"],
        )
        session.mount("https://", http_adapter)
        session.mount("http://", http_adapter)

        with futures.ThreadPoolExecutor(self.config["parallelization"]) as e:
            fs = []
            for url in self.get_urls():
                fs.append(e.submit(self.check_url, session, url))
            # Propagate any unexpected exception raised by a worker.
            for f in futures.as_completed(fs):
                f.result()
+
+
if __name__ == "__main__":
    # Script entry point: parse options, run actions, exit with status.
    bouncer_check = BouncerCheck()
    bouncer_check.run_and_exit()
diff --git a/testing/mozharness/scripts/release/generate-checksums.py b/testing/mozharness/scripts/release/generate-checksums.py
new file mode 100644
index 0000000000..ae092ae4de
--- /dev/null
+++ b/testing/mozharness/scripts/release/generate-checksums.py
@@ -0,0 +1,263 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import binascii
+import hashlib
+import os
+import re
+import sys
+from multiprocessing.pool import ThreadPool
+
+import six
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.checksums import parse_checksums_file
+from mozharness.mozilla.merkle import MerkleTree
+
+
class ChecksumsGenerator(BaseScript, VirtualenvMixin):
    """Create the combined *SUMS and *SUMMARY checksum files for a release.

    Downloads every small per-upload checksums (or beet) file from the
    release candidates directory, keeps only the artifacts matching the
    configured include patterns, then writes one big checksums file and
    one Merkle-tree summary file per hash format.
    """

    config_options = [
        [
            ["--stage-product"],
            {
                "dest": "stage_product",
                "help": "Name of product used in file server's directory structure, "
                "e.g.: firefox, mobile",
            },
        ],
        [
            ["--version"],
            {
                "dest": "version",
                "help": "Version of release, e.g.: 59.0b5",
            },
        ],
        [
            ["--build-number"],
            {
                "dest": "build_number",
                "help": "Build number of release, e.g.: 2",
            },
        ],
        [
            ["--bucket-name"],
            {
                "dest": "bucket_name",
                "help": "Full bucket name e.g.: moz-fx-productdelivery-pr-38b5-productdelivery.",
            },
        ],
        [
            ["-j", "--parallelization"],
            {
                "dest": "parallelization",
                "default": 20,
                "type": int,
                "help": "Number of checksums file to download concurrently",
            },
        ],
        [
            ["--branch"],
            {
                "dest": "branch",
                "help": "dummy option",
            },
        ],
        [
            ["--build-pool"],
            {
                "dest": "build_pool",
                "help": "dummy option",
            },
        ],
    ] + virtualenv_config_options

    def __init__(self):
        """Set up actions and the virtualenv (boto) configuration."""
        BaseScript.__init__(
            self,
            config_options=self.config_options,
            require_config_file=False,
            config={
                "virtualenv_modules": [
                    "boto",
                ],
                "virtualenv_path": "venv",
            },
            all_actions=[
                "create-virtualenv",
                "collect-individual-checksums",
                "create-big-checksums",
                "create-summary",
            ],
            default_actions=[
                "create-virtualenv",
                "collect-individual-checksums",
                "create-big-checksums",
                "create-summary",
            ],
        )

        # file name -> parsed checksums entry; populated by
        # collect-individual-checksums, consumed by the create-* actions.
        self.checksums = {}
        self.file_prefix = self._get_file_prefix()

    def _pre_config_lock(self, rw_config):
        """Apply defaults for "formats" and "includes" before config lock."""
        super(ChecksumsGenerator, self)._pre_config_lock(rw_config)

        # These defaults are set here rather than in the config because
        # default lists cannot be completely overridden, only appended to.
        if not self.config.get("formats"):
            self.config["formats"] = ["sha512", "sha256"]

        if not self.config.get("includes"):
            self.config["includes"] = [
                r"^.*\.tar\.bz2$",
                r"^.*\.tar\.xz$",
                r"^.*\.snap$",
                r"^.*\.dmg$",
                r"^.*\.pkg$",
                r"^.*\.bundle$",
                r"^.*\.mar$",
                r"^.*Setup.*\.exe$",
                r"^.*Installer\.exe$",
                r"^.*\.msi$",
                r"^.*\.xpi$",
                r"^.*fennec.*\.apk$",
                r"^.*/jsshell.*$",
            ]

    def _get_file_prefix(self):
        """Return the candidates-directory key prefix for this build."""
        return "pub/{}/candidates/{}-candidates/build{}/".format(
            self.config["stage_product"],
            self.config["version"],
            self.config["build_number"],
        )

    def _get_sums_filename(self, format_):
        """Return the big checksums file name, e.g. "SHA512SUMS"."""
        return "{}SUMS".format(format_.upper())

    def _get_summary_filename(self, format_):
        """Return the summary file name, e.g. "SHA512SUMMARY"."""
        return "{}SUMMARY".format(format_.upper())

    def _get_hash_function(self, format_):
        """Return the hashlib constructor for format_, or die for others."""
        if format_ in ("sha256", "sha384", "sha512"):
            return getattr(hashlib, format_)
        else:
            self.fatal("Unsupported format {}".format(format_))

    def _get_bucket(self):
        """Connect anonymously to the configured bucket and cache it.

        GCS is accessed through its S3-compatible API, which is why boto
        is pointed at storage.googleapis.com.
        """
        self.activate_virtualenv()
        from boto import connect_s3

        self.info("Connecting to S3")
        conn = connect_s3(anon=True, host="storage.googleapis.com")
        self.info("Connecting to bucket {}".format(self.config["bucket_name"]))
        self.bucket = conn.get_bucket(self.config["bucket_name"])
        return self.bucket

    def collect_individual_checksums(self):
        """This step grabs all of the small checksums files for the release,
        filters out any unwanted files from within them, and adds the remainder
        to self.checksums for subsequent steps to use."""
        bucket = self._get_bucket()
        self.info("File prefix is: {}".format(self.file_prefix))

        # temporary holding place for checksums
        raw_checksums = []

        def worker(item):
            # Runs on a ThreadPool worker; appends to the shared list.
            self.debug("Downloading {}".format(item))
            sums = bucket.get_key(item).get_contents_as_string()
            raw_checksums.append(sums)

        def find_checksums_files():
            # Prefer .beet files when present, otherwise fall back to the
            # older .checksums files.
            self.info("Getting key names from bucket")
            checksum_files = {"beets": [], "checksums": []}
            for key in bucket.list(prefix=self.file_prefix):
                if key.key.endswith(".checksums"):
                    self.debug("Found checksums file: {}".format(key.key))
                    checksum_files["checksums"].append(key.key)
                elif key.key.endswith(".beet"):
                    self.debug("Found beet file: {}".format(key.key))
                    checksum_files["beets"].append(key.key)
                else:
                    self.debug("Ignoring non-checksums file: {}".format(key.key))
            if checksum_files["beets"]:
                self.log("Using beet format")
                return checksum_files["beets"]
            else:
                self.log("Using checksums format")
                return checksum_files["checksums"]

        pool = ThreadPool(self.config["parallelization"])
        pool.map(worker, find_checksums_files())

        for c in raw_checksums:
            for f, info in six.iteritems(parse_checksums_file(c)):
                for pattern in self.config["includes"]:
                    if re.search(pattern, f):
                        if f in self.checksums:
                            # Identical duplicate entries are tolerated;
                            # conflicting ones are fatal.
                            if info == self.checksums[f]:
                                self.debug(
                                    "Duplicate checksum for file {}"
                                    " but the data matches;"
                                    " continuing...".format(f)
                                )
                                continue
                            self.fatal(
                                "Found duplicate checksum entry for {}, "
                                "don't know which one to pick.".format(f)
                            )
                        if not set(self.config["formats"]) <= set(info["hashes"]):
                            self.fatal("Missing necessary format for file {}".format(f))
                        self.debug("Adding checksums for file: {}".format(f))
                        self.checksums[f] = info
                        break
                else:
                    self.debug("Ignoring checksums for file: {}".format(f))

    def create_summary(self):
        """
        This step computes a Merkle tree over the checksums for each format
        and writes a file containing the head of the tree and inclusion proofs
        for each file.
        """
        for fmt in self.config["formats"]:
            hash_fn = self._get_hash_function(fmt)
            files = [fn for fn in sorted(self.checksums)]
            data = [self.checksums[fn]["hashes"][fmt] for fn in files]

            tree = MerkleTree(hash_fn, data)
            head = binascii.hexlify(tree.head())
            proofs = [
                binascii.hexlify(tree.inclusion_proof(i).to_rfc6962_bis())
                for i in range(len(files))
            ]

            summary = self._get_summary_filename(fmt)
            self.info("Creating summary file: {}".format(summary))

            content = "{} TREE_HEAD\n".format(head.decode("ascii"))
            for i in range(len(files)):
                content += "{} {}\n".format(proofs[i].decode("ascii"), files[i])

            self.write_to_file(summary, content)

    def create_big_checksums(self):
        """Write one combined "<hash> <file>" checksums file per format."""
        for fmt in self.config["formats"]:
            sums = self._get_sums_filename(fmt)
            self.info("Creating big checksums file: {}".format(sums))
            with open(sums, "w+") as output_file:
                for fn in sorted(self.checksums):
                    output_file.write(
                        "{} {}\n".format(
                            self.checksums[fn]["hashes"][fmt].decode("ascii"), fn
                        )
                    )
+
+
if __name__ == "__main__":
    # Script entry point: parse options, run actions, exit with status.
    ChecksumsGenerator().run_and_exit()
diff --git a/testing/mozharness/scripts/release/update-verify-config-creator.py b/testing/mozharness/scripts/release/update-verify-config-creator.py
new file mode 100644
index 0000000000..9de0175577
--- /dev/null
+++ b/testing/mozharness/scripts/release/update-verify-config-creator.py
@@ -0,0 +1,642 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import math
+import os
+import pprint
+import re
+import sys
+
+from looseversion import LooseVersion
+from mozilla_version.gecko import GeckoVersion
+from mozilla_version.version import VersionType
+from six.moves.urllib.parse import urljoin
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.log import DEBUG, FATAL, INFO, WARNING
+from mozharness.base.script import BaseScript
+
+
# ensure all versions are 3 part (i.e. 99.1.0)
# ensure all text (i.e. 'esr') is in the last part
class CompareVersion(LooseVersion):
    """A LooseVersion that normalizes two-part versions to three parts.

    "99.0" becomes "99.0.0" and "99.0esr" becomes "99.0.0esr", so that
    versions sort consistently regardless of how many components they
    were written with.  Used as a sort key in create_config.
    """

    version = ""

    def __init__(self, versionMap):
        parts = versionMap.split(".")
        # assume version is 99.9.0, look for 99.0
        if len(parts) == 2:
            intre = re.compile("([0-9.]+)(.*)")
            match = intre.match(parts[-1])
            if match:
                # split trailing text (e.g. "esr") off the minor part and
                # push it onto the new third component
                parts[-1] = match.group(1)
                parts.append("0%s" % match.group(2))
            else:
                parts.append("0")
        self.version = ".".join(parts)
        # Bug fix: the original called LooseVersion(versionMap), creating a
        # throwaway object and never initializing this instance's base
        # class.  Comparisons then fell back to comparing the raw
        # self.version strings lexicographically (which sorts "100.0.0"
        # before "99.0.0").  Initialize the base with the normalized
        # version so LooseVersion's numeric comparison is actually used.
        LooseVersion.__init__(self, self.version)
+
+
def is_triangualar(x):
    """Check if a number is triangular (0, 1, 3, 6, 10, 15, ...)
    see: https://en.wikipedia.org/wiki/Triangular_number#Triangular_roots_and_tests_for_triangular_numbers # noqa

    >>> is_triangualar(0)
    True
    >>> is_triangualar(1)
    True
    >>> is_triangualar(2)
    False
    >>> is_triangualar(3)
    True
    >>> is_triangualar(4)
    False
    >>> all(is_triangualar(x) for x in [0, 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 66, 78, 91, 105])
    True
    >>> all(not is_triangualar(x) for x in [4, 5, 8, 9, 11, 17, 25, 29, 39, 44, 59, 61, 72, 98, 112])
    True
    """
    # x is triangular iff 8x + 1 is a perfect square.  The previous
    # implementation tested "n == int(n)" on a float square root, which
    # silently returns wrong answers once 8x + 1 exceeds 2**53 and can no
    # longer be represented exactly as a float.  Use the float root only
    # as an estimate and confirm it with exact integer arithmetic.
    target = 8 * x + 1
    root = int(math.sqrt(target))
    # the float estimate can be off by one in either direction
    return any(m * m == target for m in (root - 1, root, root + 1))
+
+
class UpdateVerifyConfigCreator(BaseScript):
    """Generate an update-verify config for a release.

    Gathers the set of previous releases eligible for update testing
    (from product details, the archive server and hg.mozilla.org), then
    writes an UpdateVerifyConfig describing full, partial and quick
    update checks for each of them.
    """

    config_options = [
        [
            ["--product"],
            {
                "dest": "product",
                "help": "Product being tested, as used in the update URL and filenames. Eg: firefox", # NOQA: E501
            },
        ],
        [
            ["--stage-product"],
            {
                "dest": "stage_product",
                "help": "Product being tested, as used in stage directories and ship it"
                "If not passed this is assumed to be the same as product.",
            },
        ],
        [
            ["--app-name"],
            {
                "dest": "app_name",
                "help": "App name being tested. Eg: browser",
            },
        ],
        [
            ["--branch-prefix"],
            {
                "dest": "branch_prefix",
                "help": "Prefix of release branch names. Eg: mozilla, comm",
            },
        ],
        [
            ["--channel"],
            {
                "dest": "channel",
                "help": "Channel to run update verify against",
            },
        ],
        [
            ["--aus-server"],
            {
                "dest": "aus_server",
                "default": "https://aus5.mozilla.org",
                "help": "AUS server to run update verify against",
            },
        ],
        [
            ["--to-version"],
            {
                "dest": "to_version",
                "help": "The version of the release being updated to. Eg: 59.0b5",
            },
        ],
        [
            ["--to-app-version"],
            {
                "dest": "to_app_version",
                "help": "The in-app version of the release being updated to. Eg: 59.0",
            },
        ],
        [
            ["--to-display-version"],
            {
                "dest": "to_display_version",
                "help": "The human-readable version of the release being updated to. Eg: 59.0 Beta 9", # NOQA: E501
            },
        ],
        [
            ["--to-build-number"],
            {
                "dest": "to_build_number",
                "help": "The build number of the release being updated to",
            },
        ],
        [
            ["--to-buildid"],
            {
                "dest": "to_buildid",
                "help": "The buildid of the release being updated to",
            },
        ],
        [
            ["--to-revision"],
            {
                "dest": "to_revision",
                "help": "The revision that the release being updated to was built against",
            },
        ],
        [
            ["--partial-version"],
            {
                "dest": "partial_versions",
                "default": [],
                "action": "append",
                "help": "A previous release version that is expected to receive a partial update. "
                "Eg: 59.0b4. May be specified multiple times.",
            },
        ],
        [
            ["--last-watershed"],
            {
                "dest": "last_watershed",
                "help": "The earliest version to include in the update verify config. Eg: 57.0b10",
            },
        ],
        [
            ["--include-version"],
            {
                "dest": "include_versions",
                "default": [],
                "action": "append",
                "help": "Only include versions that match one of these regexes. "
                "May be passed multiple times",
            },
        ],
        [
            ["--mar-channel-id-override"],
            {
                "dest": "mar_channel_id_options",
                "default": [],
                "action": "append",
                "help": "A version regex and channel id string to override those versions with."
                "Eg: ^\\d+\\.\\d+(\\.\\d+)?$,firefox-mozilla-beta,firefox-mozilla-release "
                "will set accepted mar channel ids to 'firefox-mozilla-beta' and "
                "'firefox-mozilla-release for x.y and x.y.z versions. "
                "May be passed multiple times",
            },
        ],
        [
            ["--override-certs"],
            {
                "dest": "override_certs",
                "default": None,
                "help": "Certs to override the updater with prior to running update verify."
                "If passed, should be one of: dep, nightly, release"
                "If not passed, no certificate overriding will be configured",
            },
        ],
        [
            ["--platform"],
            {
                "dest": "platform",
                "help": "The platform to generate the update verify config for, in FTP-style",
            },
        ],
        [
            ["--updater-platform"],
            {
                "dest": "updater_platform",
                "help": "The platform to run the updater on, in FTP-style."
                "If not specified, this is assumed to be the same as platform",
            },
        ],
        [
            ["--archive-prefix"],
            {
                "dest": "archive_prefix",
                "help": "The server/path to pull the current release from. "
                "Eg: https://archive.mozilla.org/pub",
            },
        ],
        [
            ["--previous-archive-prefix"],
            {
                "dest": "previous_archive_prefix",
                "help": "The server/path to pull the previous releases from"
                "If not specified, this is assumed to be the same as --archive-prefix",
            },
        ],
        [
            ["--repo-path"],
            {
                "dest": "repo_path",
                "help": (
                    "The repository (relative to the hg server root) that the current "
                    "release was built from Eg: releases/mozilla-beta"
                ),
            },
        ],
        [
            ["--output-file"],
            {
                "dest": "output_file",
                "help": "Where to write the update verify config to",
            },
        ],
        [
            ["--product-details-server"],
            {
                "dest": "product_details_server",
                "default": "https://product-details.mozilla.org",
                "help": "Product Details server to pull previous release info from. "
                "Using anything other than the production server is likely to "
                "cause issues with update verify.",
            },
        ],
        [
            ["--hg-server"],
            {
                "dest": "hg_server",
                "default": "https://hg.mozilla.org",
                "help": "Mercurial server to pull various previous and current version info from",
            },
        ],
        [
            ["--full-check-locale"],
            {
                "dest": "full_check_locales",
                "default": ["de", "en-US", "ru"],
                "action": "append",
                "help": "A list of locales to generate full update verify checks for",
            },
        ],
    ]

    def __init__(self):
        """Register the gather-info/create-config/write-config actions."""
        BaseScript.__init__(
            self,
            config_options=self.config_options,
            config={},
            all_actions=[
                "gather-info",
                "create-config",
                "write-config",
            ],
            default_actions=[
                "gather-info",
                "create-config",
                "write-config",
            ],
        )

    def _pre_config_lock(self, rw_config):
        """Fill in defaults derived from other options and parse the
        --mar-channel-id-override values into a pattern -> ids mapping."""
        super(UpdateVerifyConfigCreator, self)._pre_config_lock(rw_config)

        if "updater_platform" not in self.config:
            self.config["updater_platform"] = self.config["platform"]
        if "stage_product" not in self.config:
            self.config["stage_product"] = self.config["product"]
        if "previous_archive_prefix" not in self.config:
            self.config["previous_archive_prefix"] = self.config["archive_prefix"]
        # Bug fix: str.rstrip returns a new string, it does not mutate in
        # place.  The original discarded the results, so trailing slashes
        # were never actually removed; assign them back.
        self.config["archive_prefix"] = self.config["archive_prefix"].rstrip("/")
        self.config["previous_archive_prefix"] = self.config[
            "previous_archive_prefix"
        ].rstrip("/")
        self.config["mar_channel_id_overrides"] = {}
        for override in self.config["mar_channel_id_options"]:
            pattern, override_str = override.split(",", 1)
            self.config["mar_channel_id_overrides"][pattern] = override_str

    def _get_branch_url(self, branch_prefix, version):
        """Return the hg branch (relative path) a version was built from.

        Raises if the version type cannot be mapped to a branch.
        """
        version = GeckoVersion.parse(version)
        branch = None
        if version.version_type == VersionType.BETA:
            branch = "releases/{}-beta".format(branch_prefix)
        elif version.version_type == VersionType.ESR:
            branch = "releases/{}-esr{}".format(branch_prefix, version.major_number)
        elif version.version_type == VersionType.RELEASE:
            if branch_prefix == "comm":
                # Thunderbird does not have ESR releases, regular releases
                # go in an ESR branch
                branch = "releases/{}-esr{}".format(branch_prefix, version.major_number)
            else:
                branch = "releases/{}-release".format(branch_prefix)
        if not branch:
            raise Exception("Cannot determine branch, cannot continue!")

        return branch

    def _get_update_paths(self):
        """Build self.update_paths: version -> appVersion/locales/buildID
        for every previous release eligible for update verification."""
        from mozrelease.l10n import getPlatformLocales
        from mozrelease.paths import getCandidatesDir
        from mozrelease.platforms import ftp2infoFile
        from mozrelease.versions import MozillaVersion

        self.update_paths = {}

        ret = self._retry_download(
            "{}/1.0/{}.json".format(
                self.config["product_details_server"],
                self.config["stage_product"],
            ),
            "WARNING",
        )
        releases = json.load(ret)["releases"]
        for release_name, release_info in reversed(
            sorted(releases.items(), key=lambda x: MozillaVersion(x[1]["version"]))
        ):
            # we need to use releases_name instead of release_info since esr
            # string is included in the name. later we rely on this.
            product, version = release_name.split("-", 1)
            tag = "{}_{}_RELEASE".format(product.upper(), version.replace(".", "_"))

            # Exclude any releases that don't match one of our include version
            # regexes. This is generally to avoid including versions from other
            # channels. Eg: including betas when testing releases
            for v in self.config["include_versions"]:
                if re.match(v, version):
                    break
            else:
                self.log(
                    "Skipping release whose version doesn't match any "
                    "include_version pattern: %s" % release_name,
                    level=INFO,
                )
                continue

            # We also have to trim out previous releases that aren't in the same
            # product line, too old, etc.
            if self.config["stage_product"] != product:
                self.log(
                    "Skipping release that doesn't match product name: %s"
                    % release_name,
                    level=INFO,
                )
                continue
            if MozillaVersion(version) < MozillaVersion(self.config["last_watershed"]):
                self.log(
                    "Skipping release that's behind the last watershed: %s"
                    % release_name,
                    level=INFO,
                )
                continue
            if version == self.config["to_version"]:
                self.log(
                    "Skipping release that is the same as to version: %s"
                    % release_name,
                    level=INFO,
                )
                continue
            if MozillaVersion(version) > MozillaVersion(self.config["to_version"]):
                self.log(
                    "Skipping release that's newer than to version: %s" % release_name,
                    level=INFO,
                )
                continue

            if version in self.update_paths:
                # Bug fix: the original passed printf-style args to
                # Exception, so the version was never interpolated into
                # the message; format the string instead.
                raise Exception("Found duplicate release for version: %s" % version)

            # This is a crappy place to get buildids from, but we don't have a better one.
            # This will start to fail if old info files are deleted.
            info_file_url = "{}{}/{}_info.txt".format(
                self.config["previous_archive_prefix"],
                getCandidatesDir(
                    self.config["stage_product"],
                    version,
                    release_info["build_number"],
                ),
                ftp2infoFile(self.config["platform"]),
            )
            self.log(
                "Retrieving buildid from info file: %s" % info_file_url, level=DEBUG
            )
            ret = self._retry_download(info_file_url, "WARNING")
            # info file is a single "buildID=..." line
            buildID = ret.read().split(b"=")[1].strip().decode("utf-8")

            branch = self._get_branch_url(self.config["branch_prefix"], version)

            shipped_locales_url = urljoin(
                self.config["hg_server"],
                "{}/raw-file/{}/{}/locales/shipped-locales".format(
                    branch,
                    tag,
                    self.config["app_name"],
                ),
            )
            ret = self._retry_download(shipped_locales_url, "WARNING")
            shipped_locales = ret.read().strip().decode("utf-8")

            app_version_url = urljoin(
                self.config["hg_server"],
                "{}/raw-file/{}/{}/config/version.txt".format(
                    branch,
                    tag,
                    self.config["app_name"],
                ),
            )
            app_version = (
                self._retry_download(app_version_url, "WARNING")
                .read()
                .strip()
                .decode("utf-8")
            )

            self.log("Adding {} to update paths".format(version), level=INFO)
            self.update_paths[version] = {
                "appVersion": app_version,
                "locales": getPlatformLocales(shipped_locales, self.config["platform"]),
                "buildID": buildID,
            }
            for pattern, mar_channel_ids in self.config[
                "mar_channel_id_overrides"
            ].items():
                if re.match(pattern, version):
                    self.update_paths[version]["marChannelIds"] = mar_channel_ids

    def gather_info(self):
        """Collect update paths; empty paths are fatal unless to_version
        predates the last watershed."""
        from mozilla_version.gecko import GeckoVersion

        self._get_update_paths()
        if self.update_paths:
            self.log("Found update paths:", level=DEBUG)
            self.log(pprint.pformat(self.update_paths), level=DEBUG)
        elif GeckoVersion.parse(self.config["to_version"]) <= GeckoVersion.parse(
            self.config["last_watershed"]
        ):
            self.log(
                "Didn't find any update paths, but to_version {} is before the last_"
                "watershed {}, generating empty config".format(
                    self.config["to_version"],
                    self.config["last_watershed"],
                ),
                level=WARNING,
            )
        else:
            self.log("Didn't find any update paths, cannot continue", level=FATAL)

    def create_config(self):
        """Build the UpdateVerifyConfig object from the gathered paths."""
        from mozrelease.l10n import getPlatformLocales
        from mozrelease.paths import (
            getCandidatesDir,
            getReleaseInstallerPath,
            getReleasesDir,
        )
        from mozrelease.platforms import ftp2updatePlatforms
        from mozrelease.update_verify import UpdateVerifyConfig
        from mozrelease.versions import getPrettyVersion

        candidates_dir = getCandidatesDir(
            self.config["stage_product"],
            self.config["to_version"],
            self.config["to_build_number"],
        )
        to_ = getReleaseInstallerPath(
            self.config["product"],
            self.config["product"].title(),
            self.config["to_version"],
            self.config["platform"],
            locale="%locale%",
        )
        to_path = "{}/{}".format(candidates_dir, to_)

        to_display_version = self.config.get("to_display_version")
        if not to_display_version:
            to_display_version = getPrettyVersion(self.config["to_version"])

        self.update_verify_config = UpdateVerifyConfig(
            product=self.config["product"].title(),
            channel=self.config["channel"],
            aus_server=self.config["aus_server"],
            to=to_path,
            to_build_id=self.config["to_buildid"],
            to_app_version=self.config["to_app_version"],
            to_display_version=to_display_version,
            override_certs=self.config.get("override_certs"),
        )

        to_shipped_locales_url = urljoin(
            self.config["hg_server"],
            "{}/raw-file/{}/{}/locales/shipped-locales".format(
                self.config["repo_path"],
                self.config["to_revision"],
                self.config["app_name"],
            ),
        )
        to_shipped_locales = (
            self._retry_download(to_shipped_locales_url, "WARNING")
            .read()
            .strip()
            .decode("utf-8")
        )
        to_locales = set(
            getPlatformLocales(to_shipped_locales, self.config["platform"])
        )

        completes_only_index = 0
        for fromVersion in reversed(sorted(self.update_paths, key=CompareVersion)):
            from_ = self.update_paths[fromVersion]
            locales = sorted(list(set(from_["locales"]).intersection(to_locales)))
            appVersion = from_["appVersion"]
            build_id = from_["buildID"]
            mar_channel_IDs = from_.get("marChannelIds")

            # Use new build targets for Windows, but only on compatible
            # versions (42+). See bug 1185456 for additional context.
            if self.config["platform"] not in ("win32", "win64") or LooseVersion(
                fromVersion
            ) < LooseVersion("42.0"):
                update_platform = ftp2updatePlatforms(self.config["platform"])[0]
            else:
                update_platform = ftp2updatePlatforms(self.config["platform"])[1]

            release_dir = getReleasesDir(self.config["stage_product"], fromVersion)
            path_ = getReleaseInstallerPath(
                self.config["product"],
                self.config["product"].title(),
                fromVersion,
                self.config["platform"],
                locale="%locale%",
            )
            from_path = "{}/{}".format(release_dir, path_)

            updater_package = "{}/{}".format(
                release_dir,
                getReleaseInstallerPath(
                    self.config["product"],
                    self.config["product"].title(),
                    fromVersion,
                    self.config["updater_platform"],
                    locale="%locale%",
                ),
            )

            # Exclude locales being full checked
            quick_check_locales = [
                l for l in locales if l not in self.config["full_check_locales"]
            ]
            # Get the intersection of from and to full_check_locales
            this_full_check_locales = [
                l for l in self.config["full_check_locales"] if l in locales
            ]

            if fromVersion in self.config["partial_versions"]:
                self.info(
                    "Generating configs for partial update checks for %s" % fromVersion
                )
                self.update_verify_config.addRelease(
                    release=appVersion,
                    build_id=build_id,
                    locales=locales,
                    patch_types=["complete", "partial"],
                    from_path=from_path,
                    ftp_server_from=self.config["previous_archive_prefix"],
                    ftp_server_to=self.config["archive_prefix"],
                    mar_channel_IDs=mar_channel_IDs,
                    platform=update_platform,
                    updater_package=updater_package,
                )
            else:
                # Full checks are downloaded/applied only on a triangular
                # subset of indices to keep run time bounded.
                if this_full_check_locales and is_triangualar(completes_only_index):
                    self.info("Generating full check configs for %s" % fromVersion)
                    self.update_verify_config.addRelease(
                        release=appVersion,
                        build_id=build_id,
                        locales=this_full_check_locales,
                        from_path=from_path,
                        ftp_server_from=self.config["previous_archive_prefix"],
                        ftp_server_to=self.config["archive_prefix"],
                        mar_channel_IDs=mar_channel_IDs,
                        platform=update_platform,
                        updater_package=updater_package,
                    )
                # Quick test for other locales, no download
                if len(quick_check_locales) > 0:
                    self.info("Generating quick check configs for %s" % fromVersion)
                    if not is_triangualar(completes_only_index):
                        # Assuming we skipped full check locales, using all locales
                        _locales = locales
                    else:
                        # Excluding full check locales from the quick check
                        _locales = quick_check_locales
                    self.update_verify_config.addRelease(
                        release=appVersion,
                        build_id=build_id,
                        locales=_locales,
                        platform=update_platform,
                    )
                completes_only_index += 1

    def write_config(self):
        """Write the generated config to --output-file."""
        # Needs to be opened in "bytes" mode because we perform relative seeks on it
        with open(self.config["output_file"], "wb+") as fh:
            self.update_verify_config.write(fh)
+
+
if __name__ == "__main__":
    # Script entry point: parse options, run actions, exit with status.
    script = UpdateVerifyConfigCreator()
    script.run_and_exit()