Diffstat (limited to 'bin/update')
-rwxr-xr-x | bin/update/create_full_mar.py               |  30
-rwxr-xr-x | bin/update/create_full_mar_for_languages.py |  71
-rwxr-xr-x | bin/update/create_partial_update.py         | 185
-rw-r--r-- | bin/update/path.py                          |  10
-rw-r--r-- | bin/update/tools.py                         |  30
5 files changed, 63 insertions, 263 deletions
diff --git a/bin/update/create_full_mar.py b/bin/update/create_full_mar.py
index b4f53c48f1..1c5714e69b 100755
--- a/bin/update/create_full_mar.py
+++ b/bin/update/create_full_mar.py
@@ -3,7 +3,6 @@
 import sys
 import glob
 import os
-import re
 import subprocess
 import json
 import argparse
@@ -38,35 +37,20 @@ def main():
     target_dir = update_path.get_update_dir()
     temp_dir = update_path.get_current_build_dir()
 
-    tar_file_glob = os.path.join(update_path.get_workdir(), "installation", product_name, "archive", "install", "*", f'{product_name}_*_archive*')
-    tar_files = glob.glob(tar_file_glob)
-    if len(tar_files) != 1:
-        raise Exception(f'`{tar_file_glob}` does not match exactly one file')
-    tar_file = tar_files[0]
+    msi_file_glob = os.path.join(update_path.get_workdir(), "installation", product_name, "msi", "install", "*", f'{product_name}_*.msi')
+    msi_files = glob.glob(msi_file_glob)
+    if len(msi_files) != 1:
+        raise Exception(f'`{msi_file_glob}` does not match exactly one file')
+    msi_file = msi_files[0]
 
-    uncompress_dir = uncompress_file_to_dir(tar_file, temp_dir)
-
-    metadatafile = os.path.join(
-        update_path.get_workdir(), 'installation', product_name, 'archive', 'install', 'metadata')
-    ifsfile = os.path.join(update_path.get_mar_dir(), 'ifs')
-    with open(metadatafile) as meta, open(ifsfile, 'w') as ifs:
-        for l in meta:
-            m = re.fullmatch('(skip|cond) (.*)', l.rstrip())
-            if m and m.group(2).startswith(f'{product_name}/'):
-                path = m.group(2)[len(f'{product_name}/'):]
-                if m.group(1) == 'skip':
-                    os.remove(os.path.join(uncompress_dir, path))
-                else:
-                    ifs.write(f'"{path}" "{path}"\n')
+    uncompress_dir = uncompress_file_to_dir(msi_file, temp_dir)
 
     mar_file = make_complete_mar_name(target_dir, filename_prefix)
     path = os.path.join(
         workdir, 'UnpackedTarball/onlineupdate/tools/update-packaging/make_full_update.sh')
 
     os.putenv('MOZ_PRODUCT_VERSION', version)
     os.putenv('MAR_CHANNEL_ID', 'LOOnlineUpdater')
-    subprocess.call([
-        path, convert_to_native(mar_file), convert_to_native(uncompress_dir),
-        convert_to_native(ifsfile)])
+    subprocess.call([path, convert_to_native(mar_file), convert_to_native(uncompress_dir)])
 
     sign_mar_file(target_dir, certificate_path, certificate_name, mar_file, filename_prefix)
diff --git a/bin/update/create_full_mar_for_languages.py b/bin/update/create_full_mar_for_languages.py
deleted file mode 100755
index d431ecaf6d..0000000000
--- a/bin/update/create_full_mar_for_languages.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-import os
-import subprocess
-import json
-
-from tools import uncompress_file_to_dir, get_file_info
-
-from path import UpdaterPath
-from signing import sign_mar_file
-
-
-def make_complete_mar_name(target_dir, filename_prefix, language):
-    filename = filename_prefix + "_" + language + "_complete_langpack.mar"
-    return os.path.join(target_dir, filename)
-
-
-def create_lang_infos(mar_file_name, language, url):
-    data = {'lang': language,
-            'complete': get_file_info(mar_file_name, url)
-            }
-    return data
-
-
-def main():
-    if len(sys.argv) < 8:
-        print(
-            "Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $TARGETDIR $TEMPDIR $FILENAMEPREFIX $CERTIFICATEPATH $CERTIFICATENAME $BASEURL $VERSION")
-        sys.exit(1)
-
-    certificate_path = sys.argv[4]
-    certificate_name = sys.argv[5]
-    base_url = sys.argv[6]
-    filename_prefix = sys.argv[3]
-    workdir = sys.argv[2]
-    product_name = sys.argv[1]
-    version = sys.argv[7]
-
-    updater_path = UpdaterPath(workdir)
-    target_dir = updater_path.get_update_dir()
-    temp_dir = updater_path.get_language_dir()
-
-    language_pack_dir = os.path.join(workdir, "installation", product_name + "_languagepack", "archive", "install")
-    language_packs = os.listdir(language_pack_dir)
-    lang_infos = []
-    for language in language_packs:
-        if language == 'log':
-            continue
-
-        language_dir = os.path.join(language_pack_dir, language)
-        language_file = os.path.join(language_dir, os.listdir(language_dir)[0])
-
-        directory = uncompress_file_to_dir(language_file, os.path.join(temp_dir, language))
-
-        mar_file_name = make_complete_mar_name(target_dir, filename_prefix, language)
-
-        os.putenv('MOZ_PRODUCT_VERSION', version)
-        os.putenv('MAR_CHANNEL_ID', 'LOOnlineUpdater')
-        subprocess.call([os.path.join(workdir, 'UnpackedTarball/onlineupdate/tools/update-packaging/make_full_update.sh'), mar_file_name, directory])
-
-        sign_mar_file(target_dir, certificate_path, certificate_name, mar_file_name, filename_prefix)
-
-        lang_infos.append(create_lang_infos(mar_file_name, language, base_url))
-
-    with open(os.path.join(target_dir, "complete_lang_info.json"), "w") as language_info_file:
-        json.dump({'languages': lang_infos}, language_info_file, indent=4)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/bin/update/create_partial_update.py b/bin/update/create_partial_update.py
index 2730c4765f..8c49bd9159 100755
--- a/bin/update/create_partial_update.py
+++ b/bin/update/create_partial_update.py
@@ -1,90 +1,14 @@
 #!/usr/bin/env python3
+import glob
 import json
 import os
+import re
 import subprocess
 import sys
-import requests
-
-from path import UpdaterPath, mkdir_p, convert_to_unix, convert_to_native
+from path import UpdaterPath, convert_to_native
 from signing import sign_mar_file
-from tools import get_file_info, get_hash
-from uncompress_mar import extract_mar
-
-BUF_SIZE = 1024
-current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
-
-
-class InvalidFileException(Exception):
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(self, *args, **kwargs)
-
-
-def download_file(filepath, url, hash_string):
-    with open(filepath, "wb") as f:
-        response = requests.get(url, stream=True)
-
-        if not response.ok:
-            return
-
-        for block in response.iter_content(1024):
-            f.write(block)
-
-    file_hash = get_hash(filepath)
-
-    if file_hash != hash_string:
-        raise InvalidFileException(
-            "file hash does not match for file %s: Expected %s, Got: %s" % (url, hash_string, file_hash))
-
-
-def handle_language(lang_entries, filedir):
-    langs = {}
-    for lang, data in lang_entries.items():
-        lang_dir = os.path.join(filedir, lang)
-        lang_file = os.path.join(lang_dir, "lang.mar")
-        mkdir_p(lang_dir)
-        download_file(lang_file, data["url"], data["hash"])
-        dir_path = os.path.join(lang_dir, "lang")
-        mkdir_p(dir_path)
-        extract_mar(lang_file, dir_path)
-        langs[lang] = dir_path
-
-    return langs
-
-
-def download_mar_for_update_channel_and_platform(server_url, channel, platform, temp_dir):
-    base_url = server_url + "update/partial-targets/1/"
-    url = base_url + platform + "/" + channel
-    r = requests.get(url)
-    if r.status_code != 200:
-        print(r.content)
-        raise Exception("download failed")
-
-    update_info = json.loads(r.content.decode("utf-8"))
-    update_files = update_info['updates']
-    downloaded_updates = {}
-    for update_file in update_files:
-        build = update_file["build"]
-        filedir = os.path.join(temp_dir, build)
-
-        mkdir_p(filedir)
-
-        filepath = filedir + "/complete.mar"
-        url = update_file["update"]["url"]
-        expected_hash = update_file["update"]["hash"]
-        download_file(filepath, url, expected_hash)
-
-        dir_path = os.path.join(filedir, "complete")
-        mkdir_p(dir_path)
-        extract_mar(filepath, dir_path)
-
-        downloaded_updates[build] = {"complete": dir_path}
-
-        langs = handle_language(update_file["languages"], filedir)
-        downloaded_updates[build]["languages"] = langs
-
-    return downloaded_updates
+from tools import get_file_info, uncompress_file_to_dir
 
 
 def generate_file_name(old_build_id, mar_name_prefix):
@@ -92,16 +16,6 @@ def generate_file_name(old_build_id, mar_name_prefix):
     return name
 
 
-def generate_lang_file_name(old_build_id, mar_name_prefix, lang):
-    name = "%s_%s_from_%s_partial.mar" % (mar_name_prefix, lang, old_build_id)
-    return name
-
-
-def add_single_dir(path):
-    dir_name = [os.path.join(path, name) for name in os.listdir(path) if os.path.isdir(os.path.join(path, name))]
-    return dir_name[0]
-
-
 def main():
     workdir = sys.argv[1]
 
@@ -109,60 +23,53 @@ def main():
     updater_path.ensure_dir_exist()
     mar_name_prefix = sys.argv[2]
-    server_url = sys.argv[3]
-    channel = sys.argv[4]
-    certificate_path = sys.argv[5]
-    certificate_name = sys.argv[6]
-    base_url = sys.argv[7]
-    platform = sys.argv[8]
-    build_id = sys.argv[9]
-
-    current_build_path = updater_path.get_current_build_dir()
-    mar_dir = updater_path.get_mar_dir()
-    temp_dir = updater_path.get_previous_build_dir()
-    update_dir = updater_path.get_update_dir()
-
-    current_build_path = add_single_dir(current_build_path)
-    if sys.platform == "cygwin":
-        current_build_path = add_single_dir(current_build_path)
-
-    updates = download_mar_for_update_channel_and_platform(server_url, channel, platform, temp_dir)
-
-    data = {"partials": []}
+    channel = sys.argv[3]
+    certificate_path = sys.argv[4]
+    certificate_name = sys.argv[5]
+    base_url = sys.argv[6]
+    product_name = sys.argv[7]
+    version = sys.argv[8]
+    old_msi = sys.argv[9]
+
+    old_uncompress_dir = uncompress_file_to_dir(old_msi, updater_path.get_previous_build_dir())
+    versionini = os.path.join(old_uncompress_dir, 'program', 'version.ini') #TODO: Linux, macOS
+    old_build_id = None
+    with open(versionini) as f:
+        for l in f:
+            m = re.fullmatch('buildid=(.*)', l.rstrip())
+            if m:
+                old_build_id = m.group(1)
+                break
+    if old_build_id is None:
+        raise Exception(f'Cannot find buildid in {versionini}')
+
+    new_msi_file_glob = os.path.join(updater_path.get_workdir(), "installation", product_name, "msi", "install", "*", f'{product_name}_*.msi')
+    new_msi_files = glob.glob(new_msi_file_glob)
+    if len(new_msi_files) != 1:
+        raise Exception(f'`{new_msi_file_glob}` does not match exactly one file')
+    new_msi_file = new_msi_files[0]
+    new_uncompress_dir = uncompress_file_to_dir(new_msi_file, updater_path.get_current_build_dir())
 
-    for build, update in updates.items():
-        file_name = generate_file_name(build, mar_name_prefix)
-        mar_file = os.path.join(update_dir, file_name)
-        subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(mar_file),
-                         convert_to_native(update["complete"]), convert_to_native(current_build_path)])
-        sign_mar_file(update_dir, certificate_path, certificate_name, mar_file, mar_name_prefix)
-
-        partial_info = {"file": get_file_info(mar_file, base_url), "from": build, "to": build_id,
-                        "languages": {}}
-
-        # on Windows we don't use language packs
-        if sys.platform != "cygwin":
-            for lang, lang_info in update["languages"].items():
-                lang_name = generate_lang_file_name(build, mar_name_prefix, lang)
-
-                # write the file into the final directory
-                lang_mar_file = os.path.join(update_dir, lang_name)
+    update_dir = updater_path.get_update_dir()
 
-                # the directory of the old language file is of the form
-                # workdir/mar/language/en-US/LibreOffice_<version>_<os>_archive_langpack_<lang>/
-                language_dir = add_single_dir(os.path.join(mar_dir, "language", lang))
-                subprocess.call(
-                    [os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(lang_mar_file),
-                     convert_to_native(lang_info), convert_to_native(language_dir)])
-                sign_mar_file(update_dir, certificate_path, certificate_name, lang_mar_file, mar_name_prefix)
+    file_name = generate_file_name(old_build_id, mar_name_prefix)
+    mar_file = os.path.join(update_dir, file_name)
 
-                # add the partial language info
-                partial_info["languages"][lang] = get_file_info(lang_mar_file, base_url)
+    os.putenv('MOZ_PRODUCT_VERSION', version)
+    os.putenv('MAR_CHANNEL_ID', 'LOOnlineUpdater')
+    subprocess.call([os.path.join(workdir, 'UnpackedTarball/onlineupdate/tools/update-packaging/make_incremental_update.sh'), convert_to_native(mar_file),
+                     convert_to_native(old_uncompress_dir), convert_to_native(new_uncompress_dir)])
 
-        data["partials"].append(partial_info)
+    sign_mar_file(update_dir, certificate_path, certificate_name, mar_file, mar_name_prefix)
 
-    with open(os.path.join(update_dir, "partial_update_info.json"), "w") as f:
-        json.dump(data, f)
+    data = {
+        'from': old_build_id,
+        'see also': '',
+        'update': get_file_info(mar_file, base_url),
+        'languages': {}
+    };
+    with open(os.path.join(update_dir, channel), "w") as f:
+        json.dump(data, f, indent=4)
 
 
 if __name__ == '__main__':
diff --git a/bin/update/path.py b/bin/update/path.py
index d91e9e7fba..5acaafcace 100644
--- a/bin/update/path.py
+++ b/bin/update/path.py
@@ -8,19 +8,9 @@
 #
 
 import os
-import errno
 import subprocess
 from sys import platform
 
-def mkdir_p(path):
-    try:
-        os.makedirs(path)
-    except OSError as exc:  # Python >2.5
-        if exc.errno == errno.EEXIST and os.path.isdir(path):
-            pass
-        else:
-            raise
-
 def convert_to_unix(path):
     if platform == "cygwin":
         return subprocess.check_output(["cygpath", "-u", path]).decode("utf-8", "strict").rstrip()
diff --git a/bin/update/tools.py b/bin/update/tools.py
index ab38d10f4b..6bc3f7971f 100644
--- a/bin/update/tools.py
+++ b/bin/update/tools.py
@@ -1,30 +1,20 @@
 import os
 import hashlib
-import zipfile
-import tarfile
+import subprocess
 
+from path import convert_to_native
 
 
-def uncompress_file_to_dir(compressed_file, uncompress_dir):
-    extension = os.path.splitext(compressed_file)[1]
+def uncompress_file_to_dir(compressed_file, uncompress_dir):
     os.makedirs(uncompress_dir, exist_ok=True)
 
-    if extension == '.gz':
-        with tarfile.open(compressed_file) as tar:
-            tar.extractall(uncompress_dir)
-    elif extension == '.zip':
-        with zipfile.ZipFile(compressed_file) as zip_file:
-            zip_file.extractall(uncompress_dir)
-
-        uncompress_dir = os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
-        if " " in os.listdir(uncompress_dir)[0]:
-            print("replacing whitespace in directory name")
-            os.rename(os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0]),
-                      os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0].replace(" ", "_")))
-    else:
-        print("Error: unknown extension " + extension)
-
-    return os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+    if subprocess.call([
+            'msiexec', '/a', convert_to_native(compressed_file).replace('/', '\\'),
+            '/quiet',
+            'TARGETDIR=' + convert_to_native(uncompress_dir).replace('/', '\\')]) != 0:
+        raise Exception(f'msiexec failed')
+
+    return uncompress_dir
 
 
 BUF_SIZE = 1048576