author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:06:44 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:06:44 +0000
commit     ed5640d8b587fbcfed7dd7967f3de04b37a76f26 (patch)
tree       7a5f7c6c9d02226d7471cb3cc8fbbf631b415303 /bin/update
parent     Initial commit. (diff)

Adding upstream version 4:7.4.7.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'bin/update')
-rw-r--r--  bin/update/common.sh                          222
-rw-r--r--  bin/update/config.py                           28
-rwxr-xr-x  bin/update/create_build_config.py              60
-rwxr-xr-x  bin/update/create_full_mar.py                  54
-rwxr-xr-x  bin/update/create_full_mar_for_languages.py    66
-rwxr-xr-x  bin/update/create_partial_update.py           160
-rwxr-xr-x  bin/update/get_update_channel.py               23
-rwxr-xr-x  bin/update/make_full_update.sh                122
-rwxr-xr-x  bin/update/make_incremental_update.sh         318
-rw-r--r--  bin/update/path.py                             69
-rw-r--r--  bin/update/signing.py                          12
-rw-r--r--  bin/update/tools.py                            64
-rwxr-xr-x  bin/update/uncompress_mar.py                   54
-rw-r--r--  bin/update/upload_build_config.py              42
-rwxr-xr-x  bin/update/upload_builds.py                    32
15 files changed, 1326 insertions, 0 deletions
diff --git a/bin/update/common.sh b/bin/update/common.sh
new file mode 100644
index 000000000..dcdbea8bb
--- /dev/null
+++ b/bin/update/common.sh
@@ -0,0 +1,222 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# Code shared by update packaging scripts.
+# Author: Darin Fisher
+#
+
+# -----------------------------------------------------------------------------
+# By default just assume that these tools exist on our path
+MAR=${MAR:-mar}
+BZIP2=${BZIP2:-bzip2}
+MBSDIFF=${MBSDIFF:-mbsdiff}
+
+# -----------------------------------------------------------------------------
+# Helper routines
+
+notice() {
+ echo "$*" 1>&2
+}
+
+get_file_size() {
+ info=($(ls -ln "$1"))
+ echo ${info[4]}
+}
+
+check_externals() {
+
+ # check whether we can call the mar executable
+ "$MAR" --version > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid mar executable in the path or in the MAR environment variable"
+ exit 1
+ fi
+
+ # check whether we can access the bzip2 executable
+ "$BZIP2" --help > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid bzip2 executable in the PATH or in the BZIP2 environment variable"
+ exit 1
+ fi
+}
+
+copy_perm() {
+ reference="$1"
+ target="$2"
+
+ if [ -x "$reference" ]; then
+ chmod 0755 "$target"
+ else
+ chmod 0644 "$target"
+ fi
+}
+
+make_add_instruction() {
+ f="$1"
+ filev2="$2"
+ # The third param will be an empty string when a file add instruction is only
+ # needed in the version 2 manifest. This only happens when the file has an
+ # add-if-not instruction in the version 3 manifest: prior to version 3, the
+ # precomplete file carried a remove instruction for this file, so the file is
+ # removed before a complete update is applied.
+ filev3="$3"
+
+ # Used to log to the console
+ if [ $4 ]; then
+ forced=" (forced)"
+ else
+ forced=
+ fi
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " add-if \"$testdir\" \"$f\""
+ echo "add-if \"$testdir\" \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add-if \"$testdir\" \"$f\"" >> $filev3
+ fi
+ else
+ notice " add \"$f\"$forced"
+ echo "add \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add \"$f\"" >> $filev3
+ fi
+ fi
+}
+
+check_for_add_if_not_update() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "channel-prefs.js" -o \
+ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+check_for_add_to_manifestv2() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+make_add_if_not_instruction() {
+ f="$1"
+ filev3="$2"
+
+ notice " add-if-not \"$f\" \"$f\""
+ echo "add-if-not \"$f\" \"$f\"" >> $filev3
+}
+
+make_patch_instruction() {
+ f="$1"
+ filev2="$2"
+ filev3="$3"
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " patch-if \"$testdir\" \"$f.patch\" \"$f\""
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev3
+ else
+ notice " patch \"$f.patch\" \"$f\""
+ echo "patch \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch \"$f.patch\" \"$f\"" >> $filev3
+ fi
+}
+
+append_remove_instructions() {
+ dir="$1"
+ filev2="$2"
+ filev3="$3"
+
+ if [ -f "$dir/removed-files" ]; then
+ listfile="$dir/removed-files"
+ elif [ -f "$dir/Contents/Resources/removed-files" ]; then
+ listfile="$dir/Contents/Resources/removed-files"
+ fi
+ if [ -n "$listfile" ]; then
+ # Map spaces to pipes so that we correctly handle filenames with spaces.
+ files=($(cat "$listfile" | tr " " "|" | sort -r))
+ num_files=${#files[*]}
+ for ((i=0; $i<$num_files; i=$i+1)); do
+ # Map pipes back to whitespace and remove carriage returns
+ f=$(echo ${files[$i]} | tr "|" " " | tr -d '\r')
+ # Trim whitespace
+ f=$(echo $f)
+ # Exclude blank lines.
+ if [ -n "$f" ]; then
+ # Exclude comments
+ if [ ! $(echo "$f" | grep -c '^#') = 1 ]; then
+ if [ $(echo "$f" | grep -c '\/$') = 1 ]; then
+ notice " rmdir \"$f\""
+ echo "rmdir \"$f\"" >> $filev2
+ echo "rmdir \"$f\"" >> $filev3
+ elif [ $(echo "$f" | grep -c '\/\*$') = 1 ]; then
+ # Remove the *
+ f=$(echo "$f" | sed -e 's:\*$::')
+ notice " rmrfdir \"$f\""
+ echo "rmrfdir \"$f\"" >> $filev2
+ echo "rmrfdir \"$f\"" >> $filev3
+ else
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $filev2
+ echo "remove \"$f\"" >> $filev3
+ fi
+ fi
+ fi
+ done
+ fi
+}
+
+# List all files in the current directory, stripping leading "./"
+# Pass a variable name and it will be filled as an array.
+list_files() {
+ count=0
+
+ find . -type f \
+ ! -name "update.manifest" \
+ ! -name "updatev2.manifest" \
+ ! -name "updatev3.manifest" \
+ ! -name "temp-dirlist" \
+ ! -name "temp-filelist" \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-filelist"
+ while read file; do
+ eval "${1}[$count]=\"$file\""
+ (( count++ ))
+ done < "temp-filelist"
+ rm "temp-filelist"
+}
+
+# List all directories in the current directory, stripping leading "./"
+list_dirs() {
+ count=0
+
+ find . -type d \
+ ! -name "." \
+ ! -name ".." \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-dirlist"
+ while read dir; do
+ eval "${1}[$count]=\"$dir\""
+ (( count++ ))
+ done < "temp-dirlist"
+ rm "temp-dirlist"
+}
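
For reference, a minimal sketch of how a packaging script can consume common.sh; the target directory and manifest paths are placeholders, and make_full_update.sh below shows the complete recipe:

    #!/usr/bin/env bash
    # Illustrative only: source the shared helpers, verify the external tools
    # (mar/bzip2 must be on PATH or exported via MAR/BZIP2), then emit "add"
    # instructions for every file in an unpacked install tree.
    . "$(dirname "$0")/common.sh"      # assumes this sketch sits next to common.sh
    check_externals

    manifestv2="$PWD/updatev2.manifest"   # hypothetical output files
    manifestv3="$PWD/updatev3.manifest"
    : > "$manifestv2"
    : > "$manifestv3"

    cd /path/to/unpacked/installation     # placeholder directory
    list_files files
    for ((i=0; i<${#files[*]}; i++)); do
        make_add_instruction "${files[$i]}" "$manifestv2" "$manifestv3"
    done
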
diff --git a/bin/update/config.py b/bin/update/config.py
new file mode 100644
index 000000000..0bc60a07f
--- /dev/null
+++ b/bin/update/config.py
@@ -0,0 +1,28 @@
+
+import configparser
+import os
+
+class Config(object):
+
+ def __init__(self):
+ self.certificate_path = None
+ self.certificate_name = None
+ self.channel = None
+ self.base_url = None
+ self.upload_url = None
+ self.server_url = None
+
+def parse_config(config_file):
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(config_file))
+
+ data = Config()
+ updater_data = config['Updater']
+ data.base_url = updater_data['base-url']
+ data.certificate_name = updater_data['certificate-name']
+ data.certificate_path = updater_data['certificate-path']
+ data.channel = updater_data['channel']
+ data.upload_url = updater_data['upload-url']
+ data.server_url = updater_data["ServerURL"]
+
+ return data
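
parse_config expects an INI-style file with an [Updater] section whose keys match the lookups above; a hypothetical example (every value is a placeholder, not a real endpoint) could look like:

    [Updater]
    base-url = https://example.com/downloads/
    certificate-name = updater-cert
    certificate-path = /path/to/nss/certdb
    channel = daily
    upload-url = uploader@example.com:/srv/updates/$(channel)/$(platform)/$(buildid)/
    ServerURL = https://update.example.com/

The $(channel), $(platform) and $(buildid) markers are expanded by replace_variables_in_string in tools.py when upload_builds.py builds the final upload target.
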
diff --git a/bin/update/create_build_config.py b/bin/update/create_build_config.py
new file mode 100755
index 000000000..7cc8ac4be
--- /dev/null
+++ b/bin/update/create_build_config.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python3
+
+import json
+import sys
+import os
+
+from config import parse_config
+
+from tools import replace_variables_in_string
+
+def update_all_url_entries(data, **kwargs):
+ data['complete']['url'] = replace_variables_in_string(data['complete']['url'], **kwargs)
+
+ if sys.platform != "cygwin":
+ for language in data['languages']:
+ language['complete']['url'] = replace_variables_in_string(language['complete']['url'], **kwargs)
+
+ if 'partials' in data:
+ for partial in data['partials']:
+ partial['file']['url'] = replace_variables_in_string(partial['file']['url'], **kwargs)
+
+ if sys.platform == "cygwin":
+ continue
+
+ for lang, lang_file in partial['languages'].items():
+ lang_file['url'] = replace_variables_in_string(lang_file['url'], **kwargs)
+
+def main(argv):
+ if len(argv) < 7:
+ print("Usage: create_build_config.py $PRODUCTNAME $VERSION $BUILDID $PLATFORM $TARGETDIR $UPDATE_CONFIG")
+ sys.exit(1)
+
+ config = parse_config(argv[6])
+
+ data = { 'productName' : argv[1],
+ 'version' : argv[2],
+ 'buildNumber' : argv[3],
+ 'updateChannel' : config.channel,
+ 'platform' : argv[4]
+ }
+
+ extra_data_files = ['complete_info.json', 'partial_update_info.json']
+ if sys.platform != "cygwin":
+ extra_data_files.append('complete_lang_info.json')
+
+ for extra_file in extra_data_files:
+ extra_file_path = os.path.join(argv[5], extra_file)
+ if not os.path.exists(extra_file_path):
+ continue
+ with open(extra_file_path, "r") as f:
+ extra_data = json.load(f)
+ data.update(extra_data)
+
+ update_all_url_entries(data, channel=config.channel, platform=argv[4], buildid=argv[3], version=argv[2])
+
+ with open(os.path.join(argv[5], "build_config.json"), "w") as f:
+ json.dump(data, f, indent=4)
+
+if __name__ == "__main__":
+ main(sys.argv)
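
A hypothetical invocation with placeholder arguments; the script merges complete_info.json, partial_update_info.json and, outside Cygwin, complete_lang_info.json found in $TARGETDIR into a single build_config.json there:

    ./bin/update/create_build_config.py LibreOffice 7.4.7 20240407 Linux_X86_64 \
        /path/to/workdir/update-info ~/.config/updater.ini
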
diff --git a/bin/update/create_full_mar.py b/bin/update/create_full_mar.py
new file mode 100755
index 000000000..48686be21
--- /dev/null
+++ b/bin/update/create_full_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info, make_complete_mar_name
+from config import parse_config
+from signing import sign_mar_file
+from path import UpdaterPath, convert_to_unix, convert_to_native
+
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+def main():
+ if len(sys.argv) < 5:
+ print("Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ if len(update_config) == 0:
+ print("missing update config")
+ sys.exit(1)
+
+ update_path = UpdaterPath(workdir)
+ update_path.ensure_dir_exist()
+
+ target_dir = update_path.get_update_dir()
+ temp_dir = update_path.get_current_build_dir()
+
+ config = parse_config(update_config)
+
+ tar_dir = os.path.join(update_path.get_workdir(), "installation", product_name, "archive", "install", "en-US")
+ tar_file = os.path.join(tar_dir, os.listdir(tar_dir)[0])
+
+ uncompress_dir = uncompress_file_to_dir(tar_file, temp_dir)
+
+ mar_file = make_complete_mar_name(target_dir, filename_prefix)
+ path = os.path.join(current_dir_path, 'make_full_update.sh')
+ subprocess.call([path, convert_to_native(mar_file), convert_to_native(uncompress_dir)])
+
+ sign_mar_file(target_dir, config, mar_file, filename_prefix)
+
+ file_info = { 'complete' : get_file_info(mar_file, config.base_url) }
+
+ with open(os.path.join(target_dir, 'complete_info.json'), "w") as complete_info_file:
+ json.dump(file_info, complete_info_file, indent = 4)
+
+if __name__ == '__main__':
+ main()
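
An illustrative call with placeholder paths and prefix; the script locates the en-US archive under $WORKDIR/installation/$PRODUCTNAME/archive/install/en-US, unpacks it, runs make_full_update.sh on the result and signs the produced MAR:

    ./bin/update/create_full_mar.py LibreOffice /path/to/workdir \
        LibreOffice_7.4.7_Linux_X86_64 ~/.config/updater.ini
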
diff --git a/bin/update/create_full_mar_for_languages.py b/bin/update/create_full_mar_for_languages.py
new file mode 100755
index 000000000..039521dd1
--- /dev/null
+++ b/bin/update/create_full_mar_for_languages.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info
+
+from config import parse_config
+from path import UpdaterPath
+from signing import sign_mar_file
+
+current_dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def make_complete_mar_name(target_dir, filename_prefix, language):
+ filename = filename_prefix + "_" + language + "_complete_langpack.mar"
+ return os.path.join(target_dir, filename)
+
+def create_lang_infos(mar_file_name, language, url):
+ data = {'lang' : language,
+ 'complete' : get_file_info(mar_file_name, url)
+ }
+ return data
+
+def main():
+ if len(sys.argv) < 5:
+ print("Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $TARGETDIR $TEMPDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ target_dir = updater_path.get_update_dir()
+ temp_dir = updater_path.get_language_dir()
+
+ config = parse_config(update_config)
+
+ language_pack_dir = os.path.join(workdir, "installation", product_name + "_languagepack", "archive", "install")
+ language_packs = os.listdir(language_pack_dir)
+ lang_infos = []
+ for language in language_packs:
+ if language == 'log':
+ continue
+
+ language_dir = os.path.join(language_pack_dir, language)
+ language_file = os.path.join(language_dir, os.listdir(language_dir)[0])
+
+ directory = uncompress_file_to_dir(language_file, os.path.join(temp_dir, language))
+
+ mar_file_name = make_complete_mar_name(target_dir, filename_prefix, language)
+
+ subprocess.call([os.path.join(current_dir_path, 'make_full_update.sh'), mar_file_name, directory])
+
+ sign_mar_file(target_dir, config, mar_file_name, filename_prefix)
+
+ lang_infos.append(create_lang_infos(mar_file_name, language, config.base_url))
+
+ with open(os.path.join(target_dir, "complete_lang_info.json"), "w") as language_info_file:
+ json.dump({'languages' : lang_infos}, language_info_file, indent=4)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/create_partial_update.py b/bin/update/create_partial_update.py
new file mode 100755
index 000000000..9412bcd6e
--- /dev/null
+++ b/bin/update/create_partial_update.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+import requests
+import json
+import sys
+import hashlib
+import os
+import subprocess
+import errno
+
+from config import parse_config
+from uncompress_mar import extract_mar
+from tools import get_file_info, get_hash
+from signing import sign_mar_file
+
+from path import UpdaterPath, mkdir_p, convert_to_unix, convert_to_native
+
+BUF_SIZE = 1024
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+class InvalidFileException(Exception):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+def download_file(filepath, url, hash_string):
+ with open(filepath, "wb") as f:
+ response = requests.get(url, stream=True)
+
+ if not response.ok:
+ return
+
+ for block in response.iter_content(1024):
+ f.write(block)
+
+ file_hash = get_hash(filepath)
+
+ if file_hash != hash_string:
+ raise InvalidFileException("file hash does not match for file %s: Expected %s, Got: %s" % (url, hash_string, file_hash))
+
+def handle_language(lang_entries, filedir):
+ mar = os.environ.get('MAR', 'mar')
+ langs = {}
+ for lang, data in lang_entries.items():
+ lang_dir = os.path.join(filedir, lang)
+ lang_file = os.path.join(lang_dir, "lang.mar")
+ mkdir_p(lang_dir)
+ download_file(lang_file , data["url"], data["hash"])
+ dir_path = os.path.join(lang_dir, "lang")
+ mkdir_p(dir_path)
+ extract_mar(lang_file, dir_path)
+ langs[lang] = dir_path
+
+ return langs
+
+def download_mar_for_update_channel_and_platform(config, platform, temp_dir):
+ mar = os.environ.get('MAR', 'mar')
+ base_url = config.server_url + "update/partial-targets/1/"
+ url = base_url + platform + "/" + config.channel
+ r = requests.get(url)
+ if r.status_code != 200:
+ print(r.content)
+ raise Exception("download failed")
+
+ update_info = json.loads(r.content.decode("utf-8"))
+ update_files = update_info['updates']
+ downloaded_updates = {}
+ for update_file in update_files:
+ build = update_file["build"]
+ filedir = os.path.join(temp_dir, build)
+
+ mkdir_p(filedir)
+
+ filepath = filedir + "/complete.mar"
+ url = update_file["update"]["url"]
+ expected_hash = update_file["update"]["hash"]
+ download_file(filepath, url, expected_hash)
+
+ dir_path = os.path.join(filedir, "complete")
+ mkdir_p(dir_path)
+ extract_mar(filepath, dir_path)
+
+ downloaded_updates[build] = {"complete": dir_path}
+
+ langs = handle_language(update_file["languages"], filedir)
+ downloaded_updates[build]["languages"] = langs
+
+ return downloaded_updates
+
+def generate_file_name(current_build_id, old_build_id, mar_name_prefix):
+ name = "%s_from_%s_partial.mar" %(mar_name_prefix, old_build_id)
+ return name
+
+def generate_lang_file_name(current_build_id, old_build_id, mar_name_prefix, lang):
+ name = "%s_%s_from_%s_partial.mar" %(mar_name_prefix, lang, old_build_id)
+ return name
+
+def add_single_dir(path):
+ dir_name = [os.path.join(path, name) for name in os.listdir(path) if os.path.isdir(os.path.join(path, name))]
+ return dir_name[0]
+
+def main():
+ workdir = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ updater_path.ensure_dir_exist()
+
+ mar_name_prefix = sys.argv[2]
+ update_config = sys.argv[3]
+ platform = sys.argv[4]
+ build_id = sys.argv[5]
+
+ current_build_path = updater_path.get_current_build_dir()
+ mar_dir = updater_path.get_mar_dir()
+ temp_dir = updater_path.get_previous_build_dir()
+ update_dir = updater_path.get_update_dir()
+
+ current_build_path = add_single_dir(current_build_path)
+ if sys.platform == "cygwin":
+ current_build_path = add_single_dir(current_build_path)
+
+ config = parse_config(update_config)
+
+ updates = download_mar_for_update_channel_and_platform(config, platform, temp_dir)
+
+ data = {"partials": []}
+
+ for build, update in updates.items():
+ file_name = generate_file_name(build_id, build, mar_name_prefix)
+ mar_file = os.path.join(update_dir, file_name)
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(mar_file), convert_to_native(update["complete"]), convert_to_native(current_build_path)])
+ sign_mar_file(update_dir, config, mar_file, mar_name_prefix)
+
+ partial_info = {"file":get_file_info(mar_file, config.base_url), "from": build, "to": build_id, "languages": {}}
+
+ # on Windows we don't use language packs
+ if sys.platform != "cygwin":
+ for lang, lang_info in update["languages"].items():
+ lang_name = generate_lang_file_name(build_id, build, mar_name_prefix, lang)
+
+ # write the file into the final directory
+ lang_mar_file = os.path.join(update_dir, lang_name)
+
+ # the directory of the old language file is of the form
+ # workdir/mar/language/en-US/LibreOffice_<version>_<os>_archive_langpack_<lang>/
+ language_dir = add_single_dir(os.path.join(mar_dir, "language", lang))
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(lang_mar_file), convert_to_native(lang_info), convert_to_native(language_dir)])
+ sign_mar_file(update_dir, config, lang_mar_file, mar_name_prefix)
+
+ # add the partial language info
+ partial_info["languages"][lang] = get_file_info(lang_mar_file, config.base_url)
+
+ data["partials"].append(partial_info)
+
+ with open(os.path.join(update_dir, "partial_update_info.json"), "w") as f:
+ json.dump(data, f)
+
+
+if __name__ == '__main__':
+ main()
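
The partial generator takes five positional arguments; a sketch with placeholder values (it downloads the previous complete MARs from $ServerURL/update/partial-targets/1/<platform>/<channel> and diffs them against the current build via make_incremental_update.sh):

    ./bin/update/create_partial_update.py /path/to/workdir \
        LibreOffice_7.4.7_Linux_X86_64 ~/.config/updater.ini Linux_X86_64 20240407
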
diff --git a/bin/update/get_update_channel.py b/bin/update/get_update_channel.py
new file mode 100755
index 000000000..f94507d64
--- /dev/null
+++ b/bin/update/get_update_channel.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import sys
+from config import parse_config
+
+def main():
+ if len(sys.argv) < 2:
+ sys.exit(1)
+
+ update_config = sys.argv[1]
+ config = parse_config(update_config)
+ print(config.channel)
+
+if __name__ == "__main__":
+ main()
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/bin/update/make_full_update.sh b/bin/update/make_full_update.sh
new file mode 100755
index 000000000..4140ecae6
--- /dev/null
+++ b/bin/update/make_full_update.sh
@@ -0,0 +1,122 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates full update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE DIRECTORY"
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+if [ $1 = -h ]; then
+ print_usage
+ notice ""
+ notice "The contents of DIRECTORY will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice ""
+ exit 1
+fi
+
+check_externals
+# -----------------------------------------------------------------------------
+
+archive="$1"
+targetdir="$2"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$targetdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ targetdir=$(echo "$targetdir" | sed -e 's:\/$::')
+fi
+workdir="$targetdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+targetfiles="updatev2.manifest updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$targetdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_files files
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+> $updatemanifestv2
+> $updatemanifestv3
+notice ""
+notice "Adding type instruction to update manifests"
+notice " type complete"
+echo "type \"complete\"" >> $updatemanifestv2
+echo "type \"complete\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file add instructions to update manifests"
+num_files=${#files[*]}
+
+for ((i=0; $i<$num_files; i=$i+1)); do
+ f="${files[$i]}"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ if check_for_add_to_manifestv2 "$f"; then
+ make_add_instruction "$f" "$updatemanifestv2" "" 1
+ fi
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+ dir=$(dirname "$f")
+ mkdir -p "$workdir/$dir"
+ $BZIP2 -cz9 "$targetdir/$f" > "$workdir/$f"
+ copy_perm "$targetdir/$f" "$workdir/$f"
+
+ targetfiles="$targetfiles \"$f\""
+ echo $f >> $workdir/files.txt
+done
+
+# Append remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$targetdir" "$updatemanifestv2" "$updatemanifestv3"
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+eval "$MAR -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/make_incremental_update.sh b/bin/update/make_incremental_update.sh
new file mode 100755
index 000000000..31bddabdb
--- /dev/null
+++ b/bin/update/make_incremental_update.sh
@@ -0,0 +1,318 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates incremental update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE FROMDIR TODIR"
+ notice ""
+ notice "The differences between FROMDIR and TODIR will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice " -f clobber this file in the installation"
+ notice " Must be a path to a file to clobber in the partial update."
+ notice ""
+}
+
+check_for_forced_update() {
+ force_list="$1"
+ forced_file_chk="$2"
+
+ local f
+
+ if [ "$forced_file_chk" = "precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "${forced_file_chk##*.}" = "chk" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ for f in $force_list; do
+ #echo comparing $forced_file_chk to $f
+ if [ "$forced_file_chk" = "$f" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ done
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+requested_forced_updates='Contents/MacOS/firefox'
+
+while getopts "hf:" flag
+do
+ case "$flag" in
+ h) print_usage; exit 0
+ ;;
+ f) requested_forced_updates="$requested_forced_updates $OPTARG"
+ ;;
+ ?) print_usage; exit 1
+ ;;
+ esac
+done
+
+# -----------------------------------------------------------------------------
+
+let arg_start=$OPTIND-1
+shift $arg_start
+
+archive="$1"
+olddir="$2"
+newdir="$3"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$newdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ newdir=$(echo "$newdir" | sed -e 's:\/$::')
+fi
+workdir="$newdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$olddir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+list_files oldfiles
+list_dirs olddirs
+
+popd
+
+pushd "$newdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_dirs newdirs
+list_files newfiles
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+notice ""
+notice "Adding type instruction to update manifests"
+> $updatemanifestv2
+> $updatemanifestv3
+notice " type partial"
+echo "type \"partial\"" >> $updatemanifestv2
+echo "type \"partial\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file patch and add instructions to update manifests"
+
+num_oldfiles=${#oldfiles[*]}
+remove_array=
+num_removes=0
+
+for ((i=0; $i<$num_oldfiles; i=$i+1)); do
+ f="${oldfiles[$i]}"
+
+ # If this file exists in the new directory as well, then check if it differs.
+ if [ -f "$newdir/$f" ]; then
+
+ if check_for_add_if_not_update "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if check_for_forced_update "$requested_forced_updates" "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3" 1
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if ! diff "$olddir/$f" "$newdir/$f" > /dev/null; then
+ # Compute both the compressed binary diff and the compressed file, and
+ # compare the sizes. Then choose the smaller of the two to package.
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+ notice "diffing \"$f\""
+ # MBSDIFF_HOOK represents the communication interface with funsize and,
+ # if enabled, caches the intermediate patches for future use so they do
+ # not have to be recomputed
+ #
+ # An example of MBSDIFF_HOOK env variable could look like this:
+ # export MBSDIFF_HOOK="myscript.sh -A https://funsize/api -c /home/user"
+ # where myscript.sh has the following usage:
+ # myscript.sh -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] \
+ # PATH-FROM-URL PATH-TO-URL PATH-PATCH SERVER-URL
+ #
+ # Note: patches are bzipped and stashed in funsize to gain more speed
+
+ # if service is not enabled then default to old behavior
+ if [ -z "$MBSDIFF_HOOK" ]; then
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ else
+ # if service enabled then check patch existence for retrieval
+ if $MBSDIFF_HOOK -g "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"; then
+ notice "file \"$f\" found in funsize, diffing skipped"
+ else
+ # if not found already - compute it and cache it for future use
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ $MBSDIFF_HOOK -u "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"
+ fi
+ fi
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ patchfile="$workdir/$f.patch.bz2"
+ patchsize=$(get_file_size "$patchfile")
+ fullsize=$(get_file_size "$workdir/$f")
+
+ if [ $patchsize -lt $fullsize ]; then
+ make_patch_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ mv -f "$patchfile" "$workdir/$f.patch"
+ rm -f "$workdir/$f"
+ echo $f.patch >> $workdir/files.txt
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ rm -f "$patchfile"
+ echo $f >> $workdir/files.txt
+ fi
+ fi
+ else
+ # remove instructions are added after add / patch instructions for
+ # consistency with make_incremental_updates.py
+ remove_array[$num_removes]=$f
+ (( num_removes++ ))
+ fi
+done
+
+# Newly added files
+notice ""
+notice "Adding file add instructions to update manifests"
+num_newfiles=${#newfiles[*]}
+
+for ((i=0; $i<$num_newfiles; i=$i+1)); do
+ f="${newfiles[$i]}"
+
+ # If we've already tested this file, then skip it
+ for ((j=0; $j<$num_oldfiles; j=$j+1)); do
+ if [ "$f" = "${oldfiles[j]}" ]; then
+ continue 2
+ fi
+ done
+
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+
+ echo $f >> $workdir/files.txt
+done
+
+notice ""
+notice "Adding file remove instructions to update manifests"
+for ((i=0; $i<$num_removes; i=$i+1)); do
+ f="${remove_array[$i]}"
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $updatemanifestv2
+ echo "remove \"$f\"" >> $updatemanifestv3
+done
+
+# Add remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$newdir" "$updatemanifestv2" "$updatemanifestv3"
+
+notice ""
+notice "Adding directory remove instructions for directories that no longer exist"
+num_olddirs=${#olddirs[*]}
+
+for ((i=0; $i<$num_olddirs; i=$i+1)); do
+ f="${olddirs[$i]}"
+ # If this dir doesn't exist in the new directory remove it.
+ if [ ! -d "$newdir/$f" ]; then
+ notice " rmdir $f/"
+ echo "rmdir \"$f/\"" >> $updatemanifestv2
+ echo "rmdir \"$f/\"" >> $updatemanifestv3
+ fi
+done
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+mar_command="$MAR"
+if [[ -n $PRODUCT_VERSION ]]
+then
+ mar_command="$mar_command -V $PRODUCT_VERSION"
+fi
+if [[ -n $CHANNEL_ID ]]
+then
+ mar_command="$mar_command -H $CHANNEL_ID"
+fi
+mar_command="$mar_command -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+eval "$mar_command"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/path.py b/bin/update/path.py
new file mode 100644
index 000000000..0fe0fd5eb
--- /dev/null
+++ b/bin/update/path.py
@@ -0,0 +1,69 @@
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import os
+import errno
+import subprocess
+from sys import platform
+
+def mkdir_p(path):
+ try:
+ os.makedirs(path)
+ except OSError as exc: # Python >2.5
+ if exc.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else:
+ raise
+
+def convert_to_unix(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-u", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+def convert_to_native(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-m", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+class UpdaterPath(object):
+
+ def __init__(self, workdir):
+ self._workdir = convert_to_unix(workdir)
+
+ def get_workdir(self):
+ return self._workdir
+
+ def get_update_dir(self):
+ return os.path.join(self._workdir, "update-info")
+
+ def get_current_build_dir(self):
+ return os.path.join(self._workdir, "mar", "current-build")
+
+ def get_mar_dir(self):
+ return os.path.join(self._workdir, "mar")
+
+ def get_previous_build_dir(self):
+ return os.path.join(self._workdir, "mar", "previous-build")
+
+ def get_language_dir(self):
+ return os.path.join(self.get_mar_dir(), "language")
+
+ def ensure_dir_exist(self):
+ mkdir_p(self.get_update_dir())
+ mkdir_p(self.get_current_build_dir())
+ mkdir_p(self.get_mar_dir())
+ mkdir_p(self.get_previous_build_dir())
+ mkdir_p(self.get_language_dir())
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
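
For orientation, the directory layout that UpdaterPath.ensure_dir_exist() creates under a given workdir (a sketch; the workdir itself is whatever the caller passes in):

    <workdir>/
        update-info/         # get_update_dir(): final MARs and *_info.json files
        mar/                 # get_mar_dir()
            current-build/   # get_current_build_dir(): unpacked current archive
            previous-build/  # get_previous_build_dir(): downloaded previous MARs
            language/        # get_language_dir(): unpacked language packs
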
diff --git a/bin/update/signing.py b/bin/update/signing.py
new file mode 100644
index 000000000..c0b43ce91
--- /dev/null
+++ b/bin/update/signing.py
@@ -0,0 +1,12 @@
+from tools import make_complete_mar_name
+
+import os
+import subprocess
+import path
+
+def sign_mar_file(target_dir, config, mar_file, filename_prefix):
+ signed_mar_file = make_complete_mar_name(target_dir, filename_prefix + '_signed')
+ mar_executable = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar_executable, '-C', path.convert_to_native(target_dir), '-d', path.convert_to_native(config.certificate_path), '-n', config.certificate_name, '-s', path.convert_to_native(mar_file), path.convert_to_native(signed_mar_file)])
+
+ os.rename(signed_mar_file, mar_file)
diff --git a/bin/update/tools.py b/bin/update/tools.py
new file mode 100644
index 000000000..8cd786635
--- /dev/null
+++ b/bin/update/tools.py
@@ -0,0 +1,64 @@
+import os
+import hashlib
+import zipfile
+import tarfile
+
+def uncompress_file_to_dir(compressed_file, uncompress_dir):
+ command = None
+ extension = os.path.splitext(compressed_file)[1]
+
+ try:
+ os.mkdir(uncompress_dir)
+ except FileExistsError as e:
+ pass
+
+ if extension == '.gz':
+ tar = tarfile.open(compressed_file)
+ tar.extractall(uncompress_dir)
+ tar.close()
+ elif extension == '.zip':
+ zip_file = zipfile.ZipFile(compressed_file)
+ zip_file.extractall(uncompress_dir)
+ zip_file.close()
+
+ uncompress_dir = os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+ if " " in os.listdir(uncompress_dir)[0]:
+ print("replacing whitespace in directory name")
+ os.rename(os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0]),
+ os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0].replace(" ", "_")))
+ else:
+ print("Error: unknown extension " + extension)
+
+ return os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+
+BUF_SIZE = 1048576
+
+def get_hash(file_path):
+ sha512 = hashlib.sha512()
+ with open(file_path, 'rb') as f:
+ while True:
+ data = f.read(BUF_SIZE)
+ if not data:
+ break
+ sha512.update(data)
+ return sha512.hexdigest()
+
+def get_file_info(mar_file, url):
+ filesize = os.path.getsize(mar_file)
+ data = { 'hash' : get_hash(mar_file),
+ 'hashFunction' : 'sha512',
+ 'size' : filesize,
+ 'url' : url + os.path.basename(mar_file)}
+
+ return data
+
+def replace_variables_in_string(string, **kwargs):
+ new_string = string
+ for key, val in kwargs.items():
+ new_string = new_string.replace('$(%s)'%key, val)
+
+ return new_string
+
+def make_complete_mar_name(target_dir, filename_prefix):
+ filename = filename_prefix + "_complete.mar"
+ return os.path.join(target_dir, filename)
diff --git a/bin/update/uncompress_mar.py b/bin/update/uncompress_mar.py
new file mode 100755
index 000000000..02dafbaff
--- /dev/null
+++ b/bin/update/uncompress_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Extract a mar file and uncompress the content
+
+import os
+import re
+import sys
+import subprocess
+from path import convert_to_native
+
+def uncompress_content(file_path):
+ bzip2 = os.environ.get('BZIP2', 'bzip2')
+ file_path_compressed = file_path + ".bz2"
+ os.rename(file_path, file_path_compressed)
+ subprocess.check_call(["bzip2", "-d", convert_to_native(file_path_compressed)])
+
+def extract_mar(mar_file, target_dir):
+ mar = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar, "-C", convert_to_native(target_dir), "-x", convert_to_native(mar_file)])
+ file_info = subprocess.check_output([mar, "-t", convert_to_native(mar_file)])
+ lines = file_info.splitlines()
+ prog = re.compile(r"\d+\s+\d+\s+(.+)")
+ for line in lines:
+ match = prog.match(line.decode("utf-8", "strict"))
+ if match is None:
+ continue
+ info = match.groups()[0]
+ # ignore header line
+ if info == 'NAME':
+ continue
+
+ uncompress_content(os.path.join(target_dir, info))
+
+def main():
+ if len(sys.argv) != 3:
+ print("Help: This program takes exactly two arguments pointing to a mar file and a target location")
+ sys.exit(1)
+
+ mar_file = sys.argv[1]
+ target_dir = sys.argv[2]
+ extract_mar(mar_file, target_dir)
+
+if __name__ == "__main__":
+ main()
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/update/upload_build_config.py b/bin/update/upload_build_config.py
new file mode 100755
index 000000000..9a87661ee
--- /dev/null
+++ b/bin/update/upload_build_config.py
@@ -0,0 +1,42 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import configparser
+import requests
+
+dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def main(argv):
+
+ updater_config = sys.argv[2]
+
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(updater_config))
+
+ user = config["Updater"]["User"]
+ password = config["Updater"]["Password"]
+ base_address = config["Updater"]["ServerURL"]
+
+ login_url = base_address + "accounts/login/"
+
+ session = requests.session()
+ r1 = session.get(login_url)
+ csrftoken = session.cookies['csrftoken']
+
+ login_data = { 'username': user,'password': password,
+ 'csrfmiddlewaretoken': csrftoken }
+ r1 = session.post(login_url, data=login_data, headers={"Referer": login_url})
+
+ url = base_address + "update/upload/release"
+ data = {}
+ data['csrfmiddlewaretoken'] = csrftoken
+
+ build_config = os.path.join(sys.argv[1], "build_config.json")
+ r = session.post(url, files={'release_config': open(build_config, "r")}, data=data)
+ print(r.content)
+ if r.status_code != 200:
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/bin/update/upload_builds.py b/bin/update/upload_builds.py
new file mode 100755
index 000000000..210668e0d
--- /dev/null
+++ b/bin/update/upload_builds.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import subprocess
+
+from config import parse_config
+from path import convert_to_unix
+
+from tools import replace_variables_in_string
+
+def main():
+ product_name = sys.argv[1]
+ buildid = sys.argv[2]
+ platform = sys.argv[3]
+ update_dir = sys.argv[4]
+ update_config = sys.argv[5]
+
+ config = parse_config(update_config)
+ upload_url = replace_variables_in_string(config.upload_url, channel=config.channel, buildid=buildid, platform=platform)
+
+ target_url, target_dir = upload_url.split(':')
+
+ command = "ssh %s 'mkdir -p %s'"%(target_url, target_dir)
+ print(command)
+ subprocess.call(command, shell=True)
+ for file in os.listdir(update_dir):
+ if file.endswith('.mar'):
+ subprocess.call(['scp', convert_to_unix(os.path.join(update_dir, file)), upload_url])
+
+if __name__ == '__main__':
+ main()
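
Both upload scripts read the same updater config; a sketch of the final pipeline steps with placeholder arguments (upload_builds.py scp's every *.mar in the update directory to the expanded upload-url, upload_build_config.py posts build_config.json to the update server):

    ./bin/update/upload_builds.py LibreOffice 20240407 Linux_X86_64 \
        /path/to/workdir/update-info ~/.config/updater.ini
    ./bin/update/upload_build_config.py /path/to/workdir/update-info ~/.config/updater.ini
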