author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 09:06:44 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 09:06:44 +0000
commit    ed5640d8b587fbcfed7dd7967f3de04b37a76f26 (patch)
tree      7a5f7c6c9d02226d7471cb3cc8fbbf631b415303 /bin/update
parent    Initial commit. (diff)
Adding upstream version 4:7.4.7.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat:
 -rw-r--r--  bin/update/common.sh                          222
 -rw-r--r--  bin/update/config.py                           28
 -rwxr-xr-x  bin/update/create_build_config.py              60
 -rwxr-xr-x  bin/update/create_full_mar.py                  54
 -rwxr-xr-x  bin/update/create_full_mar_for_languages.py    66
 -rwxr-xr-x  bin/update/create_partial_update.py           160
 -rwxr-xr-x  bin/update/get_update_channel.py               23
 -rwxr-xr-x  bin/update/make_full_update.sh                122
 -rwxr-xr-x  bin/update/make_incremental_update.sh         318
 -rw-r--r--  bin/update/path.py                             69
 -rw-r--r--  bin/update/signing.py                          12
 -rw-r--r--  bin/update/tools.py                            64
 -rwxr-xr-x  bin/update/uncompress_mar.py                   54
 -rwxr-xr-x  bin/update/upload_build_config.py              42
 -rwxr-xr-x  bin/update/upload_builds.py                    32
 -rwxr-xr-x  bin/update_pch                               1322
 -rwxr-xr-x  bin/update_pch.sh                              70
 -rwxr-xr-x  bin/update_pch_autotune.sh                    229
 -rwxr-xr-x  bin/update_pch_bisect                         354
 19 files changed, 3301 insertions(+), 0 deletions(-)
diff --git a/bin/update/common.sh b/bin/update/common.sh
new file mode 100644
index 000000000..dcdbea8bb
--- /dev/null
+++ b/bin/update/common.sh
@@ -0,0 +1,222 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# Code shared by update packaging scripts.
+# Author: Darin Fisher
+#
+
+# -----------------------------------------------------------------------------
+# By default just assume that these tools exist on our path
+MAR=${MAR:-mar}
+BZIP2=${BZIP2:-bzip2}
+MBSDIFF=${MBSDIFF:-mbsdiff}
+
+# -----------------------------------------------------------------------------
+# Helper routines
+
+notice() {
+ echo "$*" 1>&2
+}
+
+get_file_size() {
+ info=($(ls -ln "$1"))
+ echo ${info[4]}
+}
+
+check_externals() {
+
+ # check whether we can call the mar executable
+ "$MAR" --version > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid mar executable in the path or in the MAR environment variable"
+ exit 1
+ fi
+
+ # check whether we can access the bzip2 executable
+ "$BZIP2" --help > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid bzip2 executable in the PATH or in the BZIP2 environment variable"
+ exit 1
+ fi
+}
+
+copy_perm() {
+ reference="$1"
+ target="$2"
+
+ if [ -x "$reference" ]; then
+ chmod 0755 "$target"
+ else
+ chmod 0644 "$target"
+ fi
+}
+
+make_add_instruction() {
+ f="$1"
+ filev2="$2"
+    # The third param will be an empty string when a file add instruction is
+    # only needed in the version 2 manifest. This only happens when the file
+    # has an add-if-not instruction in the version 3 manifest: before the
+    # version 3 manifest existed, the precomplete file contained a remove
+    # instruction for this file, so the file was removed before a complete
+    # update was applied.
+ filev3="$3"
+
+ # Used to log to the console
+ if [ $4 ]; then
+ forced=" (forced)"
+ else
+ forced=
+ fi
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " add-if \"$testdir\" \"$f\""
+ echo "add-if \"$testdir\" \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add-if \"$testdir\" \"$f\"" >> $filev3
+ fi
+ else
+ notice " add \"$f\"$forced"
+ echo "add \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add \"$f\"" >> $filev3
+ fi
+ fi
+}
+
+check_for_add_if_not_update() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "channel-prefs.js" -o \
+ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+check_for_add_to_manifestv2() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+make_add_if_not_instruction() {
+ f="$1"
+ filev3="$2"
+
+ notice " add-if-not \"$f\" \"$f\""
+ echo "add-if-not \"$f\" \"$f\"" >> $filev3
+}
+
+make_patch_instruction() {
+ f="$1"
+ filev2="$2"
+ filev3="$3"
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " patch-if \"$testdir\" \"$f.patch\" \"$f\""
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev3
+ else
+ notice " patch \"$f.patch\" \"$f\""
+ echo "patch \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch \"$f.patch\" \"$f\"" >> $filev3
+ fi
+}
+
+append_remove_instructions() {
+ dir="$1"
+ filev2="$2"
+ filev3="$3"
+
+ if [ -f "$dir/removed-files" ]; then
+ listfile="$dir/removed-files"
+ elif [ -f "$dir/Contents/Resources/removed-files" ]; then
+ listfile="$dir/Contents/Resources/removed-files"
+ fi
+ if [ -n "$listfile" ]; then
+ # Map spaces to pipes so that we correctly handle filenames with spaces.
+ files=($(cat "$listfile" | tr " " "|" | sort -r))
+ num_files=${#files[*]}
+ for ((i=0; $i<$num_files; i=$i+1)); do
+ # Map pipes back to whitespace and remove carriage returns
+ f=$(echo ${files[$i]} | tr "|" " " | tr -d '\r')
+ # Trim whitespace
+ f=$(echo $f)
+ # Exclude blank lines.
+ if [ -n "$f" ]; then
+ # Exclude comments
+ if [ ! $(echo "$f" | grep -c '^#') = 1 ]; then
+ if [ $(echo "$f" | grep -c '\/$') = 1 ]; then
+ notice " rmdir \"$f\""
+ echo "rmdir \"$f\"" >> $filev2
+ echo "rmdir \"$f\"" >> $filev3
+ elif [ $(echo "$f" | grep -c '\/\*$') = 1 ]; then
+ # Remove the *
+ f=$(echo "$f" | sed -e 's:\*$::')
+ notice " rmrfdir \"$f\""
+ echo "rmrfdir \"$f\"" >> $filev2
+ echo "rmrfdir \"$f\"" >> $filev3
+ else
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $filev2
+ echo "remove \"$f\"" >> $filev3
+ fi
+ fi
+ fi
+ done
+ fi
+}
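+
+# Illustration (hypothetical entries, not from this commit): a removed-files
+# list containing
+#     program/obsolete.so
+#     share/olddir/
+#     share/oldtree/*
+# yields remove "program/obsolete.so", rmdir "share/olddir/" and
+# rmrfdir "share/oldtree/" respectively; blank lines and lines starting
+# with '#' are skipped.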
+
+# List all files in the current directory, stripping leading "./"
+# Pass a variable name and it will be filled as an array.
+list_files() {
+ count=0
+
+ find . -type f \
+ ! -name "update.manifest" \
+ ! -name "updatev2.manifest" \
+ ! -name "updatev3.manifest" \
+ ! -name "temp-dirlist" \
+ ! -name "temp-filelist" \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-filelist"
+ while read file; do
+ eval "${1}[$count]=\"$file\""
+ (( count++ ))
+ done < "temp-filelist"
+ rm "temp-filelist"
+}
+
+# List all directories in the current directory, stripping leading "./"
+list_dirs() {
+ count=0
+
+ find . -type d \
+ ! -name "." \
+ ! -name ".." \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-dirlist"
+ while read dir; do
+ eval "${1}[$count]=\"$dir\""
+ (( count++ ))
+ done < "temp-dirlist"
+ rm "temp-dirlist"
+}
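
For orientation, the helpers above append manifest instructions of these
shapes to updatev2.manifest/updatev3.manifest (paths are illustrative, not
taken from this commit):

    type "complete"
    add "program/soffice.bin"
    add-if "distribution/extensions/ext1" "distribution/extensions/ext1/lib.so"
    add-if-not "update-settings.ini" "update-settings.ini"
    patch "program/libmergedlo.so.patch" "program/libmergedlo.so"
    patch-if "distribution/extensions/ext1" "distribution/extensions/ext1/lib.so.patch" "distribution/extensions/ext1/lib.so"

plus the remove/rmdir/rmrfdir forms emitted by append_remove_instructions.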
diff --git a/bin/update/config.py b/bin/update/config.py
new file mode 100644
index 000000000..0bc60a07f
--- /dev/null
+++ b/bin/update/config.py
@@ -0,0 +1,28 @@
+
+import configparser
+import os
+
+class Config(object):
+
+ def __init__(self):
+ self.certificate_path = None
+ self.certificate_name = None
+ self.channel = None
+ self.base_url = None
+ self.upload_url = None
+ self.server_url = None
+
+def parse_config(config_file):
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(config_file))
+
+ data = Config()
+ updater_data = config['Updater']
+ data.base_url = updater_data['base-url']
+ data.certificate_name = updater_data['certificate-name']
+ data.certificate_path = updater_data['certificate-path']
+ data.channel = updater_data['channel']
+ data.upload_url = updater_data['upload-url']
+ data.server_url = updater_data["ServerURL"]
+
+ return data
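
parse_config expects an INI file with an [Updater] section. A minimal sketch
(every value here is hypothetical; User, Password and ServerURL are also read
by upload_build_config.py further down):

    [Updater]
    base-url=https://download.example.com/updates/
    certificate-name=release-signing
    certificate-path=~/certs/updater
    channel=daily
    upload-url=uploader@host.example.com:/srv/updates/$(channel)/$(platform)/$(buildid)/
    ServerURL=https://update.example.com/
    User=uploader
    Password=secret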
diff --git a/bin/update/create_build_config.py b/bin/update/create_build_config.py
new file mode 100755
index 000000000..7cc8ac4be
--- /dev/null
+++ b/bin/update/create_build_config.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python3
+
+import json
+import sys
+import os
+
+from config import parse_config
+
+from tools import replace_variables_in_string
+
+def update_all_url_entries(data, **kwargs):
+ data['complete']['url'] = replace_variables_in_string(data['complete']['url'], **kwargs)
+
+ if sys.platform != "cygwin":
+ for language in data['languages']:
+ language['complete']['url'] = replace_variables_in_string(language['complete']['url'], **kwargs)
+
+ if 'partials' in data:
+ for partial in data['partials']:
+ partial['file']['url'] = replace_variables_in_string(partial['file']['url'], **kwargs)
+
+ if sys.platform == "cygwin":
+ continue
+
+ for lang, lang_file in partial['languages'].items():
+ lang_file['url'] = replace_variables_in_string(lang_file['url'], **kwargs)
+
+def main(argv):
+ if len(argv) < 7:
+ print("Usage: create_build_config.py $PRODUCTNAME $VERSION $BUILDID $PLATFORM $TARGETDIR $UPDATE_CONFIG")
+ sys.exit(1)
+
+ config = parse_config(argv[6])
+
+ data = { 'productName' : argv[1],
+ 'version' : argv[2],
+ 'buildNumber' : argv[3],
+ 'updateChannel' : config.channel,
+ 'platform' : argv[4]
+ }
+
+ extra_data_files = ['complete_info.json', 'partial_update_info.json']
+ if sys.platform != "cygwin":
+ extra_data_files.append('complete_lang_info.json')
+
+ for extra_file in extra_data_files:
+ extra_file_path = os.path.join(argv[5], extra_file)
+ if not os.path.exists(extra_file_path):
+ continue
+ with open(extra_file_path, "r") as f:
+ extra_data = json.load(f)
+ data.update(extra_data)
+
+ update_all_url_entries(data, channel=config.channel, platform=argv[4], buildid=argv[3], version=argv[2])
+
+ with open(os.path.join(argv[5], "build_config.json"), "w") as f:
+ json.dump(data, f, indent=4)
+
+if __name__ == "__main__":
+ main(sys.argv)
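
The $(...) placeholders in the URLs are resolved by
tools.replace_variables_in_string (added later in this commit), which does a
plain string replace. A small sketch with hypothetical values:

    from tools import replace_variables_in_string

    template = "https://update.example.com/$(channel)/$(platform)/$(buildid)/"
    url = replace_variables_in_string(template, channel="daily",
                                      platform="Linux_X86_64", buildid="20240407")
    # url == "https://update.example.com/daily/Linux_X86_64/20240407/"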
diff --git a/bin/update/create_full_mar.py b/bin/update/create_full_mar.py
new file mode 100755
index 000000000..48686be21
--- /dev/null
+++ b/bin/update/create_full_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info, make_complete_mar_name
+from config import parse_config
+from signing import sign_mar_file
+from path import UpdaterPath, convert_to_unix, convert_to_native
+
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+def main():
+ if len(sys.argv) < 5:
+        print("Usage: create_full_mar.py $PRODUCTNAME $WORKDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ if len(update_config) == 0:
+ print("missing update config")
+ sys.exit(1)
+
+ update_path = UpdaterPath(workdir)
+ update_path.ensure_dir_exist()
+
+ target_dir = update_path.get_update_dir()
+ temp_dir = update_path.get_current_build_dir()
+
+ config = parse_config(update_config)
+
+ tar_dir = os.path.join(update_path.get_workdir(), "installation", product_name, "archive", "install", "en-US")
+ tar_file = os.path.join(tar_dir, os.listdir(tar_dir)[0])
+
+ uncompress_dir = uncompress_file_to_dir(tar_file, temp_dir)
+
+ mar_file = make_complete_mar_name(target_dir, filename_prefix)
+ path = os.path.join(current_dir_path, 'make_full_update.sh')
+ subprocess.call([path, convert_to_native(mar_file), convert_to_native(uncompress_dir)])
+
+ sign_mar_file(target_dir, config, mar_file, filename_prefix)
+
+ file_info = { 'complete' : get_file_info(mar_file, config.base_url) }
+
+ with open(os.path.join(target_dir, 'complete_info.json'), "w") as complete_info_file:
+ json.dump(file_info, complete_info_file, indent = 4)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/create_full_mar_for_languages.py b/bin/update/create_full_mar_for_languages.py
new file mode 100755
index 000000000..039521dd1
--- /dev/null
+++ b/bin/update/create_full_mar_for_languages.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info
+
+from config import parse_config
+from path import UpdaterPath
+from signing import sign_mar_file
+
+current_dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def make_complete_mar_name(target_dir, filename_prefix, language):
+ filename = filename_prefix + "_" + language + "_complete_langpack.mar"
+ return os.path.join(target_dir, filename)
+
+def create_lang_infos(mar_file_name, language, url):
+ data = {'lang' : language,
+ 'complete' : get_file_info(mar_file_name, url)
+ }
+ return data
+
+def main():
+ if len(sys.argv) < 5:
+        print("Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ target_dir = updater_path.get_update_dir()
+ temp_dir = updater_path.get_language_dir()
+
+ config = parse_config(update_config)
+
+ language_pack_dir = os.path.join(workdir, "installation", product_name + "_languagepack", "archive", "install")
+ language_packs = os.listdir(language_pack_dir)
+ lang_infos = []
+ for language in language_packs:
+ if language == 'log':
+ continue
+
+ language_dir = os.path.join(language_pack_dir, language)
+ language_file = os.path.join(language_dir, os.listdir(language_dir)[0])
+
+ directory = uncompress_file_to_dir(language_file, os.path.join(temp_dir, language))
+
+ mar_file_name = make_complete_mar_name(target_dir, filename_prefix, language)
+
+ subprocess.call([os.path.join(current_dir_path, 'make_full_update.sh'), mar_file_name, directory])
+
+ sign_mar_file(target_dir, config, mar_file_name, filename_prefix)
+
+ lang_infos.append(create_lang_infos(mar_file_name, language, config.base_url))
+
+ with open(os.path.join(target_dir, "complete_lang_info.json"), "w") as language_info_file:
+ json.dump({'languages' : lang_infos}, language_info_file, indent=4)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/create_partial_update.py b/bin/update/create_partial_update.py
new file mode 100755
index 000000000..9412bcd6e
--- /dev/null
+++ b/bin/update/create_partial_update.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+import requests
+import json
+import sys
+import os
+import subprocess
+
+from config import parse_config
+from uncompress_mar import extract_mar
+from tools import get_file_info, get_hash
+from signing import sign_mar_file
+
+from path import UpdaterPath, mkdir_p, convert_to_unix, convert_to_native
+
+BUF_SIZE = 1024
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+class InvalidFileException(Exception):
+
+ def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+def download_file(filepath, url, hash_string):
+ with open(filepath, "wb") as f:
+ response = requests.get(url, stream=True)
+
+ if not response.ok:
+ return
+
+        for block in response.iter_content(BUF_SIZE):
+ f.write(block)
+
+ file_hash = get_hash(filepath)
+
+ if file_hash != hash_string:
+ raise InvalidFileException("file hash does not match for file %s: Expected %s, Got: %s" % (url, hash_string, file_hash))
+
+def handle_language(lang_entries, filedir):
+ langs = {}
+ for lang, data in lang_entries.items():
+ lang_dir = os.path.join(filedir, lang)
+ lang_file = os.path.join(lang_dir, "lang.mar")
+ mkdir_p(lang_dir)
+ download_file(lang_file , data["url"], data["hash"])
+ dir_path = os.path.join(lang_dir, "lang")
+ mkdir_p(dir_path)
+ extract_mar(lang_file, dir_path)
+ langs[lang] = dir_path
+
+ return langs
+
+def download_mar_for_update_channel_and_platform(config, platform, temp_dir):
+ base_url = config.server_url + "update/partial-targets/1/"
+ url = base_url + platform + "/" + config.channel
+ r = requests.get(url)
+ if r.status_code != 200:
+ print(r.content)
+ raise Exception("download failed")
+
+ update_info = json.loads(r.content.decode("utf-8"))
+ update_files = update_info['updates']
+ downloaded_updates = {}
+ for update_file in update_files:
+ build = update_file["build"]
+ filedir = os.path.join(temp_dir, build)
+
+ mkdir_p(filedir)
+
+ filepath = filedir + "/complete.mar"
+ url = update_file["update"]["url"]
+ expected_hash = update_file["update"]["hash"]
+ download_file(filepath, url, expected_hash)
+
+ dir_path = os.path.join(filedir, "complete")
+ mkdir_p(dir_path)
+ extract_mar(filepath, dir_path)
+
+ downloaded_updates[build] = {"complete": dir_path}
+
+ langs = handle_language(update_file["languages"], filedir)
+ downloaded_updates[build]["languages"] = langs
+
+ return downloaded_updates
+
+def generate_file_name(current_build_id, old_build_id, mar_name_prefix):
+ name = "%s_from_%s_partial.mar" %(mar_name_prefix, old_build_id)
+ return name
+
+def generate_lang_file_name(current_build_id, old_build_id, mar_name_prefix, lang):
+ name = "%s_%s_from_%s_partial.mar" %(mar_name_prefix, lang, old_build_id)
+ return name
+
+def add_single_dir(path):
+ dir_name = [os.path.join(path, name) for name in os.listdir(path) if os.path.isdir(os.path.join(path, name))]
+ return dir_name[0]
+
+def main():
+ workdir = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ updater_path.ensure_dir_exist()
+
+ mar_name_prefix = sys.argv[2]
+ update_config = sys.argv[3]
+ platform = sys.argv[4]
+ build_id = sys.argv[5]
+
+ current_build_path = updater_path.get_current_build_dir()
+ mar_dir = updater_path.get_mar_dir()
+ temp_dir = updater_path.get_previous_build_dir()
+ update_dir = updater_path.get_update_dir()
+
+ current_build_path = add_single_dir(current_build_path)
+ if sys.platform == "cygwin":
+ current_build_path = add_single_dir(current_build_path)
+
+ config = parse_config(update_config)
+
+ updates = download_mar_for_update_channel_and_platform(config, platform, temp_dir)
+
+ data = {"partials": []}
+
+ for build, update in updates.items():
+ file_name = generate_file_name(build_id, build, mar_name_prefix)
+ mar_file = os.path.join(update_dir, file_name)
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(mar_file), convert_to_native(update["complete"]), convert_to_native(current_build_path)])
+ sign_mar_file(update_dir, config, mar_file, mar_name_prefix)
+
+ partial_info = {"file":get_file_info(mar_file, config.base_url), "from": build, "to": build_id, "languages": {}}
+
+ # on Windows we don't use language packs
+ if sys.platform != "cygwin":
+ for lang, lang_info in update["languages"].items():
+ lang_name = generate_lang_file_name(build_id, build, mar_name_prefix, lang)
+
+ # write the file into the final directory
+ lang_mar_file = os.path.join(update_dir, lang_name)
+
+ # the directory of the old language file is of the form
+ # workdir/mar/language/en-US/LibreOffice_<version>_<os>_archive_langpack_<lang>/
+ language_dir = add_single_dir(os.path.join(mar_dir, "language", lang))
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(lang_mar_file), convert_to_native(lang_info), convert_to_native(language_dir)])
+ sign_mar_file(update_dir, config, lang_mar_file, mar_name_prefix)
+
+ # add the partial language info
+ partial_info["languages"][lang] = get_file_info(lang_mar_file, config.base_url)
+
+ data["partials"].append(partial_info)
+
+ with open(os.path.join(update_dir, "partial_update_info.json"), "w") as f:
+ json.dump(data, f)
+
+
+if __name__ == '__main__':
+ main()
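
The resulting partial_update_info.json is shaped like this (schematic values,
following get_file_info in tools.py):

    {
        "partials": [
            {
                "file": {"hash": "<sha512 hex>", "hashFunction": "sha512",
                         "size": 12345678, "url": "<base-url><mar name>"},
                "from": "<old build id>",
                "to": "<new build id>",
                "languages": {"<lang>": {"hash": "<sha512 hex>",
                                         "hashFunction": "sha512",
                                         "size": 1234567, "url": "<...>"}}
            }
        ]
    }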
diff --git a/bin/update/get_update_channel.py b/bin/update/get_update_channel.py
new file mode 100755
index 000000000..f94507d64
--- /dev/null
+++ b/bin/update/get_update_channel.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import sys
+from config import parse_config
+
+def main():
+ if len(sys.argv) < 2:
+ sys.exit(1)
+
+ update_config = sys.argv[1]
+ config = parse_config(update_config)
+ print(config.channel)
+
+if __name__ == "__main__":
+ main()
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/bin/update/make_full_update.sh b/bin/update/make_full_update.sh
new file mode 100755
index 000000000..4140ecae6
--- /dev/null
+++ b/bin/update/make_full_update.sh
@@ -0,0 +1,122 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates full update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE DIRECTORY"
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+if [ $1 = -h ]; then
+ print_usage
+ notice ""
+ notice "The contents of DIRECTORY will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice ""
+ exit 1
+fi
+
+check_externals
+# -----------------------------------------------------------------------------
+
+archive="$1"
+targetdir="$2"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$targetdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ targetdir=$(echo "$targetdir" | sed -e 's:\/$::')
+fi
+workdir="$targetdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+targetfiles="updatev2.manifest updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$targetdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_files files
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+> $updatemanifestv2
+> $updatemanifestv3
+notice ""
+notice "Adding type instruction to update manifests"
+notice " type complete"
+echo "type \"complete\"" >> $updatemanifestv2
+echo "type \"complete\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file add instructions to update manifests"
+num_files=${#files[*]}
+
+for ((i=0; $i<$num_files; i=$i+1)); do
+ f="${files[$i]}"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ if check_for_add_to_manifestv2 "$f"; then
+ make_add_instruction "$f" "$updatemanifestv2" "" 1
+ fi
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+ dir=$(dirname "$f")
+ mkdir -p "$workdir/$dir"
+ $BZIP2 -cz9 "$targetdir/$f" > "$workdir/$f"
+ copy_perm "$targetdir/$f" "$workdir/$f"
+
+ targetfiles="$targetfiles \"$f\""
+ echo $f >> $workdir/files.txt
+done
+
+# Append remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$targetdir" "$updatemanifestv2" "$updatemanifestv3"
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+eval "$MAR -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/make_incremental_update.sh b/bin/update/make_incremental_update.sh
new file mode 100755
index 000000000..31bddabdb
--- /dev/null
+++ b/bin/update/make_incremental_update.sh
@@ -0,0 +1,318 @@
+#!/usr/bin/env bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates incremental update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE FROMDIR TODIR"
+ notice ""
+ notice "The differences between FROMDIR and TODIR will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice " -f clobber this file in the installation"
+ notice " Must be a path to a file to clobber in the partial update."
+ notice ""
+}
+
+check_for_forced_update() {
+ force_list="$1"
+ forced_file_chk="$2"
+
+ local f
+
+ if [ "$forced_file_chk" = "precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "${forced_file_chk##*.}" = "chk" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ for f in $force_list; do
+ #echo comparing $forced_file_chk to $f
+ if [ "$forced_file_chk" = "$f" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ done
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+requested_forced_updates='Contents/MacOS/firefox'
+
+while getopts "hf:" flag
+do
+ case "$flag" in
+ h) print_usage; exit 0
+ ;;
+ f) requested_forced_updates="$requested_forced_updates $OPTARG"
+ ;;
+ ?) print_usage; exit 1
+ ;;
+ esac
+done
+
+# -----------------------------------------------------------------------------
+
+let arg_start=$OPTIND-1
+shift $arg_start
+
+archive="$1"
+olddir="$2"
+newdir="$3"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$newdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ newdir=$(echo "$newdir" | sed -e 's:\/$::')
+fi
+workdir="$newdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$olddir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+list_files oldfiles
+list_dirs olddirs
+
+popd
+
+pushd "$newdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_dirs newdirs
+list_files newfiles
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+notice ""
+notice "Adding type instruction to update manifests"
+> $updatemanifestv2
+> $updatemanifestv3
+notice " type partial"
+echo "type \"partial\"" >> $updatemanifestv2
+echo "type \"partial\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file patch and add instructions to update manifests"
+
+num_oldfiles=${#oldfiles[*]}
+remove_array=
+num_removes=0
+
+for ((i=0; $i<$num_oldfiles; i=$i+1)); do
+ f="${oldfiles[$i]}"
+
+ # If this file exists in the new directory as well, then check if it differs.
+ if [ -f "$newdir/$f" ]; then
+
+ if check_for_add_if_not_update "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if check_for_forced_update "$requested_forced_updates" "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3" 1
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if ! diff "$olddir/$f" "$newdir/$f" > /dev/null; then
+ # Compute both the compressed binary diff and the compressed file, and
+ # compare the sizes. Then choose the smaller of the two to package.
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+ notice "diffing \"$f\""
+ # MBSDIFF_HOOK represents the communication interface with funsize and,
+ # if enabled, caches the intermediate patches for future use and
+ # compute avoidance
+ #
+ # An example of MBSDIFF_HOOK env variable could look like this:
+ # export MBSDIFF_HOOK="myscript.sh -A https://funsize/api -c /home/user"
+ # where myscript.sh has the following usage:
+ # myscript.sh -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] \
+ # PATH-FROM-URL PATH-TO-URL PATH-PATCH SERVER-URL
+ #
+            # Note: patches are bzipped and stashed in funsize to gain more speed
+
+ # if service is not enabled then default to old behavior
+ if [ -z "$MBSDIFF_HOOK" ]; then
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ else
+ # if service enabled then check patch existence for retrieval
+ if $MBSDIFF_HOOK -g "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"; then
+ notice "file \"$f\" found in funsize, diffing skipped"
+ else
+ # if not found already - compute it and cache it for future use
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ $MBSDIFF_HOOK -u "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"
+ fi
+ fi
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ patchfile="$workdir/$f.patch.bz2"
+ patchsize=$(get_file_size "$patchfile")
+ fullsize=$(get_file_size "$workdir/$f")
+
+ if [ $patchsize -lt $fullsize ]; then
+ make_patch_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ mv -f "$patchfile" "$workdir/$f.patch"
+ rm -f "$workdir/$f"
+ echo $f.patch >> $workdir/files.txt
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ rm -f "$patchfile"
+ echo $f >> $workdir/files.txt
+ fi
+ fi
+ else
+ # remove instructions are added after add / patch instructions for
+ # consistency with make_incremental_updates.py
+ remove_array[$num_removes]=$f
+ (( num_removes++ ))
+ fi
+done
+
+# Newly added files
+notice ""
+notice "Adding file add instructions to update manifests"
+num_newfiles=${#newfiles[*]}
+
+for ((i=0; $i<$num_newfiles; i=$i+1)); do
+ f="${newfiles[$i]}"
+
+ # If we've already tested this file, then skip it
+ for ((j=0; $j<$num_oldfiles; j=$j+1)); do
+ if [ "$f" = "${oldfiles[j]}" ]; then
+ continue 2
+ fi
+ done
+
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+
+ echo $f >> $workdir/files.txt
+done
+
+notice ""
+notice "Adding file remove instructions to update manifests"
+for ((i=0; $i<$num_removes; i=$i+1)); do
+ f="${remove_array[$i]}"
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $updatemanifestv2
+ echo "remove \"$f\"" >> $updatemanifestv3
+done
+
+# Add remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$newdir" "$updatemanifestv2" "$updatemanifestv3"
+
+notice ""
+notice "Adding directory remove instructions for directories that no longer exist"
+num_olddirs=${#olddirs[*]}
+
+for ((i=0; $i<$num_olddirs; i=$i+1)); do
+ f="${olddirs[$i]}"
+ # If this dir doesn't exist in the new directory remove it.
+ if [ ! -d "$newdir/$f" ]; then
+ notice " rmdir $f/"
+ echo "rmdir \"$f/\"" >> $updatemanifestv2
+ echo "rmdir \"$f/\"" >> $updatemanifestv3
+ fi
+done
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+mar_command="$MAR"
+if [[ -n $PRODUCT_VERSION ]]
+then
+ mar_command="$mar_command -V $PRODUCT_VERSION"
+fi
+if [[ -n $CHANNEL_ID ]]
+then
+ mar_command="$mar_command -H $CHANNEL_ID"
+fi
+mar_command="$mar_command -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+eval "$mar_command"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/path.py b/bin/update/path.py
new file mode 100644
index 000000000..0fe0fd5eb
--- /dev/null
+++ b/bin/update/path.py
@@ -0,0 +1,69 @@
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import os
+import errno
+import subprocess
+from sys import platform
+
+def mkdir_p(path):
+ try:
+ os.makedirs(path)
+ except OSError as exc: # Python >2.5
+ if exc.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else:
+ raise
+
+def convert_to_unix(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-u", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+def convert_to_native(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-m", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+class UpdaterPath(object):
+
+ def __init__(self, workdir):
+ self._workdir = convert_to_unix(workdir)
+
+ def get_workdir(self):
+ return self._workdir
+
+ def get_update_dir(self):
+ return os.path.join(self._workdir, "update-info")
+
+ def get_current_build_dir(self):
+ return os.path.join(self._workdir, "mar", "current-build")
+
+ def get_mar_dir(self):
+ return os.path.join(self._workdir, "mar")
+
+ def get_previous_build_dir(self):
+ return os.path.join(self._workdir, "mar", "previous-build")
+
+ def get_language_dir(self):
+ return os.path.join(self.get_mar_dir(), "language")
+
+ def ensure_dir_exist(self):
+ mkdir_p(self.get_update_dir())
+ mkdir_p(self.get_current_build_dir())
+ mkdir_p(self.get_mar_dir())
+ mkdir_p(self.get_previous_build_dir())
+ mkdir_p(self.get_language_dir())
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
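
After ensure_dir_exist() the working tree used by the scripts above looks
like this (the workdir itself is caller-supplied):

    <workdir>/
        update-info/          get_update_dir()
        mar/                  get_mar_dir()
            current-build/    get_current_build_dir()
            previous-build/   get_previous_build_dir()
            language/         get_language_dir()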
diff --git a/bin/update/signing.py b/bin/update/signing.py
new file mode 100644
index 000000000..c0b43ce91
--- /dev/null
+++ b/bin/update/signing.py
@@ -0,0 +1,12 @@
+from tools import make_complete_mar_name
+
+import os
+import subprocess
+import path
+
+def sign_mar_file(target_dir, config, mar_file, filename_prefix):
+ signed_mar_file = make_complete_mar_name(target_dir, filename_prefix + '_signed')
+ mar_executable = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar_executable, '-C', path.convert_to_native(target_dir), '-d', path.convert_to_native(config.certificate_path), '-n', config.certificate_name, '-s', path.convert_to_native(mar_file), path.convert_to_native(signed_mar_file)])
+
+ os.rename(signed_mar_file, mar_file)
diff --git a/bin/update/tools.py b/bin/update/tools.py
new file mode 100644
index 000000000..8cd786635
--- /dev/null
+++ b/bin/update/tools.py
@@ -0,0 +1,64 @@
+import os
+import hashlib
+import zipfile
+import tarfile
+
+def uncompress_file_to_dir(compressed_file, uncompress_dir):
+ extension = os.path.splitext(compressed_file)[1]
+
+ try:
+ os.mkdir(uncompress_dir)
+ except FileExistsError as e:
+ pass
+
+ if extension == '.gz':
+ tar = tarfile.open(compressed_file)
+ tar.extractall(uncompress_dir)
+ tar.close()
+ elif extension == '.zip':
+ zip_file = zipfile.ZipFile(compressed_file)
+ zip_file.extractall(uncompress_dir)
+ zip_file.close()
+
+ uncompress_dir = os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+ if " " in os.listdir(uncompress_dir)[0]:
+ print("replacing whitespace in directory name")
+ os.rename(os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0]),
+ os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0].replace(" ", "_")))
+ else:
+ print("Error: unknown extension " + extension)
+
+ return os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+
+BUF_SIZE = 1048576
+
+def get_hash(file_path):
+ sha512 = hashlib.sha512()
+ with open(file_path, 'rb') as f:
+ while True:
+ data = f.read(BUF_SIZE)
+ if not data:
+ break
+ sha512.update(data)
+ return sha512.hexdigest()
+
+def get_file_info(mar_file, url):
+ filesize = os.path.getsize(mar_file)
+ data = { 'hash' : get_hash(mar_file),
+ 'hashFunction' : 'sha512',
+ 'size' : filesize,
+ 'url' : url + os.path.basename(mar_file)}
+
+ return data
+
+def replace_variables_in_string(string, **kwargs):
+ new_string = string
+ for key, val in kwargs.items():
+ new_string = new_string.replace('$(%s)'%key, val)
+
+ return new_string
+
+def make_complete_mar_name(target_dir, filename_prefix):
+ filename = filename_prefix + "_complete.mar"
+ return os.path.join(target_dir, filename)
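
get_file_info thus yields entries of this shape (schematic values), which
create_full_mar.py and create_partial_update.py embed in complete_info.json
and partial_update_info.json:

    {
        "hash": "<sha512 hex digest>",
        "hashFunction": "sha512",
        "size": 12345678,
        "url": "https://download.example.com/updates/LibreOffice_complete.mar"
    }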
diff --git a/bin/update/uncompress_mar.py b/bin/update/uncompress_mar.py
new file mode 100755
index 000000000..02dafbaff
--- /dev/null
+++ b/bin/update/uncompress_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Extract a mar file and uncompress the content
+
+import os
+import re
+import sys
+import subprocess
+from path import convert_to_native
+
+def uncompress_content(file_path):
+ bzip2 = os.environ.get('BZIP2', 'bzip2')
+ file_path_compressed = file_path + ".bz2"
+ os.rename(file_path, file_path_compressed)
+    subprocess.check_call([bzip2, "-d", convert_to_native(file_path_compressed)])
+
+def extract_mar(mar_file, target_dir):
+ mar = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar, "-C", convert_to_native(target_dir), "-x", convert_to_native(mar_file)])
+ file_info = subprocess.check_output([mar, "-t", convert_to_native(mar_file)])
+ lines = file_info.splitlines()
+ prog = re.compile(r"\d+\s+\d+\s+(.+)")
+ for line in lines:
+ match = prog.match(line.decode("utf-8", "strict"))
+ if match is None:
+ continue
+ info = match.groups()[0]
+ # ignore header line
+        if info == 'NAME':
+ continue
+
+ uncompress_content(os.path.join(target_dir, info))
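+
+# For reference (assumed here, not verified in this commit): 'mar -t' prints
+# columns like "SIZE  MODE  NAME"; the regex keeps the third column and the
+# 'NAME' comparison skips the header row.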
+
+def main():
+ if len(sys.argv) != 3:
+ print("Help: This program takes exactly two arguments pointing to a mar file and a target location")
+ sys.exit(1)
+
+ mar_file = sys.argv[1]
+ target_dir = sys.argv[2]
+ extract_mar(mar_file, target_dir)
+
+if __name__ == "__main__":
+ main()
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/update/upload_build_config.py b/bin/update/upload_build_config.py
new file mode 100755
index 000000000..9a87661ee
--- /dev/null
+++ b/bin/update/upload_build_config.py
@@ -0,0 +1,42 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import configparser
+import requests
+
+dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def main(argv):
+
+ updater_config = sys.argv[2]
+
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(updater_config))
+
+ user = config["Updater"]["User"]
+ password = config["Updater"]["Password"]
+ base_address = config["Updater"]["ServerURL"]
+
+ login_url = base_address + "accounts/login/"
+
+ session = requests.session()
+ r1 = session.get(login_url)
+ csrftoken = session.cookies['csrftoken']
+
+ login_data = { 'username': user,'password': password,
+ 'csrfmiddlewaretoken': csrftoken }
+ r1 = session.post(login_url, data=login_data, headers={"Referer": login_url})
+
+ url = base_address + "update/upload/release"
+ data = {}
+ data['csrfmiddlewaretoken'] = csrftoken
+
+ build_config = os.path.join(sys.argv[1], "build_config.json")
+ r = session.post(url, files={'release_config': open(build_config, "r")}, data=data)
+ print(r.content)
+ if r.status_code != 200:
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/bin/update/upload_builds.py b/bin/update/upload_builds.py
new file mode 100755
index 000000000..210668e0d
--- /dev/null
+++ b/bin/update/upload_builds.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import subprocess
+
+from config import parse_config
+from path import convert_to_unix
+
+from tools import replace_variables_in_string
+
+def main():
+ product_name = sys.argv[1]
+ buildid = sys.argv[2]
+ platform = sys.argv[3]
+ update_dir = sys.argv[4]
+ update_config = sys.argv[5]
+
+ config = parse_config(update_config)
+ upload_url = replace_variables_in_string(config.upload_url, channel=config.channel, buildid=buildid, platform=platform)
+
+ target_url, target_dir = upload_url.split(':')
+
+ command = "ssh %s 'mkdir -p %s'"%(target_url, target_dir)
+ print(command)
+ subprocess.call(command, shell=True)
+ for file in os.listdir(update_dir):
+ if file.endswith('.mar'):
+ subprocess.call(['scp', convert_to_unix(os.path.join(update_dir, file)), upload_url])
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update_pch b/bin/update_pch
new file mode 100755
index 000000000..1cb5cb997
--- /dev/null
+++ b/bin/update_pch
@@ -0,0 +1,1322 @@
+#! /usr/bin/env python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+"""
+This script generates precompiled headers for a given
+module and library.
+
+Given a gmake makefile that belongs to some LO module:
+1) Process the makefile to find source files (process_makefile).
+2) For every source file, find all includes (process_source).
+3) Uncommon and rare includes are filtered (remove_rare).
+4) Conflicting headers are excluded (filter_ignore).
+5) Headers local to the source files are excluded (Filter_Local).
+6) Fixup missing headers that sources expect (fixup).
+7) The resulting includes are sorted by category (sort_by_category).
+8) The pch file is generated (generate).
+"""
+
+import sys
+import re
+import os
+import unittest
+import glob
+
+CUTOFF = 1
+EXCLUDE_MODULE = False
+EXCLUDE_LOCAL = False
+EXCLUDE_SYSTEM = True
+SILENT = False
+WORKDIR = 'workdir'
+
+# System includes: oox, sal, sd, svl, vcl
+
+INCLUDE = False
+EXCLUDE = True
+DEFAULTS = \
+{
+# module.library : (min, system, module, local), best time
+ 'accessibility.acc' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 7.8
+ 'basctl.basctl' : ( 3, EXCLUDE, INCLUDE, EXCLUDE), # 11.9
+ 'basegfx.basegfx' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 3.8
+ 'basic.sb' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 10.7
+ 'chart2.chartcontroller' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 18.4
+ 'chart2.chartcore' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 22.5
+ 'comphelper.comphelper' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 7.6
+ 'configmgr.configmgr' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 6.0
+ 'connectivity.ado' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 6.4
+ 'connectivity.calc' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'connectivity.dbase' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 5.2
+ 'connectivity.dbpool2' : ( 5, EXCLUDE, INCLUDE, EXCLUDE), # 3.0
+ 'connectivity.dbtools' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 0.8
+ 'connectivity.file' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 5.1
+ 'connectivity.firebird_sdbc' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 5.1
+ 'connectivity.flat' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.6
+ 'connectivity.mysql' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 3.4
+ 'connectivity.odbc' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 5.0
+ 'connectivity.postgresql-sdbc-impl' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 6.7
+ 'cppcanvas.cppcanvas' : (11, EXCLUDE, INCLUDE, INCLUDE), # 4.8
+ 'cppuhelper.cppuhelper' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'cui.cui' : ( 8, EXCLUDE, INCLUDE, EXCLUDE), # 19.7
+ 'dbaccess.dba' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 13.8
+ 'dbaccess.dbaxml' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 6.5
+ 'dbaccess.dbu' : (12, EXCLUDE, EXCLUDE, EXCLUDE), # 23.6
+ 'dbaccess.sdbt' : ( 1, EXCLUDE, INCLUDE, EXCLUDE), # 2.9
+ 'desktop.deployment' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 6.1
+ 'desktop.deploymentgui' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 5.7
+ 'desktop.deploymentmisc' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 3.4
+ 'desktop.sofficeapp' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 6.5
+ 'drawinglayer.drawinglayer' : ( 4, EXCLUDE, EXCLUDE, EXCLUDE), # 7.4
+ 'editeng.editeng' : ( 5, EXCLUDE, INCLUDE, EXCLUDE), # 13.0
+ 'forms.frm' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 14.2
+ 'framework.fwk' : ( 7, EXCLUDE, INCLUDE, INCLUDE), # 14.8
+ 'hwpfilter.hwp' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 6.0
+ 'lotuswordpro.lwpft' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 11.6
+ 'oox.oox' : ( 6, EXCLUDE, EXCLUDE, INCLUDE), # 28.2
+ 'package.package2' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 4.5
+ 'package.xstor' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 3.8
+ 'reportdesign.rpt' : ( 9, EXCLUDE, INCLUDE, INCLUDE), # 9.4
+ 'reportdesign.rptui' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 13.1
+ 'reportdesign.rptxml' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 7.6
+ 'sal.sal' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 4.2
+ 'sc.sc' : (12, EXCLUDE, INCLUDE, INCLUDE), # 92.6
+ 'sc.scfilt' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 39.9
+ 'sc.scui' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 15.0
+ 'sc.vbaobj' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 17.3
+ 'sd.sd' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 47.4
+ 'sd.sdui' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 9.4
+ 'sdext.PresentationMinimizer' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.1
+ 'sdext.PresenterScreen' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 7.1
+ 'sfx2.sfx' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 27.4
+ 'slideshow.slideshow' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 10.8
+ 'sot.sot' : ( 5, EXCLUDE, EXCLUDE, INCLUDE), # 3.1
+ 'starmath.sm' : ( 5, EXCLUDE, EXCLUDE, INCLUDE), # 10.9
+ 'svgio.svgio' : ( 8, EXCLUDE, EXCLUDE, INCLUDE), # 4.3
+ 'emfio.emfio' : ( 8, EXCLUDE, EXCLUDE, INCLUDE), # 4.3
+ 'svl.svl' : ( 6, EXCLUDE, EXCLUDE, EXCLUDE), # 7.6
+ 'svtools.svt' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 17.6
+ 'svx.svx' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 20.7
+ 'svx.svxcore' : ( 7, EXCLUDE, INCLUDE, EXCLUDE), # 37.0
+ 'sw.msword' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 22.4
+ 'sw.sw' : ( 7, EXCLUDE, EXCLUDE, INCLUDE), # 129.6
+ 'sw.swui' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 26.1
+ 'sw.vbaswobj' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 13.1
+ 'tools.tl' : ( 5, EXCLUDE, EXCLUDE, EXCLUDE), # 4.2
+ 'unotools.utl' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 7.0
+ 'unoxml.unoxml' : ( 1, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'uui.uui' : ( 4, EXCLUDE, EXCLUDE, EXCLUDE), # 4.9
+ 'vbahelper.msforms' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 5.2
+ 'vbahelper.vbahelper' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 7.0
+ 'vcl.vcl' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 35.7
+ 'writerfilter.writerfilter' : ( 5, EXCLUDE, EXCLUDE, EXCLUDE), # 19.7/27.3
+ 'xmloff.xo' : ( 7, EXCLUDE, INCLUDE, INCLUDE), # 22.1
+ 'xmloff.xof' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 4.4
+ 'xmlscript.xmlscript' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 3.6
+ 'xmlsecurity.xmlsecurity' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 5.1
+ 'xmlsecurity.xsec_xmlsec' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.4
+ 'xmlsecurity.xsec_gpg' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # ?
+}
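+
+# Reading one entry: 'sc.sc' : (12, EXCLUDE, INCLUDE, INCLUDE) means a header
+# must be included by at least 12 source files to enter the PCH, system
+# headers are excluded, module and local headers are included, and the best
+# measured build time with that tuning was 92.6 seconds.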
+
+def remove_rare(raw, min_use=-1):
+ """ Remove headers not commonly included.
+ The minimum threshold is min_use.
+ """
+ # The minimum number of times a header
+ # must be included to be in the PCH.
+ min_use = min_use if min_use >= 0 else CUTOFF
+
+ out = []
+ if not raw or not len(raw):
+ return out
+
+ inc = sorted(raw)
+ last = inc[0]
+ count = 1
+ for x in range(1, len(inc)):
+ i = inc[x]
+ if i == last:
+ count += 1
+ else:
+ if count >= min_use:
+ out.append(last)
+ last = i
+ count = 1
+
+ # Last group.
+ if count >= min_use:
+ out.append(last)
+
+ return out
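+
+# Illustration: remove_rare(['a.hxx', 'a.hxx', 'b.hxx'], min_use=2) returns
+# ['a.hxx']; 'b.hxx' is dropped because it is included only once.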
+
+def process_list(list, callable):
+ """ Given a list and callable
+ we pass each entry through
+ the callable and only add to
+ the output if not blank.
+ """
+ out = []
+ for i in list:
+ line = callable(i)
+ if line and len(line):
+ out.append(line)
+ return out
+
+def find_files(path, recurse=True):
+ list = []
+ for root, dir, files in os.walk(path):
+ list += map(lambda x: os.path.join(root, x), files)
+ return list
+
+def get_filename(line):
+ """ Strips the line from the
+ '#include' and angled brackets
+ and return the filename only.
+ """
+ if not len(line) or line[0] != '#':
+ return line
+ return re.sub(r'(.*#include\s*)<(.*)>(.*)', r'\2', line)
+
+def is_c_runtime(inc, root, module):
+ """ Heuristic-based detection of C/C++
+ runtime headers.
+ They are all-lowercase, with .h or
+ no extension, filename only.
+ Try to check that they are not LO headers.
+ """
+ inc = get_filename(inc)
+
+ if inc.endswith('.hxx') or inc.endswith('.hpp'):
+ return False
+
+ if inc.endswith('.h') and inc.startswith( 'config_' ):
+ return False
+
+ hasdot = False
+ for c in inc:
+ if c == '/':
+ return False
+ if c == '.' and not inc.endswith('.h'):
+ return False
+ if c == '.':
+ hasdot = True
+ if c.isupper():
+ return False
+ if not hasdot: # <memory> etc.
+ return True
+
+ if glob.glob(os.path.join(root, module, '**', inc), recursive=True):
+ return False;
+
+ return True
+
+def sanitize(raw):
+ """ There are two forms of includes,
+ those with <> and "".
+ Technically, the difference is that
+ the compiler can use an internal
+ representation for an angled include,
+ such that it doesn't have to be a file.
+ For our purposes, there is no difference.
+ Here, we convert everything to angled.
+ """
+ if not raw or not len(raw):
+ return ''
+ raw = raw.strip()
+ if not len(raw):
+ return ''
+ return re.sub(r'(.*#include\s*)\"(.*)\"(.*)', r'#include <\2>', raw)
+
+class Filter_Local(object):
+ """ Filter headers local to a module.
+ allow_public: allows include/module/file.hxx
+ #include <module/file.hxx>
+ allow_module: allows module/inc/file.hxx
+ #include <file.hxx>
+ allow_locals: allows module/source/file.hxx and
+ module/source/inc/file.hxx
+ #include <file.hxx>
+ """
+ def __init__(self, root, module, allow_public=True, allow_module=True, allow_locals=True):
+ self.root = root
+ self.module = module
+ self.allow_public = allow_public
+ self.allow_module = allow_module
+ self.allow_locals = allow_locals
+ self.public_prefix = '<' + self.module + '/'
+
+ all = find_files(os.path.join(root, module))
+ self.module_includes = []
+ self.locals = []
+ mod_prefix = module + '/inc/'
+ for i in all:
+ if mod_prefix in i:
+ self.module_includes.append(i)
+ else:
+ self.locals.append(i)
+
+ def is_public(self, line):
+ return self.public_prefix in line
+
+ def is_module(self, line):
+ """ Returns True if in module/inc/... """
+ filename = get_filename(line)
+ for i in self.module_includes:
+ if i.endswith(filename):
+ return True
+ return False
+
+ def is_local(self, line):
+ """ Returns True if in module/source/... """
+ filename = get_filename(line)
+ for i in self.locals:
+ if i.endswith(filename):
+ return True
+ return False
+
+ def is_external(self, line):
+ return is_c_runtime(line, self.root, self.module) and \
+ not self.is_public(line) and \
+ not self.is_module(line) and \
+ not self.is_local(line)
+
+ def find_local_file(self, line):
+ """ Finds the header file in the module dir,
+ but doesn't validate.
+ """
+ filename = get_filename(line)
+ for i in self.locals:
+ if i.endswith(filename):
+ return i
+ for i in self.module_includes:
+ if i.endswith(filename):
+ return i
+ return None
+
+ def proc(self, line):
+ assert line and len(line)
+
+ if line[0] == '#':
+ if not SILENT:
+ sys.stderr.write('unhandled #include : {}\n'.format(line))
+ return ''
+
+ assert line[0] != '<' and line[0] != '#'
+
+ filename = get_filename(line)
+
+ # Local with relative path.
+ if filename.startswith('..'):
+ # Exclude for now as we don't have cxx path.
+ return ''
+
+ # Locals are included first (by the compiler).
+ if self.is_local(filename):
+ # Use only locals that are in some /inc/ directory (either in <module>/inc or
+ # somewhere under <module>/source/**/inc/, compilations use -I for these paths
+ # and headers elsewhere would not be found when compiling the PCH.
+ if not self.allow_locals:
+ return ''
+ elif '/inc/' in filename:
+ return filename
+ elif glob.glob(os.path.join(self.root, self.module, '**', 'inc', filename), recursive=True):
+ return filename
+ else:
+ return ''
+
+ # Module headers are next.
+ if self.is_module(filename):
+ return line if self.allow_module else ''
+
+ # Public headers are last.
+ if self.is_public(line):
+ return line if self.allow_public else ''
+
+ # Leave out potentially unrelated files local
+ # to some other module we can't include directly.
+ if '/' not in filename and not self.is_external(filename):
+ return ''
+
+ # Unfiltered.
+ return line
+
+def filter_ignore(line, module):
+ """ Filters includes from known
+ problematic ones.
+ Expects sanitized input.
+ """
+ assert line and len(line)
+
+ # Always include files without extension.
+ if '.' not in line:
+ return line
+
+ # Extract filenames for ease of comparison.
+ line = get_filename(line)
+
+ # Filter out all files that are not normal headers.
+ if not line.endswith('.h') and \
+ not line.endswith('.hxx') and \
+ not line.endswith('.hpp') and \
+ not line.endswith('.hdl'):
+ return ''
+
+ ignore_list = [
+ 'LibreOfficeKit/LibreOfficeKitEnums.h', # Needs special directives
+ 'LibreOfficeKit/LibreOfficeKitTypes.h', # Needs special directives
+ 'jerror.h', # c++ unfriendly
+ 'jpeglib.h', # c++ unfriendly
+ 'boost/spirit/include/classic_core.hpp' # depends on BOOST_SPIRIT_DEBUG
+ ]
+
+ if module == 'basic':
+ ignore_list += [
+ 'basic/vbahelper.hxx',
+ ]
+ if module == 'connectivity':
+ ignore_list += [
+ 'com/sun/star/beans/PropertyAttribute.hpp', # OPTIONAL defined via objbase.h
+ 'com/sun/star/sdbcx/Privilege.hpp', # DELETE defined via objbase.h
+            'ado/*', # some strange type conflict because of Windows' adoctint.h
+ 'adoint.h',
+ 'adoctint.h',
+ ]
+ if module == 'sc':
+ ignore_list += [
+ 'progress.hxx', # special directives
+ 'scslots.hxx', # special directives
+ ]
+ if module == 'sd':
+ ignore_list += [
+ 'sdgslots.hxx', # special directives
+ 'sdslots.hxx', # special directives
+ ]
+ if module == 'sfx2':
+ ignore_list += [
+ 'sfx2/recentdocsview.hxx', # Redefines ApplicationType defined in objidl.h
+ 'sfx2/sidebar/Sidebar.hxx',
+ 'sfx2/sidebar/UnoSidebar.hxx',
+ 'sfxslots.hxx', # externally defined types
+ ]
+ if module == 'sot':
+ ignore_list += [
+ 'sysformats.hxx', # Windows headers
+ ]
+ if module == 'vcl':
+ ignore_list += [
+ 'accmgr.hxx', # redefines ImplAccelList
+ 'image.h',
+ 'jobset.h',
+ 'opengl/gdiimpl.hxx',
+ 'opengl/salbmp.hxx',
+ 'openglgdiimpl', # ReplaceTextA
+ 'printdlg.hxx',
+ 'salinst.hxx', # GetDefaultPrinterA
+ 'salprn.hxx', # SetPrinterDataA
+ 'vcl/jobset.hxx',
+ 'vcl/oldprintadaptor.hxx',
+ 'vcl/opengl/OpenGLContext.hxx',
+ 'vcl/opengl/OpenGLHelper.hxx', # Conflicts with X header on *ix
+ 'vcl/print.hxx',
+ 'vcl/prntypes.hxx', # redefines Orientation from filter/jpeg/Exif.hxx
+ 'vcl/sysdata.hxx',
+ ]
+ if module == 'xmloff':
+ ignore_list += [
+ 'SchXMLExport.hxx', # SchXMLAutoStylePoolP.hxx not found
+ 'SchXMLImport.hxx', # enums redefined in draw\sdxmlimp_impl.hxx
+ 'XMLEventImportHelper.hxx', # NameMap redefined in XMLEventExport.hxx
+ 'xmloff/XMLEventExport.hxx', # enums redefined
+ ]
+ if module == 'xmlsecurity':
+ ignore_list += [
+ 'xmlsec/*',
+ 'xmlsecurity/xmlsec-wrapper.h',
+ ]
+ if module == 'external/pdfium':
+ ignore_list += [
+ 'third_party/freetype/include/pstables.h',
+ ]
+ if module == 'external/clucene':
+ ignore_list += [
+ '_bufferedstream.h',
+ '_condition.h',
+ '_gunichartables.h',
+ '_threads.h',
+ 'error.h',
+ 'CLucene/LuceneThreads.h',
+ 'CLucene/config/_threads.h',
+ ]
+ if module == 'external/skia':
+ ignore_list += [
+ 'skcms_internal.h',
+ 'zlib.h', # causes crc32 conflict
+ 'dirent.h', # unix-specific
+ 'pthread.h',
+ 'unistd.h',
+ 'sys/stat.h',
+ 'ft2build.h',
+ 'fontconfig/fontconfig.h',
+ 'GL/glx.h',
+ 'src/Transform_inl.h',
+ 'src/c/sk_c_from_to.h',
+ 'src/c/sk_types_priv.h',
+ 'src/core/SkBlitBWMaskTemplate.h',
+ 'src/sfnt/SkSFNTHeader.h',
+ 'src/opts/',
+ 'src/core/SkCubicSolver.h',
+ 'src/sksl/SkSLCPP.h',
+ 'src/gpu/vk/GrVkAMDMemoryAllocator.h',
+ 'src/gpu/GrUtil.h',
+ 'src/sksl/', # conflict between SkSL::Expression and SkSL::dsl::Expression
+ 'include/sksl/',
+ 'src/gpu/vk/',
+ 'include/gpu/vk'
+ ]
+ if module == 'external/zxing':
+ ignore_list += [
+ 'rss/ODRSSExpandedBinaryDecoder.h'
+ ]
+
+ for i in ignore_list:
+ if line.startswith(i):
+ return ''
+ if i[0] == '*' and line.endswith(i[1:]):
+ return ''
+ if i[-1] == '*' and line.startswith(i[:-1]):
+ return ''
+
+ return line
+
+def fixup(includes, module):
+ """ Here we add any headers
+ necessary in the pch.
+ These could be known to be very
+ common but for technical reasons
+ left out of the pch by this generator.
+ Or, they could be missing from the
+ source files where they are used
+ (probably because they had been
+ in the old pch, they were missed).
+ Also, these could be headers
+ that make the build faster but
+ aren't added automatically.
+ """
+ fixes = []
+ def append(inc):
+ # Add the include only once; main() wraps
+ # it in '#include <...>' later.
+ if inc not in fixes:
+ fixes.append(inc)
+
+ append('sal/config.h')
+
+ if module == 'basctl':
+ if 'basslots.hxx' in includes:
+ append('sfx2/msg.hxx')
+
+ #if module == 'sc':
+ # if 'scslots.hxx' in includes:
+ # append('sfx2/msg.hxx')
+ return fixes
+
+def sort_by_category(includes, root, module, filter_local):
+ """ Move all 'system' headers first.
+ Core files of osl, rtl, sal, next.
+ Everything non-module-specific third.
+ Last, module-specific headers.
+ """
+ sys = []
+ boo = []
+ cor = []
+ rst = []
+ mod = []
+
+ prefix = '<' + module + '/'
+ for i in includes:
+ if 'sal/config.h' in i:
+ continue # added unconditionally in fixup
+ if is_c_runtime(i, root, module):
+ sys.append(i)
+ elif '<boost/' in i:
+ boo.append(i)
+ elif prefix in i or '/' not in i:
+ mod.append(i)
+ elif '<sal/' in i or '<vcl/' in i:
+ cor.append(i)
+ elif '<osl/' in i or '<rtl/' in i:
+ if module == "sal": # osl and rtl are also part of sal
+ mod.append(i)
+ else:
+ cor.append(i)
+ # Headers from another module that is closely tied to the module.
+ elif module == 'sc' and '<formula' in i:
+ mod.append(i)
+ else:
+ rst.append(i)
+
+ out = []
+ out += [ "#if PCH_LEVEL >= 1" ]
+ out += sorted(sys)
+ out += sorted(boo)
+ out += [ "#endif // PCH_LEVEL >= 1" ]
+ out += [ "#if PCH_LEVEL >= 2" ]
+ out += sorted(cor)
+ out += [ "#endif // PCH_LEVEL >= 2" ]
+ out += [ "#if PCH_LEVEL >= 3" ]
+ out += sorted(rst)
+ out += [ "#endif // PCH_LEVEL >= 3" ]
+ out += [ "#if PCH_LEVEL >= 4" ]
+ out += sorted(mod)
+ out += [ "#endif // PCH_LEVEL >= 4" ]
+ return out
+
+def parse_makefile(groups, lines, lineno, lastif, ifstack):
+
+ inobjects = False
+ ingeneratedobjects = False
+ inelse = False
+ suffix = 'cxx'
+ os_cond_re = re.compile(r'(ifeq|ifneq)\s*\(\$\(OS\)\,(\w*)\)')
+
+ line = lines[lineno]
+ if line.startswith('if'):
+ lastif = line
+ if ifstack == 0:
+ # Correction if first line is an if.
+ lineno = parse_makefile(groups, lines, lineno, line, ifstack+1)
+ else:
+ lineno -= 1
+
+ while lineno + 1 < len(lines):
+ lineno += 1
+ line = lines[lineno].strip()
+ line = line.rstrip('\\').strip()
+ #print('line #{}: {}'.format(lineno, line))
+ if len(line) == 0:
+ continue
+
+ if line == '))':
+ inobjects = False
+ ingeneratedobjects = False
+ elif 'add_exception_objects' in line or \
+ 'add_cxxobject' in line:
+ inobjects = True
+ #print('inobjects')
+ #if ifstack and not SILENT:
+ #sys.stderr.write('Sources in a conditional, ignoring for now.\n')
+ elif 'add_generated_exception_objects' in line or \
+ 'add_generated_cxxobject' in line:
+ ingeneratedobjects = True
+ elif 'set_generated_cxx_suffix' in line:
+ suffix_re = re.compile('.*set_generated_cxx_suffix,[^,]*,([^)]*).*')
+ match = suffix_re.match(line)
+ if match:
+ suffix = match.group(1)
+ elif line.startswith('if'):
+ lineno = parse_makefile(groups, lines, lineno, line, ifstack+1)
+ continue
+ elif line.startswith('endif'):
+ if ifstack:
+ return lineno
+ continue
+ elif line.startswith('else'):
+ inelse = True
+ elif inobjects or ingeneratedobjects:
+ if EXCLUDE_SYSTEM and ifstack:
+ continue
+ file = line + '.' + suffix
+ if ',' in line or '(' in line or ')' in line or file.startswith('-'):
+ #print('passing: ' + line)
+ pass # $if() probably, or something similar
+ else:
+ osname = ''
+ if lastif:
+ if 'filter' in lastif:
+ # We can't grok filter, yet.
+ continue
+ match = os_cond_re.match(lastif)
+ if not match:
+ # We only support OS conditionals.
+ continue
+ in_out = match.group(1)
+ osname = match.group(2)
+ if (in_out == 'ifneq' and not inelse) or \
+ (in_out == 'ifeq' and inelse):
+ osname = '!' + osname
+
+ if osname not in groups:
+ groups[osname] = []
+ if ingeneratedobjects:
+ file = WORKDIR + '/' + file
+ groups[osname].append(file)
+
+ return groups
+
+def process_makefile(root, module, libname):
+ """ Parse a gmake makefile and extract
+ source filenames from it.
+ """
+
+ makefile = 'Library_{}.mk'.format(libname)
+ filename = os.path.join(os.path.join(root, module), makefile)
+ if not os.path.isfile(filename):
+ makefile = 'StaticLibrary_{}.mk'.format(libname)
+ filename = os.path.join(os.path.join(root, module), makefile)
+ if not os.path.isfile(filename):
+ sys.stderr.write('Error: Module {} has no makefile at {}.\n'.format(module, filename))
+
+ groups = {'':[], 'ANDROID':[], 'iOS':[], 'WNT':[], 'LINUX':[], 'MACOSX':[]}
+
+ with open(filename, 'r') as f:
+ lines = f.readlines()
+ groups = parse_makefile(groups, lines, lineno=0, lastif=None, ifstack=0)
+
+ return groups
+
+def is_allowed_if(line, module):
+ """ Check whether the given #if condition
+ is allowed for the given module or whether
+ its block should be ignored.
+ """
+
+ # remove trailing comments
+ line = re.sub(r'(.*) *//.*', r'\1', line)
+ line = line.strip()
+
+ # Our sources always build with LIBO_INTERNAL_ONLY.
+ if line == "#if defined LIBO_INTERNAL_ONLY" or line == "#ifdef LIBO_INTERNAL_ONLY":
+ return True
+ # We use PCHs only for C++.
+ if line == "#if defined(__cplusplus)" or line == "#if defined __cplusplus":
+ return True
+ # Debug-specific code, it shouldn't hurt including it unconditionally.
+ if line == "#ifdef DBG_UTIL" or line == "#if OSL_DEBUG_LEVEL > 0":
+ return True
+ if module == "external/skia":
+ # We always set these.
+ if line == "#ifdef SK_VULKAN" or line == "#if SK_SUPPORT_GPU":
+ return True
+ return False
+
+def process_source(root, module, filename, maxdepth=0):
+ """ Process a source file to extract
+ included headers.
+ For now, skip on compiler directives.
+ maxdepth is used when processing headers,
+ which typically have a protecting ifndef.
+ """
+
+ ifdepth = 0
+ lastif = ''
+ raw_includes = []
+ allowed_ifs = []
+ ifsallowed = 0
+ with open(filename, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('#if'):
+ if is_allowed_if(line, module):
+ allowed_ifs.append(True)
+ ifsallowed += 1
+ else:
+ allowed_ifs.append(False)
+ lastif = line
+ ifdepth += 1
+ elif line.startswith('#endif'):
+ ifdepth -= 1
+ if allowed_ifs[ ifdepth ]:
+ ifsallowed -= 1
+ else:
+ lastif = '#if'
+ del allowed_ifs[ ifdepth ]
+ elif line.startswith('#pragma once'):
+ # maxdepth == 1 means we are parsing a header file
+ # and are allowed one #ifdef block (the include guard),
+ # but in the #pragma once case do not allow that
+ assert maxdepth == 1
+ maxdepth = 0
+ elif line.startswith('#include'):
+ if ifdepth - ifsallowed <= maxdepth:
+ line = sanitize(line)
+ if line:
+ line = get_filename(line)
+ if line and len(line):
+ raw_includes.append(line)
+ elif not SILENT:
+ sys.stderr.write('#include in {} : {}\n'.format(lastif, line))
+
+ return raw_includes
+
+def explode(root, module, includes, tree, filter_local, recurse):
+ incpath = os.path.join(root, 'include')
+
+ for inc in includes:
+ filename = get_filename(inc)
+ if filename in tree or len(filter_local.proc(filename)) == 0:
+ continue
+
+ try:
+ # Module or Local header.
+ filepath = filter_local.find_local_file(inc)
+ if filepath:
+ #print('trying loc: ' + filepath)
+ incs = process_source(root, module, filepath, maxdepth=1)
+ incs = map(get_filename, incs)
+ incs = process_list(incs, lambda x: filter_ignore(x, module))
+ incs = process_list(incs, filter_local.proc)
+ tree[filename] = incs
+ if recurse:
+ tree = explode(root, module, incs, tree, filter_local, recurse)
+ #print('{} => {}'.format(filepath, tree[filename]))
+ continue
+ except Exception:
+ pass
+
+ try:
+ # Public header.
+ filepath = os.path.join(incpath, filename)
+ #print('trying pub: ' + filepath)
+ incs = process_source(root, module, filepath, maxdepth=1)
+ incs = map(get_filename, incs)
+ incs = process_list(incs, lambda x: filter_ignore(x, module))
+ incs = process_list(incs, filter_local.proc)
+ tree[filename] = incs
+ if recurse:
+ tree = explode(root, module, incs, tree, filter_local, recurse)
+ #print('{} => {}'.format(filepath, tree[filename]))
+ continue
+ except Exception:
+ pass
+
+ # Failed, but remember to avoid searching again.
+ tree[filename] = []
+
+ return tree
+
+def make_command_line():
+ args = sys.argv[:]
+ # Remove command line flags and
+ # use internal flags.
+ for i in range(len(args)-1, 0, -1):
+ if args[i].startswith('--'):
+ args.pop(i)
+
+ args.append('--cutoff=' + str(CUTOFF))
+ if EXCLUDE_SYSTEM:
+ args.append('--exclude:system')
+ else:
+ args.append('--include:system')
+ if EXCLUDE_MODULE:
+ args.append('--exclude:module')
+ else:
+ args.append('--include:module')
+ if EXCLUDE_LOCAL:
+ args.append('--exclude:local')
+ else:
+ args.append('--include:local')
+
+ return ' '.join(args)
+
+def generate_includes(includes):
+ """Generates the include lines of the pch.
+ """
+ lines = []
+ for osname, group in includes.items():
+ if not len(group):
+ continue
+
+ if len(osname):
+ not_eq = ''
+ if osname[0] == '!':
+ not_eq = '!'
+ osname = osname[1:]
+ lines.append('')
+ lines.append('#if {}defined({})'.format(not_eq, osname))
+
+ for i in group:
+ lines.append(i)
+
+ if len(osname):
+ lines.append('#endif')
+
+ return lines
+
+def generate(includes, libname, filename, module):
+ header = \
+"""/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+/*
+ This file has been autogenerated by update_pch.sh. It is possible to edit it
+ manually (such as when an include file has been moved/renamed/removed). All such
+ manual changes will be rewritten by the next run of update_pch.sh (which presumably
+ also fixes all possible problems, so it's usually better to use it).
+"""
+
+ footer = \
+"""
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
+"""
+ import datetime
+
+ with open(filename, 'w') as f:
+ f.write(header)
+ f.write('\n Generated on {} using:\n {}\n'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+ make_command_line()))
+ f.write('\n If after updating build fails, use the following command to locate conflicting headers:\n ./bin/update_pch_bisect {} "make {}.build" --find-conflicts\n*/\n'.format(
+ filename, module))
+
+ # sal needs this for rand_s()
+ if module == 'sal' and libname == 'sal':
+ sal_define = """
+#if defined(_WIN32)
+#define _CRT_RAND_S
+#endif
+"""
+ f.write(sal_define)
+
+ # Dump the headers.
+ f.write('\n')
+ for i in includes:
+ f.write(i + '\n')
+
+ # Some libraries pull in Windows headers that aren't self-contained.
+ if (module == 'connectivity' and libname == 'ado') or \
+ (module == 'xmlsecurity' and libname == 'xsec_xmlsec'):
+ ado_define = """
+// Cleanup windows header macro pollution.
+#if defined(_WIN32) && defined(WINAPI)
+#include <postwin.h>
+#undef RGB
+#endif
+"""
+ f.write(ado_define)
+
+ f.write(footer)
+
+def remove_from_tree(filename, tree):
+ # Remove this file, if top-level.
+ incs = tree.pop(filename, [])
+ for i in incs:
+ tree = remove_from_tree(i, tree)
+
+ # Also remove if included from another.
+ for (k, v) in tree.items():
+ if filename in v:
+ v.remove(filename)
+
+ return tree
+
+def tree_to_list(includes, filename, tree):
+ if filename in includes:
+ return includes
+ includes.append(filename)
+ #incs = tree.pop(filename, [])
+ incs = tree[filename] if filename in tree else []
+ for i in incs:
+ tree_to_list(includes, i, tree)
+
+ return includes
+
+def promote(includes):
+ """ Common library headers are heavily
+ referenced, even if they are included
+ from a few places.
+ Here we separate them to promote
+ their inclusion in the final pch.
+ """
+ promo = []
+ for inc in includes:
+ if inc.startswith('boost') or \
+ inc.startswith('sal') or \
+ inc.startswith('osl') or \
+ inc.startswith('rtl'):
+ promo.append(inc)
+ return promo
+
+def make_pch_filename(root, module, libname):
+ """ PCH files are stored here:
+ <root>/<module>/inc/pch/precompiled_<libname>.hxx
+ """
+
+ path = os.path.join(root, module)
+ path = os.path.join(path, 'inc')
+ path = os.path.join(path, 'pch')
+ path = os.path.join(path, 'precompiled_' + libname + '.hxx')
+ return path
+
+def main():
+
+ global CUTOFF
+ global EXCLUDE_MODULE
+ global EXCLUDE_LOCAL
+ global EXCLUDE_SYSTEM
+ global SILENT
+ global WORKDIR
+
+ if os.getenv('WORKDIR'):
+ WORKDIR = os.getenv('WORKDIR')
+
+ root = '.'
+ module = sys.argv[1]
+ libname = sys.argv[2]
+ header = make_pch_filename(root, module, libname)
+
+ if not os.path.exists(os.path.join(root, module)):
+ raise Exception('Error: module [{}] not found.'.format(module))
+
+ key = '{}.{}'.format(module, libname)
+ if key in DEFAULTS:
+ # Load the module-specific defaults.
+ CUTOFF = DEFAULTS[key][0]
+ EXCLUDE_SYSTEM = DEFAULTS[key][1]
+ EXCLUDE_MODULE = DEFAULTS[key][2]
+ EXCLUDE_LOCAL = DEFAULTS[key][3]
+
+ force_update = False
+ for x in range(3, len(sys.argv)):
+ i = sys.argv[x]
+ if i.startswith('--cutoff='):
+ CUTOFF = int(i.split('=')[1])
+ elif i.startswith('--exclude:'):
+ cat = i.split(':')[1]
+ if cat == 'module':
+ EXCLUDE_MODULE = True
+ elif cat == 'local':
+ EXCLUDE_LOCAL = True
+ elif cat == 'system':
+ EXCLUDE_SYSTEM = True
+ elif i.startswith('--include:'):
+ cat = i.split(':')[1]
+ if cat == 'module':
+ EXCLUDE_MODULE = False
+ elif cat == 'local':
+ EXCLUDE_LOCAL = False
+ elif cat == 'system':
+ EXCLUDE_SYSTEM = False
+ elif i == '--silent':
+ SILENT = True
+ elif i == '--force':
+ force_update = True
+ else:
+ sys.stderr.write('Unknown option [{}].\n'.format(i))
+ return 1
+
+ filter_local = Filter_Local(root, module, \
+ not EXCLUDE_MODULE, \
+ not EXCLUDE_LOCAL)
+
+ # Read input.
+ groups = process_makefile(root, module, libname)
+
+ generic = []
+ for osname, group in groups.items():
+ if not len(group):
+ continue
+
+ includes = []
+ for filename in group:
+ includes += process_source(root, module, filename)
+
+ # Save unique top-level includes.
+ unique = set(includes)
+ promoted = promote(unique)
+
+ # Process includes.
+ includes = remove_rare(includes)
+ includes = process_list(includes, lambda x: filter_ignore(x, module))
+ includes = process_list(includes, filter_local.proc)
+
+ # Remove the already included ones.
+ for inc in includes:
+ unique.discard(inc)
+
+ # Explode the excluded ones.
+ tree = {i:[] for i in includes}
+ tree = explode(root, module, unique, tree, filter_local, not EXCLUDE_MODULE)
+
+ # Remove the already included ones from the tree.
+ for inc in includes:
+ filename = get_filename(inc)
+ tree = remove_from_tree(filename, tree)
+
+ extra = []
+ for (k, v) in tree.items():
+ extra += tree_to_list([], k, tree)
+
+ promoted += promote(extra)
+ promoted = process_list(promoted, lambda x: filter_ignore(x, module))
+ promoted = process_list(promoted, filter_local.proc)
+ promoted = set(promoted)
+ # If a promoted header includes others, remove the rest.
+ for (k, v) in tree.items():
+ if k in promoted:
+ for i in v:
+ promoted.discard(i)
+ includes += [x for x in promoted]
+
+ extra = remove_rare(extra)
+ extra = process_list(extra, lambda x: filter_ignore(x, module))
+ extra = process_list(extra, filter_local.proc)
+ includes += extra
+
+ includes = [x for x in set(includes)]
+ fixes = fixup(includes, module)
+ fixes = map(lambda x: '#include <' + x + '>', fixes)
+
+ includes = map(lambda x: '#include <' + x + '>', includes)
+ sorted = sort_by_category(includes, root, module, filter_local)
+ includes = list(fixes) + sorted
+
+ if len(osname):
+ for i in generic:
+ if i in includes:
+ includes.remove(i)
+
+ groups[osname] = includes
+ if not len(osname):
+ generic = includes
+
+ # Open the old pch and compare its contents
+ # with new includes.
+ # Clobber only if they are different.
+ with open(header, 'r') as f:
+ old_pch_lines = [x.strip() for x in f.readlines()]
+ new_lines = generate_includes(groups)
+ # Find the first include in the old pch.
+ start = -1
+ for i in range(len(old_pch_lines)):
+ if old_pch_lines[i].startswith('#include') or old_pch_lines[i].startswith('#if PCH_LEVEL'):
+ start = i
+ break
+ # Clobber if there is a mismatch.
+ if force_update or start < 0 or (len(old_pch_lines) - start < len(new_lines)):
+ generate(new_lines, libname, header, module)
+ return 0
+ else:
+ for i in range(len(new_lines)):
+ if new_lines[i] != old_pch_lines[start + i]:
+ generate(new_lines, libname, header, module)
+ return 0
+ else:
+ # Identical, but see if new pch removed anything.
+ for i in range(start + len(new_lines), len(old_pch_lines)):
+ if '#include' in old_pch_lines[i]:
+ generate(new_lines, libname, header, module)
+ return 0
+
+ # Didn't update.
+ # Use exit code 2 to distinguish it from exit code 1 used e.g. when an exception occurs.
+ return 2
+
+if __name__ == '__main__':
+ """ Process all the includes in a Module
+ to make into a PCH file.
+ Run without arguments for unittests,
+ and to see usage.
+ """
+
+ if len(sys.argv) >= 3:
+ status = main()
+ sys.exit(status)
+
+ print('Usage: {} <Module name> <Library name> [options]'.format(sys.argv[0]))
+ print(' Always run from the root of LO repository.\n')
+ print(' Options:')
+ print(' --cutoff=<count> - Threshold for excluding headers.')
+ print(' --exclude:<category> - Exclude category-specific headers.')
+ print(' --include:<category> - Include category-specific headers.')
+ print(' --force - Force updating the pch even when nothing changes.')
+ print(' --silent - Print only errors.')
+ print(' Categories:')
+ print(' module - Headers in /inc directory of a module.')
+ print(' local - Headers local to a source file.')
+ print(' system - Platform-specific headers.')
+ print('\nRunning unit-tests...')
+
+
+class TestMethods(unittest.TestCase):
+
+ def test_sanitize(self):
+ self.assertEqual(sanitize('#include "blah/file.cxx"'),
+ '#include <blah/file.cxx>')
+ self.assertEqual(sanitize(' #include\t"blah/file.cxx" '),
+ '#include <blah/file.cxx>')
+ self.assertEqual(sanitize(' '),
+ '')
+
+ def test_filter_ignore(self):
+ self.assertEqual(filter_ignore('blah/file.cxx', 'mod'),
+ '')
+ self.assertEqual(filter_ignore('vector', 'mod'),
+ 'vector')
+ self.assertEqual(filter_ignore('file.cxx', 'mod'),
+ '')
+
+ def test_remove_rare(self):
+ self.assertEqual(remove_rare([]),
+ [])
+
+class TestMakefileParser(unittest.TestCase):
+
+ def setUp(self):
+ global EXCLUDE_SYSTEM
+ EXCLUDE_SYSTEM = False
+
+ def test_parse_singleline_eval(self):
+ source = "$(eval $(call gb_Library_Library,sal))"
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_eval(self):
+ source = """$(eval $(call gb_Library_set_include,sal,\\
+ $$(INCLUDE) \\
+ -I$(SRCDIR)/sal/inc \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_eval_with_if(self):
+ source = """$(eval $(call gb_Library_add_defs,sal,\\
+ $(if $(filter $(OS),iOS), \\
+ -DNO_CHILD_PROCESSES \\
+ ) \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_add_with_if(self):
+ source = """$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/time \\
+ $(if $(filter DESKTOP,$(BUILD_TYPE)), sal/osl/unx/salinit) \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 1)
+ self.assertEqual(groups[''][0], 'sal/osl/unx/time.cxx')
+
+ def test_parse_if_else(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+endif
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 3)
+ self.assertEqual(len(groups['']), 0)
+ self.assertEqual(len(groups['MACOSX']), 1)
+ self.assertEqual(len(groups['!MACOSX']), 1)
+ self.assertEqual(groups['MACOSX'][0], 'sal/osl/mac/mac.cxx')
+ self.assertEqual(groups['!MACOSX'][0], 'sal/osl/unx/uunxapi.cxx')
+
+ def test_parse_nested_if(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+
+ifeq ($(OS),LINUX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/textenc/context \\
+))
+endif
+endif
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 4)
+ self.assertEqual(len(groups['']), 0)
+ self.assertEqual(len(groups['MACOSX']), 1)
+ self.assertEqual(len(groups['!MACOSX']), 1)
+ self.assertEqual(len(groups['LINUX']), 1)
+ self.assertEqual(groups['MACOSX'][0], 'sal/osl/mac/mac.cxx')
+ self.assertEqual(groups['!MACOSX'][0], 'sal/osl/unx/uunxapi.cxx')
+ self.assertEqual(groups['LINUX'][0], 'sal/textenc/context.cxx')
+
+ def test_parse_exclude_system(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+
+ifeq ($(OS),LINUX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/textenc/context \\
+))
+endif
+endif
+"""
+ global EXCLUDE_SYSTEM
+ EXCLUDE_SYSTEM = True
+
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_filter(self):
+ source = """ifneq ($(filter $(OS),MACOSX iOS),)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/osxlocale \\
+))
+endif
+"""
+ # Filter is still unsupported.
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+unittest.main()
+
+# vim: set et sw=4 ts=4 expandtab:
diff --git a/bin/update_pch.sh b/bin/update_pch.sh
new file mode 100755
index 000000000..4c17a0374
--- /dev/null
+++ b/bin/update_pch.sh
@@ -0,0 +1,70 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Usage: update_pch.sh [<module>/inc/pch/precompiled_xxx.hxx]
+# Usage: update_pch.sh [<module>]
+# Invoke: make cmd cmd="./bin/update_pch.sh [..]"
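+#
+# Example (illustrative; any module with pch files works the same way):
+# make cmd cmd="./bin/update_pch.sh vcl"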
+
+if test -n "$SRC_DIR"; then
+ root="$SRC_DIR"
+else
+ root=`dirname $0`
+ root=`cd $root/.. >/dev/null && pwd`
+fi
+root=`readlink -f $root`
+cd $root
+
+if test -z "$1"; then
+ headers=`ls ./*/inc/pch/precompiled_*.hxx`
+else
+ headers="$@"
+fi
+
+# Split the headers into an array.
+IFS=' ' read -a aheaders <<< $headers
+hlen=${#aheaders[@]};
+if [ $hlen -gt 1 ]; then
+ if [ -z "$PARALLELISM" ]; then
+ PARALLELISM=0 # Let xargs decide
+ fi
+ echo $headers | xargs -n 1 -P $PARALLELISM $0
+ exit $?
+fi
+
+for x in $headers; do
+ if [ -d "$x" ]; then
+ # We got a directory, find pch files to update.
+ headers=`find $root/$x/ -type f -iname "precompiled_*.hxx"`
+ if test -n "$headers"; then
+ $0 "$headers"
+ fi
+ else
+ header=$x
+ update_msg=`echo $header | sed -e s%$root/%%`
+ module=`readlink -f $header | sed -e s%$root/%% -e s%/.*%%`
+ if [ "$module" = "pch" ]; then
+ continue # PCHs in pch/inc/pch/ are handled manually
+ fi
+ echo updating $update_msg
+ if [ "$module" = "external" ]; then
+ module=external/`readlink -f $header | sed -e s%$root/external/%% -e s%/.*%%`
+ fi
+ libname=`echo $header | sed -e s/.*precompiled_// -e s/\.hxx//`
+
+ ./bin/update_pch "$module" "$libname"
+ exitcode=$?
+ if test $exitcode -ne 0 -a $exitcode -ne 2; then
+ echo Failed.
+ exit 1
+ fi
+ fi
+done
+
+#echo Done.
+exit 0
diff --git a/bin/update_pch_autotune.sh b/bin/update_pch_autotune.sh
new file mode 100755
index 000000000..806e1ad17
--- /dev/null
+++ b/bin/update_pch_autotune.sh
@@ -0,0 +1,229 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Finds the optimal update_pch settings that result in,
+# per module and library, the fastest build time and
+# smallest intermediate files (.o/.obj) output.
+
+# Usage: update_pch_autotune.sh [<module1> <module2>]
+# Invoke: /opt/lo/bin/make cmd cmd="./bin/update_pch_autotune.sh [..]"
+
+# The resulting values may be entered in update_pch
+# to be used for generating PCHs in the future.
+# Run this script after major header changes.
+
+root=`dirname $0`
+root=`cd $root/.. && pwd`
+cd $root
+
+if test -z "$1"; then
+ modules=`ls ./*/inc/pch/precompiled_*.hxx | sed -e s%./%% -e s%/.*%% | uniq`
+else
+ modules="$@"
+fi
+
+if [[ "$OSTYPE" == "cygwin" ]]; then
+ MAKE=/opt/lo/bin/make
+else
+ MAKE=make
+fi
+
+function build()
+{
+ local START=$(date +%s.%N)
+
+ $MAKE -sr "$module" > /dev/null
+ status=$?
+ if [ $status -ne 0 ];
+ then
+ # Spurious failures happen.
+ $MAKE "$module.build" > /dev/null
+ status=$?
+ fi
+
+ local END=$(date +%s.%N)
+ build_time=$(printf %.1f $(echo "$END - $START" | bc))
+
+ size="FAILED"
+ score="FAILED"
+ if [ $status -eq 0 ];
+ then
+ # The total size of the object files.
+ size="$(du -s workdir/CxxObject/$module/ | awk '{print $1}')"
+ # Add the pch file size.
+ filename_rel="workdir/PrecompiledHeader/nodebug/$(basename $header)*"
+ filename_dbg="workdir/PrecompiledHeader/debug/$(basename $header)*"
+ if [[ $filename_rel -nt $filename_dbg ]]; then
+ pch_size="$(du -s $filename_rel | awk '{print $1}' | paste -sd+ | bc)"
+ else
+ pch_size="$(du -s $filename_dbg | awk '{print $1}' | paste -sd+ | bc)"
+ fi
+ size="$(echo "$pch_size + $size" | bc)"
+
+ # Compute a score based on the build time and size.
+ # The shorter the build time, and smaller disk usage, the higher the score.
+ score=$(printf %.2f $(echo "10000 / ($build_time * e($size/1048576))" | bc -l))
+ fi
+}
+
+function run()
+{
+ local msg="$module.$libname, ${@:3}, "
+ printf "$msg"
+ ./bin/update_pch "$module" "$libname" "${@:3}" --silent
+ status=$?
+
+ if [ $status -eq 0 ];
+ then
+ build
+
+ summary="$build_time, $size, $score"
+ if [ $status -eq 0 ];
+ then
+ new_best_for_cutoff=$(echo "$score > $best_score_for_cutoff" | bc -l)
+ if [ $new_best_for_cutoff -eq 1 ];
+ then
+ best_score_for_cutoff=$score
+ fi
+
+ new_best=$(echo "$score > $best_score" | bc -l)
+ if [ $new_best -eq 1 ];
+ then
+ best_score=$score
+ best_args="${@:3}"
+ best_time=$build_time
+ best_cutoff=$cutoff
+ summary="$build_time, $size, $score,*"
+ fi
+ fi
+ else
+ # Skip if pch is not updated.
+ summary="0, 0, 0"
+ fi
+
+ echo "$summary"
+}
+
+function args_to_table()
+{
+ local sys="EXCLUDE"
+ local mod="EXCLUDE"
+ local loc="EXCLUDE"
+ local cutoff=0
+ IFS=' ' read -r -a aargs <<< $best_args
+ for index in "${!aargs[@]}"
+ do
+ if [ "${aargs[index]}" = "--include:system" ];
+ then
+ sys="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:system" ];
+ then
+ sys="EXCLUDE"
+ elif [ "${aargs[index]}" = "--include:module" ];
+ then
+ mod="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:module" ];
+ then
+ mod="EXCLUDE"
+ elif [ "${aargs[index]}" = "--include:local" ];
+ then
+ loc="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:local" ];
+ then
+ loc="EXCLUDE"
+ elif [[ "${aargs[index]}" == *"cutoff"* ]]
+ then
+ cutoff=$(echo "${aargs[index]}" | grep -Po '\-\-cutoff\=\K\d+')
+ fi
+ done
+
+ local key=$(printf "'%s.%s'" $module $libname)
+ echo "$(printf " %-36s: (%2d, %s, %s, %s), # %5.1f" $key $cutoff $sys $mod $loc $best_time)"
+}
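+
+# args_to_table emits one line per library, ready to paste into the
+# DEFAULTS table of ./bin/update_pch. Illustrative output:
+# 'vcl.vcl' : ( 6, EXCLUDE, INCLUDE, EXCLUDE), # 45.2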
+
+for module in $modules; do
+
+ # Build without pch includes as sanity check.
+ #run "$root" "$module" --cutoff=999
+
+ # Build before updating pch.
+ $MAKE "$module.build" > /dev/null
+ if [ $? -ne 0 ];
+ then
+ # Build with dependencies before updating pch.
+ echo "Failed to build $module, building known state with dependencies..."
+ ./bin/update_pch.sh "$module" > /dev/null
+ $MAKE "$module.clean" > /dev/null
+ $MAKE "$module.all" > /dev/null
+ if [ $? -ne 0 ];
+ then
+ # Build all!
+ echo "Failed to build $module with dependencies, building all..."
+ $MAKE > /dev/null
+ if [ $? -ne 0 ];
+ then
+ >&2 echo "Broken build. Please revert changes and try again."
+ exit 1
+ fi
+ fi
+ fi
+
+ # Find pch files in the module to update.
+ headers=`find $root/$module/ -type f -iname "precompiled_*.hxx"`
+
+ # Each pch belongs to a library.
+ for header in $headers; do
+ libname=`echo $header | sed -e s/.*precompiled_// -e s/\.hxx//`
+ #TODO: Backup the header and restore when last tune fails.
+
+ # Force update on first try below.
+ echo "Autotuning $module.$libname..."
+ ./bin/update_pch "$module" "$libname" --cutoff=999 --silent --force
+
+ best_score=0
+ best_args=""
+ best_time=0
+ best_cutoff=0
+ for i in {1..16}; do
+ cutoff=$i
+ best_score_for_cutoff=0
+ #run "$root" "$module" "--cutoff=$i" --include:system --exclude:module --exclude:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --exclude:module --exclude:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --include:module --exclude:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --include:module --exclude:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --exclude:module --include:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --exclude:module --include:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --include:module --include:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --include:module --include:local
+
+ if [ $i -gt $((best_cutoff+2)) ];
+ then
+ score_too_low=$(echo "$best_score_for_cutoff < $best_score / 1.10" | bc -l)
+ if [ $score_too_low -eq 1 ];
+ then
+ echo "Score hit low of $best_score_for_cuttof, well below overall best of $best_score. Stopping."
+ break;
+ fi
+ fi
+ done
+
+ ./bin/update_pch "$module" "$libname" $best_args --force --silent
+ echo "> $module.$libname, $best_args, $best_time, $size, $score"
+ echo
+
+ table+=$'\n'
+ table+="$(args_to_table)"
+ done
+
+done
+
+echo "Update the relevant lines in ./bin/update_pch script:"
+>&2 echo "$table"
+
+exit 0
diff --git a/bin/update_pch_bisect b/bin/update_pch_bisect
new file mode 100755
index 000000000..271cbc88f
--- /dev/null
+++ b/bin/update_pch_bisect
@@ -0,0 +1,354 @@
+#! /usr/bin/env python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+"""
+This script is for fixing precompiled headers.
+
+This script runs in two modes.
+In one mode, it starts with a header
+that doesn't compile. It finds the
+minimum number of includes in the
+header to remove to get a successful
+run of the command (i.e. compile).
+
+In the second mode, it starts with a
+header that compiles fine; however,
+it contains one or more required
+includes without which it wouldn't
+compile, and it identifies them.
+
+Usage: ./bin/update_pch_bisect ./vcl/inc/pch/precompiled_vcl.hxx "make vcl.build" --find-required --verbose
+"""
+
+import sys
+import re
+import os
+import unittest
+import subprocess
+
+SILENT = True
+FIND_CONFLICTS = True
+
+IGNORE = 0
+GOOD = 1
+TEST_ON = 2
+TEST_OFF = 3
+BAD = 4
+
+def run(command):
+ try:
+ cmd = command.split(' ', 1)
+ status = subprocess.call(cmd, stdout=open(os.devnull, 'w'),
+ stderr=subprocess.STDOUT, close_fds=True)
+ return status == 0
+ except Exception as e:
+ sys.stderr.write('Error: {}\n'.format(e))
+ return False
+
+def update_pch(filename, lines, marks):
+ with open(filename, 'w') as f:
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ f.write(lines[i])
+ else:
+ f.write('//' + lines[i])
+
+def log(*args, **kwargs):
+ global SILENT
+ if not SILENT:
+ print(*args, **kwargs)
+
+def bisect(lines, marks, min, max, update, command):
+ """ Disable half the includes and
+ calls the command.
+ Depending on the result,
+ recurse or return.
+ """
+ global FIND_CONFLICTS
+
+ log('Bisecting [{}, {}].'.format(min+1, max))
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_ON if FIND_CONFLICTS else TEST_OFF
+
+ assume_fail = False
+ if not FIND_CONFLICTS:
+ on_list = [x for x in marks if x in (TEST_ON, GOOD)]
+ assume_fail = (len(on_list) == 0)
+
+ update(lines, marks)
+ if assume_fail or not command():
+ # Failed
+ log('Failed [{}, {}].'.format(min+1, max))
+ if min >= max - 1:
+ if not FIND_CONFLICTS:
+ # Try with this one alone.
+ marks[min] = TEST_ON
+ update(lines, marks)
+ if command():
+ log(' Found @{}: {}'.format(min+1, lines[min].strip('\n')))
+ marks[min] = GOOD
+ return marks
+ else:
+ log(' Found @{}: {}'.format(min+1, lines[min].strip('\n')))
+ # Either way, this one is irrelevant.
+ marks[min] = BAD
+ return marks
+
+ # Bisect
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_OFF if FIND_CONFLICTS else TEST_ON
+
+ half = min + ((max - min) // 2)
+ marks = bisect(lines, marks, min, half, update, command)
+ marks = bisect(lines, marks, half, max, update, command)
+ else:
+ # Success
+ if FIND_CONFLICTS:
+ log(' Good [{}, {}].'.format(min+1, max))
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = GOOD
+
+ return marks
+
+def get_filename(line):
+ """ Strips the line from the
+ '#include' and angled brackets
+ and return the filename only.
+ """
+ return re.sub(r'(.*#include\s*)<(.*)>(.*)', r'\2', line)
+
+def get_marks(lines):
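+ # Returns (marks, min, max) where marks flags each line and [min, max)
+ # spans the include block. E.g. (illustrative):
+ # ['// x', '#include <a>'] -> ([IGNORE, TEST_ON], 1, 2).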
+ marks = []
+ min = -1
+ max = -1
+ for i in range(len(lines)):
+ line = lines[i]
+ if line.startswith('#include'):
+ marks.append(TEST_ON)
+ min = i if min < 0 else min
+ max = i
+ else:
+ marks.append(IGNORE)
+
+ return (marks, min, max+1)
+
+def main():
+
+ global FIND_CONFLICTS
+ global SILENT
+
+ filename = sys.argv[1]
+ command = sys.argv[2]
+
+ for i in range(3, len(sys.argv)):
+ opt = sys.argv[i]
+ if opt == '--find-conflicts':
+ FIND_CONFLICTS = True
+ elif opt == '--find-required':
+ FIND_CONFLICTS = False
+ elif opt == '--verbose':
+ SILENT = False
+ else:
+ sys.stderr.write('Error: Unknown option [{}].\n'.format(opt))
+ return 1
+
+ lines = []
+ with open(filename) as f:
+ lines = f.readlines()
+
+ (marks, min, max) = get_marks(lines)
+
+ # Test preconditions.
+ log('Validating all-excluded state...')
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_OFF
+ update_pch(filename, lines, marks)
+ res = run(command)
+
+ if FIND_CONFLICTS:
+ # Must build all excluded.
+ if not res:
+ sys.stderr.write("Error: broken state when all excluded, fix first and try again.")
+ return 1
+ else:
+ # If builds all excluded, we can't bisect.
+ if res:
+ sys.stderr.write("Done: in good state when all excluded, nothing to do.")
+ return 1
+
+ # Must build all included.
+ log('Validating all-included state...')
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_ON
+ update_pch(filename, lines, marks)
+ if not run(command):
+ sys.stderr.write("Error: broken state without modifying, fix first and try again.")
+ return 1
+
+ marks = bisect(lines, marks, min, max+1,
+ lambda l, m: update_pch(filename, l, m),
+ lambda: run(command))
+ if not FIND_CONFLICTS:
+ # Simplify further, as sometimes we can have
+ # false positives due to the benign nature
+ # of includes that are not absolutely required.
+ for i in range(len(marks)):
+ if marks[i] == GOOD:
+ marks[i] = TEST_OFF
+ update_pch(filename, lines, marks)
+ if not run(command):
+ # Revert.
+ marks[i] = GOOD
+ else:
+ marks[i] = BAD
+ elif marks[i] == TEST_OFF:
+ marks[i] = TEST_ON
+
+ update_pch(filename, lines, marks)
+
+ log('')
+ for i in range(len(marks)):
+ if marks[i] == (BAD if FIND_CONFLICTS else GOOD):
+ print("'{}',".format(get_filename(lines[i].strip('\n'))))
+
+ return 0
+
+if __name__ == '__main__':
+
+ if len(sys.argv) in (3, 4, 5):
+ status = main()
+ sys.exit(status)
+
+ print('Usage: {} <pch> <command> [--find-conflicts]|[--find-required] [--verbose]\n'.format(sys.argv[0]))
+ print(' --find-conflicts - Finds all conflicting includes. (Default)')
+ print(' Must compile without any includes.\n')
+ print(' --find-required - Finds all required includes.')
+ print(' Must compile with all includes.\n')
+ print(' --verbose - print noisy progress.')
+ print('Example: ./bin/update_pch_bisect ./vcl/inc/pch/precompiled_vcl.hxx "make vcl.build" --find-required --verbose')
+ print('\nRunning unit-tests...')
+
+
+class TestBisectConflict(unittest.TestCase):
+ TEST = """ /* Test header. */
+#include <memory>
+#include <set>
+#include <algorithm>
+#include <vector>
+/* blah blah */
+"""
+ BAD_LINE = "#include <bad>"
+
+ def setUp(self):
+ global FIND_CONFLICTS
+ FIND_CONFLICTS = True
+
+ def _update_func(self, lines, marks):
+ self.lines = []
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ self.lines.append(lines[i])
+ else:
+ self.lines.append('//' + lines[i])
+
+ def _test_func(self):
+ """ Command function called by bisect.
+ Returns True on Success, False on failure.
+ """
+ # If the bad line is still there, fail.
+ return self.BAD_LINE not in self.lines
+
+ def test_success(self):
+ lines = self.TEST.split('\n')
+ (marks, min, max) = get_marks(lines)
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ self.assertTrue(BAD not in marks)
+
+ def test_conflict(self):
+ lines = self.TEST.split('\n')
+ for pos in range(len(lines) + 1):
+ lines = self.TEST.split('\n')
+ lines.insert(pos, self.BAD_LINE)
+ (marks, min, max) = get_marks(lines)
+
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ for i in range(len(marks)):
+ if i == pos:
+ self.assertEqual(BAD, marks[i])
+ else:
+ self.assertNotEqual(BAD, marks[i])
+
+class TestBisectRequired(unittest.TestCase):
+ TEST = """#include <algorithm>
+#include <set>
+#include <map>
+#include <vector>
+"""
+ REQ_LINE = "#include <req>"
+
+ def setUp(self):
+ global FIND_CONFLICTS
+ FIND_CONFLICTS = False
+
+ def _update_func(self, lines, marks):
+ self.lines = []
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ self.lines.append(lines[i])
+ else:
+ self.lines.append('//' + lines[i])
+
+ def _test_func(self):
+ """ Command function called by bisect.
+ Returns True on Success, False on failure.
+ """
+ # If the required line is not there, fail.
+ found = self.REQ_LINE in self.lines
+ return found
+
+ def test_success(self):
+ lines = self.TEST.split('\n')
+ (marks, min, max) = get_marks(lines)
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ self.assertTrue(GOOD not in marks)
+
+ def test_required(self):
+ lines = self.TEST.split('\n')
+ for pos in range(len(lines) + 1):
+ lines = self.TEST.split('\n')
+ lines.insert(pos, self.REQ_LINE)
+ (marks, min, max) = get_marks(lines)
+
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ for i in range(len(marks)):
+ if i == pos:
+ self.assertEqual(GOOD, marks[i])
+ else:
+ self.assertNotEqual(GOOD, marks[i])
+
+unittest.main()
+
+# vim: set et sw=4 ts=4 expandtab: