author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-27 16:51:28 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-27 16:51:28 +0000
commit    940b4d1848e8c70ab7642901a68594e8016caffc (patch)
tree      eb72f344ee6c3d9b80a7ecc079ea79e9fba8676d /bin
parent    Initial commit. (diff)
Adding upstream version 1:7.0.4. (upstream/1%7.0.4, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat
-rw-r--r-- bin/README | 9
-rw-r--r-- bin/bash-completion.in | 90
-rw-r--r-- bin/benchmark-document-loading | 486
-rw-r--r-- bin/bffvalidator.sh.in | 3
-rwxr-xr-x bin/check-elf-dynamic-objects | 262
-rwxr-xr-x bin/check-icon-sizes.py | 34
-rwxr-xr-x bin/check-implementer-notes.py | 31
-rwxr-xr-x bin/check-missing-unittests.py | 137
-rw-r--r-- bin/convwatch.py | 462
-rwxr-xr-x bin/distro-install-clean-up | 92
-rwxr-xr-x bin/distro-install-desktop-integration | 192
-rwxr-xr-x bin/distro-install-file-lists | 517
-rwxr-xr-x bin/distro-install-sdk | 84
-rwxr-xr-x bin/dump-poolitems-values.py | 91
-rwxr-xr-x bin/extract-tooltip.py | 107
-rw-r--r-- bin/fake_pom.xml | 6
-rw-r--r-- bin/find-can-be-private-symbols.classes.results | 283
-rw-r--r-- bin/find-can-be-private-symbols.functions.results | 39
-rwxr-xr-x bin/find-can-be-private-symbols.py | 226
-rwxr-xr-x bin/find-clang-format.py | 67
-rwxr-xr-x bin/find-duplicated-files.py | 43
-rwxr-xr-x bin/find-duplicated-sids.py | 92
-rwxr-xr-x bin/find-files-not-referenced-by-makefile.py | 53
-rwxr-xr-x bin/find-german-comments | 402
-rwxr-xr-x bin/find-headers-to-move-inside-modules.py | 52
-rw-r--r-- bin/find-mergedlib-can-be-private.classes.results | 426
-rwxr-xr-x bin/find-mergedlib-can-be-private.py | 154
-rwxr-xr-x bin/find-most-common-warn-messages.py | 39
-rwxr-xr-x bin/find-most-repeated-functions.py | 42
-rwxr-xr-x bin/find-undocumented-classes | 33
-rwxr-xr-x bin/find-unneeded-includes | 313
-rwxr-xr-x bin/find-unused-defines.py | 170
-rwxr-xr-x bin/find-unused-sid-commands.py | 53
-rwxr-xr-x bin/find-unused-typedefs.py | 34
-rwxr-xr-x bin/find-unusedheaders.py | 48
-rwxr-xr-x bin/fixincludeguards.sh | 85
-rwxr-xr-x bin/fuzzfiles | 41
-rwxr-xr-x bin/gbuild-to-ide | 1911
-rwxr-xr-x bin/gen-boost-headers | 67
-rwxr-xr-x bin/gen-iwyu-dummy-lib | 79
-rw-r--r-- bin/gen-iwyu-dummy-lib.awk | 34
-rwxr-xr-x bin/generate-bash-completion.py | 162
-rwxr-xr-x bin/get-bugzilla-attachments-by-mimetype | 584
-rw-r--r-- bin/get_config_variables | 23
-rwxr-xr-x bin/git-ps1 | 52
-rwxr-xr-x bin/gla11y | 1401
-rwxr-xr-x bin/includebloat.awk | 51
-rwxr-xr-x bin/ios-mapfile-statistics | 78
-rw-r--r-- bin/java-set-classpath.in | 53
-rwxr-xr-x bin/lint-ui.py | 157
-rwxr-xr-x bin/list-dispatch-commands.py | 130
-rwxr-xr-x bin/list-uitest.py | 78
-rwxr-xr-x bin/lo-all-static-libs | 91
-rwxr-xr-x bin/lo-commit-stat | 584
-rwxr-xr-x bin/lo-pack-sources | 485
-rwxr-xr-x bin/lo-xlate-lang | 213
-rwxr-xr-x bin/lolcat | 21
-rwxr-xr-x bin/module-deps.pl | 556
-rwxr-xr-x bin/moveglobalheaders.sh | 43
-rw-r--r-- bin/odfvalidator.sh.in | 2
-rw-r--r-- bin/officeotron.sh.in | 2
-rwxr-xr-x bin/oss-fuzz-build.sh | 55
-rwxr-xr-x bin/parse-perfcheck.py | 258
-rwxr-xr-x bin/refcount_leak.py | 179
-rwxr-xr-x bin/removetooltip_markups.sh | 55
-rwxr-xr-x bin/rename-sw-abbreviations.sh | 43
-rwxr-xr-x bin/run | 76
-rw-r--r-- bin/sanitize-blacklist.txt | 12
-rwxr-xr-x bin/sanitize-image-links | 38
-rwxr-xr-x bin/striplanguagetags.sh | 74
-rwxr-xr-x bin/stubify.pl | 262
-rwxr-xr-x bin/symbolstore.py | 644
-rwxr-xr-x bin/symstore.sh | 174
-rwxr-xr-x bin/test-hid-vs-ui.py | 188
-rw-r--r-- bin/text_cat/COPYING | 504
-rw-r--r-- bin/text_cat/Copyright | 21
-rw-r--r-- bin/text_cat/LM/english.lm | 400
-rw-r--r-- bin/text_cat/LM/german.lm | 400
-rwxr-xr-x bin/text_cat/text_cat | 242
-rw-r--r-- bin/text_cat/version | 2
-rwxr-xr-x bin/ui-checkdomain.sh | 52
-rwxr-xr-x bin/ui-translatable.sh | 22
-rwxr-xr-x bin/unpack-sources | 91
-rw-r--r-- bin/update/common.sh | 222
-rw-r--r-- bin/update/config.py | 28
-rwxr-xr-x bin/update/create_build_config.py | 60
-rwxr-xr-x bin/update/create_full_mar.py | 54
-rwxr-xr-x bin/update/create_full_mar_for_languages.py | 66
-rwxr-xr-x bin/update/create_partial_update.py | 160
-rwxr-xr-x bin/update/get_update_channel.py | 23
-rwxr-xr-x bin/update/make_full_update.sh | 122
-rwxr-xr-x bin/update/make_incremental_update.sh | 318
-rw-r--r-- bin/update/path.py | 69
-rw-r--r-- bin/update/signing.py | 12
-rw-r--r-- bin/update/tools.py | 64
-rwxr-xr-x bin/update/uncompress_mar.py | 54
-rwxr-xr-x bin/update/upload_build_config.py | 42
-rwxr-xr-x bin/update/upload_builds.py | 32
-rwxr-xr-x bin/update_pch | 1308
-rwxr-xr-x bin/update_pch.sh | 65
-rwxr-xr-x bin/update_pch_autotune.sh | 229
-rwxr-xr-x bin/update_pch_bisect | 354
-rwxr-xr-x bin/upload_symbols.py | 55
-rwxr-xr-x bin/verify-custom-widgets-libs | 30
-rw-r--r-- binaryurp/CppunitTest_binaryurp_test-cache.mk | 22
-rw-r--r-- binaryurp/CppunitTest_binaryurp_test-unmarshal.mk | 33
-rw-r--r-- binaryurp/IwyuFilter_binaryurp.yaml | 34
-rw-r--r-- binaryurp/Library_binaryurp.mk | 40
-rw-r--r-- binaryurp/Makefile | 7
-rw-r--r-- binaryurp/Module_binaryurp.mk | 21
-rw-r--r-- binaryurp/README | 9
-rw-r--r-- binaryurp/qa/test-cache.cxx | 77
-rw-r--r-- binaryurp/qa/test-unmarshal.cxx | 94
-rw-r--r-- binaryurp/source/binaryany.cxx | 114
-rw-r--r-- binaryurp/source/binaryany.hxx | 69
-rw-r--r-- binaryurp/source/binaryurp.component | 25
-rw-r--r-- binaryurp/source/bridge.cxx | 1019
-rw-r--r-- binaryurp/source/bridge.hxx | 283
-rw-r--r-- binaryurp/source/bridgefactory.cxx | 220
-rw-r--r-- binaryurp/source/bridgefactory.hxx | 128
-rw-r--r-- binaryurp/source/cache.hxx | 98
-rw-r--r-- binaryurp/source/currentcontext.cxx | 55
-rw-r--r-- binaryurp/source/currentcontext.hxx | 37
-rw-r--r-- binaryurp/source/incomingreply.hxx | 54
-rw-r--r-- binaryurp/source/incomingrequest.cxx | 285
-rw-r--r-- binaryurp/source/incomingrequest.hxx | 82
-rw-r--r-- binaryurp/source/lessoperators.cxx | 65
-rw-r--r-- binaryurp/source/lessoperators.hxx | 42
-rw-r--r-- binaryurp/source/marshal.cxx | 300
-rw-r--r-- binaryurp/source/marshal.hxx | 91
-rw-r--r-- binaryurp/source/outgoingrequest.hxx | 49
-rw-r--r-- binaryurp/source/outgoingrequests.cxx | 68
-rw-r--r-- binaryurp/source/outgoingrequests.hxx | 61
-rw-r--r-- binaryurp/source/proxy.cxx | 238
-rw-r--r-- binaryurp/source/proxy.hxx | 88
-rw-r--r-- binaryurp/source/reader.cxx | 479
-rw-r--r-- binaryurp/source/reader.hxx | 67
-rw-r--r-- binaryurp/source/readerstate.hxx | 49
-rw-r--r-- binaryurp/source/specialfunctionids.hxx | 43
-rw-r--r-- binaryurp/source/unmarshal.cxx | 488
-rw-r--r-- binaryurp/source/unmarshal.hxx | 96
-rw-r--r-- binaryurp/source/writer.cxx | 454
-rw-r--r-- binaryurp/source/writer.hxx | 165
-rw-r--r-- binaryurp/source/writerstate.hxx | 52
144 files changed, 25015 insertions, 0 deletions
diff --git a/bin/README b/bin/README
new file mode 100644
index 000000000..d5d0829ce
--- /dev/null
+++ b/bin/README
@@ -0,0 +1,9 @@
+Tools and scripts mostly not used during the build
+
+This directory has a number of key pieces that are used during the
+build, or are simply generally useful. One example is
+
+bin/find-german-comments <directory>
+
+which will try to detect and extract all the German comments in a
+given source code hierarchy / directory.
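+
+For example, a hypothetical invocation scanning the Writer module:
+
+bin/find-german-comments sw/source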
diff --git a/bin/bash-completion.in b/bin/bash-completion.in
new file mode 100644
index 000000000..77087c593
--- /dev/null
+++ b/bin/bash-completion.in
@@ -0,0 +1,90 @@
+# Programmable bash_completion file for the main office applications
+# It is based on /etc/profile.d/complete.bash from SUSE Linux 10.1
+
+_def=; _dir=; _file=; _nosp=
+if complete -o default _nullcommand &> /dev/null ; then
+ _def="-o default"
+ _dir="-o dirnames"
+ _file="-o filenames"
+fi
+_minusdd="-d ${_dir}"
+_minusdf="-d ${_file}"
+if complete -o nospace _nullcommand &> /dev/null ; then
+ _nosp="-o nospace"
+ _minusdd="${_nosp} ${_dir}"
+ _minusdf="${_nosp} ${_dir}"
+fi
+complete -r _nullcommand &> /dev/null
+
+# General expanding shell function
+@OFFICE_SHELL_FUNCTION@ ()
+{
+    # bash `complete' is broken because you cannot combine
+ # -d, -f, and -X pattern without missing directories.
+ local c=${COMP_WORDS[COMP_CWORD]}
+ local a="${COMP_LINE}"
+ local e s g=0 cd dc t=""
+ local IFS
+
+ shopt -q extglob && g=1
+ test $g -eq 0 && shopt -s extglob
+ # Don't be fooled by the bash parser if extglob is off by default
+ cd='*-?(c)d*'
+ dc='*-d?(c)*'
+
+ case "${1##*/}" in
+@BASH_COMPLETION_SUFFIXES_CHECKS@
+ *) e='!*'
+ esac
+
+ case "$(complete -p ${1##*/} 2> /dev/null)" in
+ *-d*) ;;
+ *) s="-S/"
+ esac
+
+ IFS='
+'
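+    # Each arm below completes a different word shape: command substitutions
+    # ($(... and `...`), variable expansions (${... and $...), ~user and
+    # hostname (user@host) completion, and glob patterns; anything else falls
+    # through to the file/directory completion in the last arm.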
+ case "$c" in
+ \$\(*\)) eval COMPREPLY=\(${c}\) ;;
+ \$\(*) COMPREPLY=($(compgen -c -P '$(' -S ')' -- ${c#??})) ;;
+ \`*\`) eval COMPREPLY=\(${c}\) ;;
+ \`*) COMPREPLY=($(compgen -c -P '\`' -S '\`' -- ${c#?})) ;;
+ \$\{*\}) eval COMPREPLY=\(${c}\) ;;
+ \$\{*) COMPREPLY=($(compgen -v -P '${' -S '}' -- ${c#??})) ;;
+ \$*) COMPREPLY=($(compgen -v -P '$' -- ${c#?})) ;;
+ \~*/*) COMPREPLY=($(compgen -f -X "$e" -- ${c})) ;;
+ \~*) COMPREPLY=($(compgen -u ${s} -- ${c})) ;;
+ *@*) COMPREPLY=($(compgen -A hostname -P '@' -S ':' -- ${c#*@})) ;;
+ *[*?[]*) COMPREPLY=($(compgen -G "${c}")) ;;
+ *[?*+\!@]\(*\)*)
+ if test $g -eq 0 ; then
+ COMPREPLY=($(compgen -f -X "$e" -- $c))
+ test $g -eq 0 && shopt -u extglob
+ return
+ fi
+ COMPREPLY=($(compgen -G "${c}")) ;;
+ *)
+ if test "$c" = ".." ; then
+ COMPREPLY=($(compgen -d -X "$e" -S / ${_nosp} -- $c))
+ else
+ for s in $(compgen -f -X "$e" -- $c) ; do
+ if test -d $s ; then
+ COMPREPLY=(${COMPREPLY[@]} $(compgen -f -X "$e" -S / -- $s))
+ elif test -z "$t" ; then
+ COMPREPLY=(${COMPREPLY[@]} $s)
+ else
+ case "$(file -b $s 2> /dev/null)" in
+ $t) COMPREPLY=(${COMPREPLY[@]} $s) ;;
+ esac
+ fi
+ done
+ fi ;;
+ esac
+ test $g -eq 0 && shopt -u extglob
+}
+
+
+complete -d -X '.[^./]*' -F @OFFICE_SHELL_FUNCTION@ ${_file} \
+@BASH_COMPLETION_OOO_APPS@
+
+unset _def _dir _file _nosp _minusdd _minusdf
diff --git a/bin/benchmark-document-loading b/bin/benchmark-document-loading
new file mode 100644
index 000000000..11611a2b2
--- /dev/null
+++ b/bin/benchmark-document-loading
@@ -0,0 +1,486 @@
+#!/usr/bin/env python
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# Version: MPL 1.1 / GPLv3+ / LGPLv3+
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License or as specified alternatively below. You may obtain a copy of
+# the License at http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# Major Contributor(s):
+# Copyright (C) 2012 Red Hat, Inc., Michael Stahl <mstahl@redhat.com>
+# (initial developer)
+#
+# All Rights Reserved.
+#
+# For minor contributions see the git repository.
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 3 or later (the "GPLv3+"), or
+# the GNU Lesser General Public License Version 3 or later (the "LGPLv3+"),
+# in which case the provisions of the GPLv3+ or the LGPLv3+ are applicable
+# instead of those above.
+
+# Simple script to load a bunch of documents and export them as Flat ODF
+#
+# Personally I run it like this:
+# ~/lo/master-suse/instdir/program/python ~/lo/master-suse/bin/benchmark-document-loading --soffice=path:/home/tml/lo/master-suse/instdir/program/soffice --outdir=file://$PWD/out --userdir=file:///tmp/test $PWD/docs
+#
+
+import argparse
+import datetime
+import os
+import subprocess
+import sys
+import threading
+import time
+import urllib
+try:
+ from urllib.parse import quote
+except ImportError:
+ from urllib import quote
+import uuid
+
+try:
+ import pyuno
+ import uno
+ import unohelper
+except ImportError:
+ print("pyuno not found: try to set PYTHONPATH and URE_BOOTSTRAP variables")
+ print("PYTHONPATH=/installation/opt/program")
+ print("URE_BOOTSTRAP=file:///installation/opt/program/fundamentalrc")
+ raise
+
+try:
+ from com.sun.star.beans import PropertyValue
+ from com.sun.star.document import XDocumentEventListener
+ from com.sun.star.io import IOException, XOutputStream
+except ImportError:
+ print("UNO API class not found: try to set URE_BOOTSTRAP variable")
+ print("URE_BOOTSTRAP=file:///installation/opt/program/fundamentalrc")
+ raise
+
+validCalcFileExtensions = [ ".xlsx", ".xls", ".ods", ".fods" ]
+validWriterFileExtensions = [ ".docx" , ".rtf", ".odt", ".fodt", ".doc" ]
+validImpressFileExtensions = [ ".ppt", ".pptx", ".odp", ".fodp" ]
+validDrawFileExtensions = [ ".odg", ".fodg" ]
+validReverseFileExtensions = [ ".vsd", ".vdx", ".cdr", ".pub", ".wpd" ]
+validFileExtensions = {"calc": validCalcFileExtensions,
+ "writer": validWriterFileExtensions,
+ "impress": validImpressFileExtensions,
+ "draw": validDrawFileExtensions,
+ "reverse": validRevereseFileExtensions}
+flatODFTypes = {"calc": (".fods", "OpenDocument Spreadsheet Flat XML"),
+ "writer": (".fodt", "OpenDocument Text Flat XML"),
+ "impress": (".fodp", "OpenDocument Presentation Flat XML"),
+ "draw": (".fodg", "OpenDocument Drawing Flat XML")}
+
+outdir = ""
+
+def partition(list, pred):
+ left = []
+ right = []
+ for e in list:
+ if pred(e):
+ left.append(e)
+ else:
+ right.append(e)
+ return (left, right)
+
+def filelist(directory, suffix):
+ if not directory:
+ raise Exception("filelist: empty directory")
+ if directory[-1] != "/":
+ directory += "/"
+ files = [directory + f for f in os.listdir(directory)]
+# print(files)
+ return [f for f in files
+ if os.path.isfile(f) and os.path.splitext(f)[1] == suffix]
+
+def getFiles(dirs, suffix):
+# print( dirs )
+ files = []
+ for d in dirs:
+ files += filelist(d, suffix)
+ return files
+
+### UNO utilities ###
+
+class OutputStream( unohelper.Base, XOutputStream ):
+ def __init__( self ):
+ self.closed = 0
+
+ def closeOutput(self):
+ self.closed = 1
+
+ def writeBytes( self, seq ):
+ sys.stdout.write( seq.value )
+
+ def flush( self ):
+ pass
+
+class OfficeConnection:
+ def __init__(self, args):
+ self.args = args
+ self.soffice = None
+ self.socket = None
+ self.xContext = None
+ self.pro = None
+ def setUp(self):
+ (method, sep, rest) = self.args.soffice.partition(":")
+ if sep != ":":
+ raise Exception("soffice parameter does not specify method")
+ if method == "path":
+ socket = "pipe,name=pytest" + str(uuid.uuid1())
+ userdir = self.args.userdir
+ if not userdir:
+ raise Exception("'path' method requires --userdir")
+ if not userdir.startswith("file://"):
+ raise Exception("--userdir must be file URL")
+ self.soffice = self.bootstrap(rest, userdir, socket)
+ elif method == "connect":
+ socket = rest
+ else:
+ raise Exception("unsupported connection method: " + method)
+ self.xContext = self.connect(socket)
+
+ def bootstrap(self, soffice, userdir, socket):
+ argv = [ soffice, "--accept=" + socket + ";urp",
+ "-env:UserInstallation=" + userdir,
+ "--quickstart=no",
+ "--norestore", "--nologo", "--headless" ]
+ if self.args.valgrind:
+ argv.append("--valgrind")
+ os.putenv("SAL_LOG", "-INFO-WARN")
+ os.putenv("LIBO_ONEWAY_STABLE_ODF_EXPORT", "YES")
+ self.pro = subprocess.Popen(argv)
+# print(self.pro.pid)
+
+ def connect(self, socket):
+ xLocalContext = uno.getComponentContext()
+ xUnoResolver = xLocalContext.ServiceManager.createInstanceWithContext("com.sun.star.bridge.UnoUrlResolver", xLocalContext)
+ url = "uno:" + socket + ";urp;StarOffice.ComponentContext"
+# print("OfficeConnection: connecting to: " + url)
+ while True:
+ try:
+ xContext = xUnoResolver.resolve(url)
+ return xContext
+# except com.sun.star.connection.NoConnectException
+ except pyuno.getClass("com.sun.star.connection.NoConnectException"):
+# print("NoConnectException: sleeping...")
+ time.sleep(1)
+
+ def tearDown(self):
+ if self.soffice:
+ if self.xContext:
+ try:
+# print("tearDown: calling terminate()...")
+ xMgr = self.xContext.ServiceManager
+ xDesktop = xMgr.createInstanceWithContext("com.sun.star.frame.Desktop", self.xContext)
+ xDesktop.terminate()
+# print("...done")
+# except com.sun.star.lang.DisposedException:
+ except pyuno.getClass("com.sun.star.beans.UnknownPropertyException"):
+# print("caught UnknownPropertyException while TearDown")
+ pass # ignore, also means disposed
+ except pyuno.getClass("com.sun.star.lang.DisposedException"):
+# print("caught DisposedException while TearDown")
+ pass # ignore
+ else:
+ self.soffice.terminate()
+ ret = self.soffice.wait()
+ self.xContext = None
+ self.socket = None
+ self.soffice = None
+ if ret != 0:
+ raise Exception("Exit status indicates failure: " + str(ret))
+# return ret
+ def kill(self):
+ command = "kill " + str(self.pro.pid)
+ with open("killFile.log", "a") as killFile:
+ killFile.write(command + "\n")
+# print("kill")
+# print(command)
+ os.system(command)
+
+class PersistentConnection:
+ def __init__(self, args):
+ self.args = args
+ self.connection = None
+ def getContext(self):
+ return self.connection.xContext
+ def setUp(self):
+ assert(not self.connection)
+ conn = OfficeConnection(self.args)
+ conn.setUp()
+ self.connection = conn
+ def preTest(self):
+ assert(self.connection)
+ def postTest(self):
+ assert(self.connection)
+ def tearDown(self):
+ if self.connection:
+ try:
+ self.connection.tearDown()
+ finally:
+ self.connection = None
+ def kill(self):
+ if self.connection:
+ self.connection.kill()
+
+def simpleInvoke(connection, test):
+ try:
+ connection.preTest()
+ test.run(connection.getContext(), connection)
+ finally:
+ connection.postTest()
+
+def runConnectionTests(connection, invoker, tests):
+ try:
+ connection.setUp()
+ for test in tests:
+ invoker(connection, test)
+ finally:
+ pass
+ #connection.tearDown()
+
+class EventListener(XDocumentEventListener,unohelper.Base):
+ def __init__(self):
+ self.layoutFinished = False
+ def documentEventOccured(self, event):
+# print(str(event.EventName))
+ if event.EventName == "OnLayoutFinished":
+ self.layoutFinished = True
+    def disposing(self, event):
+ pass
+
+def mkPropertyValue(name, value):
+ return uno.createUnoStruct("com.sun.star.beans.PropertyValue",
+ name, 0, value, 0)
+
+### tests ###
+
+def logTimeSpent(url, startTime):
+ print(os.path.basename(urllib.parse.urlparse(url).path) + "\t" + str(time.time()-startTime))
+
+def loadFromURL(xContext, url, t, component):
+ xDesktop = xContext.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", xContext)
+ props = [("Hidden", True), ("ReadOnly", True)] # FilterName?
+ loadProps = tuple([mkPropertyValue(name, value) for (name, value) in props])
+ xListener = None
+ if component == "writer":
+ xListener = EventListener()
+ xGEB = xContext.getValueByName(
+ "/singletons/com.sun.star.frame.theGlobalEventBroadcaster")
+ xGEB.addDocumentEventListener(xListener)
+ try:
+ xDoc = None
+ startTime = time.time()
+ xDoc = xDesktop.loadComponentFromURL(url, "_blank", 0, loadProps)
+ if component == "calc":
+ try:
+ if xDoc:
+ xDoc.calculateAll()
+ except AttributeError:
+ pass
+ t.cancel()
+ logTimeSpent(url, startTime)
+ return xDoc
+ elif component == "writer":
+ time_ = 0
+ t.cancel()
+ while time_ < 30:
+ if xListener.layoutFinished:
+ logTimeSpent(url, startTime)
+ return xDoc
+# print("delaying...")
+ time_ += 1
+ time.sleep(1)
+ else:
+ t.cancel()
+ logTimeSpent(url, startTime)
+ return xDoc
+ with open("file.log", "a") as fh:
+ fh.write("layout did not finish\n")
+ return xDoc
+ except pyuno.getClass("com.sun.star.beans.UnknownPropertyException"):
+ xListener = None
+ raise # means crashed, handle it later
+ except pyuno.getClass("com.sun.star.lang.DisposedException"):
+ xListener = None
+ raise # means crashed, handle it later
+ except pyuno.getClass("com.sun.star.lang.IllegalArgumentException"):
+ pass # means could not open the file, ignore it
+ except:
+ if xDoc:
+# print("CLOSING")
+ xDoc.close(True)
+ raise
+ finally:
+ if xListener:
+ xGEB.removeDocumentEventListener(xListener)
+
+def exportToODF(xContext, xDoc, baseName, t, component):
+ exportFileName = outdir + "/" + os.path.splitext(baseName)[0] + flatODFTypes[component][0]
+ print("exportToODF " + baseName + " => " + exportFileName)
+ props = [("FilterName", flatODFTypes[component][1]),
+ ("Overwrite", True)]
+ storeProps = tuple([mkPropertyValue(name, value) for (name, value) in props])
+ xDoc.storeToURL(exportFileName, tuple(storeProps))
+
+def handleCrash(file, disposed):
+# print("File: " + file + " crashed")
+ with open("crashlog.txt", "a") as crashLog:
+ crashLog.write('Crash:' + file + ' ')
+ if disposed == 1:
+ crashLog.write('through disposed\n')
+# crashed_files.append(file)
+# add here the remaining handling code for crashed files
+
+def alarm_handler(args):
+ args.kill()
+
+class HandleFileTest:
+ def __init__(self, file, state, component):
+ self.file = file
+ self.state = state
+ self.component = component
+ def run(self, xContext, connection):
+# print("Loading document: " + self.file)
+ t = None
+ args = None
+ try:
+ url = "file://" + quote(self.file)
+ with open("file.log", "a") as fh:
+ fh.write(url + "\n")
+ xDoc = None
+ args = [connection]
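+            # watchdog: if loading hangs for more than 60s, alarm_handler
+            # kills the soffice process via connection.kill()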
+ t = threading.Timer(60, alarm_handler, args)
+ t.start()
+ xDoc = loadFromURL(xContext, url, t, self.component)
+ self.state.goodFiles.append(self.file)
+ exportToODF(xContext, xDoc, os.path.basename(urllib.parse.urlparse(url).path), t, self.component)
+ except pyuno.getClass("com.sun.star.beans.UnknownPropertyException"):
+# print("caught UnknownPropertyException " + self.file)
+ if not t.is_alive():
+# print("TIMEOUT!")
+ self.state.timeoutFiles.append(self.file)
+ else:
+ t.cancel()
+ handleCrash(self.file, 0)
+ self.state.badPropertyFiles.append(self.file)
+ connection.tearDown()
+ connection.setUp()
+ except pyuno.getClass("com.sun.star.lang.DisposedException"):
+# print("caught DisposedException " + self.file)
+ if not t.is_alive():
+# print("TIMEOUT!")
+ self.state.timeoutFiles.append(self.file)
+ else:
+ t.cancel()
+ handleCrash(self.file, 1)
+ self.state.badDisposedFiles.append(self.file)
+ connection.tearDown()
+ connection.setUp()
+ finally:
+ if t.is_alive():
+ t.cancel()
+ try:
+ if xDoc:
+ t = threading.Timer(10, alarm_handler, args)
+ t.start()
+ xDoc.close(True)
+ t.cancel()
+ except pyuno.getClass("com.sun.star.beans.UnknownPropertyException"):
+ print("caught UnknownPropertyException while closing")
+ self.state.badPropertyFiles.append(self.file)
+ connection.tearDown()
+ connection.setUp()
+ except pyuno.getClass("com.sun.star.lang.DisposedException"):
+ print("caught DisposedException while closing")
+ if t.is_alive():
+ t.cancel()
+ else:
+ self.state.badDisposedFiles.append(self.file)
+ connection.tearDown()
+ connection.setUp()
+# print("...done with: " + self.file)
+
+class State:
+ def __init__(self):
+ self.goodFiles = []
+ self.badDisposedFiles = []
+ self.badPropertyFiles = []
+ self.timeoutFiles = []
+
+
+def write_state_report(files_list, start_time, report_filename, description):
+ with open(report_filename, "w") as fh:
+ fh.write("%s:\n" % description)
+ fh.write("Starttime: %s\n" % start_time.isoformat())
+ for f in files_list:
+ fh.write("%s\n" % f)
+
+
+def writeReport(state, startTime):
+ write_state_report(state.goodFiles, startTime, "goodFiles.log",
+ "Files which loaded perfectly")
+ write_state_report(state.badDisposedFiles, startTime, "badDisposedFiles.log",
+ "Files which crashed with DisposedException")
+ write_state_report(state.badPropertyFiles, startTime, "badPropertyFiles.log",
+ "Files which crashed with UnknownPropertyException")
+ write_state_report(state.timeoutFiles, startTime, "timeoutFiles.log",
+ "Files which timed out")
+
+def runHandleFileTests(opts):
+ startTime = datetime.datetime.now()
+ connection = PersistentConnection(opts)
+ global outdir
+ outdir = os.path.join(opts.outdir, startTime.strftime('%Y%m%d.%H%M%S'))
+ try:
+ tests = []
+ state = State()
+# print("before map")
+ for component, validExtension in validFileExtensions.items():
+ files = []
+ for suffix in validExtension:
+ files.extend(getFiles(opts.dirs, suffix))
+ files.sort()
+ tests.extend( (HandleFileTest(file, state, component) for file in files) )
+ runConnectionTests(connection, simpleInvoke, tests)
+ finally:
+ connection.kill()
+ writeReport(state, startTime)
+
+def parseArgs(argv):
+ epilog = "'location' is a pathname, not a URL. 'outdir' and 'userdir' are URLs.\n" \
+ "The 'directory' parameters should be full absolute pathnames, not URLs."
+
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
+ epilog=epilog)
+ parser.add_argument('--soffice', metavar='method:location', required=True,
+ help="specify soffice instance to connect to\n"
+ "supported methods: 'path', 'connect'")
+ parser.add_argument('--outdir', metavar='URL', required=True,
+ help="specify the output directory for flat ODF exports")
+ parser.add_argument('--userdir', metavar='URL',
+ help="specify user installation directory for 'path' method")
+ parser.add_argument('--valgrind', action='store_true',
+ help="pass --valgrind to soffice for 'path' method")
+ parser.add_argument('dirs', metavar='directory', nargs='+')
+
+ args = parser.parse_args(argv[1:])
+
+ return args
+
+
+if __name__ == "__main__":
+ opts = parseArgs(sys.argv)
+ runHandleFileTests(opts)
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/bffvalidator.sh.in b/bin/bffvalidator.sh.in
new file mode 100644
index 000000000..e43522764
--- /dev/null
+++ b/bin/bffvalidator.sh.in
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+wine @BFFVALIDATOR_EXE@ `winepath -w $1`
diff --git a/bin/check-elf-dynamic-objects b/bin/check-elf-dynamic-objects
new file mode 100755
index 000000000..11f587363
--- /dev/null
+++ b/bin/check-elf-dynamic-objects
@@ -0,0 +1,262 @@
+#!/bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# verify that ELF NEEDED entries are known-good so hopefully builds run on
+# lots of different GNU/Linux distributions
+
+set -euo pipefail
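+# strict mode: exit on command failure (-e) and on unset variables (-u);
+# a failure anywhere in a pipeline fails the whole pipeline (pipefail)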
+
+PARA=1
+check_path="${INSTDIR:-.}"
+
+help()
+{
+ cat << "EOF"
+ -d <dir> directory to check
+ -p run unbound parallel checks
+ -h help
+EOF
+ [ -z "${1:-}" ] && exit 0
+}
+
+die()
+{
+ echo "$1"
+ echo
+ help 1
+ exit 1
+}
+
+while [ "${1:-}" != "" ]; do
+ parm=${1%%=*}
+ arg=${1#*=}
+ has_arg=
+ if [ "${1}" != "${parm?}" ] ; then
+ has_arg=1
+ else
+ arg=""
+ fi
+
+ case "${parm}" in
+ --dir|-d)
+ if [ "$has_arg" ] ; then
+ check_path="$arg"
+ else
+ shift
+ check_path="$1"
+ fi
+ if [ ! -d "$check_path" ]; then
+ die "Invalid directory '$check_path'"
+ fi
+ ;;
+ -h)
+ help
+ ;;
+ -p)
+            # this sounds counterintuitive but the idea
+ # is to possibly support -p <n>
+ # in the meantime: 0 = nolimit and -p 1 would mean
+ # the current default: serialize
+ PARA=0
+ ;;
+ -*)
+ die "Invalid option $1"
+ ;;
+ *)
+ if [ "$DO_NEW" = 1 ] ; then
+ REPO="$1"
+ else
+ die "Invalid argument $1"
+ fi
+ ;;
+ esac
+ shift
+done
+
+
+files=$(find "${check_path}/program" "${check_path}/sdk/bin" -type f)
+# all RPATHs should point to ${INSTDIR}/program so that's the files they find
+programfiles=$(echo ${files} | grep -o '/program/[^/]* ' | xargs -n 1 basename)
+
+# whitelists should contain only system libraries that have a good reputation
+# of maintaining ABI stability
+# allow extending the whitelist using the environment variable to be able to work
+# on the installer stuff without the need for a baseline setup
+globalwhitelist="ld-linux-x86-64.so.2 ld-linux.so.2 libc.so.6 libm.so.6 libdl.so.2 libpthread.so.0 librt.so.1 libutil.so.1 libnsl.so.1 libcrypt.so.1 libgcc_s.so.1 libstdc++.so.6 libz.so.1 libfontconfig.so.1 libfreetype.so.6 libxml2.so.2 libxslt.so.1 libexslt.so.0 ${LO_ELFCHECK_WHITELIST-}"
+x11whitelist="libX11.so.6 libX11-xcb.so.1 libXext.so.6 libSM.so.6 libICE.so.6 libXinerama.so.1 libXrender.so.1 libXrandr.so.2 libcairo.so.2"
+openglwhitelist="libGL.so.1"
+giowhitelist="libgio-2.0.so.0 libgobject-2.0.so.0 libgmodule-2.0.so.0 libgthread-2.0.so.0 libglib-2.0.so.0 libdbus-glib-1.so.2 libdbus-1.so.3"
+gstreamerwhitelist="libgstaudio-1.0.so.0 libgstpbutils-1.0.so.0 libgstvideo-1.0.so.0 libgstbase-1.0.so.0 libgstreamer-1.0.so.0"
+gtk3whitelist="libgtk-3.so.0 libgdk-3.so.0 libcairo-gobject.so.2 libpangocairo-1.0.so.0 libfribidi.so.0 libatk-1.0.so.0 libcairo.so.2 libgio-2.0.so.0 libpangoft2-1.0.so.0 libpango-1.0.so.0 libfontconfig.so.1 libfreetype.so.6 libgdk_pixbuf-2.0.so.0 libgobject-2.0.so.0 libglib-2.0.so.0 libgmodule-2.0.so.0 libgthread-2.0.so.0 libdbus-glib-1.so.2 libdbus-1.so.3 libharfbuzz.so.0"
+qt5whitelist="libQt5Core.so.5 libQt5Gui.so.5 libQt5Network.so.5 libQt5Widgets.so.5 libQt5X11Extras.so.5 libcairo.so.2 libglib-2.0.so.0 libgobject-2.0.so.0 libxcb.so.1 libxcb-icccm.so.4"
+kf5whitelist="libKF5ConfigCore.so.5 libKF5CoreAddons.so.5 libKF5I18n.so.5 libKF5KIOCore.so.5 libKF5KIOFileWidgets.so.5 libKF5KIOWidgets.so.5 libKF5WindowSystem.so.5"
+avahiwhitelist="libdbus-glib-1.so.2 libdbus-1.so.3 libgobject-2.0.so.0 libgmodule-2.0.so.0 libgthread-2.0.so.0 libglib-2.0.so.0 libavahi-common.so.3 libavahi-client.so.3"
+kerberoswhitelist="libgssapi_krb5.so.2 libcom_err.so.2 libkrb5.so.3"
+dconfwhitelist="libdconf.so.1 libgio-2.0.so.0 libglib-2.0.so.0 libgobject-2.0.so.0"
+
+check_one_file()
+{
+local file="$1"
+
+ skip=0
+ whitelist="${globalwhitelist}"
+ case "${file}" in
+ */sdk/docs/*)
+ # skip the majority of files, no ELF binaries here
+ skip=1
+ ;;
+ */_uuid.cpython-*.so)
+ whitelist="${whitelist} libuuid.so.1"
+ ;;
+ */libcairo.so.2)
+ whitelist="${whitelist} ${x11whitelist} libxcb-shm.so.0 libxcb.so.1 libxcb-render.so.0"
+ ;;
+ */libcairocanvaslo.so)
+ whitelist="${whitelist} libcairo.so.2"
+ ;;
+ */libucpgio1lo.so|*/liblosessioninstalllo.so|*/libevoablo.so)
+ whitelist="${whitelist} ${giowhitelist}"
+ ;;
+ */libavmediagst.so)
+ whitelist="${whitelist} ${gtk3whitelist} ${gstreamerwhitelist}"
+ ;;
+ */libvclplug_kf5lo.so|*/libkf5be1lo.so)
+ if [ "$ENABLE_KF5" = TRUE ]; then
+ whitelist="${whitelist} ${qt5whitelist} ${kf5whitelist}"
+ fi
+ ;;
+ */libvclplug_gtk3lo.so|*/updater)
+ whitelist="${whitelist} ${x11whitelist} ${gtk3whitelist}"
+ ;;
+ */libvclplug_qt5lo.so)
+ if [ "$ENABLE_QT5" = TRUE ]; then
+ whitelist="${whitelist} ${qt5whitelist}"
+ fi
+ ;;
+ */libvclplug_gtk3_kde5lo.so)
+ if [ "$ENABLE_GTK3_KDE5" = TRUE ]; then
+ whitelist="${whitelist} ${x11whitelist} ${gtk3whitelist} ${qt5whitelist} ${kf5whitelist}"
+ fi
+ ;;
+ */lo_kde5filepicker)
+ if [ "$ENABLE_GTK3_KDE5" = TRUE ]; then
+ whitelist="${whitelist} ${x11whitelist} ${gtk3whitelist} ${qt5whitelist} \
+ ${kf5whitelist}"
+ fi
+ ;;
+ */libdesktop_detectorlo.so|*/ui-previewer|*/oosplash|*/gengal.bin)
+ whitelist="${whitelist} ${x11whitelist}"
+ ;;
+ */libvclplug_genlo.so|*/libchartcorelo.so|*/libavmediaogl.so|*/libOGLTranslo.so|*/liboglcanvaslo.so)
+ whitelist="${whitelist} ${x11whitelist} ${openglwhitelist}"
+ ;;
+ */libvcllo.so)
+ whitelist="${whitelist} ${x11whitelist} ${openglwhitelist} ${giowhitelist} libcups.so.2"
+ ;;
+ */libsofficeapp.so)
+ whitelist="${whitelist} ${x11whitelist} ${openglwhitelist} ${giowhitelist} libcups.so.2"
+ ;;
+ */liblibreofficekitgtk.so)
+ whitelist="${whitelist} ${gtk3whitelist}"
+ ;;
+ */libsdlo.so)
+ whitelist="${whitelist} ${avahiwhitelist}"
+ ;;
+ */libskialo.so)
+ whitelist="${whitelist} ${openglwhitelist} ${x11whitelist}"
+ ;;
+ */libofficebean.so)
+ whitelist="${whitelist} libjawt.so"
+ ;;
+ */libpostgresql-sdbc-impllo.so)
+ whitelist="${whitelist} ${kerberoswhitelist}"
+ ;;
+ */libconfigmgrlo.so)
+ if [ "$ENABLE_DCONF" = TRUE ]; then
+ whitelist="${whitelist} ${dconfwhitelist}"
+ fi
+ ;;
+ */libmergedlo.so)
+ whitelist="${whitelist} ${x11whitelist} ${openglwhitelist} ${giowhitelist} libcups.so.2 libcairo.so.2"
+ ;;
+ esac
+ if test "${skip}" = 0 && readelf -d "${file}" &> /dev/null ; then
+ rpath=$(readelf -d "${file}" | grep '(\(RPATH\|RUNPATH\))' || true)
+ neededs=$(readelf -d "${file}" | grep '(NEEDED)' | sed -e 's/.*\[\(.*\)\]$/\1/')
+ neededsinternal=
+ for needed in ${neededs}
+ do
+ if ! echo ${whitelist} | grep -q -w "${needed}" ; then
+ neededsinternal="${neededsinternal} ${needed}"
+ if ! echo ${programfiles} | grep -q -w "${needed}" ; then
+ echo "${file}" has suspicious NEEDED: "${needed}"
+ status=1
+ fi
+ fi
+ done
+ if test -z "${rpath}" ; then
+ case "${file}" in
+ */python-core-*/lib/lib-dynload/*)
+ # python modules don't have RPATH
+ ;;
+ */share/extensions/*)
+ # extension libraries don't have RPATH
+ ;;
+ *)
+ # no NEEDED from instdir, no RPATH needed
+ if test -n "${neededsinternal}" ; then
+ echo "${file}" has no RPATH
+ status=1
+ fi
+ ;;
+ esac
+ else
+ case "$file" in
+ */sdk/bin/*)
+ if echo "${rpath}" | grep -q -v '\[\$ORIGIN/../../program\]$' ; then
+ echo "${file}" has unexpected RPATH "${rpath}"
+ status=1
+ fi
+ ;;
+ *)
+ if echo "${rpath}" | grep -q -v '\[\$ORIGIN\]$' ; then
+ echo "${file}" has unexpected RPATH "${rpath}"
+ status=1
+ fi
+ ;;
+ esac
+ fi
+ fi
+}
+status=0
+
+if [ "$PARA" = "1" ] ; then
+ for file in ${files}
+ do
+ check_one_file $file
+ done
+else
+ rm -f check_elf.out
+ for file in ${files}
+ do
+ (
+ check_one_file $file
+ )>> check_elf.out &
+ done
+
+ wait
+
+ if [ -s check_elf.out ] ; then
+ cat check_elf.out
+ status=1
+ fi
+ rm check_elf.out
+fi
+exit ${status}
+
diff --git a/bin/check-icon-sizes.py b/bin/check-icon-sizes.py
new file mode 100755
index 000000000..535caa3ef
--- /dev/null
+++ b/bin/check-icon-sizes.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import os
+
+from PIL import Image
+
+"""
+This script walks through all icon files and checks whether the sc_* and lc_* files have the correct size.
+"""
+
+icons_folder = os.path.abspath(os.path.join(__file__, '..', '..', 'icon-themes'))
+
+def check_size(filename, size):
+ image = Image.open(filename)
+ width, height = image.size
+ if width != size or height != size:
+ print("%s has size %dx%d but should have %dx%d" % (filename, width, height, size, size))
+
+for root, dirs, files in os.walk(icons_folder):
+ for filename in files:
+ if not filename.endswith('png'):
+ continue
+ if filename.startswith('lc_'):
+ check_size(os.path.join(root, filename), 24)
+ elif filename.startswith('sc_'):
+ check_size(os.path.join(root, filename), 16)
+
diff --git a/bin/check-implementer-notes.py b/bin/check-implementer-notes.py
new file mode 100755
index 000000000..10b7c168b
--- /dev/null
+++ b/bin/check-implementer-notes.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+import json, re, subprocess, sys, urllib3
+
+http = urllib3.PoolManager()
+
+# TDF implementer notes pages for LibreOffice
+wiki_pages = [
+ 'https://wiki.documentfoundation.org/api.php?action=parse&format=json&page=Development/ODF_Implementer_Notes/List_of_LibreOffice_ODF_Extensions&prop=wikitext',
+ 'https://wiki.documentfoundation.org/api.php?action=parse&format=json&page=Development/ODF_Implementer_Notes/List_of_LibreOffice_OpenFormula_Extensions&prop=wikitext']
+
+# get all commit hashes mentioned in implementer notes
+wiki_commit_hashes = {}
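+# the wiki text marks commits with a {{commit|<hash>|...|...}} template;
+# capture the hash from the first field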
+query = re.compile(r'\{\{commit\|(\w+)\|\w*\|\w*\}\}', re.IGNORECASE)
+for page in wiki_pages:
+ r = http.request('GET', page)
+ data = json.loads(r.data.decode('utf-8'))
+ for line in data['parse']['wikitext']['*'].split('\n'):
+ for res in query.finditer(line):
+ wiki_commit_hashes[res.group(1)] = ''
+
+# get all commits that change core/schema/* - and are _not_ mentioned
+# in the wiki page
+# Cut-off is May 18th 2020, when Michael Stahl had finished cleaning this up
+for commit in subprocess.check_output(
+ ['git', '--no-pager', '-C', sys.path[0]+'/..', 'log',
+ '--since=2020-05-18', '--format=%H', '--', 'schema/'],
+ stderr=subprocess.STDOUT).decode("utf-8").split("\n"):
+ if commit != '' and commit not in wiki_commit_hashes:
+ print('missing commit: %s' % commit)
+
diff --git a/bin/check-missing-unittests.py b/bin/check-missing-unittests.py
new file mode 100755
index 000000000..9a81b3ab7
--- /dev/null
+++ b/bin/check-missing-unittests.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import datetime
+import subprocess
+import sys
+
+def main(ignoredBugs):
+ results = {
+ 'export': {
+ 'docx': {},
+ 'doc': {},
+ 'pptx': {},
+ 'xlsx': {},
+ 'xhtml': {},
+ 'html': {},
+ }
+ }
+ hasTestSet = set()
+
+ repoPath = os.path.dirname(os.path.abspath(__file__)) + '/..'
+ branch = subprocess.check_output(
+ ['git', '-C', repoPath, 'rev-parse', '--abbrev-ref', 'HEAD'],
+ stderr=subprocess.DEVNULL)
+ last_hash = subprocess.check_output(
+ ['git', '-C', repoPath, 'rev-parse', 'HEAD'],
+ stderr=subprocess.DEVNULL)
+ output = subprocess.check_output(
+        ['git', '-C', repoPath, 'log', '--since=2012-01-01', '--name-only', '--pretty=format:%s'],
+ stderr=subprocess.DEVNULL)
+ commits = output.decode('utf-8', 'ignore').split('\n\n')
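+    # with --pretty=format:%s and --name-only each record is the subject line
+    # directly followed by the changed files; records are separated by blank lines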
+
+ for commit in reversed(commits):
+
+ summary = commit.split('\n', 1)[0].lower()
+
+ #Check summary has a bug id
+ if 'tdf#' in summary or 'fdo#' in summary:
+
+ isIgnored = False
+ for i in ignoredBugs:
+ if i in summary:
+ isIgnored = True
+ if isIgnored:
+ continue
+
+ if 'tdf#' in summary:
+ if not summary.split('tdf#')[1][0].isdigit():
+ continue
+ bugId = ''.join(filter(str.isdigit, summary.split('tdf#')[1].split(' ')[0]))
+ elif 'fdo#' in summary:
+ if not summary.split('fdo#')[1][0].isdigit():
+ continue
+ bugId = ''.join(filter(str.isdigit, summary.split('fdo#')[1].split(' ')[0]))
+
+
+ if bugId in hasTestSet:
+ continue
+
+ changedFiles = commit.split('\n', 1)[1]
+ if 'qa' in changedFiles:
+ hasTestSet.add(bugId)
+ continue
+
+ elif 'sw/source/filter/ww8/docx' in changedFiles or \
+ 'writerfilter/source/dmapper' in changedFiles or \
+ 'starmath/source/ooxmlimport' in changedFiles:
+ results['export']['docx'][bugId] = summary
+
+ elif 'sw/source/filter/ww8/ww8' in changedFiles:
+ results['export']['doc'][bugId] = summary
+
+ elif 'sc/source/filter/excel/xe' in changedFiles:
+ results['export']['xlsx'][bugId] = summary
+
+ elif 'oox/source/export/' in changedFiles:
+ results['export']['pptx'][bugId] = summary
+
+ elif 'filter/source/xslt/odf2xhtml/export' in changedFiles:
+ results['export']['xhtml'][bugId] = summary
+
+ elif 'sw/source/filter/html/' in changedFiles:
+ results['export']['html'][bugId] = summary
+
+ # Add others here
+
+ print()
+ print('{{TopMenu}}')
+ print('{{Menu}}')
+ print('{{Menu.Development}}')
+ print()
+ print('Date: ' + str(datetime.datetime.now()))
+ print()
+ print('Commits: ' + str(len(commits)))
+ print()
+ print('Branch: ' + branch.decode().strip())
+ print()
+ print('Hash: ' + str(last_hash.decode().strip()))
+ for k,v in results.items():
+ print('\n== ' + k + ' ==')
+ for k1, v1 in v.items():
+ print('\n=== ' + k1 + ' ===')
+ for bugId, summary in v1.items():
+ if bugId not in hasTestSet:
+ print(
+ "* {} - [https://bugs.documentfoundation.org/show_bug.cgi?id={} tdf#{}]".format(
+ summary, bugId, bugId))
+ print('\n== ignored bugs ==')
+ print(' '.join(ignoredBugs))
+ print()
+ print('[[Category:QA]][[Category:Development]]')
+
+def usage():
+ message = """usage: {program} [bugs to ignore (each one is one argument)]
+
+Sample: {program} 10000 10001 10002"""
+ print(message.format(program = os.path.basename(sys.argv[0])))
+
+if __name__ == '__main__':
+
+ args = set()
+ if len(sys.argv) > 1:
+ arg1 = sys.argv[1]
+ if arg1 == '-h' or arg1 == "--help":
+ usage()
+ sys.exit(1)
+ for i in sys.argv:
+ if i.isdigit():
+ args.add(i)
+
+ main(sorted(args))
diff --git a/bin/convwatch.py b/bin/convwatch.py
new file mode 100644
index 000000000..d88d43d64
--- /dev/null
+++ b/bin/convwatch.py
@@ -0,0 +1,462 @@
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# Conversion watch, initially intended to detect if document layout changed since the last time it was run.
+#
+# Print a set of docs, compare the pdf against the old run and highlight the differences
+#
+
+import getopt
+import os
+import subprocess
+import sys
+import time
+import uuid
+import datetime
+import traceback
+import threading
+try:
+ from urllib.parse import quote
+except ImportError:
+ from urllib import quote
+
+try:
+ import pyuno
+ import uno
+ import unohelper
+except ImportError:
+ print("pyuno not found: try to set PYTHONPATH and URE_BOOTSTRAP variables")
+ print("PYTHONPATH=/installation/opt/program")
+ print("URE_BOOTSTRAP=file:///installation/opt/program/fundamentalrc")
+ raise
+
+try:
+ from com.sun.star.document import XDocumentEventListener
+except ImportError:
+ print("UNO API class not found: try to set URE_BOOTSTRAP variable")
+ print("URE_BOOTSTRAP=file:///installation/opt/program/fundamentalrc")
+ raise
+
+### utilities ###
+
+def log(*args):
+ print(*args, flush=True)
+
+def partition(list, pred):
+ left = []
+ right = []
+ for e in list:
+ if pred(e):
+ left.append(e)
+ else:
+ right.append(e)
+ return (left, right)
+
+def filelist(dir, suffix):
+ if len(dir) == 0:
+ raise Exception("filelist: empty directory")
+ if not(dir[-1] == "/"):
+ dir += "/"
+ files = [dir + f for f in os.listdir(dir)]
+# log(files)
+ return [f for f in files
+ if os.path.isfile(f) and os.path.splitext(f)[1] == suffix]
+
+def getFiles(dirs, suffix):
+ files = []
+ for dir in dirs:
+ files += filelist(dir, suffix)
+ return files
+
+### UNO utilities ###
+
+class OfficeConnection:
+ def __init__(self, args):
+ self.args = args
+ self.soffice = None
+ self.socket = None
+ self.xContext = None
+ def setUp(self):
+ (method, sep, rest) = self.args["--soffice"].partition(":")
+ if sep != ":":
+ raise Exception("soffice parameter does not specify method")
+ if method == "path":
+ self.socket = "pipe,name=pytest" + str(uuid.uuid1())
+ try:
+ userdir = self.args["--userdir"]
+ except KeyError:
+ raise Exception("'path' method requires --userdir")
+ if not(userdir.startswith("file://")):
+ raise Exception("--userdir must be file URL")
+ self.soffice = self.bootstrap(rest, userdir, self.socket)
+ elif method == "connect":
+ self.socket = rest
+ else:
+ raise Exception("unsupported connection method: " + method)
+ self.xContext = self.connect(self.socket)
+
+ def bootstrap(self, soffice, userdir, socket):
+ argv = [ soffice, "--accept=" + socket + ";urp",
+ "-env:UserInstallation=" + userdir,
+ "--quickstart=no",
+ "--norestore", "--nologo", "--headless" ]
+ if "--valgrind" in self.args:
+ argv.append("--valgrind")
+ return subprocess.Popen(argv)
+
+ def connect(self, socket):
+ xLocalContext = uno.getComponentContext()
+ xUnoResolver = xLocalContext.ServiceManager.createInstanceWithContext(
+ "com.sun.star.bridge.UnoUrlResolver", xLocalContext)
+ url = "uno:" + socket + ";urp;StarOffice.ComponentContext"
+ log("OfficeConnection: connecting to: " + url)
+ while True:
+ try:
+ xContext = xUnoResolver.resolve(url)
+ return xContext
+# except com.sun.star.connection.NoConnectException
+ except pyuno.getClass("com.sun.star.connection.NoConnectException"):
+ log("NoConnectException: sleeping...")
+ time.sleep(1)
+
+ def tearDown(self):
+ if self.soffice:
+ if self.xContext:
+ try:
+ log("tearDown: calling terminate()...")
+ xMgr = self.xContext.ServiceManager
+ xDesktop = xMgr.createInstanceWithContext(
+ "com.sun.star.frame.Desktop", self.xContext)
+ xDesktop.terminate()
+ log("...done")
+# except com.sun.star.lang.DisposedException:
+ except pyuno.getClass("com.sun.star.beans.UnknownPropertyException"):
+ log("caught UnknownPropertyException")
+ pass # ignore, also means disposed
+ except pyuno.getClass("com.sun.star.lang.DisposedException"):
+ log("caught DisposedException")
+ pass # ignore
+ else:
+ self.soffice.terminate()
+ ret = self.soffice.wait()
+ self.xContext = None
+ self.socket = None
+ self.soffice = None
+ if ret != 0:
+ raise Exception("Exit status indicates failure: " + str(ret))
+# return ret
+
+class WatchDog(threading.Thread):
+ def __init__(self, connection):
+ threading.Thread.__init__(self, name="WatchDog " + connection.socket)
+ self.connection = connection
+ def run(self):
+ try:
+ if self.connection.soffice: # not possible for "connect"
+ self.connection.soffice.wait(timeout=120) # 2 minutes?
+ except subprocess.TimeoutExpired:
+ log("WatchDog: TIMEOUT -> killing soffice")
+ self.connection.soffice.terminate() # actually killing oosplash...
+ self.connection.xContext = None
+ log("WatchDog: killed soffice")
+
+class PerTestConnection:
+ def __init__(self, args):
+ self.args = args
+ self.connection = None
+ self.watchdog = None
+ def getContext(self):
+ return self.connection.xContext
+ def setUp(self):
+ assert(not(self.connection))
+ def preTest(self):
+ conn = OfficeConnection(self.args)
+ conn.setUp()
+ self.connection = conn
+ self.watchdog = WatchDog(self.connection)
+ self.watchdog.start()
+ def postTest(self):
+ if self.connection:
+ try:
+ self.connection.tearDown()
+ finally:
+ self.connection = None
+ self.watchdog.join()
+ def tearDown(self):
+ assert(not(self.connection))
+
+class PersistentConnection:
+ def __init__(self, args):
+ self.args = args
+ self.connection = None
+ def getContext(self):
+ return self.connection.xContext
+ def setUp(self):
+ conn = OfficeConnection(self.args)
+ conn.setUp()
+ self.connection = conn
+ def preTest(self):
+ assert(self.connection)
+ def postTest(self):
+ assert(self.connection)
+ def tearDown(self):
+ if self.connection:
+ try:
+ self.connection.tearDown()
+ finally:
+ self.connection = None
+
+def simpleInvoke(connection, test):
+ try:
+ connection.preTest()
+ test.run(connection.getContext())
+ finally:
+ connection.postTest()
+
+def retryInvoke(connection, test):
+ tries = 5
+ while tries > 0:
+ try:
+ tries -= 1
+ try:
+ connection.preTest()
+ test.run(connection.getContext())
+ return
+ finally:
+ connection.postTest()
+ except KeyboardInterrupt:
+ raise # Ctrl+C should work
+ except:
+ log("retryInvoke: caught exception")
+ raise Exception("FAILED retryInvoke")
+
+def runConnectionTests(connection, invoker, tests):
+ try:
+ connection.setUp()
+ failed = []
+ for test in tests:
+ try:
+ invoker(connection, test)
+ except KeyboardInterrupt:
+ raise # Ctrl+C should work
+ except:
+ failed.append(test.file)
+ estr = traceback.format_exc()
+ log("... FAILED with exception:\n" + estr)
+ return failed
+ finally:
+ connection.tearDown()
+
+class EventListener(XDocumentEventListener,unohelper.Base):
+ def __init__(self):
+ self.layoutFinished = False
+ def documentEventOccured(self, event):
+# log(str(event.EventName))
+ if event.EventName == "OnLayoutFinished":
+ self.layoutFinished = True
+    def disposing(self, event):
+ pass
+
+def mkPropertyValue(name, value):
+ return uno.createUnoStruct("com.sun.star.beans.PropertyValue",
+ name, 0, value, 0)
+
+### tests ###
+
+def loadFromURL(xContext, url):
+ xDesktop = xContext.ServiceManager.createInstanceWithContext(
+ "com.sun.star.frame.Desktop", xContext)
+ props = [("Hidden", True), ("ReadOnly", True)] # FilterName?
+ loadProps = tuple([mkPropertyValue(name, value) for (name, value) in props])
+ xListener = EventListener()
+ xGEB = xContext.getValueByName(
+ "/singletons/com.sun.star.frame.theGlobalEventBroadcaster")
+ xGEB.addDocumentEventListener(xListener)
+ xDoc = None
+ try:
+ xDoc = xDesktop.loadComponentFromURL(url, "_blank", 0, loadProps)
+ if xDoc is None:
+ raise Exception("No document loaded?")
+ time_ = 0
+ while time_ < 30:
+ if xListener.layoutFinished:
+ return xDoc
+ log("delaying...")
+ time_ += 1
+ time.sleep(1)
+ log("timeout: no OnLayoutFinished received")
+ return xDoc
+ except:
+ if xDoc:
+ log("CLOSING")
+ xDoc.close(True)
+ raise
+ finally:
+ if xListener:
+ xGEB.removeDocumentEventListener(xListener)
+
+def printDoc(xContext, xDoc, url):
+ props = [ mkPropertyValue("FileName", url) ]
+# xDoc.print(props)
+ uno.invoke(xDoc, "print", (tuple(props),)) # damn, that's a keyword!
+ busy = True
+ while busy:
+ log("printing...")
+ time.sleep(1)
+ prt = xDoc.getPrinter()
+ for value in prt:
+ if value.Name == "IsBusy":
+ busy = value.Value
+ log("...done printing")
+
+class LoadPrintFileTest:
+ def __init__(self, file, prtsuffix):
+ self.file = file
+ self.prtsuffix = prtsuffix
+ def run(self, xContext):
+ start = datetime.datetime.now()
+ log("Time: " + str(start) + " Loading document: " + self.file)
+ xDoc = None
+ try:
+ if os.name == 'nt' and self.file[1] == ':':
+ url = "file:///" + self.file[0:2] + quote(self.file[2:])
+ else:
+ url = "file://" + quote(self.file)
+ xDoc = loadFromURL(xContext, url)
+ printDoc(xContext, xDoc, url + self.prtsuffix)
+ finally:
+ if xDoc:
+ xDoc.close(True)
+ end = datetime.datetime.now()
+ log("...done with: " + self.file + " in: " + str(end - start))
+
+def runLoadPrintFileTests(opts, dirs, suffix, reference):
+ if reference:
+ prtsuffix = ".pdf.reference"
+ else:
+ prtsuffix = ".pdf"
+ files = getFiles(dirs, suffix)
+ tests = (LoadPrintFileTest(file, prtsuffix) for file in files)
+# connection = PersistentConnection(opts)
+ connection = PerTestConnection(opts)
+ failed = runConnectionTests(connection, simpleInvoke, tests)
+ print("all printed: FAILURES: " + str(len(failed)))
+ for fail in failed:
+ print(fail)
+ return failed
+
+def mkImages(file, resolution):
+ argv = [ "gs", "-r" + resolution, "-sOutputFile=" + file + ".%04d.jpeg",
+ "-dNOPROMPT", "-dNOPAUSE", "-dBATCH", "-sDEVICE=jpeg", file ]
+ ret = subprocess.check_call(argv)
+
+def mkAllImages(dirs, suffix, resolution, reference, failed):
+ if reference:
+ prtsuffix = ".pdf.reference"
+ else:
+ prtsuffix = ".pdf"
+ for dir in dirs:
+ files = filelist(dir, suffix)
+ log(files)
+ for f in files:
+ if f in failed:
+ log("Skipping failed: " + f)
+ else:
+ mkImages(f + prtsuffix, resolution)
+
+def identify(imagefile):
+ argv = ["identify", "-format", "%k", imagefile]
+ process = subprocess.Popen(argv, stdout=subprocess.PIPE)
+ result, _ = process.communicate()
+ if process.wait() != 0:
+ raise Exception("identify failed")
+ if result.partition(b"\n")[0] != b"1":
+ log("identify result: " + result.decode('utf-8'))
+ log("DIFFERENCE in " + imagefile)
+
+def compose(refimagefile, imagefile, diffimagefile):
+ argv = [ "composite", "-compose", "difference",
+ refimagefile, imagefile, diffimagefile ]
+ subprocess.check_call(argv)
+
+def compareImages(file):
+ allimages = [f for f in filelist(os.path.dirname(file), ".jpeg")
+ if f.startswith(file)]
+# refimages = [f for f in filelist(os.path.dirname(file), ".jpeg")
+# if f.startswith(file + ".reference")]
+# log("compareImages: allimages:" + str(allimages))
+ (refimages, images) = partition(sorted(allimages),
+ lambda f: f.startswith(file + ".pdf.reference"))
+# log("compareImages: images" + str(images))
+ for (image, refimage) in zip(images, refimages):
+ compose(image, refimage, image + ".diff")
+ identify(image + ".diff")
+ if (len(images) != len(refimages)):
+ log("DIFFERENT NUMBER OF IMAGES FOR: " + file)
+
+def compareAllImages(dirs, suffix):
+ log("compareAllImages...")
+ for dir in dirs:
+ files = filelist(dir, suffix)
+# log("compareAllImages:" + str(files))
+ for f in files:
+ compareImages(f)
+ log("...compareAllImages done")
+
+
+def parseArgs(argv):
+ (optlist,args) = getopt.getopt(argv[1:], "hr",
+ ["help", "soffice=", "userdir=", "reference", "valgrind"])
+# print optlist
+ return (dict(optlist), args)
+
+def usage():
+ message = """usage: {program} [option]... [directory]..."
+ -h | --help: print usage information
+ -r | --reference: generate new reference files (otherwise: compare)
+ --soffice=method:location
+ specify soffice instance to connect to
+ supported methods: 'path', 'connect'
+ --userdir=URL specify user installation directory for 'path' method
+ --valgrind pass --valgrind to soffice for 'path' method"""
+ print(message.format(program = os.path.basename(sys.argv[0])))
+
+def checkTools():
+ try:
+ subprocess.check_output(["gs", "--version"])
+ except:
+ print("Cannot execute 'gs'. Please install ghostscript.")
+ sys.exit(1)
+ try:
+ subprocess.check_output(["composite", "-version"])
+ subprocess.check_output(["identify", "-version"])
+ except:
+ print("Cannot execute 'composite' or 'identify'.")
+ print("Please install ImageMagick.")
+ sys.exit(1)
+
+if __name__ == "__main__":
+ checkTools()
+ (opts,args) = parseArgs(sys.argv)
+ if len(args) == 0:
+ usage()
+ sys.exit(1)
+ if "-h" in opts or "--help" in opts:
+ usage()
+ sys.exit()
+ elif "--soffice" in opts:
+ reference = "-r" in opts or "--reference" in opts
+ failed = runLoadPrintFileTests(opts, args, ".odt", reference)
+ mkAllImages(args, ".odt", "200", reference, failed)
+ if not(reference):
+ compareAllImages(args, ".odt")
+ else:
+ usage()
+ sys.exit(1)
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/distro-install-clean-up b/bin/distro-install-clean-up
new file mode 100755
index 000000000..701c9ffa0
--- /dev/null
+++ b/bin/distro-install-clean-up
@@ -0,0 +1,92 @@
+#!/bin/sh
+
+if test -z "${SRC_ROOT}"; then
+ echo "distro-install-clean-up: No environment set!"
+ exit 1
+fi
+
+echo "Cleaning up ...";
+
+remove_help_localization()
+{
+ lang=$1
+
+    # nothing to be done if the localization is en-US, if it does not exist,
+ # or if it is already removed
+ test "$lang" = "en-US" -o \
+ ! -e $DESTDIR$INSTALLDIR/help/$lang -o \
+ -L $DESTDIR$INSTALLDIR/help/$lang && return;
+
+ echo "... remove \"$lang\""
+
+ rm -rf $DESTDIR$INSTALLDIR/help/$lang
+ grep -v "$INSTALLDIR/help/$lang" $DESTDIR/gid_Module_Root.$lang >$DESTDIR/gid_Module_Root.$lang.new
+ mv -f $DESTDIR/gid_Module_Root.$lang.new $DESTDIR/gid_Module_Root.$lang
+ # FIXME: the following code could be used without the condition
+ # and should replace the lines above after only the milestones
+ # providing gid_Module_Helppack_Help and fixed gid_Module_Root.$lang
+ # are supported
+ # Note: The problem with gid_Module_Root.$lang is that it still includes
+ # %dir */help/* entries.
+ # Note: It was still necessary on ppc with gcj (OOo-2.0.2). Strange. Have to
+ # investigate it later.
+ if test -f $DESTDIR/gid_Module_Helppack_Help.$lang ; then
+ grep -v "$INSTALLDIR/help/$lang" $DESTDIR/gid_Module_Helppack_Help.$lang >$DESTDIR/gid_Module_Helppack_Help.$lang.new
+ mv -f $DESTDIR/gid_Module_Helppack_Help.$lang.new $DESTDIR/gid_Module_Helppack_Help.$lang
+ fi
+
+ # Note: We created a compat symlink in the past. It is no longer necessary.
+ # We do not want it because RPM has problems with update when we remove
+    # poor localizations in newer packages
+}
+
+# Check if the English help is installed and is in the main package (is first on the list)
+# Note that Java-disabled builds do not create help at all.
+if test -f $DESTDIR$INSTALLDIR/help/en/sbasic.cfg -a \
+ "`for lang in $WITH_LANG_LIST ; do echo $lang ; break ; done`" = "en-US" ; then
+
+ echo "Removing duplicated English help..."
+
+ for lang in $WITH_LANG_LIST ; do
+ test ! -f $DESTDIR$INSTALLDIR/help/en/sbasic.cfg -o ! -f $DESTDIR$INSTALLDIR/help/$lang/sbasic.cfg && continue;
+ if diff $DESTDIR$INSTALLDIR/help/en/sbasic.cfg $DESTDIR$INSTALLDIR/help/$lang/sbasic.cfg >/dev/null 2>&1 ; then
+ remove_help_localization $lang
+ fi
+ done
+
+ echo "Removing poor help localizations..."
+
+ for lang in $WITH_POOR_HELP_LOCALIZATIONS ; do
+ remove_help_localization $lang
+ done
+fi
+
+echo "Fixing permissions..."
+for dir in $DESTDIR$DOCDIR $DESTDIR$INSTALLDIR/sdk/examples ; do
+ if test -d $dir -a -w $dir ; then
+ find "$dir" -type f \( -name "*.txt" -o -name "*.java" -o -name "*.xml" -o \
+ -name "*.xcu" -o -name "*.xcs" -o -name "*.html" -o \
+ -name "*.pdf" -o -name "*.ps" -o -name "*.gif" -o \
+ -name "*.png" -o -name "*.jpg" -o -name "Makefile" -o \
+ -name "manifest.mf" \) -exec chmod 644 {} \;
+ fi
+done
+
+if test "z$DESTDIR" != "z" ; then
+ echo "Checking for DESTDIR inside installed files..."
+ found_destdir=
+ for file in `find $DESTDIR -type f` ; do
+ grep -q "$DESTDIR" $file && echo "$file: includes the string \"$DESTDIR\"" && found_destdir=1
+ done
+ if test "z$found_destdir" != "z" ; then
+ echo "!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!"
+ echo "The path DESTDIR:$DESTDIR was found inside some"
+ echo "installed files. It is probably a bug."
+ echo
+ echo "Especially, if the DESTDIR is set to \$RPM_BUILD_ROOT"
+ echo "when creating RPM packages. Even it could be a security hole"
+ echo "if the application searches /var/tmp for binaries or"
+ echo "config files because the directory is world-writable."
+ echo "!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!"
+ fi
+fi
diff --git a/bin/distro-install-desktop-integration b/bin/distro-install-desktop-integration
new file mode 100755
index 000000000..1da104e47
--- /dev/null
+++ b/bin/distro-install-desktop-integration
@@ -0,0 +1,192 @@
+#!/bin/sh
+
+if test -z "${SRC_ROOT}"; then
+ echo "distro-install-clean-up: No environment set!"
+ exit 1
+fi
+
+PRODUCTVERSION_NODOT=`echo $PRODUCTVERSION | sed -e "s/\.//"`
+
+mkdir -p "$DESTDIR$BINDIR"
+
+create_wrapper()
+{
+ echo "Install $BINDIR/$1"
+
+ if test -L "$DESTDIR$BINDIR/$1" ; then
+ # do not overwrite the $BINDIR/libreoffice symlink created by create_tree.sh;
+ # the symlink is needed by Java UNO components to find the UNO
+ # installation via $PATH; this role used to be played
+ # by the $BINDIR/soffice symlink, see
+ # http://udk.openoffice.org/common/man/spec/transparentofficecomponents.html
+ # Note: if you want to support parallel installation of several OOo versions,
+ # you cannot include this link directly in the package.
+ # For example, the Novell package marks this symlink as %ghost
+ # and updates it in %post and %postun
+ echo " skip already existing symlink $BINDIR/$1"
+ else
+ mkdir -p "$DESTDIR$BINDIR"
+ cat <<EOT >"$DESTDIR$BINDIR/$1"
+#!/bin/sh
+$INSTALLDIR/program/$2 $3 "\$@"
+EOT
+ chmod 755 "$DESTDIR$BINDIR/$1"
+ fi
+ # put into file list
+ test -f "$DESTDIR/$4" && echo "$BINDIR/$1" >>$DESTDIR/$4
+}
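+# For example (assuming INSTALLDIR=/usr/lib/libreoffice), creating the
+# localc wrapper below yields the two-line script
+#   #!/bin/sh
+#   /usr/lib/libreoffice/program/soffice --calc "$@"
+# and records $BINDIR/localc in the gid_Module_Brand_Prg_Calc file list.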
+
+create_man_link()
+{
+ echo "Install $MANDIR/man1/$1.1.gz"
+
+ mkdir -p $DESTDIR$MANDIR/man1
+ echo ".so man1/$2.1" >| $DESTDIR$MANDIR/man1/$1.1
+ gzip -f $DESTDIR$MANDIR/man1/$1.1
+ test -f "$DESTDIR/$3" && echo "$MANDIR/man1/$1.1.gz" >>"$DESTDIR/$3"
+}
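+# The generated page holds only a roff ".so" (source) request, so e.g.
+# "man localc" renders the shared libreoffice.1 page instead of a copy.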
+
+install_man()
+{
+ echo "Install $MANDIR/man1/$1.1.gz"
+
+ mkdir -p $DESTDIR$MANDIR/man1
+ cp "${SRCDIR?}"/sysui/desktop/man/$1.1 $DESTDIR$MANDIR/man1 || exit 1;
+ gzip -f $DESTDIR$MANDIR/man1/$1.1
+ test -f "$DESTDIR/$2" && echo "$MANDIR/man1/$1.1.gz" >>"$DESTDIR/$2"
+}
+
+
+add_wrapper()
+{
+ lowrapper_name="$1"
+ target_binary="$2"
+ target_option_1="$3"
+ used_man_page="$4"
+ desktop_file="$5"
+ file_list="$6"
+
+ # do we want compat oowrapper?
+ oowrapper_name=""
+ if test "$WITH_COMPAT_OOWRAPPERS" = 'TRUE' ; then
+ oowrapper_name=`echo "$lowrapper_name" | sed -e "s/^lo/oo/"`
+ # "oo" prefix only for wrappers stating with "lo" prefix
+ test "$oowrapper_name" = "$lowrapper_name" && oowrapper_name=
+ fi
+
+ # wrappers
+ create_wrapper "$lowrapper_name" "$target_binary" "$target_option_1" "$file_list"
+ test -n "$oowrapper_name" && create_wrapper "$oowrapper_name" "$target_binary" "$target_option_1" "$file_list"
+
+ # man pages
+ if test "$used_man_page" = "$lowrapper_name" ; then
+ # need to install the manual page
+ install_man "$lowrapper_name" "$file_list"
+ else
+ # just link the manual page
+ create_man_link "$lowrapper_name" "$used_man_page" "$file_list"
+ fi
+ test -n "$oowrapper_name" && create_man_link "$oowrapper_name" "$used_man_page" "$file_list"
+
+ # add desktop file to the right file list
+ test -n "$desktop_file" -a -f "$DESTDIR/$file_list" && echo "$PREFIXDIR/share/applications/$desktop_file" >>"$DESTDIR/$file_list"
+}
+
+# install desktop integration from plain packages
+sysui_temp=`mktemp -d -t distro-pack-desktop-integration-XXXXXX`
+cp -a workdir/CustomTarget/sysui/share/libreoffice/* "$sysui_temp"
+cp -a "${SRCDIR?}"/sysui/desktop/share/create_tree.sh "$sysui_temp"
+cd $sysui_temp
+# we want non-versioned stuff in the distro packages
+sed -i \
+ -e "s/\($INSTALLDIRNAME\)$PRODUCTVERSION_NODOT/\1/" \
+ -e "s/\($INSTALLDIRNAME\)$PRODUCTVERSION/\1/" \
+ -e "s/\($PRODUCTNAME\) $PRODUCTVERSION/\1/" \
+ *
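+# e.g. with INSTALLDIRNAME=libreoffice, PRODUCTNAME=LibreOffice and
+# PRODUCTVERSION=7.0 (illustrative values), "libreoffice70",
+# "libreoffice7.0" and "LibreOffice 7.0" all lose their version suffix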
+# run in a subshell so that PRODUCTVERSION etc. are not clobbered
+(
+ export OFFICE_PREFIX=$LIBDIR
+ export PREFIX=$INSTALLDIRNAME
+ export ICON_PREFIX=$INSTALLDIRNAME
+ export ICON_SOURCE_DIR="${SRCDIR?}"/sysui/desktop/icons
+ export PRODUCTVERSION=
+ export KDEMAINDIR=$PREFIXDIR
+ export PREFIXDIR=${PREFIXDIR}
+ export GNOMEDIR=$PREFIXDIR
+ export GNOME_MIME_THEME=hicolor
+ export APPDATA_SOURCE_DIR="${SRCDIR?}"/sysui/desktop/appstream-appdata
+ bash ./create_tree.sh
+)
+cd -
+rm -rf $sysui_temp
+
+# we do not want some stuff from the plain packages
+if test -d $DESTDIR/opt ; then
+ rm -f $DESTDIR/opt/$INSTALLDIRNAME
+ rmdir $DESTDIR/opt 2>/dev/null || true
+fi
+
+# we want non-versioned desktop files
+cd $DESTDIR/$INSTALLDIR/share/xdg
+# we want non-versioned stuff in the distro packages
+sed -i \
+ -e "s/\($INSTALLDIRNAME\)$PRODUCTVERSION_NODOT/\1/" \
+ -e "s/\($INSTALLDIRNAME\)$PRODUCTVERSION/\1/" \
+ -e "s/\($PRODUCTNAME\) $PRODUCTVERSION/\1/" \
+ *.desktop
+cd -
+
+# put the stuff installed by create_tree.sh into the right file lists
+# desktop files will be added by the corresponding add_wrapper command
+if test -f $DESTDIR/gid_Module_Root_Brand ; then
+ for dir in $PREFIXDIR/share/application-registry \
+ $PREFIXDIR/share/mime/packages \
+ $PREFIXDIR/share/mime-info \
+ $PREFIXDIR/share/icons ; do
+ find "$DESTDIR$dir" \( -type f -o -type l \) -printf "$dir/%P\n" >>$DESTDIR/gid_Module_Root_Brand
+ done
+fi
+
+# wrappers and man pages
+# FIXME: do not have desktop file and MIME icon for unopkg
+add_wrapper lobase soffice "--base" "libreoffice" "libreoffice-base.desktop" "gid_Module_Brand_Prg_Base"
+add_wrapper localc soffice "--calc" "libreoffice" "libreoffice-calc.desktop" "gid_Module_Brand_Prg_Calc"
+add_wrapper lodraw soffice "--draw" "libreoffice" "libreoffice-draw.desktop" "gid_Module_Brand_Prg_Draw"
+add_wrapper lomath soffice "--math" "libreoffice" "libreoffice-math.desktop" "gid_Module_Brand_Prg_Math"
+add_wrapper loimpress soffice "--impress" "libreoffice" "libreoffice-impress.desktop" "gid_Module_Brand_Prg_Impress"
+add_wrapper loweb soffice "--web" "libreoffice" "" "gid_Module_Brand_Prg_Wrt"
+add_wrapper lowriter soffice "--writer" "libreoffice" "libreoffice-writer.desktop" "gid_Module_Brand_Prg_Wrt"
+add_wrapper lofromtemplate soffice ".uno:NewDoc" "libreoffice" "" "gid_Module_Root_Brand"
+add_wrapper libreoffice soffice "" "libreoffice" "libreoffice-startcenter.desktop" "gid_Module_Root_Brand"
+add_wrapper loffice soffice "" "libreoffice" "" "gid_Module_Root_Brand"
+add_wrapper unopkg unopkg "" "unopkg" "" "gid_Module_Root_Brand"
+
+# there are two more desktop files for optional filters
+test -f $DESTDIR/gid_Module_Optional_Xsltfiltersamples && echo "$PREFIXDIR/share/applications/libreoffice-xsltfilter.desktop" >>"$DESTDIR/gid_Module_Optional_Xsltfiltersamples"
+
+# the $BINDIR/soffice symlink is needed by Java UNO components to find
+# the UNO installation via $PATH, see
+# http://udk.openoffice.org/common/man/spec/transparentofficecomponents.html
+# Note: if you want to support parallel installation of several OOo versions,
+# you cannot include this link directly in the package.
+# For example, the Novell package marks this symlink as %ghost
+# and updates it in %post and %postun
+ln -sf "$INSTALLDIR/program/soffice" "$DESTDIR$BINDIR/soffice"
+test -f $DESTDIR/gid_Module_Root_Brand && echo "$BINDIR/soffice" >>$DESTDIR/gid_Module_Root_Brand
+
+# create bash completion
+mkdir -p $DESTDIR/usr/share/bash-completion/completions
+"${SRCDIR?}"/bin/generate-bash-completion.py bin/bash-completion.in $DESTDIR/usr/share/bash-completion/completions/$INSTALLDIRNAME.sh
+test -f $DESTDIR/gid_Module_Root_Brand && echo "/usr/share/bash-completion/completions/$INSTALLDIRNAME.sh" >>$DESTDIR/gid_Module_Root_Brand
+if test "$WITH_COMPAT_OOWRAPPERS" = "TRUE" ; then
+ "${SRCDIR?}"/bin/generate-bash-completion.py --compat-oowrappers bin/bash-completion.in $DESTDIR/usr/share/bash-completion/completions/ooffice.sh
+ test -f $DESTDIR/gid_Module_Root_Brand && echo "/usr/share/bash-completion/completions/ooffice.sh" >>$DESTDIR/gid_Module_Root_Brand
+fi
+
+echo "Install $OOINSTDIR/basis$VERSION/program/java-set-classpath";
+mkdir -p $DESTDIR$INSTALLDIR/program
+sed -e "s|@INSTALLDIR@|$INSTALLDIR|g" "${SRCDIR?}"/bin/java-set-classpath.in >| "$DESTDIR$INSTALLDIR/program/java-set-classpath" || exit 1;
+chmod 755 "$DESTDIR$INSTALLDIR/program/java-set-classpath"
+test -f $DESTDIR/gid_Module_Root_Brand && echo "$INSTALLDIR/program/java-set-classpath" >>$DESTDIR/gid_Module_Root_Brand
+
+exit 0
diff --git a/bin/distro-install-file-lists b/bin/distro-install-file-lists
new file mode 100755
index 000000000..34db93e0a
--- /dev/null
+++ b/bin/distro-install-file-lists
@@ -0,0 +1,517 @@
+#!/bin/sh
+
+if test -z "${SRC_ROOT}"; then
+ echo "distro-install-clean-up: No environment set!"
+ exit 1
+fi
+
+BUILDDIR=`pwd`
+FILELISTSDIR="$BUILDDIR/file-lists"
+
+# remove an installed file and drop its entry from the file list
+# Params: file_list file_to_remove
+remove_file()
+{
+ rm -f "$DESTDIR/$2"
+ perl -pi -e "s|^$2$||" "$1"
+}
+
+# move one file from one list of files to a second one
+# Params: target_file_list source_file_list file_to_move
+mv_file_between_flists()
+{
+ if grep "^$3\$" $2 >/dev/null 2>&1 ; then
+ # $3 can be a regular expression
+ grep "^$3\$" $2 >>$1
+ perl -pi -e "s|^$3$||" $2
+ fi
+}
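+# The path argument is matched as a regular expression anchored to the whole
+# line, so a single call can move a whole family of entries, e.g. the
+# cli_.*.dll patterns used for the mono subpackage below.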
+# add the directories from the source file list to the target file list
+# when they are used by entries in the target list but are missing there
+# Params: target_file_list source_file_list
+add_used_directories()
+{
+ sort -u -r $2 | sed -n "s|^%dir \(.*\)\$|s%^\\\\(\1\\\\).*%\\\\1%p|p" >$2.pattern
+ sed -n -f $2.pattern $1 | sort -u | sed "s|^|%dir |" >>$1
+ rm $2.pattern
+ sort -u $1 >$1.unique
+ mv $1.unique $1
+}
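+# Worked example (hypothetical paths): a source-list entry
+#   %dir /usr/lib/libreoffice/help
+# is turned into the sed command
+#   s%^\(/usr/lib/libreoffice/help\).*%\1%p
+# which, run over the target list, prints that directory prefix for every
+# target entry below it; the prefixes are appended as "%dir" lines and the
+# target list is then de-duplicated.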
+
+# remove a duplicate entry that appears in two file lists
+# Params: filelist_with_original filelist_with_duplicate duplicated_path
+remove_duplicity_from_flists()
+{
+ if grep "$3" "$1" >/dev/null 2>&1 && \
+ grep "$3" "$2" >/dev/null 2>&1 ; then
+ perl -pi -e "s|^$3$||" $2
+ fi
+}
+
+# merge one file list into another one
+# Params: source_filelist dest_filelist
+merge_flists()
+{
+ if test -f "$1" ; then
+ cat "$1" >>"$2"
+ sort -u "$2" >"$2".sorted
+ mv "$2".sorted "$2"
+ fi
+}
+
+if ! test -f $DESTDIR/gid_Module_Root; then
+ echo "Error: Failed to generate package file lists";
+ echo " Have you defined DESTDIR?"
+ exit 1
+fi
+
+
+rm -rf "$FILELISTSDIR"
+mkdir -p "$FILELISTSDIR"
+
+cd $DESTDIR
+
+if test "z$OOO_VENDOR" != "zDebian" ; then
+
+ echo "Generating package file lists for $OOO_VENDOR..."
+
+ rm -f common_list.txt
+ for module in gid_Module_Root gid_Module_Root_Brand \
+ gid_Module_Root_Files_[0-9] \
+ gid_Module_Root_Hack \
+ gid_Module_Oo_Linguistic \
+ gid_Module_Root_Extension_Dictionary_* \
+ gid_Module_Root_Ure_Hidden ; do
+ merge_flists $module $FILELISTSDIR/common_list.txt
+ done
+
+ # it is not a real extension; it used to be in the main package...
+ merge_flists gid_Module_Optional_Extensions_Script_Provider_For_JS $FILELISTSDIR/common_list.txt
+
+ if test "$SPLIT_APP_MODULES" = "TRUE" ; then
+ merge_flists gid_Module_Prg_Base_Bin $FILELISTSDIR/base_list.txt
+ merge_flists gid_Module_Prg_Calc_Bin $FILELISTSDIR/calc_list.txt
+ merge_flists gid_Module_Prg_Draw_Bin $FILELISTSDIR/draw_list.txt
+ merge_flists gid_Module_Prg_Math_Bin $FILELISTSDIR/math_list.txt
+ merge_flists gid_Module_Prg_Impress_Bin $FILELISTSDIR/impress_list.txt
+ merge_flists gid_Module_Prg_Wrt_Bin $FILELISTSDIR/writer_list.txt
+ merge_flists gid_Module_Brand_Prg_Base $FILELISTSDIR/base_list.txt
+ merge_flists gid_Module_Brand_Prg_Calc $FILELISTSDIR/calc_list.txt
+ merge_flists gid_Module_Brand_Prg_Draw $FILELISTSDIR/draw_list.txt
+ merge_flists gid_Module_Brand_Prg_Math $FILELISTSDIR/math_list.txt
+ merge_flists gid_Module_Brand_Prg_Impress $FILELISTSDIR/impress_list.txt
+ merge_flists gid_Module_Brand_Prg_Wrt $FILELISTSDIR/writer_list.txt
+ merge_flists gid_Module_Reportbuilder $FILELISTSDIR/base_list.txt
+ merge_flists gid_Module_Pdfimport $FILELISTSDIR/draw_list.txt
+
+ # FIXME: small; low dependencies; why optional module?
+ merge_flists gid_Module_Optional_OGLTrans $FILELISTSDIR/impress_list.txt
+ else
+ merge_flists gid_Module_Prg_Base_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Prg_Calc_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Prg_Draw_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Prg_Math_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Prg_Impress_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Prg_Wrt_Bin $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Base $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Calc $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Draw $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Math $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Impress $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Brand_Prg_Wrt $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Reportbuilder $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Pdfimport $FILELISTSDIR/common_list.txt
+ # FIXME: small; low dependencies; why optional module?
+ merge_flists gid_Module_Optional_OGLTrans $FILELISTSDIR/common_list.txt
+ fi
+
+ if test "$SPLIT_APP_MODULES" = "TRUE" -a "$OOO_VENDOR" = "SUSE" ; then
+ # move the prebuilt icons into a hacky temporary package
+ # we want to repack them into a noarch package as soon as possible
+ # without the build dependency on the huge devel package
+ merge_flists gid_Module_Root_Files_Images $FILELISTSDIR/icon_themes_prebuilt.txt
+ else
+ merge_flists gid_Module_Root_Files_Images $FILELISTSDIR/common_list.txt
+ fi
+
+ if test "$SPLIT_OPT_FEATURES" = "TRUE" ; then
+ if test "z$OOO_VENDOR" = "zMandriva" ; then
+ merge_flists gid_Module_Optional_Grfflt $FILELISTSDIR/draw_list.txt
+ merge_flists gid_Module_Optional_Headless $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Pymailmerge $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Pyuno $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Script_Provider_For_Python $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Optional_Pyuno_LibreLogo $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Optional_Xsltfiltersamples $FILELISTSDIR/common_list.txt
+ else
+ merge_flists gid_Module_Optional_Grfflt $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Headless $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Pymailmerge $FILELISTSDIR/mailmerge_list.txt
+ merge_flists gid_Module_Pyuno $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Optional_Pyuno_LibreLogo $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Script_Provider_For_Python $FILELISTSDIR/pyuno_list.txt
+ merge_flists gid_Module_Optional_Xsltfiltersamples $FILELISTSDIR/filters_list.txt
+ fi
+ else
+ merge_flists gid_Module_Optional_Grfflt $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Headless $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Pymailmerge $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Pyuno $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Pyuno_LibreLogo $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Script_Provider_For_Python $FILELISTSDIR/common_list.txt
+ merge_flists gid_Module_Optional_Xsltfiltersamples $FILELISTSDIR/common_list.txt
+ fi
+
+ # lang packs
+ for lang in `echo $WITH_LANG_LIST | sed -e s/-/_/g`; do
+ lang_lists=
+ if test "$OOO_VENDOR" = "Mandriva" -o \( "$OOO_VENDOR" = "SUSE" -a "$SPLIT_APP_MODULES" = "TRUE" \) ; then
+ test -f gid_Module_Langpack_Basis_$lang && lang_lists="$lang_lists gid_Module_Langpack_Basis_$lang" || :
+ test -f gid_Module_Langpack_Brand_$lang && lang_lists="$lang_lists gid_Module_Langpack_Brand_$lang" || :
+ test -f gid_Module_Langpack_Resource_$lang && lang_lists="$lang_lists gid_Module_Langpack_Resource_$lang" || :
+ test -f gid_Module_Langpack_Impress_$lang && lang_lists="$lang_lists gid_Module_Langpack_Impress_$lang" || :
+ test -f gid_Module_Langpack_Draw_$lang && lang_lists="$lang_lists gid_Module_Langpack_Draw_$lang" || :
+ test -f gid_Module_Langpack_Math_$lang && lang_lists="$lang_lists gid_Module_Langpack_Math_$lang" || :
+ test -f gid_Module_Langpack_Calc_$lang && lang_lists="$lang_lists gid_Module_Langpack_Calc_$lang" || :
+ test -f gid_Module_Langpack_Base_$lang && lang_lists="$lang_lists gid_Module_Langpack_Base_$lang" || :
+ test -f gid_Module_Langpack_Writer_$lang && lang_lists="$lang_lists gid_Module_Langpack_Writer_$lang" || :
+ # Place helps on dedicated packages.
+ test -f gid_Module_Helppack_Help_$lang && sort -u gid_Module_Helppack_Help_$lang > $FILELISTSDIR/help_${lang}_list.txt || :
+ else
+ test -f gid_Module_Langpack_Basis_$lang && lang_lists="$lang_lists gid_Module_Langpack_Basis_$lang" || :
+ test -f gid_Module_Langpack_Brand_$lang && lang_lists="$lang_lists gid_Module_Langpack_Brand_$lang" || :
+ test -f gid_Module_Langpack_Resource_$lang && lang_lists="$lang_lists gid_Module_Langpack_Resource_$lang" || :
+ test -f gid_Module_Langpack_Impress_$lang && lang_lists="$lang_lists gid_Module_Langpack_Impress_$lang" || :
+ test -f gid_Module_Langpack_Draw_$lang && lang_lists="$lang_lists gid_Module_Langpack_Draw_$lang" || :
+ test -f gid_Module_Langpack_Math_$lang && lang_lists="$lang_lists gid_Module_Langpack_Math_$lang" || :
+ test -f gid_Module_Langpack_Calc_$lang && lang_lists="$lang_lists gid_Module_Langpack_Calc_$lang" || :
+ test -f gid_Module_Langpack_Base_$lang && lang_lists="$lang_lists gid_Module_Langpack_Base_$lang" || :
+ test -f gid_Module_Langpack_Writer_$lang && lang_lists="$lang_lists gid_Module_Langpack_Writer_$lang" || :
+ test -f gid_Module_Helppack_Help_$lang && lang_lists="$lang_lists gid_Module_Helppack_Help_$lang" || :
+ fi
+ if test -n "$lang_lists" ; then
+ # all files are installed below $INSTALLDIR/basis; we also want to own $INSTALLDIR
+ echo "%dir $INSTALLDIR" >$FILELISTSDIR/lang_${lang}_list.txt
+ cat $lang_lists | sort -u >>$FILELISTSDIR/lang_${lang}_list.txt
+ fi
+ # some help files are in _Langpack_{Writer,Impress,...}_<lang>
+ # move them from -l10n to -help
+ if test "$OOO_VENDOR" = "Mandriva" -o \( "$OOO_VENDOR" = "SUSE" -a "$SPLIT_APP_MODULES" = "TRUE" \) ; then
+ for lang in `echo $WITH_LANG_LIST | sed -e s/-/_/g`; do
+ test -f $FILELISTSDIR/help_${lang}_list.txt || continue;
+ mv_file_between_flists $FILELISTSDIR/help_${lang}_list.txt $FILELISTSDIR/lang_${lang}_list.txt $INSTALLDIR/help/.*
+ add_used_directories $FILELISTSDIR/help_${lang}_list.txt $FILELISTSDIR/lang_${lang}_list.txt
+ done
+ fi
+ done
+
+ if test -f $FILELISTSDIR/lang_en_US_list.txt -a "$OOO_VENDOR" = "SUSE" -a "$SPLIT_APP_MODULES" != "TRUE" ; then
+ cat $FILELISTSDIR/lang_en_US_list.txt >>$FILELISTSDIR/common_list.txt
+ rm $FILELISTSDIR/lang_en_US_list.txt
+ fi
+
+ if test -f gid_Module_Root_SDK ; then
+ cp gid_Module_Root_SDK $FILELISTSDIR/sdk_list.txt
+ fi
+
+ cd $FILELISTSDIR
+
+ # gnome subpackage
+ test -f $DESTDIR/gid_Module_Optional_Gnome && cp $DESTDIR/gid_Module_Optional_Gnome gnome_list.txt || :
+ mv_file_between_flists gnome_list.txt common_list.txt $INSTALLDIR/program/libevoab2.so
+ mv_file_between_flists gnome_list.txt common_list.txt $INSTALLDIR/program/libvclplug_gtk[0-9]*l..so
+ add_used_directories gnome_list.txt common_list.txt
+
+ # mono subpackage
+ mv_file_between_flists mono_list.txt common_list.txt $INSTALLDIR/program/cli_.*.dll
+ mv_file_between_flists mono_list.txt common_list.txt $INSTALLDIR/program/cli_.*.dll.config
+ mv_file_between_flists mono_list.txt common_list.txt $INSTALLDIR/program/policy.*.cli_.*.dll
+ mv_file_between_flists mono_list.txt common_list.txt $INSTALLDIR/program/libcli_.*.so
+ add_used_directories mono_list.txt common_list.txt
+ # add the files from GAC if it was installed
+ test -f mono_gac && cat mono_gac >>mono_list.txt
+
+ # postgresql subpackage
+ test -f $DESTDIR/gid_Module_Optional_PostgresqlSdbc && cp $DESTDIR/gid_Module_Optional_PostgresqlSdbc postgresql_list.txt || :
+
+ # mailmerge
+ if test "$SPLIT_OPT_FEATURES" = "TRUE" ; then
+ if test "z$OOO_VENDOR" = "zMandriva" ; then
+ flist=pyuno_list.txt
+ else
+ flist=mailmerge_list.txt
+ fi
+ mv_file_between_flists $flist common_list.txt $INSTALLDIR/program/mailmerge.py
+ add_used_directories $flist common_list.txt
+ fi
+
+ if test "z$OOO_VENDOR" = "zSUSE" ; then
+ # officebean subpackage
+ test -f $DESTDIR/gid_Module_Optional_Extensions_Script_Provider_For_BS && cp $DESTDIR/gid_Module_Optional_Extensions_Script_Provider_For_BS officebean_list.txt || :
+ mv_file_between_flists officebean_list.txt common_list.txt $INSTALLDIR/program/classes/officebean.jar
+ mv_file_between_flists officebean_list.txt common_list.txt $INSTALLDIR/program/libofficebean.so
+ add_used_directories officebean_list.txt common_list.txt
+ fi
+
+ if test -f sdk_list.txt ; then
+ # in this case we move all entries including directories
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "%dir $DOCDIR/sdk/docs.*"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIR/sdk/docs.*"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIR/sdk/examples"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIR/sdk/index.html"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "%dir $INSTALLDIR/sdk/examples.*"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$INSTALLDIR/sdk/docs"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$INSTALLDIR/sdk/examples.*"
+ mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$INSTALLDIR/sdk/index.html"
+ add_used_directories sdk_doc_list.txt sdk_list.txt
+ fi
+
+
+ # Mandriva packaging
+ if test "$OOO_VENDOR" = "Mandriva"; then
+ # Not used
+ remove_file common_list.txt $INSTALLDIR/share/gallery/htmltheme.orig
+ remove_file common_list.txt $INSTALLDIR/share/dict/ooo/dictionary.lst
+
+ # And these are in -draw package
+ mv_file_between_flists draw_list.txt common_list.txt $INSTALLDIR/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_drawgraphics_filters.xcu
+ mv_file_between_flists draw_list.txt common_list.txt $INSTALLDIR/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_drawgraphics_types.xcu
+
+ # And these are in -impress package
+ mv_file_between_flists impress_list.txt common_list.txt $INSTALLDIR/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_impressgraphics_filters.xcu
+ mv_file_between_flists impress_list.txt common_list.txt $INSTALLDIR/share/registry/modules/org/openoffice/TypeDetection/Types/fcfg_impressgraphics_types.xcu
+
+ # Split out the gallery
+ mv_file_between_flists gallery_list.txt common_list.txt "$INSTALLDIR/share/gallery.*"
+ test -r galleries.txt && cat galleries.txt >> gallery_list.txt
+
+ # Split out dtd-officedocument1.0
+ mv_file_between_flists dtd_list.txt common_list.txt "$INSTALLDIR/share/dtd/officedocument.*"
+
+ # Split out java stuff
+ mv_file_between_flists java_common_list.txt common_list.txt $INSTALLDIR/program/JREProperties.class
+ mv_file_between_flists java_common_list.txt common_list.txt "$INSTALLDIR/program/classes.*"
+ mv_file_between_flists java_common_list.txt common_list.txt $INSTALLDIR/program/libofficebean.so
+ mv_file_between_flists java_common_list.txt common_list.txt "$INSTALLDIR/share/Scripts/java.*"
+ mv_file_between_flists java_common_list.txt writer_list.txt $INSTALLDIR/program/classes/writer2latex.jar
+
+ # Move arch-dependent/dup files from common to core
+ for f in \
+ ".*\.so" \
+ ".*\.so\..*" \
+ "program/.*\.rdb" \
+ program/configimport.bin \
+ program/javaldx \
+ program/msfontextract \
+ program/oosplash.bin \
+ program/pagein \
+ program/pagein-calc \
+ program/pagein-common \
+ program/pagein-draw \
+ program/pagein-impress \
+ program/pagein-writer \
+ program/pkgchk.bin \
+ program/pluginapp.bin \
+ program/setofficelang.bin \
+ program/soffice.bin \
+ program/uno.bin \
+ program/unopkg.bin \
+ program/uri-encode
+ do
+ mv_file_between_flists core_list.txt common_list.txt "$INSTALLDIR/$f"
+ done
+
+ # themes are included in other packages
+ # don't use remove_file as we don't want them removed from the buildroot.
+ mv_file_between_flists /dev/null common_list.txt $INSTALLDIR/share/config/images_crystal.zip
+ mv_file_between_flists /dev/null common_list.txt $INSTALLDIR/share/config/images_hicontrast.zip
+ mv_file_between_flists /dev/null common_list.txt $INSTALLDIR/share/config/images.zip
+ fi
+
+ # remove known duplicates so that files are not packaged in two packages
+ # the Bulgarian fixes can be removed after the issue #54110 is fixed
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/arrowhd.soe
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/classic.sog
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/hatching.soh
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/modern.sog
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/palette.soc
+ remove_duplicity_from_flists common_list.txt lang_bg_list.txt $INSTALLDIR/presets/config/styles.sod
+ # the British fixes can be removed after the issue #54113 is fixed
+ remove_duplicity_from_flists common_list.txt lang_en-GB_list.txt $INSTALLDIR/presets/config/standard.sog
+
+else
+
+ echo "Creating package directories..."
+
+ test -d pkg && rm -r pkg || :
+
+ # Create package tree (needed by Debian's dpkg)
+ # create_package_directory <list_file> <directory_name>
+ create_package_directory()
+ {
+ listfile=$1
+ directory="$2"
+ perl -nl \
+ -e " if(/^%dir (.*)/)
+ {system('mkdir', '-p', '-m', '755', \"$directory\".\$1);}
+ else
+ {rename('./'.\$_, \"$directory\".\$_);}
+ " \
+ $listfile
+ }
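+ # The embedded perl walks the file list: "%dir <path>" entries become
+ # directories under the package tree, every other entry is rename()d
+ # (moved) out of the DESTDIR tree into it, so each file ends up in
+ # exactly one package directory.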
+
+ create_package_directory gid_Module_Root_Ure_Hidden pkg/ure
+
+ create_package_directory gid_Module_Root pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Brand pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_Images pkg/libreoffice-common
+ create_package_directory gid_Module_Oo_Linguistic pkg/libreoffice-common
+ create_package_directory gid_Module_Optional_Xsltfiltersamples pkg/libreoffice-common
+ create_package_directory gid_Module_Optional_Grfflt pkg/libreoffice-draw
+ create_package_directory gid_Module_Prg_Calc_Bin pkg/libreoffice-calc
+ create_package_directory gid_Module_Prg_Math_Bin pkg/libreoffice-math
+ create_package_directory gid_Module_Prg_Draw_Bin pkg/libreoffice-draw
+ create_package_directory gid_Module_Prg_Wrt_Bin pkg/libreoffice-writer
+ create_package_directory gid_Module_Prg_Impress_Bin pkg/libreoffice-impress
+ create_package_directory gid_Module_Prg_Base_Bin pkg/libreoffice-base
+ create_package_directory gid_Module_Brand_Prg_Calc pkg/libreoffice-calc
+ create_package_directory gid_Module_Brand_Prg_Math pkg/libreoffice-math
+ create_package_directory gid_Module_Brand_Prg_Draw pkg/libreoffice-draw
+ create_package_directory gid_Module_Brand_Prg_Wrt pkg/libreoffice-writer
+ create_package_directory gid_Module_Brand_Prg_Impress pkg/libreoffice-impress
+ create_package_directory gid_Module_Brand_Prg_Base pkg/libreoffice-base
+ create_package_directory gid_Module_Pyuno pkg/python-uno
+ create_package_directory gid_Module_Optional_Gnome pkg/libreoffice-gnome
+
+ create_package_directory gid_Module_Root_Files_2 pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_3 pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_4 pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_5 pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_6 pkg/libreoffice-common
+ create_package_directory gid_Module_Root_Files_7 pkg/libreoffice-common
+ if [ -e gid_Module_Optional_Pymailmerge ]; then
+ create_package_directory gid_Module_Optional_Pymailmerge pkg/libreoffice-emailmerge
+ else # post m26
+ mkdir -p pkg/libreoffice-emailmerge/$INSTALLDIR/program
+ mv pkg/libreoffice-common/$INSTALLDIR/program/mailmerge.py \
+ pkg/libreoffice-emailmerge/$INSTALLDIR/program/mailmerge.py
+ fi
+ create_package_directory gid_Module_Optional_OGLTrans pkg/libreoffice-ogltrans
+
+ create_package_directory gid_Module_Root_SDK pkg/libreoffice-dev
+
+ for l in `echo $WITH_LANG_LIST`; do
+ for p in Impress Draw Math Calc Base Writer; do
+ create_package_directory gid_Module_Langpack_${p}_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
+ done
+ create_package_directory gid_Module_Langpack_Basis_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
+ create_package_directory gid_Module_Langpack_Brand_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
+ create_package_directory gid_Module_Langpack_Resource_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
+ create_package_directory gid_Module_Helppack_Help_`echo $l | sed -e s/-/_/g` pkg/libreoffice-help-$l
+
+ # some help files are in _Langpack_{Writer,Impress,...}_<lang>
+ # move them from -l10n to -help
+ if [ "$l" = "en-US" ]; then d=en; else d=$l; fi
+ mv pkg/libreoffice-l10n-$l/$INSTALLDIR/help/$d/* \
+ pkg/libreoffice-help-$l/$INSTALLDIR/help/$d && \
+ rmdir pkg/libreoffice-l10n-$l/$INSTALLDIR/help/$d
+ done
+
+ # move_wrappers <directory_name> <name> [...]
+ move_wrappers()
+ {
+ directory=$1
+ shift
+ mkdir -m755 -p "$directory"/usr/bin
+ while test -n "$1"; do
+ mv usr/*bin/"$1$BINSUFFIX" "$directory"/usr/bin
+ shift
+ done
+ }
+ move_wrappers pkg/libreoffice-common soffice unopkg
+ if test "$COMPAT_OOWRAPPERS" = "YES" ; then
+ move_wrappers pkg/libreoffice-common ooffice oofromtemplate
+ move_wrappers pkg/libreoffice-base oobase
+ move_wrappers pkg/libreoffice-writer oowriter ooweb
+ move_wrappers pkg/libreoffice-calc oocalc
+ move_wrappers pkg/libreoffice-impress ooimpress
+ move_wrappers pkg/libreoffice-math oomath
+ move_wrappers pkg/libreoffice-draw oodraw
+ fi
+ move_wrappers pkg/libreoffice-common libreoffice lofromtemplate
+ move_wrappers pkg/libreoffice-base lobase
+ move_wrappers pkg/libreoffice-writer lowriter loweb
+ move_wrappers pkg/libreoffice-calc localc
+ move_wrappers pkg/libreoffice-impress loimpress
+ move_wrappers pkg/libreoffice-math lomath
+ move_wrappers pkg/libreoffice-draw lodraw
+
+ # Move all libraries, binaries, *.rdb from -common to -core
+ for d in $INSTALLDIR/program; do \
+ if [ ! -d $DESTDIR/pkg/libreoffice-core/$d ]; then \
+ mkdir -p $DESTDIR/pkg/libreoffice-core/$d; \
+ fi &&
+ ( cd pkg/libreoffice-common/$d
+ find -maxdepth 1 \
+ -regex '\./\(.*\.so.*\|.*\.bin\|pagein\|msfontextract\|.*\.rdb\|javaldx\|uri-encode\)' \
+ -exec mv {} $DESTDIR/pkg/libreoffice-core/$d \;
+ ); \
+ done
+
+ # install additional ooo-build scripts & misc stuff
+ mkdir -p pkg/libreoffice-common/usr/share/man/man1
+ if test "$COMPAT_OOWRAPPERS" = "YES" ; then
+ mv usr/share/man/man1/openoffice$BINSUFFIX.1 \
+ pkg/libreoffice-common/usr/share/man/man1
+ fi
+ mv usr/share/man/man1/libreoffice$BINSUFFIX.1 \
+ pkg/libreoffice-common/usr/share/man/man1
+ mkdir -p pkg/libreoffice-common/etc/bash_completion.d
+ if test "$COMPAT_OOWRAPPERS" = "YES" ; then
+ mv etc/bash_completion.d/ooffice$BINSUFFIX.sh \
+ pkg/libreoffice-common/etc/bash_completion.d
+ fi
+ mv etc/bash_completion.d/libreoffice$BINSUFFIX.sh \
+ pkg/libreoffice-common/etc/bash_completion.d
+ mv .$INSTALLDIR/program/java-set-classpath \
+ pkg/libreoffice-common/$INSTALLDIR/program
+ if echo $WITH_LANG_LIST | grep -q en-US; then
+ for i in forms/resume.ott officorr/project-proposal.ott; do \
+ mkdir -p pkg/libreoffice-common/$INSTALLDIR/share/template/en-US/`dirname $i`; \
+ mv .$INSTALLDIR/share/template/en-US/$i \
+ pkg/libreoffice-common/$INSTALLDIR/share/template/en-US/$i; \
+ done; \
+ fi
+ # Warn for any remaining files
+ find . -path './pkg' -prune -o -not -name 'gid_Module_*' -not -type d -exec echo "File not packaged: {}" \;
+fi
+
+# mark the config files
+RPM_CONFIG_FILE_TAGS=
+if test "$OOO_VENDOR" = "SUSE" -o "$OOO_VENDOR" = "RedHat"; then
+ RPM_CONFIG_FILE_TAGS="%config"
+elif test "$OOO_VENDOR" = "PLD" ; then
+ RPM_CONFIG_FILE_TAGS="%config(noreplace) %verify(not md5 size mtime)"
+fi
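+# e.g. under SUSE a file-list line "/usr/lib/libreoffice/program/sofficerc"
+# (hypothetical path) is rewritten below to
+# "%config /usr/lib/libreoffice/program/sofficerc"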
+
+if test "z$RPM_CONFIG_FILE_TAGS" != "z" ; then
+ cd $FILELISTSDIR
+ perl -pi -e "s|^($INSTALLDIR/help/.*\.xsl)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/help/.*\.css)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/program/[a-zA-Z0-9_\.]*rc)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/program/.*\.xsl)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/config/[a-zA-Z0-9]*rc)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/dict/ooo/.*\.lst)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/psprint/.*\.conf)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/registry/.*\.xcu)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/registry/.*\.properties)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/share/registry/.*\.xcs)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ -e "s|^($INSTALLDIR/user/config/.*\.so.)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
+ *_list.txt
+fi
+
+mkdir -p $FILELISTSDIR/orig
+mv -f $DESTDIR/gid_Module_* $FILELISTSDIR/orig
diff --git a/bin/distro-install-sdk b/bin/distro-install-sdk
new file mode 100755
index 000000000..e8cf28d61
--- /dev/null
+++ b/bin/distro-install-sdk
@@ -0,0 +1,84 @@
+#!/bin/sh
+
+if test -z "${SRC_ROOT}"; then
+ echo "distro-install-clean-up: No environment set!"
+ exit 1
+fi
+
+if test -d $DESTDIR$INSTALLDIR/sdk ; then
+
+ echo "SDK installation clean up"
+
+ # remove stray .orig files
+ find $DESTDIR$INSTALLDIR/sdk -name "*.orig" -exec rm -f {} \;
+
+ # move some SDK directories to the right place according to FHS
+ # note that the examples must stay in $DESTDIR$INSTALLDIR/sdk because they use
+ # relative paths to $DESTDIR$INSTALLDIR/sdk/settings, which do not work through
+ # a symlink
+ mkdir -p $DESTDIR$PREFIXDIR/include
+ mkdir -p $DESTDIR$DATADIR/idl
+ mkdir -p $DESTDIR$DATADIR/$INSTALLDIRNAME/sdk
+ mkdir -p $DESTDIR$DOCDIR/sdk
+ mv $DESTDIR$INSTALLDIR/sdk/include $DESTDIR$PREFIXDIR/include/$INSTALLDIRNAME
+ if [ -d $DESTDIR$INSTALLDIR/sdk/classes ]; then
+ mv $DESTDIR$INSTALLDIR/sdk/classes $DESTDIR$DATADIR/$INSTALLDIRNAME/sdk/classes
+ fi
+ mv $DESTDIR$INSTALLDIR/sdk/idl $DESTDIR$DATADIR/idl/$INSTALLDIRNAME
+ mv $DESTDIR$INSTALLDIR/sdk/docs $DESTDIR$DOCDIR/sdk
+ mv $DESTDIR$INSTALLDIR/sdk/share/readme $DESTDIR$DOCDIR/sdk/readme
+ mv $DESTDIR$INSTALLDIR/sdk/index.html $DESTDIR$DOCDIR/sdk
+
+ # compat symlinks
+ ln -sf $PREFIXDIR/include/$INSTALLDIRNAME $DESTDIR$INSTALLDIR/sdk/include
+ ln -sf $DATADIR/$INSTALLDIRNAME/sdk/classes $DESTDIR$INSTALLDIR/sdk/classes
+ ln -sf $DATADIR/idl/$INSTALLDIRNAME $DESTDIR$INSTALLDIR/sdk/idl
+ ln -sf $DOCDIR/sdk/docs $DESTDIR$INSTALLDIR/sdk/
+ ln -sf $DOCDIR/sdk/index.html $DESTDIR$INSTALLDIR/sdk/index.html
+ ln -sf $INSTALLDIR/sdk/examples $DESTDIR$DOCDIR/sdk/examples
+
+ # fix file list
+ sed -e "s|^\(%dir \)\?$INSTALLDIR/sdk/include|\1$PREFIXDIR/include/$INSTALLDIRNAME|" \
+ -e "s|^\(%dir \)\?$INSTALLDIR/sdk/classes|\1$DATADIR/$INSTALLDIRNAME/sdk/classes|" \
+ -e "s|^\(%dir \)\?$INSTALLDIR/sdk/idl|\1$DATADIR/idl/$INSTALLDIRNAME|" \
+ -e "s|^\(%dir \)\?$INSTALLDIR/sdk/docs|\1$DOCDIR/sdk/docs|" \
+ -e "s|^\(%dir \)\?$INSTALLDIR/sdk/share/readme|\1$DOCDIR/sdk/readme|" \
+ -e "s|^$INSTALLDIR/sdk/index.html$|$DOCDIR/sdk/index.html|" \
+ -e "s|^\(%dir \)\?$INSTALLDIR/sdk/share.*$||" \
+ -e "/\.orig$/D" \
+ -e "/^$/D" \
+ $DESTDIR/gid_Module_Root_SDK | sort -u \
+ >$DESTDIR/gid_Module_Root_SDK.new
+ mv $DESTDIR/gid_Module_Root_SDK.new $DESTDIR/gid_Module_Root_SDK
+ #
+ echo "%dir $DATADIR/$INSTALLDIRNAME/sdk" >>$DESTDIR/gid_Module_Root_SDK
+ echo "%dir $DATADIR/$INSTALLDIRNAME" >>$DESTDIR/gid_Module_Root_SDK
+ echo "%dir $DATADIR/idl" >>$DESTDIR/gid_Module_Root_SDK
+ echo "%dir $DOCDIR/sdk/docs" >>$DESTDIR/gid_Module_Root_SDK
+ echo "%dir $DOCDIR/sdk" >>$DESTDIR/gid_Module_Root_SDK
+ echo "%dir $DOCDIR" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$INSTALLDIR/sdk/include" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$INSTALLDIR/sdk/classes" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$INSTALLDIR/sdk/idl" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$INSTALLDIR/sdk/docs" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$INSTALLDIR/sdk/index.html" >>$DESTDIR/gid_Module_Root_SDK
+ echo "$DOCDIR/sdk/examples" >>$DESTDIR/gid_Module_Root_SDK
+
+ # generate default profiles
+ sed -e "s,@OO_SDK_NAME@,libreoffice${PRODUCTVERSION}_sdk," \
+ -e "s,@OO_SDK_HOME@,$INSTALLDIR/sdk," \
+ -e "s,@OFFICE_HOME@,$INSTALLDIR," \
+ -e "s,@OO_SDK_MAKE_HOME@,$(dirname $(command -v make))," \
+ -e "s,@OO_SDK_ZIP_HOME@,$(dirname $(command -v zip))," \
+ -e "s,@OO_SDK_CPP_HOME@,$(dirname $(command -v cpp))," \
+ -e "s,@OO_SDK_SED_HOME@,$(dirname $(command -v sed))," \
+ -e "s,@OO_SDK_CAT_HOME@,$(dirname $(command -v cat))," \
+ -e "s,@OO_SDK_JAVA_HOME@,$JAVA_HOME," \
+ -e "s,@OO_SDK_OUTPUT_DIR@,\$HOME," \
+ -e "s,@SDK_AUTO_DEPLOYMENT@,NO," \
+ $DESTDIR$INSTALLDIR/sdk/setsdkenv_unix.sh.in \
+ > $DESTDIR$INSTALLDIR/sdk/setsdkenv_unix.sh
+ chmod 755 $DESTDIR$INSTALLDIR/sdk/setsdkenv_unix.sh
+ echo $INSTALLDIR/sdk/setsdkenv_unix.sh >>$DESTDIR/gid_Module_Root_SDK
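+ # e.g. @OO_SDK_MAKE_HOME@ expands to the directory holding the build
+ # machine's make binary (typically /usr/bin), and @OO_SDK_JAVA_HOME@
+ # is taken from the JAVA_HOME environment variable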
+
+fi
diff --git a/bin/dump-poolitems-values.py b/bin/dump-poolitems-values.py
new file mode 100755
index 000000000..c2c5f357e
--- /dev/null
+++ b/bin/dump-poolitems-values.py
@@ -0,0 +1,91 @@
+#!/usr/bin/python2
+
+
+# Produce a name->constant dump of the poolitem values, to make interpreting things in the debugger easier
+#
+
+import subprocess
+import sys
+
+macroNameToValue = dict()
+macroNameToOriginalLine = dict()
+
+
+def extractMacroValue(macroValue):
+ if isinstance(macroValue, int):
+ return macroValue
+ elif macroValue.isdigit():
+ return int(macroValue)
+ elif macroValue[0:2] == "0x":
+ return int(macroValue, 16)
+ elif macroValue.find("+") != -1:
+ tokens = macroValue.split("+")
+ tokens1 = tokens[0].strip()
+ tokens2 = tokens[1].strip()
+ return extractMacroValue(tokens1) + extractMacroValue(tokens2)
+ elif macroValue.find("-") != -1:
+ tokens = macroValue.split("-")
+ tokens1 = tokens[0].strip()
+ tokens2 = tokens[1].strip()
+ return extractMacroValue(tokens1) - extractMacroValue(tokens2)
+ rv = extractMacroValue(macroNameToValue[macroValue])
+ macroNameToValue[macroValue] = rv
+ return rv
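+# Illustrative example (hypothetical macro): with
+# macroNameToValue == {"EE_ITEMS_START": "0"}, extractMacroValue("EE_ITEMS_START + 2")
+# resolves the name recursively and returns 2; resolved names are cached
+# back into macroNameToValue.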
+
+
+a = subprocess.Popen("cpp -E -dD -Iinclude/ include/editeng/eeitem.hxx", stdout=subprocess.PIPE, shell=True)
+
+with a.stdout as txt:
+ for line in txt:
+ line = line.strip()
+ originalLine = line
+ if not line.startswith("#define "): continue
+ # strip the '#define' off the front
+ idx1 = line.find(" ")
+ line = line[idx1 : len(line)].strip()
+ # extract the name
+ idx1 = line.find(" ")
+ if (idx1 == -1): continue
+ macroName = line[0 : idx1].strip()
+ line = line[idx1 : len(line)].strip()
+ # ignore internal stuff
+ if macroName.startswith("_"): continue
+ # strip any trailing comments
+ idx1 = line.find("//")
+ if (idx1 != -1):
+ line = line[0 : idx1].strip()
+ idx1 = line.find("/*")
+ if (idx1 != -1):
+ line = line[0 : idx1].strip()
+ if len(line) == 0: continue
+ # strip brackets
+ if line[0] == "(": line = line[1:]
+ if line[len(line)-1] == ")": line = line[0:len(line)-1]
+ macroValue = line.strip()
+ # ignore macros that #define strings, not interested in those
+ if (macroValue.find("\"") != -1): continue
+ # ignore the multiline macros
+ if (macroValue.find("\\") != -1): continue
+ # check for redefinitions
+ if macroName in macroNameToValue:
+ print "Redefinition:\n\t", macroNameToOriginalLine[macroName], "\n\t", originalLine
+ else:
+ macroNameToValue[macroName] = macroValue
+ macroNameToOriginalLine[macroName] = originalLine
+
+# decode the constants into their numeric values recursively
+macroValueToName = dict()
+for macroName in macroNameToValue:
+ macroValue = macroNameToValue[macroName]
+ try:
+ macroValue = extractMacroValue(macroName)
+ macroValueToName[macroValue] = macroName
+ except KeyError:
+ print "warning: could not decode macro ", macroName
+
+for macroValue in sorted(macroValueToName):
+ macroName = macroValueToName[macroValue]
+ print repr(macroNameToValue[macroName]).rjust(5), " ", macroName
+
+
+
diff --git a/bin/extract-tooltip.py b/bin/extract-tooltip.py
new file mode 100755
index 000000000..5397c718f
--- /dev/null
+++ b/bin/extract-tooltip.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+import sys
+import os
+import re
+import urlparse
+
+def usage():
+ message = """ usage: {program} inDir outDir
+inDir: directory containing .ht files
+outDir: target for the new files"""
+ print(message.format(program = os.path.basename(sys.argv[0])))
+
+def parseFile(filename):
+ file = open(filename, "r")
+ data = file.readlines()
+ data = [line.rstrip('\n') for line in data]
+
+ pairs = {}
+ regEx = re.compile("^(\S+)\s(\S+)\s(\S+)\s((?:\s*\S*)+)$")
+ old_line = None
+ for line in data:
+ if len(line) > 0:
+ if(old_line != None):
+ print filename
+ #print("failed to parse line")
+ #print(old_line)
+ line = old_line + line
+ print line
+ old_line = None
+ split_line = regEx.split(line)
+ #print(split_line)
+ #print(urlparse.unquote(split_line[2]))
+ #print(split_line[4])
+ if(old_line == None and split_line[4] == "" and split_line[3] != "0"):
+ print(line)
+ print(split_line)
+ old_line = line
+ else:
+ pairs[urlparse.unquote(split_line[2])] = split_line[4]
+ assert(len(split_line) == 6)
+ #print data
+ #print(pairs)
+ return pairs
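+# Each .ht line appears to carry four whitespace-separated fields; the second
+# is a percent-encoded key and the fourth the tooltip text. A line whose text
+# field is empty (with a non-zero third field) is treated as wrapped and is
+# glued to the following line before being re-parsed.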
+
+def parseFiles(dir):
+ strings = []
+ for files in os.listdir(dir):
+ if files.endswith(".ht"):
+ string = parseFile(os.path.join(dir,files))
+ print(files)
+ #print string
+ strings.append([files, string])
+ return strings
+
+def extractSharedEntries(strings):
+ first_dict = strings[0][1]
+ shared_dict = {}
+ #print(first_dict)
+ for key, value in first_dict.iteritems():
+ # check that the entry is the same in all dicts
+ is_in_all_dicts = True
+ for dict_file_pair in strings:
+ dict = dict_file_pair[1]
+ if key not in dict:
+ is_in_all_dicts = False
+ elif not dict[key] == value:
+ print("Element with different values")
+ print(key)
+ is_in_all_dicts = False
+ if is_in_all_dicts:
+ shared_dict[key] = value
+ #print(shared_dict)
+ for dict_file_pair in strings:
+ for key in shared_dict.iterkeys():
+ dict_file_pair[1].pop(key)
+
+ strings.append(["shared.ht", shared_dict])
+ return strings
+
+def writeOutFiles(dir, strings):
+ for string in strings:
+ file_name_base = string[0]
+ file_name_base = file_name_base.replace(".ht", ".properties")
+ file_name = os.path.join(dir, file_name_base)
+ file = open(file_name, "w")
+ for key, value in string[1].iteritems():
+ try:
+ file.write(key)
+ file.write("=")
+ file.write(value)
+ file.write("\n")
+ except UnicodeDecodeError:
+ print key
+ print value
+ file.close()
+
+def main (args):
+ if(len(args) != 3):
+ usage()
+ sys.exit(1)
+
+ strings = parseFiles(args[1])
+ new_strings = extractSharedEntries(strings)
+ writeOutFiles(args[2], new_strings)
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/bin/fake_pom.xml b/bin/fake_pom.xml
new file mode 100644
index 000000000..50599f3ab
--- /dev/null
+++ b/bin/fake_pom.xml
@@ -0,0 +1,6 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.libreoffice</groupId>
+ <artifactId>LibreOffice-Maven</artifactId>
+ <version>1</version>
+</project>
diff --git a/bin/find-can-be-private-symbols.classes.results b/bin/find-can-be-private-symbols.classes.results
new file mode 100644
index 000000000..05defda91
--- /dev/null
+++ b/bin/find-can-be-private-symbols.classes.results
@@ -0,0 +1,283 @@
+BitmapAlphaClampFilter
+BitmapColorQuantizationFilter
+BitmapConvolutionMatrixFilter
+BitmapEmbossGreyFilter
+BitmapMedianFilter
+BitmapMonochromeFilter
+BitmapMonochromeMatrixFilter
+BitmapMosaicFilter
+BitmapPalette
+BitmapPopArtFilter
+BitmapSepiaFilter
+BitmapSimpleColorQuantizationFilter
+BitmapSmoothenFilter
+BitmapSobelGreyFilter
+BitmapSolarizeFilter
+ConditionEditDropTarget
+CurrencyFormatter
+DdeGetPutItem
+DdeLink
+DdeService
+DdeTopic
+E3dCompoundObject
+EditUndo
+FmDesignModeChangedHint
+FocusListenerMultiplexer
+FontSelectPattern
+FontSubsetInfo
+FreetypeManager::IFSD_Equal
+GrBackendFormat
+GrBackendRenderTarget
+GrBackendTexture
+GrContext
+GrContextThreadSafeProxy
+GrContext_Base
+GrGLExtensions
+GrGLInterface
+GrGpuResource
+GrGpuResource::ProxyAccess
+GrImageContext
+GrVkExtensions
+GrVkSecondaryCBDrawContext
+HelpLinker
+Hunspell
+Hunzip
+ImplJobSetup
+IndexerPreProcessor
+KeyListenerMultiplexer
+MetaAction
+MetaGradientExAction
+MorkParser
+MouseListenerMultiplexer
+MouseMotionListenerMultiplexer
+MyThes
+OpenGLFramebuffer
+OpenGLZone
+PackedTextureAtlasManager
+PaintListenerMultiplexer
+PhysicalFontFamily
+ProcessData
+RenderList
+SalData
+SalDisplay
+SalInfoPrinter
+SalPrinter
+SalSystem
+SbClassModuleObject
+ScChart2DataProvider
+ScFormatEntry
+ScPaintHint
+ScPreviewShell
+ScRefreshTimer
+SdAbstractDialogFactory
+SdOptionsItem
+SdOptionsLayout
+SdOptionsMisc
+SdOptionsPrint
+SdOptionsSnap
+SdXImpressDocument
+SdrCaptionEscDirItem
+SdrCaptionTypeItem
+SdrEdgeNode1HorzDistItem
+SdrEdgeNode1VertDistItem
+SdrEdgeNode2HorzDistItem
+SdrEdgeNode2VertDistItem
+SdrEmbedObjectLink
+SdrGrafBlueItem
+SdrGrafContrastItem
+SdrGrafCropItem
+SdrGrafGamma100Item
+SdrGrafGreenItem
+SdrGrafLuminanceItem
+SdrGrafModeItem
+SdrGrafRedItem
+SdrGrafTransparenceItem
+SdrMeasureTextHPosItem
+SdrMeasureTextVPosItem
+SdrMeasureUnitItem
+SdrOnOffItem
+SdrPercentItem
+SdrSignedPercentItem
+SdrTextAniAmountItem
+SdrTextAniDelayItem
+SdrTextAniDirectionItem
+SdrTextAniKindItem
+SdrTextHorzAdjustItem
+SdrUndoInsertObj
+SdrUndoNewPage
+SdrUndoPageMasterPage
+SdrYesNoItem
+SfxNavigator
+SfxStyleSheetModifiedHint
+SfxViewFrameItem
+SfxVisibilityItem
+SpinListenerMultiplexer
+SvxGraphicObject
+SvxMetricField
+SvxPrintItem
+SvxRsidItem
+SvxShowText
+SvxTPage
+SwAnchoredObject
+SwAuthenticator
+SwColExample
+SwConnectionListener
+SwContrastGrf
+SwDrawFrameFormat
+SwDrawModeGrf
+SwExtraRedline
+SwFltRedline
+SwFormatEditInReadonly
+SwFormatEndAtTextEnd
+SwFormatFollowTextFlow
+SwFormatFootnoteAtTextEnd
+SwFormatLayoutSplit
+SwFormatNoBalancedColumns
+SwFormatRowSplit
+SwGammaGrf
+SwHeaderAndFooterEatSpacingItem
+SwLayoutFrame
+SwLuminanceGrf
+SwMirrorGrf
+SwNumRuleItem
+SwPagePreview
+SwRedlineExtraData
+SwRedlineExtraData_FormatColl
+SwShellCursor
+SwTableCellInfo::Impl
+SwTableCellRedline
+SwTableRowRedline
+SwTestItem
+SwWebDocShell
+SwWebView
+SwWrtShellItem
+SwXTextRange::Impl
+SwXTextTableCursor
+SyntaxHighlighter::Tokenizer
+SystemWindow::ImplData
+TBCExtraInfo
+TBCGeneralInfo
+TreeEditListenerMultiplexer
+TreeExpansionListenerMultiplexer
+TreeSelectionListenerMultiplexer
+VclAlignment
+VclBin
+VclBuilder::MenuAndId
+VclBuilder::ParserState
+VclBuilder::sortIntoBestTabTraversalOrder
+VclDrawingArea
+VclGrid
+VclWindowEvent
+WString
+WindowListenerMultiplexer
+X11SalObject
+X11SkiaSalGraphicsImpl
+XMLCellStyleExport
+XMLConstantsPropertyHandler
+XMLEnumPropertyHdl
+XMLShapeStyleContext
+basegfx::BColorModifier
+basegfx::MinimalSystemDependentDataManager
+canvas
+chart::PopupRequest
+comphelper::RefCountedMutex
+comphelper::service_decl::ServiceDecl::Factory
+connectivity::component::OComponentPreparedStatement
+connectivity::component::OComponentStatement
+connectivity::file::OBoolOperator
+connectivity::file::OOp_ISNOTNULL
+connectivity::file::OOp_ISNULL
+connectivity::file::OOp_LIKE
+connectivity::odbc::OConnection
+connectivity::odbc::ODBCDriver
+connectivity::odbc::ODatabaseMetaData
+connectivity::odbc::ODatabaseMetaDataResultSet
+connectivity::odbc::OPreparedStatement
+connectivity::odbc::OResultSet
+connectivity::odbc::OResultSetMetaData
+connectivity::odbc::OStatement
+connectivity::odbc::OStatement_BASE2
+connectivity::odbc::OStatement_Base
+connectivity::odbc::OTools
+connectivity::sdbcx::IObjectCollection
+connectivity::sdbcx::OGroup
+connectivity::sdbcx::OKey
+cppu::BootstrapException
+cppu::ClassData
+cppu::ClassDataBase
+dbtools::param::ParameterWrapper
+desktop::CallbackFlushHandler::CallbackData
+dp_misc::AbortChannel
+drawinglayer::animation::AnimationEntry
+drawinglayer::primitive2d::AnimatedSwitchPrimitive2D
+drawinglayer::primitive2d::ObjectAndViewTransformationDependentPrimitive2D
+drawinglayer::primitive2d::SdrFrameBorderData::SdrConnectStyleData
+drawinglayer::primitive2d::ViewTransformationDependentPrimitive2D
+drawinglayer::primitive3d
+drawinglayer::primitive3d::BasePrimitive3D
+drawinglayer::primitive3d::BufferedDecompositionPrimitive3D
+drawinglayer::primitive3d::GroupPrimitive3D
+drawinglayer::primitive3d::PolyPolygonMaterialPrimitive3D
+drawinglayer::primitive3d::PolygonHairlinePrimitive3D
+drawinglayer::primitive3d::SdrPrimitive3D
+formula::FormulaByteToken
+formula::FormulaDoubleToken
+formula::FormulaErrorToken
+formula::FormulaExternalToken
+formula::FormulaFAPToken
+formula::FormulaIndexToken
+formula::FormulaJumpToken
+formula::FormulaMissingToken
+formula::FormulaTokenIterator::Item
+formula::FormulaTypedDoubleToken
+formula::FormulaUnknownToken
+jvmaccess::UnoVirtualMachine::CreationException
+jvmaccess::VirtualMachine::AttachGuard::CreationException
+linguistic::PropertyChgHelper
+linguistic::PropertyHelper_Spell
+oox::IProgressBar
+oox::ole::AxContainerModelBase
+oox::ole::AxControlModelBase
+oox::ole::AxFontDataModel
+oox::ole::AxImageModel
+oox::ole::AxMorphDataModelBase
+oox::ole::AxMultiPageModel
+oox::ole::AxPageModel
+oox::ole::AxTabStripModel
+oox::ole::AxToggleButtonModel
+oox::ole::AxUserFormModel
+psp::PrintFontManager::PrintFont
+salhelper::ORealDynamicLoader
+sc::FormulaGroupInterpreter
+sd::DrawView
+sdr::SelectionController
+sdr::ViewSelection
+sdr::animation::primitiveAnimator
+sdr::contact::ObjectContactPainter
+sdr::properties::BaseProperties
+sfx2::sidebar::Panel
+sfx2::sidebar::SidebarToolBox
+sfx2::sidebar::TabBar::Item
+skjpeg_destination_mgr
+svt::MultiLineEditImplementation
+svt::MultiLineTextCell
+svx::CommonStyleManager
+svx::DialControl::DialControl_Impl
+svx::PropertyValueProvider
+sw::BroadcastingModify
+sw::UnoCursorHint
+ucbhelper::ActiveDataSink
+ucbhelper::InteractionAbort
+ucbhelper::InteractionApprove
+ucbhelper::InteractionDisapprove
+ucbhelper::InteractionRetry
+ucbhelper::InteractionSupplyAuthentication
+utl::Bootstrap::Impl
+utl::OInputStreamHelper
+vcl::ExtOutDevData
+vcl::test::OutputDeviceTestGradient
+void OpenGLTexture
+writerperfect::DirectoryStream::Impl
+xmloff::OControlBorderHandler
+xmloff::OFontWidthHandler
+xmloff::ORotationAngleHandler
diff --git a/bin/find-can-be-private-symbols.functions.results b/bin/find-can-be-private-symbols.functions.results
new file mode 100644
index 000000000..59453f8ea
--- /dev/null
+++ b/bin/find-can-be-private-symbols.functions.results
@@ -0,0 +1,39 @@
+GrGLAssembleInterface(void*, void (*(*)(void*, char const*))())
+ImplCallPreNotify(NotifyEvent&)
+ImplDestroyHelpWindow(bool)
+ImplFastBitmapConversion(BitmapBuffer&, BitmapBuffer const&, SalTwoRect const&)
+ImplGetSalSystem()
+ImplHideSplash()
+ImplSVMain()
+ScFilterCreate
+SdResId(char const*, int)
+Simplify(SkPath const&, SkPath*)
+clewErrorString
+component_getImplementationEnvironment
+createLink
+ddot
+dl_cairo_surface_set_device_scale(_cairo_surface*, double, double)
+endlu(SvStream&)
+explain
+fieldlen(char const*)
+getDataArea
+heuristics
+invert
+libreofficekit_hook
+libreofficekit_hook_2
+main
+mod
+privateSnippetExecutor
+reg_closeKey(void*)
+reg_closeRegistry(void*)
+reg_dumpRegistry(void*)
+reg_openKey(void*, _rtl_uString*, void**)
+reg_openRegistry(_rtl_uString*, void**)
+reg_openRootKey(void*, void**)
+report
+scale
+setLink
+set_column
+set_title
+spaces
+vcl_crc64
diff --git a/bin/find-can-be-private-symbols.py b/bin/find-can-be-private-symbols.py
new file mode 100755
index 000000000..f5ff83fd1
--- /dev/null
+++ b/bin/find-can-be-private-symbols.py
@@ -0,0 +1,226 @@
+#!/usr/bin/python2
+#
+# Find exported symbols that can be made non-exported.
+#
+# Note that (a) parsing these commands is a pain, since the output is quite irregular, and (b) I'm fumbling in the
+# dark here, trying to guess what exactly constitutes an "import" vs an "export" of a symbol; Linux linking
+# is rather complex.
+#
+# Takes about 5min to run on a decent machine.
+#
+# The standalone function analysis is reasonably reliable, but the class/method analysis is less so
+# (something to do with destructor thunks not showing up in my results?)
+#
+# Also, the class/method analysis will not catch problems like
+# 'dynamic_cast from 'Foo' with hidden type visibility to 'Bar' with default type visibility'
+# but loplugin:dyncastvisibility will do that for you
+#
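+# Run it from the toplevel of a completed build; it scans ./instdir and
+# ./workdir/LinkTarget/CppunitTest and writes its findings into the
+# bin/find-can-be-private-symbols.*.results files.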
+
+import subprocess
+import sys
+import re
+
+exported_symbols = set()
+imported_symbols = set()
+# standalone functions that are exported but not imported
+unused_function_exports = set()
+classes_with_exported_symbols = set()
+classes_with_imported_symbols = set()
+# all names that exist in the source code
+all_source_names = set()
+
+
+# collect all names (3+ word characters) that appear in the source headers
+subprocess_find_all_source_names = subprocess.Popen("git grep -oh -P '\\b\\w\\w\\w+\\b' -- '*.h*'", stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+with subprocess_find_all_source_names.stdout as txt:
+ for line in txt:
+ line = line.strip()
+ all_source_names.add(line)
+subprocess_find_all_source_names.terminate()
+
+subprocess_find = subprocess.Popen("find ./instdir -name *.so && find ./workdir/LinkTarget/CppunitTest -name *.so", stdout=subprocess.PIPE, shell=True)
+with subprocess_find.stdout as txt:
+ for line in txt:
+ sharedlib = line.strip()
+ # look for exported symbols
+ subprocess_nm = subprocess.Popen("nm -D " + sharedlib, stdout=subprocess.PIPE, shell=True)
+ with subprocess_nm.stdout as txt2:
+ # We are looking for lines something like:
+ # 0000000000036ed0 T flash_component_getFactory
+ line_regex = re.compile(r'^[0-9a-fA-F]+ T ')
+ for line2 in txt2:
+ line2 = line2.strip()
+ if line_regex.match(line2):
+ exported_symbols.add(line2.split(" ")[2])
+ # look for imported symbols
+ subprocess_objdump = subprocess.Popen("objdump -T " + sharedlib, stdout=subprocess.PIPE, shell=True)
+ with subprocess_objdump.stdout as txt2:
+ # ignore some header bumpf
+ txt2.readline()
+ txt2.readline()
+ txt2.readline()
+ txt2.readline()
+ # We are looking for lines something like:
+ # 0000000000000000 DF *UND* 0000000000000000 _ZN16FilterConfigItem10WriteInt32ERKN3rtl8OUStringEi
+ for line2 in txt2:
+ line2 = line2.strip()
+ tokens = line2.split(" ")
+ if len(tokens) < 8 or not(tokens[7].startswith("*UND*")): continue
+ sym = tokens[len(tokens)-1]
+ imported_symbols.add(sym)
+subprocess_find.terminate()
+
+# look for imported symbols in executables
+subprocess_find = subprocess.Popen("find ./instdir -name *.bin", stdout=subprocess.PIPE, shell=True)
+with subprocess_find.stdout as txt:
+ for line in txt:
+ executable = line.strip()
+ # look for exported symbols
+ subprocess_nm = subprocess.Popen("nm -D " + executable + " | grep -w U", stdout=subprocess.PIPE, shell=True)
+ with subprocess_nm.stdout as txt2:
+ # We are looking for lines something like:
+ # U sal_detail_deinitialize
+ for line2 in txt2:
+ line2 = line2.strip()
+ sym = line2.split(" ")[1]
+ imported_symbols.add(sym)
+subprocess_find.terminate()
+
+diff = exported_symbols - imported_symbols
+print("exported = " + str(len(exported_symbols)))
+print("imported = " + str(len(imported_symbols)))
+print("diff = " + str(len(diff)))
+
+for sym in exported_symbols:
+ filtered_sym = subprocess.check_output(["c++filt", sym]).strip()
+ if filtered_sym.startswith("non-virtual thunk to "): filtered_sym = filtered_sym[21:]
+ elif filtered_sym.startswith("virtual thunk to "): filtered_sym = filtered_sym[17:]
+ i = filtered_sym.find("(")
+ i = filtered_sym.rfind("::", 0, i)
+ if i != -1:
+ classname = filtered_sym[:i]
+ # find classes where all of the exported symbols are not imported
+ classes_with_exported_symbols.add(classname)
+ else:
+ func = filtered_sym
+ # find standalone functions which are exported but not imported
+ if not(sym in imported_symbols): unused_function_exports.add(func)
+
+for sym in imported_symbols:
+ filtered_sym = subprocess.check_output(["c++filt", sym]).strip()
+ if filtered_sym.startswith("non-virtual thunk to "): filtered_sym = filtered_sym[21:]
+ elif filtered_sym.startswith("virtual thunk to "): filtered_sym = filtered_sym[17:]
+ i = filtered_sym.find("(")
+ i = filtered_sym.rfind("::", 0, i)
+ if i != -1:
+ classname = filtered_sym[:i]
+ classes_with_imported_symbols.add(classname)
+
+def extractFunctionNameFromSignature(sym):
+ i = sym.find("(")
+ if i == -1: return sym
+ return sym[:i]
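+# e.g. extractFunctionNameFromSignature("SdResId(char const*, int)") returns
+# "SdResId"; symbols without an argument list pass through unchanged.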
+
+with open("bin/find-can-be-private-symbols.functions.results", "wt") as f:
+ for sym in sorted(unused_function_exports):
+ # Filter out most of the noise.
+ # No idea where these are coming from, but not our code.
+ if sym.startswith("CERT_"): continue
+ elif sym.startswith("DER_"): continue
+ elif sym.startswith("FORM_"): continue
+ elif sym.startswith("FPDF"): continue
+ elif sym.startswith("HASH_"): continue
+ elif sym.startswith("Hunspell_"): continue
+ elif sym.startswith("LL_"): continue
+ elif sym.startswith("LP_"): continue
+ elif sym.startswith("LU"): continue
+ elif sym.startswith("MIP"): continue
+ elif sym.startswith("MPS"): continue
+ elif sym.startswith("NSS"): continue
+ elif sym.startswith("NSC_"): continue
+ elif sym.startswith("PK11"): continue
+ elif sym.startswith("PL_"): continue
+ elif sym.startswith("PQ"): continue
+ elif sym.startswith("PBE_"): continue
+ elif sym.startswith("PORT_"): continue
+ elif sym.startswith("PRP_"): continue
+ elif sym.startswith("PR_"): continue
+ elif sym.startswith("PT_"): continue
+ elif sym.startswith("QS_"): continue
+ elif sym.startswith("REPORT_"): continue
+ elif sym.startswith("RSA_"): continue
+ elif sym.startswith("SEC"): continue
+ elif sym.startswith("SGN"): continue
+ elif sym.startswith("SOS"): continue
+ elif sym.startswith("SSL_"): continue
+ elif sym.startswith("VFY_"): continue
+ elif sym.startswith("_PR_"): continue
+ elif sym.startswith("_"): continue
+ elif sym.startswith("ber_"): continue
+ elif sym.startswith("bfp_"): continue
+ elif sym.startswith("ldap_"): continue
+ elif sym.startswith("ne_"): continue
+ elif sym.startswith("opj_"): continue
+ elif sym.startswith("pg_"): continue
+ elif sym.startswith("pq"): continue
+ elif sym.startswith("presolve_"): continue
+ elif sym.startswith("sqlite3_"): continue
+ # dynamically loaded
+ elif sym.endswith("get_implementation"): continue
+ elif sym.endswith("component_getFactory"): continue
+ elif sym == "CreateDialogFactory": continue
+ elif sym == "CreateUnoWrapper": continue
+ elif sym == "CreateWindow": continue
+ elif sym == "ExportDOC": continue
+ elif sym == "ExportPPT": continue
+ elif sym == "ExportRTF": continue
+ elif sym == "GetSaveWarningOfMSVBAStorage_ww8": continue
+ elif sym == "GetSpecialCharsForEdit": continue
+ elif sym.startswith("Import"): continue
+ elif sym.startswith("Java_com_sun_star_"): continue
+ elif sym.startswith("TestImport"): continue
+ elif sym.startswith("getAllCalendars_"): continue
+ elif sym.startswith("getAllCurrencies_"): continue
+ elif sym.startswith("getAllFormats"): continue
+ elif sym.startswith("getBreakIteratorRules_"): continue
+ elif sym.startswith("getCollationOptions_"): continue
+ elif sym.startswith("getCollatorImplementation_"): continue
+ elif sym.startswith("getContinuousNumberingLevels_"): continue
+ elif sym.startswith("getDateAcceptancePatterns_"): continue
+ elif sym.startswith("getForbiddenCharacters_"): continue
+ elif sym.startswith("getIndexAlgorithm_"): continue
+ elif sym.startswith("getLCInfo_"): continue
+ elif sym.startswith("getLocaleItem_"): continue
+ elif sym.startswith("getOutlineNumberingLevels_"): continue
+ elif sym.startswith("getReservedWords_"): continue
+ elif sym.startswith("getSTC_"): continue
+ elif sym.startswith("getSearchOptions_"): continue
+ elif sym.startswith("getTransliterations_"): continue
+ elif sym.startswith("getUnicodeScripts_"): continue
+ elif sym.startswith("lok_"): continue
+ # UDK API
+ elif sym.startswith("osl_"): continue
+ elif sym.startswith("rtl_"): continue
+ elif sym.startswith("typelib_"): continue
+ elif sym.startswith("typereg_"): continue
+ elif sym.startswith("uno_"): continue
+ # skip symbols that do not appear in our source code; they are not ours
+ if extractFunctionNameFromSignature(sym) not in all_source_names: continue
+ f.write(sym + "\n")
+
+with open("bin/find-can-be-private-symbols.classes.results", "wt") as f:
+ for sym in sorted(classes_with_exported_symbols - classes_with_imported_symbols):
+ # externals
+ if sym.startswith("libcdr"): continue
+ elif sym.startswith("libabw"): continue
+ elif sym.startswith("libebook"): continue
+ elif sym.startswith("libepubgen"): continue
+ elif sym.startswith("libfreehand"): continue
+ elif sym.startswith("libmspub"): continue
+ elif sym.startswith("libpagemaker"): continue
+ elif sym.startswith("libqxp"): continue
+ elif sym.startswith("libvisio"): continue
+ elif sym.startswith("libzmf"): continue
+ elif sym.startswith("lucene::"): continue
+ elif sym.startswith("Sk"): continue
+ f.write(sym + "\n")
diff --git a/bin/find-clang-format.py b/bin/find-clang-format.py
new file mode 100755
index 000000000..38c9aac10
--- /dev/null
+++ b/bin/find-clang-format.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from difflib import unified_diff
+from pathlib import Path
+from subprocess import PIPE, Popen
+
+BLACKLIST = Path("solenv/clang-format/blacklist")
+THRESHOLD = os.getenv("CLANG_THRESHOLD", 5)
+CLANG_BINARY = Path(os.getenv("CLANG_FORMAT", "/opt/lo/bin/clang-format"))
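+# Both knobs can be overridden from the environment, e.g.
+#   CLANG_THRESHOLD=10 CLANG_FORMAT=/usr/bin/clang-format ./bin/find-clang-format.py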
+
+
+def calculate_diff_size(diff):
+ additions, removals = 0, 0
+ # ignore the first 2 items in the sequence,
+ # which are the +++ and --- headers
+ for line in diff[2:]:
+ if line.startswith("+"):
+ additions += 1
+ elif line.startswith("-"):
+ removals += 1
+ return max((additions, removals))
+
+
+def format_stream(path, *extra_args):
+ process = Popen(
+ [CLANG_BINARY, *extra_args], stdout=PIPE, stderr=PIPE, stdin=PIPE,
+ )
+ stdout, stderr = process.communicate(input=path.read_bytes())
+ if stderr:
+ print("<FAIL>", str(path))
+ print(stderr.decode())
+ print("<FAIL>")
+ exit(1)
+ stdout = stdout.decode()
+ return stdout
+
+
+def main(*args):
+ if not CLANG_BINARY.exists():
+ print("Couldn't find clang-format on {!s}".format(CLANG_BINARY))
+ exit(1)
+
+ for path in BLACKLIST.read_text().splitlines():
+ path = Path(path)
+ if not path.exists():
+ continue
+
+ original = path.read_text()
+ new = format_stream(path, *args)
+ originalsize = len(original.splitlines())
+ diff = unified_diff(original.splitlines(), new.splitlines(), n=0)
+ diffsize = calculate_diff_size(tuple(diff))
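+ # report files whose formatting delta is at most THRESHOLD percent of
+ # their size, i.e. presumably cheap to clean up and drop from the blacklist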
+ if diffsize <= (originalsize * 5) // 100:
+ print(path, "(size: {}/{})".format(diffsize, originalsize))
+
+
+if __name__ == "__main__":
+ import sys
+
+ main(*sys.argv[1:])
diff --git a/bin/find-duplicated-files.py b/bin/find-duplicated-files.py
new file mode 100755
index 000000000..08d90076c
--- /dev/null
+++ b/bin/find-duplicated-files.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import os
+import sys
+
+from filecmp import dircmp
+
+"""
+This script compares two directories and lists the files which are the same in both directories.
+Intended to find duplicate icons among icon themes.
+
+Adopted from the example at https://docs.python.org/3.5/library/filecmp.html
+
+Usage: ./bin/find-duplicated-files.py dir1 dir2
+"""
+
+def print_same_files(dcmp):
+ for name in dcmp.same_files:
+ print("%s found in %s and %s" % (name, dcmp.left, dcmp.right))
+ for sub_dcmp in dcmp.subdirs.values():
+ print_same_files(sub_dcmp)
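+
+# Note: dircmp's same_files relies on filecmp's shallow comparison, so files
+# whose os.stat signatures (size and mtime) match are assumed identical
+# without their contents being read.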
+
+if len(sys.argv) != 3:
+ print("Usage: %s dir1 dir2" % sys.argv[0])
+ sys.exit(1)
+
+dir1 = sys.argv[1]
+dir2 = sys.argv[2]
+
+if not os.path.isdir(dir1) or not os.path.isdir(dir2):
+ print("Arguments must be directories!")
+ sys.exit(1)
+
+dcmp = dircmp(dir1, dir2)
+print_same_files(dcmp)
+
diff --git a/bin/find-duplicated-sids.py b/bin/find-duplicated-sids.py
new file mode 100755
index 000000000..8f5e4ff92
--- /dev/null
+++ b/bin/find-duplicated-sids.py
@@ -0,0 +1,92 @@
+#!/usr/bin/python
+
+
+# Scan .hrc files for conflicting SID constants
+#
+# This is not as easy as it sounds because some of the constants depend on other constants whose names do not start with SID_
+#
+
+import subprocess
+
+sidNameToValue = dict()
+sidNameToOriginalLine = dict()
+
+
+def extractSidValue(sidValue):
+ if isinstance(sidValue, int):
+ return sidValue
+ if sidValue.isdigit():
+ return int(sidValue)
+ if sidValue[0:2] == "0x":
+ return int(sidValue, 16)
+ if sidValue.find("+") != -1:
+ tokens = sidValue.split("+")
+ tokens1 = tokens[0].strip()
+ tokens2 = tokens[1].strip()
+ return extractSidValue(tokens1) + extractSidValue(tokens2)
+ rv = extractSidValue(sidNameToValue[sidValue])
+ sidNameToValue[sidValue] = rv
+ return rv
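+
+# e.g. a value recorded as "SID_SFX_START + 100" resolves SID_SFX_START
+# through sidNameToValue recursively before adding 100; plain decimal and
+# 0x hex literals terminate the recursion.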
+
+
+#a = subprocess.Popen("git grep -P '#define\s+(SID_|SC_|DETECTIVE_|DRAWTEXTBAR_|DRAW_BAR_|RID_|OBJBAR_FORMAT_|TAB_POPUP_|DATA_MENU_|EXTRA_MENU_|FORMAT_MENU_|INSERT_MENU_|VIEW_MENU_|EDIT_MENU_|FILE_MENU_|SC_FUNCTION_|RC_)'", stdout=subprocess.PIPE, shell=True)
+a = subprocess.Popen("git grep -Pn '#define\s+(\S+)' -- *.hrc", stdout=subprocess.PIPE, shell=True)
+
+with a.stdout as txt:
+ for line in txt:
+ originalLine = line.strip()
+ # strip the '#define' off the front
+ idx1 = line.find(" ")
+ line = line[idx1 : len(line)].strip()
+ # extract the name
+ idx1 = line.find(" ")
+ if (idx1 == -1): continue
+ sidName = line[0 : idx1].strip()
+ line = line[idx1 : len(line)].strip()
+ # strip any trailing comments
+ idx1 = line.find("//")
+ if (idx1 != -1):
+ line = line[0 : idx1].strip()
+ idx1 = line.find("/*")
+ if (idx1 != -1):
+ line = line[0 : idx1].strip()
+ if len(line) == 0: continue
+ # strip brackets
+ if line[0] == "(": line = line[1:]
+ if line[len(line)-1] == ")": line = line[0:len(line)-1]
+ sidTextValue = line.strip()
+ # ignore the #define strings
+ if (sidTextValue.find("\"") != -1): continue
+ # ignore the multiline macros
+ if (sidTextValue.find("\\") != -1): continue
+ # check for redefinitions
+ if sidName[0:4] == "SID_" and sidNameToValue.has_key(sidName):
+ print "Redefinition:\n\t", sidNameToOriginalLine[sidName], "\n\t" , originalLine
+ else:
+ sidNameToValue[sidName] = sidTextValue
+ sidNameToOriginalLine[sidName] = originalLine
+
+ # decode the constants into their numeric values recursively
+ sidNamesToIgnore = set()
+ for sidName in sidNameToValue:
+ sidTextValue = sidNameToValue[sidName]
+ try:
+ sidValueNum = extractSidValue(sidTextValue)
+ sidNameToValue[sidName] = sidValueNum
+ except KeyError:
+ sidNamesToIgnore.add(sidName)
+
+ # check for conflicts
+ sidValueToName = dict()
+ for sidName in sidNameToValue:
+ if sidName in sidNamesToIgnore: continue
+ if sidName[0:4] != "SID_": continue
+ sidValue = sidNameToValue[sidName]
+ if sidValue in sidValueToName:
+ print "conflict:\n\t", sidNameToOriginalLine[sidName], "\n\t", sidNameToOriginalLine[sidValueToName[sidValue]]
+ else:
+ sidValueToName[sidValue] = sidName
+
+
+
diff --git a/bin/find-files-not-referenced-by-makefile.py b/bin/find-files-not-referenced-by-makefile.py
new file mode 100755
index 000000000..70232ed1c
--- /dev/null
+++ b/bin/find-files-not-referenced-by-makefile.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python2
+
+# Look for CXX files that are not referenced by any makefile
+
+import subprocess
+
+sourceFiles = set()
+
+a = subprocess.Popen("git ls-files", stdout=subprocess.PIPE, shell=True)
+with a.stdout as txt:
+ for filename in txt:
+ if filename.find(".cxx") != -1 \
+ and filename.find("precompiled") == -1 \
+ and filename.find("/workben") == -1 \
+ and not filename.startswith("odk/examples/") \
+ and not filename.startswith("bridges/") \
+ and not filename.startswith("compilerplugins/") \
+ and filename.find("/qa/") == -1 \
+ and filename.find("/test/") == -1 \
+ and not filename.startswith("testtools/") \
+ and not filename.startswith("vcl/") \
+ and not filename.startswith("cli_ure/"):
+ sourceFiles.add(filename.strip())
+
+a = subprocess.Popen("git ls-files */*.mk", stdout=subprocess.PIPE, shell=True)
+with a.stdout as txt:
+ for makefilename in txt:
+ makefilename = makefilename.strip()
+ with open(makefilename, "r") as makefile:
+ moduleName = makefilename[:makefilename.find("/")]
+ state = 0
+ for line in makefile:
+ line = line.strip()
+ if state == 0 and "_add_exception_objects" in line:
+ state = 1
+ elif state == 1 and line != "))":
+ s = line.replace("\\","").replace(")", "").strip()
+ # parse line like: $(call gb_Helper_optional,AVMEDIA,svx/source/sidebar/media/MediaPlaybackPanel) \
+ idx = s.rfind(",")
+ if idx != -1:
+ s = s[idx+1:].strip()
+ sourceFiles.discard(s + ".cxx")
+ elif state == 1:
+ state = 0
+
+
+
+
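+# The two-state scan above gathers every source file listed between a
+# "*_add_exception_objects" call and its closing "))", stripping any
+# "$(call gb_Helper_optional,...)" wrapper, and removes those entries from
+# the candidate set; whatever remains is referenced by no makefile.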
+print "files not listed in makefile"
+print "----------------------------"
+for x in sorted(sourceFiles):
+ print x
diff --git a/bin/find-german-comments b/bin/find-german-comments
new file mode 100755
index 000000000..bb76941c1
--- /dev/null
+++ b/bin/find-german-comments
@@ -0,0 +1,402 @@
+#!/usr/bin/env python3
+########################################################################
+#
+# Copyright (c) 2010 Jonas Jensen, Miklos Vajna
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+########################################################################
+
+
+import sys
+import re
+import subprocess
+import os
+import argparse
+import string
+
+class Parser:
+ """
+ This parser extracts comments from source files, tries to guess
+ their language and then prints out the German ones.
+ """
+ def __init__(self):
+ self.strip = string.punctuation + " \n"
+ self.text_cat = self.start_text_cat()
+ parser = argparse.ArgumentParser(description='Searches for German comments in cxx/hxx source files inside a given root directory recursively.')
+ parser.add_argument("-f", "--filenames-only", action="store_true",
+ help="Only print the filenames of files containing German comments")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="Turn on verbose mode (print only positives progress to stderr)")
+ parser.add_argument("-l", "--line-numbers", action="store_true",
+ help="Prints the filenames and line numbers only.")
+ parser.add_argument("-L", "--line-numbers-pos", action="store_true",
+ help="Prints the filenames and line numbers only (if positive).")
+ parser.add_argument("-t", "--threshold", action="store", default=0, type=int,
+ help="When used with '--line-numbers', only bothers outputting comment info if there are more than X number of flagged comments. Useful for weeding out false positives.")
+ parser.add_argument("directory", nargs='?', default='.', type=str, help='Give a directory to search in')
+ self.args = parser.parse_args()
+ self.check_source_files(self.args.directory)
+
+ def get_comments(self, filename):
+ """
+ Extracts the source code comments.
+ """
+ linenum = 0
+ if self.args.verbose:
+ print("processing file '%s'...\n" % filename)
+ sock = open(filename)
+ # add an empty line to trigger the output of collected oneliner
+ # comment group
+ lines = sock.readlines() + ["\n"]
+ sock.close()
+
+ in_comment = False
+ buf = []
+ count = 1
+ for i in lines:
+ if "//" in i and not in_comment:
+ # a new //-style comment is appended to the previous one
+ # when only whitespace precedes the // mark: grouping
+ # consecutive one-liners gives the language guesser
+ # longer, more reliable input
+ if not len(re.sub("(.*)//.*", r"\1", i).strip(self.strip)):
+ s = re.sub(".*// ?", "", i).strip(self.strip)
+ if len(s):
+ buf.append(s)
+ else:
+ # otherwise it's an independent //-style comment in the next line
+ yield (count, "\n ".join(buf))
+ buf = [re.sub(".*// ?", "", i.strip(self.strip))]
+ elif "//" not in i and not in_comment and len(buf) > 0:
+ # first normal line after a // block
+ yield (count, "\n ".join(buf))
+ buf = []
+ elif "/*" in i and "*/" not in i and not in_comment:
+ # start of a real multiline comment
+ in_comment = True
+ linenum = count
+ s = re.sub(".*/\*+", "", i.strip(self.strip))
+ if len(s):
+ buf.append(s.strip(self.strip))
+ elif in_comment and not "*/" in i:
+ # in multiline comment
+ s = re.sub("^( |\|)*\*?", "", i)
+ if len(s.strip(self.strip)):
+ buf.append(s.strip(self.strip))
+ elif "*/" in i and in_comment:
+ # end of multiline comment
+ in_comment = False
+ s = re.sub(r"\*+/.*", "", i.strip(self.strip))
+ if len(s):
+ buf.append(s)
+ yield (count, "\n ".join(buf))
+ buf = []
+ elif "/*" in i and "*/" in i:
+ # c-style oneliner comment
+ yield (count, re.sub(".*/\*(.*)\*/.*", r"\1", i).strip(self.strip))
+ count += 1
+
+ def start_text_cat(self):
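+ # Start text_cat once and keep it running as a pipe server: -s makes
+ # it classify each input line separately and -d points it at the
+ # bundled language models; we chdir next to this script first so the
+ # relative paths resolve.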
+ cwd = os.getcwd()
+ # change to our directory
+ os.chdir(os.path.split(os.path.abspath(sys.argv[0]))[0])
+ sock = subprocess.Popen(["text_cat/text_cat", "-s", "-d", "text_cat/LM"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ os.chdir(cwd)
+ return sock
+
+ def get_lang(self, s):
+ """ the output is 'german' or 'english' or 'german or english'. When
+ unsure, just don't warn, there are strings where you just can't
+ determine the results reliably, like '#110680#' """
+
+ self.text_cat.stdin.write(bytes(s, 'utf-8'))
+ self.text_cat.stdin.write(bytes("\n", 'utf-8'))
+ self.text_cat.stdin.flush()
+ lang = self.text_cat.stdout.readline().strip()
+ return lang
+
+ def is_german(self, s):
+ """
+ determines if a string is German or not
+ """
+ # for short strings we can't do reliable recognition, so skip
+ # short strings and less than 4 words
+ s = s.replace('\n', ' ')
+ if len(s) < 32 or len(s.split()) < 4:
+ return False
+ return self.get_lang(s) == b"german"
+
+ def check_file(self, path):
+ """
+ checks each comment in a file
+ """
+ def tab_calc(path):
+ START = 40 # default width of 10 tabs
+ if len(path) >= START:
+ return 1
+ diff = START - len(path)
+ if diff % 4 != 0:
+ padding = 1
+ else:
+ padding = 0
+ return (diff // 4) + padding
+
+ if self.args.line_numbers or self.args.line_numbers_pos:
+ TABS = "\t"*10
+ path_linenums = []
+ for linenum, s in self.get_comments(path):
+ if self.is_german(s):
+ path_linenums.append(linenum)
+ valid = len(path_linenums) > int(self.args.threshold)
+ if self.args.line_numbers:
+ print("%s ... %s positives -- %s\n" % (path, str(len(path_linenums)), str(valid)))
+ if valid:
+ if self.args.line_numbers_pos:
+ print("%s ... %s positives\n" % (path, str(len(path_linenums))))
+ return
+ if len(path) + (len(path_linenums)*4) > 75:
+ print("%s:\n" % path)
+ while path_linenums:
+ i = 0
+ numline = []
+ while i < 10:
+ try:
+ numline.append(path_linenums[0])
+ path_linenums.remove(path_linenums[0])
+ except IndexError:
+ i = 10
+ i += 1
+ numline = [str(i) for i in numline]
+ print("%s%s" % (TABS, ",".join(numline)))
+ else:
+ if self.args.line_numbers:
+ path_linenums = [str(i) for i in path_linenums]
+ print("%s:%s%s" % (path, "\t"*int(tab_calc(path)), ",".join(path_linenums)))
+
+ elif not self.args.filenames_only:
+ for linenum, s in self.get_comments(path):
+ if self.is_german(s):
+ print("%s:%s: %s" % (path, linenum, s))
+ else:
+ fnames = set([])
+ for linenum, s in self.get_comments(path):
+ if self.is_german(s):
+ # Make sure we print each filename only once
+ fnames.add(path)
+ # Print the filenames
+ for f in fnames:
+ print(f)
+
+ def first_elem(self, path):
+ """
+ Returns the root directory in our repo of a given path, so we can check against the whitelist.
+ """
+ lastElem = os.path.dirname(path)
+ done = False
+ while not done:
+ nextElem = os.path.split(lastElem)[0]
+ if nextElem != '':
+ lastElem = nextElem
+ else:
+ done = True
+ return lastElem
+
+ def check_source_files(self, directory):
+ """
+ checks each _tracked_ file in a directory recursively
+ """
+
+ # top-level project directory -> use whitelist.
+ globalscan = False
+ if os.path.exists(directory + "/.git/config"):
+ globalscan = True
+
+ # Change into the given dir, so "git ls-tree" does work.
+ os.chdir(directory)
+
+ sock = os.popen(r"git ls-tree -r HEAD --name-only |egrep '\.(c|cc|cpp|cxx|h|hxx|mm)$'")
+ lines = sock.readlines()
+ sock.close()
+
+ # Helps to speedup a global scan
+ directory_whitelist = {
+ "ure" : 1,
+ "ios" : 1,
+ "bean" : 1,
+ "apple_remote" : 1,
+ "UnoControls" : 1,
+ "accessibility" : 1,
+ "android" : 1,
+ "animations" : 1,
+ "avmedia" : 1,
+ "basctl" : 1,
+ "basegfx" : 1,
+ "basic" : 1,
+ "binaryurp" : 1,
+ "bridges" : 1,
+ "canvas" : 1,
+ "chart2" : 1,
+ "cli_ure" : 1,
+ "codemaker" : 1,
+ "comphelper" : 1,
+ "compilerplugins" : 1,
+ "configmgr" : 1,
+ "connectivity" : 1,
+ "cppcanvas" : 1,
+ "cppu" : 1,
+ "cppuhelper" : 1,
+ "cpputools" : 1,
+ "cui" : 1,
+ "dbaccess" : 1,
+ "desktop" : 1,
+ "drawinglayer" : 1,
+ "dtrans" : 1,
+ "editeng" : 1,
+ "embeddedobj" : 1,
+ "embedserv" : 1,
+ "eventattacher" : 1,
+ "extensions" : 1,
+ "external" : 1,
+ "filter" : 1,
+ "forms" : 1,
+ "formula" : 1,
+ "fpicker" : 1,
+ "framework" : 1,
+ "helpcompiler" : 1,
+ "hwpfilter" : 1,
+ "i18npool" : 1,
+ "i18nlangtag" : 1,
+ "i18nutil" : 1,
+ "idl" : 1,
+ "idlc" : 1,
+ "include" : 1,
+ "io" : 1,
+ "javaunohelper" : 1,
+ "jvmaccess" : 1,
+ "jvmfwk" : 1,
+ "jurt" : 1,
+ "l10ntools" : 1,
+ "libreofficekit" : 1,
+ "lingucomponent" : 1,
+ "linguistic" : 1,
+ "lotuswordpro" : 1,
+ "mysqlc" : 1,
+ "o3tl" : 1,
+ "odk" : 1,
+ "officecfg" : 1,
+ "onlineupdate" : 1,
+ "opencl" : 1,
+ "oox" : 1,
+ "package" : 1,
+ "postprocess" : 1,
+ "pyuno" : 1,
+ "registry" : 1,
+ "remotebridges" : 1,
+ "reportdesign" : 1,
+ "rsc" : 1,
+ "sal" : 1,
+ "salhelper" : 1,
+ "sax" : 1,
+ "sc" : 1,
+ "scaddins" : 1,
+ "sccomp" : 1,
+ "scripting" : 1,
+ "sd" : 1,
+ "sdext" : 1,
+ "sfx2" : 1,
+ "shell" : 1,
+ "setup_native" : 1,
+ "sot" : 1,
+ "slideshow" : 1,
+ "smoketest" : 1,
+ "solenv" : 1,
+ "soltools" : 1,
+ "starmath" : 1,
+ "stoc" : 1,
+ "store" : 1,
+ "svgio" : 1,
+ "svl" : 1,
+ "svtools" : 1,
+ "svx" : 1,
+ "sw" : 1,
+ "test" : 1,
+ "testtools" : 1,
+ "toolkit" : 1,
+ "tools" : 1,
+ "touch" : 1,
+ "ucb" : 1,
+ "ucbhelper" : 1,
+ "unodevtools" : 1,
+ "unotest" : 1,
+ "unoidl" : 1,
+ "unotools" : 1,
+ "unoxml" : 1,
+ "uui" : 1,
+ "vbahelper" : 1,
+ "vcl" : 1,
+ "winaccessibility" : 1,
+ "writerfilter" : 1,
+ "writerperfect" : 1,
+ "xmlhelp" : 1,
+ "xmloff" : 1,
+ "xmlreader" : 1,
+ "xmlsecurity" : 1,
+ "xmlscript" : 1,
+ }
+
+ if globalscan:
+ print("Scanning all files globally:")
+ elif directory == '.':
+ print("Scanning all files in our current directory:")
+ else:
+ print("Scanning all files in", directory + ":")
+
+ num_checked = 0
+
+ for path in lines:
+ baseDir = self.first_elem(path)
+ # If we have a globalscan use the whitelist.
+ if globalscan:
+ if baseDir not in directory_whitelist:
+ sys.stderr.write("\n - Error: Missing path %s -\n\n" % baseDir)
+ sys.exit(1)
+ elif directory_whitelist[baseDir] == 0:
+ self.check_file(path.strip())
+ num_checked = num_checked + 1
+ elif directory_whitelist[baseDir] == 1:
+ sys.stderr.write("Skipping whitelisted directory %s\n" % baseDir)
+ directory_whitelist[baseDir] = 2
+ elif not globalscan:
+ self.check_file(path.strip())
+ num_checked = num_checked + 1
+
+ print("Scanned %s files\n" % num_checked)
+
+try:
+ Parser()
+except KeyboardInterrupt:
+ print("Interrupted!")
+ sys.exit(0)
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/find-headers-to-move-inside-modules.py b/bin/find-headers-to-move-inside-modules.py
new file mode 100755
index 000000000..313e30762
--- /dev/null
+++ b/bin/find-headers-to-move-inside-modules.py
@@ -0,0 +1,52 @@
+#!/usr/bin/python2
+
+# Look for headers inside include/ that can be moved into their respective modules.
+
+import subprocess
+
+headerSet = set()
+a = subprocess.Popen("git ls-files include/", stdout=subprocess.PIPE, shell=True)
+with a.stdout as txt:
+ for line in txt:
+ header = line[8:].strip()
+ if "README" in header: continue
+ if header == "version.hrc": continue
+ if header == "svtools/editimplementation.hxx": continue
+ # ignore URE headers
+ if header.startswith("IwyuFilter_include.yaml"): continue
+ if header.startswith("cppu/"): continue
+ if header.startswith("cppuhelper/"): continue
+ if header.startswith("osl/"): continue
+ if header.startswith("sal/"): continue
+ if header.startswith("salhelper/"): continue
+ if header.startswith("uno/"): continue
+ # these are direct copies of mozilla code
+ if header.startswith("onlineupdate/mozilla/"): continue
+ headerSet.add(header)
+
+headerSetUnused = headerSet.copy()
+headerSetOnlyInOwnModule = headerSet.copy()
+a = subprocess.Popen("git grep '^#include <'", stdout=subprocess.PIPE, shell=True)
+with a.stdout as txt:
+ for line in txt:
+ idx1 = line.find("#include <")
+ include = line[idx1 + 10 : len(line)-2]
+ headerSetUnused.discard(include)
+ #
+ idx1 = line.find("/")
+ includedFromModule = line[0 : idx1]
+ idx1 = include.find("/")
+ module = include[0 : idx1]
+ if module != includedFromModule:
+ headerSetOnlyInOwnModule.discard(include)
+
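+# After the pass above, headerSetUnused holds the include/ headers nothing
+# includes at all, and headerSetOnlyInOwnModule those only ever included
+# from their own module, i.e. candidates for moving inside that module.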
+print "completely unused"
+print "----------------------------"
+for x in sorted(headerSetUnused):
+ print x
+print ""
+print "only used in own module"
+print "----------------------------"
+for x in sorted(headerSetOnlyInOwnModule):
+ print x
diff --git a/bin/find-mergedlib-can-be-private.classes.results b/bin/find-mergedlib-can-be-private.classes.results
new file mode 100644
index 000000000..b75fe09fb
--- /dev/null
+++ b/bin/find-mergedlib-can-be-private.classes.results
@@ -0,0 +1,426 @@
+Accelerator
+B3dCamera
+B3dTransformationSet
+B3dViewport
+BitmapMedianFilter
+BitmapMonochromeMatrixFilter
+BitmapPalette
+BitmapPopArtFilter
+BitmapSobelGreyFilter
+CalendarField
+CodeCompleteDataCache
+ConvertChar
+CurrencyBox
+CurrencyFormatter
+CursorWrapper
+DateBox
+DateField
+DdeGetPutItem
+DdeHotLink
+DdeItem
+DdeLink
+DdeService
+DdeTopic
+DockingAreaWindow
+DockingManager
+DoubleCurrencyField
+DoubleNumericField
+E3dCompoundObject
+E3dDefaultAttributes
+E3dExtrudeObj
+E3dPolygonObj
+EditAbstractDialogFactory
+EditUndo
+EditUndoManager
+EditViewCallbacks
+EnhancedCustomShape
+EnhancedCustomShape::FunctionParser
+FileChangedChecker
+FilterMatch
+FixedBitmap
+FixedHyperlink
+FmDesignModeChangedHint
+FmFormObj
+FmFormPageImpl
+FmXFormShell
+FontSelectPattern
+FontSizeNames
+FontSubsetInfo
+FormattedField::StaticFormatter
+FormatterBase
+FreetypeManager::IFSD_Equal
+GroupBox
+HelpLinker
+Hunspell
+Hunzip
+ImageControl
+ImplJobSetup
+IndexerPreProcessor
+IntroWindow
+ListenerMultiplexerBase
+LongCurrencyBox
+LongCurrencyField
+LongCurrencyFormatter
+MenuToggleButton
+MetaAction
+MetaArcAction
+MetaBmpAction
+MetaBmpExScalePartAction
+MetaBmpScaleAction
+MetaBmpScalePartAction
+MetaChordAction
+MetaClipRegionAction
+MetaEllipseAction
+MetaFontAction
+MetaGradientExAction
+MetaISectRectClipRegionAction
+MetaISectRegionClipRegionAction
+MetaLayoutModeAction
+MetaMapModeAction
+MetaMoveClipRegionAction
+MetaOverlineColorAction
+MetaPieAction
+MetaPixelAction
+MetaPolyLineAction
+MetaPolyPolygonAction
+MetaPolygonAction
+MetaPopAction
+MetaPushAction
+MetaRasterOpAction
+MetaRefPointAction
+MetaRoundRectAction
+MetaTextAlignAction
+MetaTextArrayAction
+MetaTextColorAction
+MetaTextFillColorAction
+MetaTextLanguageAction
+MetaTextLineColorAction
+MetaWallpaperAction
+MetafileAccessor
+ModuleSizeExceeded
+MoreButton
+MultiListBox
+MyThes
+NativeNumberWrapper
+NfCurrencyEntry
+NotebookbarTabControlBase
+NotifyEvent
+NumericBox
+NumericField
+OFlowChainedText
+OpenFileDropTargetListener
+OpenGLFramebuffer
+OpenGLZone
+PackedTextureAtlasManager
+PatternBox
+PatternField
+PatternFormatter
+PhysicalFontFamily
+PlaceEditDialog
+PrinterOptions
+ProgressBar
+QueueInfo
+RenderList
+SalData
+SalInfoPrinter
+SalPrinter
+SalSystem
+SbClassModuleObject
+SbMethod
+SbxInfo
+SbxObject
+SdrEmbedObjectLink
+SdrGrafBlueItem
+SdrGrafContrastItem
+SdrGrafCropItem
+SdrGrafGamma100Item
+SdrGrafGreenItem
+SdrGrafLuminanceItem
+SdrGrafModeItem
+SdrGrafRedItem
+SdrGrafTransparenceItem
+SdrMeasureField
+SdrMeasureObj
+SdrSignedPercentItem
+SdrTextFixedCellHeightItem
+SdrUndoPageMasterPage
+SelectionListenerMultiplexer
+SfxAllEnumItem
+SfxDocumentInfoItem
+SfxItemSetHint
+SfxMetricItem
+SfxNavigator
+SfxObjectItem
+SfxStatusListener
+SfxStyleSheetModifiedHint
+SfxTemplatePanelControl
+SfxViewFrameItem
+SgaObject
+SkiaPackedSurfaceAtlasManager
+SkiaZone
+SpinButton
+SpinListenerMultiplexer
+SvParser<HtmlTokenId>::TokenStackType
+SvParser<int>::TokenStackType
+SvtBasePrintOptions
+SvtPrintFileOptions
+SvtPrinterOptions
+Svx3DCloseBackItem
+Svx3DCloseFrontItem
+Svx3DNormalsKindItem
+Svx3DPerspectiveItem
+Svx3DShadeModeItem
+Svx3DTextureKindItem
+Svx3DTextureModeItem
+Svx3DTextureProjectionXItem
+Svx3DTextureProjectionYItem
+SvxCurrencyToolBoxControl
+SvxEditSourceAdapter
+SvxPasswordDialog
+SvxPropertySetInfoPool
+SvxTPage
+SvxTextRotateItem
+SyntaxHighlighter::Tokenizer
+SystemWindow::ImplData
+TETextDataObject
+TabDialog
+TabPaneValue
+TextListenerMultiplexer
+Throbber
+TimeBox
+TimeFormatter
+UFlowChainedText
+UnoEditControl
+UnoWrapperBase
+VCLXDateField
+VCLXEdit
+VCLXMenuBar
+VCLXSpinField
+ValueSet
+VclAlignment
+VclBin
+VclBuilder::MenuAndId
+VclBuilder::ParserState
+VclBuilder::sortIntoBestTabTraversalOrder
+VclDrawingArea
+VclGrid
+VclWindowEvent
+XMLDashStyleExport
+XMLDashStyleImport
+XMLGradientStyleExport
+XMLGradientStyleImport
+XMLHatchStyleExport
+XMLHatchStyleImport
+XMLImageStyle
+XMLMarkerStyleExport
+XMLMarkerStyleImport
+XMLShapeStyleContext
+accessibility::AccessibleEditableTextPara
+accessibility::AccessibleParaManager
+avmedia::MediaControlBase
+avmedia::MediaFloater
+basegfx::B2DTrapezoid
+basegfx::B3DPoint
+basegfx::B3DTuple
+basegfx::BColorModifier
+basegfx::BColorModifierStack
+basegfx::BColorModifier_RGBLuminanceContrast
+basegfx::BColorModifier_black_and_white
+basegfx::BColorModifier_gamma
+basegfx::BColorModifier_gray
+basegfx::BColorModifier_invert
+basegfx::BColorModifier_replace
+basegfx::MinimalSystemDependentDataManager
+basegfx::ODFGradientInfo
+basegfx::RasterConverter3D
+basegfx::SystemDependentDataHolder
+basegfx::SystemDependentDataManager
+basegfx::triangulator
+canvas
+char& std::vector<char, std::allocator<char> >
+comphelper::IndexAccessIterator
+comphelper::OAccessibleSelectionHelper
+comphelper::OEventListenerHelper
+comphelper::OPropertySetAggregationHelper
+comphelper::OPropertyStateHelper
+comphelper::OSequenceOutputStream
+comphelper::OStatefulPropertySet
+comphelper::OStreamSection
+comphelper::OWeakEventListenerAdapter
+comphelper::OWrappedAccessibleChildrenManager
+comphelper::PropertyBag
+comphelper::StillReadWriteInteraction
+comphelper::service_decl::ServiceDecl::Factory
+connectivity::sdbcx::IObjectCollection
+connectivity::sdbcx::OGroup
+connectivity::sdbcx::OKey
+dbtools::param::ParameterWrapper
+desktop::CallbackFlushHandler::CallbackData
+dp_misc::AbortChannel
+drawinglayer::animation::AnimationEntry
+drawinglayer::animation::AnimationEntryFixed
+drawinglayer::animation::AnimationEntryLinear
+drawinglayer::animation::AnimationEntryList
+drawinglayer::animation::AnimationEntryLoop
+drawinglayer::attribute::FillGraphicAttribute
+drawinglayer::attribute::FillHatchAttribute
+drawinglayer::attribute::LineStartEndAttribute
+drawinglayer::attribute::MaterialAttribute3D
+drawinglayer::attribute::Sdr3DLightAttribute
+drawinglayer::attribute::Sdr3DObjectAttribute
+drawinglayer::attribute::SdrFillGraphicAttribute
+drawinglayer::attribute::SdrGlowAttribute
+drawinglayer::attribute::SdrLightingAttribute
+drawinglayer::attribute::SdrLineAttribute
+drawinglayer::attribute::SdrLineFillShadowAttribute3D
+drawinglayer::attribute::SdrLineStartEndAttribute
+drawinglayer::attribute::SdrSceneAttribute
+drawinglayer::attribute::SdrShadowAttribute
+drawinglayer::primitive2d::AnimatedBlinkPrimitive2D
+drawinglayer::primitive2d::AnimatedInterpolatePrimitive2D
+drawinglayer::primitive2d::AnimatedSwitchPrimitive2D
+drawinglayer::primitive2d::BackgroundColorPrimitive2D
+drawinglayer::primitive2d::ControlPrimitive2D
+drawinglayer::primitive2d::DiscreteShadowPrimitive2D
+drawinglayer::primitive2d::Embedded3DPrimitive2D
+drawinglayer::primitive2d::FillGraphicPrimitive2D
+drawinglayer::primitive2d::GlowPrimitive2D
+drawinglayer::primitive2d::GridPrimitive2D
+drawinglayer::primitive2d::GroupPrimitive2D
+drawinglayer::primitive2d::HelplinePrimitive2D
+drawinglayer::primitive2d::InvertPrimitive2D
+drawinglayer::primitive2d::MarkerArrayPrimitive2D
+drawinglayer::primitive2d::MediaPrimitive2D
+drawinglayer::primitive2d::MetafilePrimitive2D
+drawinglayer::primitive2d::ObjectAndViewTransformationDependentPrimitive2D
+drawinglayer::primitive2d::PagePreviewPrimitive2D
+drawinglayer::primitive2d::PolyPolygonGradientPrimitive2D
+drawinglayer::primitive2d::PolyPolygonGraphicPrimitive2D
+drawinglayer::primitive2d::PolyPolygonHatchPrimitive2D
+drawinglayer::primitive2d::PolyPolygonSelectionPrimitive2D
+drawinglayer::primitive2d::PolygonMarkerPrimitive2D
+drawinglayer::primitive2d::PolygonStrokeArrowPrimitive2D
+drawinglayer::primitive2d::ScenePrimitive2D
+drawinglayer::primitive2d::SdrFrameBorderData::SdrConnectStyleData
+drawinglayer::primitive2d::ShadowPrimitive2D
+drawinglayer::primitive2d::TextHierarchyBlockPrimitive2D
+drawinglayer::primitive2d::TextHierarchyBulletPrimitive2D
+drawinglayer::primitive2d::TextHierarchyEditPrimitive2D
+drawinglayer::primitive2d::TextHierarchyFieldPrimitive2D
+drawinglayer::primitive2d::TextHierarchyLinePrimitive2D
+drawinglayer::primitive2d::TextHierarchyParagraphPrimitive2D
+drawinglayer::primitive2d::ViewTransformationDependentPrimitive2D
+drawinglayer::primitive2d::ViewportDependentPrimitive2D
+drawinglayer::primitive2d::WrongSpellPrimitive2D
+drawinglayer::primitive3d
+drawinglayer::primitive3d::BasePrimitive3D
+drawinglayer::primitive3d::BufferedDecompositionPrimitive3D
+drawinglayer::primitive3d::GroupPrimitive3D
+drawinglayer::primitive3d::ModifiedColorPrimitive3D
+drawinglayer::primitive3d::PolyPolygonMaterialPrimitive3D
+drawinglayer::primitive3d::PolygonHairlinePrimitive3D
+drawinglayer::primitive3d::Primitive3DContainer
+drawinglayer::primitive3d::SdrCubePrimitive3D
+drawinglayer::primitive3d::SdrExtrudePrimitive3D
+drawinglayer::primitive3d::SdrLathePrimitive3D
+drawinglayer::primitive3d::SdrPolyPolygonPrimitive3D
+drawinglayer::primitive3d::SdrPrimitive3D
+drawinglayer::primitive3d::SdrSpherePrimitive3D
+drawinglayer::primitive3d::TransformPrimitive3D
+drawinglayer::processor2d::HitTestProcessor2D
+drawinglayer::processor3d::BaseProcessor3D
+drawinglayer::processor3d::CutFindProcessor
+emfio::WinMtfFontStyle
+formula::FormulaTokenIterator::Item
+framework
+framework::AddonMenuManager
+framework::AddonsOptions
+framework::ConfigAccess
+framework::ConstItemContainer
+framework::Converter
+framework::DispatchHelper
+framework::FrameListAnalyzer
+framework::HandlerCache
+framework::InteractionRequest
+framework::MenuAttributes
+framework::MenuConfiguration
+framework::RequestFilterSelect
+framework::RootItemContainer
+framework::SaxNamespaceFilter
+framework::StatusBarConfiguration
+framework::ToolBoxConfiguration
+framework::TransactionManager
+framework::UIConfigurationImporterOOo1x
+legacy::CntInt32
+legacy::SfxBool
+legacy::SvxAdjust
+legacy::SvxBox
+legacy::SvxBrush
+legacy::SvxColor
+legacy::SvxCrossedOut
+legacy::SvxFont
+legacy::SvxFontHeight
+legacy::SvxHorJustify
+legacy::SvxLine
+legacy::SvxPosture
+legacy::SvxTextLine
+legacy::SvxVerJustify
+legacy::SvxWeight
+psp::PrintFontManager::PrintFont
+sdr::SelectionController
+sdr::ViewSelection
+sdr::animation::primitiveAnimator
+sdr::contact::ObjectContactPainter
+sdr::properties::BaseProperties
+sdr::table::Cell
+sfx2::sidebar::Panel
+sfx2::sidebar::SidebarToolBox
+sfx2::sidebar::TabBar
+sfx2::sidebar::TabBar::Item
+svt
+svt::AddressBookSourceDialog
+svt::GenericToolboxController
+svt::GraphicAccess
+svt::IEditImplementation
+svt::MultiLineEditImplementation
+svt::MultiLineTextCell
+svt::OStringTransferable
+svt::PopupMenuControllerBase
+svt::SpinCellController
+svt::TemplateFolderCache
+svtools::AsynchronLink
+svtools::ToolbarPopup
+svx::DialControl::DialControl_Impl
+svx::IPropertyValueProvider
+svx::sidebar::GalleryControl
+svxform
+svxform::DataNavigatorManager
+svxform::NavigatorFrameManager
+svxform::OLocalExchange
+svxform::OLocalExchangeHelper
+svxform::OSQLParserClient
+toolkitform
+ucbhelper::ActiveDataSink
+ucbhelper::InteractionApprove
+ucbhelper::InteractionDisapprove
+ucbhelper::InteractionSupplyAuthentication
+ucbhelper::InterceptedInteraction
+ucbhelper::SimpleNameClashResolveRequest
+utl::Bootstrap::Impl
+utl::DefaultFontConfiguration
+utl::DesktopTerminationObserver
+utl::FontSubstConfiguration
+utl::OConfigurationValueContainer
+utl::ProgressHandlerWrap
+utl::ZipPackageHelper
+utl::detail::Options
+vcl::AccessibleFactoryAccess
+vcl::EventPoster
+vcl::ExtOutDevData
+vcl::ILibreOfficeKitNotifier
+vcl::ORoadmap
+vcl::OldStylePrintAdaptor
+vcl::PDFWriter::AnyWidget
+vcl::test::OutputDeviceTestGradient
+void OpenGLTexture
+wchar_t& std::vector<wchar_t, std::allocator<wchar_t> >
diff --git a/bin/find-mergedlib-can-be-private.py b/bin/find-mergedlib-can-be-private.py
new file mode 100755
index 000000000..ac9d96712
--- /dev/null
+++ b/bin/find-mergedlib-can-be-private.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python2
+#
+# Generate a custom linker script/map file for the --enable-mergedlibs merged
+# library, which reduces startup time and enables further optimisations with
+# --enable-lto, because 60% or more of the symbols become internal-only.
+#
+
+import subprocess
+import re
+import multiprocessing
+
+exported_symbols = set()
+imported_symbols = set()
+
+
+# Copied from solenv/gbuild/extensions/pre_MergedLibsList.mk
+# TODO there has to be a way to run gmake and get it to dump this list for me
+merged_libs = { \
+ "avmedia" \
+ ,"basctl" \
+ ,"basprov" \
+ ,"basegfx" \
+ ,"canvasfactory" \
+ ,"canvastools" \
+ ,"comphelper" \
+ ,"configmgr" \
+ ,"cppcanvas" \
+ ,"crashreport)" \
+ ,"dbtools" \
+ ,"deployment" \
+ ,"deploymentmisc" \
+ ,"desktopbe1)" \
+ ,"desktop_detector)" \
+ ,"drawinglayer" \
+ ,"editeng" \
+ ,"expwrap" \
+ ,"filterconfig" \
+ ,"fsstorage" \
+ ,"fwe" \
+ ,"fwi" \
+ ,"fwk" \
+ ,"helplinker)" \
+ ,"i18npool" \
+ ,"i18nutil" \
+ ,"lng" \
+ ,"localebe1" \
+ ,"mcnttype" \
+ ,"msfilter" \
+ ,"mtfrenderer" \
+ ,"opencl" \
+ ,"package2" \
+ ,"sax" \
+ ,"sb" \
+ ,"simplecanvas" \
+ ,"sfx" \
+ ,"sofficeapp" \
+ ,"sot" \
+ ,"spl" \
+ ,"stringresource" \
+ ,"svl" \
+ ,"svt" \
+ ,"svx" \
+ ,"svxcore" \
+ ,"tk" \
+ ,"tl" \
+ ,"ucb1" \
+ ,"ucbhelper" \
+ ,"ucpexpand1" \
+ ,"ucpfile1" \
+ ,"unoxml" \
+ ,"utl" \
+ ,"uui" \
+ ,"vcl" \
+ ,"xmlscript" \
+ ,"xo" \
+ ,"xstor" }
+
+# look for symbols exported by libmerged
+subprocess_nm = subprocess.Popen("nm -D instdir/program/libmergedlo.so", stdout=subprocess.PIPE, shell=True)
+with subprocess_nm.stdout as txt:
+ # We are looking for lines something like:
+ # 0000000000036ed0 T flash_component_getFactory
+ line_regex = re.compile(r'^[0-9a-fA-F]+ T ')
+ for line in txt:
+ line = line.strip()
+ if line_regex.match(line):
+ exported_symbols.add(line.split(" ")[2])
+subprocess_nm.terminate()
+
+# look for symbols imported from libmerged
+subprocess_find = subprocess.Popen("(find instdir/program/ -type f; ls ./workdir/LinkTarget/CppunitTest/*.so) | xargs grep -l mergedlo",
+ stdout=subprocess.PIPE, shell=True)
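+# "grep -l mergedlo" over the binaries is a cheap way to list everything
+# linked against libmergedlo without having to parse ELF headers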
+with subprocess_find.stdout as txt:
+ for line in txt:
+ sharedlib = line.strip()
+ s = sharedlib[sharedlib.find("/lib") + 4 : len(sharedlib) - 3]
+ if s in merged_libs: continue
+ # look for imported symbols
+ subprocess_objdump = subprocess.Popen("objdump -T " + sharedlib, stdout=subprocess.PIPE, shell=True)
+ with subprocess_objdump.stdout as txt2:
+ # ignore some header bumpf
+ txt2.readline()
+ txt2.readline()
+ txt2.readline()
+ txt2.readline()
+ # We are looking for lines something like (noting that one of them uses spaces, and the other tabs)
+ # 0000000000000000 DF *UND* 0000000000000000 _ZN16FilterConfigItem10WriteInt32ERKN3rtl8OUStringEi
+ for line2 in txt2:
+ line2 = line2.strip()
+ if line2.find("*UND*") == -1: continue
+ tokens = line2.split(" ")
+ sym = tokens[len(tokens)-1].strip()
+ imported_symbols.add(sym)
+ subprocess_objdump.terminate()
+subprocess_find.terminate()
+
+intersec_symbols = exported_symbols.intersection(imported_symbols)
+print("no symbols exported from libmerged = " + str(len(exported_symbols)))
+print("no symbols that can be made internal = " + str(len(intersec_symbols)))
+
+# Now look for classes where none of the class symbols are imported,
+# i.e. we can mark the whole class as hidden
+
+def extract_class(sym):
+ filtered_sym = subprocess.check_output(["c++filt", sym]).strip()
+ if filtered_sym.startswith("vtable for "):
+ classname = filtered_sym[11:]
+ return classname
+ if filtered_sym.startswith("non-virtual thunk to "):
+ filtered_sym = filtered_sym[21:]
+ elif filtered_sym.startswith("virtual thunk to "):
+ filtered_sym = filtered_sym[17:]
+ i = filtered_sym.find("(")
+ if i != -1:
+ i = filtered_sym.rfind("::", 0, i)
+ if i != -1:
+ classname = filtered_sym[:i]
+ return classname
+ return ""
+
+pool = multiprocessing.Pool(multiprocessing.cpu_count())
+classes_with_exported_symbols = set(pool.map(extract_class, list(exported_symbols)))
+classes_with_imported_symbols = set(pool.map(extract_class, list(imported_symbols)))
+
+# Some stuff is particular to Windows, so won't be found by a Linux analysis, so remove
+# those classes.
+can_be_private_classes = classes_with_exported_symbols - classes_with_imported_symbols
+can_be_private_classes.discard("SpinField")
+
+with open("bin/find-mergedlib-can-be-private.classes.results", "wt") as f:
+ for sym in sorted(can_be_private_classes):
+ if sym.startswith("std::") or sym.startswith("void std::"): continue
+ f.write(sym + "\n")
diff --git a/bin/find-most-common-warn-messages.py b/bin/find-most-common-warn-messages.py
new file mode 100755
index 000000000..dc2ecf8ab
--- /dev/null
+++ b/bin/find-most-common-warn-messages.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python3
+
+# A script to search our test logs and sort the messages by how common they are so we can start to
+# reduce the noise a little.
+
+import subprocess
+
+# find . -name '*.log' | xargs grep -h 'warn:' | sort | uniq -c | sort -n --field-separator=: --key=5,6
+
+process = subprocess.Popen("find workdir -name '*.log' | xargs grep -h 'warn:' | sort",
+ shell=True, stdout=subprocess.PIPE, universal_newlines=True)
+
+messages = dict() # dict of sourceAndLine->count
+sampleOfMessage = dict() # dict of sourceAndLine->string
+for line in process.stdout:
+ line = line.strip()
+ # a sample line is:
+ # warn:sw:18790:1:sw/source/core/doc/DocumentRedlineManager.cxx:98: redline table corrupted: overlapping redlines
+ tokens = line.split(":")
+ sourceAndLine = tokens[4] + ":" + tokens[5]
+ if (sourceAndLine in messages):
+ messages[sourceAndLine] = messages[sourceAndLine] + 1
+ else:
+ messages[sourceAndLine] = 1
+ sampleOfMessage[sourceAndLine] = line[line.find(tokens[6]):]
+
+tmplist = list() # set of tuple (count, sourceAndLine)
+for key, value in messages.items():
+ tmplist.append([value,key])
+
+print( "The top 20 warnings" )
+print("")
+for i in sorted(tmplist, key=lambda v: v[0])[-20:]:
+ print( "%6d %s %s" % (i[0], i[1], sampleOfMessage[i[1]]) )
+
+
diff --git a/bin/find-most-repeated-functions.py b/bin/find-most-repeated-functions.py
new file mode 100755
index 000000000..767f80240
--- /dev/null
+++ b/bin/find-most-repeated-functions.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+#
+# Find the top 100 functions that are repeated in multiple .o files, so we can out-of-line those
+#
+#
+
+import subprocess
+from collections import defaultdict
+
+# the odd bash construction (xargs echo ... | bash) is used because some of
+# the .so files returned by find are not object files, and xargs must not
+# stop when nm fails on one of them
+a = subprocess.Popen("find instdir/program/ -name '*.so' | xargs echo nm --radix=d --size-sort --demangle | bash", stdout=subprocess.PIPE, shell=True)
+
+#xargs sh -c "somecommand || true"
+
+nameDict = defaultdict(int)
+with a.stdout as txt:
+ for line in txt:
+ line = line.strip()
+ idx1 = line.find(" ")
+ idx2 = line.find(" ", idx1 + 1)
+ name = line[idx2:]
+ nameDict[name] += 1
+
+sizeDict = defaultdict(set)
+for k, v in nameDict.iteritems():
+ sizeDict[v].add(k)
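+# sizeDict maps repeat-count -> set of demangled names, so walking the counts
+# in descending order lists the most-duplicated functions first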
+
+cnt = 0
+for k in sorted(list(sizeDict), reverse=True):
+ print k
+ for v in sizeDict[k]:
+ print v
+ cnt += 1
+ if cnt > 100 : break
+
+#first = sorted(list(sizeDict))[-1]
+#print first
+
+
+#include/vcl/ITiledRenderable.hxx
+# why is gaLOKPointerMap declared inside this header?
diff --git a/bin/find-undocumented-classes b/bin/find-undocumented-classes
new file mode 100755
index 000000000..8bab72bc9
--- /dev/null
+++ b/bin/find-undocumented-classes
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# finds undocumented classes in the current directory (recursive)
+
+type -p doxygen >/dev/null || exit
+
+filter=
+quiet=n
+if [ "$1" = "-q" ]; then
+ filter=">/dev/null"
+ quiet=y
+ shift
+fi
+
+doxygen=$(mktemp -d)
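+# generate a default doxygen config in a temp dir, then tweak it: HTML output
+# into the temp dir, no LaTeX output, recursive scan, and optionally a custom
+# INPUT taken from the command line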
+eval doxygen -g $doxygen/doxygen.cfg $filter
+sed -i "/HTML_OUTPUT/s|html|$doxygen/html|" $doxygen/doxygen.cfg
+sed -i '/GENERATE_LATEX/s/= YES/= NO/' $doxygen/doxygen.cfg
+sed -i '/RECURSIVE/s/= NO/= YES/' $doxygen/doxygen.cfg
+# do we have any arguments?
+if [ -n "$*" ]; then
+ sed -i "/^INPUT[^_]/s|=.*|= $*|" $doxygen/doxygen.cfg
+fi
+eval doxygen $doxygen/doxygen.cfg $filter 2> $doxygen/errors.txt
+if [ "$quiet" == "n" ]; then
+ echo
+ echo "The following classes are undocumented:"
+ echo
+fi
+grep -i 'Warning: Compound.*is not documented' "$doxygen/errors.txt"
+rm -rf $doxygen
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/find-unneeded-includes b/bin/find-unneeded-includes
new file mode 100755
index 000000000..fcbabad87
--- /dev/null
+++ b/bin/find-unneeded-includes
@@ -0,0 +1,313 @@
+#!/usr/bin/env python3
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# This parses the output of 'include-what-you-use', focusing on just removing
+# not needed includes and providing a relatively conservative output by
+# filtering out a number of LibreOffice-specific false positives.
+#
+# It assumes you have a 'compile_commands.json' around (similar to clang-tidy),
+# you can generate one with 'make vim-ide-integration'.
+#
+# Design goals:
+# - blacklist mechanism, so a warning is either fixed or blacklisted
+# - works in a plugins-enabled clang build
+# - no custom configure options required
+# - no need to generate a dummy library to build a header
+
+import glob
+import json
+import multiprocessing
+import os
+import queue
+import re
+import subprocess
+import sys
+import threading
+import yaml
+
+
+def ignoreRemoval(include, toAdd, absFileName, moduleRules):
+ # global rules
+
+ # Avoid replacing .hpp with .hdl in the com::sun::star and ooo::vba namespaces.
+ if ( include.startswith("com/sun/star") or include.startswith("ooo/vba") ) and include.endswith(".hpp"):
+ hdl = include.replace(".hpp", ".hdl")
+ if hdl in toAdd:
+ return True
+
+ # Avoid debug STL.
+ debugStl = {
+ "array": ("debug/array", ),
+ "bitset": ("debug/bitset", ),
+ "deque": ("debug/deque", ),
+ "forward_list": ("debug/forward_list", ),
+ "list": ("debug/list", ),
+ "map": ("debug/map.h", "debug/multimap.h"),
+ "set": ("debug/set.h", "debug/multiset.h"),
+ "unordered_map": ("debug/unordered_map", ),
+ "unordered_set": ("debug/unordered_set", ),
+ "vector": ("debug/vector", ),
+ }
+ for k, values in debugStl.items():
+ if include == k:
+ for value in values:
+ if value in toAdd:
+ return True
+
+ # Avoid proposing to use libstdc++ internal headers.
+ bits = {
+ "exception": "bits/exception.h",
+ "memory": "bits/shared_ptr.h",
+ "functional": "bits/std_function.h",
+ "cmath": "bits/std_abs.h",
+ "ctime": "bits/types/clock_t.h",
+ "cstdint": "bits/stdint-uintn.h",
+ }
+ for k, v in bits.items():
+ if include == k and v in toAdd:
+ return True
+
+ # Avoid proposing o3tl fw declaration
+ o3tl = {
+ "o3tl/typed_flags_set.hxx" : "namespace o3tl { template <typename T> struct typed_flags; }",
+ "o3tl/deleter.hxx" : "namespace o3tl { template <typename T> struct default_delete; }",
+ "o3tl/span.hxx" : "namespace o3tl { template <typename T> class span; }",
+ }
+ for k, v, in o3tl.items():
+ if include == k and v in toAdd:
+ return True
+
+ # Follow boost documentation.
+ if include == "boost/optional.hpp" and "boost/optional/optional.hpp" in toAdd:
+ return True
+ if include == "boost/intrusive_ptr.hpp" and "boost/smart_ptr/intrusive_ptr.hpp" in toAdd:
+ return True
+ if include == "boost/variant.hpp" and "boost/variant/variant.hpp" in toAdd:
+ return True
+ if include == "boost/unordered_map.hpp" and "boost/unordered/unordered_map.hpp" in toAdd:
+ return True
+ if include == "boost/functional/hash.hpp" and "boost/container_hash/extensions.hpp" in toAdd:
+ return True
+
+ # Avoid .hxx to .h proposals in basic css/uno/* API
+ unoapi = {
+ "com/sun/star/uno/Any.hxx": "com/sun/star/uno/Any.h",
+ "com/sun/star/uno/Reference.hxx": "com/sun/star/uno/Reference.h",
+ "com/sun/star/uno/Sequence.hxx": "com/sun/star/uno/Sequence.h",
+ "com/sun/star/uno/Type.hxx": "com/sun/star/uno/Type.h"
+ }
+ for k, v in unoapi.items():
+ if include == k and v in toAdd:
+ return True
+
+ # 3rd-party, non-self-contained headers.
+ if include == "libepubgen/libepubgen.h" and "libepubgen/libepubgen-decls.h" in toAdd:
+ return True
+ if include == "librevenge/librevenge.h" and "librevenge/RVNGPropertyList.h" in toAdd:
+ return True
+
+ noRemove = (
+ # <https://www.openoffice.org/tools/CodingGuidelines.sxw> insists on not
+ # removing this.
+ "sal/config.h",
+ # Works around a build breakage specific to the broken Android
+ # toolchain.
+ "android/compatibility.hxx",
+ )
+ if include in noRemove:
+ return True
+
+ # Ignore when <foo> is to be replaced with "foo".
+ if include in toAdd:
+ return True
+
+ fileName = os.path.relpath(absFileName, os.getcwd())
+
+ # Skip headers used only for compile test
+ if fileName == "cppu/qa/cppumaker/test_cppumaker.cxx":
+ if include.endswith(".hpp"):
+ return True
+
+ # yaml rules
+
+ if "blacklist" in moduleRules.keys():
+ blacklistRules = moduleRules["blacklist"]
+ if fileName in blacklistRules.keys():
+ if include in blacklistRules[fileName]:
+ return True
+
+ return False
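+# Note: ignoreRemoval() layers the global LibreOffice-specific exceptions
+# above (UNO .hdl/.hpp pairs, debug STL, libstdc++ bits, boost re-exports,
+# sal/config.h) on top of each module's IwyuFilter_<module>.yaml blacklist.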
+
+
+def unwrapInclude(include):
+ # Drop <> or "" around the include.
+ return include[1:-1]
+
+
+def processIWYUOutput(iwyuOutput, moduleRules, fileName):
+ inAdd = False
+ toAdd = []
+ inRemove = False
+ toRemove = []
+ currentFileName = None
+
+ for line in iwyuOutput:
+ line = line.strip()
+
+ # Bail out if IWYU gave an error due to non self-containedness
+ if re.match ("(.*): error: (.*)", line):
+ return -1
+
+ if len(line) == 0:
+ if inRemove:
+ inRemove = False
+ continue
+ if inAdd:
+ inAdd = False
+ continue
+
+ shouldAdd = fileName + " should add these lines:"
+ match = re.match(shouldAdd, line)
+ if match:
+ currentFileName = match.group(0).split(' ')[0]
+ inAdd = True
+ continue
+
+ shouldRemove = fileName + " should remove these lines:"
+ match = re.match(shouldRemove, line)
+ if match:
+ currentFileName = match.group(0).split(' ')[0]
+ inRemove = True
+ continue
+
+ if inAdd:
+ match = re.match('#include ([^ ]+)', line)
+ if match:
+ include = unwrapInclude(match.group(1))
+ toAdd.append(include)
+ else:
+ # Forward declaration.
+ toAdd.append(line)
+
+ if inRemove:
+ match = re.match("- #include (.*) // lines (.*)-.*", line)
+ if match:
+ # Only suggest removals for now. Removing fwd decls is more complex:
+ # they may indeed be unused, or they may need to be replaced with an
+ # include, and we want to avoid the latter.
+ include = unwrapInclude(match.group(1))
+ lineno = match.group(2)
+ if not ignoreRemoval(include, toAdd, currentFileName, moduleRules):
+ toRemove.append("%s:%s: %s" % (currentFileName, lineno, include))
+
+ for remove in sorted(toRemove):
+ print("ERROR: %s: remove not needed include" % remove)
+ return len(toRemove)
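+
+# Return contract: -1 means IWYU itself failed (typically a header that is
+# not self-contained); otherwise the number of includes flagged for removal.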
+
+
+def run_tool(task_queue, failed_files):
+ while True:
+ invocation, moduleRules = task_queue.get()
+ if not len(failed_files):
+ print("[IWYU] " + invocation.split(' ')[-1])
+ p = subprocess.Popen(invocation, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ retcode = processIWYUOutput(p.communicate()[0].decode('utf-8').splitlines(), moduleRules, invocation.split(' ')[-1])
+ if retcode == -1:
+ print("ERROR: A file is probably not self contained, check this commands output:\n" + invocation)
+ elif retcode > 0:
+ print("ERROR: The following command found unused includes:\n" + invocation)
+ failed_files.append(invocation)
+ task_queue.task_done()
+
+
+def isInUnoIncludeFile(path):
+ return path.startswith("include/com/") \
+ or path.startswith("include/cppu/") \
+ or path.startswith("include/cppuhelper/") \
+ or path.startswith("include/osl/") \
+ or path.startswith("include/rtl/") \
+ or path.startswith("include/sal/") \
+ or path.startswith("include/salhelper/") \
+ or path.startswith("include/systools/") \
+ or path.startswith("include/typelib/") \
+ or path.startswith("include/uno/")
+
+
+def tidy(compileCommands, paths):
+ return_code = 0
+ try:
+ max_task = multiprocessing.cpu_count()
+ task_queue = queue.Queue(max_task)
+ failed_files = []
+ for _ in range(max_task):
+ t = threading.Thread(target=run_tool, args=(task_queue, failed_files))
+ t.daemon = True
+ t.start()
+
+ for path in sorted(paths):
+ if isInUnoIncludeFile(path):
+ continue
+
+ moduleName = path.split("/")[0]
+
+ rulePath = os.path.join(moduleName, "IwyuFilter_" + moduleName + ".yaml")
+ moduleRules = {}
+ if os.path.exists(rulePath):
+ moduleRules = yaml.safe_load(open(rulePath))
+ assume = None
+ pathAbs = os.path.abspath(path)
+ compileFile = pathAbs
+ matches = [i for i in compileCommands if i["file"] == compileFile]
+ if not len(matches):
+ if "assumeFilename" in moduleRules.keys():
+ assume = moduleRules["assumeFilename"]
+ if assume:
+ assumeAbs = os.path.abspath(assume)
+ compileFile = assumeAbs
+ matches = [i for i in compileCommands if i["file"] == compileFile]
+ if not len(matches):
+ print("WARNING: no compile commands for '" + path + "' (assumed filename: '" + assume + "'")
+ continue
+ else:
+ print("WARNING: no compile commands for '" + path + "'")
+ continue
+
+ _, _, args = matches[0]["command"].partition(" ")
+ if assume:
+ args = args.replace(assumeAbs, "-x c++ " + pathAbs)
+
+ invocation = "include-what-you-use -Xiwyu --no_fwd_decls -Xiwyu --max_line_length=200 " + args
+ task_queue.put((invocation, moduleRules))
+
+ task_queue.join()
+ if len(failed_files):
+ return_code = 1
+
+ except KeyboardInterrupt:
+ print('\nCtrl-C detected, goodbye.')
+ os.kill(0, 9)
+
+ sys.exit(return_code)
+
+
+def main(argv):
+ if not len(argv):
+ print("usage: find-unneeded-includes [FILE]...")
+ return
+
+ try:
+ with open("compile_commands.json", 'r') as compileCommandsSock:
+ compileCommands = json.load(compileCommandsSock)
+ except FileNotFoundError:
+ print ("File 'compile_commands.json' does not exist, please run:\nmake vim-ide-integration")
+ sys.exit(-1)
+
+ tidy(compileCommands, paths=argv)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/find-unused-defines.py b/bin/find-unused-defines.py
new file mode 100755
index 000000000..8e708c4a0
--- /dev/null
+++ b/bin/find-unused-defines.py
@@ -0,0 +1,170 @@
+#!/usr/bin/python2
+
+# Search for unused constants in header files.
+#
+# Note that sometimes these constants are calculated, so some careful checking of the output is necessary.
+#
+# Takes about 4 hours to run this on a fast machine with an SSD
+#
+
+import subprocess
+import sys
+import re
+
+exclusionSet = set([
+ # List of RID constants where we compute a value using a base before calling one of the RESSTR methods
+ # Found with: git grep -P 'RID_\w+\s*\+' -- :/ ':!*.hrc' ':!*.src' ':!*.java' ':!*.py' ':!*.xba'
+ "RID_SVXSTR_KEY_",
+ "RID_UPDATE_BUBBLE_TEXT_",
+ "RID_UPDATE_BUBBLE_T_TEXT_",
+ "RID_SVXSTR_TBLAFMT_",
+ "RID_BMP_CONTENT_",
+ "RID_DROPMODE_",
+ "RID_BMP_LEVEL",
+ "RID_SVXSTR_BULLET_DESCRIPTION",
+ "RID_SVXSTR_SINGLENUM_DESCRIPTION",
+ "RID_SVXSTR_OUTLINENUM_DESCRIPTION",
+ "RID_SVXSTR_RULER_",
+ "RID_GALLERYSTR_THEME_",
+ # doing some weird stuff in svx/source/unodraw/unoprov.cxx involving mapping of UNO api names to translated names and back again
+ "RID_SVXSTR_GRDT",
+ "RID_SVXSTR_HATCH",
+ "RID_SVXSTR_BMP",
+ "RID_SVXSTR_DASH",
+ "RID_SVXSTR_LEND",
+ "RID_SVXSTR_TRASNGR",
+ # other places doing calculations
+ "RID_SVXSTR_DEPTH",
+ "RID_SUBSETSTR_",
+ "ANALYSIS_",
+ "FLD_DOCINFO_CHANGE",
+ "FLD_EU_",
+ "FLD_INPUT_",
+ "FLD_PAGEREF_",
+ "FLD_STAT_",
+ "FMT_AUTHOR_",
+ "FMT_CHAPTER_",
+ "FMT_DBFLD_",
+ "FMT_FF_",
+ "FMT_GETVAR_",
+ "FMT_MARK_",
+ "FMT_REF_",
+ "FMT_SETVAR_",
+ "STR_AUTH_FIELD_ADDRESS_",
+ "STR_AUTH_TYPE_",
+ "STR_AUTOFMTREDL_",
+ "STR_CONTENT_TYPE_",
+ "STR_UPDATE_ALL",
+ "STR_UPDATE_INDEX",
+ "STR_UPDATE_LINK",
+ "BMP_PLACEHOLDER_",
+ "STR_RPT_HELP_",
+ "STR_TEMPLATE_NAME",
+ "UID_BRWEVT_",
+ "HID_EVT_",
+ "HID_PROP_",
+ "STR_VOBJ_MODE_",
+ "STR_COND_",
+ "SCSTR_CONTENT_",
+ "DATE_FUNCDESC_",
+ "DATE_FUNCNAME_",
+ "DATE_DEFFUNCNAME_",
+ "PRICING_DEFFUNCNAME_",
+ "PRICING_FUNCDESC_",
+ "PRICING_FUNCNAME_",
+ "STR_ItemValCAPTION",
+ "STR_ItemValCIRC",
+ "STR_ItemValEDGE",
+ "STR_ItemValFITTOSIZE",
+ "STR_ItemValMEASURE_",
+ "STR_ItemValMEASURETEXT_",
+ "STR_ItemValTEXTANI_",
+ "STR_ItemValTEXTHADJ",
+ "STR_ItemValTEXTVADJ",
+ "RID_SVXITEMS_VERJUST",
+ "RID_SVXITEMS_ORI",
+ "RID_SVXITEMS_JUSTMETHOD",
+ "RID_SVXITEMS_HORJUST",
+ "MM_PART",
+ ])
+
+
+def in_exclusion_set(a):
+    for f in exclusionSet:
+        if a.startswith(f):
+            return True
+    return False
+
+# find defines, excluding the externals folder
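+# (the pathspec "[!e][!x][!t]*" is a shell glob rejecting any top-level name
+# with 'e', 'x', 't' in the first three positions; a crude way of skipping
+# external/ that in effect also skips a few other trees, e.g. editeng/)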
+a = subprocess.Popen("git grep -hP '^#define\s+\w\w\w\w+\s*' -- \"[!e][!x][!t]*\" | sort -u", stdout=subprocess.PIPE, shell=True)
+
+name_re = re.compile("#define\s+(\w+)")
+with a.stdout as txt:
+ for line in txt:
+ idName = name_re.match(line).group(1)
+ if idName.startswith("INCLUDED_"): continue
+ # the various _START and _END constants are normally unused outside of the .hrc and .src files, and that's fine
+ if idName.endswith("_START"): continue
+ if idName.endswith("_BEGIN"): continue
+ if idName.endswith("_END"): continue
+ if idName == "RID_SVX_FIRSTFREE": continue
+ if idName == "": continue
+ if idName.startswith("__com"): continue # these are the include/header macros for the UNO stuff
+ if in_exclusion_set(idName): continue
+ # search for the constant
+ b = subprocess.Popen(["git", "grep", "-w", idName], stdout=subprocess.PIPE)
+ found_reason_to_exclude = False
+ with b.stdout as txt2:
+ cnt = 0
+ for line2 in txt2:
+ line2 = line2.strip() # otherwise the comparisons below will not work
+ # ignore if/undef magic, does not indicate an actual use (most of the time)
+ if "ifdef" in line2: continue
+ if "undef" in line2: continue
+ # ignore commented out code
+ if line2.startswith("//"): continue
+ if line2.startswith("/*"): continue
+ # check if we found one in actual code
+ if idName.startswith("SID_"):
+ if not ".hrc:" in line2 and not ".src:" in line2 and not ".sdi:" in line2: found_reason_to_exclude = True
+ else:
+ if not ".hrc:" in line2 and not ".src:" in line2: found_reason_to_exclude = True
+ if idName.startswith("RID_"):
+ # is the constant being used as an identifier by entries in .src files?
+ if ".src:" in line2 and "Identifier = " in line2: found_reason_to_exclude = True
+ # is the constant being used by the property controller extension or reportdesigner inspection,
+ # which use macros to declare constants, hiding them from a search
+ if "extensions/source/propctrlr" in line2: found_reason_to_exclude = True
+ if "reportdesign/source/ui/inspection/inspection.src" in line2: found_reason_to_exclude = True
+ if idName.startswith("HID_"):
+ # is the constant being used as an identifier by entries in .src files
+ if ".src:" in line2 and "HelpId = " in line2: found_reason_to_exclude = True
+ # is it being used as a constant in an ItemList in .src files?
+ if ".src:" in line2 and (";> ;" in line2 or "; >;" in line2): found_reason_to_exclude = True
+ # these are used in calculations in other .hrc files
+ if "sw/inc/rcid.hrc:" in line2: found_reason_to_exclude = True
+ # calculations
+ if "sw/source/uibase/inc/ribbar.hrc:" in line2 and "ST_" in idName: found_reason_to_exclude = True
+ if "sw/source/uibase/inc/ribbar.hrc:" in line2 and "STR_IMGBTN_" in idName: found_reason_to_exclude = True
+ if "sw/source/core/undo/undo.hrc:" in line2: found_reason_to_exclude = True
+ if "sw/inc/poolfmt.hrc:" in line2: found_reason_to_exclude = True
+ # used via a macro that hides them from search
+ if "dbaccess/" in line2 and idName.startswith("PROPERTY_ID_"): found_reason_to_exclude = True
+ if "reportdesign/" in line2 and idName.startswith("HID_RPT_PROP_"): found_reason_to_exclude = True
+ if "reportdesign/" in line2 and idName.startswith("RID_STR_"): found_reason_to_exclude = True
+ if "forms/" in line2 and idName.startswith("PROPERTY_"): found_reason_to_exclude = True
+ if "svx/source/tbxctrls/extrusioncontrols.hrc:" in line2 and idName.startswith("DIRECTION_"): found_reason_to_exclude = True
+ if "svx/source/tbxctrls/extrusioncontrols.hrc:" in line2 and idName.startswith("FROM_"): found_reason_to_exclude = True
+ # if we see more than a few lines then it's probably one of the BASE/START/BEGIN things
+ cnt = cnt + 1
+ if cnt > 2: found_reason_to_exclude = True
+ if not found_reason_to_exclude:
+ print(idName)
+            # flush; otherwise the previous line of output would get mixed into the git output below because of buffering
+ sys.stdout.flush()
+ # search again, so we log the location and filename of stuff we want to remove
+ subprocess.call(["git", "grep", "-wn", idName])
+
diff --git a/bin/find-unused-sid-commands.py b/bin/find-unused-sid-commands.py
new file mode 100755
index 000000000..32f45e0f8
--- /dev/null
+++ b/bin/find-unused-sid-commands.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+#
+# Find potentially unused UNO command entries in SDI files.
+#
+# Note that this is not foolproof, some extra checking is required because some command names might be
+# constructed at runtime.
+#
+
+import subprocess
+
+# search for entries in .sdi files that declare UNO/SID commands
+a = subprocess.Popen("git grep -P '^\s*\w+Item\s+\w+\s+SID_\w+$' -- *.sdi", stdout=subprocess.PIPE, shell=True)
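+# a matching .sdi line looks like, e.g. (hypothetical):
+#     SfxVoidItem Undo SID_UNDO
+# i.e. <item type> <UNO command name> <SID constant>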
+
+# parse out the UNO command names
+commandSet = list()
+with a.stdout as txt:
+ for line in txt:
+ line = line.strip()
+ idx1 = line.find(" ")
+ idx2 = line.find(" ", idx1 + 1)
+ commandName = line[idx1+1 : idx2].strip()
+ sidName = line[idx2+1:].strip()
+ commandSet.append((commandName,sidName))
+
+# now check each declared command for uses in the codebase.
+for pair in commandSet:
+ commandName = pair[0]
+ sidName = pair[1]
+
+ # check to see if that UNO command is called anywhere in the codebase.
+ a = subprocess.Popen("git grep -wFn '.uno:" + commandName + "'", stdout=subprocess.PIPE, shell=True)
+ cnt = 0
+ with a.stdout as txt2:
+ for line2 in txt2:
+ cnt = cnt + 1
+ if cnt > 0: continue
+
+ # check to see if the SID is used programmatically
+ foundLines = ""
+ a = subprocess.Popen("git grep -wn " + sidName, stdout=subprocess.PIPE, shell=True)
+ with a.stdout as txt2:
+ for line2 in txt2:
+ foundLines = foundLines + line2
+ if foundLines.find("ExecuteList") != -1: continue
+ if foundLines.find("GetDispatcher()->Execute") != -1: continue
+ if foundLines.find("ExecuteScenarioSlot") != -1: continue
+ # TODO not sure about this, but let's tackle the easy ones first
+ if foundLines.find("Invalidate(") != -1: continue
+
+ # dump any lines that contain the SID, so we can eyeball the results
+ print("remove: " + commandName)
+ print(foundLines)
+ print("----------------------------------------------------------------------------")
diff --git a/bin/find-unused-typedefs.py b/bin/find-unused-typedefs.py
new file mode 100755
index 000000000..b07c16d2b
--- /dev/null
+++ b/bin/find-unused-typedefs.py
@@ -0,0 +1,34 @@
+#!/usr/bin/python2
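+
+# Search the tree for typedefs that are never used, or used only once, by
+# grepping for each typedef name.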
+
+import subprocess
+
+# find typedefs, excluding the externals folder
+a = subprocess.Popen("git grep -P 'typedef\s+.+\s+\w+;' -- \"[!e][!x][!t]*\"", stdout=subprocess.PIPE, shell=True)
+
+# parse out the typedef names
+typedefSet = set()
+with a.stdout as txt:
+ for line in txt:
+ idx2 = line.rfind(";")
+ idx1 = line.rfind(" ", 0, idx2)
+ typedefName = line[idx1+1 : idx2]
+ if typedefName.startswith("*"):
+ typedefName = typedefName[1:]
+ # ignore anything less than 5 characters, it's probably a parsing error
+ if len(typedefName) < 5: continue
+ typedefSet.add(typedefName)
+
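+# a single grep hit is just the definition itself (candidate for removal);
+# two hits mean exactly one use, so the typedef could be inlined at that use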
+for typedefName in sorted(typedefSet):
+ print("checking: " + typedefName)
+ a = subprocess.Popen(["git", "grep", "-wn", typedefName], stdout=subprocess.PIPE)
+ foundLine2 = ""
+ cnt = 0
+ with a.stdout as txt2:
+ for line2 in txt2:
+ cnt = cnt + 1
+ foundLine2 += line2
+ if cnt == 1:
+ print("remove: " + foundLine2)
+ elif cnt == 2:
+ print("inline: " + foundLine2)
+
diff --git a/bin/find-unusedheaders.py b/bin/find-unusedheaders.py
new file mode 100755
index 000000000..7ca9bea4b
--- /dev/null
+++ b/bin/find-unusedheaders.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Find dirs in:
+workdir/Dep/CObject
+workdir/Dep/CxxObject
+
+Concat these files and compare them with the output of
+`git ls-tree HEAD -r --name-only` and report files in the git ls-tree that aren't in the first.
+"""
+
+import os
+import subprocess
+
+
+def get_files_dict_recursively(directory):
+ data = {}
+ for root, _, files in os.walk(directory, topdown=False):
+ for f in files:
+ basename = os.path.splitext(f)[0]
+ data[basename] = os.path.join(root, f)
+ return data
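+
+# NB: keying by basename only means two sources that differ just by directory
+# collapse into one entry, so such files can be missed in the report below.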
+
+
+def main():
+ data = {}
+ for d in ('workdir/Dep/CObject', 'workdir/Dep/CxxObject'):
+ tmp = get_files_dict_recursively(d)
+ data.update(tmp)
+
+ gitfiles = subprocess.check_output(['git', 'ls-tree', 'HEAD', '-r', '--name-only']).decode('utf-8').split('\n')
+
+ for f in gitfiles:
+ ext = os.path.splitext(f)[1]
+ if ext[1:] in ('c', 'cxx', 'h', 'hxx'):
+ tmp = os.path.basename(f)
+ tmp = os.path.splitext(tmp)[0]
+ if tmp not in data:
+ print(f)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/fixincludeguards.sh b/bin/fixincludeguards.sh
new file mode 100755
index 000000000..2655534aa
--- /dev/null
+++ b/bin/fixincludeguards.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Automatically corrects include guards of hxx/h files based on their paths.
+
+# Usage:
+# a) fixincludeguards.sh header.hxx
+# b) find . -name '*.hxx' -or -name '*.h' | xargs bash ./bin/fixincludeguards.sh
+
+# TODO: This doesn't fix wrong #endif comments, like:
+# #ifndef FOO_BAR_HXX
+# #define FOO_BAR_HXX
+# ...
+# #endif // OTHER_BAR_HXX
+
+# TODO: Make this portable. As it is now, it likely only works on Linux, or
+# other platforms with a purely GNU toolset.
+
+guard_prefix="INCLUDED_"
+
+for fn in "$@"; do
+ # remove leading ./, if invoked with find
+ fn=`echo "$fn" | sed 's/^.\///g'`
+
+ # global header in include/ top level dir:
+ # drop the project dir
+ fnfixed=`echo $fn | sed 's,include/,,g'`
+ # add examples prefix to headers in odk/examples
+ fnfixed=`echo $fnfixed | sed 's,odk/examples/\(cpp\|DevelopersGuide\|OLE\)/,examples_,g'`
+
+ # convert file path to header guard
+ guard=`echo "$fnfixed" | sed 's/[\/\.-]/_/g' | tr 'a-z' 'A-Z'`
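+    # e.g. include/vcl/window.hxx yields VCL_WINDOW_HXX; the INCLUDED_
+    # prefix from $guard_prefix is prepended when the guard is rewritten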
+
+ if [ aa"`git grep -h "^\s*#ifndef ${guard_prefix}$guard" "$fn" | wc -l`" != "aa1" ] ||
+ [ aa"`git grep -h "^\s*#define ${guard_prefix}$guard" "$fn" | wc -l`" != "aa1" ]; then
+
+        # pattern which identifies guards; common ones look like
+        # _MODULE_FILE_HXX, FILE_H, FILE_INC
+ pattern=".*\(_HXX\|_H\|_INC\|_hxx\|_h\|_inc\)"
+
+ ### extract guard definition
+ # head to take only the first match
+ old_guard=`git grep -h "#ifndef $pattern" "$fn" | head -n1 | sed "s/.*\s\($pattern.*\)/\1/"`
+
+ if [ aa"$old_guard" == aa"" ]; then
+ echo -e "$fn: \e[00;31mwarning:\e[00m guard not detectable"
+ continue
+ fi
+
+
+ if [ aa"`git grep -w "$old_guard" | cut -d ':' -f1 | sort -u | wc -l `" != aa"1" ]; then
+ echo -e "$fn: \e[00;31mwarning:\e[00m $old_guard guard definition used in other files"
+ continue
+ fi
+
+ ### skip some special files...
+
+ # skip this comphelper stuff:
+ # INCLUDED_COMPHELPER_IMPLBASE_VAR_HXX_14
+ if [ aa"INCLUDED_COMPHELPER_IMPLBASE_" == aa"`echo $old_guard | sed "s/VAR_HXX_[0-9]\+//g"`" ]; then
+ continue
+ fi
+
+ # skip files like xmloff/source/forms/elementimport_impl.hxx
+ if [ aa"`git grep -h "#error.*directly" "$fn" | wc -l`" != "aa0" ]; then
+ continue
+ fi
+
+
+ ### replace old guard with new scheme guard
+ echo "$fn: $old_guard"
+
+ # includes leading whitespace removal
+ sed -i "s/\s*${old_guard}/ ${guard_prefix}${guard}/g" "$fn"
+
+
+ ### clean up endif
+ sed -i "s/#endif\s*\(\/\/\|\/\*\)\s*\#\?\(ifndef\)\?\s*!\?\s*\(${guard_prefix}${guard}\).*/#endif \/\/ \3/g" "$fn"
+
+ fi
+done
diff --git a/bin/fuzzfiles b/bin/fuzzfiles
new file mode 100755
index 000000000..ed0432d23
--- /dev/null
+++ b/bin/fuzzfiles
@@ -0,0 +1,41 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+#check that zzuf is installed
+hash zzuf &> /dev/null
+if [ $? -eq 1 ]; then
+ echo >&2 "zzuf not found. Please install and/or fix the PATH environment variable. Aborting"
+ exit -1
+fi
+
+#check that file(s) to fuzz are mentioned
+if [[ $# -eq 0 ]]; then
+ echo "Usage: fuzzfiles.sh <list of seed files to fuzz>"
+ echo "The generated fuzzed files will be output to the current working directory"
+ echo "The fuzzed files will be named XYZ-ratio-NNNN where:"
+ echo -e "\tXYZ: the original file name"
+ echo -e "\tratio: the fuzz ratio (what % of bytes were fuzzed)"
+ echo -e "\tNNNN: the mutation # for that file and ratio combo"
+ exit -1
+fi
+
+for file in "$@"; do
+    if [ -d "$file" ]; then
+        echo "$file is a directory. Only files are allowed"
+    elif [ -e "$file" ]; then
+ basename=${file##*/}
+        # fuzz ratios 0.001, 0.003, ..., 0.499
+ for ratio in `seq -w 1 2 500 | sed -e 's/^/0./'`; do
+ echo "Fuzzing $file with ratio $ratio"
+ for i in {1..1000}; do
+                zzuf -r $ratio < "$file" > "$basename-$ratio-$i"
+ done #end of for i in {1..
+ done #end of for ratio in ...
+ fi #end if of file validity check
+done #end for file in $@
diff --git a/bin/gbuild-to-ide b/bin/gbuild-to-ide
new file mode 100755
index 000000000..f74712caf
--- /dev/null
+++ b/bin/gbuild-to-ide
@@ -0,0 +1,1911 @@
+#! /usr/bin/env python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import argparse
+import ntpath
+import os
+import os.path
+import shutil
+import re
+import sys
+import uuid
+import json
+import xml.etree.ElementTree as ET
+import xml.dom.minidom as minidom
+import traceback
+import subprocess
+from sys import platform
+import collections
+
+class GbuildLinkTarget:
+ def __init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs):
+ (self.name, self.location, self.include, self.include_sys, self.defs, self.cxxobjects, self.cxxflags, self.cobjects, self.cflags, self.linked_libs) = (
+ name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs)
+
+ def short_name(self):
+ return self.name
+
+ def is_empty(self):
+ return not self.include and not self.defs and not self.cxxobjects and not self.cobjects and not self.linked_libs
+
+ def __str__(self):
+ return '%s at %s with include path: %s, isystem includes: %s, defines: %s, objects: %s, cxxflags: %s, cobjects: %s, cflags: %s and linked libs: %s' % (
+ self.short_name(), self.location, self.include, self.include_sys, self.defs, self.cxxobjects,
+ self.cxxflags, self.cobjects, self.cflags, self.linked_libs)
+
+
+class GbuildLib(GbuildLinkTarget):
+ def __init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs):
+ GbuildLinkTarget.__init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs)
+
+ def short_name(self):
+ """Return the short name of target based on the Library_* makefile name"""
+ return 'Library %s' % self.name
+
+ def target_name(self):
+ return 'Library_%s' % self.name
+
+ def library_name(self):
+ return self.name
+
+class GbuildTest(GbuildLinkTarget):
+ def __init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs):
+ GbuildLinkTarget.__init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs)
+
+ def short_name(self):
+        """Return the short name of target based on the CppunitTest_* makefile name"""
+ return 'CppunitTest %s' % self.name
+
+ def target_name(self):
+ return 'CppunitTest_%s' % self.name
+
+class GbuildExe(GbuildLinkTarget):
+ def __init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs):
+ GbuildLinkTarget.__init__(self, name, location, include, include_sys, defs, cxxobjects, cxxflags, cobjects, cflags, linked_libs)
+
+ def short_name(self):
+ """Return the short name of target based on the Executable_* makefile name"""
+ return 'Executable %s' % self.name
+
+ def target_name(self):
+ return 'Executable_%s' % self.name
+
+
+class GbuildParser:
+ """Main data model object.
+
+ Attributes:
+ target_by_path : dict[path:string, set(target)]
+ where target is one of the GbuildLinkTarget subclasses
+ target_by_location : dict[path:string, set(target)]
+ where target is one of the GbuildLinkTarget subclasses
+ """
+ def __init__(self, makecmd):
+ self.makecmd = makecmd
+        self.binpath = os.path.dirname(os.environ['GPERF'])  # whoa, this is quite a hack
+ (self.srcdir, self.builddir, self.instdir, self.workdir) = (os.environ['SRCDIR'], os.environ['BUILDDIR'], os.environ['INSTDIR'], os.environ['WORKDIR'])
+ (self.libs, self.exes, self.tests, self.modulenamelist) = ([], [], [], [])
+ (self.target_by_path, self.target_by_location) = ({}, {})
+
+ includepattern = re.compile('-I(\S+)')
+ isystempattern = re.compile('-isystem\s*(\S+)')
+ warningpattern = re.compile('-W\S+')
+ libpattern = re.compile('Library_(.*)\.mk')
+ exepattern = re.compile('Executable_(.*)\.mk')
+ testpattern = re.compile('CppunitTest_(.*)\.mk')
+
+ @staticmethod
+ def __split_includes(includes):
+ foundisystem = GbuildParser.isystempattern.findall(includes)
+ foundincludes = [includeswitch.strip() for includeswitch in GbuildParser.includepattern.findall(includes) if
+ len(includeswitch) > 2]
+ return (foundincludes, foundisystem)
+
+ @staticmethod
+ def __split_objs(objsline):
+ return [obj for obj in objsline.strip().split(' ') if len(obj) > 0 and obj != 'CXXOBJECTS' and obj != 'COBJECTS' and obj != '+=']
+
+ @staticmethod
+ def __split_defs(defsline):
+ defs = {}
+ alldefs = [defswitch.strip() for defswitch in defsline.strip().lstrip('-D').split(' -D') if len(defswitch) > 2]
+ for d in alldefs:
+ dparts = d.split(' -U')
+            # after dparts.pop(0), dparts will contain only undefs
+ defparts = dparts.pop(0).strip().split('=')
+ if len(defparts) == 1:
+ defparts.append(None)
+ defs[defparts[0]] = defparts[1]
+            # drop undef'ed items (if any) from previous defs
+ for u in dparts:
+ defs.pop(u.strip(), '')
+ defs["LIBO_INTERNAL_ONLY"] = None
+ return defs
+
+ @staticmethod
+ def __split_flags(flagsline, flagslineappend):
+ return [cxxflag.strip() for cxxflag in GbuildParser.warningpattern.sub('', '%s %s' % (flagsline, flagslineappend)).split(' ') if len(cxxflag) > 1]
+
+ @staticmethod
+ def __lib_from_json(json):
+ (foundincludes, foundisystem) = GbuildParser.__split_includes(json['INCLUDE'])
+ return GbuildLib(
+ GbuildParser.libpattern.match(os.path.basename(json['MAKEFILE'])).group(1),
+ os.path.dirname(json['MAKEFILE']),
+ foundincludes,
+ foundisystem,
+ GbuildParser.__split_defs(json['DEFS']),
+ GbuildParser.__split_objs(json['CXXOBJECTS']),
+ GbuildParser.__split_flags(json['CXXFLAGS'], json['CXXFLAGSAPPEND']),
+ GbuildParser.__split_objs(json['COBJECTS']),
+ GbuildParser.__split_flags(json['CFLAGS'], json['CFLAGSAPPEND']),
+ json['LINKED_LIBS'].strip().split(' '))
+
+ @staticmethod
+ def __test_from_json(json):
+ (foundincludes, foundisystem) = GbuildParser.__split_includes(json['INCLUDE'])
+ testname_match = GbuildParser.testpattern.match(os.path.basename(json['MAKEFILE']))
+
+ # Workaround strange writer test makefile setup
+ if testname_match is None:
+ testname = "StrangeWriterMakefiles"
+ else:
+ testname = testname_match.group(1)
+
+ return GbuildTest(
+ testname,
+ os.path.dirname(json['MAKEFILE']),
+ foundincludes,
+ foundisystem,
+ GbuildParser.__split_defs(json['DEFS']),
+ GbuildParser.__split_objs(json['CXXOBJECTS']),
+ GbuildParser.__split_flags(json['CXXFLAGS'], json['CXXFLAGSAPPEND']),
+ GbuildParser.__split_objs(json['COBJECTS']),
+ GbuildParser.__split_flags(json['CFLAGS'], json['CFLAGSAPPEND']),
+ json['LINKED_LIBS'].strip().split(' '))
+
+ @staticmethod
+ def __exe_from_json(json):
+ (foundincludes, foundisystem) = GbuildParser.__split_includes(json['INCLUDE'])
+ return GbuildExe(
+ GbuildParser.exepattern.match(os.path.basename(json['MAKEFILE'])).group(1),
+ os.path.dirname(json['MAKEFILE']),
+ foundincludes,
+ foundisystem,
+ GbuildParser.__split_defs(json['DEFS']),
+ GbuildParser.__split_objs(json['CXXOBJECTS']),
+ GbuildParser.__split_flags(json['CXXFLAGS'], json['CXXFLAGSAPPEND']),
+ GbuildParser.__split_objs(json['COBJECTS']),
+ GbuildParser.__split_flags(json['CFLAGS'], json['CFLAGSAPPEND']),
+ json['LINKED_LIBS'].strip().split(' '))
+
+ def parse(self):
+ for jsonfilename in os.listdir(os.path.join(self.workdir, 'GbuildToJson', 'Library')):
+ with open(os.path.join(self.workdir, 'GbuildToJson', 'Library', jsonfilename), 'r') as f:
+ lib = self.__lib_from_json(json.load(f))
+ self.libs.append(lib)
+ for jsonfilename in os.listdir(os.path.join(self.workdir, 'GbuildToJson', 'Executable')):
+ with open(os.path.join(self.workdir, 'GbuildToJson', 'Executable', jsonfilename), 'r') as f:
+ exe = self.__exe_from_json(json.load(f))
+ self.exes.append(exe)
+ for jsonfilename in os.listdir(os.path.join(self.workdir, 'GbuildToJson', 'CppunitTest')):
+ with open(os.path.join(self.workdir, 'GbuildToJson', 'CppunitTest', jsonfilename), 'r') as f:
+ test = self.__test_from_json(json.load(f))
+ self.tests.append(test)
+ for target in set(self.libs) | set(self.exes) | set(self.tests):
+ if target.location not in self.target_by_location:
+ self.target_by_location[target.location] = set()
+ self.target_by_location[target.location] |= set([target])
+ for cxx in target.cxxobjects:
+ path = '/'.join(cxx.split('/')[:-1])
+ if path not in self.target_by_path:
+ self.target_by_path[path] = set()
+ self.target_by_path[path] |= set([target])
+ for c in target.cobjects:
+ path = '/'.join(c.split('/')[:-1])
+ if path not in self.target_by_path:
+ self.target_by_path[path] = set()
+ self.target_by_path[path] |= set([target])
+ for location in self.target_by_location:
+ self.modulenamelist.append(os.path.split(location)[1])
+ return self
+
+
+class IdeIntegrationGenerator:
+
+ def __init__(self, gbuildparser, ide):
+ self.gbuildparser = gbuildparser
+ self.ide = ide
+
+ def emit(self):
+ pass
+
+class EclipseCDTIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def create_include_paths(self):
+ for module in self.gbuildparser.modulenamelist:
+ modulepath = os.path.join(self.gbuildparser.builddir, module)
+ includedirfile = open(os.path.join(modulepath, '.eclipsesettingfile'), 'w')
+ modulelibs = []
+ for lib in self.gbuildparser.target_by_path.keys():
+ if lib.startswith(module+'/'):
+ modulelibs.append(lib)
+ include = set()
+ for lib in modulelibs:
+ for target in self.gbuildparser.target_by_path[lib]:
+ include |= set(target.include)
+ includedirfile.write('\n'.join(include))
+ includedirfile.close()
+
+
+ def create_macros(self):
+ for module in self.gbuildparser.modulenamelist:
+ modulepath = os.path.join(self.gbuildparser.builddir, module)
+ macrofile = open(os.path.join(modulepath, '.macros'), 'w')
+ modulelibs = []
+ for lib in self.gbuildparser.target_by_path.keys():
+ if lib.startswith(module+'/'):
+ modulelibs.append(lib)
+ define = []
+ defineset = set()
+ for lib in modulelibs:
+ for target in self.gbuildparser.target_by_path[lib]:
+ for i in target.defs.keys():
+ tmp = str(i) +','+str(target.defs[i])
+ if tmp not in defineset:
+ defineset.add(tmp)
+ macrofile.write('\n'.join(defineset))
+ macrofile.close()
+
+
+ def create_settings_file(self):
+
+ settingsfiletemplate = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<cdtprojectproperties>
+<section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">
+<language name="C++ Source File">
+
+
+</language>
+<language name="C Source File">
+
+</language>
+<language name="Object File">
+
+</language>
+<language name="Assembly Source File">
+
+</language>
+</section>
+<section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">
+<language name="C++ Source File">
+
+</language>
+<language name="C Source File">
+
+</language>
+<language name="Object File">
+
+</language>
+<language name="Assembly Source File">
+
+</language>
+</section>
+</cdtprojectproperties>
+"""
+
+ for module in self.gbuildparser.modulenamelist:
+ tempxml = []
+ modulepath = os.path.join(self.gbuildparser.builddir, module)
+
+ settingsfile = open(os.path.join(modulepath, 'eclipsesettingfile.xml'), 'w')
+ settingsfile.write(settingsfiletemplate)
+ settingsfile.close()
+
+ settingsfile = open(os.path.join(modulepath, 'eclipsesettingfile.xml'), 'r')
+ tempxml = settingsfile.readlines()
+ tempinclude = open(os.path.join(modulepath, '.eclipsesettingfile'), 'r')
+ tempmacro = open(os.path.join(modulepath, '.macros'), 'r')
+ for includepath in tempinclude:
+ if includepath[-1:] == "\n":
+ includepath = includepath[:-1]
+ templine = "<includepath>%s</includepath>\n" % includepath
+ tempxml.insert(5, templine)
+
+ for line in tempmacro:
+ macroskeyvalue = line.split(',')
+ macrokey = macroskeyvalue[0]
+ macrovalue = macroskeyvalue[1]
+ if macrovalue[-1:] == "\n":
+ macrovalue = macrovalue[:-1]
+ templine = "<macro><name>%s</name><value>%s</value></macro>\n" %(macrokey, macrovalue)
+ tempxml.insert(-13, templine)
+ tempxml="".join(tempxml)
+            settingsfile.close()
+
+ settingsfile = open(os.path.join(modulepath, 'eclipsesettingfile.xml'), 'w')
+ settingsfile.write(tempxml)
+ settingsfile.close()
+ os.remove(os.path.join(modulepath, '.eclipsesettingfile'))
+ os.remove(os.path.join(modulepath, '.macros'))
+
+ def emit(self):
+ self.create_include_paths()
+ self.create_macros()
+ self.create_settings_file()
+
+class CodeliteIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def emit(self):
+ self.create_workspace_file()
+ for module in self.gbuildparser.modulenamelist:
+ self.create_project_file(module)
+ #self.create_project_file('vcl')
+
+ def create_workspace_file(self):
+ root_node = ET.Element('CodeLite_Workspace', Name='libo2', Database='./libo2.tags', Version='10.0.0')
+ for module in self.gbuildparser.modulenamelist:
+ ET.SubElement(root_node, 'Project', Name=module, Path='%s/%s.project' % (module, module), Active='No')
+ build_matrix_node = ET.SubElement(root_node, 'BuildMatrix')
+ workspace_config_node = ET.SubElement(build_matrix_node, 'WorkspaceConfiguration', Name='Debug', Selected='yes')
+ ET.SubElement(workspace_config_node, 'Environment')
+ for module in self.gbuildparser.modulenamelist:
+ ET.SubElement(workspace_config_node, 'Project', Name=module, ConfigName='Debug')
+ workspace_config_node = ET.SubElement(build_matrix_node, 'WorkspaceConfiguration', Name='Release', Selected='yes')
+ ET.SubElement(workspace_config_node, 'Environment')
+ for module in self.gbuildparser.modulenamelist:
+ ET.SubElement(workspace_config_node, 'Project', Name=module, ConfigName='Release')
+
+ self.write_pretty_xml(root_node, os.path.join(self.gbuildparser.builddir, 'libo2.workspace'))
+
+ def create_project_file(self, module_name):
+ root_node = ET.Element('CodeLite_Project', Name=module_name, InternalType='')
+ ET.SubElement(root_node, 'Plugins')
+
+ # add CXX files
+ virtual_dirs = collections.defaultdict(set)
+ for target_path in self.gbuildparser.target_by_path.keys():
+ if target_path.startswith(module_name+'/'):
+ for target in self.gbuildparser.target_by_path[target_path]:
+ for file in target.cxxobjects:
+ relative_file = '/'.join(file.split('/')[1:])
+ path = '/'.join(file.split('/')[1:-1])
+ virtual_dirs[path].add(relative_file + '.cxx')
+ # add HXX files
+ all_libs = set(self.gbuildparser.libs) | set(self.gbuildparser.exes)
+ for lib in all_libs:
+ if lib.name == module_name:
+ for hdir in lib.include:
+ # only want the module-internal ones
+ if hdir.startswith(module_name+'/'):
+ for hf in os.listdir(hdir):
+ if hf.endswith(('.h', '.hxx', '.hpp', '.hrc')):
+ path = '/'.join(hf.split('/')[1:-1])
+ virtual_dirs[path].add(hf)
+ # add HXX files from the root/include/** folders
+ module_include = os.path.join(self.gbuildparser.builddir, 'include', module_name)
+ if os.path.exists(module_include):
+ for hf in os.listdir(module_include):
+ if hf.endswith(('.h', '.hxx', '.hpp', '.hrc')):
+ virtual_dirs['include/' + module_name].add('../include/' + module_name + '/' + hf)
+
+ for vd_name in sorted(virtual_dirs.keys()):
+ vd_files = sorted(virtual_dirs[vd_name])
+ parent_node = root_node
+ for subname in vd_name.split('/'):
+ parent_node = ET.SubElement(parent_node, 'VirtualDirectory', Name=subname)
+ for file in vd_files:
+ ET.SubElement(parent_node, 'File', Name=file)
+
+ ET.SubElement(root_node, 'Description')
+ ET.SubElement(root_node, 'Dependencies')
+ ET.SubElement(root_node, 'Dependencies', Name='Debug')
+ ET.SubElement(root_node, 'Dependencies', Name='Release')
+
+ settingstemplate = """\
+ <Settings Type="Dynamic Library">
+ <GlobalSettings>
+ <Compiler Options="" C_Options="" Assembler="">
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="">
+ <LibraryPath Value="."/>
+ </Linker>
+ <ResourceCompiler Options=""/>
+ </GlobalSettings>
+ <Configuration Name="Debug" CompilerType="clang( based on LLVM 3.5.0 )" DebuggerType="GNU gdb debugger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+ <Compiler Options="-g" C_Options="-g" Assembler="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" PCHFlags="" PCHFlagsPolicy="0">
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="" Required="yes"/>
+ <ResourceCompiler Options="" Required="no"/>
+ <General OutputFile="" IntermediateDirectory="./Debug" Command="" CommandArguments="" UseSeparateDebugArgs="no" DebugArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes" IsGUIProgram="no" IsEnabled="yes"/>
+ <BuildSystem Name="Default"/>
+ <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+ <![CDATA[]]>
+ </Environment>
+ <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="" IsExtended="no">
+ <DebuggerSearchPaths/>
+ <PostConnectCommands/>
+ <StartupCommands/>
+ </Debugger>
+ <PreBuild/>
+ <PostBuild/>
+ <CustomBuild Enabled="yes">
+ <RebuildCommand/>
+ <CleanCommand>make %s.clean</CleanCommand>
+ <BuildCommand>make %s.build</BuildCommand>
+ <PreprocessFileCommand/>
+ <SingleFileCommand/>
+ <MakefileGenerationCommand/>
+ <ThirdPartyToolName>None</ThirdPartyToolName>
+ <WorkingDirectory>$(WorkspacePath)</WorkingDirectory>
+ </CustomBuild>
+ <AdditionalRules>
+ <CustomPostBuild/>
+ <CustomPreBuild/>
+ </AdditionalRules>
+ <Completion EnableCpp11="no" EnableCpp14="no">
+ <ClangCmpFlagsC/>
+ <ClangCmpFlags/>
+ <ClangPP/>
+ <SearchPaths/>
+ </Completion>
+ </Configuration>
+ <Configuration Name="Release" CompilerType="clang( based on LLVM 3.5.0 )" DebuggerType="GNU gdb debugger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+ <Compiler Options="" C_Options="" Assembler="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" PCHFlags="" PCHFlagsPolicy="0">
+ <IncludePath Value="."/>
+ </Compiler>
+ <Linker Options="-O2" Required="yes"/>
+ <ResourceCompiler Options="" Required="no"/>
+ <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateDebugArgs="no" DebugArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes" IsGUIProgram="no" IsEnabled="yes"/>
+ <BuildSystem Name="Default"/>
+ <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+ <![CDATA[]]>
+ </Environment>
+ <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="" IsExtended="no">
+ <DebuggerSearchPaths/>
+ <PostConnectCommands/>
+ <StartupCommands/>
+ </Debugger>
+ <PreBuild/>
+ <PostBuild/>
+ <CustomBuild Enabled="yes">
+ <RebuildCommand/>
+ <CleanCommand>make %s.clean</CleanCommand>
+ <BuildCommand>make %s.build</BuildCommand>
+ <PreprocessFileCommand/>
+ <SingleFileCommand/>
+ <MakefileGenerationCommand/>
+ <ThirdPartyToolName>None</ThirdPartyToolName>
+ <WorkingDirectory>$(WorkspacePath)</WorkingDirectory>
+ </CustomBuild>
+ <AdditionalRules>
+ <CustomPostBuild/>
+ <CustomPreBuild/>
+ </AdditionalRules>
+ <Completion EnableCpp11="no" EnableCpp14="no">
+ <ClangCmpFlagsC/>
+ <ClangCmpFlags/>
+ <ClangPP/>
+ <SearchPaths/>
+ </Completion>
+ </Configuration>
+ </Settings>
+"""
+ root_node.append(ET.fromstring(settingstemplate % (module_name, module_name, module_name, module_name)))
+
+ self.write_pretty_xml(root_node, os.path.join(self.gbuildparser.builddir, module_name, '%s.project' % module_name))
+
+ def write_pretty_xml(self, node, file_path):
+ xml_str = ET.tostring(node, encoding='unicode')
+ pretty_str = minidom.parseString(xml_str).toprettyxml(encoding='utf-8')
+ with open(file_path, 'w') as f:
+ f.write(pretty_str.decode())
+
+class DebugIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def emit(self):
+ print(self.gbuildparser.srcdir)
+ print(self.gbuildparser.builddir)
+ for lib in self.gbuildparser.libs:
+ print(lib)
+ for exe in self.gbuildparser.exes:
+ print(exe)
+ for test in self.gbuildparser.tests:
+ print(test)
+
+
+class VimIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def emit(self):
+ global_list = []
+ for lib in set(self.gbuildparser.libs) | set(self.gbuildparser.tests) | set(self.gbuildparser.exes):
+ entries = []
+ for file in lib.cxxobjects:
+ filePath = os.path.join(self.gbuildparser.srcdir, file) + ".cxx"
+ entry = {'directory': lib.location, 'file': filePath, 'command': self.generateCommand(lib, filePath)}
+ entries.append(entry)
+ global_list.extend(entries)
+ export_file = open('compile_commands.json', 'w')
+ json.dump(global_list, export_file)
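+        # each entry follows the Clang JSON compilation database format:
+        # {"directory": ..., "file": ..., "command": ...}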
+
+ def generateCommand(self, lib, file):
+ command = 'clang++ -Wall'
+ for key, value in lib.defs.items():
+ command += ' -D'
+ command += key
+ if value is not None:
+ command += '='
+ command += value
+
+ for include in lib.include:
+ command += ' -I'
+ command += include
+ for isystem in lib.include_sys:
+ command += ' -isystem '
+ command += isystem
+ for cxxflag in lib.cxxflags:
+ command += ' '
+ command += cxxflag
+ command += ' -c '
+ command += file
+ return command
+
+
+class KdevelopIntegrationGenerator(IdeIntegrationGenerator):
+
+ def encode_int(self, i):
+ temp = '%08x' % i
+ return '\\x%s\\x%s\\x%s\\x%s' % (temp[0:2], temp[2:4], temp[4:6], temp[6:8])
+
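+    # strings in .kdev4 config entries are serialized as a 4-byte byte count
+    # (two bytes per character) followed by UTF-16-BE data; bytes outside
+    # printable ASCII are written as \xNN escapes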
+ def encode_string(self, string):
+ result = self.encode_int(len(string) * 2)
+ for c in string.encode('utf-16-be'):
+ if c in range(32, 126):
+ result += chr(c)
+ else:
+ result += '\\x%02x' % c
+ return result
+
+ def generate_buildsystemconfigtool(self, configid, tool, args, exe, typenr):
+ return KdevelopIntegrationGenerator.buildsystemconfigtooltemplate % {'configid': configid, 'tool': tool,
+ 'args': args, 'exe': exe, 'typenr': typenr}
+
+ buildsystemconfigtooltemplate = """
+[CustomBuildSystem][BuildConfig%(configid)d][Tool%(tool)s]
+Arguments=%(args)s
+Enabled=true
+Environment=
+Executable=%(exe)s
+Type=%(typenr)d
+
+"""
+
+ def generate_buildsystemconfig(self, configid, moduledir, builddir, title, buildparms=''):
+ result = KdevelopIntegrationGenerator.buildsystemconfigtemplate % {'configid': configid, 'builddir': builddir,
+ 'title': title}
+ result += self.generate_buildsystemconfigtool(configid, 'Clean', 'clean %s' % buildparms,
+ self.gbuildparser.makecmd, 3)
+ result += self.generate_buildsystemconfigtool(configid, 'Build', 'all %s' % buildparms,
+ self.gbuildparser.makecmd, 0)
+ return result
+
+ buildsystemconfigtemplate = """
+[CustomBuildSystem][BuildConfig%(configid)d]
+BuildDir=file://%(builddir)s
+Title=%(title)s
+
+"""
+
+ def generate_buildsystem(self, moduledir):
+ result = KdevelopIntegrationGenerator.buildsystemtemplate % {'defaultconfigid': 0}
+ result += self.generate_buildsystemconfig(0, moduledir, moduledir, 'Module Build -- Release')
+ result += self.generate_buildsystemconfig(1, moduledir, self.gbuildparser.builddir, 'Full Build -- Release')
+ result += self.generate_buildsystemconfig(2, moduledir, moduledir, 'Module Build -- Debug', 'debug=T')
+ result += self.generate_buildsystemconfig(3, moduledir, self.gbuildparser.builddir, 'Full Build -- Debug',
+ 'debug=T')
+ return result
+
+ buildsystemtemplate = """
+[CustomBuildSystem]
+CurrentConfiguration=BuildConfig%(defaultconfigid)d
+
+"""
+
+ def generate_launch(self, launchid, launchname, executablepath, args, workdir):
+ return KdevelopIntegrationGenerator.launchtemplate % {'launchid': launchid, 'launchname': launchname,
+ 'executablepath': executablepath, 'args': args,
+ 'workdir': workdir}
+
+ launchtemplate = """
+[Launch][Launch Configuration %(launchid)d]
+Configured Launch Modes=execute
+Configured Launchers=nativeAppLauncher
+Name=%(launchname)s
+Type=Native Application
+
+[Launch][Launch Configuration %(launchid)d][Data]
+Arguments=%(args)s
+Dependencies=@Variant(\\x00\\x00\\x00\\t\\x00\\x00\\x00\\x00\\x00)
+Dependency Action=Nothing
+EnvironmentGroup=default
+Executable=file://%(executablepath)s
+External Terminal=konsole --noclose --workdir %%workdir -e %%exe
+Project Target=
+Use External Terminal=false
+Working Directory=file://%(workdir)s
+isExecutable=true
+
+"""
+
+ def generate_launches(self, moduledir):
+ launches = ','.join(['Launch Configuration %d' % i for i in range(7)])
+ result = KdevelopIntegrationGenerator.launchestemplate % {'launches': launches}
+ result += self.generate_launch(0, 'Local tests -- quick tests (unitcheck)', self.gbuildparser.makecmd,
+ 'unitcheck', moduledir)
+ result += self.generate_launch(1, 'Local tests -- slow tests (unitcheck, slowcheck, screenshot)', self.gbuildparser.makecmd,
+ 'unitcheck slowcheck screenshot', moduledir)
+ result += self.generate_launch(2, 'Local tests -- integration tests (unitcheck, slowcheck, screenshot, subsequentcheck)',
+ self.gbuildparser.makecmd, 'unitcheck slowcheck screenshot subsequentcheck', moduledir)
+ result += self.generate_launch(3, 'Global tests -- quick tests (unitcheck)', self.gbuildparser.makecmd,
+ 'unitcheck', self.gbuildparser.builddir)
+ result += self.generate_launch(4, 'Global tests -- slow tests (unitcheck, slowcheck, screenshot)',
+ self.gbuildparser.makecmd, 'unitcheck slowcheck screenshot', self.gbuildparser.builddir)
+ result += self.generate_launch(5, 'Global tests -- integration tests (unitcheck, slowcheck, screenshot, subsequentcheck)',
+ self.gbuildparser.makecmd, 'unitcheck slowcheck screenshot subsequentcheck',
+ self.gbuildparser.builddir)
+ result += self.generate_launch(6, 'Run LibreOffice',
+ os.path.join(self.gbuildparser.instdir, 'program/soffice.bin'), '',
+ self.gbuildparser.instdir)
+ return result
+
+ launchestemplate = """
+[Launch]
+Launch Configurations=%(launches)s
+
+"""
+
+ def write_modulebeef(self, moduledir, modulename):
+ beefdir = os.path.join(moduledir, '.kdev4')
+ os.mkdir(beefdir)
+ beeffile = open(os.path.join(beefdir, 'Module_%s.kdev4' % modulename), 'w')
+ beeffile.write(self.generate_buildsystem(moduledir))
+ beeffile.write(self.generate_launches(moduledir))
+ beeffile.close()
+
+ def write_modulestub(self, moduledir, modulename):
+ stubfile = open(os.path.join(moduledir, 'Module_%s.kdev4' % modulename), 'w')
+ stubfile.write(KdevelopIntegrationGenerator.modulestubtemplate % {'modulename': modulename,
+ 'builditem': self.encode_string(
+ 'Module_%s' % modulename)})
+ stubfile.close()
+
+ modulestubtemplate = """
+[Buildset]
+BuildItems=@Variant(\\x00\\x00\\x00\\t\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x0b\\x00\\x00\\x00\\x00\\x01%(builditem)s)
+
+[Project]
+Name=Module_%(modulename)s
+Manager=KDevCustomBuildSystem
+VersionControl=kdevgit
+"""
+
+ def write_includepaths(self, path):
+ includedirfile = open(os.path.join(path, '.kdev_include_paths'), 'w')
+ include = set()
+ for target in self.gbuildparser.target_by_path[path]:
+ include |= set(target.include)
+ includedirfile.write('\n'.join(include))
+ includedirfile.close()
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def emit(self):
+ for path in self.gbuildparser.target_by_path:
+ self.write_includepaths(path)
+ for location in self.gbuildparser.target_by_location:
+ for f in os.listdir(location):
+ if f.endswith('.kdev4'):
+ try:
+ os.remove(os.path.join(location, f))
+ except OSError:
+ shutil.rmtree(os.path.join(location, f))
+ for location in self.gbuildparser.target_by_location:
+ modulename = os.path.split(location)[1]
+ self.write_modulestub(location, modulename)
+ self.write_modulebeef(location, modulename)
+
+
+class XcodeIntegrationGenerator(IdeIntegrationGenerator):
+
+ def indent(self, file, level):
+ if level == 0:
+ return
+ for i in range(0, level):
+ file.write(' ')
+
+ def write_object(self, object, file, indent):
+ if isinstance(object, int):
+ file.write('%d' % object)
+ elif isinstance(object, str) and not re.search('[^A-Za-z0-9_]', object):
+ file.write('%s' % object)
+ elif isinstance(object, str):
+ file.write('"%s"' % object)
+ elif isinstance(object, dict):
+ self.write_dict(object, file, indent)
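+        # NB: list values (e.g. 'children' or 'buildPhases') take none of the
+        # branches above and are silently dropped; the generator is rudimentary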
+
+ # Write a dictionary out as an "old-style (NeXT) ASCII plist"
+ def write_dict(self, dict, file, indent):
+ file.write('{')
+ file.write('\n')
+ for key in sorted(dict.keys()):
+ self.indent(file, indent + 1)
+ file.write('%s = ' % key)
+ self.write_object(dict[key], file, indent + 1)
+ file.write(';\n')
+ self.indent(file, indent)
+ file.write('}')
+
+ def write_dict_to_plist(self, dict, file):
+ file.write('// !$*UTF8*$!\n')
+ self.write_dict(dict, file, 0)
+
+ def get_product_type(self, modulename):
+ if modulename in self.gbuildparser.libs:
+ return 'com.apple.product-type.library.dynamic'
+ elif modulename in self.gbuildparser.exes:
+ return 'com.apple.product-type.something'
+
+ counter = 0
+
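+    # real Xcode projects use 24-hex-digit object IDs here, but any token
+    # that is unique within the project file works, so a counter suffices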
+ def generate_id(self):
+ XcodeIntegrationGenerator.counter = XcodeIntegrationGenerator.counter + 1
+ return str('X%07x' % XcodeIntegrationGenerator.counter)
+
+ def generate_build_phases(self, modulename):
+ result = [self.sourcesBuildPhaseId]
+ return result
+
+ def generate_root_object(self, modulename):
+ result = {'isa': 'PBXProject',
+ 'attributes': {'LastUpgradeCheck': '0500',
+ 'ORGANIZATIONNAME': 'LibreOffice'},
+ 'buildConfigurationList': self.generate_id(),
+ 'compatibilityVersion': 'Xcode 3.2',
+ 'hasScannedForEncodings': 0,
+ 'knownRegions': ['en'],
+ 'mainGroup': self.mainGroupId,
+ 'productRefGroup': self.productRefGroupId,
+ 'projectDirPath': '',
+ 'projectRoot': '',
+ 'targets': self.targetId}
+ return result
+
+ def generate_target(self, modulename):
+ result = {'isa': 'PBXNativeTarget',
+ 'buildConfigurationList': self.generate_id(),
+ 'buildPhases': self.generate_build_phases(modulename),
+ 'buildRules': [],
+ 'dependencies': [],
+ 'name': modulename,
+ 'productName': modulename,
+ 'productReference': self.productReferenceId,
+ 'productType': self.get_product_type(modulename)}
+ return result
+
+ def generate_main_group(self, modulename):
+ result = {'isa': 'PBXGroup',
+ 'children': [self.subMainGroupId, self.productGroupId],
+ 'sourceTree': '<group>'}
+ return result
+
+ def generate_sub_main_children(self, modulename):
+ return {}
+
+ def generate_sub_main_group(self, modulename):
+ result = {'isa': 'PBXGroup',
+ 'children': self.generate_sub_main_children(modulename),
+ 'path': modulename,
+ 'sourceTree': '<group>'}
+ return result
+
+ def generate_product_group(self, modulename):
+ result = {'isa': 'PBXGroup',
+ 'children': [self.productReferenceId],
+ 'name': 'Products',
+ 'sourceTree': '<group>'}
+ return result
+
+ def build_source_list(self, module):
+ self.sourceRefList = {}
+ self.sourceList = {}
+
+ for i in module.cxxobjects:
+ ref = self.generate_id()
+ self.sourceList[self.generate_id()] = ref
+ self.sourceRefList[ref] = {'lastKnownFileType': 'sourcecode.cpp.cpp',
+ 'path': i + '.cxx',
+ 'sourceTree': '<group>'}
+
+ def generate_sources_build_phase(self, modulename):
+ result = {'isa': 'PBXSourcesBuildPhase',
+ 'buildActionMask': 2147483647,
+ 'files': self.sourceList.keys(),
+ 'runOnlyForDeploymentPostprocessing': 0}
+ return result
+
+ def generate_project(self, target):
+ self.rootObjectId = self.generate_id()
+ self.mainGroupId = self.generate_id()
+ self.subMainGroupId = self.generate_id()
+ self.productReferenceId = self.generate_id()
+ self.productRefGroupId = self.generate_id()
+ self.productGroupId = self.generate_id()
+ self.targetId = self.generate_id()
+ self.build_source_list(target)
+ self.sourcesBuildPhaseId = self.generate_id()
+ objects = {self.rootObjectId: self.generate_root_object(target),
+ self.targetId: self.generate_target(target),
+ self.mainGroupId: self.generate_main_group(target),
+ self.subMainGroupId: self.generate_sub_main_group(target),
+ self.productGroupId: self.generate_product_group(target),
+ self.sourcesBuildPhaseId: self.generate_sources_build_phase(target)
+ }
+ for i in self.sourceList.keys():
+ ref = self.sourceList[i]
+ objects[i] = {'isa': 'PBXBuildFile',
+ 'fileRef': ref}
+ objects[ref] = {'isa': 'PBXFileReference',
+ 'lastKnownFileType': self.sourceRefList[ref]['lastKnownFileType'],
+ 'path': self.sourceRefList[ref]['path']}
+ project = {'archiveVersion': 1,
+ 'classes': {},
+ 'objectVersion': 46,
+ 'objects': objects,
+ 'rootObject': self.rootObjectId}
+ return project
+
+ # For some reverse-engineered documentation on the project.pbxproj format,
+ # see http://www.monobjc.net/xcode-project-file-format.html .
+ def write_xcodeproj(self, moduledir, target):
+ xcodeprojdir = os.path.join(moduledir, '%s.xcodeproj' % target.target_name())
+        try:
+            os.mkdir(xcodeprojdir)
+        except OSError:
+            # the directory may already exist
+            pass
+ self.write_dict_to_plist(self.generate_project(target),
+ open(os.path.join(xcodeprojdir, 'project.pbxproj'), 'w'))
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+
+ def emit(self):
+ self.rootlocation = './'
+ for location in self.gbuildparser.target_by_location:
+ # module = location.split('/')[-1]
+ # module_directory = os.path.join(self.rootlocation, module)
+ for target in self.gbuildparser.target_by_location[location]:
+ # project_path = os.path.join(module_directory, '%s.pbxroj' % target.target_name())
+ self.write_xcodeproj(location, target)
+
+
+class VisualStudioIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+ self.toolset = self.retrieve_toolset(ide)
+ self.solution_directory = self.gbuildparser.builddir
+ self.configurations = {
+ 'Build': {
+ 'build': self.module_make_command('%(target)s'),
+ 'clean': self.module_make_command('%(target)s.clean'),
+ 'rebuild': self.module_make_command('%(target)s.clean %(target)s')
+ },
+ 'Unit Tests': {
+ 'build': self.module_make_command('unitcheck'),
+ 'clean': self.module_make_command('clean'),
+ 'rebuild': self.module_make_command('clean unitcheck'),
+ },
+ 'Integration tests': {
+ 'build': self.module_make_command('unitcheck slowcheck screenshot subsequentcheck'),
+ 'clean': self.module_make_command('clean'),
+ 'rebuild': self.module_make_command('clean unitcheck slowcheck screenshot subsequentcheck')
+ }
+ }
+
+ def retrieve_toolset(self, ide):
+ ide_toolset_map = {'vs2017': 'v141', 'vs2019': 'v142'}
+ return ide_toolset_map[ide]
+
+ def module_make_command(self, targets):
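+        # the %(...)s placeholders are filled in per target from nmake_params
+        # when write_project() emits the NMake command lines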
+ return '%(sh)s -c "PATH=\\"/bin:$PATH\\";BUILDDIR=\\"%(builddir)s\\" %(makecmd)s -rsC %(location)s ' + targets + '"'
+
+ class Project:
+
+ def __init__(self, guid, target, project_path):
+ self.guid = guid
+ self.target = target
+ self.path = project_path
+
+ def emit(self):
+ all_projects = []
+ for location in self.gbuildparser.target_by_location:
+ projects = []
+ module = location.split('/')[-1]
+ module_directory = os.path.join(self.solution_directory, module)
+ for target in self.gbuildparser.target_by_location[location]:
+ project_path = os.path.join(module_directory, '%s.vcxproj' % target.target_name())
+ project_guid = self.write_project(project_path, target)
+ p = VisualStudioIntegrationGenerator.Project(project_guid, target, project_path)
+ projects.append(p)
+ self.write_solution(os.path.join(module_directory, '%s.sln' % module), projects)
+ all_projects += projects
+
+ self.write_solution(os.path.join(self.solution_directory, 'LibreOffice.sln'), all_projects)
+
+ nmake_project_guid = '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+
+ def get_dependency_libs(self, linked_libs, library_projects):
+ dependency_libs = {}
+ for linked_lib in linked_libs:
+ for library_project in library_projects:
+ if library_project.target.library_name() == linked_lib:
+ dependency_libs[library_project.guid] = library_project
+ return dependency_libs
+
+ def write_solution(self, solution_path, projects):
+ print('Solution %s:' % os.path.splitext(os.path.basename(solution_path))[0], end='')
+ library_projects = [project for project in projects if project.target in self.gbuildparser.libs]
+ with open(solution_path, 'w') as f:
+ f.write('Microsoft Visual Studio Solution File, Format Version 12.00\n')
+ for project in projects:
+ target = project.target
+ print(' %s' % target.target_name(), end='')
+ proj_path = os.path.relpath(project.path, os.path.abspath(os.path.dirname(solution_path)))
+ f.write('Project("{%s}") = "%s", "%s", "{%s}"\n' %
+ (VisualStudioIntegrationGenerator.nmake_project_guid,
+ target.short_name(), proj_path, project.guid))
+ libs_in_solution = self.get_dependency_libs(target.linked_libs,
+ library_projects)
+ if libs_in_solution:
+ f.write('\tProjectSection(ProjectDependencies) = postProject\n')
+ for lib_guid in libs_in_solution.keys():
+ f.write('\t\t{%(guid)s} = {%(guid)s}\n' % {'guid': lib_guid})
+ f.write('\tEndProjectSection\n')
+ f.write('EndProject\n')
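+            # 2150E333-8FDC-42A3-9474-1A3956D46DE8 is Visual Studio's
+            # well-known project-type GUID for a solution folder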
+ f.write('Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{B9292527-A979-4D13-A598-C75A33222174}"\n')
+ f.write('\tProjectSection(SolutionItems) = preProject\n')
+ # The natvis file gives pretty-printed variable values when debugging
+            natvis_path = os.path.join(self.gbuildparser.srcdir, 'solenv/vs/LibreOffice.natvis')
+ f.write('\t\t%(natvis)s = %(natvis)s\n' % {'natvis': natvis_path})
+ f.write('\tEndProjectSection\n')
+ f.write('EndProject\n')
+ f.write('Global\n')
+ platform = 'Win32'
+ f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
+ for cfg in self.configurations:
+ f.write('\t\t%(cfg)s|%(platform)s = %(cfg)s|%(platform)s\n' % {'cfg': cfg, 'platform': platform})
+ f.write('\tEndGlobalSection\n')
+ f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
+ # Specifies project configurations for solution configuration
+ for project in projects:
+ for cfg in self.configurations:
+ params = {'guid': project.guid, 'sol_cfg': cfg, 'proj_cfg': cfg, 'platform': platform}
+ f.write('\t\t{%(guid)s}.%(sol_cfg)s|%(platform)s.ActiveCfg = %(proj_cfg)s|%(platform)s\n' % params)
+ # Build.0 is basically 'Build checkbox' in configuration manager
+ f.write('\t\t{%(guid)s}.%(sol_cfg)s|%(platform)s.Build.0 = %(proj_cfg)s|%(platform)s\n' % params)
+ f.write('\tEndGlobalSection\n')
+ f.write('EndGlobal\n')
+ print('')
+
+ @staticmethod
+ def to_long_names(shortnames):
+ if platform == "cygwin":
+ return (subprocess.check_output(["cygpath", "-wal"] + shortnames).decode("utf-8", "strict").rstrip()).split("\n")
+ else:
+ return shortnames
+
+ @staticmethod
+ def defs_list(defs):
+ defines_list = []
+ # List defines
+ for key, value in defs.items():
+ define = key
+ if value is not None:
+ define += '=' + value
+ defines_list.append(define)
+ return defines_list
+
+ def write_project(self, project_path, target):
+ # See info at http://blogs.msdn.com/b/visualstudio/archive/2010/05/14/a-guide-to-vcxproj-and-props-file-structure.aspx
+ folder = os.path.dirname(project_path)
+ if not os.path.exists(folder):
+ os.makedirs(folder)
+ project_guid = str(uuid.uuid4()).upper()
+ cxxflags = ' '.join(target.cxxflags)
+ ns = 'http://schemas.microsoft.com/developer/msbuild/2003'
+ ET.register_namespace('', ns)
+ proj_node = ET.Element('{%s}Project' % ns, DefaultTargets='Build', ToolsVersion='4.0')
+ proj_confs_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns, Label='ProjectConfigurations')
+ platform = 'Win32'
+ for configuration in self.configurations:
+ proj_conf_node = ET.SubElement(proj_confs_node,
+ '{%s}ProjectConfiguration' % ns,
+ Include='%s|%s' % (configuration, platform))
+ conf_node = ET.SubElement(proj_conf_node, '{%s}Configuration' % ns)
+ conf_node.text = configuration
+ platform_node = ET.SubElement(proj_conf_node, '{%s}Platform' % ns)
+ platform_node.text = platform
+
+ globals_node = ET.SubElement(proj_node, '{%s}PropertyGroup' % ns, Label='Globals')
+ proj_guid_node = ET.SubElement(globals_node, '{%s}ProjectGuid' % ns)
+ proj_guid_node.text = '{%s}' % project_guid
+ proj_keyword_node = ET.SubElement(globals_node, '{%s}Keyword' % ns)
+ proj_keyword_node.text = 'MakeFileProj'
+ proj_name_node = ET.SubElement(globals_node, '{%s}ProjectName' % ns)
+ proj_name_node.text = target.short_name()
+
+ ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ for configuration in self.configurations:
+ conf_node = ET.SubElement(proj_node, '{%s}PropertyGroup' % ns, Label="Configuration",
+ Condition="'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform))
+ # Type of project used by the MSBuild to determine build process, see Microsoft.Makefile.targets
+ conf_type_node = ET.SubElement(conf_node, '{%s}ConfigurationType' % ns)
+ conf_type_node.text = 'Makefile'
+ # This defines the version of Visual Studio which can show next to project names in the Solution Explorer
+ platform_toolset_node = ET.SubElement(conf_node, '{%s}PlatformToolset' % ns)
+ platform_toolset_node.text = self.toolset
+
+ ET.SubElement(proj_node, '{%s}Import' % ns, Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='ExtensionSettings')
+ for configuration in self.configurations:
+ prop_sheets_node = ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='Configuration',
+ Condition="'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform))
+ ET.SubElement(prop_sheets_node, '{%s}Import' % ns,
+                          Project=r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props',
+                          Condition=r"exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')",
+ Label='LocalAppDataPlatform')
+
+ ET.SubElement(proj_node, '{%s}PropertyGroup' % ns, Label='UserMacros')
+ # VS IDE (at least "Peek definition") is allergic to paths like "C:/PROGRA~2/WI3CF2~1/10/Include/10.0.14393.0/um"; see
+ # https://developercommunity.visualstudio.com/content/problem/139659/vc-peek-definition-fails-to-navigate-to-windows-ki.html
+        # We need to convert to long paths here. Do this once, since it is a time-consuming operation.
+ include_path_node_text = ';'.join(self.to_long_names(target.include))
+ for cfg_name, cfg_targets in self.configurations.items():
+ conf_node = ET.SubElement(proj_node, '{%s}PropertyGroup' % ns,
+ Condition="'$(Configuration)|$(Platform)'=='%s|%s'" % (cfg_name, platform))
+ nmake_params = {
+ 'sh': os.path.join(self.gbuildparser.binpath, 'dash.exe'),
+ 'builddir': self.gbuildparser.builddir,
+ 'location': target.location,
+ 'makecmd': self.gbuildparser.makecmd,
+ 'target': target.target_name()}
+ nmake_build_node = ET.SubElement(conf_node, '{%s}NMakeBuildCommandLine' % ns)
+ nmake_build_node.text = cfg_targets['build'] % nmake_params
+ nmake_clean_node = ET.SubElement(conf_node, '{%s}NMakeCleanCommandLine' % ns)
+ nmake_clean_node.text = cfg_targets['clean'] % nmake_params
+ nmake_rebuild_node = ET.SubElement(conf_node, '{%s}NMakeReBuildCommandLine' % ns)
+ nmake_rebuild_node.text = cfg_targets['rebuild'] % nmake_params
+ nmake_output_node = ET.SubElement(conf_node, '{%s}NMakeOutput' % ns)
+ nmake_output_node.text = os.path.join(self.gbuildparser.instdir, 'program', 'soffice.bin')
+ nmake_defs_node = ET.SubElement(conf_node, '{%s}NMakePreprocessorDefinitions' % ns)
+ nmake_defs_node.text = ';'.join(self.defs_list(target.defs) + ['$(NMakePreprocessorDefinitions)'])
+ include_path_node = ET.SubElement(conf_node, '{%s}IncludePath' % ns)
+ include_path_node.text = include_path_node_text
+ additional_options_node = ET.SubElement(conf_node, '{%s}AdditionalOptions' % ns)
+ additional_options_node.text = cxxflags
+
+ ET.SubElement(proj_node, '{%s}ItemDefinitionGroup' % ns)
+
+ cxxobjects_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ for cxxobject in target.cxxobjects:
+ cxxabspath = os.path.join(self.gbuildparser.srcdir, cxxobject)
+ cxxfile = cxxabspath + '.cxx'
+ if os.path.isfile(cxxfile):
+ ET.SubElement(cxxobjects_node, '{%s}ClCompile' % ns, Include=cxxfile)
+ else:
+ print('Source %s in project %s does not exist' % (cxxfile, target.target_name()))
+
+ cobjects_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ for cobject in target.cobjects:
+ cabspath = os.path.join(self.gbuildparser.srcdir, cobject)
+ cfile = cabspath + '.c'
+ if os.path.isfile(cfile):
+ ET.SubElement(cobjects_node, '{%s}ClCompile' % ns, Include=cfile)
+ else:
+ print('Source %s in project %s does not exist' % (cfile, target.target_name()))
+
+ includes_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ for cxxobject in target.cxxobjects:
+ include_abs_path = os.path.join(self.gbuildparser.srcdir, cxxobject)
+ hxxfile = include_abs_path + '.hxx'
+ if os.path.isfile(hxxfile):
+ ET.SubElement(includes_node, '{%s}ClInclude' % ns, Include=hxxfile)
+            # A few files have corresponding .h files
+ hfile = include_abs_path + '.h'
+ if os.path.isfile(hfile):
+ ET.SubElement(includes_node, '{%s}ClInclude' % ns, Include=hfile)
+ for cobject in target.cobjects:
+ include_abs_path = os.path.join(self.gbuildparser.srcdir, cobject)
+ hfile = include_abs_path + '.h'
+ if os.path.isfile(hfile):
+ ET.SubElement(includes_node, '{%s}ClInclude' % ns, Include=hfile)
+        ET.SubElement(proj_node, '{%s}Import' % ns, Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(proj_node, '{%s}ImportGroup' % ns, Label='ExtensionTargets')
+ self.write_pretty_xml(proj_node, project_path)
+ self.write_filters(project_path + '.filters',
+ os.path.join(self.gbuildparser.srcdir, os.path.basename(target.location)),
+ [cxx_node.get('Include') for cxx_node in cxxobjects_node.findall('{%s}ClCompile' % ns)],
+ [c_node.get('Include') for c_node in cobjects_node.findall('{%s}ClCompile' % ns)],
+ [include_node.get('Include') for include_node in includes_node.findall('{%s}ClInclude' % ns)])
+ return project_guid
+
+ def get_filter(self, module_dir, proj_file):
+ return '\\'.join(os.path.relpath(proj_file, module_dir).split('/')[:-1])
+
+ def get_subfilters(self, proj_filter):
+ parts = proj_filter.split('\\')
+ subfilters = set([proj_filter]) if proj_filter else set()
+ for i in range(1, len(parts)):
+ subfilters.add('\\'.join(parts[:i]))
+ return subfilters
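+    # Illustrative example (filter hypothetical): get_subfilters('source\\core')
+    # returns {'source', 'source\\core'}, so every intermediate filter node is
+    # also created in write_filters below.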
+
+ def write_pretty_xml(self, node, file_path):
+ xml_str = ET.tostring(node, encoding='unicode')
+ pretty_str = minidom.parseString(xml_str).toprettyxml(encoding='utf-8')
+ with open(file_path, 'w') as f:
+ f.write(pretty_str.decode())
+
+ def add_nodes(self, files_node, module_dir, tag, project_files):
+ ns = 'http://schemas.microsoft.com/developer/msbuild/2003'
+ filters = set()
+ for project_file in project_files:
+ file_node = ET.SubElement(files_node, tag, Include=project_file)
+ if os.path.commonprefix([module_dir, project_file]) == module_dir:
+ project_filter = self.get_filter(module_dir, project_file)
+ filter_node = ET.SubElement(file_node, '{%s}Filter' % ns)
+ filter_node.text = project_filter
+ filters |= self.get_subfilters(project_filter)
+ return filters
+
+ def write_filters(self, filters_path, module_dir, cxx_files, c_files, include_files):
+ ns = 'http://schemas.microsoft.com/developer/msbuild/2003'
+ ET.register_namespace('', ns)
+ proj_node = ET.Element('{%s}Project' % ns, ToolsVersion='4.0')
+ filters = set()
+ compiles_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ filters |= self.add_nodes(compiles_node, module_dir, '{%s}ClCompile' % ns, cxx_files)
+ filters |= self.add_nodes(compiles_node, module_dir, '{%s}ClCompile' % ns, c_files)
+ include_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ filters |= self.add_nodes(include_node, module_dir, '{%s}ClInclude' % ns, include_files)
+
+ filters_node = ET.SubElement(proj_node, '{%s}ItemGroup' % ns)
+ for proj_filter in filters:
+ filter_node = ET.SubElement(filters_node, '{%s}Filter' % ns, Include=proj_filter)
+ filter_id_node = ET.SubElement(filter_node, '{%s}UniqueIdentifier' % ns)
+ filter_id_node.text = '{%s}' % str(uuid.uuid4())
+ self.write_pretty_xml(proj_node, filters_path)
+
+
+class QtCreatorIntegrationGenerator(IdeIntegrationGenerator):
+
+ def __init__(self, gbuildparser, ide):
+ IdeIntegrationGenerator.__init__(self, gbuildparser, ide)
+ self.target_by_location = {}
+ for target in set(self.gbuildparser.libs) | set(self.gbuildparser.exes) | set(self.gbuildparser.tests):
+ if target.location not in self.target_by_location:
+ self.target_by_location[target.location] = set()
+ self.target_by_location[target.location] |= set([target])
+
+ self._do_log = False # set to 'True' to activate log of QtCreatorIntegrationGenerator
+ if self._do_log:
+ qtlog_path = os.path.abspath('../qtlog_.txt')
+ self.qtlog = open(qtlog_path, 'w')
+
+ def _log(self, message):
+ if self._do_log:
+ self.qtlog.write(message)
+
+ def log_close(self):
+ if self._do_log:
+ self.qtlog.close()
+
+ def generate_build_configs(self, lib_folder):
+ module_folder = os.path.join(self.base_folder, lib_folder)
+ xml = ""
+        # In the QtCreator UI, build configs are listed alphabetically,
+        # which can differ from the creation order,
+        # so we prefix each name with its index.
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '0',
+ 'base_folder': module_folder,
+ 'arg': "",
+ 'name': "1-Build %s" % lib_folder,
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '1',
+ 'base_folder': module_folder,
+ 'arg': "unitcheck",
+ 'name': "2-Local tests -- quick tests (unitcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '2',
+ 'base_folder': module_folder,
+ 'arg': "unitcheck slowcheck screenshot",
+ 'name': "3-Local tests -- slow tests (unitcheck, slowcheck, screenshot)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '3',
+ 'base_folder': module_folder,
+ 'arg': "unitcheck slowcheck screenshot subsequentcheck",
+ 'name': "4-Local tests -- integration tests (unitcheck, slowcheck, screenshot, subsequentcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '4',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck",
+ 'name': "5-Global tests -- quick tests (unitcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '5',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck slowcheck screenshot",
+ 'name': "6-Global tests -- slow tests (unitcheck, slowcheck, screenshot)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '6',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck slowcheck screenshot subsequentcheck",
+ 'name': "7-Global tests -- integration tests (unitcheck, slowcheck, screenshot, subsequentcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '7',
+ 'base_folder': self.base_folder,
+ 'arg': "build-nocheck",
+ 'name': "8-Global build -- nocheck",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '8',
+ 'base_folder': self.base_folder,
+ 'arg': "",
+ 'name': "9-Global build",
+ }
+
+ xml += QtCreatorIntegrationGenerator.build_configs_count_template % {
+ 'nb': '9',
+ }
+ return xml
+
+ def generate_meta_build_configs(self):
+ xml = ""
+        # In the QtCreator UI, build configs are listed alphabetically,
+        # which can differ from the creation order,
+        # so we prefix each name with its index.
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '0',
+ 'base_folder': self.base_folder,
+ 'arg': "",
+ 'name': "01-Global Build",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '1',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck",
+ 'name': "02-Global tests -- quick tests (unitcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '2',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck slowcheck screenshot",
+ 'name': "03-Global tests -- slow tests (unitcheck, slowcheck, screenshot)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '3',
+ 'base_folder': self.base_folder,
+ 'arg': "unitcheck slowcheck screenshot subsequentcheck",
+ 'name': "04-Global tests -- integration tests (unitcheck, slowcheck, screenshot, subsequentcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '4',
+ 'base_folder': self.base_folder,
+ 'arg': "perfcheck",
+ 'name': "05-Global tests -- performance tests (perfcheck)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '5',
+ 'base_folder': self.base_folder,
+ 'arg': "check",
+ 'name': "06-Global tests -- tests (check)",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '6',
+ 'base_folder': self.base_folder,
+ 'arg': "build-nocheck",
+ 'name': "07-Global build -- nocheck",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '7',
+ 'base_folder': self.base_folder,
+ 'arg': "build-l10n-only",
+ 'name': "08-Global build -- build-l10n-only",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '8',
+ 'base_folder': self.base_folder,
+ 'arg': "build-non-l10n-only",
+ 'name': "09-Global build -- build-non-l10n-only",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '9',
+ 'base_folder': self.base_folder,
+ 'arg': "clean",
+ 'name': "10-Global build -- clean",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '10',
+ 'base_folder': self.base_folder,
+ 'arg': "clean-build",
+ 'name': "11-Global build -- clean-build",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_template % {
+ 'index': '11',
+ 'base_folder': self.base_folder,
+ 'arg': "clean-host",
+ 'name': "12-Global build -- clean-host",
+ }
+ xml += QtCreatorIntegrationGenerator.build_configs_count_template % {
+ 'nb': '12',
+ }
+ return xml
+
+    # By default, QtCreator creates two BuildStepLists: "Build" and "Clean",
+    # but the "Clean" one can be empty.
+ build_configs_template = """
+ <valuemap type="QVariantMap" key="ProjectExplorer.Target.BuildConfiguration.%(index)s">
+ <value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">%(base_folder)s</value>
+
+ <valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
+
+ <valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
+ <value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Make</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
+ <valuelist type="QVariantList" key="Qt4ProjectManager.MakeStep.AutomaticallyAddedMakeArguments">
+ <value type="QString">-w</value>
+ <value type="QString">-r</value>
+ </valuelist>
+ <value type="bool" key="Qt4ProjectManager.MakeStep.Clean">false</value>
+ <value type="QString" key="Qt4ProjectManager.MakeStep.MakeArguments">%(arg)s</value>
+ <value type="QString" key="Qt4ProjectManager.MakeStep.MakeCommand"></value>
+ </valuemap>
+
+ <value type="int" key="ProjectExplorer.BuildStepList.StepsCount">1</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Build</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Build</value>
+ </valuemap>
+
+ <value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">1</value>
+ <value type="bool" key="ProjectExplorer.BuildConfiguration.ClearSystemEnvironment">false</value>
+ <valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.UserEnvironmentChanges"/>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">%(name)s</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.Qt4BuildConfiguration</value>
+ <value type="int" key="Qt4ProjectManager.Qt4BuildConfiguration.BuildConfiguration">%(index)s</value>
+ <value type="bool" key="Qt4ProjectManager.Qt4BuildConfiguration.UseShadowBuild">true</value>
+ </valuemap>
+ """
+
+ build_configs_count_template = """
+ <!-- nb build configurations -->
+ <value type="int" key="ProjectExplorer.Target.BuildConfigurationCount">%(nb)s</value>
+ """
+
+ def generate_deploy_configs(self, lib_folder):
+ xml = QtCreatorIntegrationGenerator.deploy_configs_template % {}
+ return xml
+
+ deploy_configs_template = """
+ <valuemap type="QVariantMap" key="ProjectExplorer.Target.DeployConfiguration.0">
+ <valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
+ <value type="int" key="ProjectExplorer.BuildStepList.StepsCount">0</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Deploy</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Deploy</value>
+ </valuemap>
+ <value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">1</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Deploy locally</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.DefaultDeployConfiguration</value>
+ </valuemap>
+ <value type="int" key="ProjectExplorer.Target.DeployConfigurationCount">1</value>
+ """
+
+ def generate_run_configs(self, lib_folder):
+
+        # Using 'soffice' would work for "Run" but not for "Debug",
+        # so we use "soffice.bin", which works for both.
+ loexec = "%s/instdir/program/soffice.bin" % self.base_folder
+ xml = QtCreatorIntegrationGenerator.run_configs_template % {
+ 'loexec': loexec,
+ 'workdir': self.base_folder
+ }
+ return xml
+
+ run_configs_template = """
+ <valuemap type="QVariantMap" key="ProjectExplorer.Target.RunConfiguration.0">
+ <valuelist type="QVariantList" key="Analyzer.Valgrind.AddedSuppressionFiles"/>
+ <value type="bool" key="Analyzer.Valgrind.Callgrind.CollectBusEvents">false</value>
+ <value type="bool" key="Analyzer.Valgrind.Callgrind.CollectSystime">false</value>
+ <value type="bool" key="Analyzer.Valgrind.Callgrind.EnableBranchSim">false</value>
+ <value type="bool" key="Analyzer.Valgrind.Callgrind.EnableCacheSim">false</value>
+ <value type="bool" key="Analyzer.Valgrind.Callgrind.EnableEventToolTips">true</value>
+ <value type="double" key="Analyzer.Valgrind.Callgrind.MinimumCostRatio">0.01</value>
+ <value type="double" key="Analyzer.Valgrind.Callgrind.VisualisationMinimumCostRatio">10</value>
+ <value type="bool" key="Analyzer.Valgrind.FilterExternalIssues">true</value>
+ <value type="int" key="Analyzer.Valgrind.LeakCheckOnFinish">1</value>
+ <value type="int" key="Analyzer.Valgrind.NumCallers">25</value>
+ <valuelist type="QVariantList" key="Analyzer.Valgrind.RemovedSuppressionFiles"/>
+ <value type="int" key="Analyzer.Valgrind.SelfModifyingCodeDetection">1</value>
+ <value type="bool" key="Analyzer.Valgrind.Settings.UseGlobalSettings">true</value>
+ <value type="bool" key="Analyzer.Valgrind.ShowReachable">false</value>
+ <value type="bool" key="Analyzer.Valgrind.TrackOrigins">true</value>
+ <value type="QString" key="Analyzer.Valgrind.ValgrindExecutable">valgrind</value>
+ <valuelist type="QVariantList" key="Analyzer.Valgrind.VisibleErrorKinds">
+ <value type="int">0</value>
+ <value type="int">1</value>
+ <value type="int">2</value>
+ <value type="int">3</value>
+ <value type="int">4</value>
+ <value type="int">5</value>
+ <value type="int">6</value>
+ <value type="int">7</value>
+ <value type="int">8</value>
+ <value type="int">9</value>
+ <value type="int">10</value>
+ <value type="int">11</value>
+ <value type="int">12</value>
+ <value type="int">13</value>
+ <value type="int">14</value>
+ </valuelist>
+ <value type="int" key="PE.EnvironmentAspect.Base">2</value>
+ <valuelist type="QVariantList" key="PE.EnvironmentAspect.Changes"/>
+
+ <value type="QString" key="ProjectExplorer.CustomExecutableRunConfiguration.Arguments"></value>
+ <value type="QString" key="ProjectExplorer.CustomExecutableRunConfiguration.Executable">%(loexec)s</value>
+ <value type="bool" key="ProjectExplorer.CustomExecutableRunConfiguration.UseTerminal">false</value>
+ <value type="QString" key="ProjectExplorer.CustomExecutableRunConfiguration.WorkingDirectory">%(workdir)s</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Run libreoffice/instdir/program/soffice</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName"></value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.CustomExecutableRunConfiguration</value>
+ <value type="uint" key="RunConfiguration.QmlDebugServerPort">3768</value>
+ <value type="bool" key="RunConfiguration.UseCppDebugger">false</value>
+ <value type="bool" key="RunConfiguration.UseCppDebuggerAuto">true</value>
+ <value type="bool" key="RunConfiguration.UseMultiProcess">false</value>
+ <value type="bool" key="RunConfiguration.UseQmlDebugger">false</value>
+ <value type="bool" key="RunConfiguration.UseQmlDebuggerAuto">true</value>
+
+ </valuemap>
+ <value type="int" key="ProjectExplorer.Target.RunConfigurationCount">1</value>
+ """
+
+ def generate_pro_user_content(self, lib_folder):
+
+ build_configs = self.generate_build_configs(lib_folder)
+ deploy_configs = self.generate_deploy_configs(lib_folder)
+ run_configs = self.generate_run_configs(lib_folder)
+
+ xml = QtCreatorIntegrationGenerator.pro_user_template % {
+ 'build_configs': build_configs,
+ 'deploy_configs': deploy_configs,
+ 'run_configs': run_configs,
+ }
+ return xml
+
+ def generate_meta_pro_user_content(self):
+
+ build_configs = self.generate_meta_build_configs()
+ deploy_configs = self.generate_deploy_configs("")
+ run_configs = self.generate_run_configs("")
+
+ xml = QtCreatorIntegrationGenerator.pro_user_template % {
+ 'build_configs': build_configs,
+ 'deploy_configs': deploy_configs,
+ 'run_configs': run_configs,
+ }
+ return xml
+
+ pro_user_template = """<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE QtCreatorProject>
+<!-- Written by QtCreator 3.1.1, 2015-05-14T15:54:34. -->
+<qtcreator>
+ <data>
+ <variable>ProjectExplorer.Project.ActiveTarget</variable>
+ <value type="int">0</value>
+ </data>
+
+ <!-- editor settings -->
+ <data>
+ <variable>ProjectExplorer.Project.EditorSettings</variable>
+ <valuemap type="QVariantMap">
+ <value type="bool" key="EditorConfiguration.AutoIndent">true</value>
+ <value type="bool" key="EditorConfiguration.AutoSpacesForTabs">false</value>
+ <value type="bool" key="EditorConfiguration.CamelCaseNavigation">true</value>
+ <valuemap type="QVariantMap" key="EditorConfiguration.CodeStyle.0">
+ <value type="QString" key="language">Cpp</value>
+ <valuemap type="QVariantMap" key="value">
+ <value type="QByteArray" key="CurrentPreferences">CppGlobal</value>
+ </valuemap>
+ </valuemap>
+ <valuemap type="QVariantMap" key="EditorConfiguration.CodeStyle.1">
+ <value type="QString" key="language">QmlJS</value>
+ <valuemap type="QVariantMap" key="value">
+ <value type="QByteArray" key="CurrentPreferences">QmlJSGlobal</value>
+ </valuemap>
+ </valuemap>
+ <value type="int" key="EditorConfiguration.CodeStyle.Count">2</value>
+ <value type="QByteArray" key="EditorConfiguration.Codec">UTF-8</value>
+ <value type="bool" key="EditorConfiguration.ConstrainTooltips">false</value>
+ <value type="int" key="EditorConfiguration.IndentSize">4</value>
+ <value type="bool" key="EditorConfiguration.KeyboardTooltips">false</value>
+ <value type="int" key="EditorConfiguration.MarginColumn">80</value>
+ <value type="bool" key="EditorConfiguration.MouseHiding">true</value>
+ <value type="bool" key="EditorConfiguration.MouseNavigation">true</value>
+ <value type="int" key="EditorConfiguration.PaddingMode">1</value>
+ <value type="bool" key="EditorConfiguration.ScrollWheelZooming">true</value>
+ <value type="bool" key="EditorConfiguration.ShowMargin">false</value>
+ <value type="int" key="EditorConfiguration.SmartBackspaceBehavior">1</value>
+ <value type="bool" key="EditorConfiguration.SpacesForTabs">true</value>
+ <value type="int" key="EditorConfiguration.TabKeyBehavior">0</value>
+ <value type="int" key="EditorConfiguration.TabSize">8</value>
+ <value type="bool" key="EditorConfiguration.UseGlobal">true</value>
+ <value type="int" key="EditorConfiguration.Utf8BomBehavior">1</value>
+ <value type="bool" key="EditorConfiguration.addFinalNewLine">true</value>
+ <value type="bool" key="EditorConfiguration.cleanIndentation">true</value>
+ <value type="bool" key="EditorConfiguration.cleanWhitespace">true</value>
+ <value type="bool" key="EditorConfiguration.inEntireDocument">false</value>
+ </valuemap>
+ </data>
+
+ <data>
+ <variable>ProjectExplorer.Project.PluginSettings</variable>
+ <valuemap type="QVariantMap"/>
+ </data>
+
+ <!-- target -->
+ <data>
+ <variable>ProjectExplorer.Project.Target.0</variable>
+ <valuemap type="QVariantMap">
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Desktop</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">Desktop</value>
+ <value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">{0701de51-c96e-4e4f-85c3-e70b223c5076}</value>
+ <value type="int" key="ProjectExplorer.Target.ActiveBuildConfiguration">0</value>
+ <value type="int" key="ProjectExplorer.Target.ActiveDeployConfiguration">0</value>
+ <value type="int" key="ProjectExplorer.Target.ActiveRunConfiguration">0</value>
+
+ <!-- build configurations -->
+ %(build_configs)s
+
+ <!-- deploy configurations -->
+ %(deploy_configs)s
+
+ <!-- plugin settings -->
+ <valuemap type="QVariantMap" key="ProjectExplorer.Target.PluginSettings"/>
+
+ <!-- run configurations -->
+ %(run_configs)s
+
+ </valuemap>
+ </data>
+ <!-- nb targets : 1 -->
+ <data>
+ <variable>ProjectExplorer.Project.TargetCount</variable>
+ <value type="int">1</value>
+ </data>
+ <data>
+ <variable>ProjectExplorer.Project.Updater.EnvironmentId</variable>
+ <value type="QByteArray">{5abcafed-86f6-49f6-b1cb-380fadd21211}</value>
+ </data>
+ <data>
+ <variable>ProjectExplorer.Project.Updater.FileVersion</variable>
+ <value type="int">15</value>
+ </data>
+</qtcreator>
+"""
+
+ def remove_qt_files(self):
+
+ def do_remove_file(loc, afile):
+ try:
+ os.remove(os.path.join(loc, afile))
+ self._log("removed %s\n" % afile)
+ except OSError:
+ self._log("unable to remove %s\n" % afile)
+
+ do_remove_file(self.base_folder, "lo.pro")
+ do_remove_file(self.base_folder, "lo.pro.user")
+ for location in self.target_by_location:
+ for f in os.listdir(location):
+ if f.endswith('.pro') or f.endswith('.pro.user'):
+ do_remove_file(location, f)
+
+ def get_source_extension(self, src_file):
+ path = os.path.join(self.base_folder, src_file)
+ for ext in (".cxx", ".cpp", ".c", ".mm"):
+ if os.path.isfile(path + ext):
+ return ext
+ return ""
+
+ def get_header_extension(self, src_file):
+ path = os.path.join(self.base_folder, src_file)
+ for ext in (".hxx", ".hpp", ".h"):
+ if os.path.isfile(path + ext):
+ return ext
+ return ""
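+    # Illustrative example (object name hypothetical): for 'sw/source/core/doc/docnew'
+    # these two helpers probe docnew.cxx/.cpp/.c/.mm and docnew.hxx/.hpp/.h on
+    # disk and return the first extension that exists, or '' if none does.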
+
+ def build_data_libs(self):
+
+ self.data_libs = {}
+
+ all_libs = set(self.gbuildparser.libs) | set(self.gbuildparser.exes) | set(self.gbuildparser.tests)
+ for lib in all_libs:
+ self._log("\nlibrary : %s, loc=%s" % (lib.short_name(), lib.location))
+ lib_name = os.path.basename(lib.location)
+ lib_folder = os.path.relpath(lib.location, self.base_folder)
+
+ def lopath(path):
+                if platform == "cygwin":
+ # absolute paths from GbuildToJson are Windows paths,
+ # so convert everything to such ones
+ abs_path = path
+ if not ntpath.isabs(abs_path):
+ abs_path = ntpath.join(self.gbuildparser.srcdir, path)
+ return ntpath.relpath(abs_path, lib.location).replace('\\', '/')
+
+ return os.path.relpath(path, lib.location)
+
+ defines_list = []
+ sources_list = []
+ includepath_list = []
+            # The explicit headers list is not mandatory:
+            # QtCreator only needs 'includepath_list' to find all header files,
+            # but files listed in 'headers_list' are shown in a dedicated
+            # "Headers" folder in QtCreator's Project panel.
+            # We list here only the header files of the current lib.
+ headers_list = []
+ for file_ in lib.cxxobjects:
+                # the file has no extension: look it up
+ # self._log("\n file : %s" % file_)
+ ext = self.get_source_extension(file_)
+ if ext:
+ sources_list.append(lopath(file_ + ext))
+
+                # a few cxxobject files have a companion header
+ ext = self.get_header_extension(file_)
+ if ext:
+ headers_list.append(lopath(file_ + ext))
+
+ cxxflags_list = []
+ for cxxflag in lib.cxxflags:
+ # extract flag for C++ standard version
+ if cxxflag.startswith('-std'):
+ cxxflags_list.append(cxxflag)
+
+ # List all include paths
+ for hdir in (lib.include + lib.include_sys):
+ hf_lopath = lopath(hdir)
+ includepath_list.append(hf_lopath)
+
+ # List headers files from current lib
+ for hdir in lib.include:
+ if hdir.startswith(lib.location):
+ for dirpath, _, files in os.walk(hdir):
+ for hf in files:
+ if hf.endswith(('.h', '.hxx', '.hpp', '.hrc')):
+ hf_lopath = lopath(os.path.join(dirpath, hf))
+ headers_list.append(hf_lopath)
+
+ # List defines
+ for key, value in lib.defs.items():
+ define = key
+ if value is not None:
+ define += '=' + value
+ defines_list.append(define)
+
+ # All data are prepared, store them for the lib.
+ if lib_folder in self.data_libs:
+ self.data_libs[lib_folder]['sources'] |= set(sources_list)
+ self.data_libs[lib_folder]['headers'] |= set(headers_list)
+ self.data_libs[lib_folder]['cxxflags'] |= set(cxxflags_list)
+ self.data_libs[lib_folder]['includepath'] |= set(includepath_list)
+ self.data_libs[lib_folder]['defines'] |= set(defines_list)
+ else:
+ self.data_libs[lib_folder] = {
+ 'sources': set(sources_list),
+ 'headers': set(headers_list),
+ 'cxxflags': set(cxxflags_list),
+ 'includepath': set(includepath_list),
+ 'defines': set(defines_list),
+ 'loc': lib.location,
+ 'name': lib_name
+ }
+
+ def emit(self):
+
+ self.base_folder = self.gbuildparser.builddir
+
+ # we remove existing '.pro' and '.pro.user' files
+ self.remove_qt_files()
+
+        # For .pro files, we must explicitly list all files (.c, .h),
+        # so we cannot directly reuse the same method as for the kde integration.
+ self.build_data_libs()
+
+ subdirs_list = self.data_libs.keys()
+ # Now we can create Qt files
+ for lib_folder in subdirs_list:
+ sources_list = sorted(self.data_libs[lib_folder]['sources'])
+ headers_list = sorted(self.data_libs[lib_folder]['headers'])
+ cxxflags_list = sorted(self.data_libs[lib_folder]['cxxflags'])
+ includepath_list = sorted(self.data_libs[lib_folder]['includepath'])
+ defines_list = sorted(self.data_libs[lib_folder]['defines'])
+ lib_loc = self.data_libs[lib_folder]['loc']
+ lib_name = self.data_libs[lib_folder]['name']
+
+ sources = " \\\n".join(sources_list)
+ headers = " \\\n".join(headers_list)
+ cxxflags = " \\\n".join(cxxflags_list)
+ includepath = " \\\n".join(includepath_list)
+ defines = " \\\n".join(defines_list)
+
+ # create .pro file
+ qt_pro_file = '%s/%s.pro' % (lib_loc, lib_name)
+ try:
+ content = QtCreatorIntegrationGenerator.pro_template % {'sources': sources, 'headers': headers,
+ 'cxxflags': cxxflags, 'includepath': includepath, 'defines': defines}
+                with open(qt_pro_file, 'w+') as fpro:
+ fpro.write(content)
+ self._log("created %s\n" % qt_pro_file)
+
+ except Exception as e:
+ print("ERROR : creating pro file=" + qt_pro_file, file=sys.stderr)
+ print(e, file=sys.stderr)
+                temp = traceback.format_exc()
+ print(temp, file=sys.stderr)
+ print("\n\n", file=sys.stderr)
+
+ # create .pro.user file
+ qt_pro_user_file = '%s/%s.pro.user' % (lib_loc, lib_name)
+ try:
+                with open(qt_pro_user_file, 'w+') as fprouser:
+ fprouser.write(self.generate_pro_user_content(lib_folder))
+ self._log("created %s\n" % qt_pro_user_file)
+
+ except Exception as e:
+ print("ERROR : creating pro.user file=" + qt_pro_user_file, file=sys.stderr)
+ print(e, file=sys.stderr)
+ temp = traceback.format_exc()
+ print(temp, file=sys.stderr)
+ print("\n\n", file=sys.stderr)
+
+ # create meta .pro file (lists all sub projects)
+ qt_meta_pro_file = 'lo.pro'
+ try:
+ subdirs = " \\\n".join(sorted(subdirs_list))
+ content = QtCreatorIntegrationGenerator.pro_meta_template % {'subdirs': subdirs}
+ with open(qt_meta_pro_file, 'w+') as fmpro:
+ fmpro.write(content)
+
+ except Exception as e:
+ print("ERROR : creating lo.pro file=" + qt_meta_pro_file, file=sys.stderr)
+ print(e, file=sys.stderr)
+ temp = traceback.format_exc()
+ print(temp, file=sys.stderr)
+ print("\n\n", file=sys.stderr)
+
+ # create meta .pro.user file
+ qt_meta_pro_user_file = 'lo.pro.user'
+ try:
+            with open(qt_meta_pro_user_file, 'w+') as fmprouser:
+ fmprouser.write(self.generate_meta_pro_user_content())
+ self._log("created %s\n" % qt_meta_pro_user_file)
+
+ except Exception as e:
+ print("ERROR : creating lo.pro.user file=" + qt_meta_pro_user_file, file=sys.stderr)
+ print(e, file=sys.stderr)
+ temp = traceback.format_exc()
+ print(temp, file=sys.stderr)
+ print("\n\n", file=sys.stderr)
+
+ self.log_close()
+
+ pro_template = """TEMPLATE = app
+CONFIG += console
+CONFIG -= app_bundle
+CONFIG -= qt
+
+QMAKE_CXXFLAGS += %(cxxflags)s
+
+INCLUDEPATH += %(includepath)s
+
+SOURCES += %(sources)s
+
+HEADERS += %(headers)s
+
+DEFINES += %(defines)s
+
+"""
+ pro_meta_template = """TEMPLATE = subdirs
+
+SUBDIRS = %(subdirs)s
+"""
+
+
+def get_options():
+ parser = argparse.ArgumentParser(
+ description='LibreOffice gbuild IDE project generator')
+ parser.add_argument('--ide', dest='ide', required=True,
+ help='the IDE to generate project files for')
+ parser.add_argument('--make', dest='makecmd', required=True,
+ help='the command to execute make')
+ return parser.parse_args()
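+# Illustrative invocation (normally driven by the build system rather than run
+# by hand):
+#   bin/gbuild-to-ide --ide vim --make /usr/bin/make
+# where --ide must be one of the generator keys listed in main() below.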
+
+
+if __name__ == '__main__':
+ args = get_options()
+ # FIXME: Hack
+ if args.makecmd == 'make':
+ args.makecmd = '/usr/bin/make'
+
+ generators = {
+ 'codelite': CodeliteIntegrationGenerator,
+ 'eclipsecdt': EclipseCDTIntegrationGenerator,
+ 'kdevelop': KdevelopIntegrationGenerator,
+ 'xcode': XcodeIntegrationGenerator,
+ 'vs2017': VisualStudioIntegrationGenerator,
+ 'vs2019': VisualStudioIntegrationGenerator,
+ 'vim': VimIntegrationGenerator,
+ 'debug': DebugIntegrationGenerator,
+ 'qtcreator': QtCreatorIntegrationGenerator,
+ }
+
+ if args.ide not in generators.keys():
+        print("Invalid IDE. Valid values are: %s" % ', '.join(generators.keys()))
+ sys.exit(1)
+
+ gbuildparser = GbuildParser(args.makecmd).parse()
+
+ generators[args.ide](gbuildparser, args.ide).emit()
+ print("Successfully created the project files.")
+
+# Local Variables:
+# indent-tabs-mode: nil
+# End:
+#
+# vim: set et sw=4 ts=4:
diff --git a/bin/gen-boost-headers b/bin/gen-boost-headers
new file mode 100755
index 000000000..5fe0e3a20
--- /dev/null
+++ b/bin/gen-boost-headers
@@ -0,0 +1,67 @@
+#!/bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# generate a bunch of dummy headers that wrap the crappy boost headers and
+# suppress a myriad of warnings; requires GCC's #include_next extension
+
+set -euo pipefail
+IFS=$'\n\t'
+
+GENDIR=${SRCDIR}/external/boost/include
+
+rm -rf ${GENDIR}
+mkdir ${GENDIR}
+
+# note: clucene contains a copy of half of boost, so ignore it too
+# note: firebird contains a copy of half of boost, so ignore it too
+
+cat <(cd ${SRCDIR} && git grep -h '^# *include') \
+ <(find ${WORKDIR}/UnpackedTarball/ -mindepth 1 -maxdepth 1 -type d \
+ | grep -v boost \
+ | grep -v clucene \
+ | grep -v firebird \
+ | xargs grep -hr '^# *include') \
+ | grep -o '\bboost.*\.\(h\|hpp\|ipp\)' \
+ | sort | uniq \
+ | while read -r HEADER; do
+ mkdir -p "$(dirname ${GENDIR}/${HEADER})"
+ cat > "${GENDIR}/${HEADER}" << _EOF
+/* generated by $0, do not edit! */
+#pragma once
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wpragmas" /* first! for GCC */
+#pragma GCC diagnostic ignored "-Wunknown-warning-option" // second! for Clang 5
+#pragma GCC diagnostic ignored "-Wdelete-non-virtual-dtor"
+#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#pragma GCC diagnostic ignored "-Wdeprecated-copy"
+#pragma GCC diagnostic ignored "-Wdeprecated-copy-dtor"
+#pragma GCC diagnostic ignored "-Wextra"
+#pragma GCC diagnostic ignored "-Wignored-qualifiers"
+#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"
+#pragma GCC diagnostic ignored "-Winvalid-constexpr"
+#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
+#pragma GCC diagnostic ignored "-Wmicrosoft-unqualified-friend"
+#pragma GCC diagnostic ignored "-Wnon-virtual-dtor"
+#pragma GCC diagnostic ignored "-Wparentheses"
+#pragma GCC diagnostic ignored "-Wplacement-new"
+#pragma GCC diagnostic ignored "-Wreturn-type"
+#pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wstrict-aliasing"
+#pragma GCC diagnostic ignored "-Wtautological-constant-out-of-range-compare"
+#pragma GCC diagnostic ignored "-Wtype-limits"
+#pragma GCC diagnostic ignored "-Wundef"
+#pragma GCC diagnostic ignored "-Wunused-local-typedefs"
+#pragma GCC diagnostic ignored "-Wunused-macros"
+#pragma GCC diagnostic ignored "-Wunused-parameter"
+#pragma GCC diagnostic ignored "-Wunused-variable"
+#include_next <${HEADER}>
+#pragma GCC diagnostic pop
+_EOF
+ done
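+# Illustrative result (header name hypothetical): an '#include <boost/foo.hpp>'
+# found above yields ${GENDIR}/boost/foo.hpp, whose #include_next pulls in the
+# real Boost header with all of the warning pragmas in effect.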
+
diff --git a/bin/gen-iwyu-dummy-lib b/bin/gen-iwyu-dummy-lib
new file mode 100755
index 000000000..c7d64817d
--- /dev/null
+++ b/bin/gen-iwyu-dummy-lib
@@ -0,0 +1,79 @@
+#!/bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Create a makefile that builds every non-generated header as a source file.
+# This should help to ensure the headers are self-contained and don't
+# impose unnecessary requirements (unnecessary includes) on client code.
+#
+# This script is fully compliant with the UNIX philosophy
+# (and if you can't read it you are clearly not worthy)
+
+set -e
+
+iwyu_INCLUDES=$(grep -h -r ":$" "$BUILDDIR"/workdir/Dep/*Object* \
+ | grep -v 'workdir\|config_host' | grep -v "^/usr" \
+ | sed -e "s,^${SRCDIR}/,," | sed -e "s/:$//" | sort -u)
+
+iwyu_INCLUDEDIRS=$(echo "$iwyu_INCLUDES" | sed -e "s,/[^/]*$,," | grep -v "^include" | sort -u)
+
+iwyu_EXTERNALS=$(ls "$SRCDIR"/*/*Library*mk "$SRCDIR"/*/*Executable*mk \
+ | xargs awk -f "$SRCDIR"/bin/gen-iwyu-dummy-lib.awk \
+ | grep -v '$(\|)\|\\$\|apr\|breakpad\|bzip2\|expat_x64\|mDNSResponder\|serf\|zlib_x64')
+
+iwyu_DIR="$BUILDDIR"/iwyudummy/
+mkdir -p "$iwyu_DIR"
+
+{
+ echo 'module_directory:=$(dir $(realpath $(firstword $(MAKEFILE_LIST))))'
+ echo "include ${SRCDIR}/solenv/gbuild/partial_build.mk"
+} > "$iwyu_DIR"Makefile
+
+{
+ echo '$(eval $(call gb_Module_Module,iwyudummy))'
+ echo '$(eval $(call gb_Module_add_targets,iwyudummy,StaticLibrary_iwyudummy))'
+} > "$iwyu_DIR"Module_iwyudummy.mk
+
+{
+ # prevent some common configuration errors
+ echo 'ifneq ($(COMPILER_PLUGINS),)'
+ echo ' $(call gb_Output_error,--enable-compiler-plugins does not work well with this: bailing out)'
+ echo 'endif'
+
+ echo '$(eval $(call gb_StaticLibrary_StaticLibrary,iwyudummy))'
+ # clang will "compile" headers to .gch by default
+ echo '$(eval $(call gb_StaticLibrary_add_cxxflags,iwyudummy,-x c++ -D__cplusplus=201402L -D__STDC_VERSION__=201112L -Wno-unused-macros -Wno-unused-const-variable))'
+ echo '$(eval $(call gb_StaticLibrary_use_custom_headers,iwyudummy,officecfg/registry))'
+ echo '$(eval $(call gb_StaticLibrary_use_sdk_api,iwyudummy))'
+ echo '$(eval $(call gb_StaticLibrary_use_externals,iwyudummy,\'
+ for ext in ${iwyu_EXTERNALS}; do
+ echo "${ext} \\";
+ done
+ echo '))'
+
+ echo '$(eval $(call gb_StaticLibrary_set_include,iwyudummy,\'
+ echo '$$(INCLUDE) \'
+ for dir in ${iwyu_INCLUDEDIRS}; do
+ if echo "$dir" | grep ".*/inc/" &>/dev/null; then
+ iwyu_INCLUDEDIRS_EXTRA+=" ${dir%/inc/*}/inc"
+ fi
+ done
+ for dir in $(echo ${iwyu_INCLUDEDIRS_EXTRA} | sed -e "s/ /\n/g" | uniq) ${iwyu_INCLUDEDIRS}; do
+ echo "-I${SRCDIR}/${dir} \\";
+ done
+    # without this, clang apparently fails to find stddef.h
+ echo "-I/usr/lib/clang/$(llvm-config --version)/include \\"
+ echo "))"
+
+ echo '$(eval $(call gb_StaticLibrary__add_iwyu_headers,iwyudummy,\'
+ for hdr in ${iwyu_INCLUDES}; do
+ echo "${hdr} \\";
+ done
+ echo '))'
+} > "$iwyu_DIR"StaticLibrary_iwyudummy.mk
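+
+# Illustrative follow-up (commands hypothetical): with SRCDIR and BUILDDIR set,
+# run this script and then invoke make inside "$BUILDDIR"/iwyudummy to compile
+# each listed header as its own translation unit.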
+
diff --git a/bin/gen-iwyu-dummy-lib.awk b/bin/gen-iwyu-dummy-lib.awk
new file mode 100644
index 000000000..464d9515c
--- /dev/null
+++ b/bin/gen-iwyu-dummy-lib.awk
@@ -0,0 +1,34 @@
+BEGIN { domatch = 0; }
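+
+# Illustrative behaviour (makefile input hypothetical): given
+#   $(eval $(call gb_Library_use_externals,foo,\
+#       boost_headers \
+#       icuuc \
+#   ))
+# each external is printed once (boost_headers, icuuc); a one-line
+# use_external call ending in "))" is handled by the first branch below.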
+
+{
+ if ($0 ~ /use_external(s)?,/ )
+ {
+ if (index($0, "))"))
+ {
+ gsub(/.*,/, "");
+ gsub(/\)+/, "");
+ if (!($0 in exts))
+ {
+ exts[$0];
+ print $0;
+ }
+ }
+ else
+ {
+ domatch = 1;
+ }
+ }
+ else if ($0 ~ /\)\)/ )
+ {
+ domatch = 0;
+ }
+ else if (domatch == 1)
+ {
+ if (!($1 in exts))
+ {
+ exts[$1];
+ print $1;
+ }
+ }
+}
+
diff --git a/bin/generate-bash-completion.py b/bin/generate-bash-completion.py
new file mode 100755
index 000000000..0702a3635
--- /dev/null
+++ b/bin/generate-bash-completion.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+"""
+Script to generate LibreOffice bash_completion file for the main applications
+"""
+
+import argparse
+import sys
+
+MASTERDOCS = ["sxg", "odm", "sgl"]
+
+BASEDOCS = ["odb"]
+
+CALCDOCS = ["sxc", "stc", "dif", "dbf", "xls", "xlw", "xlt", "rtf", "sdc", "vor",
+ "slk", "txt", "htm", "html", "wk1", "wks", "123", "xml", "ods", "ots",
+ "fods", "csv", "xlsb", "xlsm", "xlsx", "xltm", "xltx"]
+
+DRAWDOCS = ["sxd", "std", "dxf", "emf", "eps", "met", "pct", "sgf", "sgv", "sda",
+ "sdd", "vor", "svm", "wmf", "bmp", "gif", "jpg", "jpeg", "jfif", "fif",
+ "jpe", "pcd", "pcx", "pgm", "png", "ppm", "psd", "ras", "tga", "tif",
+ "tiff", "xbm", "xpm", "odg", "otg", "fodg", "odc", "odi", "sds",
+ "wpg", "svg", "vdx", "vsd", "vsdm", "vsdx"]
+
+IMPRESSDOCS = ["sxi", "sti", "ppt", "pps", "pot", "sxd", "sda", "sdd", "sdp",
+ "vor", "cgm", "odp", "otp", "fodp", "ppsm", "ppsx", "pptm", "pptx",
+ "potm", "potx"]
+
+MATHDOCS = ["sxm", "smf", "mml", "odf"]
+
+WEBDOCS = ["htm", "html", "stw", "txt", "vor", "oth"]
+
+WRITERDOCS = ["doc", "dot", "rtf", "sxw", "stw", "sdw", "vor", "txt", "htm?",
+ "xml", "wp", "wpd", "wps", "odt", "ott", "fodt", "docm", "docx",
+ "dotm", "dotx"]
+
+TEMPLATES = ["stw", "dot", "vor", "stc", "xlt", "sti", "pot", "std", "stw",
+ "dotm", "dotx", "potm", "potx", "xltm", "xltx"]
+
+ALLDOCS = MASTERDOCS + BASEDOCS + CALCDOCS + DRAWDOCS + IMPRESSDOCS + MATHDOCS + WEBDOCS + WRITERDOCS + TEMPLATES
+
+EXTENSIONS = ["oxt"]
+
+
+class App(object):
+ def __init__(self, name, compat_name, suffix_list):
+ self.name = name
+ self.compat_name = compat_name
+ self.suffix_list = suffix_list
+
+
+class SetAppCompatName(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ setattr(namespace, self.dest, True)
+ for app in APPS.values():
+ app.name = app.compat_name
+
+
+class SetAppName(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ APPS[self.dest].name = values
+
+
+# default names of the LO wrappers
+# use "" as the name to disable a given wrapper
+APPS = {
+ 'office': App("libreoffice", 'openoffice', ALLDOCS), # libreoffice should contain all
+ 'office_short': App("loffice", 'ooffice', ALLDOCS), # libreoffice should contain all
+ 'master': App("", '', MASTERDOCS),
+ 'base': App("lobase", 'oobase', BASEDOCS),
+ 'calc': App("localc", 'oocalc', CALCDOCS),
+ 'draw': App("lodraw", 'oodraw', DRAWDOCS),
+ 'impress': App("loimpress", 'ooimpress', IMPRESSDOCS),
+ 'math': App("lomath", 'oomath', MATHDOCS),
+ 'template': App("lofromtemplate", 'oofromtemplate', TEMPLATES),
+ 'unopkg': App("unopkg", 'unopkg', EXTENSIONS), # unopkg is a standalone tool
+ 'web': App("loweb", 'ooweb', WEBDOCS),
+ 'writer': App("lowriter", 'oowriter', WRITERDOCS + MASTERDOCS)
+}
+
+
+def check_open(filename, mode):
+ try:
+ with open(filename, mode):
+ pass
+ except OSError as e:
+ mode = 'reading' if mode == 'r' else 'writing'
+ sys.exit("Error: can't open %s for %s: %s" % (filename, mode, e))
+
+
+def print_app_suffixes_check(out, app):
+ if not app.suffix_list:
+ sys.exit('Error: No suffix defined for %s' % app.name)
+
+ suffix_str = '|'.join(['%s|%s' % (s, s.upper()) for s in app.suffix_list])
+ out.write(" %s)\t\te=\'!*.+(%s)\' ;;\n" % (app.name, suffix_str))
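+
+# Illustrative output (wrapper names depend on configuration): for the math
+# wrapper the generated case branch looks like
+#   lomath)  e='!*.+(sxm|SXM|smf|SMF|mml|MML|odf|ODF)' ;;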
+
+
+def print_suffixes_check(out):
+ for app in APPS.values():
+ if not app.name: # skip the disabled wrapper
+ continue
+ print_app_suffixes_check(out, app)
+
+
+def main():
+ parser = argparse.ArgumentParser(description='Script to Generate bash completion for LO wrappers',
+                                     epilog='The other options allow redefining the wrapper names.\n'
+ 'The value "" can be used to disable any wrapper.',
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('input_file')
+ parser.add_argument('output_file')
+ parser.add_argument('--binsuffix', metavar='suffix',
+ help='defines a suffix that is added after each wrapper')
+ parser.add_argument('--compat-oowrappers', metavar='', nargs=0, action=SetAppCompatName, default=False,
+ help='set wrapper names to the old default oo* wrapper names')
+ for app in APPS:
+ parser.add_argument('--%s' % app, metavar='wrapper_name', action=SetAppName)
+
+ args = parser.parse_args()
+
+ check_open(args.input_file, 'r')
+ check_open(args.output_file, 'w')
+
+ # add binsuffix
+ if args.binsuffix:
+ for app in APPS.values():
+ if app.name:
+ app.name += args.binsuffix
+
+ if args.compat_oowrappers:
+ office_shell_function = '_ooexp_'
+ else:
+ office_shell_function = '_loexp_'
+
+ # the last app will be printed without the final backslash
+ apps_to_print = ' \\\n'.join(['\t\t\t\t\t%s' % app.name for app in APPS.values() if app.name])
+
+ with open(args.input_file, 'r') as in_fh, open(args.output_file, 'w') as out_fh:
+ for line in in_fh:
+ line = line.replace('@OFFICE_SHELL_FUNCTION@', office_shell_function)
+ if '@BASH_COMPLETION_SUFFIXES_CHECKS@' in line:
+ print_suffixes_check(out_fh)
+ elif '@BASH_COMPLETION_OOO_APPS@' in line:
+ if not apps_to_print:
+ sys.exit('Error: No LO wrapper was selected')
+ out_fh.write('%s\n' % apps_to_print)
+ else:
+ out_fh.write(line)
+
+
+if __name__ == '__main__':
+ main()
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/get-bugzilla-attachments-by-mimetype b/bin/get-bugzilla-attachments-by-mimetype
new file mode 100755
index 000000000..1d1f45165
--- /dev/null
+++ b/bin/get-bugzilla-attachments-by-mimetype
@@ -0,0 +1,584 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# This digs through a pile of bugzillas and populates the cwd with a big
+# collection of bug-docs in per-filetype dirs with bug-ids as names with
+# prefixes to indicate which bug-tracker, e.g.
+#
+# fdo-bugid-X.suffix
+# rhbz-bugid-X.suffix
+# moz-bugid-X.suffix
+#
+# where X is the n'th attachment of that type in the bug
+#
+# The results are stored in the current directory, categorized by the
+# extension of the downloaded file. When a file already exists, it is assumed
+# to have been downloaded by a previous run and to be up to date.
+
+from __future__ import print_function
+import feedparser
+import base64
+import datetime
+import glob
+import re
+import os, os.path
+import stat
+import sys
+import threading
+try:
+ import queue
+except ImportError:
+ import Queue as queue
+try:
+ from urllib.request import urlopen
+except ImportError:
+ from urllib import urlopen
+try:
+ import xmlrpc.client as xmlrpclib
+except ImportError:
+ import xmlrpclib
+from xml.dom import minidom
+from xml.sax.saxutils import escape
+
+def urlopen_retry(url):
+ maxretries = 3
+ for i in range(maxretries + 1):
+ try:
+ return urlopen(url)
+ except IOError as e:
+ print("caught IOError: " + str(e))
+ if maxretries == i:
+ raise
+ print("retrying...")
+
+def get_from_bug_url_via_xml(url, mimetype, prefix, suffix):
+ id = url.rsplit('=', 2)[1]
+ print("id is " + prefix + id + " " + suffix)
+ print("parsing " + id)
+ sock = urlopen_retry(url+"&ctype=xml")
+ dom = minidom.parse(sock)
+ sock.close()
+ attachmentid=0
+ for attachment in dom.getElementsByTagName('attachment'):
+ attachmentid += 1
+ print(" mimetype is", end=' ')
+ for node in attachment.childNodes:
+ if node.nodeName == 'type':
+ # check if attachment is deleted
+ if not node.firstChild:
+ print('deleted attachment, skipping')
+ continue
+
+ print(node.firstChild.nodeValue, end=' ')
+ if node.firstChild.nodeValue.lower() != mimetype.lower():
+ print('skipping')
+ break
+ elif node.nodeName == 'data':
+ # check if attachment is deleted (i.e. https://bugs.kde.org/show_bug.cgi?id=53343&ctype=xml)
+ if not node.firstChild:
+ print('deleted attachment, skipping')
+ continue
+
+                download = suffix + '/' + prefix + id + '-' + str(attachmentid) + '.' + suffix
+ if os.path.isfile(download):
+ print("assuming " + download + " is up to date")
+ continue
+
+ # prevent re-downloading FDO attachments from TDF
+ if prefix == "tdf" and int(id) < 88776:
+ fdodownload = download.replace("tdf", "fdo")
+ if os.path.isfile(fdodownload):
+ print("assuming FDO " + fdodownload + " is up to date")
+ continue
+
+ print('downloading as ' + download)
+ tmpfile = download + ".tmp"
+ f = open(tmpfile, 'wb')
+ f.write(base64.b64decode(node.firstChild.nodeValue))
+ f.close()
+ os.rename(tmpfile, download)
+ break
+
+def get_novell_bug_via_xml(url, mimetype, prefix, suffix):
+ id = url.rsplit('=', 2)[1]
+ print("id is " + prefix + id + " " + suffix)
+ print("parsing " + id)
+ sock = urlopen_retry(url+"&ctype=xml")
+ dom = minidom.parse(sock)
+ sock.close()
+ attachmentid=0
+ for comment in dom.getElementsByTagName('thetext'):
+ commentText = comment.firstChild.nodeValue
+ match = re.search(r".*Created an attachment \(id=([0-9]+)\)", commentText)
+ if not match:
+ continue
+
+ attachmentid += 1
+
+ download = suffix + '/' + prefix + id + '-' + str(attachmentid) + '.' + suffix
+ if os.path.isfile(download):
+ print("assuming " + download + " is up to date")
+ continue
+
+ realAttachmentId = match.group(1)
+ handle = urlopen_retry(novellattach + realAttachmentId)
+ if not handle:
+ print("attachment %s is not accessible" % realAttachmentId)
+ continue
+ print(" mimetype is", end=' ')
+
+ info = handle.info()
+        if hasattr(info, 'get_content_type'):
+ remoteMime = info.get_content_type()
+ else:
+ remoteMime = info.gettype()
+ print(remoteMime, end=' ')
+ if remoteMime != mimetype:
+ print("skipping")
+ continue
+
+ print('downloading as ' + download)
+ tmpfile = download + ".tmp"
+ f = open(tmpfile, 'wb')
+ f.write(handle.read())
+ f.close()
+ os.rename(tmpfile, download)
+
+def create_query(mimetype):
+ query = dict()
+ query['query_format']='advanced'
+ query['field0-0-0']='attachments.mimetype'
+ query['type0-0-0']='equals'
+ query['value0-0-0']=mimetype
+ return query
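+
+# Illustrative result: create_query('application/rtf') returns
+#   {'query_format': 'advanced', 'field0-0-0': 'attachments.mimetype',
+#    'type0-0-0': 'equals', 'value0-0-0': 'application/rtf'}
+# which the callers below join into a Bugzilla query URL for bugs carrying
+# attachments of that mimetype.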
+
+def get_downloaded_files(prefix, suffix):
+ return glob.glob(os.path.join(suffix, '%s*.%s' % (prefix, suffix)))
+
+def get_file_bz_ids(files, prefix):
+ return set([os.path.basename(f).split('-')[0].replace(prefix, '', 1) for f in files])
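+
+# Illustrative example (filenames hypothetical): get_downloaded_files('tdf', 'odt')
+# globs odt/tdf*.odt, and get_file_bz_ids(['odt/tdf12345-1.odt'], 'tdf') yields
+# {'12345'}: the set of bug ids for which files were already fetched.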
+
+def get_changed_date(files):
+ newest = max([os.stat(f)[stat.ST_MTIME] for f in files])
+ # Subtract a day to avoid timezone differences. The worst thing that
+ # can happen is that we are going to process more bugs than necessary.
+ return datetime.date.fromtimestamp(newest - 24 * 60 * 60)
+
+def get_through_rpc_query(rpcurl, showurl, mimetype, prefix, suffix):
+ try:
+ os.mkdir(suffix)
+ except:
+ pass
+
+ def process(query, full, have=[]):
+ try:
+ proxy = xmlrpclib.ServerProxy(rpcurl)
+ result = proxy.Bug.search(query)
+ bugs = result['bugs']
+ print(str(len(bugs)) + ' bugs to process')
+
+ if full:
+ available = set([str(bug['id']) for bug in bugs])
+ # we already have files from all available bugs
+ if available.difference(set(have)) == set():
+ print("assuming all downloaded files are up to date")
+ return
+
+ for bug in bugs:
+ url = showurl + str(bug['id'])
+ get_from_bug_url_via_xml(url, mimetype, prefix, suffix)
+ except xmlrpclib.Fault as err:
+ print("A fault occurred")
+ print("Fault code: %s" % err.faultCode)
+ print(err.faultString)
+
+ query = create_query(mimetype)
+ query['column_list']='bug_id'
+
+ files = get_downloaded_files(prefix, suffix)
+
+ if files != []:
+ print('looking for updated bugs having %s attachment(s)' % mimetype)
+ query_changed = query.copy()
+ query_changed['field0-1-0'] = 'days_elapsed'
+ query_changed['type0-1-0'] = 'lessthaneq'
+ query_changed['value0-1-0'] = str((datetime.date.today() - get_changed_date(files)).days)
+ process(query_changed, False)
+
+ print('looking for all bugs having %s attachment(s)' % mimetype)
+ process(query, True, get_file_bz_ids(files, prefix))
+
+def get_through_rss_query(queryurl, mimetype, prefix, suffix):
+ try:
+ os.mkdir(suffix)
+ except:
+ pass
+
+    # Getting detailed bug information and downloading attachment bodies is not
+    # possible without logging in to Novell Bugzilla; the get_novell_bug_via_xml
+    # function is a workaround for that situation.
+ get_bug_function = get_novell_bug_via_xml if prefix == "novell" else get_from_bug_url_via_xml
+
+ def process(query, full, have=[]):
+ url = queryurl + '?' + '&'.join(['='.join(kv) for kv in query.items()])
+ print('url is ' + url)
+ d = feedparser.parse(url)
+ print(str(len(d['entries'])) + ' bugs to process')
+
+        entries = d['entries']
+
+ if full:
+ available = set([str(entry['id'].split('=')[-1]) for entry in entries])
+ # we already have files from all available bugs
+ if available.difference(set(have)) == set():
+ print("assuming all downloaded files are up to date")
+ return
+
+ for entry in entries:
+ try:
+ get_bug_function(entry['id'], mimetype, prefix, suffix)
+ except KeyboardInterrupt:
+ raise # Ctrl+C should work
+ except:
+ print(entry['id'] + " failed: " + str(sys.exc_info()[0]))
+ pass
+
+ query = create_query(escape(mimetype.replace("+","%2B")))
+ query['ctype'] = 'rss'
+
+ files = get_downloaded_files(prefix, suffix)
+
+ if files != []:
+ print('looking for updated bugs having %s attachment(s)' % mimetype)
+ query_changed = query.copy()
+ query_changed['field0-1-0'] = 'delta_ts'
+ query_changed['type0-1-0'] = 'greaterthaneq'
+ query_changed['value0-1-0'] = get_changed_date(files).isoformat()
+ process(query_changed, False)
+
+ print('looking for all bugs having %s attachment(s)' % mimetype)
+ process(query, True, get_file_bz_ids(files, prefix))
+
+#Since searching for bugs by attachment mimetype is not available in the Launchpad
+#API, we iterate over all bugs of the most interesting source packages.
+launchpad_pkgs = (
+ "abiword",
+ "calibre",
+ "calligra",
+ "gnumeric",
+ "inkscape",
+ "koffice",
+ "libabw",
+ "libcdr",
+ "libe-book",
+ "libetonyek",
+ "libfreehand",
+ "libmspub",
+ "libmwaw",
+ "liborcus",
+ "libpagemaker",
+ "libreoffice",
+ "libvisio",
+ "libwpd",
+ "libwpg",
+ "libwps",
+ "openoffice.org",
+ "python-uniconvertor",
+ "scribus",
+ "sk1",
+ "unoconv",
+)
+
+def get_launchpad_bugs(prefix):
+    # launchpadlib Python module is required to download Launchpad attachments
+ from launchpadlib.launchpad import Launchpad
+
+ launchpad = Launchpad.login_anonymously("attachmentdownload", "production")
+ ubuntu = launchpad.distributions["ubuntu"]
+
+ for pkg in launchpad_pkgs:
+ srcpkg = ubuntu.getSourcePackage(name=pkg)
+ pkgbugs = srcpkg.searchTasks(status=["New", "Fix Committed", "Invalid", "Won't Fix", "Confirmed", "Triaged", "In Progress", "Incomplete", "Incomplete (with response)", "Incomplete (without response)", "Fix Released", "Opinion", "Expired"])
+
+ for bugtask in pkgbugs:
+ bug = bugtask.bug
+ id = str(bug.id)
+ print("parsing " + id + " status: " + bugtask.status + " title: " + bug.title[:50])
+ attachmentid = 0
+ for attachment in bug.attachments:
+ attachmentid += 1
+ handle = attachment.data.open()
+                if handle.content_type not in mimetypes:
+ #print "skipping"
+ continue
+
+ suffix = mimetypes[handle.content_type]
+ if not os.path.isdir(suffix):
+ try:
+ os.mkdir(suffix)
+ except:
+ pass
+
+ download = suffix + '/' + prefix + id + '-' + str(attachmentid) + '.' + suffix
+
+ if os.path.isfile(download):
+ print("assuming " + id + " is up to date")
+ break
+
+ print('mimetype is ' + handle.content_type + ' downloading as ' + download)
+
+ tmpfile = download + ".tmp"
+ f = open(tmpfile, "wb")
+ f.write(handle.read())
+ f.close()
+ os.rename(tmpfile, download)
+
+rss_bugzillas = (
+ ( 'abi', 'http://bugzilla.abisource.com/buglist.cgi' ), #added for abiword
+ ( 'fdo', 'http://bugs.freedesktop.org/buglist.cgi' ),
+ ( 'gentoo', 'http://bugs.gentoo.org/buglist.cgi' ),
+ ( 'gnome', 'http://bugzilla.gnome.org/buglist.cgi' ), # added for gnumeric
+ ( 'kde', 'http://bugs.kde.org/buglist.cgi' ), # added for koffice/calligra
+ ( 'mandriva', 'https://qa.mandriva.com/buglist.cgi' ),
+ ( 'moz', 'https://bugzilla.mozilla.org/buglist.cgi' ),
+ # It seems something has changed and it is no longer possible to
+ # download any files from there.
+ # NOTE: I am leaving it in the list, commented out, just so someone
+ # does not add it back immediately .-)
+ # 'novell': 'https://bugzilla.novell.com/buglist.cgi',
+# note: running this script against bz.apache.org apparently causes one's IP
+# to be blacklisted or something; you won't get new files in any case...
+# ( 'ooo', 'https://bz.apache.org/ooo/buglist.cgi' ),
+ ( 'tdf', 'http://bugs.documentfoundation.org/buglist.cgi' ),
+)
+
+redhatrpc = 'https://bugzilla.redhat.com/xmlrpc.cgi'
+redhatbug = 'https://bugzilla.redhat.com/show_bug.cgi?id='
+
+# Novell Bugzilla requires users to log in to get bug details such as attachment bodies.
+# As a dirty workaround, we parse comments containing "Created an attachment (id=xxxxxx)"
+# and download the attachments manually. python-bugzilla claims to support Novell Bugzilla
+# login, but it is not working right now, and the Novell Bugzilla login system is a nightmare.
+novellattach = 'https://bugzilla.novell.com/attachment.cgi?id='
+
+mimetypes = {
+# ODF
+ 'application/vnd.oasis.opendocument.base': 'odb',
+ 'application/vnd.oasis.opendocument.database': 'odb',
+ 'application/vnd.oasis.opendocument.chart': 'odc',
+ 'application/vnd.oasis.opendocument.chart-template': 'otc',
+ 'application/vnd.oasis.opendocument.formula': 'odf',
+ 'application/vnd.oasis.opendocument.formula-template': 'otf',
+ 'application/vnd.oasis.opendocument.graphics': 'odg',
+ 'application/vnd.oasis.opendocument.graphics-template': 'otg',
+ 'application/vnd.oasis.opendocument.graphics-flat-xml': 'fodg',
+ 'application/vnd.oasis.opendocument.presentation': 'odp',
+ 'application/vnd.oasis.opendocument.presentation-template': 'otp',
+ 'application/vnd.oasis.opendocument.presentation-flat-xml': 'fodp',
+ 'application/vnd.oasis.opendocument.spreadsheet': 'ods',
+ 'application/vnd.oasis.opendocument.spreadsheet-template': 'ots',
+ 'application/vnd.oasis.opendocument.spreadsheet-flat-xml': 'fods',
+ 'application/vnd.oasis.opendocument.text': 'odt',
+ 'application/vnd.oasis.opendocument.text-flat-xml': 'fodt',
+ 'application/vnd.oasis.opendocument.text-master': 'odm',
+ 'application/vnd.oasis.opendocument.text-template': 'ott',
+ 'application/vnd.oasis.opendocument.text-master-template': 'otm',
+ 'application/vnd.oasis.opendocument.text-web': 'oth',
+# OOo XML
+ 'application/vnd.sun.xml.base': 'odb',
+ 'application/vnd.sun.xml.calc': 'sxc',
+ 'application/vnd.sun.xml.calc.template': 'stc',
+ 'application/vnd.sun.xml.chart': 'sxs',
+ 'application/vnd.sun.xml.draw': 'sxd',
+ 'application/vnd.sun.xml.draw.template': 'std',
+ 'application/vnd.sun.xml.impress': 'sxi',
+ 'application/vnd.sun.xml.impress.template': 'sti',
+ 'application/vnd.sun.xml.math': 'sxm',
+ 'application/vnd.sun.xml.writer': 'sxw',
+ 'application/vnd.sun.xml.writer.global': 'sxg',
+ 'application/vnd.sun.xml.writer.template': 'stw',
+ 'application/vnd.sun.xml.writer.web': 'stw',
+# MSO
+ 'application/rtf': 'rtf',
+ 'text/rtf': 'rtf',
+ 'application/msword': 'doc',
+ 'application/vnd.ms-powerpoint': 'ppt',
+ 'application/vnd.ms-excel': 'xls',
+ 'application/vnd.ms-excel.sheet.binary.macroEnabled.12': 'xlsb',
+ 'application/vnd.ms-excel.sheet.macroEnabled.12': 'xlsm',
+ 'application/vnd.ms-excel.template.macroEnabled.12': 'xltm',
+ 'application/vnd.ms-powerpoint.presentation.macroEnabled.12': 'pptm',
+ 'application/vnd.ms-powerpoint.slide.macroEnabled.12': 'sldm',
+ 'application/vnd.ms-powerpoint.slideshow.macroEnabled.12': 'ppsm',
+ 'application/vnd.ms-powerpoint.template.macroEnabled.12': 'potm',
+ 'application/vnd.ms-word.document.macroEnabled.12': 'docm',
+ 'application/vnd.ms-word.template.macroEnabled.12': 'dotm',
+ 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'xlsx',
+ 'application/vnd.openxmlformats-officedocument.spreadsheetml.template': 'xltx',
+ 'application/vnd.openxmlformats-officedocument.presentationml.presentation': 'pptx',
+ 'application/vnd.openxmlformats-officedocument.presentationml.template': 'potx',
+ 'application/vnd.openxmlformats-officedocument.presentationml.slideshow': 'ppsx',
+ 'application/vnd.openxmlformats-officedocument.presentationml.slide': 'sldx',
+ 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'docx',
+ 'application/vnd.openxmlformats-officedocument.wordprocessingml.template': 'dotx',
+ 'application/vnd.visio': 'vsd',
+ 'application/visio.drawing': 'vsd',
+ 'application/vnd.visio2013': 'vsdx',
+ 'application/vnd.visio.xml': 'vdx',
+ 'application/x-mspublisher': 'pub',
+#WPS Office
+ 'application/wps-office.doc': 'doc',
+ 'application/wps-office.docx': 'docx',
+ 'application/wps-office.xls': 'xls',
+ 'application/wps-office.xlsx': 'xlsx',
+ 'application/wps-office.ppt': 'ppt',
+ 'application/wps-office.pptx': 'pptx',
+# W3C
+ 'application/xhtml+xml': 'xhtml',
+ 'application/mathml+xml': 'mml',
+ 'text/html': 'html',
+ 'application/docbook+xml': 'docbook',
+# misc
+ 'text/csv': 'csv',
+ 'text/spreadsheet': 'slk',
+ 'application/x-qpro': 'qpro',
+ 'application/x-dbase': 'dbf',
+ 'application/vnd.corel-draw': 'cdr',
+ 'application/vnd.lotus-wordpro': 'lwp',
+ 'application/vnd.lotus-1-2-3': 'wks',
+ 'application/vnd.wordperfect': 'wpd',
+ 'application/wordperfect5.1': 'wpd',
+ 'application/vnd.ms-works': 'wps',
+ 'application/clarisworks' : 'cwk',
+ 'application/macwriteii' : 'mw',
+ 'application/vnd.apple.keynote': 'key',
+ 'application/vnd.apple.numbers': 'numbers',
+ 'application/vnd.apple.pages': 'pages',
+ 'application/x-iwork-keynote-sffkey': 'key',
+ 'application/x-iwork-numbers-sffnumbers': 'numbers',
+ 'application/x-iwork-pages-sffpages': 'pages',
+ 'application/x-hwp': 'hwp',
+ 'application/x-aportisdoc': 'pdb',
+ 'application/prs.plucker' : 'pdb_plucker',
+ 'application/vnd.palm' : 'pdb_palm',
+ 'application/x-sony-bbeb' : 'lrf',
+ 'application/x-pocket-word': 'psw',
+ 'application/x-t602': '602',
+ 'application/x-fictionbook+xml': 'fb2',
+ 'application/x-abiword': 'abw',
+ 'application/x-pagemaker': 'pmd',
+ 'application/x-gnumeric': 'gnumeric',
+ 'application/vnd.stardivision.calc': 'sdc',
+ 'application/vnd.stardivision.draw': 'sda',
+ 'application/vnd.stardivision.writer': 'sdw',
+ 'application/x-starcalc': 'sdc',
+ 'application/x-stardraw': 'sdd',
+ 'application/x-starwriter': 'sdw',
+# relatively uncommon image mimetypes
+ 'image/x-freehand': 'fh',
+ 'image/cgm': 'cgm',
+ 'image/tif': 'tiff',
+ 'image/tiff': 'tiff',
+ 'image/vnd.dxf': 'dxf',
+ 'image/emf': 'emf',
+ 'image/x-emf': 'emf',
+ 'image/x-targa': 'tga',
+ 'image/x-sgf': 'sgf',
+ 'image/x-svm': 'svm',
+ 'image/wmf': 'wmf',
+ 'image/x-wmf': 'wmf',
+ 'image/x-pict': 'pict',
+ 'image/x-cmx': 'cmx',
+ 'image/svg+xml': 'svg',
+ 'image/bmp': 'bmp',
+ 'image/x-ms-bmp': 'bmp',
+ 'image/x-MS-bmp': 'bmp',
+ 'image/x-wpg': 'wpg',
+ 'image/x-eps': 'eps',
+ 'image/x-met': 'met',
+ 'image/x-portable-bitmap': 'pbm',
+ 'image/x-photo-cd': 'pcd',
+ 'image/x-pcx': 'pcx',
+ 'image/x-portable-graymap': 'pgm',
+ 'image/x-portable-pixmap': 'ppm',
+ 'image/vnd.adobe.photoshop': 'psd',
+ 'image/x-cmu-raster': 'ras',
+ 'image/x-sun-raster': 'ras',
+ 'image/x-xbitmap': 'xbm',
+ 'image/x-xpixmap': 'xpm',
+}
+
+# disabled for now, this would download gigs of pngs/jpegs...
+common_noncore_mimetypes = {
+# graphics
+ 'image/gif': 'gif',
+ 'image/jpeg': 'jpeg',
+ 'image/png': 'png',
+# pdf, etc.
+ 'application/pdf': 'pdf',
+}
+
+class manage_threads(threading.Thread):
+ def run(self):
+ #print(threading.current_thread().get_ident())
+        while True:
+ # Try to receive a job from queue
+ try:
+ # Get job from queue
+ # Use job parameters to call our query
+ # Then let the queue know we are done with this job
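+                # (wait up to 6 seconds for a job; if none arrives, queue.Empty ends this worker)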
+                (uri, mimetype, prefix, extension) = jobs.get(True, 6)
+ try:
+ get_through_rss_query(uri, mimetype, prefix, extension)
+ finally:
+ jobs.task_done()
+ except KeyboardInterrupt:
+ raise # Ctrl+C should work
+ except queue.Empty:
+ break
+
+def generate_multi_threading():
+ for (prefix, uri) in rss_bugzillas:
+
+ # Initialize threads
+ for i in range(max_threads):
+ manage_threads().start()
+
+ # Create a job for every mimetype for a bugzilla
+ for (mimetype,extension) in mimetypes.items():
+            # Bugzilla seems to have problems returning that many results
+            # (10000 results is probably a limit set somewhere), so we would
+            # never end up processing the complete list anyway; skip it.
+ if mimetype == 'text/html' and prefix == 'moz':
+ continue
+
+ jobs.put([uri, mimetype, prefix, extension], block=True)
+ print("successfully placed a job in the queue searching for " + mimetype + " in bugtracker " + prefix)
+
+ # Continue when all mimetypes are done for a bugzilla
+ jobs.join()
+ print("DONE with bugtracker " + prefix)
+
+max_threads = 20 # Number of threads to create (1 = no multi-threading)
+jobs = queue.Queue()
+
+generate_multi_threading()
+
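+# Red Hat's Bugzilla is queried through its XML-RPC interface rather than RSS.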
+for (mimetype,extension) in mimetypes.items():
+ get_through_rpc_query(redhatrpc, redhatbug, mimetype, "rhbz", extension)
+
+try:
+ get_launchpad_bugs("lp")
+except ImportError:
+ print("launchpadlib unavailable, skipping Ubuntu tracker")
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/get_config_variables b/bin/get_config_variables
new file mode 100644
index 000000000..60a2bdc04
--- /dev/null
+++ b/bin/get_config_variables
@@ -0,0 +1,23 @@
+#!/bin/sh
+#set -x
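+
+# Reads config_host.mk (or config_build.mk with --build) and exports the
+# named variables into the environment; meant to be sourced, e.g.:
+#   . ./bin/get_config_variables INSTDIR SRCDIR
+# (the variable names above are illustrative)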
+
+glv_var="$1"
+glv_config="config_host.mk"
+
+if [ "$glv_var" = "--build" ] ; then
+ glv_config="config_build.mk"
+ shift
+elif [ "$glv_var" = "--host" ] ; then
+ shift
+fi
+
+while [ -n "$1" ] ; do
+ glv_var="$1"
+ shift
+ glv_value=$(grep "^ *export ${glv_var}=" ${glv_config} | sed -e "s/[^=]*=//")
+ export ${glv_var}="${glv_value}"
+done
+
+unset glv_var
+unset glv_value
+unset glv_config
diff --git a/bin/git-ps1 b/bin/git-ps1
new file mode 100755
index 000000000..8a0980091
--- /dev/null
+++ b/bin/git-ps1
@@ -0,0 +1,52 @@
+#!/usr/bin/env bash
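+# Print the current git branch plus any operation in progress (rebase, am,
+# merge, bisect), for embedding in a shell prompt; an optional printf format
+# string may be passed as $1.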
+r=
+b=
+g="$(git rev-parse --git-dir 2>/dev/null)"
+
+if [ -n "$g" ]; then
+ if [ -d "$g/../.dotest" ]
+ then
+ if test -f "$g/../.dotest/rebasing"
+ then
+ r="|REBASE"
+ elif test -f "$g/../.dotest/applying"
+ then
+ r="|AM"
+ else
+ r="|AM/REBASE"
+ fi
+ b="$(git symbolic-ref HEAD 2>/dev/null)"
+ elif [ -f "$g/.dotest-merge/interactive" ]
+ then
+ r="|REBASE-i"
+ b="$(cat "$g/.dotest-merge/head-name")"
+ elif [ -d "$g/.dotest-merge" ]
+ then
+ r="|REBASE-m"
+ b="$(cat "$g/.dotest-merge/head-name")"
+ elif [ -f "$g/MERGE_HEAD" ]
+ then
+ r="|MERGING"
+ b="$(git symbolic-ref HEAD 2>/dev/null)"
+ else
+ if [ -f "$g/BISECT_LOG" ]
+ then
+ r="|BISECTING"
+ fi
+ if ! b="$(git symbolic-ref HEAD 2>/dev/null)"
+ then
+ if ! b="$(git describe --exact-match HEAD 2>/dev/null)"
+ then
+ b="$(cut -c1-7 "$g/HEAD")..."
+ fi
+ fi
+ fi
+
+ if [ -n "$1" ]; then
+ printf "$1" "${b##refs/heads/}$r"
+ else
+ printf "%s" "${b##refs/heads/}$r"
+ fi
+else
+ printf "not-in-git"
+fi
diff --git a/bin/gla11y b/bin/gla11y
new file mode 100755
index 000000000..b1d98c7c0
--- /dev/null
+++ b/bin/gla11y
@@ -0,0 +1,1401 @@
+#!/usr/bin/env python
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# This file incorporates work covered by the following license notice:
+#
+# Copyright (c) 2018 Martin Pieuchot
+# Copyright (c) 2018-2020 Samuel Thibault <sthibault@hypra.fr>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+# Take LibreOffice (glade) .ui files and check for non-accessible widgets
+
+from __future__ import print_function
+
+import os
+import sys
+import getopt
+try:
+ import lxml.etree as ET
+ lxml = True
+except ImportError:
+ if sys.version_info < (2,7):
+ print("gla11y needs lxml or python >= 2.7")
+        sys.exit(1)
+ import xml.etree.ElementTree as ET
+ lxml = False
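+
+# With lxml, messages can include source line numbers (elm.sourceline); the
+# plain xml.etree fallback cannot provide them, so line info is omitted then.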
+
+# Toplevel widgets
+widgets_toplevel = [
+ 'GtkWindow',
+ 'GtkOffscreenWindow',
+ 'GtkApplicationWindow',
+ 'GtkDialog',
+ 'GtkFileChooserDialog',
+ 'GtkColorChooserDialog',
+ 'GtkFontChooserDialog',
+ 'GtkMessageDialog',
+ 'GtkRecentChooserDialog',
+ 'GtkAssistant',
+ 'GtkAppChooserDialog',
+ 'GtkPrintUnixDialog',
+ 'GtkShortcutsWindow',
+]
+
+widgets_ignored = widgets_toplevel + [
+ # Containers
+ 'GtkBox',
+ 'GtkGrid',
+ 'GtkNotebook',
+ 'GtkFrame',
+ 'GtkAspectFrame',
+ 'GtkListBox',
+ 'GtkFlowBox',
+ 'GtkOverlay',
+ 'GtkMenuBar',
+ 'GtkToolbar',
+    'GtkToolPalette',
+ 'GtkPaned',
+ 'GtkHPaned',
+ 'GtkVPaned',
+ 'GtkButtonBox',
+ 'GtkHButtonBox',
+ 'GtkVButtonBox',
+ 'GtkLayout',
+ 'GtkFixed',
+ 'GtkEventBox',
+ 'GtkExpander',
+ 'GtkViewport',
+ 'GtkScrolledWindow',
+ 'GtkAlignment',
+ 'GtkRevealer',
+ 'GtkSearchBar',
+ 'GtkHeaderBar',
+ 'GtkStack',
+    'GtkStackSwitcher',
+ 'GtkPopover',
+ 'GtkPopoverMenu',
+ 'GtkActionBar',
+ 'GtkHandleBox',
+ 'GtkShortcutsSection',
+ 'GtkShortcutsGroup',
+ 'GtkTable',
+
+ 'GtkVBox',
+ 'GtkHBox',
+ 'GtkToolItem',
+ 'GtkMenu',
+
+ # Invisible actions
+ 'GtkSeparator',
+ 'GtkHSeparator',
+ 'GtkVSeparator',
+ 'GtkAction',
+ 'GtkToggleAction',
+ 'GtkActionGroup',
+ 'GtkCellRendererGraph',
+ 'GtkCellRendererPixbuf',
+ 'GtkCellRendererProgress',
+ 'GtkCellRendererSpin',
+ 'GtkCellRendererText',
+ 'GtkCellRendererToggle',
+ 'GtkSeparatorMenuItem',
+ 'GtkSeparatorToolItem',
+
+ # Storage objects
+ 'GtkListStore',
+ 'GtkTreeStore',
+ 'GtkTreeModelFilter',
+ 'GtkTreeModelSort',
+
+ 'GtkEntryBuffer',
+ 'GtkTextBuffer',
+ 'GtkTextTag',
+ 'GtkTextTagTable',
+
+ 'GtkSizeGroup',
+ 'GtkWindowGroup',
+ 'GtkAccelGroup',
+ 'GtkAdjustment',
+ 'GtkEntryCompletion',
+ 'GtkIconFactory',
+ 'GtkStatusIcon',
+ 'GtkFileFilter',
+ 'GtkRecentFilter',
+ 'GtkRecentManager',
+ 'GThemedIcon',
+
+ 'GtkTreeSelection',
+
+ 'GtkListBoxRow',
+ 'GtkTreeViewColumn',
+
+ # Useless to label
+ 'GtkScrollbar',
+ 'GtkHScrollbar',
+ 'GtkStatusbar',
+ 'GtkInfoBar',
+
+ # These are actually labels
+ 'GtkLinkButton',
+
+    # This precisely gives a11y information :)
+ 'AtkObject',
+]
+
+widgets_suffixignored = [
+]
+
+# These widgets always need a label
+widgets_needlabel = [
+ 'GtkEntry',
+ 'GtkSearchEntry',
+ 'GtkScale',
+ 'GtkHScale',
+ 'GtkVScale',
+ 'GtkSpinButton',
+ 'GtkSwitch',
+]
+
+# These widgets normally have their own label
+widgets_buttons = [
+ 'GtkButton',
+ 'GtkToolButton',
+ 'GtkToggleButton',
+ 'GtkToggleToolButton',
+ 'GtkRadioButton',
+ 'GtkRadioToolButton',
+ 'GtkCheckButton',
+ 'GtkModelButton',
+ 'GtkLockButton',
+ 'GtkColorButton',
+ 'GtkMenuButton',
+
+ 'GtkMenuItem',
+ 'GtkImageMenuItem',
+ 'GtkMenuToolButton',
+ 'GtkRadioMenuItem',
+ 'GtkCheckMenuItem',
+]
+
+# These widgets are labels that can label other widgets
+widgets_labels = [
+ 'GtkLabel',
+ 'GtkAccelLabel',
+]
+
+# The rest should probably be labelled if there are orphan labels
+
+# GtkSpinner
+# GtkProgressBar
+# GtkLevelBar
+
+# GtkComboBox
+# GtkComboBoxText
+# GtkFileChooserButton
+# GtkAppChooserButton
+# GtkFontButton
+# GtkCalendar
+# GtkColorChooserWidget
+
+# GtkCellView
+# GtkTreeView
+# GtkTextView
+# GtkIconView
+
+# GtkImage
+# GtkArrow
+# GtkDrawingArea
+
+# GtkScaleButton
+# GtkVolumeButton
+
+
+# TODO:
+# GtkColorPlane ?
+# GtkColorScale ?
+# GtkColorSwatch ?
+# GtkFileChooserWidget ?
+# GtkFishbowl ?
+# GtkFontChooserWidget ?
+# GtkIcon ?
+# GtkInspector* ?
+# GtkMagnifier ?
+# GtkPathBar ?
+# GtkPlacesSidebar ?
+# GtkPlacesView ?
+# GtkPrinterOptionWidget ?
+# GtkStackCombo ?
+# GtkStackSidebar ?
+# GtkStackSwitcher ?
+
+progname = os.path.basename(sys.argv[0])
+
+suppressions = {}
+suppressions_to_line = {}
+false_positives = {}
+ids = {}
+ids_dup = {}
+labelled_by_elm = {}
+label_for_elm = {}
+mnemonic_for_elm = {}
+
+gen_suppr = None
+gen_supprfile = None
+suppr_prefix = ""
+outfile = None
+
+pflag = False
+
+warn_orphan_labels = True
+
+errors = 0
+errexists = 0
+warnings = 0
+warnexists = 0
+fatals = 0
+fatalexists = 0
+
+enables = [ ]
+dofatals = [ ]
+
+#
+# XML browsing and printing functions
+#
+
+def elm_parent(root, elm):
+ """
+ Return the parent of the element.
+ """
+ if lxml:
+ return elm.getparent()
+ else:
+ def find_parent(cur, elm):
+ for o in cur:
+ if o == elm:
+ return cur
+ parent = find_parent(o, elm)
+ if parent is not None:
+ return parent
+ return None
+ return find_parent(root, elm)
+
+def step_elm(elm):
+ """
+ Return the XML class path step corresponding to elm.
+ This can be empty if the elm does not have any class or id.
+ """
+ step = elm.attrib.get('class')
+ if step is None:
+ step = ""
+ oid = elm.attrib.get('id')
+ if oid is not None:
+ oid = oid.encode('ascii','ignore').decode('ascii')
+ step += "[@id='%s']" % oid
+ if len(step) > 0:
+ step += '/'
+ return step
+
+def find_elm(root, elm):
+ """
+ Return the XML class path of the element from the given root.
+ This is the slow version used when getparent is not available.
+ """
+ if root == elm:
+ return ""
+ for o in root:
+ path = find_elm(o, elm)
+ if path is not None:
+ step = step_elm(o)
+ return step + path
+ return None
+
+def errpath(filename, tree, elm):
+ """
+ Return the XML class path of the element
+ """
+ if elm is None:
+ return ""
+ path = ""
+ if 'class' in elm.attrib:
+ path += elm.attrib['class']
+ oid = elm.attrib.get('id')
+ if oid is not None:
+ oid = oid.encode('ascii','ignore').decode('ascii')
+ path = "//" + path + "[@id='%s']" % oid
+ else:
+ if lxml:
+ elm = elm.getparent()
+ while elm is not None:
+ step = step_elm(elm)
+ path = step + path
+ elm = elm.getparent()
+ else:
+ path = find_elm(tree.getroot(), elm)[:-1]
+ path = filename + ':' + path
+ return path
+
+#
+# Warning/Error printing functions
+#
+
+def elm_prefix(filename, elm):
+ """
+ Return the display prefix of the element
+ """
+    if elm is None or not lxml:
+ return "%s:" % filename
+ else:
+ return "%s:%u" % (filename, elm.sourceline)
+
+def elm_name(elm):
+ """
+ Return a display name of the element
+ """
+ if elm is not None:
+ name = ""
+ if 'class' in elm.attrib:
+ name = "'%s' " % elm.attrib['class']
+ if 'id' in elm.attrib:
+ id = elm.attrib['id'].encode('ascii','ignore').decode('ascii')
+ name += "'%s' " % id
+ if not name:
+ name = "'" + elm.tag + "'"
+ if lxml:
+ name += " line " + str(elm.sourceline)
+ return name
+ return ""
+
+def elm_name_line(elm):
+ """
+ Return a display name of the element with line number
+ """
+ if elm is not None:
+ name = elm_name(elm)
+ if lxml and " line " not in name:
+ name += "line " + str(elm.sourceline) + " "
+ return name
+ return ""
+
+def elm_line(elm):
+ """
+ Return the line for the given element.
+ """
+ if lxml:
+ return " line " + str(elm.sourceline)
+ else:
+ return ""
+
+def elms_lines(elms):
+ """
+ Return the list of lines for the given elements.
+ """
+ if lxml:
+ return " lines " + ', '.join([str(l.sourceline) for l in elms])
+ else:
+ return ""
+
+def elms_names_lines(elms):
+ """
+ Return the list of names and lines for the given elements.
+ """
+ return ', '.join([elm_name_line(elm) for elm in elms])
+
+def elm_suppr(filename, tree, elm, msgtype, dogen):
+ """
+ Return the prefix to be displayed to the user and the suppression line for
+ the warning type "msgtype" for element "elm"
+ """
+ global gen_suppr, gen_supprfile, suppr_prefix, pflag
+
+ if suppressions or false_positives or gen_suppr is not None or pflag:
+ prefix = errpath(filename, tree, elm)
+ if prefix[0:len(suppr_prefix)] == suppr_prefix:
+ prefix = prefix[len(suppr_prefix):]
+
+ if suppressions or false_positives or gen_suppr is not None:
+ suppr = '%s %s' % (prefix, msgtype)
+
+ if gen_suppr is not None and msgtype is not None and dogen:
+ if gen_supprfile is None:
+ gen_supprfile = open(gen_suppr, 'w')
+ print(suppr, file=gen_supprfile)
+ else:
+ suppr = None
+
+ if not pflag:
+ # Use user-friendly line numbers
+ prefix = elm_prefix(filename, elm)
+ if prefix[0:len(suppr_prefix)] == suppr_prefix:
+ prefix = prefix[len(suppr_prefix):]
+
+ return (prefix, suppr)
+
+def is_enabled(elm, msgtype, l, default):
+ """
+ Test whether warning type msgtype is enabled for elm in l
+ """
+ enabled = default
+ for (enable, thetype, klass) in l:
+ # Match warning type
+ if thetype is not None:
+ if thetype != msgtype:
+ continue
+ # Match elm class
+ if klass is not None and elm is not None:
+ if klass != elm.attrib.get('class'):
+ continue
+ enabled = enable
+ return enabled
+
+def err(filename, tree, elm, msgtype, msg, error = True):
+ """
+ Emit a warning or error for an element
+ """
+ global errors, errexists, warnings, warnexists, fatals, fatalexists
+
+ # Let user tune whether a warning or error
+ fatal = is_enabled(elm, msgtype, dofatals, error)
+
+ # By default warnings and errors are enabled, but let user tune it
+ if not is_enabled(elm, msgtype, enables, True):
+ return
+
+ (prefix, suppr) = elm_suppr(filename, tree, elm, msgtype, True)
+ if suppr in false_positives:
+ # That was actually expected
+ return
+ if suppr in suppressions:
+ # Suppressed
+ suppressions[suppr] = False
+ if fatal:
+ fatalexists += 1
+ if error:
+ errexists += 1
+ else:
+ warnexists += 1
+ return
+
+ if error:
+ errors += 1
+ else:
+ warnings += 1
+ if fatal:
+ fatals += 1
+
+ msg = "%s %s%s: %s%s" % (prefix,
+ "FATAL " if fatal else "",
+ "ERROR" if error else "WARNING",
+ elm_name(elm), msg)
+ print(msg)
+ if outfile is not None:
+ print(msg, file=outfile)
+
+def warn(filename, tree, elm, msgtype, msg):
+ """
+ Emit a warning for an element
+ """
+ err(filename, tree, elm, msgtype, msg, False)
+
+#
+# Labelling testing functions
+#
+
+def find_button_parent(root, elm):
+ """
+ Find a parent which is a button
+ """
+ if lxml:
+ parent = elm.getparent()
+ if parent is not None:
+ if parent.attrib.get('class') in widgets_buttons:
+ return parent
+ return find_button_parent(root, parent)
+ else:
+ def find_parent(cur, elm):
+ for o in cur:
+ if o == elm:
+ if cur.attrib.get('class') in widgets_buttons:
+ # we are the button, immediately above the target
+ return cur
+ else:
+ # we aren't the button, but target is over there
+ return True
+ parent = find_parent(o, elm)
+ if parent == True:
+ # It is over there, but didn't find a button yet
+ if cur.attrib.get('class') in widgets_buttons:
+ # we are the button
+ return cur
+ else:
+ return True
+ if parent is not None:
+ # we have the button parent over there
+ return parent
+ return None
+ parent = find_parent(root, elm)
+ if parent == True:
+ parent = None
+ return parent
+
+
+def is_labelled_parent(elm):
+ """
+ Return whether this element is a labelled parent
+ """
+ klass = elm.attrib.get('class')
+ if klass in widgets_toplevel:
+ return True
+ if klass == 'GtkShortcutsGroup':
+ children = elm.findall("property[@name='title']")
+ if len(children) >= 1:
+ return True
+ if klass == 'GtkFrame' or klass == 'GtkNotebook':
+ children = elm.findall("child[@type='tab']") + elm.findall("child[@type='label']")
+ if len(children) >= 1:
+ return True
+ return False
+
+def elm_labelled_parent(root, elm):
+ """
+ Return the first labelled parent of the element, which can thus be used as
+ the root of widgets with common labelled context
+ """
+
+ if lxml:
+ def find_labelled_parent(elm):
+ if is_labelled_parent(elm):
+ return elm
+ parent = elm.getparent()
+ if parent is None:
+ return None
+ return find_labelled_parent(parent)
+ parent = elm.getparent()
+ if parent is None:
+ return None
+        return find_labelled_parent(parent)
+ else:
+ def find_labelled_parent(cur, elm):
+ if cur == elm:
+ # the target element is over there
+ return True
+ for o in cur:
+ parent = find_labelled_parent(o, elm)
+ if parent == True:
+ # target element is over there, check ourself
+ if is_labelled_parent(cur):
+ # yes, and we are the first ancestor of the target element
+ return cur
+ else:
+ # no, but target element is over there.
+ return True
+ if parent != None:
+ # the first ancestor of the target element was over there
+ return parent
+ return None
+ parent = find_labelled_parent(root, elm)
+ if parent == True:
+ parent = None
+ return parent
+
+def is_orphan_label(filename, tree, root, obj, orphan_root, doprint = False):
+ """
+ Check whether this label has no accessibility relation, or doubtful relation
+ because another label labels the same target
+ """
+ global label_for_elm, labelled_by_elm, mnemonic_for_elm, warnexists
+
+ # label-for
+ label_for = obj.findall("accessibility/relation[@type='label-for']")
+ for rel in label_for:
+ target = rel.attrib['target']
+ l = label_for_elm[target]
+ if len(l) > 1:
+ return True
+
+ # mnemonic_widget
+ mnemonic_for = obj.findall("property[@name='mnemonic_widget']") + \
+ obj.findall("property[@name='mnemonic-widget']")
+ for rel in mnemonic_for:
+ target = rel.text
+ l = mnemonic_for_elm[target]
+ if len(l) > 1:
+ return True
+
+ if len(label_for) > 0:
+ # At least one label-for, we are not orphan.
+ return False
+
+ if len(mnemonic_for) > 0:
+ # At least one mnemonic_widget, we are not orphan.
+ return False
+
+ labelled_by = obj.findall("accessibility/relation[@type='labelled-by']")
+ if len(labelled_by) > 0:
+ # Oh, a labelled label, probably not to be labelling anything
+ return False
+
+ # explicit role?
+ roles = [x.text for x in obj.findall("child[@internal-child='accessible']/object[@class='AtkObject']/property[@name='AtkObject::accessible-role']")]
+ roles += [x.attrib.get("type") for x in obj.findall("accessibility/role")]
+ if len(roles) > 1 and doprint:
+ err(filename, tree, obj, "multiple-role", "has multiple <child internal-child='accessible'><object class='AtkObject'><property name='AtkBoject::accessible-role'>"
+ "%s" % elms_lines(children))
+ for role in roles:
+ if role == 'static' or role == 'ATK_ROLE_STATIC':
+ # This is static text, not meant to label anything
+ return False
+
+ parent = elm_parent(root, obj)
+ if parent is not None:
+ childtype = parent.attrib.get('type')
+ if childtype is None:
+ childtype = parent.attrib.get('internal-child')
+ if parent.tag == 'child' and childtype == 'label' \
+ or childtype == 'tab':
+ # This is a frame or a notebook label, not orphan.
+ return False
+
+ if find_button_parent(root, obj) is not None:
+ # This label is part of a button
+ return False
+
+ oid = obj.attrib.get('id')
+ if oid is not None:
+ if oid in labelled_by_elm:
+ # Some widget is labelled by us, we are not orphan.
+ # We should have had a label-for, will warn about it later.
+ return False
+
+ # No label-for, no mnemonic-for, no labelled-by, we are orphan.
+ (_, suppr) = elm_suppr(filename, tree, obj, "orphan-label", False)
+ if suppr in false_positives:
+ # That was actually expected
+ return False
+ if suppr in suppressions:
+ # Warning suppressed for this label
+ if suppressions[suppr]:
+ warnexists += 1
+ suppressions[suppr] = False
+ return False
+
+ if doprint:
+ context = elm_name(orphan_root)
+ if context:
+ context = " within " + context
+ warn(filename, tree, obj, "orphan-label", "does not specify what it labels" + context)
+ return True
+
+def is_orphan_widget(filename, tree, root, obj, orphan, orphan_root, doprint = False):
+ """
+ Check whether this widget has no accessibility relation.
+ """
+ global warnexists
+ if obj.tag != 'object':
+ return False
+
+ oid = obj.attrib.get('id')
+ klass = obj.attrib.get('class')
+
+ # "Don't care" special case
+ if klass in widgets_ignored:
+ return False
+ for suffix in widgets_suffixignored:
+ if klass[-len(suffix):] == suffix:
+ return False
+
+    # Widgets usually do not strictly require a label, i.e. a labelled parent
+    # is enough for context, but some always need one.
+ requires_label = klass in widgets_needlabel
+
+ labelled_by = obj.findall("accessibility/relation[@type='labelled-by']")
+
+ # Labels special case
+ if klass in widgets_labels:
+ return False
+
+ # Case 1: has an explicit <child internal-child="accessible"> sub-element
+ children = obj.findall("child[@internal-child='accessible']")
+ if len(children) > 1 and doprint:
+ err(filename, tree, obj, "multiple-accessible", "has multiple <child internal-child='accessible'>"
+ "%s" % elms_lines(children))
+ if len(children) >= 1:
+ return False
+
+ # Case 2: has an <accessibility> sub-element with a "labelled-by"
+ # <relation> pointing to an existing element.
+ if len(labelled_by) > 0:
+ return False
+
+ # Case 3: has a label-for
+ if oid in label_for_elm:
+ return False
+
+ # Case 4: has a mnemonic
+ if oid in mnemonic_for_elm:
+ return False
+
+ # Case 5: Has a <property name="tooltip_text">
+ tooltips = obj.findall("property[@name='tooltip_text']") + \
+ obj.findall("property[@name='tooltip-text']")
+ if len(tooltips) > 1 and doprint:
+ err(filename, tree, obj, "multiple-tooltip", "has multiple tooltip_text properties")
+ if len(tooltips) >= 1 and klass != 'GtkCheckButton':
+ return False
+
+ # Case 6: Has a <property name="placeholder_text">
+ placeholders = obj.findall("property[@name='placeholder_text']") + \
+ obj.findall("property[@name='placeholder-text']")
+ if len(placeholders) > 1 and doprint:
+ err(filename, tree, obj, "multiple-placeholder", "has multiple placeholder_text properties")
+ if len(placeholders) >= 1:
+ return False
+
+    # Buttons usually don't need an external label; their own label is enough (but they do need one)
+ if klass in widgets_buttons:
+
+ labels = obj.findall("property[@name='label']")
+ if len(labels) > 1 and doprint:
+ err(filename, tree, obj, "multiple-label", "has multiple label properties")
+ if len(labels) >= 1:
+ # Has a <property name="label">
+ return False
+
+ actions = obj.findall("property[@name='action_name']")
+ if len(actions) > 1 and doprint:
+ err(filename, tree, obj, "multiple-action_name", "has multiple action_name properties")
+ if len(actions) >= 1:
+ # Has a <property name="action_name">
+ return False
+
+ # Uses id as an action_name
+ if 'id' in obj.attrib:
+ if obj.attrib['id'].startswith(".uno:"):
+ return False
+
+ gtklabels = obj.findall(".//object[@class='GtkLabel']") + obj.findall(".//object[@class='GtkAccelLabel']")
+ if len(gtklabels) >= 1:
+ # Has a custom label
+ return False
+
+ # no label for a button, warn
+ if doprint:
+ warn(filename, tree, obj, "button-no-label", "does not have its own label");
+ if not is_enabled(obj, "button-no-label", enables, True):
+ # Warnings disabled
+ return False
+ (_, suppr) = elm_suppr(filename, tree, obj, "button-no-label", False)
+ if suppr in false_positives:
+ # That was actually expected
+ return False
+ if suppr in suppressions:
+ # Warning suppressed for this widget
+ if suppressions[suppr]:
+ warnexists += 1
+ suppressions[suppr] = False
+ return False
+ return True
+
+ # GtkImages special case
+ if klass == "GtkImage":
+ uses = [u for u in tree.iterfind(".//object/property[@name='image']") if u.text == oid]
+ if len(uses) > 0:
+ # This image is just used by another element, don't warn
+ # about the image itself, we probably want the warning on
+ # the element instead.
+ return False
+
+ if find_button_parent(root, obj) is not None:
+ # This image is part of a button, we want the warning on the button
+ # instead, if any.
+ return False
+
+ # GtkEntry special case
+ if klass == 'GtkEntry' or klass == 'GtkSearchEntry':
+ parent = elm_parent(root, obj)
+ if parent is not None:
+ if parent.tag == 'child' and \
+ parent.attrib.get('internal-child') == "entry":
+ # This is an internal entry of another widget. Relations
+ # will be handled by that widget.
+ return False
+
+ # GtkShortcutsShortcut special case
+ if klass == 'GtkShortcutsShortcut':
+ children = obj.findall("property[@name='title']")
+ if len(children) >= 1:
+ return False
+
+
+ # Really no label, perhaps emit a warning
+ if not is_enabled(obj, "no-labelled-by", enables, True):
+ # Warnings disabled for this class of widgets
+ return False
+ (_, suppr) = elm_suppr(filename, tree, obj, "no-labelled-by", False)
+ if suppr in false_positives:
+ # That was actually expected
+ return False
+ if suppr in suppressions:
+ # Warning suppressed for this widget
+ if suppressions[suppr]:
+ warnexists += 1
+ suppressions[suppr] = False
+ return False
+
+ if not orphan:
+ # No orphan label, so probably the labelled parent provides enough
+ # context.
+ if requires_label:
+ # But these always need a label.
+ if doprint:
+ warn(filename, tree, obj, "no-labelled-by", "has no accessibility label")
+ return True
+ return False
+
+ if doprint:
+ context = elm_name(orphan_root)
+ if context:
+ context = " within " + context
+ warn(filename, tree, obj, "no-labelled-by", "has no accessibility label while there are orphan labels" + context)
+ return True
+
+def orphan_items(filename, tree, root, elm):
+ """
+ Check whether from some element there exists orphan labels and orphan widgets
+ """
+ orphan_labels = False
+ orphan_widgets = False
+ if elm.attrib.get('class') in widgets_labels:
+ orphan_labels = is_orphan_label(filename, tree, root, elm, None)
+ else:
+ orphan_widgets = is_orphan_widget(filename, tree, root, elm, True, None)
+ for obj in elm:
+        # We are not interested in orphan labels under another labelled
+        # parent. This also keeps the complexity linear.
+ if not is_labelled_parent(obj):
+ label, widget = orphan_items(filename, tree, root, obj)
+ if label:
+ orphan_labels = True
+ if widget:
+ orphan_widgets = True
+ if orphan_labels and orphan_widgets:
+ # No need to look up more
+ break
+ return orphan_labels, orphan_widgets
+
+#
+# UI accessibility checks
+#
+
+def check_props(filename, tree, root, elm, forward):
+ """
+ Check the given list of relation properties
+ """
+ props = elm.findall("property[@name='" + forward + "']")
+ for prop in props:
+ if prop.text not in ids:
+ err(filename, tree, elm, "undeclared-target", forward + " uses undeclared target '%s'" % prop.text)
+ return props
+
+def is_visible(obj):
+ visible = False
+ visible_prop = obj.findall("property[@name='visible']")
+ visible_len = len(visible_prop)
+ if visible_len:
+ visible_txt = visible_prop[visible_len - 1].text
+ if visible_txt.lower() == "true":
+ visible = True
+ elif visible_txt.lower() == "false":
+ visible = False
+ return visible
+
+def check_rels(filename, tree, root, elm, forward, backward = None):
+ """
+ Check the relations given by forward
+ """
+ oid = elm.attrib.get('id')
+ rels = elm.findall("accessibility/relation[@type='" + forward + "']")
+ for rel in rels:
+ target = rel.attrib['target']
+ if target not in ids:
+ err(filename, tree, elm, "undeclared-target", forward + " uses undeclared target '%s'" % target)
+ elif backward is not None:
+ widget = ids[target]
+ backrels = widget.findall("accessibility/relation[@type='" + backward + "']")
+ if len([x for x in backrels if x.attrib['target'] == oid]) == 0:
+ err(filename, tree, elm, "missing-" + backward, "has " + forward + \
+ ", but is not " + backward + " by " + elm_name_line(widget))
+ return rels
+
+def check_a11y_relation(filename, tree):
+ """
+ Emit an error message if any of the 'object' elements of the XML
+ document represented by `root' doesn't comply with Accessibility
+ rules.
+ """
+ global widgets_ignored, ids, label_for_elm, labelled_by_elm, mnemonic_for_elm
+
+ def check_elm(orphan_root, obj, orphan_labels, orphan_widgets):
+ """
+ Check one element, knowing that orphan_labels/widgets tell whether
+ there are orphan labels and widgets within orphan_root
+ """
+
+ oid = obj.attrib.get('id')
+ klass = obj.attrib.get('class')
+
+ # "Don't care" special case
+ if klass in widgets_ignored:
+ return
+ for suffix in widgets_suffixignored:
+ if klass[-len(suffix):] == suffix:
+ return
+
+        # Widgets usually do not strictly require a label, i.e. a labelled parent
+        # is enough for context, but some always need one.
+ requires_label = klass in widgets_needlabel
+
+ if oid is not None:
+ # Check that ids are unique
+ if oid in ids_dup:
+ if ids[oid] == obj:
+ # We are the first, warn
+ duplicates = tree.findall(".//object[@id='" + oid + "']")
+ err(filename, tree, obj, "duplicate-id", "has the same id as other elements " + elms_names_lines(duplicates))
+
+ # Check label-for and their dual labelled-by
+ label_for = check_rels(filename, tree, root, obj, "label-for", "labelled-by")
+
+ # Check labelled-by and its dual label-for
+ labelled_by = check_rels(filename, tree, root, obj, "labelled-by", "label-for")
+
+ visible = is_visible(obj)
+
+ # Should have only one label
+ if len(labelled_by) >= 1:
+ if oid in mnemonic_for_elm:
+ warn(filename, tree, obj, "labelled-by-and-mnemonic",
+ "has both a mnemonic " + elm_name_line(mnemonic_for_elm[oid][0]) + "and labelled-by relation")
+ if len(labelled_by) > 1:
+ warn(filename, tree, obj, "multiple-labelled-by", "has multiple labelled-by relations")
+ if oid in label_for_elm:
+ if len(label_for_elm[oid]) > 1:
+ warn(filename, tree, obj, "duplicate-label-for", "is referenced by multiple label-for " + elms_names_lines(label_for_elm[oid]))
+ elif len(label_for_elm[oid]) == 1:
+ paired = label_for_elm[oid][0]
+ if visible != is_visible(paired):
+ warn(filename, tree, obj, "visibility-conflict", "visibility conflicts with paired " + elm_name_line(paired))
+ if oid in mnemonic_for_elm:
+ if len(mnemonic_for_elm[oid]) > 1:
+ warn(filename, tree, obj, "duplicate-mnemonic", "is referenced by multiple mnemonic_widget " + elms_names_lines(mnemonic_for_elm[oid]))
+
+ # Check member-of
+ member_of = check_rels(filename, tree, root, obj, "member-of")
+
+ # Labels special case
+ if klass in widgets_labels:
+ properties = check_props(filename, tree, root, obj, "mnemonic_widget") + \
+ check_props(filename, tree, root, obj, "mnemonic-widget")
+ if len(properties) > 1:
+ err(filename, tree, obj, "multiple-mnemonic", "has multiple mnemonic_widgets properties"
+ "%s" % elms_lines(properties))
+
+ # Emit orphaning warnings
+ if warn_orphan_labels or orphan_widgets:
+ is_orphan_label(filename, tree, root, obj, orphan_root, True)
+
+ # We are done with the label
+ return
+
+ # Not a label, will perhaps need one
+
+ # Emit orphaning warnings
+ is_orphan_widget(filename, tree, root, obj, orphan_labels, orphan_root, True)
+
+ root = tree.getroot()
+
+ # Flush ids and relations from previous files
+ ids = {}
+ ids_dup = {}
+ labelled_by_elm = {}
+ label_for_elm = {}
+ mnemonic_for_elm = {}
+
+ # First pass to get links into hash tables, no warning, just record duplicates
+ for obj in root.iter('object'):
+ oid = obj.attrib.get('id')
+ if oid is not None:
+ if oid not in ids:
+ ids[oid] = obj
+ else:
+ ids_dup[oid] = True
+
+ labelled_by = obj.findall("accessibility/relation[@type='labelled-by']")
+ for rel in labelled_by:
+ target = rel.attrib.get('target')
+ if target is not None:
+ if target not in labelled_by_elm:
+ labelled_by_elm[target] = [ obj ]
+ else:
+ labelled_by_elm[target].append(obj)
+
+ label_for = obj.findall("accessibility/relation[@type='label-for']")
+ for rel in label_for:
+ target = rel.attrib.get('target')
+ if target is not None:
+ if target not in label_for_elm:
+ label_for_elm[target] = [ obj ]
+ else:
+ label_for_elm[target].append(obj)
+
+ mnemonic_for = obj.findall("property[@name='mnemonic_widget']") + \
+ obj.findall("property[@name='mnemonic-widget']")
+ for rel in mnemonic_for:
+ target = rel.text
+ if target is not None:
+ if target not in mnemonic_for_elm:
+ mnemonic_for_elm[target] = [ obj ]
+ else:
+ mnemonic_for_elm[target].append(obj)
+
+ # Second pass, recursive depth-first, to be able to efficiently know whether
+ # there are orphan labels within a part of the tree.
+ def recurse(orphan_root, obj, orphan_labels, orphan_widgets):
+ if obj == root or is_labelled_parent(obj):
+ orphan_root = obj
+ orphan_labels, orphan_widgets = orphan_items(filename, tree, root, obj)
+
+ if obj.tag == 'object':
+ check_elm(orphan_root, obj, orphan_labels, orphan_widgets)
+
+ for o in obj:
+ recurse(orphan_root, o, orphan_labels, orphan_widgets)
+
+ recurse(root, root, False, False)
+
+#
+# Main
+#
+
+def usage(fatal = True):
+ print("`%s' checks accessibility of glade .ui files" % progname)
+ print("")
+ print("Usage: %s [-p] [-g SUPPR_FILE] [-s SUPPR_FILE] [-f SUPPR_FILE] [-P PREFIX] [-o LOG_FILE] [file ...]" % progname)
+ print("")
+ print(" -p Print XML class path instead of line number")
+ print(" -g Generate suppression file SUPPR_FILE")
+ print(" -s Suppress warnings given by file SUPPR_FILE, but count them")
+ print(" -f Suppress warnings given by file SUPPR_FILE completely")
+ print(" -P Remove PREFIX from file names in warnings")
+ print(" -o Also prints errors and warnings to given file")
+ print("")
+ print(" --widgets-FOO [+][CLASS1[,CLASS2[,...]]]")
+ print(" Give or extend one of the lists of widget classes, where FOO can be:")
+ print(" - toplevel : widgets to be considered toplevel windows")
+ print(" - ignored : widgets which do not need labelling (e.g. GtkBox)")
+ print(" - suffixignored : suffixes of widget classes which do not need labelling")
+ print(" - needlabel : widgets which always need labelling (e.g. GtkEntry)")
+ print(" - buttons : widgets which need their own label but not more")
+ print(" (e.g. GtkButton)")
+ print(" - labels : widgets which provide labels (e.g. GtkLabel)")
+ print(" --widgets-print print default widgets lists")
+ print("")
+ print(" --enable-all enable all warnings/dofatals (default)")
+ print(" --disable-all disable all warnings/dofatals")
+ print(" --fatal-all make all warnings dofatals")
+ print(" --not-fatal-all do not make all warnings dofatals (default)")
+ print("")
+ print(" --enable-type=TYPE enable warning/fatal type TYPE")
+ print(" --disable-type=TYPE disable warning/fatal type TYPE")
+ print(" --fatal-type=TYPE make warning type TYPE a fatal")
+ print(" --not-fatal-type=TYPE make warning type TYPE not a fatal")
+ print("")
+ print(" --enable-widgets=CLASS enable warning/fatal type CLASS")
+ print(" --disable-widgets=CLASS disable warning/fatal type CLASS")
+ print(" --fatal-widgets=CLASS make warning type CLASS a fatal")
+ print(" --not-fatal-widgets=CLASS make warning type CLASS not a fatal")
+ print("")
+ print(" --enable-specific=TYPE.CLASS enable warning/fatal type TYPE for widget")
+ print(" class CLASS")
+ print(" --disable-specific=TYPE.CLASS disable warning/fatal type TYPE for widget")
+ print(" class CLASS")
+ print(" --fatal-specific=TYPE.CLASS make warning type TYPE a fatal for widget")
+ print(" class CLASS")
+ print(" --not-fatal-specific=TYPE.CLASS make warning type TYPE not a fatal for widget")
+ print(" class CLASS")
+ print("")
+ print(" --disable-orphan-labels only warn about orphan labels when there are")
+ print(" orphan widgets in the same context")
+ print("")
+ print("Report bugs to <bugs@hypra.fr>")
+ sys.exit(2 if fatal else 0)
+
+def widgets_opt(widgets_list, arg):
+ """
+ Replace or extend `widgets_list' with the list of classes contained in `arg'
+ """
+ append = arg and arg[0] == '+'
+ if append:
+ arg = arg[1:]
+
+ if arg:
+ widgets = arg.split(',')
+ else:
+ widgets = []
+
+ if not append:
+ del widgets_list[:]
+
+ widgets_list.extend(widgets)
+
+
+def main():
+ global pflag, gen_suppr, gen_supprfile, suppressions, suppr_prefix, false_positives, dofatals, enables, dofatals, warn_orphan_labels
+ global widgets_toplevel, widgets_ignored, widgets_suffixignored, widgets_needlabel, widgets_buttons, widgets_labels
+ global outfile
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "hpiIg:s:f:P:o:L:", [
+ "help",
+ "version",
+
+ "widgets-toplevel=",
+ "widgets-ignored=",
+ "widgets-suffixignored=",
+ "widgets-needlabel=",
+ "widgets-buttons=",
+ "widgets-labels=",
+ "widgets-print",
+
+ "enable-all",
+ "disable-all",
+ "fatal-all",
+ "not-fatal-all",
+
+ "enable-type=",
+ "disable-type=",
+ "fatal-type=",
+ "not-fatal-type=",
+
+ "enable-widgets=",
+ "disable-widgets=",
+ "fatal-widgets=",
+ "not-fatal-widgets=",
+
+ "enable-specific=",
+ "disable-specific=",
+ "fatal-specific=",
+ "not-fatal-specific=",
+
+ "disable-orphan-labels",
+ ] )
+ except getopt.GetoptError:
+ usage()
+
+ suppr = None
+ false = None
+ out = None
+ filelist = None
+
+ for o, a in opts:
+ if o == "--help" or o == "-h":
+ usage(False)
+ if o == "--version":
+ print("0.1")
+ sys.exit(0)
+ elif o == "-p":
+ pflag = True
+ elif o == "-g":
+ gen_suppr = a
+ elif o == "-s":
+ suppr = a
+ elif o == "-f":
+ false = a
+ elif o == "-P":
+ suppr_prefix = a
+ elif o == "-o":
+ out = a
+ elif o == "-L":
+ filelist = a
+
+ elif o == "--widgets-toplevel":
+ widgets_opt(widgets_toplevel, a)
+ elif o == "--widgets-ignored":
+ widgets_opt(widgets_ignored, a)
+ elif o == "--widgets-suffixignored":
+ widgets_opt(widgets_suffixignored, a)
+ elif o == "--widgets-needlabel":
+ widgets_opt(widgets_needlabel, a)
+ elif o == "--widgets-buttons":
+ widgets_opt(widgets_buttons, a)
+ elif o == "--widgets-labels":
+ widgets_opt(widgets_labels, a)
+ elif o == "--widgets-print":
+ print("--widgets-toplevel '" + ','.join(widgets_toplevel) + "'")
+ print("--widgets-ignored '" + ','.join(widgets_ignored) + "'")
+ print("--widgets-suffixignored '" + ','.join(widgets_suffixignored) + "'")
+ print("--widgets-needlabel '" + ','.join(widgets_needlabel) + "'")
+ print("--widgets-buttons '" + ','.join(widgets_buttons) + "'")
+ print("--widgets-labels '" + ','.join(widgets_labels) + "'")
+ sys.exit(0)
+
+ elif o == '--enable-all':
+ enables.append( (True, None, None) )
+ elif o == '--disable-all':
+ enables.append( (False, None, None) )
+ elif o == '--fatal-all':
+ dofatals.append( (True, None, None) )
+ elif o == '--not-fatal-all':
+ dofatals.append( (False, None, None) )
+
+ elif o == '--enable-type':
+ enables.append( (True, a, None) )
+ elif o == '--disable-type':
+ enables.append( (False, a, None) )
+ elif o == '--fatal-type':
+ dofatals.append( (True, a, None) )
+ elif o == '--not-fatal-type':
+ dofatals.append( (False, a, None) )
+
+ elif o == '--enable-widgets':
+ enables.append( (True, None, a) )
+ elif o == '--disable-widgets':
+ enables.append( (False, None, a) )
+ elif o == '--fatal-widgets':
+ dofatals.append( (True, None, a) )
+ elif o == '--not-fatal-widgets':
+ dofatals.append( (False, None, a) )
+
+ elif o == '--enable-specific':
+ (thetype, klass) = a.split('.', 1)
+ enables.append( (True, thetype, klass) )
+ elif o == '--disable-specific':
+ (thetype, klass) = a.split('.', 1)
+ enables.append( (False, thetype, klass) )
+ elif o == '--fatal-specific':
+ (thetype, klass) = a.split('.', 1)
+ dofatals.append( (True, thetype, klass) )
+ elif o == '--not-fatal-specific':
+ (thetype, klass) = a.split('.', 1)
+ dofatals.append( (False, thetype, klass) )
+
+ elif o == '--disable-orphan-labels':
+ warn_orphan_labels = False
+
+ # Read suppression file before overwriting it
+ if suppr is not None:
+ try:
+ supprfile = open(suppr, 'r')
+            line_no = 1
+            for line in supprfile.readlines():
+                prefix = line.rstrip()
+                suppressions[prefix] = True
+                suppressions_to_line[prefix] = line_no
+                line_no += 1
+ supprfile.close()
+ except IOError:
+ pass
+
+ # Read false positives file
+ if false is not None:
+ try:
+ falsefile = open(false, 'r')
+ for line in falsefile.readlines():
+ prefix = line.rstrip()
+ false_positives[prefix] = True
+ falsefile.close()
+ except IOError:
+ pass
+
+ if out is not None:
+ outfile = open(out, 'w')
+
+ if filelist is not None:
+ try:
+ filelistfile = open(filelist, 'r')
+ for line in filelistfile.readlines():
+ line = line.strip()
+ if line:
+ args += line.split(' ')
+ filelistfile.close()
+ except IOError:
+            err(filelist, None, None, None, "unable to read file list file")
+
+ for filename in args:
+ try:
+ tree = ET.parse(filename)
+ except ET.ParseError:
+ err(filename, None, None, "parse", "malformatted xml file")
+ continue
+ except IOError:
+ err(filename, None, None, None, "unable to read file")
+ continue
+
+ try:
+ check_a11y_relation(filename, tree)
+ except Exception as error:
+ import traceback
+ traceback.print_exc()
+ err(filename, None, None, "parse", "error parsing file")
+
+ if errors > 0 or errexists > 0:
+ estr = "%s new error%s" % (errors, 's' if errors > 1 else '')
+ if errexists > 0:
+ estr += " (%s suppressed by %s)" % (errexists, suppr)
+ print(estr)
+
+ if warnings > 0 or warnexists > 0:
+ wstr = "%s new warning%s" % (warnings, 's' if warnings > 1 else '')
+ if warnexists > 0:
+ wstr += " (%s suppressed by %s)" % (warnexists, suppr)
+ print(wstr)
+
+ if fatals > 0 or fatalexists > 0:
+ wstr = "%s new fatal%s" % (fatals, 's' if fatals > 1 else '')
+ if fatalexists > 0:
+ wstr += " (%s suppressed by %s)" % (fatalexists, suppr)
+ print(wstr)
+
+ n = 0
+ for (suppr,unused) in suppressions.items():
+ if unused:
+ n += 1
+
+ if n > 0:
+ print("%s suppression%s unused:" % (n, 's' if n > 1 else ''))
+ for (suppr,unused) in suppressions.items():
+ if unused:
+ print(" %s:%s" % (suppressions_to_line[suppr], suppr))
+
+ if gen_supprfile is not None:
+ gen_supprfile.close()
+ if outfile is not None:
+ outfile.close()
+ if fatals > 0 and gen_suppr is None:
+ print("Explanations are available on https://wiki.documentfoundation.org/Development/Accessibility")
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/includebloat.awk b/bin/includebloat.awk
new file mode 100755
index 000000000..3792ef950
--- /dev/null
+++ b/bin/includebloat.awk
@@ -0,0 +1,51 @@
+#!/usr/bin/gawk -f
+# -*- tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Generate a list of files included by the C++ compiler during the build
+# sorted by the total bytes an included file contributed to preprocessor input.
+# usage: first do a full build with "make check", then run this from $BUILDDIR
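+# e.g.: cd $BUILDDIR && gawk -f /path/to/core/bin/includebloat.awk
+# (the script path above is illustrative)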
+
+# NOTE: by default gbuild does not generate dependencies for system headers
+# (in particular the C++ standard library), so those will NOT be counted
+
+BEGIN {
+ cmd = "find workdir/Dep/CxxObject/ -name *.d | xargs cat"
+ while ((cmd | getline) > 0) {
+ if ($0 ~ /^ .*\\$/) {
+ gsub(/^ /, "");
+ gsub(/ *\\$/, "");
+ includes[$1]++
+ if ($2) {
+ # GCC emits 2 per line if short enough!
+ includes[$2]++
+ }
+ }
+ }
+ exit
+}
+
+END {
+ for (inc in includes) {
+ cmd = "wc -c " inc
+ if ((cmd | getline) < 0)
+ print "ERROR on: " cmd
+ sizes[inc] = $1 # $0 is wc -c output, $1 is size
+ totals[inc] = $1 * includes[inc]
+ totalsize += totals[inc]
+ close(cmd)
+ }
+ PROCINFO["sorted_in"] = "@val_num_desc"
+ print "sum total bytes included (excluding system headers): " totalsize
+ for (inc in totals) {
+ print totals[inc], sizes[inc], includes[inc], inc
+ }
+}
+
+# vim: set noet sw=4 ts=4:
diff --git a/bin/ios-mapfile-statistics b/bin/ios-mapfile-statistics
new file mode 100755
index 000000000..07f3f0aa6
--- /dev/null
+++ b/bin/ios-mapfile-statistics
@@ -0,0 +1,78 @@
+#!/usr/bin/perl -w
+
+use strict;
+
+use Getopt::Std;
+$Getopt::Std::STANDARD_HELP_VERSION++;
+
+my %args;
+
+getopts('f:s', \%args);
+
+sub VERSION_MESSAGE {
+ # Nothing
+}
+
+sub HELP_MESSAGE {
+ print <<EOS
+This program parses a linker map file, especially one produced when linking an iOS executable.
+
+Input is read from a map file provided as a command-line argument.
+
+By default a list of libraries used and the size of code and data
+linked in from each library is printed, in reverse order of size.
+
+The following options are available:
+-s Print a list of symbols instead.
+-f 'filter' Filter which libraries are handled. The filter can be
+ a regular expression, typically several library names
+ combined with the '|' operator. Makes sense only when
+ -s is used too.
+EOS
+}
+
+die "The -f switch makes sense only if -s is also used\n" if defined($args{'f'}) && !defined($args{'s'});
+
+die "Please provide one map file name\n" if !defined($ARGV[0]);
+
+die "Just one argument please\n" if defined($ARGV[1]);
+
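+# Parser states: 0 = before the "# Object files:" header, 1 = reading the
+# object-file list, 2 = after "# Sections:" waiting for the column header,
+# 3 = reading per-symbol size lines.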
+my $state = 0;
+my %libofnumber;
+my %sizeoflib;
+my %sizeofsym;
+
+open(INPUT, '<', $ARGV[0]) || die "Could not open $ARGV[0]: $!\n";
+
+while (<INPUT>) {
+ if ($state == 0 && m!^# Object files:!) {
+ $state = 1;
+ } elsif ($state == 1 && m!^\[ *([0-9]+)\] .*/([-_a-z0-9]+\.a)\(.*!i) {
+ $libofnumber{$1} = $2;
+ } elsif ($state == 1 && m!^# Sections:!) {
+ $state = 2;
+ } elsif ($state == 2 && m!^# Address\s+Size\s+File\s+Name!) {
+ $state = 3;
+ } elsif ($state == 3 && m!^0x[0-9A-F]+\s+(0x[0-9A-F]+)\s+\[ *([0-9]+)\] (.*)!) {
+ my ($size,$libnum,$symbol) = ($1, $2, $3);
+ if (defined($libofnumber{$libnum})) {
+ $sizeoflib{$libofnumber{$libnum}} += hex($size);
+ if (!defined($args{'f'}) || $libofnumber{$libnum} =~ /$args{'f'}/) {
+ $sizeofsym{$symbol} = hex($size);
+ }
+ }
+ }
+}
+
+if ($args{'s'}) {
+ # Print symbols in reverse size order
+ foreach (sort { $sizeofsym{$b} <=> $sizeofsym{$a} } keys(%sizeofsym)) {
+ print $_, ": ", $sizeofsym{$_}, "\n";
+ }
+} else {
+ # Print libraries in reverse size order
+ foreach (sort { $sizeoflib{$b} <=> $sizeoflib{$a} } keys(%sizeoflib)) {
+ print $_, ": ", $sizeoflib{$_}, "\n";
+ }
+}
+
diff --git a/bin/java-set-classpath.in b/bin/java-set-classpath.in
new file mode 100644
index 000000000..507264a3d
--- /dev/null
+++ b/bin/java-set-classpath.in
@@ -0,0 +1,53 @@
+#!/bin/sh
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# java-set-classpath - Utility to update the default
+# CLASSPATH for LibreOffice
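+#
+# Example invocation (the jar path is illustrative):
+#   java-set-classpath /usr/share/java/some-library.jar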
+
+if test "z$1" = "z" ; then
+ echo "Update the default CLASSPATH for LibreOffice"
+ echo ""
+ echo "Usage: $0 [dir|jar]..."
+ echo ""
+ echo "The utility updates the LibreOffice system setting. It adds or removes"
+ echo "the given directories and jar-files to or from the default CLASSPATH"
+ echo "depending on if they are available on the system or not."
+ echo ""
+ echo "Parameters:"
+ echo " dir - absolute path to a directory"
+ echo " jar - absolute path to a jar-file"
+ exit 0;
+fi
+
+JVM_CONFIG_FILE=@INSTALLDIR@/program/fundamentalrc
+
+for path in "$@" ; do
+ if test "z${path%%/*}" != "z" ; then
+ echo "Warning: the path "$path" is not absolute and will be ignored"
+ continue
+ fi
+ if test -e $path ; then
+ # the file exist
+ grep "URE_MORE_JAVA_CLASSPATH_URLS.*file:/*$path\([[:space:]].*\)\?$" $JVM_CONFIG_FILE >/dev/null && continue
+ # it is not registered
+ TMP_FILE=`mktemp /tmp/ooset-java-class.XXXXXXXXXX` || exit 1
+ sed -e "s|^\(.*URE_MORE_JAVA_CLASSPATH_URLS.*\)$|\1 file://$path|" $JVM_CONFIG_FILE >$TMP_FILE
+ mv -f $TMP_FILE $JVM_CONFIG_FILE
+ chmod 644 $JVM_CONFIG_FILE
+ else
+ # the file does not exist, remove it from the configuration
+ TMP_FILE=`mktemp /tmp/ooset-java-class.XXXXXXXXXX` || exit 1;
+ sed -e "s|^\(.*URE_MORE_JAVA_CLASSPATH_URLS.*\)file:/*$path\([[:space:]].*\)\?$|\1\2|" \
+ -e "s/\(URE_MORE_JAVA_CLASSPATH_URLS=\)[[:space:]]\+/\1/" \
+ -e "/^.*URE_MORE_JAVA_CLASSPATH_URLS/s/[[:space:]]\+/ /g" \
+ -e "/^.*URE_MORE_JAVA_CLASSPATH_URLS/s/[[:space:]]*$//" $JVM_CONFIG_FILE >$TMP_FILE
+ mv -f $TMP_FILE $JVM_CONFIG_FILE
+ chmod 644 $JVM_CONFIG_FILE
+ fi
+done
diff --git a/bin/lint-ui.py b/bin/lint-ui.py
new file mode 100755
index 000000000..d9d0784df
--- /dev/null
+++ b/bin/lint-ui.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, you can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# Takes a LibreOffice .ui file and provides linting tips for maintaining
+# a consistent look for dialogs
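+#
+# Typical invocation (illustrative): lint-ui.py path/to/dialog.ui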
+
+import sys
+import xml.etree.ElementTree as ET
+import re
+
+DEFAULT_WARNING_STR = 'Lint assertion failed'
+
+POSSIBLE_TOP_LEVEL_WIDGETS = ['GtkDialog', 'GtkMessageDialog', 'GtkBox', 'GtkFrame', 'GtkGrid']
+IGNORED_TOP_LEVEL_WIDGETS = ['GtkAdjustment', 'GtkImage', 'GtkListStore', 'GtkSizeGroup', 'GtkMenu', 'GtkTextBuffer']
+BORDER_WIDTH = '6'
+BUTTON_BOX_SPACING = '12'
+ALIGNMENT_TOP_PADDING = '6'
+#https://developer.gnome.org/hig-book/3.0/windows-alert.html.en#alert-spacing
+MESSAGE_BOX_SPACING = '24'
+MESSAGE_BORDER_WIDTH = '12'
+
+IGNORED_WORDS = ['the', 'of', 'to', 'for', 'a', 'and', 'as', 'from', 'on', 'into', 'by', 'at', 'or', 'do', 'in', 'when']
+
+def lint_assert(predicate, warning=DEFAULT_WARNING_STR):
+ if not predicate:
+ print(" * " + warning)
+
+def check_top_level_widget(element):
+ # check widget type
+ widget_type = element.attrib['class']
+    lint_assert(widget_type in POSSIBLE_TOP_LEVEL_WIDGETS,
+                "Top level widget should be 'GtkDialog', 'GtkMessageDialog', 'GtkBox', 'GtkFrame', or 'GtkGrid'")
+
+ # check border_width property
+ border_width_properties = element.findall("property[@name='border_width']")
+ if len(border_width_properties) < 1:
+ lint_assert(False, "No border_width set on top level widget. Should probably be " + BORDER_WIDTH)
+ if len(border_width_properties) == 1:
+ border_width = border_width_properties[0]
+ if widget_type == "GtkMessageDialog":
+ lint_assert(border_width.text == MESSAGE_BORDER_WIDTH,
+ "Top level 'border_width' property should be " + MESSAGE_BORDER_WIDTH)
+ else:
+ lint_assert(border_width.text == BORDER_WIDTH,
+ "Top level 'border_width' property should be " + BORDER_WIDTH)
+
+ # check that any widget which has 'has-default' also has 'can-default'
+ for widget in element.findall('.//object'):
+        if 'class' not in widget.attrib:
+ continue
+ widget_type = widget.attrib['class']
+ has_defaults = widget.findall("./property[@name='has_default']")
+ if len(has_defaults) > 0 and has_defaults[0].text == "True":
+ can_defaults = widget.findall("./property[@name='can_default']")
+            lint_assert(len(can_defaults)>0 and can_defaults[0].text == "True",
+                        "has_default without can_default in " + widget_type + " with id = '" + widget.attrib['id'] + "'")
+
+def check_button_box_spacing(element):
+ spacing = element.findall("property[@name='spacing']")[0]
+ lint_assert(spacing.text == BUTTON_BOX_SPACING,
+ "Button box 'spacing' should be " + BUTTON_BOX_SPACING)
+
+def check_message_box_spacing(element):
+ spacing = element.findall("property[@name='spacing']")[0]
+ lint_assert(spacing.text == MESSAGE_BOX_SPACING,
+ "Button box 'spacing' should be " + MESSAGE_BOX_SPACING)
+
+def check_radio_buttons(root):
+ radios = [element for element in root.findall('.//object') if element.attrib['class'] == 'GtkRadioButton']
+ for radio in radios:
+ radio_underlines = radio.findall("./property[@name='use_underline']")
+ assert len(radio_underlines) <= 1
+ if len(radio_underlines) < 1:
+ lint_assert(False, "No use_underline in GtkRadioButton with id = '" + radio.attrib['id'] + "'")
+
+def check_check_buttons(root):
+ radios = [element for element in root.findall('.//object') if element.attrib['class'] == 'GtkCheckButton']
+ for radio in radios:
+ radio_underlines = radio.findall("./property[@name='use_underline']")
+ assert len(radio_underlines) <= 1
+ if len(radio_underlines) < 1:
+ lint_assert(False, "No use_underline in GtkCheckButton with id = '" + radio.attrib['id'] + "'")
+
+def check_frames(root):
+ frames = [element for element in root.findall('.//object') if element.attrib['class'] == 'GtkFrame']
+ for frame in frames:
+ frame_alignments = frame.findall("./child/object[@class='GtkAlignment']")
+ assert len(frame_alignments) <= 1
+ if len(frame_alignments) < 1:
+ lint_assert(False, "No GtkAlignment in GtkFrame with id = '" + frame.attrib['id'] + "'")
+ if len(frame_alignments) == 1:
+ alignment = frame_alignments[0]
+ check_alignment_top_padding(alignment)
+
+def check_alignment_top_padding(alignment):
+ top_padding_properties = alignment.findall("./property[@name='top_padding']")
+ assert len(top_padding_properties) <= 1
+ if len(top_padding_properties) < 1:
+ lint_assert(False, "No GtkAlignment 'top_padding' set. Should probably be " + ALIGNMENT_TOP_PADDING)
+ if len(top_padding_properties) == 1:
+ top_padding = top_padding_properties[0]
+ lint_assert(top_padding.text == ALIGNMENT_TOP_PADDING,
+ "GtkAlignment 'top_padding' should be " + ALIGNMENT_TOP_PADDING)
+
+def check_title_labels(root):
+ labels = root.findall(".//child[@type='label']")
+ titles = [label.find(".//property[@name='label']") for label in labels]
+ for title in titles:
+ if title is None:
+ continue
+ words = re.split(r'[^a-zA-Z0-9:_-]', title.text)
+ first = True
+        for word in words:
+            if not word:
+                continue
+            if word[0].islower() and (word not in IGNORED_WORDS or first):
+                lint_assert(False, "The word '" + word + "' should be capitalized")
+            first = False
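+
+# e.g. a title of "insert table" is flagged twice ("insert" and "table"),
+# while "Insert the Table" passes, since "the" is in IGNORED_WORDS and not first.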
+
+def main():
+ print(" == " + sys.argv[1] + " ==")
+ tree = ET.parse(sys.argv[1])
+ root = tree.getroot()
+
+    lint_assert('domain' in root.attrib, "interface needs to specify a translation domain")
+
+ top_level_widgets = [element for element in root.findall('object') if element.attrib['class'] not in IGNORED_TOP_LEVEL_WIDGETS]
+ assert len(top_level_widgets) == 1
+
+ top_level_widget = top_level_widgets[0]
+ check_top_level_widget(top_level_widget)
+
+ # TODO - only do this if we have a GtkDialog?
+ # check button box spacing
+ button_box = top_level_widget.findall("./child/object[@id='dialog-vbox1']")
+ if len(button_box) > 0:
+ element = button_box[0]
+ check_button_box_spacing(element)
+
+ message_box = top_level_widget.findall("./child/object[@id='messagedialog-vbox']")
+ if len(message_box) > 0:
+ element = message_box[0]
+ check_message_box_spacing(element)
+
+ check_frames(root)
+
+ check_radio_buttons(root)
+
+ check_check_buttons(root)
+
+ check_title_labels(root)
+
+if __name__ == "__main__":
+ main()
diff --git a/bin/list-dispatch-commands.py b/bin/list-dispatch-commands.py
new file mode 100755
index 000000000..0b13f89e8
--- /dev/null
+++ b/bin/list-dispatch-commands.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Script to generate https://wiki.documentfoundation.org/Development/DispatchCommands
+"""
+
+import argparse
+import os
+import sys
+
+
+def get_files_list(directory, extension):
+ array_items = []
+
+ dh = os.scandir(directory)
+ for entry in dh:
+ if entry.is_dir():
+ array_items += get_files_list(entry.path, extension)
+ elif entry.is_file():
+ if entry.name.endswith(extension):
+ array_items.append(entry.path)
+
+ return array_items
+
+
+def analyze_file(filename, all_slots):
+ with open(filename) as fh:
+ for line in fh:
+ if not line.startswith('// Slot Nr. '):
+ continue
+
+ tmp = line.split(':')
+ slot_id = tmp[1].strip()
+
+ line = next(fh)
+ tmp = line.split(',')
+ slot_rid = tmp[1]
+
+ next(fh)
+ next(fh)
+ line = next(fh)
+ mode = 'C' if 'CACHABLE' in line else ' '
+ mode += 'U' if 'AUTOUPDATE' in line else ' '
+ mode += 'M' if 'MENUCONFIG' in line else ' '
+ mode += 'T' if 'TOOLBOXCONFIG' in line else ' '
+ mode += 'A' if 'ACCELCONFIG' in line else ' '
+
+ next(fh)
+ next(fh)
+ line = next(fh)
+ if '"' not in line:
+ line = next(fh)
+ tmp = line.split('"')
+ try:
+ slot_name = '.uno:' + tmp[1]
+ except IndexError:
+ print("Warning: expected \" in line '%s' from file %s" % (line.strip(), filename),
+ file=sys.stderr)
+ slot_name = '.uno:'
+
+ if slot_name not in all_slots:
+ all_slots[slot_name] = {'slot_id': slot_id,
+ 'slot_rid': slot_rid,
+ 'mode': mode,
+ 'slot_description': ''}
+
+
+def analyze_xcu(filename, all_slots):
+ with open(filename) as fh:
+ for line in fh:
+ if '<node oor:name=".uno:' not in line:
+ continue
+
+ tmp = line.split('"')
+ slot_name = tmp[1]
+
+ while '<value xml:lang="en-US">' not in line:
+ try:
+ line = next(fh)
+ except StopIteration:
+ print("Warning: couldn't find '<value xml:lang=\"en-US\">' line in %s" % filename,
+ file=sys.stderr)
+ break
+
+ line = line.replace('<value xml:lang="en-US">', '')
+ line = line.replace('</value>', '').strip()
+
+ if slot_name in all_slots:
+ all_slots[slot_name]['slot_description'] = line.replace('~', '')
+
+
+def main():
+ modules = ['basslots', 'scslots', 'sdgslots', 'sdslots', 'sfxslots', 'smslots', 'svxslots', 'swslots']
+ sdi_dir = './workdir/SdiTarget'
+ sdi_ext = '.hxx'
+ xcu_dir = 'officecfg/registry/data/org/openoffice/Office/UI'
+ xcu_ext = '.xcu'
+ all_slots = {}
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('module', choices=modules)
+ args = parser.parse_args()
+
+ module_filename = args.module + sdi_ext
+
+ sdi_files = get_files_list(sdi_dir, sdi_ext)
+ for sdi_file in sdi_files:
+ sdi_file_basename = os.path.basename(sdi_file)
+ if sdi_file_basename == module_filename:
+ analyze_file(sdi_file, all_slots)
+
+ xcu_files = get_files_list(xcu_dir, xcu_ext)
+ for xcu_file in xcu_files:
+ analyze_xcu(xcu_file, all_slots)
+
+ for name in sorted(all_slots.keys()):
+ props = all_slots[name]
+ print('|-\n| %s' % name)
+ print('| %(slot_rid)s\n| %(slot_id)s\n| %(mode)s\n| %(slot_description)s' % props)
+
+ print("|-")
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/list-uitest.py b/bin/list-uitest.py
new file mode 100755
index 000000000..da6703556
--- /dev/null
+++ b/bin/list-uitest.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import datetime
+
+def analyze_file(filename):
+ class_name = ""
+ method_list = []
+ with open(filename, encoding='utf-8') as fh:
+ for line in fh:
+ if line.lstrip().startswith('class '):
+ class_name = line.lstrip().split(" ")[1].split("(")[0]
+ elif line.lstrip().startswith('def test_'):
+ method_list.append(
+ line.lstrip().split("test_")[1].split("(")[0])
+ else:
+ continue
+ return class_name, method_list
+
+def get_files_list(directory, extension):
+ array_items = []
+
+ dh = os.scandir(directory)
+ for entry in dh:
+ if entry.is_dir():
+ array_items += get_files_list(entry.path, extension)
+ elif entry.is_file():
+ if entry.name.endswith(extension):
+ array_items.append(entry.path)
+
+ return array_items
+
+def linkFormat(name):
+ if name.startswith('tdf'):
+ return "[https://bugs.documentfoundation.org/show_bug.cgi?id={} {}]"\
+ .format(name.split('tdf')[1], name)
+ else:
+ return name
+
+
+def main():
+ uitest_ext = '.py'
+ uitest_dirs = {
+ 'Writer' : ['../uitest/writer_tests/', '../writerperfect/qa/uitest/', '../sw/qa/uitest/'],
+ 'Calc' : ['../uitest/calc_tests', '../sc/qa/uitest/'],
+ 'Impress' : ['../uitest/impress_tests/', '../sd/qa/uitest/'],
+ 'Math': ['../uitest/math_tests/'],
+ 'Draw': [''],
+ 'Manual_tests': ['../uitest/manual_tests/']}
+
+ print('{{TopMenu}}')
+ print('{{Menu}}')
+ print('{{Menu.Development}}')
+ print()
+ print('Generated on ' + str(datetime.datetime.now()))
+ for k,v in uitest_dirs.items():
+ print('\n=== ' + k + ' ===')
+ for uitest_dir in v:
+ if uitest_dir:
+ uitest_files = get_files_list(uitest_dir, uitest_ext)
+ for uitest_file in uitest_files:
+ class_name, method_names = analyze_file(uitest_file)
+ if class_name:
+ print("* {} ({})".format(
+ linkFormat(class_name),uitest_file[3:]))
+ for m in method_names:
+ print('**' + linkFormat(m))
+ print()
+ print('[[Category:QA]][[Category:Development]]')
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/lo-all-static-libs b/bin/lo-all-static-libs
new file mode 100755
index 000000000..0fcea02eb
--- /dev/null
+++ b/bin/lo-all-static-libs
@@ -0,0 +1,91 @@
+#!/bin/sh
+
+# Output a list of all our (static) libraries, to be used when
+# building the single executable or single dynamic object that is used
+# in an LO-based iOS or Android app. (All our libraries and bundled
+# 3rd-party ones are built as static archives for these platforms.)
+
+# This script is to be run once a full "make" for iOS or Android has
+# otherwise completed, when just building the actual apps is left.
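+
+# The output is a single line of .a archive paths meant for a link command
+# line, e.g. (hypothetical): clang++ -o app ... $(bin/lo-all-static-libs)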
+
+if test -z "$INSTDIR" ; then
+ echo This script should be invoked only in a build.
+ exit 1
+fi
+
+if test "$OS" != ANDROID -a "$OS" != iOS; then
+ echo This script makes sense only in Android or iOS builds.
+fi
+
+foolibs=
+for var in EBOOK_LIBS FREEHAND_LIBS HARFBUZZ_LIBS HUNSPELL_LIBS HYPHEN_LIB MYTHES_LIBS; do
+ dirs=
+ libs=
+ for i in `eval echo '$'$var`; do
+ case "$i" in
+ -L*) dirs="$dirs ${i#-L}";;
+ -l*) libs="$libs ${i#-l}";;
+ esac
+ done
+ for l in $libs; do
+ for d in $dirs; do
+ test -f $d/lib$l.a && foolibs="$foolibs $d/lib$l.a"
+ done
+ done
+done
+
+case $OS in
+ANDROID)
+ oslibs="$WORKDIR/UnpackedTarball/curl/lib/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/fontconfig/src/.libs/libfontconfig.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/freetype/objs/.libs/libfreetype.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/pixman/pixman/.libs/libpixman-1.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/cairo/src/.libs/libcairo.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/xmlsec/src/.libs/libxmlsec1.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/xmlsec/src/nss/.libs/libxmlsec1-nss.a"
+ # Only liblo-bootstrap.a ends up here:
+ oslibs="$oslibs $WORKDIR/LinkTarget/Library/lib*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/openssl/*.a"
+
+ # coinmp not used for iOS
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/Cbc/src/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/Cgl/src/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/Clp/src/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/Clp/src/OsiClp/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/CoinMP/src/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/CoinUtils/src/.libs/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/coinmp/Osi/src/Osi/.libs/*.a"
+ ;;
+iOS)
+ oslibs="$WORKDIR/UnpackedTarball/icu/source/stubdata/*.a"
+ oslibs="$oslibs $WORKDIR/UnpackedTarball/cppunit/src/cppunit/.libs/*.a"
+ ;;
+*)
+ oslibs=
+ ;;
+esac
+
+echo $INSTDIR/$LIBO_LIB_FOLDER/lib*.a \
+ $foolibs \
+ $WORKDIR/LinkTarget/StaticLibrary/lib*.a \
+ $oslibs \
+ $WORKDIR/UnpackedTarball/icu/source/lib/*.a \
+ $WORKDIR/UnpackedTarball/libjpeg-turbo/.libs/*.a \
+ $WORKDIR/UnpackedTarball/liblangtag/liblangtag/.libs/*.a \
+ $WORKDIR/UnpackedTarball/lcms2/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libabw/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libcdr/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libepubgen/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libexttextcat/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libmspub/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libmwaw/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libodfgen/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/liborcus/src/*/.libs/*.a \
+ $WORKDIR/UnpackedTarball/librevenge/src/*/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libvisio/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libwp?/src/lib/.libs/*.a \
+ $WORKDIR/UnpackedTarball/raptor/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/rasqal/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/redland/src/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libxml2/.libs/*.a \
+ $WORKDIR/UnpackedTarball/libxslt/libxslt/.libs/*.a
diff --git a/bin/lo-commit-stat b/bin/lo-commit-stat
new file mode 100755
index 000000000..08e8a1785
--- /dev/null
+++ b/bin/lo-commit-stat
@@ -0,0 +1,584 @@
+#!/usr/bin/perl
+ eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
+ if $running_under_some_shell;
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+use LWP::UserAgent;
+use utf8;
+use File::Temp;
+use Encode;
+use open ':encoding(utf8)';
+use open ':std' => ':encoding(utf8)';
+
+my %module_dirname = (
+ "core" => "",
+ "dictionaries" => "dictionaries",
+ "help" => "helpcontent2",
+ "translations" => "translations"
+);
+
+
+my %bugzillas = (
+ fdo => "https://bugs.documentfoundation.org/show_bug.cgi?id=",
+ tdf => "https://bugs.documentfoundation.org/show_bug.cgi?id=",
+ bnc => "https://bugzilla.novell.com/show_bug.cgi?id=",
+ rhbz => "https://bugzilla.redhat.com/show_bug.cgi?id=",
+ i => "https://bz.apache.org/ooo/show_bug.cgi?id=",
+ fate => "https://features.opensuse.org/",
+);
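+
+# e.g. the bug id "tdf#123456" resolves to
+# https://bugs.documentfoundation.org/show_bug.cgi?id=123456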
+
+sub search_bugs($$$$)
+{
+ my ($pdata, $module, $commit_id, $line) = @_;
+
+ my $bug = "";
+ my $bug_orig;
+ while (defined $bug) {
+
+ # match fdo#123, rhz#123, i#123, #123
+ # but match only bug number with >= 4 digits
+ if ( $line =~ m/(\w+\#+\d{4,})/ ) {
+ $bug_orig = $1;
+ $bug = $1;
+ # default to issuezilla for the #123 variant
+ # but match only bug number with >= 4 digits
+ } elsif ( $line =~ m/(\#)(\d{4,})/ ) {
+ $bug_orig = $1 . $2;
+ $bug = "i#$2";
+ # match #i123#
+ } elsif ( $line =~ m/(\#i)(\d+)(\#)/ ) {
+ $bug_orig = $1 . $2 . $3;
+ $bug = "i#$2";
+ } else {
+ $bug = undef;
+ next;
+ }
+
+# print " found $bug\n";
+    # remove the bug number from the comment; it will be added back later in a standardized way
+ $bug_orig =~ s/\#/\\#/;
+ $line =~ s/(,\s)*[Rr](elated|esolve[ds]):?\s*$bug_orig\s?:?\s*//;
+ $line =~ s/\s*-\s*$bug_orig\s*//;
+ $line =~ s/\(?$bug_orig\)?\s*[:,-]?\s*//;
+
+ # bnc# is preferred over n# for novell bugs
+ $bug =~ s/^n\#/bnc#/;
+ # deb# is preferred over debian# for debian bugs
+ $bug =~ s/^debian\#/deb#/;
+ # easyhack# is sometimes used for fdo# - based easy hacks
+ $bug =~ s/^easyhack\#/fdo#/;
+ # someone mistyped fdo as fd0
+ $bug =~ s/^fd0\#/fdo#/;
+ # save the bug number
+ $pdata->{$module}{$commit_id}{'bugs'}{$bug} = 1;
+ }
+
+ return $line;
+}
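+
+# e.g. the summary line "tdf#123456: fix crash on paste" is recorded under
+# bug tdf#123456 and reduced to "fix crash on paste".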
+
+sub standardize_summary($)
+{
+ my $line = shift;
+
+ $line =~ s/^\s*//;
+ $line =~ s/\s*$//;
+
+    # lowercase the first letter if the first word is otherwise all lowercase
+ if ( $line =~ m/(^.[a-z]+\b)/ ) {
+ $line =~ m/(^.)/;
+ my $first_char = lc($1);
+ $line =~ s/^./$first_char/;
+ }
+
+ # FIXME: remove do at the end of line
+ # remove bug numbers
+ return $line;
+}
+
+sub generate_git_cherry_ids_log($$$$$)
+{
+ my ($pdata, $repo_dir, $module, $branch_name, $git_args) = @_;
+
+ my $commit_ids_log;
+ my $commit_ids_log_fh;
+ $commit_ids_log_fh = File::Temp->new(TEMPLATE => 'lo-commit-stat-ids-XXXXXX',
+ DIR => '/tmp',
+ UNLINK => 0);
+ $commit_ids_log = $commit_ids_log_fh->filename;
+
+ print STDERR "Filtering cherry-picked commits in the git repo: $module...\n";
+
+ my $cmd = "cd $repo_dir; git cherry $git_args";
+ open (GIT, "$cmd 2>&1|") || die "Can't run $cmd: $!";
+
+ while (my $line = <GIT>) {
+
+ # skip cherry-picked commits
+ next if ( $line =~ m/^\-/ );
+
+ if ( $line =~ m/^\+ / ) {
+ $line =~ s/^\+ //;
+ print $commit_ids_log_fh $line;
+ }
+ }
+
+ close GIT;
+ close $commit_ids_log_fh;
+
+ return $commit_ids_log;
+}
+
+sub load_git_log($$$$$$$)
+{
+ my ($pdata, $repo_dir, $module, $branch_name, $git_command, $git_cherry, $git_args) = @_;
+
+ my $cmd = "cd $repo_dir;";
+ my $commit_ids_log;
+
+ if ($git_cherry) {
+ $commit_ids_log = generate_git_cherry_ids_log($pdata, $repo_dir, $module, $branch_name, $git_args);
+ $cmd .= " cat $commit_ids_log | xargs -n 1 $git_command -1";
+ } else {
+ $cmd .= " $git_command $git_args";
+ }
+
+ my $commit_id;
+ my $summary;
+
+ print STDERR "Analyzing log from the git repo: $module...\n";
+
+# FIXME: ./g pull moves submodules to unnamed branches
+# my $repo_branch_name = get_branch_name($repo_dir);
+# if ( $branch_name ne $repo_branch_name ) {
+# die "Error: mismatch of branches:\n" .
+# " main repo is on the branch: $branch_name\n" .
+# " $module repo is on the branch: $repo_branch_name\n";
+# }
+
+ open (GIT, "$cmd 2>&1|") || die "Can't run $cmd: $!";
+
+ while (my $line = <GIT>) {
+ chomp $line;
+
+ if ( $line =~ m/^commit ([0-9a-z]{20})/ ) {
+ $commit_id = $1;
+ $summary=undef;
+ next;
+ }
+
+ if ( $line =~ /^Author:\s*([^\<]*)\<([^\>]*)>/ ) {
+ # get rid of extra empty spaces;
+ my $name = $1;
+ my $email = $2;
+ $name =~ s/\s+$//;
+ die "Error: Author already defined for the commit {$commit_id}\n" if defined ($pdata->{$module}{$commit_id}{'author'});
+ $pdata->{$module}{$commit_id}{'author'}{'name'} = $name;
+ $pdata->{$module}{$commit_id}{'author'}{'email'} = $email;
+ next;
+ }
+
+ if ( $line =~ /^Date:\s+/ ) {
+ # ignore date line
+ next;
+ }
+
+ if ( $line =~ /^\s*$/ ) {
+ # ignore empty line
+ next;
+ }
+
+ unless (defined $pdata->{$module}{$commit_id}{'summary'}) {
+ $line = search_bugs($pdata, $module, $commit_id, $line);
+ # FIXME: need to be implemented
+ # search_keywords($pdata, $line);
+
+ $summary = standardize_summary($line);
+ $pdata->{$module}{$commit_id}{'summary'} = $summary;
+ }
+ }
+
+ close GIT;
+ unlink $commit_ids_log if ($git_cherry);
+}
+
+sub get_repo_name($)
+{
+ my $repo_dir = shift;
+
+ open (GIT_CONFIG, "$repo_dir/.git/config") ||
+ die "can't open \"$$repo_dir/.git/config\" for reading: $!\n";
+
+ while (my $line = <GIT_CONFIG>) {
+ chomp $line;
+
+ if ( $line =~ /^\s*url\s*=\s*(\S+)$/ ) {
+ my $repo_name = "$1";
+            $repo_name =~ s/.*\///g;
+ return "$repo_name";
+ }
+ }
+ die "Error: can't find repo name in \"$$repo_dir/.git/config\"\n";
+}
+
+sub load_data($$$$$$$)
+{
+ my ($pdata, $top_dir, $p_module_dirname, $branch_name, $git_command, $git_cherry, $git_args) = @_;
+
+ foreach my $module (sort { $a cmp $b } keys %{$p_module_dirname}) {
+ load_git_log($pdata, "$top_dir/$p_module_dirname->{$module}", $module, $branch_name, $git_command, $git_cherry, $git_args);
+ }
+}
+
+sub get_branch_name($)
+{
+ my ($top_dir) = @_;
+
+ my $branch;
+ my $cmd = "cd $top_dir && git branch";
+
+ open (GIT, "$cmd 2>&1|") || die "Can't run $cmd: $!";
+
+ while (my $line = <GIT>) {
+ chomp $line;
+
+ if ( $line =~ m/^\*\s*(\S+)/ ) {
+ $branch = "$1";
+ }
+ }
+
+ close GIT;
+
+ die "Error: did not detect git branch name in $top_dir\n" unless defined ($branch);
+
+ return $branch;
+}
+
+sub get_bug_list($$$)
+{
+ my ($pdata, $pbugs, $check_bugzilla) = @_;
+
+ # associate bugs with their summaries and fixers
+ foreach my $module ( keys %{$pdata}) {
+ foreach my $id ( keys %{$pdata->{$module}}) {
+ foreach my $bug (keys %{$pdata->{$module}{$id}{'bugs'}}) {
+ my $author = $pdata->{$module}{$id}{'author'}{'name'};
+ my $summary = $pdata->{$module}{$id}{'summary'};
+ $pbugs->{$bug}{'summary'} = $summary;
+ $pbugs->{$bug}{'author'}{$author} = 1;
+ }
+ }
+ }
+
+ # try to replace summaries with bug names from bugzilla
+ if ($check_bugzilla) {
+ print "Getting bug titles:\n";
+ foreach my $bug ( sort { $a cmp $b } keys %{$pbugs}) {
+ $pbugs->{$bug}{'summary'} = get_bug_name($bug, $pbugs->{$bug}{'summary'});
+ }
+ }
+}
+
+sub open_log_file($$$$$$)
+{
+ my ($log_dir, $log_prefix, $log_suffix, $top_dir, $branch_name, $wiki) = @_;
+
+ my $logfilename = "$log_prefix-$branch_name-$log_suffix";
+ $logfilename = "$log_dir/$logfilename" if (defined $log_dir);
+ if ($wiki) {
+ $logfilename .= ".wiki";
+ } else {
+ $logfilename .= ".log";
+ }
+
+ if (-f $logfilename) {
+ print "WARNING: The log file already exists: $logfilename\n";
+ print "Do you want to overwrite it? (Y/n)?\n";
+ my $answer = <STDIN>;
+ chomp $answer;
+ $answer = "y" unless ($answer);
+ die "Please, rename the file or choose another log suffix\n" if ( lc($answer) ne "y" );
+ }
+
+ my $log;
+ open($log, '>', $logfilename) || die "Can't open \"$logfilename\" for writing: $!\n";
+
+ return $log;
+}
+
+sub print_commit_summary($$$$$$)
+{
+ my ($summary, $pmodule_title, $pbugs, $pauthors, $prefix, $log) = @_;
+
+ return if ( $summary eq "" );
+
+ # print module title if not done yet
+ if ( defined ${$pmodule_title} ) {
+ print $log "${$pmodule_title}\n";
+ ${$pmodule_title} = undef;
+ }
+
+ # finally print the summary line
+ my $bugs = "";
+ if ( %{$pbugs} ) {
+ $bugs = " (" . join (", ", keys %{$pbugs}) . ")";
+ }
+
+ my $authors = "";
+ if ( %{$pauthors} ) {
+ $authors = " [" . join (", ", keys %{$pauthors}) . "]";
+ }
+
+ print $log $prefix, $summary, $bugs, $authors, "\n";
+}
+
+sub print_commits($$$)
+{
+ my ($pdata, $log, $wiki) = @_;
+
+ foreach my $module ( sort { $a cmp $b } keys %{$pdata}) {
+ # check if this module has any entries at all
+ my $module_title = "+ $module";
+ if ( %{$pdata->{$module}} ) {
+ my $old_summary="";
+ my %authors = ();
+ my %bugs = ();
+ foreach my $id ( sort { lc $pdata->{$module}{$a}{'summary'} cmp lc $pdata->{$module}{$b}{'summary'} } keys %{$pdata->{$module}}) {
+ my $summary = $pdata->{$module}{$id}{'summary'};
+ if ($summary ne $old_summary) {
+ print_commit_summary($old_summary, \$module_title, \%bugs, \%authors, " + ", $log);
+ $old_summary = $summary;
+ %authors = ();
+ %bugs = ();
+ }
+ # collect bug numbers
+ if (defined $pdata->{$module}{$id}{'bugs'}) {
+ foreach my $bug (keys %{$pdata->{$module}{$id}{'bugs'}}) {
+ $bugs{$bug} = 1;
+ }
+ }
+ # collect author names
+ my $author = $pdata->{$module}{$id}{'author'}{'name'};
+ $authors{$author} = 1;
+ }
+ print_commit_summary($old_summary, \$module_title, \%bugs, \%authors, " + ", $log);
+ }
+ }
+}
+
+sub get_bug_name($$)
+{
+ my ($bug, $summary) = @_;
+ print "$bug: ";
+
+ $bug =~ m/(?:(\w*)\#+(\d+))/; # fdo#12345
+ my $bugzilla = $1; # fdo
+ my $bug_number = $2; # 12345
+
+ if ( $bugzillas{$bugzilla} ) {
+ my $url = $bugzillas{$bugzilla} . $bug_number;
+ my $ua = LWP::UserAgent->new;
+ $ua->timeout(10);
+ $ua->env_proxy;
+ my $response = $ua->get($url);
+ if ($response->is_success) {
+ my $title = decode('utf8', $response->title);
+ if ( $title =~ s/^(?:Bug $bug_number \S+|$bug_number –) // ) {
+ print "$title\n";
+ return $title;
+ } else {
+ print "warning: not found; using commit message (only got $title)";
+ }
+ }
+ }
+ print "\n";
+
+ return $summary;
+}
+
+sub print_bugs($$$)
+{
+ my ($pbugs, $log, $wiki) = @_;
+
+ # sort alphabetically by bugzilla-type, but within that numerically
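+    # e.g. bnc#99999 sorts before fdo#1234, and fdo#1234 before fdo#56789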
+ foreach my $bug ( sort { ($a =~ /(\D+)/)[0] cmp ($b =~ /(\D+)/)[0] ||
+ ($a =~ /(\d+)/)[0] <=> ($b =~ /(\d+)/)[0] } keys %{$pbugs}) {
+ my $summary = $pbugs->{$bug}{'summary'};
+
+ my $authors = "";
+ if ( %{$pbugs->{$bug}{'author'}} ) {
+ $authors = " [" . join (", ", keys %{$pbugs->{$bug}{'author'}}) . "]";
+ }
+
+ $bug =~ s/(.*)\#(.*)/# {{$1|$2}}/ if ($wiki);
+ print $log $bug, " ", $summary, $authors, "\n";
+ }
+}
+
+sub print_bugs_changelog($$$)
+{
+ my ($pbugs, $log, $wiki) = @_;
+
+ foreach my $bug ( sort { $a cmp $b } keys %{$pbugs}) {
+ my $summary = $pbugs->{$bug}{'summary'};
+
+ my $authors = "";
+ if ( %{$pbugs->{$bug}{'author'}} ) {
+ $authors = " [" . join (", ", keys %{$pbugs->{$bug}{'author'}}) . "]";
+ }
+
+ print $log " + $summary ($bug)$authors\n";
+ }
+}
+
+sub print_bugnumbers($$$)
+{
+ my ($pbugs, $log, $wiki) = @_;
+
+ print $log join ("\n", sort { $a cmp $b } keys %{$pbugs}), "\n";
+}
+
+sub generate_log($$$$$$$$)
+{
+ my ($pused_data, $print_func, $log_dir, $log_prefix, $log_suffix, $top_dir, $branch_name, $wiki) = @_;
+
+ my $log = open_log_file($log_dir, $log_prefix, $log_suffix, $top_dir, $branch_name, $wiki);
+ & {$print_func} ($pused_data, $log, $wiki);
+ close $log;
+}
+
+########################################################################
+# help
+
+sub usage()
+{
+ print "This script generates LO git commit summary\n\n" .
+
+ "Usage: lo-commit-stat [--help] [--no-submodules] [--module=<module>] --log-dir=<dir> --log-suffix=<string> topdir [git_arg...]\n\n" .
+
+ "Options:\n" .
+ " --help print this help\n" .
+ " --no-submodule read changes just from the main repository, ignore submodules\n" .
+ " --module=<module> summarize just changes from the given module, use \"core\"\n" .
+ " for the main module\n" .
+ " --log-dir=<dir> directory where to put the generated log\n" .
+ " --log-suffix=<string> suffix of the log file name; the result will be\n" .
+ " commit-log-<branch>-<log-name-suffix>.log; the branch name\n" .
+ " is detected automatically\n" .
+ " --commits generate log with all commits (default)\n" .
+ " --bugs generate log with bugzilla entries\n" .
+ " --bugs-changelog generate log with bugzilla entries, use changelog style\n" .
+ " --bugs-wiki generate log with bugzilla entries, use wiki markup\n" .
+ " --bugs-numbers generate log with bugzilla numbers\n" .
+ " --rev-list use \"git rev-list\" instead of \"git log\"; useful to check\n" .
+ " differences between branches\n" .
+ " --cherry use \"git cherry\" instead of \"git log\"; detects cherry-picked\n" .
+ " commits between branches\n" .
+ " topdir directory with the libreoffice/core clone\n" .
+ " git_arg extra parameters passed to the git command to define\n" .
+ " the area of interest; The default command is \"git log\" and\n" .
+ " parameters might be, for example, --after=\"2010-09-27\" or\n" .
+ " TAG..HEAD; with the option --rev-list, useful might be, for\n" .
+ " example origin/master ^origin/libreoffice-3-3; with the option\n" .
+                          --cherry, useful might be, for example libreoffice-3.6.3.2\n" .
+ " libreoffice-3.6.4.1\n";
+}
+
+
+#######################################################################
+#######################################################################
+# MAIN
+#######################################################################
+#######################################################################
+
+
+my $module;
+my %generate_log = ();
+my $top_dir;
+my $log_dir;
+my $log_suffix;
+my $log;
+my $list_bugs = 0;
+my $check_bugzilla = 0;
+my $branch_name;
+my $git_command = "git log";
+my $git_cherry;
+my $git_args = "";
+my %data;
+my %bugs = ();
+
+
+foreach my $arg (@ARGV) {
+ if ($arg eq '--help') {
+ usage();
+ exit;
+ } elsif ($arg eq '--no-submodule') {
+ $module = "core";
+ } elsif ($arg =~ m/--module=(.*)/) {
+ $module = $1;
+ } elsif ($arg =~ m/--log-suffix=(.*)/) {
+ $log_suffix = "$1";
+ } elsif ($arg =~ m/--log-dir=(.*)/) {
+ $log_dir = "$1";
+ } elsif ($arg eq '--commits') {
+ $generate_log{"commits"} = 1;
+ } elsif ($arg eq '--bugs') {
+ $generate_log{"bugs"} = 1;
+ $check_bugzilla = 1;
+ $list_bugs = 1;
+ } elsif ($arg eq '--bugs-changelog') {
+ $generate_log{"bugs-changelog"} = 1;
+ $check_bugzilla = 1;
+ $list_bugs = 1;
+ } elsif ($arg eq '--bugs-wiki' || $arg eq '--wikibugs') {
+ $generate_log{"bugs-wiki"} = 1;
+ $check_bugzilla = 1;
+ $list_bugs = 1;
+ } elsif ($arg eq '--bugs-numbers' || $arg eq '--bug-numbers') {
+ $generate_log{"bugs-numbers"} = 1;
+ $list_bugs = 1;
+ } elsif ($arg eq '--rev-list') {
+ $git_command = "git rev-list --pretty=medium"
+ } elsif ($arg eq '--cherry') {
+ $git_command = "git log";
+ $git_cherry = 1;
+ } else {
+ if (! defined $top_dir) {
+ $top_dir=$arg;
+ } else {
+ $git_args .= " $arg";
+ }
+ }
+}
+
+# default log
+unless (%generate_log) {
+ $generate_log{"commits"} = 1;
+}
+
+# we want only one module
+if ($module) {
+ my $name = $module_dirname{$module};
+ %module_dirname = ();
+ $module_dirname{$module} = $name;
+}
+
+(defined $top_dir) || die "Error: top directory is not defined\n";
+(-d "$top_dir") || die "Error: not a directory: $top_dir\n";
+(-f "$top_dir/.git/config") || die "Error: can't find $top_dir/.git/config\n";
+
+(!defined $log_dir) || (-d $log_dir) || die "Error: directory does not exist: $log_dir\n";
+
+(defined $log_suffix) || die "Error: define log suffix using --log-suffix=<string>\n";
+
+$branch_name = get_branch_name($top_dir);
+
+load_data(\%data, $top_dir, \%module_dirname, $branch_name, $git_command, $git_cherry, $git_args);
+get_bug_list(\%data, \%bugs, $check_bugzilla) if ($list_bugs);
+
+generate_log(\%data, \&print_commits, $log_dir, "commits", $log_suffix, $top_dir, $branch_name, 0) if (defined $generate_log{"commits"});
+generate_log(\%bugs, \&print_bugs, $log_dir, "bugs", $log_suffix, $top_dir, $branch_name, 0) if (defined $generate_log{"bugs"});
+generate_log(\%bugs, \&print_bugs, $log_dir, "bugs", $log_suffix, $top_dir, $branch_name, 1) if (defined $generate_log{"bugs-wiki"});
+generate_log(\%bugs, \&print_bugs_changelog, $log_dir, "bugs-changelog", $log_suffix, $top_dir, $branch_name, 0) if (defined $generate_log{"bugs-changelog"});
+generate_log(\%bugs, \&print_bugnumbers, $log_dir, "bug-numbers", $log_suffix, $top_dir, $branch_name, 0) if (defined $generate_log{"bugs-numbers"});
diff --git a/bin/lo-pack-sources b/bin/lo-pack-sources
new file mode 100755
index 000000000..8c795dc17
--- /dev/null
+++ b/bin/lo-pack-sources
@@ -0,0 +1,485 @@
+#!/usr/bin/perl
+ eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
+ if $running_under_some_shell;
+#!/usr/bin/perl
+
+use strict;
+use File::Copy;
+use File::Temp qw/ tempfile tempdir /;
+
+my %module_dirname = (
+ "core" => "",
+ "dictionaries" => "dictionaries",
+ "help" => "helpcontent2",
+ "translations" => "translations"
+);
+my $lo_topdir_name;
+
+# get libreoffice-build version from the given libreoffice-build sources
+sub get_config_version($)
+{
+ my ($lo_core_dir) = @_;
+ my $version;
+
+ open (CONFIGURE, "$lo_core_dir/configure.ac") ||
+ die "can't open \"$lo_core_dir/configure.ac\" for reading: $!\n";
+
+ while (my $line = <CONFIGURE>) {
+ chomp $line;
+
+ if ($line =~ /AC_INIT\s*\(\s*libreoffice-build\s*,\s*([\w\.]*)\)/) {
+ $version="$1";
+ }
+ }
+ close (CONFIGURE);
+ return $version;
+}
+
+# increment the version for a test build:
+# + add 'a' if the version ended with a number
+# + bump the letter otherwise
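+# e.g. "3.6.2" -> "3.6.2a", and then "3.6.2a" -> "3.6.2b"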
+sub inc_test_version($)
+{
+ my ($version) = @_;
+
+ my $lastchar = chop $version;
+ my $new_version;
+
+ if ($lastchar =~ /\d/) {
+ return "$version" . "$lastchar" . "a";
+ } elsif ($lastchar =~ /\w/) {
+ # select next letter alphabetically: a->b, b->c, ...
+ $lastchar =~ tr/0a-zA-Z/a-zA-Z0/;
+ return "$version" . "$lastchar";
+ } else {
+ die "Can't generate test version from \"$version$lastchar\n";
+ }
+}
+
+sub get_release_version($$$$)
+{
+ my ($config_version, $state_config_version, $state_release_version, $inc_version) = @_;
+ my $release_version;
+
+ if (defined $state_config_version &&
+ defined $state_release_version &&
+ "$state_config_version" eq "$config_version") {
+ $release_version = "$state_release_version";
+ } else {
+ $release_version = "$config_version";
+ }
+
+ if ( defined $inc_version ) {
+ $release_version = inc_test_version($release_version);
+ }
+
+ return $release_version;
+}
+
+# copy files to a temp dir, showing progress; uses git archive, so only tracked files are copied
+sub copy_dir_filter_and_show_progress($$)
+{
+ my ($source_dir, $target_dir) = @_;
+
+ print "Copying \"$source_dir\" -> \"$target_dir\"...";
+ # copy sources from git and show progress
+ system ("cd $source_dir && " .
+ "git archive --format=tar HEAD | " .
+ " tar -xf - -C $target_dir ") &&
+ die "Error: copying failed: $!\n";
+ print "\n";
+}
+
+# remove the now-empty submodule directories from the copied tree
+sub remove_empty_submodules($)
+{
+ my ($target_topdir) = @_;
+
+ foreach my $submodule (sort values %module_dirname) {
+ next unless ($submodule);
+ print "Removing empty submodule: $submodule...\n";
+ rmdir "$target_topdir/$submodule" || die "Error: Can't remove submodule directory: $target_topdir/$submodule";
+ }
+}
+
+# copy the source directory into a tmp directory
+# omit the .git subdirectories
+sub copy_lo_module_to_tempdir($$$)
+{
+ my ($source_dir, $module, $lo_topdir_name) = @_;
+ my $tempdir = tempdir( 'libreoffice-XXXXXX', DIR => File::Spec->tmpdir );
+
+ mkdir "$tempdir/$lo_topdir_name" || die "Can't create directory \"$tempdir/$lo_topdir_name\": $!\n";
+ mkdir "$tempdir/$lo_topdir_name/$module_dirname{$module}" || die "Can't create directory \"$tempdir/$lo_topdir_name/$module_dirname{$module}\": $!\n" if ($module_dirname{$module});
+
+ copy_dir_filter_and_show_progress("$source_dir/$module_dirname{$module}", "$tempdir/$lo_topdir_name/$module_dirname{$module}");
+ remove_empty_submodules("$tempdir/$lo_topdir_name/") if ($module eq "core");
+
+ return $tempdir;
+}
+
+sub generate_lo_module_changelog($$$)
+{
+ my ($source_dir, $lo_module_release_topdir, $module) = @_;
+
+ my $log_name = "ChangeLog";
+ $log_name .= "-$module_dirname{$module}" if ($module_dirname{$module});
+ print "Generating changelog for $module...\n";
+ system ("cd $source_dir/$module_dirname{$module} && " .
+ "git log --date=short --pretty='format:%cd %an <%ae> [%H]%n%n%w(0,8,8)%s%n%e%+b' " .
+ ">$lo_module_release_topdir/$log_name" ) &&
+ die "Error: generating failed: $!\n";
+}
+
+sub run_autogen($$)
+{
+ my ($dir, $module) = @_;
+
+ print "Running autogen for $module...\n";
+ system ("cd $dir && " .
+ "NOCONFIGURE=1 ./autogen.sh && " .
+ "rm -rf autom4te.cache && " .
+ "cd - >/dev/null 2>&1") && die "Error: autogen failed: $!\n";
+}
+
+sub generate_sources_version_file($$)
+{
+ my ($dir, $release_version) = @_;
+
+ open (VERFILE, ">$dir/sources.ver") || die "Can't open $dir/sources.ver: $!\n";
+
+ print VERFILE "lo_sources_ver=$release_version\n";
+
+ close VERFILE;
+}
+
+sub generate_tarball($$$)
+{
+ my ($dir, $tarball, $tar_compress_option) = @_;
+
+ print "Creating $tarball...";
+ # generate the tarball in the current directory; avoid "./" prefix in the stored paths; show progress
+ system ("tar -c $tar_compress_option -f $tarball -C $dir $lo_topdir_name") &&
+ die "Error: releasing failed: $!\n";
+ print "\n";
+}
+
+sub generate_md5($)
+{
+ my ($filename) = @_;
+
+ print "Generating MD5...\n";
+ system ("md5sum $filename >$filename.md5") &&
+ die "Error: releasing failed: $!\n";
+}
+
+sub default_releases_state_file($)
+{
+ my ($lo_core_dir) = @_;
+
+ my $rootdir = $lo_core_dir;
+ $rootdir =~ s/^(.*?)\/?[^\/]+\/?$/$1/;
+
+ my $releases_state_file;
+ if ($rootdir) {
+ $releases_state_file = "$rootdir/.releases";
+ } else {
+ $releases_state_file = ".releases";
+ }
+
+ return "$releases_state_file";
+}
+
+sub load_releases_state($)
+{
+ my ($releases_state_file) = @_;
+
+ my $state_config_version;
+ my $state_release_version;
+
+ if (open (STATE, "$releases_state_file")) {
+
+ while (my $line = <STATE>) {
+ chomp $line;
+
+ if ($line =~ /^\s*configure_version\s*=\s*(.*)$/) {
+ $state_config_version = "$1";
+ } elsif ($line =~ /^\s*released_version\s*=\s*(.*)$/) {
+ $state_release_version = "$1";
+ }
+ }
+ close (STATE);
+ }
+
+ return $state_config_version, $state_release_version;
+}
+
+sub save_releases_state($$$)
+{
+ my ($releases_state_file, $config_version, $release_version) = @_;
+
+ open (STATE, '>', "$releases_state_file") ||
+ die "Can't open \"$releases_state_file\" for writing: $!\n";
+
+ print STATE "configure_version = $config_version\n";
+ print STATE "released_version = $release_version\n";
+
+ close (STATE);
+}
+
+sub remove_tempdir($)
+{
+ my ($tempdir) = @_;
+
+# print "Cleaning $tempdir...\n";
+ system ("rm -rf $tempdir") && die "Error: rm failed: $!\n";
+}
+
+sub check_if_file_exists($$)
+{
+ my ($file, $force) = @_;
+
+ if (-e $file) {
+ if (defined $force) {
+ print "Warning: $file already exists and will be replaced!\n";
+ } else {
+ die "Error: $file already exists.\n".
+ " Use --force if you want to replace it.\n";
+ }
+ }
+}
+
+sub check_if_already_released($$$$$$)
+{
+ my ($p_module_tarball_name, $force, $bzip2, $xz, $pack_lo_core, $pack_lo_modules) = @_;
+
+ foreach my $tarball_name ( sort values %{$p_module_tarball_name} ) {
+ check_if_file_exists("$tarball_name.tar.bz2", $force) if (defined $bzip2);
+ check_if_file_exists("$tarball_name.tar.xz", $force) if (defined $xz);
+ }
+}
+
+sub prepare_module_sources($$$$)
+{
+ my ($source_dir, $release_version, $module, $lo_topdir_name) = @_;
+
+ # prepare sources
+ my $temp_dir = copy_lo_module_to_tempdir($source_dir, $module, $lo_topdir_name);
+ generate_lo_module_changelog($source_dir, "$temp_dir/$lo_topdir_name", $module);
+ run_autogen("$temp_dir/$lo_topdir_name", $module) if ($module eq 'core');
+ generate_sources_version_file("$temp_dir/$lo_topdir_name", $release_version) if ($module eq 'core');
+
+ return $temp_dir;
+}
+
+sub pack_module_sources($$$$)
+{
+ my ($temp_dir, $md5, $tarball, $tar_compress_option) = @_;
+
+ generate_tarball($temp_dir, $tarball, $tar_compress_option);
+ generate_md5($tarball) if (defined $md5);
+}
+
+sub generate_module_tarball($$$$$$$$)
+{
+ my ($source_dir, $release_version, $module, $md5, $bzip2, $xz, $lo_topdir_name, $module_tarball_name) = @_;
+
+ my $temp_dir = prepare_module_sources($source_dir, $release_version, $module, $lo_topdir_name);
+ pack_module_sources($temp_dir, $md5, "$module_tarball_name.tar.bz2", "--bzip2") if (defined $bzip2);
+ pack_module_sources($temp_dir, $md5, "$module_tarball_name.tar.xz", "--xz") if (defined $xz);
+ remove_tempdir($temp_dir);
+}
+
+
+sub generate_tarballs($$$$$$$$$)
+{
+ my ($source_dir, $release_version, $md5, $bzip2, $xz, $lo_topdir_name, $p_module_tarball_name, $pack_lo_core, $pack_lo_modules) = @_;
+
+ foreach my $module (sort keys %{$p_module_tarball_name} ) {
+ print "\n--- Generating $module ---\n";
+ generate_module_tarball($source_dir, $release_version, $module, $md5, $bzip2, $xz, $lo_topdir_name, $p_module_tarball_name->{$module});
+ }
+}
+
+
+sub usage()
+{
+ print "This tool helps to pack the libreoffice-build and module sources\n\n" .
+
+ "Usage:\n".
+ "\tlo-pack-sources [--help]\n" .
+ "\t [--force] [--md5] [--bzip2] [--xz]\n" .
+ "\t [--version][--set-version=<ver>] [--inc-version]\n" .
+ "\t [--no-submodule] [--module=<module>]\n" .
+ "\t [dir]\n\n" .
+
+ "Options:\n\n" .
+ "\t--help: print this help\n" .
+ "\t--force: replace an already existing release of the same version\n" .
+ "\t--md5: generate md5 sum for the final tarball\n" .
+ "\t--bzip2: generate tarballs compressed by bzip2\n" .
+ "\t--xz: generate tarballs compressed by xz (default)\n" .
+ "\t--version: just print version of the released package but do not\n" .
+ "\t\trelease it; the version is affected by the other options, e.g.\n" .
+ "\t\t--inc-version\n" .
+ "\t--set-version: force another version\n" .
+ "\t--inc-version: increment the latest version; there is a difference\n" .
+ "\t\tbetween test release (default) and final (not yet supported)\n" .
+ "\t--no-submodule: do not pack sources from git submodules\n" .
+ "\t--module=<module>: pack just a single module, use \"core\"\n" .
+ "\t\tfor the main git repo,\n" .
+ "\tdir: path of the source directory, either libreoffice-build or module\n";
+}
+
+
+my $module;
+my $ptf;
+my $md5;
+my $bzip2;
+my $xz;
+my $inc_version;
+my $config_version;
+my $set_version;
+my $get_config_version;
+my $release_version;
+my $pack_lo_core=1;
+my $pack_lo_modules=1;
+my $source_dir;
+my $releases_state_file;
+my $state_config_version;
+my $state_release_version;
+my $lo_core_tempdir;
+my $force;
+my $verbose=1;
+my %module_tarball_name;
+
+###################
+# Arguments parsing
+###################
+
+for my $arg (@ARGV) {
+ if ($arg eq '--help' || $arg eq '-h') {
+ usage;
+ exit 0;
+ } elsif ($arg eq '--force') {
+ $force=1;
+ } elsif ($arg eq '--md5') {
+ $md5=1;
+ } elsif ($arg eq '--bzip2') {
+ $bzip2=1;
+ } elsif ($arg eq '--xz') {
+ $xz=1;
+ } elsif ($arg eq '--version') {
+ $get_config_version=1;
+ $verbose = undef;
+ } elsif ($arg eq '--inc-version') {
+ $inc_version=1
+ } elsif ($arg =~ m/--set-version=(.*)/) {
+ $set_version="$1";
+ } elsif ($arg eq '--no-submodule') {
+ $module = "core";
+ } elsif ($arg =~ m/--module=(.*)/) {
+ # process just one module and do not pack libreoffice-build
+ die("Error: unknown module: $1") unless (defined $module_dirname{$1});
+ $module = $1;
+ } elsif ($arg =~ /^-/ ) {
+ die "Error: unknown option: $arg\n";
+ } else {
+ if (! defined $source_dir) {
+ $source_dir = $arg;
+ } else {
+ die "Error: Too many arguments $arg\n";
+ }
+ }
+}
+
+# ugly hack; we want only one module
+if ($module) {
+ my $name = $module_dirname{$module};
+ %module_dirname = ();
+ $module_dirname{$module} = $name;
+}
+
+###################
+# Initial checks
+###################
+
+unless ( defined $source_dir ) {
+ die "Error: undefined source directory, try --help\n";
+}
+
+unless ( -d "$source_dir" ) {
+ die "Error: is not a directory: $source_dir\n";
+}
+
+# check if it is a valid libreoffice-core directory
+unless (-f "$source_dir/autogen.sh" && -f "$source_dir/config_host.mk.in") {
+ die "Error: \"$source_dir\" is not a valid libreoffice-core directory\n";
+}
+
+if (defined $set_version && defined $inc_version) {
+ die "Error: --set-version and --inc-version options can't be used together\n";
+}
+
+# default compression
+$xz = 1 unless (defined $xz || defined $bzip2);
+
+
+###################
+# Main logic
+###################
+
+
+print "Source: $source_dir\n" if ($verbose);
+
+# detect some paths
+$releases_state_file = default_releases_state_file($source_dir) unless (defined $releases_state_file);
+
+# detect versions
+$config_version = get_config_version($source_dir);
+($state_config_version, $state_release_version) = load_releases_state($releases_state_file);
+if (defined $set_version) {
+ $release_version = "$set_version";
+} else {
+ $release_version = get_release_version($config_version, $state_config_version, $state_release_version, $inc_version);
+}
+
+# define tarball names
+print "Detected module:\n";
+foreach my $module (sort keys %module_dirname) {
+ if (-e "$source_dir/$module_dirname{$module}/.git") {
+ print " $module\n";
+ if ($module eq "core") {
+ $module_tarball_name{$module} = "libreoffice-$release_version";
+ } else {
+ $module_tarball_name{$module} = "libreoffice-$module-$release_version";
+ }
+ } else {
+ print "did not found: $source_dir/$module_dirname{$module}/.git\n";
+ print "Warning: $module sources are not available -> skipping\n";
+ }
+}
+
+# top directory inside the source tarballs
+$lo_topdir_name = "libreoffice-$release_version";
+
+print "Default version : $config_version\n" if ($verbose && defined $config_version);
+print "Last used version : $state_release_version\n" if ($verbose && defined $state_release_version);
+print "New version : $release_version\n" if ($verbose);
+
+# do the real job
+if ( defined $get_config_version ) {
+ print "$release_version\n";
+} else {
+ check_if_already_released(\%module_tarball_name, $force, $bzip2, $xz, $pack_lo_core, $pack_lo_modules);
+
+ # give a chance to stop the process
+ print ("\nWaiting 3 seconds...\n");
+ sleep 3;
+
+ generate_tarballs($source_dir, $release_version, $md5, $bzip2, $xz, $lo_topdir_name, \%module_tarball_name, $pack_lo_core, $pack_lo_modules);
+
+ if ( defined $releases_state_file ) {
+ save_releases_state($releases_state_file, $config_version, $release_version);
+ }
+}
diff --git a/bin/lo-xlate-lang b/bin/lo-xlate-lang
new file mode 100755
index 000000000..d158b3fd5
--- /dev/null
+++ b/bin/lo-xlate-lang
@@ -0,0 +1,213 @@
+#!/usr/bin/env perl
+
+use strict;
+
+my $progname=$0; $progname = $& if $progname =~ m,[^/]+$,;
+
+my %PREFIX; # used to search for prefix numbers
+my %ISOCODE; # used to search for iso codes
+my %LANGUAGE; # used to search for language names
+
+#=======================================================================
+# initialisation code - stuff the DATA into the lookup hashes
+#=======================================================================
+sub init {
+
+ my $prefix;
+ my $code;
+ my $name;
+
+
+ while (<DATA>)
+ {
+ next unless /\S/;
+ chop;
+ ($prefix, $code, $name ) = split(/:/, $_, 3);
+ $PREFIX{$prefix} = $prefix;
+ $PREFIX{$code} = $prefix;
+ $PREFIX{$name} = $prefix;
+
+ $ISOCODE{$prefix} = $code;
+ $ISOCODE{$code} = $code;
+ $ISOCODE{$name} = $code;
+
+ $LANGUAGE{$prefix} = $name;
+ $LANGUAGE{$code} = $name;
+ $LANGUAGE{$name} = $name;
+ }
+}
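+
+# After init(), e.g. $PREFIX{"pt"} eq "03", $ISOCODE{"03"} eq "pt" and
+# $LANGUAGE{"pt"} eq "portuguese" (from the "03:pt:portuguese" DATA line).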
+
+
+#=======================================================================
+# usage - error message
+#=======================================================================
+sub usage {
+ my $errmsg = shift;
+ my $errcode = shift;
+ print STDERR "$progname: $errmsg\n" if $errmsg;
+ print STDERR "$progname: Converts between prefix codes, iso codes and langnames\n";
+ print STDERR " Usage: $progname (-i|-l|-p|-h) <code>|all\n";
+ print STDERR " -i <code>: convert prefix to iso code (ex: 03 -> pt)\n";
+ print STDERR " -l <code>: convert iso code to language name (ex: pt -> portuguese)\n";
+ print STDERR " -p <code>: convert iso code to prefix (ex: pt -> 03)\n";
+ print STDERR " the code can either be an iso code, a prefix or even a language name\n";
+ print STDERR " The special code \"all\" asks for all possible values.\n\n";
+ print STDERR " -h : print this help\n";
+ exit $errcode;
+}
+
+#=======================================================================
+# main -
+#=======================================================================
+init();
+
+my ($LanguageCode, $LanguageMap);
+
+while ($ARGV[0] =~ /^-/) {
+ $_ = shift;
+ if (m/^-i/) {
+ $LanguageMap = \%ISOCODE;
+ }
+ elsif (m/^-l/) {
+ $LanguageMap = \%LANGUAGE;
+ }
+ elsif (m/^-p/) {
+ $LanguageMap = \%PREFIX;
+ }
+ elsif (m/^-h/) {
+ usage("",0);
+ }
+ else {
+ usage ("unknown option $_",1);
+ }
+}
+
+usage ("no operation specified on command line",1)
+ if (!$LanguageMap);
+
+usage ("no language code specified on command line",1)
+ if (!($LanguageCode = shift));
+
+if ($LanguageCode =~ (m/^all$/)) {
+ # Asked for all codes
+ my $old="";
+ foreach my $key (sort values %$LanguageMap) {
+ if ($key ne $old) {
+ print "$key ";
+ $old=$key;
+ }
+ }
+ print "\n";
+ exit 0;
+}
+
+usage ("no mapping found for $LanguageCode\n",1)
+ if (!($LanguageMap->{$LanguageCode}));
+
+print $LanguageMap->{$LanguageCode}, "\n";
+
+1;
+
+# keep third column names here with openoffice-dir/share/*/<long lang name>/
+
+__DATA__
+:be:belarusian
+:bg:bulgarian
+:bn:bengali
+:bs:bosnian
+:en-GB:english_british
+:gu:gujarati
+:hr:croatian
+:km:khmer
+:kmr-Latn:Kurmanji
+:pa-IN:punjabi
+:rw:kinarwanda
+:xh:xhosa
+:lt:lithuanian
+:ne:nepali
+:vi:vietnamese
+:nso:northern_sotho
+:ss:swazi
+:sr:serbian
+:ve:venda
+:ts:tsonga
+:st:southern_sotho
+:tn:tswana
+:br:breton
+:ga:gaelic
+:gd:scottish_gaelic
+:th:thai
+:hi:hindi
+:bs-BA:bosnian
+:en-ZA:english_southafrican
+:mk:macedonian
+:as:assamese
+:ml:malayalam
+:mr:marathi
+:or:odia
+:ur:urdu
+:fa:farsi
+:lv:latvian
+:nr:ndebele
+:ne:nepalese
+:sh:serbian
+:te:telugu
+:ta:tamil
+:tg:tajik
+:ka:georgian
+:eo:esperanto
+:uk:ukrainian
+:kk:kazakh
+:dz:dzongkha
+:kn:kannada
+:gl:galician
+:uz:uzbek
+:oc:occitan
+:ro:romanian
+:eu:basque
+:mn:mongolian
+:om:oromo
+:bo:tibetan
+:ast:asturian
+:is:icelandic
+:ug:uighur
+:si:sinhala
+:id:indonesian
+:my:burmese
+:am:amharic
+:gug:guarani
+:szl:upper_silesian
+01:en-US:english_american
+03:pt:portuguese
+07:ru:russian
+26:ns:northernsotho
+27:af:afrikaans
+28:zu:zulu
+30:el:greek
+31:nl:dutch
+33:fr:french
+34:es:spanish
+35:fi:finnish
+36:hu:hungarian
+37:ca:catalan
+39:it:italian
+42:cs:czech
+43:sk:slovak
+45:da:danish
+46:sv:swedish
+47:nb:norwegian
+48:pl:polish
+49:de:german
+50:sl:slovenian
+53:cy:welsh
+55:pt-BR:portuguese_brazilian
+77:et:estonian
+79:nn:norwegian_nynorsk
+81:ja:japanese
+82:ko:korean
+86:zh-CN:chinese_simplified
+88:zh-TW:chinese_traditional
+90:tr:turkish
+91:hi:hindi
+96:ar:arabic
+97:he:hebrew
diff --git a/bin/lolcat b/bin/lolcat
new file mode 100755
index 000000000..27bb32624
--- /dev/null
+++ b/bin/lolcat
@@ -0,0 +1,21 @@
+#!/usr/bin/perl -w
+
+use strict;
+use IO::Handle;
+
+die "Usage: $0 identifier\n" .
+ "(identifier is for example org.libreoffice)" unless $#ARGV == 0;
+
+my $id = $ARGV[0];
+
+open (LOGCAT, "adb logcat |") || die "Could not open pipe from adb logcat";
+my $pid = '';
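+
+# Waits for an (illustrative) ActivityManager line such as
+#   I/ActivityManager(  123): Start proc org.libreoffice for activity ...: pid=4567
+# and from then on passes through the log lines coming from that pid.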
+
+while (<LOGCAT>) {
+ if (m!^I/ActivityManager\( *\d+\): Start proc $id for activity .*: pid=(\d+)!) {
+ $pid = $1;
+ } elsif (m!^[EIWD]/[^(]+\( *$pid\)!) {
+ print $_;
+ STDOUT->flush();
+ }
+}
diff --git a/bin/module-deps.pl b/bin/module-deps.pl
new file mode 100755
index 000000000..abec124e4
--- /dev/null
+++ b/bin/module-deps.pl
@@ -0,0 +1,556 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use Getopt::Long qw(GetOptions VersionMessage);
+use Pod::Usage;
+
+my $gnumake;
+my $src_root;
+my $makefile_build;
+my $verbose = 0;
+my $no_leaf;
+my $from_file;
+my $to_file;
+my $output_file;
+my $preserve_libs = 0;
+my $toposort = 0;
+my %merged_libs;
+
+sub logit($)
+{
+ print STDERR shift if ($verbose);
+}
+
+sub read_deps()
+{
+ my $p;
+ my $to;
+ my $invalid_tolerance = 100;
+ my $line_count = 0;
+ my %deps;
+ if (defined $to_file)
+ {
+ open($to, ">$to_file") or die "can not open file for writing $to_file";
+ }
+ if (defined $from_file) {
+ open ($p, $from_file) || die "can't read deps from cache file: $!";
+ } else {
+ open ($p, "ENABLE_PRINT_DEPS=1 $gnumake -qrf $makefile_build|") || die "can't launch make: $!";
+ }
+ $|=1;
+ print STDERR "reading deps ";
+ while (<$p>) {
+ my $line = $_;
+ $line_count++;
+ print STDERR '.' if ($line_count % 10 == 0);
+ logit($line);
+ print $to $line if defined $to_file;
+ chomp ($line);
+ if ($line =~ m/^MergeLibContents:\s+(\S+.*)\s*$/) {
+ for my $dep (split / /, $1) {
+ $merged_libs{$dep} = 1 if $dep ne '';
+ }
+ } elsif ($line =~ m/^LibraryDep:\s+(\S+) links against (.*)$/) {
+# if ($line =~ m/^LibraryDep:\s+(\S+)\s+links against/) {
+ $deps{$1} = ' ' if (!defined $deps{$1});
+ $deps{$1} = $deps{$1} . ' ' . $2;
+ } elsif ($line =~ m/^LibraryDep:\s+links against/) {
+# these need fixing, we call gb_LinkTarget__use_$...
+# and get less than normal data back to gb_LinkTarget_use_libraries
+# print STDERR "ignoring unhelpful external dep\n";
+ } elsif ($invalid_tolerance < 0) {
+# print "read all dependencies to: '$line'\n";
+ last;
+ } else {
+# print "no match '$line'\n";
+ $invalid_tolerance--;
+ }
+ }
+ close ($p);
+ print STDERR " done\n";
+
+ return \%deps;
+}
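+
+# read_deps() consumes make output lines of the form (illustrative):
+#   LibraryDep: Library_svx links against sal cppu
+# and yields a map from each target to the space-separated libraries it needs.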
+
+# graphviz etc. don't like some names
+sub clean_name($)
+{
+ my $name = shift;
+ $name =~ s/[\-\/\.]/_/g;
+ return $name;
+}
+
+# first create nodes for each entry
+sub clean_tree($)
+{
+ my $deps = shift;
+ my %tree;
+ for my $name (sort keys %{$deps}) {
+ my $need_str = $deps->{$name};
+ $need_str =~ s/^\s+//g;
+ $need_str =~ s/\s+$//g;
+ my @needs = split /\s+/, $need_str;
+ $name =~ m/^([^_]+)_(\S+)$/ || die "invalid target name: '$name'";
+ my $type = $1;
+ my $target = clean_name ($2);
+ $type eq 'Executable' || $type eq 'Library' ||
+ $type eq 'CppunitTest' || die "Unknown type '$type'";
+
+ my %result;
+ $result{type} = $type;
+ $result{target} = $target;
+ $result{merged} = 0;
+ my @clean_needs;
+ for my $need (@needs) {
+ push @clean_needs, clean_name($need);
+ }
+ $result{deps} = \@clean_needs;
+ if (defined $tree{$target}) {
+ logit("warning -duplicate target: '$target'\n");
+ delete($tree{$target});
+ }
+ $tree{$target} = \%result;
+
+ logit("$target ($type): " . join (',', @clean_needs) . "\n");
+ }
+ return \%tree;
+}
+
+sub has_child_dep($$$)
+{
+ my ($tree,$search,$name) = @_;
+ my $node = $tree->{$name};
+ return defined $node->{flat_deps}->{$search};
+}
+
+# flatten deps recursively into a single hash per module
+sub build_flat_dep_hash($$);
+sub build_flat_dep_hash($$)
+{
+ my ($tree, $name) = @_;
+ my %flat_deps;
+
+ my $node = $tree->{$name};
+ return if (defined $node->{flat_deps});
+
+ # build flat deps for children
+ for my $child (@{$node->{deps}}) {
+ build_flat_dep_hash($tree, $child)
+ }
+
+ for my $child (@{$node->{deps}}) {
+ $flat_deps{$child} = 1;
+ for my $dep (@{$tree->{$child}->{deps}}) {
+ $flat_deps{$dep} = 1;
+ }
+ }
+ $node->{flat_deps} = \%flat_deps;
+
+ # useful debugging ...
+ if (defined $ENV{DEP_CACHE_FILE}) {
+ logit("node '$name' has flat-deps: '" . join(',', keys %flat_deps) . "' " .
+ "vs. '" . join(',', @{$node->{deps}}) . "'\n");
+ }
+}
+
+# many modules depend on vcl + sal, but vcl depends on sal
+# so we want to strip sal out - and the same for many
+# similar instances
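+# e.g. with edges sw -> {vcl, sal} and vcl -> {sal}, the direct sw -> sal
+# edge is redundant and is later dropped by optimize_tree()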
+sub prune_redundant_deps($)
+{
+ my $tree = shift;
+ for my $name (sort keys %{$tree}) {
+ build_flat_dep_hash($tree, $name);
+ }
+}
+
+# glob on libo directory
+sub create_lib_module_map()
+{
+ my %l2m;
+ # hardcode the libs that don't have a directory
+ $l2m{'merged'} = 'merged';
+
+ for (glob($src_root."/*/Library_*.mk"))
+ {
+ /.*\/(.*)\/Library_(.*)\.mk/;
+ # add module -> module
+ $l2m{$1} = $1;
+ # add lib -> module
+ $l2m{$2} = $1;
+ }
+ return \%l2m;
+}
+
+# call prune redundant_deps
+# rewrite the deps array
+sub optimize_tree($)
+{
+ my $tree = shift;
+ prune_redundant_deps($tree);
+ for my $name (sort keys %{$tree}) {
+ my $result = $tree->{$name};
+ logit("minimising deps for $result->{target}\n");
+ my @newdeps;
+ for my $dep (@{$result->{deps}}) {
+ # is this implied by any other child ?
+ logit("checking if '$dep' is redundant\n");
+ my $preserve = 1;
+ for my $other_dep (@{$result->{deps}}) {
+ next if ($other_dep eq $dep);
+ if (has_child_dep($tree,$dep,$other_dep)) {
+ logit("$dep is implied by $other_dep - ignoring\n");
+ $preserve = 0;
+ last;
+ }
+ }
+ push @newdeps, $dep if ($preserve);
+ }
+ # re-write the shrunk set to accelerate things
+ $result->{deps} = \@newdeps;
+ }
+ return $tree;
+}
+
+# walking through the library based graph and creating a module based graph.
+sub collapse_lib_to_module($)
+{
+ my $tree = shift;
+ my %digraph;
+ my $l2m = create_lib_module_map();
+ my %unknown_libs;
+ for my $lib_name (sort keys %{$tree}) {
+ my $result = $tree->{$lib_name};
+        if (!grep {/$lib_name/} keys %$l2m) {
+            $unknown_libs{$lib_name} = 1;
+            next;
+        }
+
+ # new collapsed name.
+ my $name = $l2m->{$lib_name};
+
+ # sal has no dependencies, take care of it
+ # otherwise it doesn't have target key
+ if (!@{$result->{deps}}) {
+ if (!exists($digraph{$name})) {
+ my @empty;
+ $digraph{$name}{deps} = \@empty;
+ $digraph{$name}{target} = $result->{target};
+ $digraph{$name}{merged} = $result->{merged};
+ }
+ }
+ for my $dep (@{$result->{deps}}) {
+ my $newdep;
+ $newdep = $l2m->{$dep};
+
+ die "Mis-named */Library_*.mk file - should match rules: '$dep'" if (!defined $newdep);
+ $dep = $newdep;
+
+ # ignore: two libraries from the same module depend on each other
+ next if ($name eq $dep);
+ if (exists($digraph{$name}))
+ {
+ my @deps = @{$digraph{$name}{deps}};
+ # only add the edge if we haven't seen it already
+ if (!grep {/$dep/} @deps)
+ {
+ push @deps, $dep;
+ $digraph{$name}{deps} = \@deps;
+ }
+ }
+ else
+ {
+ my @deps;
+ push @deps, $dep;
+ $digraph{$name}{deps} = \@deps;
+ $digraph{$name}{target} = $result->{target};
+ $digraph{$name}{merged} = $result->{merged};
+ }
+ }
+ }
+ logit("warn: no module for libs were found and dropped: [" .
+ join(",", (sort (keys(%unknown_libs)))) . "]\n");
+ return optimize_tree(\%digraph);
+}
+
+sub prune_leaves($)
+{
+ my $tree = shift;
+ my %newtree;
+ my %name_has_deps;
+
+ # we like a few leaves around:
+ for my $nonleaf ('desktop', 'sw', 'sc', 'sd', 'starmath') {
+ $name_has_deps{$nonleaf} = 1;
+ }
+
+ # find which modules are depended on by others
+ for my $name (keys %{$tree}) {
+ for my $dep (@{$tree->{$name}->{deps}}) {
+ $name_has_deps{$dep} = 1;
+ }
+ }
+
+ # prune modules with no deps
+ for my $name (keys %{$tree}) {
+ delete $tree->{$name} if (!defined $name_has_deps{$name});
+ }
+
+ return optimize_tree($tree);
+}
+
+sub annotate_mergelibs($)
+{
+ my $tree = shift;
+ print STDERR "annotating mergelibs\n";
+ for my $name (keys %{$tree}) {
+ if (defined $merged_libs{$name}) {
+ $tree->{$name}->{merged} = 1;
+# print STDERR "mark $name as merged\n";
+ }
+ }
+}
+
+sub dump_graphviz($)
+{
+ my $tree = shift;
+ my $to = \*STDOUT;
+ open($to, ">$output_file") if defined($output_file);
+ print $to <<END;
+digraph LibreOffice {
+edge [color="#31CEF0", len=0.4]
+edge [fontname=Arial, fontsize=10, fontcolor="#31CEF0"]
+END
+;
+
+ my @merged_names;
+ my @normal_names;
+ for my $name (sort keys %{$tree}) {
+ if ($tree->{$name}->{merged}) {
+ push @merged_names, $name;
+ } else {
+ push @normal_names, $name;
+ }
+ }
+ print $to "node [fontname=Verdana, fontsize=10, height=0.02, width=0.02,".
+ 'shape=Mrecord,color="#BBBBBB"' .
+ "];" . join(';', @normal_names) . "\n";
+ print $to "node [fontname=Verdana, fontsize=10, height=0.02, width=0.02,".
+ 'shape=box,style=filled,color="#CCCCCC"' .
+ "];" . join(';', @merged_names) . "\n";
+
+ for my $name (sort keys %{$tree}) {
+ my $result = $tree->{$name};
+ logit("minimising deps for $result->{target}\n");
+ for my $dep (@{$result->{deps}}) {
+ print $to "$name -> $dep;\n" ;
+ }
+ }
+ print $to "}\n";
+}
+
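+# depth-first post-order walk; $tags->{$name} is 1 while the node is on
+# the current path (so seeing a 1 again means a cycle) and 2 once done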
+sub toposort_visit($$$$);
+sub toposort_visit($$$$)
+{
+ my $tree = shift;
+ my $list = shift;
+ my $tags = shift;
+ my $name = shift;
+ die "dependencies don't form a DAG"
+ if (defined($tags->{$name}) && $tags->{$name} == 1);
+ if (!$tags->{$name}) {
+ $tags->{$name} = 1;
+ my $result = $tree->{$name};
+ for my $dep (@{$result->{deps}}) {
+ toposort_visit($tree, $list, $tags, $dep);
+ }
+ $tags->{$name} = 2;
+ push @{$list}, $name;
+ }
+}
+
+sub dump_toposort($)
+{
+ my $tree = shift;
+ my @list;
+ my %tags;
+ for my $name (sort keys %{$tree}) {
+ toposort_visit($tree, \@list, \%tags, $name);
+ }
+ my $to = \*STDOUT;
+ open($to, ">$output_file") if defined($output_file);
+ for (my $i = 0; $i <= $#list; ++$i) {
+ print $to "$list[$i]\n";
+ }
+}
+
+sub filter_targets($)
+{
+ my $tree = shift;
+ for my $name (sort keys %{$tree})
+ {
+ my $result = $tree->{$name};
+ if ($result->{type} eq 'CppunitTest' ||
+ ($result->{type} eq 'Executable' &&
+ $result->{target} ne 'soffice_bin'))
+ {
+ delete($tree->{$name});
+ }
+ }
+}
+
+sub parse_options()
+{
+ my %h = (
+ 'verbose|v' => \$verbose,
+ 'help|h' => \my $help,
+ 'man|m' => \my $man,
+ 'version|r' => sub {
+ VersionMessage(-msg => "You are using: 1.0 of module-deps");
+ },
+ 'preserve-libs|p' => \$preserve_libs,
+ 'toposort|t' => \$toposort,
+ 'write-dep-file|w=s' => \$to_file,
+ 'read-dep-file|f=s' => \$from_file,
+ 'no-leaf|l' => \$no_leaf,
+ 'output-file|o=s' => \$output_file);
+ GetOptions(%h) or pod2usage(2);
+ pod2usage(1) if $help;
+ pod2usage(-exitstatus => 0, -verbose => 2) if $man;
+ ($gnumake, $makefile_build) = @ARGV if $#ARGV == 1;
+ $gnumake = 'make' if (!defined $gnumake);
+ $makefile_build = 'Makefile.gbuild' if (!defined $makefile_build);
+ $src_root = defined $ENV{SRC_ROOT} ? $ENV{SRC_ROOT} : ".";
+}
+
+sub main()
+{
+ parse_options();
+ my $deps = read_deps();
+ my $tree = clean_tree($deps);
+ filter_targets($tree);
+ optimize_tree($tree);
+ annotate_mergelibs($tree);
+ if (!$preserve_libs && !defined($ENV{PRESERVE_LIBS})) {
+ $tree = collapse_lib_to_module($tree);
+ }
+ if ($no_leaf) {
+ $tree = prune_leaves($tree);
+ }
+ if ($toposort) {
+ dump_toposort($tree);
+ } else {
+ dump_graphviz($tree);
+ }
+}
+
+main();
+
+__END__
+
+=head1 NAME
+
+module-deps - Generate module dependencies for LibreOffice build system
+
+=head1 SYNOPSIS
+
+module_deps [options] [gnumake] [makefile]
+
+=head1 OPTIONS
+
+=over 8
+
+=item B<--help>
+
+=item B<-h>
+
+Prints a brief help message and exits.
+
+=item B<--man>
+
+=item B<-m>
+
+Prints the manual page and exits.
+
+=item B<--version>
+
+=item B<-r>
+
+Prints the version and exits.
+
+=item B<--preserve-libs>
+
+=item B<-p>
+
+Don't collapse libs to modules.
+
+=item B<--toposort>
+
+=item B<-t>
+
+Output a topological sorting instead of a graph.
+
+=item B<--read-dep-file file>
+
+=item B<-f>
+
+Read dependencies from a file.
+
+=item B<--write-dep-file file>
+
+=item B<-w>
+
+Write dependencies to a file.
+
+=item B<--output-file file>
+
+=item B<-o>
+
+Write the graph or sort output to a file.
+
+=back
+
+=head1 DESCRIPTION
+
+B<This program> parses the output of the LibreOffice make process
+(or a cached input file) and generates the build dependency digraph,
+which can be piped into the B<graphviz> program (typically B<dot>).
+
+B<Hacking on it>:
+
+The typical (optimized) B<workflow> includes 3 steps:
+
+=over 3
+
+=item 1
+Create cache dependency file: module_deps --write-dep-file lo.dep
+
+=item 2
+Use cache dependency file: module_deps --read-dep-file lo.dep -o lo.graphviz
+
+=item 3
+Pipe the output to graphviz: cat lo.graphviz | dot -Tpng -o lo.png
+
+=back
+
+=head1 TODO
+
+=over 2
+
+=item 1
+Add soft (include only) dependency
+
+=item 2
+Add dependency on external modules
+
+=back
+
+=head1 AUTHOR
+
+=over 2
+
+=item Michael Meeks
+
+=item David Ostrovsky
+
+=back
+
+=cut
diff --git a/bin/moveglobalheaders.sh b/bin/moveglobalheaders.sh
new file mode 100755
index 000000000..ca202832b
--- /dev/null
+++ b/bin/moveglobalheaders.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
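+# Move each module's public headers from <module>/inc/<module>/ into the
+# top-level include/<module>/ directory, dropping the then-redundant
+# Package_inc.mk and its reference in Module_<module>.mk.
+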
+topdirs=`find . -mindepth 1 -maxdepth 1 -type d -not -name sal`
+mkdir -p include/
+for dir in $topdirs
+do
+ dir=`echo "$dir"| sed -e 's/^..//'`
+ if test -d $dir/inc/$dir
+ then
+ if test -f $dir/Package_inc.mk
+ then
+ if test -f $dir/Module_$dir.mk
+ then
+ git mv $dir/inc/$dir include/$dir
+ git rm $dir/Package_inc.mk
+ grep -v Package_inc $dir/Module_$dir.mk > $dir/Module_dir.mk.new
+ mv -f $dir/Module_dir.mk.new $dir/Module_$dir.mk
+ git add $dir/Module_$dir.mk
+ else
+ echo "WARN: no $dir/Module_$dir.mk"
+ fi
+ else
+ echo "WARN: no file $dir/Package_inc.mk"
+ fi
+ fi
+done
+#grep -v Package_inc.mk sal/CustomTarget_sal_allheaders.mk > sal/CustomTarget_sal_allheaders.mk.new
+#mv sal/CustomTarget_sal_allheaders.mk.new sal/CustomTarget_sal_allheaders.mk
+#git add sal/CustomTarget_sal_allheaders.mk
+
+# we like to be special ...
+sed -ie 's/\/svtools\/inc\/svtools/\/include\/svtools\//' svtools/Library_svt.mk
+sed -ie 's/\/sfx2\/inc\/sfx2/\/include\/sfx2\//' sfx2/Library_sfx.mk
+git add svtools/Library_svt.mk sfx2/Library_sfx.mk
+
+# urgh
+sed -ie 's/\.\.\/svx\//svx\//' svx/source/svdraw/svdoashp.cxx
+git add svx/source/svdraw/svdoashp.cxx
diff --git a/bin/odfvalidator.sh.in b/bin/odfvalidator.sh.in
new file mode 100644
index 000000000..605e74731
--- /dev/null
+++ b/bin/odfvalidator.sh.in
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+java -Djavax.xml.validation.SchemaFactory:http://relaxng.org/ns/structure/1.0=org.iso_relax.verifier.jaxp.validation.RELAXNGSchemaFactoryImpl -Dorg.iso_relax.verifier.VerifierFactoryLoader=com.sun.msv.verifier.jarv.FactoryLoaderImpl -jar @TARFILE_LOCATION@/@ODFVALIDATOR_JAR@ "$@"
diff --git a/bin/officeotron.sh.in b/bin/officeotron.sh.in
new file mode 100644
index 000000000..7281f1bcd
--- /dev/null
+++ b/bin/officeotron.sh.in
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+java -jar @TARFILE_LOCATION@/@OFFICEOTRON_JAR@ "$@"
diff --git a/bin/oss-fuzz-build.sh b/bin/oss-fuzz-build.sh
new file mode 100755
index 000000000..646accc8a
--- /dev/null
+++ b/bin/oss-fuzz-build.sh
@@ -0,0 +1,55 @@
+#!/bin/bash -e
+
+if [ -z "${OUT}" ] || [ -z "${SRC}" ] || [ -z "${WORK}" ]; then
+ echo "OUT, SRC or WORK not set - script expects to be called inside oss-fuzz build env"
+ exit 1
+fi
+
+#shuffle CXXFLAGS -stdlib=libc++ arg into CXX as well because we use
+#the CXX as the linker and need to pass -stdlib=libc++ to build
+export CXX="$CXX -stdlib=libc++ -fsanitize-blacklist=$SRC/libreoffice/bin/sanitize-blacklist.txt"
+#similarly force the -fsanitize etc args in as well as pthread to get
+#things to link successfully during the build
+export LDFLAGS="$CFLAGS -Wl,--compress-debug-sections,zlib -lpthread"
+
+df -h $OUT $WORK
+
+cd $WORK
+$SRC/libreoffice/autogen.sh --with-distro=LibreOfficeOssFuzz --with-external-tar=$SRC
+
+make clean
+
+#build-time rsc tool leaks a titch
+export ASAN_OPTIONS="detect_leaks=0"
+
+make fuzzers
+
+pushd instdir/program
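+#splice the two rdbs into one: drop what is presumably the closing
+#</components> tag (the last 14 bytes) of the first and the XML prolog
+#plus opening tag (the first 84 bytes) of the second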
+head -c -14 services.rdb > templateservices.rdb
+tail -c +85 ./services/services.rdb >> templateservices.rdb
+for a in *fuzzer; do
+ #some minimal fonts required
+ mv $a $OUT
+ mkdir -p $OUT/$a.fonts
+ cp $SRC/884ed41809687c3e168fc7c19b16585149ff058eca79acbf3ee784f6630704cc-opens___.ttf ../share/fonts/truetype/Liberation* $OUT/$a.fonts
+ #minimal runtime requirements
+ cp templateservices.rdb $OUT/$a.services.rdb
+ cp types.rdb $OUT/$a.types.rdb
+ cp types/offapi.rdb $OUT/$a.moretypes.rdb
+ cat > $OUT/$a.unorc << EOF
+[Bootstrap]
+URE_INTERNAL_LIB_DIR=\${ORIGIN}
+UNO_TYPES=\${ORIGIN}/$a.types.rdb \${ORIGIN}/$a.moretypes.rdb
+UNO_SERVICES=\${ORIGIN}/$a.services.rdb
+EOF
+done
+popd
+
+df -h $OUT $WORK
+
+#starting corpuses
+cp $SRC/*_seed_corpus.zip $OUT
+#fuzzing dictionaries
+cp $SRC/*.dict $OUT
+#options files
+cp $SRC/libreoffice/vcl/workben/*.options $OUT
diff --git a/bin/parse-perfcheck.py b/bin/parse-perfcheck.py
new file mode 100755
index 000000000..158ef62fe
--- /dev/null
+++ b/bin/parse-perfcheck.py
@@ -0,0 +1,258 @@
+#!/usr/bin/python
+
+# This file is part of the LibreOffice project.
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import os
+import getopt
+import csv
+
+
+colsResult = {}
+allTests = []
+
+def parseFile(dirname, filename, lastCommit):
+
+ curTestComment, total = None, None
+
+ path = os.path.join(dirname, filename)
+
+ trigger = "desc: Trigger: Client Request: "
+ trigger_len = len(trigger)
+ totals = "totals: "
+ totals_len = len(totals)
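+
+ # callgrind output files are expected to contain lines like
+ # desc: Trigger: Client Request: <dump comment>
+ # totals: <instruction count>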
+
+ with open(path,'r') as callgrindFile:
+ lines = callgrindFile.readlines()
+
+ for line in lines:
+ if line.startswith(trigger):
+ curTestComment = line[trigger_len:].replace("\n","")
+ elif line.startswith(totals):
+ total = line[totals_len:].replace("\n","")
+
+ if curTestComment is None or total is None:
+ return None
+
+ testName = os.path.basename(dirname).replace(".test.core","")
+
+ lastCommitId, lastCommitDate = lastCommit
+ if lastCommitId not in colsResult:
+ colsResult[lastCommitId] = {}
+ colsResult[lastCommitId]['date'] = lastCommitDate
+ colsResult[lastCommitId]['values'] = {}
+
+ colsResult[lastCommitId]['values'][curTestComment] = total
+
+ return [lastCommitId, lastCommitDate, testName, curTestComment, total, filename]
+
+def processDirectory(rootDir, needsCsvHeader, lastCommit):
+
+ results = []
+
+ if needsCsvHeader:
+ results.append(["lastCommit", "lastCommitDate", "test filename", "dump comment", "count", "filename"])
+
+ for dirName, subdirList, fileList in os.walk(rootDir):
+ files = [f for f in fileList if f.startswith("callgrind.out.")]
+ for fname in files:
+ found = parseFile(dirName, fname, lastCommit)
+ if found is not None:
+ results.append(found)
+ return results
+
+def getLastCommitInfo():
+
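+ # "git log --date=iso" typically starts with
+ # commit <sha>
+ # Author: <author>
+ # Date: <iso date>
+ # so the id is taken from line 1 and the date from line 3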
+ stream = os.popen("git log --date=iso")
+ line = stream.readline()
+ commitId = line.replace("commit ","").replace("\n","")
+ line = stream.readline()
+ line = stream.readline()
+ commitDate = line.replace("Date: ","").replace("\n","").strip()
+
+ return commitId, commitDate
+
+def displayUsage():
+
+ usage = """
+
+Parses the callgrind results of make perfcheck
+
+Arguments:
+
+ --csv-file\t\t the target CSV file - new or containing previous tests - default: perfcheckResult.csv
+ --source-directory\t directory containing make perfcheck output - default: ./workdir/CppunitTest
+ --alert-type\t\t mode for calculating alerts - valid values: previous, first
+ --alert-value\t\t alert threshold in % - default: 10
+
+ --help\t\t this message
+
+Columned output is dumped into csv-file + ".col"
+
+Alerts, if any, are displayed on standard output
+
+"""
+ print(usage)
+
+class WrongArguments(Exception):
+ pass
+
+def analyzeArgs(args):
+
+ try:
+ opts, args = getopt.getopt(args, 'x', [
+ 'csv-file=', 'source-directory=', 'alert-type=', 'alert-value=', 'help'])
+ except getopt.GetoptError:
+ raise WrongArguments
+
+ targetFileName = "perfcheckResult.csv"
+ sourceDirectory = "./workdir/CppunitTest"
+ alertType = ""
+ alertValue = 10
+
+ for o, a in opts:
+ if o == '--help':
+ displayUsage()
+ sys.exit()
+ elif o == "--csv-file":
+ targetFileName = a
+ elif o == "--source-directory":
+ sourceDirectory = a
+ elif o == "--alert-type":
+ alertType = a
+ elif o == "--alert-value":
+ alertValue = float(a)
+ else:
+ raise WrongArguments
+
+ return targetFileName, sourceDirectory, alertType, alertValue
+
+def readCsvFile(targetFilename):
+
+ with open(targetFilename, 'r') as csvfile:
+ reader = csv.reader(csvfile, delimiter="\t")
+ # skip header
+ next(reader)
+ for line in reader:
+
+ # do not process empty lines
+ if not line:
+ continue
+
+ curId, curDate, curTestName, curTestComment, curValue, currCallgrindFile = line
+
+ if curTestComment not in allTests:
+ allTests.append(curTestComment)
+
+ if curId not in colsResult:
+ colsResult[curId] = {}
+ colsResult[curId]['date'] = curDate
+ colsResult[curId]['values'] = {}
+
+ colsResult[curId]['values'][curTestComment] = curValue
+
+if __name__ == '__main__':
+
+ #check args
+ try:
+ targetFileName, sourceDirectory, alertType, alertValue = analyzeArgs(sys.argv[1:])
+ except WrongArguments:
+ displayUsage()
+ sys.exit(1)
+
+ # check if sourceDirectory exists
+ if not os.path.isdir(sourceDirectory):
+ print("sourceDirectory %s not found - Aborting" % (sourceDirectory))
+ sys.exit(1)
+
+ # read the complete CSV file
+ if os.path.isfile(targetFileName):
+ readCsvFile(targetFileName)
+ needsCsvHeader = False
+ else:
+ needsCsvHeader = True
+
+ # last commit Id
+ lastCommitId, lastCommitDate = getLastCommitInfo()
+
+ # walk through the source directory
+ if lastCommitId not in colsResult:
+
+ lastCommit = (lastCommitId, lastCommitDate)
+ results = processDirectory(sourceDirectory, needsCsvHeader, lastCommit)
+ ppResults = "\n".join(["\t".join(row) for row in results])
+
+ print('\nNew results\n' + ppResults)
+
+ # append raw result
+ with open(targetFileName,'a') as csvfile:
+ writer = csv.writer(csvfile, delimiter='\t')
+ writer.writerows(results)
+ print("\nCSV file written at " + targetFileName + '\n')
+
+ else:
+ print("\nCSV file up to date " + targetFileName + '\n')
+
+
+ # build columned output
+
+ # header
+ mLine = '\t'.join(["commit", "date"] + allTests) + '\n'
+
+ alertTest = {}
+
+ with open(targetFileName + '.col','w') as fileResult:
+ for k in colsResult:
+ mLine += k + "\t" + colsResult[k]['date'] + "\t"
+ for t in allTests:
+ if t in colsResult[k]['values']:
+ mValue= colsResult[k]['values'][t]
+ if t not in alertTest:
+ alertTest[t] = {}
+ alertTest[t][colsResult[k]['date']] = mValue
+ else:
+ mValue = ""
+ mLine += mValue + "\t"
+ mLine += "\n"
+
+ # write columned result
+ fileResult.write(mLine)
+
+ print("Columned file written at " + targetFileName + '.col\n')
+
+ # check for Alerts
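+ # compare the newest value of each test against the previous run
+ # ("previous") or the very first run (any other non-empty type) and
+ # alert when it grew by more than alert-value percent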
+
+ if alertType == "":
+ sys.exit(1)
+
+ alertResult = ""
+
+ for t in alertTest:
+
+ testDict = alertTest[t]
+
+ # sort
+ keylist = sorted(testDict.keys())
+ maxVal = float(testDict[keylist[-1]])
+ minVal = 0
+
+ if alertType == "previous":
+ if len(keylist) > 1:
+ minVal = float(testDict[keylist[-2]])
+ else:
+ minVal = float(testDict[keylist[0]])
+
+ if minVal != 0:
+ delta = 100 * ((maxVal-minVal)/minVal)
+ else:
+ delta = 0
+
+ if delta > float(alertValue):
+ alertResult += t + "\t" + "{:.2f}".format(delta) + " %\n"
+
+ if alertResult != "":
+ print("!!!!!!!! ALERT !!!!!!!\n")
+ print(alertResult)
diff --git a/bin/refcount_leak.py b/bin/refcount_leak.py
new file mode 100755
index 000000000..2a24cb51e
--- /dev/null
+++ b/bin/refcount_leak.py
@@ -0,0 +1,179 @@
+#!/usr/bin/python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+### script to help debug leaks of reference counted objects
+
+## I. to use it, first override acquire() and release()
+
+# Foo * g_pTrackedFoo = 0;
+
+# Foo::Foo()
+# static int nFoos = 0;
+# if (++nFoos == 42) // track instance #42
+# g_pTrackedFoo = this;
+
+# void Foo::acquire()
+# if (this == g_pTrackedFoo)
+# ; // set gdb breakpoint here
+# Foo_Base::acquire()
+
+# void Foo::release()
+# if (this == g_pTrackedFoo)
+# ; // set gdb breakpoint here
+# Foo_Base::release()
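+
+# e.g. Foo::acquire() written out in full (a sketch; release() is
+# analogous):
+
+# void Foo::acquire()
+# {
+#     if (this == g_pTrackedFoo)
+#         ; // set gdb breakpoint here
+#     Foo_Base::acquire();
+# }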
+
+## II. run test/soffice in gdb and set breakpoints in acquire/release
+## with a command to print the backtrace
+
+# set logging on
+# break foo.cxx:123
+# break foo.cxx:234
+
+# command 1 2
+# bt
+# c
+# end
+# run
+
+## III. now feed logfile gdb.txt into this script
+
+# bin/refcount_leak.py < gdb.txt
+
+###
+
+from operator import itemgetter
+import re
+import sys
+
+threshold = 2
+
+class Trace:
+ clock = 0 # global counter
+ # frames: list of stack frames, beginning with outermost
+ def __init__(self, lines):
+ lines.reverse()
+ self.frames = lines
+ Trace.clock += 1
+ self.clock = Trace.clock
+
+def addTrace(traces, lines):
+ if not(traces is None) and len(lines) > 0:
+ traces.append(Trace(lines))
+
+def readGdbLog(infile):
+ traces_acquire = []
+ traces_release = []
+ current = None
+ lines = []
+ apattern = re.compile("^Breakpoint.*::acquire")
+ rpattern = re.compile("^Breakpoint.*::release")
+ for line in infile:
+ if apattern.match(line):
+ addTrace(current, lines)
+ lines = []
+ current = traces_acquire
+ if rpattern.match(line):
+ addTrace(current, lines)
+ lines = []
+ current = traces_release
+ if line.startswith("#"):
+ # strip #123 stack frame number, and newline
+ lines.append(line[line.index("0x"):-1])
+ addTrace(current, lines)
+ print("# parsed traces acquire: ", len(traces_acquire))
+ print("# parsed traces release: ", len(traces_release))
+ return (traces_acquire, traces_release)
+
+def getFunction(frame):
+ start = frame.index(" in ") + len(" in ")
+ try:
+ end = frame.index(" at ", start)
+ except ValueError as e:
+ # argh... stack frames may be split across multiple lines if
+ # a parameter has a fancy pretty printer
+ return frame[start:]
+ return frame[start:end]
+
+
+def matchStack(trace_acquire, trace_release):
+ if trace_release.clock < trace_acquire.clock:
+ return None # acquire must precede release
+ common = 0
+ refpattern = re.compile(r"::Reference<.*>::Reference\(")
+ for (frame1, frame2) in zip(trace_release.frames, trace_acquire.frames):
+ if frame1 == frame2:
+ common += 1
+ else:
+ if getFunction(frame1) == getFunction(frame2):
+ common += 1
+ acquireframes = len(trace_acquire.frames)
+ # there is sometimes a dozen frames of UNO type related junk
+ # on the stack where the acquire() happens, which breaks the
+ # matching; try to avoid that
+ for i in range(common, acquireframes):
+ if refpattern.search(trace_acquire.frames[i]):
+ acquireframes = i+1 # cut off junk above Reference ctor
+ break
+ score = max(len(trace_release.frames), acquireframes) - common
+ # smaller score is better
+ return (score, trace_release.clock - trace_acquire.clock)
+
+# brute force greedy n^2 matching
+def matchStacks(traces_acquire, traces_release):
+ matches = []
+ for release in traces_release:
+ for acquire in traces_acquire:
+ score = matchStack(acquire, release)
+ if score is not None:
+ matches.append((score, acquire, release))
+ matches.sort(key=itemgetter(0))
+ return matches
+
+def bestMatches(traces_acquire, traces_release, matches):
+ traces_aunmatched = traces_acquire
+ traces_runmatched = traces_release
+ bestmatches = []
+ for (score,acquire,release) in matches:
+ if not(acquire in traces_aunmatched and release in traces_runmatched):
+ continue
+ traces_aunmatched.remove(acquire)
+ traces_runmatched.remove(release)
+ bestmatches.append((score,acquire,release))
+ print("# unmatched acquire: ", len(traces_aunmatched))
+ print("# unmatched release: ", len(traces_runmatched))
+ return (bestmatches,traces_aunmatched,traces_runmatched)
+
+def printTrace(trace):
+ for frame in reversed(trace.frames):
+ print(" ", frame)
+
+def printMatched(bestmatches):
+ for (score,acquire,release) in reversed(bestmatches):
+ print("\n*** Matched trace with score: ", score)
+ print(" acquire: ")
+ printTrace(acquire)
+ print(" release: ")
+ printTrace(release)
+
+def printUnmatched(traces, prefix):
+ for trace in traces:
+ print("\n*** Unmatched trace (", prefix, "):")
+ printTrace(trace)
+
+if __name__ == "__main__":
+ (traces_acquire, traces_release) = readGdbLog(sys.stdin)
+ matches = matchStacks(traces_acquire, traces_release)
+ (bestmatches,traces_au,traces_ru) = bestMatches(traces_acquire, traces_release, matches)
+ # print output, sorted with the most suspicious stuff first:
+ printUnmatched(traces_au, "acquire")
+ printUnmatched(traces_ru, "release")
+ printMatched(bestmatches)
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/removetooltip_markups.sh b/bin/removetooltip_markups.sh
new file mode 100755
index 000000000..5699fce99
--- /dev/null
+++ b/bin/removetooltip_markups.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Run the script in the core directory to remove all tooltip_markup
+# properties from the .ui files
+
+SED_BIN=`which sed`
+CUT_BIN=`which cut`
+LOG_FILE="modified-$(date +%s).log"
+
+removeTooltipMarkup()
+{
+ LINE=$(grep -n "<property name=\"tooltip_markup\"" $1 | $CUT_BIN -f 1 -d ':')
+ TEXT=$(grep "<property name=\"tooltip_markup\"" $1)
+ grep -v "<property name=\"tooltip_markup\"" $1 > temp && mv temp $1
+ echo "removed $TEXT from $1 at line $LINE" >> $LOG_FILE
+}
+
+changeTooltipMarkup()
+{
+ LINE=$(grep -n "<property name=\"tooltip_markup\"" $1 | $CUT_BIN -f 1 -d ':')
+ $SED_BIN "s/tooltip_markup/tooltip_text/g" $i > temp && mv temp $1
+ echo "renamed tooltip_markup from $1 at line $LINE" >> $LOG_FILE
+}
+
+checkTooltipMarkup()
+{
+ TEXT=`grep "<property name=\"tooltip_text\"" $1`
+ MARKUP=`grep "<property name=\"tooltip_markup\"" $1`
+
+ if [[ $MARKUP ]] && [[ $TEXT ]]
+ then
+ removeTooltipMarkup "$1"
+ fi
+ if [[ $MARKUP ]] && [[ ! $TEXT ]]
+ then
+ changeTooltipMarkup "$1"
+ fi
+}
+
+shopt -s globstar
+echo " " > $LOG_FILE
+for i in **/*.ui; do
+ echo -n "."
+ checkTooltipMarkup "$i"
+done
+
+echo
+echo "Done!"
diff --git a/bin/rename-sw-abbreviations.sh b/bin/rename-sw-abbreviations.sh
new file mode 100755
index 000000000..219b4f269
--- /dev/null
+++ b/bin/rename-sw-abbreviations.sh
@@ -0,0 +1,43 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# This script renames the most annoying abbreviations in Writer (and partially
+# in the shared code too). Just run it in the source directory.
+
+# sw only:
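+# (each "Old/New" pair below becomes a guarded s/Old/New/g substitution,
+# e.g. "Fmt/Format" rewrites every Fmt to Format outside excluded lines)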
+
+for I in "FrmFmt/FrameFormat" "Fmt/Format" "Cntnt/Content" "Txt/Text" "Tbl/Table" "GotoFld/GotoFormatField" "Fld/Field" "Ftn/Footnote" "Updt/Update" "Fml/Formula" "Hnt/Hint" "CurCrsr/CurrentCursor" "VisCrsr/VisibleCursor" "Crsr/Cursor" "CntFrm/ContentFrame" "Frm/Frame" "Stk/Stack"
+do
+ S="${I%/*}"
+ # change all except the filenames (in the .mk and in #include)
+ # also avoid numFmt (OOXML token) and other stuff that must stay verbatim
+ git grep -l "$S" sw/ | grep -v -e '\.mk' -e '/data/' -e '/testdocuments/' | xargs sed -i '/\(#include\|numFmt\|ForeignTxt\)/ !{ s/'"$I"'/g }'
+done
+
+# global:
+
+for I in "SvxSwAutoFmtFlags/SvxSwAutoFormatFlags" "GetCharFmtName/GetCharFormatName" \
+ "SvxFmtBreakItem/SvxFormatBreakItem" "SvxFmtKeepItem/SvxFormatKeepItem" \
+ "SvxFmtSplitItem/SvxFormatSplitItem" "etTxtLeft/etTextLeft" \
+ "etTxtFirstLineOfst/etTextFirstLineOfst" "CntntProtected/ContentProtected" \
+ "etTxtColor/etTextColor" "ClearFldColor/ClearFieldColor" \
+ "etCntntProtect/etContentProtect" "etPropTxtFirstLineOfst/etPropTextFirstLineOfst" \
+ "etCharFmtName/etCharFormatName" "HasMergeFmtTbl/HasMergeFormatTable" \
+ "etMergeFmtIndex/etMergeFormatIndex" "bAFmtByInput/bAFormatByInput" \
+ "bAFmt/bAFormat" "IsTxtFmt/IsTextFormat" "BuildWhichTbl/BuildWhichTable" \
+ "etFld/etField" "IsAutoFmtByInput/IsAutoFormatByInput" \
+ "etAutoFmtByInput/etAutoFormatByInput" "etMacroTbl/etMacroTable" \
+ "SvxClipboardFmtItem/SvxClipboardFormatItem" "SwFlyFrmFmt/SwFlyFrameFormat" \
+ "etTxtSize/etTextSize"
+do
+ S="${I%/*}"
+ git grep -l "$S" | grep -v -e '\.mk' -e 'rename-sw-abbreviations.sh' | xargs sed -i "s/$I/g"
+done
+
+# vim: set et sw=4 ts=4 textwidth=0:
diff --git a/bin/run b/bin/run
new file mode 100755
index 000000000..523da3c0e
--- /dev/null
+++ b/bin/run
@@ -0,0 +1,76 @@
+#!/bin/sh
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# simple wrapper script to run non-installed executables from workdir
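+#
+# usage: bin/run <executable-name> [args...]
+# where <executable-name> is looked up in workdir/LinkTarget/Executable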
+
+setdefaults()
+{
+ dir=$(realpath "$(pwd)")
+
+ while test ! -d "${dir}/instdir/program" ; do
+ if test "${dir}" = "/"; then
+ echo "error: cannot find \"program\" dir from \"$(pwd)\""
+ exit 1
+ fi
+ dir=$(realpath "${dir}/..")
+ done
+
+ exedir="${dir}"/workdir/LinkTarget/Executable
+ export URE_BOOTSTRAP=file://"${dir}"/instdir/program/fundamentalrc
+}
+
+if uname | grep -i CYGWIN >/dev/null; then
+
+ setdefaults
+
+ exedir=$(cygpath -m "${dir}"/workdir/LinkTarget/Executable)
+ export URE_BOOTSTRAP=file:///$(cygpath -m "${dir}")/instdir/program/fundamental.ini
+ export PATH=${PATH:+$PATH:}"${dir}"/instdir/program
+ SEARCH_PATH="${PATH}"
+
+elif [ $(uname) = Darwin ]; then
+
+ dir=$(pwd)
+
+ # Get PRODUCTNAME from config_host.mk, LibreOffice or LibreOfficeDev
+ eval `grep 'export PRODUCTNAME=' config_host.mk`
+
+ if [ ! -d "${dir}/instdir/$PRODUCTNAME.app" ]; then
+ echo "error: cannot find \"instdir/$PRODUCTNAME.app\" dir in \"$(pwd)\""
+ exit 1
+ fi
+
+ exedir="$dir"/workdir/LinkTarget/Executable
+ export URE_BOOTSTRAP=file://"${dir}"/instdir/$PRODUCTNAME.app/Contents/Resources/fundamentalrc
+ export DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH:+$DYLD_LIBRARY_PATH:}"${dir}"/instdir/$PRODUCTNAME.app/Contents/Frameworks
+ SEARCH_PATH="${DYLD_LIBRARY_PATH}"
+
+elif [ $(uname) = Haiku ]; then
+
+ setdefaults
+
+ export LIBRARY_PATH=${LIBRARY_PATH:+$LIBRARY_PATH:}"${dir}"/instdir/program
+ SEARCH_PATH="${LIBRARY_PATH}"
+
+else
+
+ setdefaults
+
+ export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:+$LD_LIBRARY_PATH:}"${dir}"/instdir/program
+ SEARCH_PATH="${LD_LIBRARY_PATH}"
+
+fi
+
+# echo "setting URE_BOOTSTRAP to: ${URE_BOOTSTRAP}"
+# echo "setting search path to: ${SEARCH_PATH}"
+# echo "execing: ${exedir}/$1"
+
+exec ${LO_TRACE} "${exedir}/$@"
+
+# vi:set shiftwidth=4 expandtab:
diff --git a/bin/sanitize-blacklist.txt b/bin/sanitize-blacklist.txt
new file mode 100644
index 000000000..e3e995f1a
--- /dev/null
+++ b/bin/sanitize-blacklist.txt
@@ -0,0 +1,12 @@
+[float-divide-by-zero]
+src:*/sc/source/core/tool/interpr1.cxx
+src:*/sc/source/core/tool/interpr2.cxx
+src:*/scaddins/source/analysis/analysis.cxx
+src:*/scaddins/source/analysis/financial.cxx
+[signed-integer-overflow]
+src:*/boost/boost/rational.hpp
+src:*/include/tools/gen.hxx
+src:*/tools/source/generic/gen.cxx
+[vptr]
+fun:_ZN4cppu14throwExceptionERKN3com3sun4star3uno3AnyE
+src:*/include/com/sun/star/uno/Reference.hxx
diff --git a/bin/sanitize-image-links b/bin/sanitize-image-links
new file mode 100755
index 000000000..6b5a2ec48
--- /dev/null
+++ b/bin/sanitize-image-links
@@ -0,0 +1,38 @@
+#!/bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# This will reorder icon-themes/*/links.txt to the right order
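+#
+# each links.txt line has the form "<link> <original>"; duplicate pairs
+# are dropped from git, swapped pairs are put into the right order and
+# lines where neither file exists are removed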
+
+for I in icon-themes/*/links.txt ; do
+ D="${I%/links.txt}"
+ cat "$I" | while read LINK ORIG
+ do
+ if [ -f "$D/$LINK" -a -f "$D/$ORIG" ] ; then
+ if diff "$D/$LINK" "$D/$ORIG" >/dev/null 2>&1 ; then
+ echo "$I: removing $LINK from git: both $LINK and $ORIG are the same files" 1>&2
+ git rm "$D/$LINK" 1>/dev/null
+ echo $LINK $ORIG
+ else
+ echo "$I: link and orig differs, check the images, and remove manually: $LINK $ORIG" 1>&2
+ echo $LINK $ORIG
+ fi
+ elif [ -f "$D/$LINK" ] ; then
+ echo "$I: swapping to right order: $ORIG $LINK" 1>&2
+ echo $ORIG $LINK
+ elif [ -n "$LINK" -a "${LINK:0:1}" != "#" -a ! -f "$D/$LINK" -a ! -f "$D/$ORIG" ] ; then
+ echo "$I: neither exists, removing the line: $LINK $ORIG" 1>&2
+ else
+ echo $LINK $ORIG
+ fi
+ done > "$I-fixed"
+
+ mv "$I-fixed" "$I"
+done
+
+# vim: set expandtab sw=4 ts=4:
diff --git a/bin/striplanguagetags.sh b/bin/striplanguagetags.sh
new file mode 100755
index 000000000..0df4b0be5
--- /dev/null
+++ b/bin/striplanguagetags.sh
@@ -0,0 +1,74 @@
+#!/usr/bin/env bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# take a .zip containing a flat hierarchy of odf files and strip out the
+# language and country tags in each of them and repack it all up
+# should convert templates so that documents based on them use
+# the default-document-language rather than the hardcoded lang-tag
+#
+# All a bit hacky, but it should work
+
+if [ -z "$CALLXSLTPROC" ]; then
+ echo "$0: \$CALLXSLTPROC not defined!"
+ echo "$0: Apparently we are not called from the build process, bailing out."
+ exit 1
+fi
+
+tempfoo=`basename $0`
+
+XSL=`mktemp /tmp/${tempfoo}.XXXXXX`
+if [ $? -ne 0 ]; then
+ echo "$0: Can't create temp file, exiting..."
+ exit 1
+fi
+
+# On Windows, xsltproc is a non-Cygwin program, so we can't pass
+# a Cygwin /tmp path to it
+[ "$COM" == MSC ] && XSL=`cygpath -m -s $XSL`
+
+WRKDIR=`mktemp -d /tmp/${tempfoo}.XXXXXX`
+if [ $? -ne 0 ]; then
+ echo "$0: Can't create temp dir, exiting..."
+ exit 1
+fi
+
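+# XSL identity transform that copies everything through except for the
+# language/country/script attributes matched by the empty templates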
+cat > $XSL << EOF
+<?xml version="1.0" encoding="UTF-8"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
+
+<xsl:template match="node()|@*">
+ <xsl:copy>
+ <xsl:apply-templates select="@*|node()"/>
+ </xsl:copy>
+</xsl:template>
+
+<xsl:template match="@fo:language"/>
+<xsl:template match="@fo:country"/>
+<xsl:template match="@fo:script"/>
+<xsl:template match="@number:rfc-language-tag"/>
+<xsl:template match="@style:rfc-language-tag"/>
+<xsl:template match="@table:rfc-language-tag"/>
+<xsl:template match="@style:rfc-language-tag-asian"/>
+<xsl:template match="@style:rfc-language-tag-complex"/>
+
+</xsl:stylesheet>
+EOF
+
+unzip -q $1 -d $WRKDIR
+pushd $WRKDIR
+for a in *; do
+ unzip -qc $a styles.xml > styles.tmp
+ eval "$CALLXSLTPROC -o styles.xml $XSL styles.tmp"
+ zip -qr $a styles.xml
+ rm styles.xml styles.tmp
+done
+popd
+zip -qrj $1 $WRKDIR
+rm -rf $WRKDIR
+rm -f $XSL
diff --git a/bin/stubify.pl b/bin/stubify.pl
new file mode 100755
index 000000000..c61bc531e
--- /dev/null
+++ b/bin/stubify.pl
@@ -0,0 +1,262 @@
+#!/usr/bin/env perl
+
+use Fcntl;
+use POSIX;
+use strict;
+
+# simple pkgconfig goodness
+my $destdir;
+my $recursive = 0;
+my $assembler_out = 0;
+my %pkg_configs = ();
+my @pkg_config_paths = split(/:/, $ENV{PKG_CONFIG_PATH});
+push @pkg_config_paths, "/usr";
+
+# Stubify a shared library ...
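+# For each defined symbol it emits a minimal COMDAT stub; e.g. a function
+# symbol "foo" (name hypothetical) would come out roughly as:
+#   .section .text.foo,"aG",@progbits,foo,comdat
+#   .globl foo
+#   .type foo,@function
+#   foo:
+#   ret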
+sub read_gen_symbols($$)
+{
+ my ($shlib, $fh) = @_;
+ my $obj;
+
+ print $fh "\t.file \"$shlib\"\n";
+ open $obj, "objdump -T $shlib|" or die "Can't objdump $shlib: $!";
+
+ while (my $line = <$obj>) {
+ $line =~ /([0-9a-f]*)\s+([gw ])\s+..\s+(\S*)\s*([0-9a-f]+)..............(.*)/ || next;
+ my ($address, $linkage, $type, $size, $symbol) = ($1, $2, $3, $4, $5);
+
+ next if ($type eq '*UND*' || $type eq '*ABS*');
+
+# print "Symbol '$symbol' type '$type' '$linkage' addr $address, size $size\n";
+
+ $symbol || die "no symbol for line $line";
+
+ next if ($symbol eq '_init' || $symbol eq '_fini');
+
+ $linkage =~ s/g//g;
+
+ my $progbits = '@progbits';
+ $progbits = '@nobits' if ($type eq '.bss');
+ print $fh "\t.section $type.$symbol,\"a".$linkage."G\",$progbits,$symbol,comdat\n";
+ print $fh ".globl $symbol\n";
+ print $fh "\t.type $symbol,";
+ if ($type eq '.text') {
+ print $fh "\@function\n";
+ } else {
+ print $fh "\@object\n";
+ }
+ print $fh "$symbol:\n";
+ if ($type eq '.text') {
+ print $fh "\tret\n";
+ } else {
+ my $isize = hex($size);
+ print $fh "\t.size $symbol, $isize\n";
+ for (my $i = 0; $i < $isize; $i++) {
+ print $fh "\t.byte 0\n";
+ }
+ }
+ print $fh "\n";
+ }
+
+ close $obj;
+}
+
+sub stubify($$)
+{
+ my $shlib = shift;
+ my $output = shift;
+ my ($pipe, $tmpf);
+
+ my $tmpname;
+ do {
+ $tmpname = tmpnam();
+ } until sysopen($tmpf, $tmpname, O_RDWR|O_CREAT|O_EXCL, 0666);
+ close($tmpf);
+
+ if ($assembler_out) {
+ open ($pipe, ">-");
+ } else {
+ open ($pipe, "| as -o $tmpname") || die "can't start assembler: $!";
+ }
+ read_gen_symbols ($shlib, $pipe);
+ close ($pipe) || die "Failed to assemble to: $tmpname: $!";
+
+ system ("gcc -shared -o $output $tmpname") && die "failed to exec gcc: $!";
+ unlink $tmpname;
+}
+
+sub help_exit()
+{
+ print "Usage: stubify <destdir> <pkg-config-names>\n";
+ print "Converts libraries into stubs, and bundles them and their pkg-config files\n";
+ print "into destdir\n";
+ print " -R stubbify and include all dependent pkgconfig files\n";
+ exit 1;
+}
+
+sub parse_pkgconfig($$)
+{
+ my $name = shift;
+ my $file = shift;
+ my $fh;
+ my %hash;
+ my @hashes;
+
+ print "parse $file\n";
+ open ($fh, $file) || die "Can't open $file: $!";
+ while (<$fh>) {
+ my ($key, $value);
+ if (/^\s*([^=]+)\s*=\s*([^=]+)\s*$/) {
+ $key = $1; $value = $2;
+ } elsif (/^\s*([^:]+)\s*:\s*([^:]+)\s*$/) {
+ $key = $1; $value = $2;
+ } elsif (/^\s*$/) {
+ next;
+ } else {
+ die "invalid pkgconfig line: $_\n";
+ }
+ chomp ($key); chomp ($value);
+ $hash{$key} = $value;
+ }
+ close ($fh);
+ for my $key (keys (%hash)) {
+ print "\t'$key'\t=\t'$hash{$key}'\n";
+ }
+
+ $hash{_Name} = $name;
+ $hash{_File} = $file;
+
+ push @hashes, \%hash;
+ if ($recursive &&
+ !defined $pkg_configs{$name} &&
+ defined $hash{Requires}) {
+ my @reqs = ();
+ for my $req (split (/[ ,]/, $hash{Requires})) {
+ print "parse $req of $name\n";
+ push @reqs, get_pc_files($req);
+ }
+ $hash{_Requires} = \@reqs;
+ push @hashes, @reqs;
+ }
+ $pkg_configs{$name} = \%hash;
+ return @hashes;
+}
+
+sub get_pc_files($)
+{
+ my $name = shift;
+ for my $prefix (@pkg_config_paths) {
+ my $path = "$prefix/lib/pkgconfig/$name.pc";
+ return parse_pkgconfig ($name,$path) if (-f $path);
+ }
+ die "Failed to find pkg-config file for $name";
+}
+
+# primitive substitution
+sub get_var($$)
+{
+ my ($pc, $var) = @_;
+ my $val = $pc->{"$var"};
+ while ($val =~ m/^(.*)\$\{\s*(\S+)\s*\}(.*)$/) {
+ $val = $1 . get_var($pc, $2). $3;
+ }
+ return $val;
+}
+
+sub copy_lib($@)
+{
+ my $lib = shift;
+ while (my $path = shift) {
+ my $name = "$path/$lib";
+ next if (! -f $name);
+
+ # need to run ldconfig post install ...
+ while (-l $name) {
+ my $dir = $name;
+ $dir =~ s/\/[^\/]*$//;
+ my $link = readlink($name);
+ if ($link =~ m/^\//) {
+ $name = $link;
+ } else {
+ $name = "$dir/$link";
+ }
+ }
+
+ # ignore /lib - they use monstrous symbol versioning
+ if ($name =~ m/^\/lib/) {
+ print "\tskipping system library: $lib in $name\n";
+ return;
+ }
+
+ stubify ($name, "$destdir/$name");
+ }
+}
+
+sub copy_and_stubify ($)
+{
+ my $pc = shift;
+
+ `mkdir -p $destdir/usr/lib/pkgconfig`;
+ `mkdir -p $destdir/$pc->{libdir}` if (defined $pc->{libdir});
+ `mkdir -p $destdir/$pc->{includedir}` if (defined $pc->{includedir});
+
+ # copy .pc across - FIXME, may need to re-write paths
+ `cp -a $pc->{_File} $destdir/usr/lib/pkgconfig`;
+
+ # copy includes across
+ my @includes = split (/ /, get_var ($pc, "Cflags"));
+ for my $arg (@includes) {
+ if ($arg =~ m/^-I(.*)$/) {
+ my $srcdir = $1;
+ if (! -d $srcdir || $srcdir eq '/usr/include') {
+ print "Warning: bogus include of '$srcdir' for pkg $pc->{_Name}\n";
+ } else {
+ `mkdir -p $destdir/$srcdir`;
+ `cp -a $srcdir/* $destdir/$srcdir`;
+ }
+ }
+ }
+
+ # stubify libraries
+ my @libs = split (/ /, get_var ($pc, "Libs"));
+ my @libpath = ( "/lib", "/usr/lib" );
+ for my $arg (@libs) {
+ if ($arg =~ m/^-l(.*)$/) {
+ my $lib = "lib".$1.".so";
+# print "lib $lib @libpath?\n";
+ copy_lib ($lib, @libpath);
+ } elsif ($arg =~ m/^-L(.*)$/) {
+ my $path = $1;
+ push (@libpath, $path) if (!grep { $_ eq $path } @libpath);
+ }
+ }
+}
+
+my @pcnames = ();
+my @tostub;
+
+for my $arg (@ARGV) {
+ if ($arg eq '--help' || $arg eq '-h') {
+ help_exit();
+ } elsif ($arg eq '-r' || $arg eq '-R') {
+ $recursive = 1;
+ } elsif (!defined $destdir) {
+ $destdir = $arg;
+ } else {
+ push @pcnames, $arg;
+ }
+}
+
+help_exit() if (!defined $destdir);
+`mkdir -p $destdir`;
+
+for my $name (@pcnames) {
+ push @tostub, get_pc_files($name);
+}
+print "stubify: ";
+select STDERR; $| = 1;
+for my $pc (@tostub) {
+ print " " . $pc->{_Name} . "\n";
+ copy_and_stubify ($pc);
+}
+print "\n";
diff --git a/bin/symbolstore.py b/bin/symbolstore.py
new file mode 100755
index 000000000..7ddd8d2ac
--- /dev/null
+++ b/bin/symbolstore.py
@@ -0,0 +1,644 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# The Mozilla Foundation
+# Portions created by the Initial Developer are Copyright (C) 2007
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Ted Mielczarek <ted.mielczarek@gmail.com>
+# Ben Turner <mozilla@songbirdnest.com>
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+#
+# Usage: symbolstore.py <params> <dump_syms path> <symbol store path>
+# <debug info files or dirs>
+# Runs dump_syms on each debug info file specified on the command line,
+# then places the resulting symbol file in the proper directory
+# structure in the symbol store path. Accepts multiple files
+# on the command line, so can be called as part of a pipe using
+# find <dir> | xargs symbolstore.py <dump_syms> <storepath>
+# But really, you might just want to pass it <dir>.
+#
+# Parameters accepted:
+# -c : Copy debug info files to the same directory structure
+# as sym files
+# -a "<archs>" : Run dump_syms -a <arch> for each space separated
+# cpu architecture in <archs> (only on macOS)
+# -s <srcdir> : Use <srcdir> as the top source directory to
+# generate relative filenames.
+
+import sys
+import os
+import re
+import shutil
+from optparse import OptionParser
+
+# Utility classes
+
+class VCSFileInfo:
+ """ A base class for version-controlled file information. Ensures that the
+ following attributes are generated only once (successfully):
+
+ self.root
+ self.clean_root
+ self.revision
+ self.filename
+
+ The attributes are generated by a single call to the GetRoot,
+ GetRevision, and GetFilename methods. Those methods are explicitly not
+ implemented here and must be implemented in derived classes. """
+
+ def __init__(self, file):
+ if not file:
+ raise ValueError
+ self.file = file
+
+ def __getattr__(self, name):
+ """ __getattr__ is only called for attributes that are not set on self,
+ so setting self.[attr] will prevent future calls to the GetRoot,
+ GetRevision, and GetFilename methods. We don't set the values on
+ failure on the off chance that a future call might succeed. """
+
+ if name == "root":
+ root = self.GetRoot()
+ if root:
+ self.root = root
+ return root
+
+ elif name == "clean_root":
+ clean_root = self.GetCleanRoot()
+ if clean_root:
+ self.clean_root = clean_root
+ return clean_root
+
+ elif name == "revision":
+ revision = self.GetRevision()
+ if revision:
+ self.revision = revision
+ return revision
+
+ elif name == "filename":
+ filename = self.GetFilename()
+ if filename:
+ self.filename = filename
+ return filename
+
+ raise AttributeError
+
+ def GetRoot(self):
+ """ This method should return the unmodified root for the file or 'None'
+ on failure. """
+ raise NotImplementedError
+
+ def GetCleanRoot(self):
+ """ This method should return the repository root for the file or 'None'
+ on failure. """
+ raise NotImplementedError
+
+ def GetRevision(self):
+ """ This method should return the revision number for the file or 'None'
+ on failure. """
+ raise NotImplementedError
+
+ def GetFilename(self):
+ """ This method should return the repository-specific filename for the
+ file or 'None' on failure. """
+ raise NotImplementedError
+
+class CVSFileInfo(VCSFileInfo):
+ """ A class to maintain version information for files in a CVS repository.
+ Derived from VCSFileInfo. """
+
+ def __init__(self, file, srcdir):
+ VCSFileInfo.__init__(self, file)
+ self.srcdir = srcdir
+
+ def GetRoot(self):
+ (path, filename) = os.path.split(self.file)
+ root = os.path.join(path, "CVS", "Root")
+ if not os.path.isfile(root):
+ return None
+ f = open(root, "r")
+ root_name = f.readline().strip()
+ f.close()
+ if root_name:
+ return root_name
+ print >> sys.stderr, "Failed to get CVS Root for %s" % filename
+ return None
+
+ def GetCleanRoot(self):
+ parts = self.root.split('@')
+ if len(parts) > 1:
+ # we don't want the extra colon
+ return parts[1].replace(":","")
+ print >> sys.stderr, "Failed to get CVS Root for %s" % filename
+ return None
+
+ def GetRevision(self):
+ (path, filename) = os.path.split(self.file)
+ entries = os.path.join(path, "CVS", "Entries")
+ if not os.path.isfile(entries):
+ return None
+ f = open(entries, "r")
+ for line in f:
+ parts = line.split("/")
+ if len(parts) > 1 and parts[1] == filename:
+ return parts[2]
+ print >> sys.stderr, "Failed to get CVS Revision for %s" % filename
+ return None
+
+ def GetFilename(self):
+ file = self.file
+ if self.revision and self.clean_root:
+ if self.srcdir:
+ # strip the base path off
+ # but we actually want the last dir in srcdir
+ file = os.path.normpath(file)
+ # the lower() is to handle win32+vc8, where
+ # the source filenames come out all lowercase,
+ # but the srcdir can be mixed case
+ if file.lower().startswith(self.srcdir.lower()):
+ file = file[len(self.srcdir):]
+ (head, tail) = os.path.split(self.srcdir)
+ if tail == "":
+ tail = os.path.basename(head)
+ file = tail + file
+ return "cvs:%s:%s:%s" % (self.clean_root, file, self.revision)
+ return file
+
+class SVNFileInfo(VCSFileInfo):
+ url = None
+ repo = None
+ svndata = {}
+
+ # This regex separates protocol and optional username/password from a url.
+ # For instance, all the following urls will be transformed into
+ # 'foo.com/bar':
+ #
+ # http://foo.com/bar
+ # svn+ssh://user@foo.com/bar
+ # svn+ssh://user:pass@foo.com/bar
+ #
+ rootRegex = re.compile(r'^\S+?:/+(?:[^\s/]*@)?(\S+)$')
+
+ def __init__(self, file):
+ """ We only want to run subversion's info tool once so pull all the data
+ here. """
+
+ VCSFileInfo.__init__(self, file)
+
+ if os.path.isfile(file):
+ command = os.popen("svn info %s" % file, "r")
+ for line in command:
+ # The last line of the output is usually '\n'
+ if line.strip() == '':
+ continue
+ # Split into a key/value pair on the first colon
+ key, value = line.split(':', 1)
+ if key in ["Repository Root", "Revision", "URL"]:
+ self.svndata[key] = value.strip()
+
+ exitStatus = command.close()
+ if exitStatus:
+ print >> sys.stderr, "Failed to get SVN info for %s" % file
+
+ def GetRoot(self):
+ key = "Repository Root"
+ if key in self.svndata:
+ match = self.rootRegex.match(self.svndata[key])
+ if match:
+ return match.group(1)
+ print >> sys.stderr, "Failed to get SVN Root for %s" % self.file
+ return None
+
+ # File bug to get this teased out from the current GetRoot, this is temporary
+ def GetCleanRoot(self):
+ return self.root
+
+ def GetRevision(self):
+ key = "Revision"
+ if key in self.svndata:
+ return self.svndata[key]
+ print >> sys.stderr, "Failed to get SVN Revision for %s" % self.file
+ return None
+
+ def GetFilename(self):
+ if self.root and self.revision:
+ if "URL" in self.svndata and "Repository Root" in self.svndata:
+ url, repo = self.svndata["URL"], self.svndata["Repository Root"]
+ file = url[len(repo) + 1:]
+ return "svn:%s:%s:%s" % (self.root, file, self.revision)
+ print >> sys.stderr, "Failed to get SVN Filename for %s" % self.file
+ return self.file
+
+# Utility functions
+
+# A cache of files for which VCS info has already been determined. Used to
+# prevent extra filesystem activity or process launching.
+vcsFileInfoCache = {}
+
+def GetVCSFilename(file, srcdir):
+ """Given a full path to a file, and the top source directory,
+ look for version control information about this file, and return
+ a tuple containing
+ 1) a specially formatted filename that contains the VCS type,
+ VCS location, relative filename, and revision number, formatted like:
+ vcs:vcs location:filename:revision
+ For example:
+ cvs:cvs.mozilla.org/cvsroot:mozilla/browser/app/nsBrowserApp.cpp:1.36
+ 2) the unmodified root information if it exists"""
+ (path, filename) = os.path.split(file)
+ if path == '' or filename == '':
+ return (file, None)
+
+ fileInfo = None
+ root = ''
+ if file in vcsFileInfoCache:
+ # Already cached this info, use it.
+ fileInfo = vcsFileInfoCache[file]
+ else:
+ if os.path.isdir(os.path.join(path, "CVS")):
+ fileInfo = CVSFileInfo(file, srcdir)
+ if fileInfo:
+ root = fileInfo.root
+ elif os.path.isdir(os.path.join(path, ".svn")) or \
+ os.path.isdir(os.path.join(path, "_svn")):
+ fileInfo = SVNFileInfo(file)
+ vcsFileInfoCache[file] = fileInfo
+
+ if fileInfo:
+ file = fileInfo.filename
+
+ # we want forward slashes on win32 paths
+ return (file.replace("\\", "/"), root)
+
+def GetPlatformSpecificDumper(**kwargs):
+ """This function simply returns a instance of a subclass of Dumper
+ that is appropriate for the current platform."""
+ return {'win32': Dumper_Win32,
+ 'cygwin': Dumper_Win32,
+ 'linux2': Dumper_Linux,
+ 'sunos5': Dumper_Solaris,
+ 'darwin': Dumper_Mac}[sys.platform](**kwargs)
+
+def SourceIndex(fileStream, outputPath, cvs_root):
+ """Takes a list of files, writes info to a data block in a .stream file"""
+ # Creates a .pdb.stream file in the mozilla\objdir to be used for source indexing
+ # Create the srcsrv data block that indexes the pdb file
+ result = True
+ pdbStreamFile = open(outputPath, "w")
+ pdbStreamFile.write('''SRCSRV: ini ------------------------------------------------\r\nVERSION=1\r\nSRCSRV: variables ------------------------------------------\r\nCVS_EXTRACT_CMD=%fnchdir%(%targ%)cvs.exe -d %fnvar%(%var2%) checkout -r %var4% -d %var4% -N %var3%\r\nMYSERVER=''')
+ pdbStreamFile.write(cvs_root)
+ pdbStreamFile.write('''\r\nSRCSRVTRG=%targ%\%var4%\%fnbksl%(%var3%)\r\nSRCSRVCMD=%CVS_EXTRACT_CMD%\r\nSRCSRV: source files ---------------------------------------\r\n''')
+ pdbStreamFile.write(fileStream) # can't do string interpolation because the source server also uses this and so there are % in the above
+ pdbStreamFile.write("SRCSRV: end ------------------------------------------------\r\n\n")
+ pdbStreamFile.close()
+ return result
+
+class Dumper:
+ """This class can dump symbols from a file with debug info, and
+ store the output in a directory structure that is valid for use as
+ a Breakpad symbol server. Requires a path to a dump_syms binary--
+ |dump_syms| and a directory to store symbols in--|symbol_path|.
+ Optionally takes a list of processor architectures to process from
+ each debug file--|archs|, the full path to the top source
+ directory--|srcdir|, for generating relative source file names,
+ and an option to copy debug info files alongside the dumped
+ symbol files--|copy_debug|, mostly useful for creating a
+ Microsoft Symbol Server from the resulting output.
+
+ You don't want to use this directly if you intend to call
+ ProcessDir. Instead, call GetPlatformSpecificDumper to
+ get an instance of a subclass."""
+ def __init__(self, dump_syms, symbol_path,
+ archs=None, srcdir=None, copy_debug=False, vcsinfo=False, srcsrv=False):
+ # popen likes absolute paths, at least on windows
+ self.dump_syms = dump_syms
+ self.symbol_path = symbol_path
+ if archs is None:
+ # makes the loop logic simpler
+ self.archs = ['']
+ else:
+ self.archs = ['-a %s' % a for a in archs.split()]
+ if srcdir is not None:
+ self.srcdir = os.path.normpath(srcdir)
+ else:
+ self.srcdir = None
+ self.copy_debug = copy_debug
+ self.vcsinfo = vcsinfo
+ self.srcsrv = srcsrv
+
+ # subclasses override this
+ def ShouldProcess(self, file):
+ return False
+
+ def RunFileCommand(self, file):
+ """Utility function, returns the output of file(1)"""
+ try:
+ # we use -L to read the targets of symlinks,
+ # and -b to print just the content, not the filename
+ return os.popen("file -Lb " + file).read()
+ except:
+ return ""
+
+ # This is a no-op except on Win32
+ def FixFilenameCase(self, file):
+ return file
+
+ # This is a no-op except on Win32
+ def SourceServerIndexing(self, debug_file, guid, sourceFileStream, cvs_root):
+ return ""
+
+ # subclasses override this if they want to support this
+ def CopyDebug(self, file, debug_file, guid):
+ pass
+
+ def Process(self, file_or_dir):
+ "Process a file or all the (valid) files in a directory."
+ if os.path.isdir(file_or_dir):
+ return self.ProcessDir(file_or_dir)
+ elif os.path.isfile(file_or_dir):
+ return self.ProcessFile(file_or_dir)
+ # maybe it doesn't exist?
+ return False
+
+ def ProcessDir(self, dir):
+ """Process all the valid files in this directory. Valid files
+ are determined by calling ShouldProcess."""
+ result = True
+ for root, dirs, files in os.walk(dir):
+ for f in files:
+ fullpath = os.path.join(root, f)
+ if self.ShouldProcess(fullpath):
+ if not self.ProcessFile(fullpath):
+ result = False
+ return result
+
+ def ProcessFile(self, file):
+ """Dump symbols from this file into a symbol file, stored
+ in the proper directory structure in |symbol_path|."""
+ result = False
+ sourceFileStream = ''
+ # tries to get cvsroot from the .mozconfig first - if it's not set
+ # the tinderbox cvs_path will be assigned further down
+ cvs_root = os.environ.get("SRCSRV_ROOT")
+ for arch in self.archs:
+ try:
+ cmd = os.popen("%s %s %s" % (self.dump_syms, arch, file), "r")
+ module_line = cmd.next()
+ if module_line.startswith("MODULE"):
+ # MODULE os cpu guid debug_file
+ (guid, debug_file) = (module_line.split())[3:5]
+ # strip off .pdb extensions, and append .sym
+ sym_file = re.sub("\.pdb$", "", debug_file) + ".sym"
+ # we do want forward slashes here
+ rel_path = os.path.join(debug_file,
+ guid,
+ sym_file).replace("\\", "/")
+ full_path = os.path.normpath(os.path.join(self.symbol_path,
+ rel_path))
+ try:
+ os.makedirs(os.path.dirname(full_path))
+ except OSError: # already exists
+ pass
+ f = open(full_path, "w")
+ f.write(module_line)
+ # now process the rest of the output
+ for line in cmd:
+ if line.startswith("FILE"):
+ # FILE index filename
+ (x, index, filename) = line.split(None, 2)
+ if sys.platform == "sunos5":
+ start = filename.find(self.srcdir)
+ if start == -1:
+ start = 0
+ filename = filename[start:]
+ filename = self.FixFilenameCase(filename.rstrip())
+ sourcepath = filename
+ if self.vcsinfo:
+ (filename, rootname) = GetVCSFilename(filename, self.srcdir)
+ # set cvs_root in case the loop through files ends on an empty rootname
+ if cvs_root is None:
+ if rootname:
+ cvs_root = rootname
+ # gather up files with cvs for indexing
+ if filename.startswith("cvs"):
+ (ver, checkout, source_file, revision) = filename.split(":", 3)
+ sourceFileStream += sourcepath + "*MYSERVER*" + source_file + '*' + revision + "\r\n"
+ f.write("FILE %s %s\n" % (index, filename))
+ else:
+ # pass through all other lines unchanged
+ f.write(line)
+ f.close()
+ cmd.close()
+ # we output relative paths so callers can get a list of what
+ # was generated
+ print rel_path
+ if self.copy_debug:
+ self.CopyDebug(file, debug_file, guid)
+ if self.srcsrv:
+ # Call on SourceServerIndexing
+ result = self.SourceServerIndexing(debug_file, guid, sourceFileStream, cvs_root)
+ result = True
+ except StopIteration:
+ pass
+ except:
+ print >> sys.stderr, "Unexpected error: ", sys.exc_info()[0]
+ raise
+ return result
+
+# Platform-specific subclasses. For the most part, these just have
+# logic to determine what files to extract symbols from.
+
+class Dumper_Win32(Dumper):
+ fixedFilenameCaseCache = {}
+
+ def ShouldProcess(self, file):
+ """This function will allow processing of pdb files that have dll
+ or exe files with the same base name next to them."""
+ if file.endswith(".pdb"):
+ (path,ext) = os.path.splitext(file)
+ if os.path.isfile(path + ".exe") or os.path.isfile(path + ".dll") or os.path.isfile(path + ".bin"):
+ return True
+ return False
+
+ def FixFilenameCase(self, file):
+ """Recent versions of Visual C++ put filenames into
+ PDB files as all lowercase. If the file exists
+ on the local filesystem, fix it."""
+
+ # Use a cached version if we have one.
+ if file in self.fixedFilenameCaseCache:
+ return self.fixedFilenameCaseCache[file]
+
+ result = file
+
+ (path, filename) = os.path.split(file)
+ if os.path.isdir(path):
+ lc_filename = filename.lower()
+ for f in os.listdir(path):
+ if f.lower() == lc_filename:
+ result = os.path.join(path, f)
+ break
+
+ # Cache the corrected version to avoid future filesystem hits.
+ self.fixedFilenameCaseCache[file] = result
+ return result
+
+ def CopyDebug(self, file, debug_file, guid):
+ rel_path = os.path.join(debug_file,
+ guid,
+ debug_file).replace("\\", "/")
+ print rel_path
+ full_path = os.path.normpath(os.path.join(self.symbol_path,
+ rel_path))
+ shutil.copyfile(file, full_path)
+
+ def SourceServerIndexing(self, debug_file, guid, sourceFileStream, cvs_root):
+ # Creates a .pdb.stream file in the objdir to be used for source indexing
+ cwd = os.getcwd()
+ streamFilename = debug_file + ".stream"
+ stream_output_path = os.path.join(cwd, streamFilename)
+ # Call SourceIndex to create the .stream file
+ result = SourceIndex(sourceFileStream, stream_output_path, cvs_root)
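+ # the .stream file holds the source-server directives that pdbstr
+ # writes into the PDB's "srcsrv" stream below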
+
+ if self.copy_debug:
+ pdbstr_path = os.environ.get("PDBSTR_PATH")
+ pdbstr = os.path.normpath(pdbstr_path)
+ pdb_rel_path = os.path.join(debug_file, guid, debug_file)
+ pdb_filename = os.path.normpath(os.path.join(self.symbol_path, pdb_rel_path))
+ # move to the dir with the stream files to call pdbstr
+ os.chdir(os.path.dirname(stream_output_path))
+ os.spawnv(os.P_WAIT, pdbstr, [pdbstr, "-w", "-p:" + pdb_filename, "-i:" + streamFilename, "-s:srcsrv"])
+ # clean up all the .stream files when done
+ os.remove(stream_output_path)
+ return result
+
+class Dumper_Linux(Dumper):
+ def ShouldProcess(self, file):
+ """This function will allow processing of files that are
+ executable, or end with the .so extension, and additionally
+ file(1) reports as being ELF files. It expects to find the file
+ command in PATH."""
+ if file.endswith(".so") or file.endswith(".bin") or os.access(file, os.X_OK):
+ return self.RunFileCommand(file).startswith("ELF")
+ return False
+
+ def CopyDebug(self, file, debug_file, guid):
+ # We want to strip out the debug info, and add a
+ # .gnu_debuglink section to the object, so the debugger can
+ # actually load our debug info later.
+ file_dbg = file + ".dbg"
+ os.system("objcopy --only-keep-debug %s %s" % (file, file_dbg))
+ os.system("objcopy --add-gnu-debuglink=%s %s" % (file_dbg, file))
+
+ rel_path = os.path.join(debug_file,
+ guid,
+ debug_file + ".dbg")
+ full_path = os.path.normpath(os.path.join(self.symbol_path,
+ rel_path))
+ shutil.copyfile(file_dbg, full_path)
+ # gzip the shipped debug files
+ os.system("gzip %s" % full_path)
+ print rel_path + ".gz"
+
+class Dumper_Solaris(Dumper):
+ def RunFileCommand(self, file):
+ """Utility function, returns the output of file(1)"""
+ try:
+ output = os.popen("file " + file).read()
+ return output.split('\t')[1]
+ except:
+ return ""
+
+ def ShouldProcess(self, file):
+ """This function will allow processing of files that are
+ executable, or end with the .so extension, and additionally
+ file(1) reports as being ELF files. It expects to find the file
+ command in PATH."""
+ if file.endswith(".so") or os.access(file, os.X_OK):
+ return self.RunFileCommand(file).startswith("ELF")
+ return False
+
+class Dumper_Mac(Dumper):
+ def ShouldProcess(self, file):
+ """This function will allow processing of files that are
+ executable, or end with the .dylib extension, and additionally
+ file(1) reports as being Mach-O files. It expects to find the file
+ command in PATH."""
+ if file.endswith(".dylib") or os.access(file, os.X_OK):
+ return self.RunFileCommand(file).startswith("Mach-O")
+ return False
+
+# Entry point if called as a standalone program
+def main():
+ parser = OptionParser(usage="usage: %prog [options] <dump_syms binary> <symbol store path> <debug info files>")
+ parser.add_option("-c", "--copy",
+ action="store_true", dest="copy_debug", default=False,
+ help="Copy debug info files into the same directory structure as symbol files")
+ parser.add_option("-a", "--archs",
+ action="store", dest="archs",
+ help="Run dump_syms -a <arch> for each space separated cpu architecture in ARCHS (only on macOS)")
+ parser.add_option("-s", "--srcdir",
+ action="store", dest="srcdir",
+ help="Use SRCDIR to determine relative paths to source files")
+ parser.add_option("-v", "--vcs-info",
+ action="store_true", dest="vcsinfo",
+ help="Try to retrieve VCS info for each FILE listed in the output")
+ parser.add_option("-i", "--source-index",
+ action="store_true", dest="srcsrv", default=False,
+ help="Add source index information to debug files, making them suitable for use in a source server.")
+ (options, args) = parser.parse_args()
+
+ # check that pdbstr.exe is available if source indexing was requested
+ if options.srcsrv:
+ pdbstr = os.environ.get("PDBSTR_PATH")
+ if not pdbstr or not os.path.exists(pdbstr):
+ print >> sys.stderr, "Invalid path to pdbstr.exe - please set/check PDBSTR_PATH.\n"
+ sys.exit(1)
+
+ if len(args) < 3:
+ parser.error("not enough arguments")
+
+ dumper = GetPlatformSpecificDumper(dump_syms=args[0],
+ symbol_path=args[1],
+ copy_debug=options.copy_debug,
+ archs=options.archs,
+ srcdir=options.srcdir,
+ vcsinfo=options.vcsinfo,
+ srcsrv=options.srcsrv)
+ for arg in args[2:]:
+ dumper.Process(arg)
+
+# run main if run directly
+if __name__ == "__main__":
+ main()
diff --git a/bin/symstore.sh b/bin/symstore.sh
new file mode 100755
index 000000000..532efb166
--- /dev/null
+++ b/bin/symstore.sh
@@ -0,0 +1,174 @@
+#!/usr/bin/env bash
+
+# Files listed here will not be stored in the symbol store server.
+# The format is a newline-separated string of file names, e.g.
+# BLACK_LIST="python.exe
+# file.dll
+# next_file.exe"
+#
+# It removes "python.exe", "file.dll", and "next_file.exe" from what's
+# added to the symstore.
+BLACK_LIST="python.exe"
+
+# List files here where it's ok for this script to find more than one
+# occurrence in the build tree. Files _not_ included here will generate
+# an error if duplicates are found.
+#
+# Same format as for BLACK_LIST above
+MOREPDBS_OKLIST="libcurl.dll
+freebl3.dll
+libeay32.dll
+nspr4.dll
+nss3.dll
+nssckbi.dll
+nssdbm3.dll
+nssutil3.dll
+plc4.dll
+plds4.dll
+smime3.dll
+softokn3.dll
+sqlite3.dll
+ssl3.dll
+ssleay32.dll"
+
+verbose_none()
+{
+ do_none=
+}
+
+add_pdb()
+{
+ extension=$1
+ pdbext=$2
+ list=$3
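+ # $1 = extension of the binaries to scan (dll/exe/bin)
+ # $2 = extension of the matching PDB files (.pdb or .bin.pdb)
+ # $3 = list file collecting the paths that are fed to symstore.exe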
+ stats_notfound=0
+ stats_found=0
+ stats_morefound=0
+ declare -a pdball
+ echo "Collect $extension"
+ ret=$(find "${INSTDIR}/" -type f -name "*.${extension}" | grep -vF "$BLACK_LIST")
+ while IFS= read -r file
+ do
+ ${VERBOSE} -n "Found: $file"
+ # store dll/exe itself (needed for minidumps)
+ if [ $WITHEXEC == 1 ] ; then
+ cygpath -w "$file" >> "$list"
+ ${VERBOSE} " insert"
+ else
+ ${VERBOSE} " "
+ fi
+
+ # store pdb file
+ filename=$(basename "$file" ".${extension}")
+ pdball+=($(grep -i "/${filename}${pdbext}" <<< ${ALL_PDBS}))
+ if [ -n "${pdball[0]}" ]; then
+ cygpath -w "${pdball[0]}" >> "$list"
+ fi
+ case ${#pdball[@]} in
+ 0) ((++stats_notfound))
+ ${VERBOSE} " PDB not found"
+ ;;
+ 1) ((++stats_found))
+ ${VERBOSE} " ${pdball[0]} insert"
+ ;;
+ *) ((++stats_morefound))
+ if [ -z "$(echo $file | grep -F "$MOREPDBS_OKLIST")" ]; then
+ echo "Error: found duplicate PDBs:"
+ for morepdbs in ${pdball[@]} ; do
+ echo " $morepdbs"
+ done
+ exit 1
+ else
+ ${VERBOSE} " ${pdball[0]} insert (is in more okay list)"
+ fi
+ ;;
+ esac
+ unset pdball
+ done <<EOF
+${ret}
+EOF
+
+ echo " Found PDBs : $stats_found"
+ echo " Missing PDBs : $stats_notfound"
+ echo " Multiple PDBs : $stats_morefound"
+}
+
+# check preconditions
+if [ -z "${INSTDIR}" -o -z "${WORKDIR}" ]; then
+ echo "INSTDIR or WORKDIR not set - script expects calling inside buildenv"
+ exit 1
+fi
+if [ ! -d "${INSTDIR}" -o ! -d "${WORKDIR}" ]; then
+ echo "INSTDIR or WORKDIR not present - script expects calling after full build"
+ exit 1
+fi
+which symstore.exe > /dev/null 2>&1 || {
+ echo "symstore.exe is expected in the PATH"
+ exit 1
+}
+
+# defaults
+MAX_KEEP=5
+SYM_PATH=${WORKDIR}/symstore
+COMMENT=""
+COMCMD=""
+WITHEXEC=1
+VERBOSE=verbose_none
+
+USAGE="Usage: $0 [-h|-k <keep_num_versions>|-p <symbol_store_path>|-c <comment>|-n|-v]
+ -h: this cruft
+ -c <comment> specifies a comment for the transaction
+ -n do not store exe/dll on the symbol server
+ -k <int>: keep this number of old symbol versions around
+ (default: ${MAX_KEEP}. Set to 0 for unlimited)
+ -v verbose mode, output detail report of files
+ -p <path>: specify full path to symbol store tree
+If no path is specified, defaults to ${SYM_PATH}.
+"
+
+# process args
+while :
+do
+ case "$1" in
+ -k|--keep) MAX_KEEP="$2"; shift 2;;
+ -p|--path) SYM_PATH="$2"; shift 2;;
+ -c|--comment) COMCMD="/c"; COMMENT="$2"; shift 2;;
+ -n|--noexec) WITHEXEC=0; shift ;;
+ -v|--verbose) VERBOSE=echo; shift ;;
+ -h|--help) echo "${USAGE}"; exit 0;;
+ -*) echo "${USAGE}" >&2; exit 1;;
+ *) break;;
+ esac
+done
+
+if [ $# -gt 0 ]; then
+ echo "${USAGE}" >&2
+ exit 1
+fi
+
+# populate symbol store from here
+TMPFILE=$(mktemp) || exit 1
+trap '{ rm -f ${TMPFILE}; }' EXIT
+
+# collect all PDBs
+ALL_PDBS=$(find "${WORKDIR}/" -type f -name "*.pdb")
+
+# add dlls and executables
+add_pdb dll .pdb "${TMPFILE}"
+add_pdb exe .pdb "${TMPFILE}"
+add_pdb bin .bin.pdb "${TMPFILE}"
+
+# stick all of it into symbol store
+symstore.exe add /compress /f "@$(cygpath -w "${TMPFILE}")" /s "$(cygpath -w "${SYM_PATH}")" /t "${PRODUCTNAME}" /v "${LIBO_VERSION_MAJOR}.${LIBO_VERSION_MINOR}.${LIBO_VERSION_MICRO}.${LIBO_VERSION_PATCH}${LIBO_VERSION_SUFFIX}${LIBO_VERSION_SUFFIX_SUFFIX}" "${COMCMD}" "${COMMENT}"
+rm -f "${TMPFILE}"
+
+# Cleanup symstore, older revisions will be removed. Unless the
+# .dll/.exe changes, the .pdb should be shared, so with incremental
+# tinderbox several revisions should not be that space-demanding.
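+# 000Admin holds one numbered entry per symstore transaction; its
+# .txt/.deleted files are bookkeeping and are skipped below.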
+if [ "${MAX_KEEP}" -gt 0 -a -d "${SYM_PATH}/000Admin" ]; then
+ to_remove=$(ls -1 "${SYM_PATH}/000Admin" | grep -v '\.txt' | grep -v '\.deleted' | sort | head -n "-${MAX_KEEP}")
+ for revision in $to_remove; do
+ echo "Remove $revision from symstore"
+ symstore.exe del /i "${revision}" /s "$(cygpath -w "${SYM_PATH}")"
+ done
+fi
diff --git a/bin/test-hid-vs-ui.py b/bin/test-hid-vs-ui.py
new file mode 100755
index 000000000..635a121ad
--- /dev/null
+++ b/bin/test-hid-vs-ui.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, you can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# Parses all help files (.xhp) to check that hids referencing .ui are up-to-date
+# From fdo#67350
+
+
+import sys
+import argparse
+import os
+import subprocess
+import xml.etree.ElementTree as ET
+import collections
+import re
+import smtplib
+import email
+import email.mime.text
+import time
+import datetime
+
+# retrieve all hids related to .ui files
+def init_hids():
+ global args, local_repo
+ if local_repo:
+ repo_dir = os.path.join(core_repo_dir,'helpcontent2')
+ os.chdir(repo_dir)
+ return subprocess.check_output(['git','grep','hid="[^"]*/[^"]*">','.'])
+ else:
+ repo_dir = '/var/tmp/help.git'
+ if not os.path.exists(repo_dir):os.makedirs(repo_dir)
+ os.chdir(repo_dir)
+
+ if not os.path.exists(os.path.join(repo_dir,'config')):
+ subprocess.call(['git','clone','--bare','git://gerrit.libreoffice.org/help',repo_dir])
+ elif not args['git_static']:
+ subprocess.call(['git','fetch','origin'])
+ return subprocess.check_output(['git','grep','hid="[^"]*/[^"]*">','master','--'])
+
+# retrieve .ui files list from the core
+def init_core_files():
+ global core_repo_dir, local_repo
+ core_repo_dir = args['core_repo_dir']
+ if core_repo_dir is None:
+ core_repo_dir = os.path.dirname(os.path.abspath(os.path.dirname(sys.argv[0])))
+ local_repo = True
+
+ if not os.path.exists(core_repo_dir):os.makedirs(core_repo_dir)
+ os.chdir(core_repo_dir)
+
+ if not os.path.exists(os.path.join(core_repo_dir,'.git')):
+ subprocess.call(['git','clone','git://gerrit.libreoffice.org/core',core_repo_dir])
+ elif not args['git_static']:
+ subprocess.call(['git','fetch','origin'])
+ allfiles = subprocess.check_output(['git','ls-tree','--name-only','--full-name','-r','master'])
+ return re.findall('.*\.ui',allfiles)
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser('hid for ui consistency parser')
+ parser.add_argument('-s', '--send-to', action='append', help='email address to send the report to. Use one flag per address.', required=False)
+ parser.add_argument('-g', '--git-static', action='store_true', help='to avoid contacting remote server to refresh repositories.', required=False)
+ parser.add_argument('-r', '--core-repo-dir', help='enforce path to core repository when analyzing .ui files.', required=False)
+ args=vars(parser.parse_args())
+
+ uifileslist = init_core_files() # run early so we know whether a local core repo is used
+
+ rows = init_hids().splitlines()
+ #<tree>:<relative_file>:<text>
+ # handled as sets to remove duplicates (and we don't need an iterator)
+ targets = collections.defaultdict(set)
+ origin = collections.defaultdict(set)
+
+ # fill all matching hids and their parent file
+ for row in rows:
+ fname, rawtext = row.split(':',1)
+ hid = rawtext.split('hid="')[1].split('"')[0]
+ if hid.startswith('.uno'): continue
+ uifileraw, compname = hid.rsplit('/',1)
+ uifile = uifileraw + ".ui"
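+ # e.g. (illustrative) hid="modules/scalc/ui/sortdialog/SortDialog" yields
+ # uifile="modules/scalc/ui/sortdialog.ui" and compname="SortDialog"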
+ # map modules/ etc, which exist only in install
+ # back to their source location
+ if uifile.startswith("modules/scalc"):
+ uifile = "sc/scalc" + uifile[13:]
+ elif uifile.startswith("modules/swriter"):
+ uifile = "sw/swriter" + uifile[15:]
+ elif uifile.startswith("modules/schart"):
+ uifile = "chart2" + uifile[14:]
+ elif uifile.startswith("modules/smath"):
+ uifile = "starmath/smath" + uifile[13:]
+ elif uifile.startswith("modules/sdraw"):
+ uifile = "sd/sdraw" + uifile[13:]
+ elif uifile.startswith("modules/simpress"):
+ uifile = "sd/simpress" + uifile[16:]
+ elif uifile.startswith("modules/BasicIDE"):
+ uifile = "basctl/basicide" + uifile[16:]
+ elif uifile.startswith("modules/sabpilot"):
+ uifile = "extensions/sabpilot" + uifile[16:]
+ elif uifile.startswith("modules/sbibliography"):
+ uifile = "extensions/sbibliography" + uifile[21:]
+ elif uifile.startswith("modules/scanner"):
+ uifile = "extensions/scanner" + uifile[15:]
+ elif uifile.startswith("modules/spropctrlr"):
+ uifile = "extensions/spropctrlr" + uifile[18:]
+ elif uifile.startswith("sfx"):
+ uifile = "sfx2" + uifile[3:]
+ elif uifile.startswith("svt"):
+ uifile = "svtools" + uifile[3:]
+ elif uifile.startswith("fps"):
+ uifile = "fpicker" + uifile[3:]
+ components = uifile.split('/',1)
+ uifile = components[0] + '/uiconfig/' + components[1]
+ targets[uifile].add(compname.split(':')[0])
+ origin[uifile].add(fname) # help file(s)
+
+ errors = ''
+ # search in all .ui files referenced in help
+ # 2 possible errors: file not found in repo, id not found in file
+ for uikey in dict.keys(targets):
+ if uikey not in uifileslist:
+ if len(origin[uikey]) == 1:
+ errors += '\nFrom ' + origin[uikey].pop()
+ else:
+ errors += '\nFrom one of ' + str(origin[uikey]).replace('set(','').replace(')','')
+ errors += ', we did not find file '+ uikey+'.'
+ continue
+
+ full_path = os.path.join(core_repo_dir,uikey)
+ # print full_path
+ root = ET.parse(full_path).getroot()
+ ids = [element.attrib['id'].split(':')[0] for element in root.findall('.//object[@id]')]
+ # print targets[uikey]
+ missing_ids = [ element for element in targets[uikey] if element not in ids ]
+ if missing_ids:
+ if len(origin[uikey]) == 1:
+ errors += '\nFrom ' + origin[uikey].pop()
+ else:
+ errors += '\nFrom one of ' + str(origin[uikey]).replace('set(','').replace(')','')
+ errors += ', referenced items '+ str(missing_ids) + ' were not found inside '+ uikey+'.'
+
+ if not errors:
+ errors = '\nall is clean\n'
+
+ if args['send_to']:
+ msg_from = os.path.basename(sys.argv[0]) + '@libreoffice.org'
+ if isinstance(args['send_to'], basestring):
+ msg_to = [args['send_to']]
+ else:
+ msg_to = args['send_to']
+ print "send to array " + msg_to[0]
+
+ server = smtplib.SMTP('localhost')
+ body = '''
+Hello,
+
+Here is the report for wrong hids from help related to .ui files
+
+'''
+ body += errors
+ body += '''
+
+Best,
+
+Your friendly LibreOffice Help-ids Checker
+
+Note: The bot generating this message can be found and improved here:
+ https://gerrit.libreoffice.org/gitweb?p=dev-tools.git;a=blob;f=scripts/test-hid-vs-ui.py'''
+ now = datetime.datetime.now()
+ msg = email.mime.text.MIMEText(body, 'plain', 'UTF-8')
+ msg['From'] = msg_from
+ msg['To'] = msg_to[0]
+ msg['Cc'] = ', '.join(msg_to[1:]) # empty unless there is more than one recipient
+ msg['Date'] = email.utils.formatdate(time.mktime(now.timetuple()))
+ msg['Subject'] = 'LibreOffice help-ids vs .ui consistency report on %s' % (now.date().isoformat())
+ msg['Reply-To'] = msg_to[0]
+ msg['X-Mailer'] = 'LibreOfficeGerritDigestMailer 1.1'
+
+ server.sendmail(msg_from, msg_to, str(msg))
+ else:
+ print errors
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/text_cat/COPYING b/bin/text_cat/COPYING
new file mode 100644
index 000000000..5ab7695ab
--- /dev/null
+++ b/bin/text_cat/COPYING
@@ -0,0 +1,504 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the library's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/bin/text_cat/Copyright b/bin/text_cat/Copyright
new file mode 100644
index 000000000..c1e75d3af
--- /dev/null
+++ b/bin/text_cat/Copyright
@@ -0,0 +1,21 @@
+Copyright (c) 1994, 1995, 1996, 1997 by Gertjan van Noord.
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the
+ Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston,
+ MA 02110-1301 USA
+
+cf. the file COPYING
+
+
diff --git a/bin/text_cat/LM/english.lm b/bin/text_cat/LM/english.lm
new file mode 100644
index 000000000..ab71632c6
--- /dev/null
+++ b/bin/text_cat/LM/english.lm
@@ -0,0 +1,400 @@
+_ 20326
+e 6617
+t 4843
+o 3834
+n 3653
+i 3602
+a 3433
+s 2945
+r 2921
+h 2507
+e_ 2000
+d 1816
+_t 1785
+c 1639
+l 1635
+th 1535
+he 1351
+_th 1333
+u 1309
+f 1253
+m 1175
+p 1151
+_a 1145
+the 1142
+_the 1060
+s_ 978
+er 968
+_o 967
+he_ 928
+d_ 888
+t_ 885
+the_ 844
+_the_ 843
+on 842
+in 817
+y 783
+n_ 773
+b 761
+re 754
+, 734
+,_ 732
+an 732
+g 728
+w 718
+_i 707
+en 676
+f_ 599
+y_ 595
+of 594
+_of 592
+es 589
+ti 587
+v 580
+_of_ 575
+of_ 575
+nd 568
+at 549
+r_ 540
+_w 534
+it 522
+ed 496
+_p 494
+nt 485
+_c 462
+o_ 457
+io 450
+_an 439
+te 432
+or 425
+_b 418
+nd_ 407
+to 406
+st 402
+is 401
+_s 396
+_in 389
+ion 385
+and 385
+de 384
+ve 382
+ha 375
+ar 366
+_m 361
+and_ 360
+_and 360
+_and_ 358
+se 353
+_to 347
+me 346
+to_ 344
+ed_ 339
+. 330
+be 329
+_f 329
+._ 329
+_to_ 320
+co 317
+ic 316
+ns 308
+al 307
+le 304
+ou 304
+ce 293
+ent 279
+l_ 278
+_co 277
+tio 275
+on_ 274
+_d 274
+tion 268
+ri 266
+_e 264
+ng 253
+hi 251
+er_ 249
+ea 246
+as 245
+_be 242
+pe 242
+h_ 234
+_r 232
+ec 227
+ch 223
+ro 222
+ct 220
+_h 219
+pr 217
+in_ 217
+ne 214
+ll 214
+rt 213
+s,_ 210
+s, 210
+li 209
+ra 208
+T 207
+wh 204
+a_ 203
+ac 201
+_wh 199
+_n 196
+ts 196
+di 196
+es_ 195
+si 194
+re_ 193
+at_ 192
+nc 192
+ie 190
+_a_ 188
+_in_ 185
+ing 184
+us 182
+_re 182
+g_ 179
+ng_ 178
+op 178
+con 177
+tha 175
+_l 174
+_tha 174
+ver 173
+ma 173
+ion_ 171
+_con 171
+ci 170
+ons 170
+_it 170
+po 169
+ere 168
+is_ 167
+ta 167
+la 166
+_pr 165
+fo 164
+ho 164
+ir 162
+ss 161
+men 160
+be_ 160
+un 159
+ty 159
+_be_ 158
+ing_ 157
+om 156
+ot 156
+hat 155
+ly 155
+_g 155
+em 153
+_T 151
+rs 150
+mo 148
+ch_ 148
+wi 147
+we 147
+ad 147
+ts_ 145
+res 143
+_wi 143
+I 143
+hat_ 142
+ei 141
+ly_ 141
+ni 140
+os 140
+ca 139
+ur 139
+A 138
+ut 138
+that 138
+_that 137
+ati 137
+_fo 137
+st_ 137
+il 136
+or_ 136
+for 136
+pa 136
+ul 135
+ate 135
+ter 134
+it_ 134
+nt_ 133
+that_ 132
+_ha 129
+al_ 128
+el 128
+as_ 127
+ll_ 127
+_ma 125
+no 124
+ment 124
+an_ 124
+tion_ 122
+su 122
+bl 122
+_de 122
+nce 120
+pl 120
+fe 119
+tr 118
+so 118
+int 115
+ov 114
+e, 114
+e,_ 114
+_u 113
+ent_ 113
+Th 113
+her 113
+j 112
+atio 112
+ation 112
+_Th 111
+le_ 110
+ai 110
+_it_ 110
+_on 110
+_for 109
+ect 109
+k 109
+hic 108
+est 108
+der 107
+tu 107
+na 106
+_by_ 106
+by_ 106
+E 106
+by 106
+_by 106
+ve_ 106
+_di 106
+en_ 104
+vi 104
+m_ 103
+_whi 102
+iv 102
+whi 102
+ns_ 102
+_A 101
+ich 100
+ge 100
+pro 99
+ess 99
+_whic 99
+ers 99
+hich 99
+ce_ 99
+which 99
+whic 99
+all 98
+ove 98
+_is 98
+ich_ 97
+ee 97
+hich_ 97
+n,_ 96
+n, 96
+im 95
+ir_ 94
+hei 94
+ions 94
+sti 94
+se_ 94
+per 93
+The 93
+_pa 93
+heir 93
+id 93
+eir 93
+eir_ 93
+ig 93
+heir_ 93
+_no 93
+ev 93
+era 92
+_int 92
+ted 91
+_The 91
+ies 91
+art 91
+thei 90
+_ar 90
+_thei 90
+their 90
+_pro 90
+et 89
+_pe 88
+_mo 88
+ther 88
+x 87
+gh 87
+S 87
+_is_ 87
+ol 87
+ty_ 87
+_I 86
+nde 86
+am 86
+rn 86
+nte 86
+mp 85
+_su 84
+_we 84
+par 84
+_v 84
+pu 82
+his 82
+ow 82
+mi 82
+go 81
+N 81
+ue 81
+ple 81
+ep 80
+ab 80
+;_ 80
+; 80
+ex 80
+ain 80
+over 80
+_un 79
+q 79
+qu 79
+pp 79
+ith 79
+ry 79
+_as 79
+ber 79
+ub 78
+av 78
+uc 78
+s._ 77
+s. 77
+enc 77
+are 77
+iti 77
+gr 76
+his_ 76
+ua 76
+part 76
+ff 75
+eve 75
+O 75
+rea 74
+ous 74
+ia 74
+The_ 73
+ag 73
+mb 73
+_go 73
+fa 72
+on,_ 72
+ern 72
+t,_ 72
+on, 72
+t, 72
+_me 71
diff --git a/bin/text_cat/LM/german.lm b/bin/text_cat/LM/german.lm
new file mode 100644
index 000000000..6f14f51ef
--- /dev/null
+++ b/bin/text_cat/LM/german.lm
@@ -0,0 +1,400 @@
+_ 31586
+e 15008
+n 9058
+i 7299
+r 6830
+t 5662
+s 5348
+a 4618
+h 4176
+d 4011
+er 3415
+en 3412
+u 3341
+l 3266
+n_ 2848
+c 2636
+ch 2460
+g 2407
+o 2376
+e_ 2208
+r_ 2128
+m 2077
+_d 1948
+de 1831
+en_ 1786
+ei 1718
+er_ 1570
+in 1568
+te 1505
+ie 1505
+b 1458
+t_ 1425
+f 1306
+k 1176
+ge 1144
+s_ 1137
+un 1113
+, 1104
+,_ 1099
+w 1099
+z 1060
+nd 1039
+he 1004
+st 989
+_s 952
+_de 949
+. 909
+_e 906
+ne 906
+der 880
+._ 847
+be 841
+es 829
+ic 796
+_a 791
+ie_ 779
+is 769
+ich 763
+an 755
+re 749
+di 732
+ein 730
+se 730
+" 720
+ng 709
+_i 706
+sc 683
+sch 681
+it 673
+der_ 652
+h_ 651
+ch_ 642
+S 630
+le 609
+p 609
+ä 607
+ü 603
+au 603
+v 602
+che 599
+_w 596
+d_ 585
+die 576
+_di 572
+m_ 562
+_die 559
+el 548
+_S 540
+_der 529
+li 527
+_der_ 523
+si 515
+al 514
+ns 507
+on 501
+or 495
+ti 490
+ten 487
+ht 486
+die_ 485
+_die_ 483
+D 479
+rt 478
+nd_ 476
+_u 470
+nt 468
+A 466
+in_ 464
+den 461
+cht 447
+und 443
+me 440
+_z 429
+ung 426
+ll 423
+_un 421
+_ei 419
+_n 415
+hr 412
+ine 412
+_A 408
+_ein 405
+ar 404
+ra 403
+_v 400
+_g 400
+as 395
+zu 392
+et 389
+em 385
+_D 380
+eine 376
+gen 376
+g_ 376
+da 368
+we 366
+K 365
+lt 360
+B 354
+_" 353
+nde 349
+ni 347
+und_ 345
+E 345
+ur 345
+_m 342
+ri 341
+ha 340
+eh 339
+ten_ 338
+es_ 336
+_K 336
+_und 335
+ig 335
+_b 335
+hen 334
+_und_ 332
+_au 329
+_B 327
+_da 325
+_zu 324
+_in 322
+at 321
+us 318
+wi 307
+n, 305
+n,_ 304
+nn 304
+te_ 301
+eit 301
+_h 300
+ter 299
+M 298
+n. 295
+ß 294
+ng_ 289
+sche 289
+- 283
+rs 282
+den_ 282
+_si 280
+G 280
+im 278
+_ge 277
+chen 276
+rd 273
+_E 273
+n._ 270
+icht 270
+rn 268
+uf 267
+isch 264
+isc 264
+nen 263
+_in_ 262
+_M 260
+_er 257
+ich_ 255
+ac 253
+lic 252
+_G 252
+ber 252
+la 251
+vo 251
+eb 250
+ke 249
+F 248
+as_ 248
+hen_ 248
+ach 245
+en, 244
+ung_ 243
+lich 243
+ste 243
+en,_ 243
+_k 241
+ben 241
+_f 241
+en. 241
+_be 239
+it_ 239
+L 238
+_se 237
+mi 236
+ve 236
+na 236
+on_ 236
+P 235
+ss 234
+ist 234
+ö 234
+ht_ 233
+ru 233
+st_ 229
+_F 229
+ts 227
+ab 226
+W 226
+ol 225
+_eine 225
+hi 225
+so 224
+em_ 223
+"_ 223
+ren 222
+en._ 221
+chen_ 221
+R 221
+ta 221
+ere 220
+ische 219
+ers 218
+ert 217
+_P 217
+tr 217
+ed 215
+ze 215
+eg 215
+ens 215
+ür 213
+ah 212
+_vo 212
+ne_ 211
+cht_ 210
+uc 209
+_wi 209
+nge 208
+lle 208
+fe 207
+_L 207
+ver 206
+hl 205
+V 204
+ma 203
+wa 203
+auf 201
+H 198
+_W 195
+T 195
+nte 193
+uch 193
+l_ 192
+sei 192
+nen_ 190
+u_ 189
+_den 189
+_al 189
+_V 188
+t. 188
+lte 187
+ut 186
+ent 184
+sich 183
+sic 183
+il 183
+ier 182
+am 181
+gen_ 180
+sen 179
+fü 178
+um 178
+t._ 177
+f_ 174
+he_ 174
+ner 174
+nst 174
+ls 174
+_sei 173
+ro 173
+ir 173
+ebe 173
+mm 173
+ag 172
+ern 169
+t,_ 169
+t, 169
+eu 169
+ft 168
+icht_ 167
+hre 167
+Be 166
+nz 165
+nder 165
+_T 164
+_den_ 164
+iche 163
+tt 163
+zu_ 162
+and 162
+J 161
+rde 160
+rei 160
+_we 159
+_H 159
+ige 159
+_Be 158
+rte 157
+hei 156
+das 155
+aus 155
+che_ 154
+_das 154
+_zu_ 154
+tz 154
+_ni 153
+das_ 153
+_R 153
+N 153
+des 153
+_ve 153
+_J 152
+I 152
+_das_ 152
+men 151
+_so 151
+_ver 151
+_auf 150
+ine_ 150
+_ha 150
+rg 149
+ind 148
+eben 148
+kt 147
+mit 147
+_an 147
+her 146
+Ge 146
+Sc 145
+_sich 145
+U 145
+Sch 145
+_sic 145
+end 145
+Di 144
+abe 143
+ck 143
+sse 142
+ür_ 142
+ell 142
+ik 141
+o_ 141
+nic 141
+nich 141
+sa 141
+_fü 140
+hn 140
+zi 140
+no 140
+nicht 140
+im_ 139
+von_ 139
+von 139
+_nic 139
+_nich 139
+eine_ 139
+oc 138
+wei 138
+io 138
+schen 138
+gt 138
diff --git a/bin/text_cat/text_cat b/bin/text_cat/text_cat
new file mode 100755
index 000000000..74dae861d
--- /dev/null
+++ b/bin/text_cat/text_cat
@@ -0,0 +1,242 @@
+#!/usr/bin/perl -w
+# © Gertjan van Noord, 1997.
+# mailto:vannoord@let.rug.nl
+
+use strict;
+use vars qw($opt_d $opt_f $opt_h $opt_i $opt_l $opt_n $opt_s $opt_t $opt_v $opt_u $opt_a);
+use Getopt::Std;
+use Benchmark;
+
+my $non_word_characters='0-9\s';
+my @languages; # languages (sorted by name)
+my %ngram_for; # map language x ngram => rank
+
+# OPTIONS
+getopts('a:d:f:hi:lnst:u:v');
+
+# defaults: set $opt_X unless already defined (Perl Cookbook p. 6):
+$opt_a ||= 10;
+$opt_d ||= '/users1/vannoord/Perl/TextCat/LM';
+$opt_f ||= 0;
+$opt_t ||= 400;
+$opt_u ||= 1.05;
+
+$| = 1; # auto-flush stdout
+
+sub help {
+ print <<HELP
+Text Categorization. Typically used to determine the language of a
+given document.
+
+Usage
+-----
+
+* print help message:
+
+$0 -h
+
+* for guessing:
+
+$0 [-a Int] [-d Dir] [-f Int] [-i N] [-l] [-t Int] [-u Int] [-v]
+
+ -a the program returns the best-scoring language together
+ with all languages which are $opt_u times worse (cf option -u).
+ If the number of languages to be printed is larger than the value
+ of this option (default: $opt_a) then no language is returned, but
+ instead a message that the input is of an unknown language is
+ printed. Default: $opt_a.
+ -d indicates in which directory the language models are
+ located (files ending in .lm). Currently only a single
+ directory is supported. Default: $opt_d.
+ -f Before sorting is performed, the n-grams which occur this number
+ of times or less are removed. This can be used to speed up
+ the program for longer inputs. For short inputs you should use
+ -f 0.
+ Default: $opt_f.
+ -i N only read first N lines
+ -l indicates that input is given as an argument on the command line,
+ e.g. text_cat -l "this is english text"
+ Cannot be used in combination with -n.
+ -s Determine the language of each line of input
+ (language models are loaded once and reused for every line).
+ -t indicates the topmost number of ngrams that should be used.
+ If used in combination with -n this determines the size of the
+ output. If used with categorization this determines
+ the number of ngrams that are compared with each of the language
+ models (but each of those models is used completely).
+ -u determines how much worse result must be in order not to be
+ mentioned as an alternative. Typical value: 1.05 or 1.1.
+ Default: $opt_u.
+ -v verbose. Continuation messages are written to standard error.
+
+* for creating new language model, based on text read from standard input:
+
+$0 -n [-v]
+
+ -v verbose. Continuation messages are written to standard error.
+
+
+HELP
+}
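+
+# Example invocations (illustrative; assumes language models in ./LM):
+#   text_cat -d ./LM < document.txt          # guess the language of a file
+#   text_cat -l "this is english text"       # classify a command-line string
+#   text_cat -n < corpus.txt > my.lm         # build a new language model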
+
+if ($opt_h) { help(); exit 0; };
+
+if ($opt_n) {
+ my %ngram=();
+ my @result = create_lm(input(),\%ngram);
+ print join("\n",map { "$_\t $ngram{$_}" ; } @result),"\n";
+} elsif ($opt_l) {
+ classify($ARGV[0]);
+} elsif ($opt_s) {
+ while (<>) {
+ chomp;
+ classify($_);
+ }
+} else {
+ classify(input());
+}
+
+sub read_model {
+ my ($file) = @_;
+ open(LM,"$file") or die "cannot open $file: $!\n";
+ my %ngram;
+ my $rang = 1;
+ while (<LM>) {
+ chomp;
+ # only use lines starting with appropriate character. Others are
+ # ignored.
+ if (/^[^$non_word_characters]+/o) {
+ $ngram{$&} = $rang++;
+ }
+ }
+ return \%ngram;
+}
+
+sub read_models {
+ # open directory to find which languages are supported
+ opendir DIR, "$opt_d" or die "directory $opt_d: $!\n";
+ @languages = sort(grep { s/\.lm// && -r "$opt_d/$_.lm" } readdir(DIR));
+ closedir DIR;
+ @languages or die "sorry, can't read any language models from $opt_d\n" .
+ "language models must reside in files with .lm ending\n";
+
+ foreach my $language (@languages) {
+ $ngram_for{$language} = read_model("$opt_d/$language.lm");
+ }
+}
+
+# CLASSIFICATION
+sub classify {
+ my ($input)=@_;
+ my %results=();
+ my $maxp = $opt_t;
+ read_models() if !@languages;
+
+ # create ngrams for input. Note that hash %unknown is not used;
+ # it contains the actual counts which are only used under -n: creating
+ # new language model (and even then they are not really required).
+ my @unknown=create_lm($input);
+
+ my $t1 = new Benchmark;
+ foreach my $language (@languages) {
+ # compares the language model with input ngrams list
+ my $ngram = $ngram_for{$language} or die "no ngrams for $language";
+
+ my ($i,$p)=(0,0);
+ while ($i < @unknown) {
+ if ($ngram->{$unknown[$i]}) {
+ $p=$p+abs($ngram->{$unknown[$i]}-$i);
+ } else {
+ $p=$p+$maxp;
+ }
+ ++$i;
+ }
+ #print STDERR "$language: $p\n" if $opt_v;
+
+ $results{$language} = $p;
+ }
+ print STDERR "read language models done (" .
+ timestr(timediff(new Benchmark, $t1)) .
+ ".\n" if $opt_v;
+ my @results = sort { $results{$a} <=> $results{$b} } keys %results;
+
+ print join("\n",map { "$_\t $results{$_}"; } @results),"\n" if $opt_v;
+ my $a = $results{$results[0]};
+
+ my @answers=(shift(@results));
+ while (@results && $results{$results[0]} < ($opt_u *$a)) {
+ @answers=(@answers,shift(@results));
+ }
+ if (@answers > $opt_a) {
+ print "I don't know; " .
+ "Perhaps this is a language I haven't seen before?\n";
+ } else {
+ print join(" or ", @answers), "\n";
+ }
+}
+
+# reads the input document from standard input (or, with -i N, only its
+# first N lines) and returns it as a single string.
+sub input {
+ my $read="";
+ if ($opt_i) {
+ while(<>) {
+ if ($. == $opt_i) {
+ return $read . $_;
+ }
+ $read = $read . $_;
+ }
+ return $read;
+ } else {
+ local $/; # so it doesn't affect $/ elsewhere
+ undef $/;
+ $read = <>; # swallow input.
+ $read || die "determining the language of an empty file is hard...\n";
+ return $read;
+ }
+}
+
+
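+# create_lm: the first argument is the text, the optional second argument is
+# a reference to a hash that is filled with ngram counts. Each word is padded
+# with '_' and contributes all its substrings of length 1 to 5; ngrams
+# occurring at most $opt_f times are dropped, and the $opt_t most frequent
+# ngrams are returned, most frequent first.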
+sub create_lm {
+ my $t1 = new Benchmark;
+ my $ngram;
+ ($_,$ngram) = @_; #$ngram contains reference to the hash we build
+ # then add the ngrams found in each word in the hash
+ my $word;
+ foreach $word (split("[$non_word_characters]+")) {
+ $word = "_" . $word . "_";
+ my $len = length($word);
+ my $flen=$len;
+ my $i;
+ for ($i=0;$i<$flen;$i++) {
+ $$ngram{substr($word,$i,5)}++ if $len > 4;
+ $$ngram{substr($word,$i,4)}++ if $len > 3;
+ $$ngram{substr($word,$i,3)}++ if $len > 2;
+ $$ngram{substr($word,$i,2)}++ if $len > 1;
+ $$ngram{substr($word,$i,1)}++;
+ $len--;
+ }
+ }
+ ###print "@{[%$ngram]}";
+ my $t2 = new Benchmark;
+ print STDERR "count_ngrams done (".
+ timestr(timediff($t2, $t1)) .").\n" if $opt_v;
+
+ # as suggested by Karel P. de Vos, k.vos@elsevier.nl, we speed up
+ # sorting by removing singletons
+ map { my $key=$_; if ($$ngram{$key} <= $opt_f)
+ { delete $$ngram{$key}; }; } keys %$ngram;
+ # however, this gives very bad results for short inputs
+
+
+ # sort the ngrams, and spit out the $opt_t frequent ones.
+ # adding `or $a cmp $b' in the sort block makes sorting five
+ # times slower..., although it would be somewhat nicer (unique result)
+ my @sorted = sort { $$ngram{$b} <=> $$ngram{$a} } keys %$ngram;
+ splice(@sorted,$opt_t) if (@sorted > $opt_t);
+ print STDERR "sorting done (" .
+ timestr(timediff(new Benchmark, $t2)) .
+ ").\n" if $opt_v;
+ return @sorted;
+}
diff --git a/bin/text_cat/version b/bin/text_cat/version
new file mode 100644
index 000000000..e6ba9d571
--- /dev/null
+++ b/bin/text_cat/version
@@ -0,0 +1,2 @@
+1.10
+
diff --git a/bin/ui-checkdomain.sh b/bin/ui-checkdomain.sh
new file mode 100755
index 000000000..30e0c5b0f
--- /dev/null
+++ b/bin/ui-checkdomain.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script finds .ui files with incorrect translation domain set
+# and prints the domain, the file name and the expected domain
+# See also the discussion at https://gerrit.libreoffice.org/#/c/72973/
+
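+# A .ui file with a correct domain contains a line like:
+#   <interface domain="sw">        (for a file under sw/uiconfig/)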
+declare -A modules
+
+# List of modules with .ui files and their expected translation domain
+modules+=( \
+ [basctl]=basctl \
+ [chart2]=chart \
+ [cui]=cui \
+ [dbaccess]=dba \
+ [desktop]=dkt \
+ [editeng]=editeng \
+ [extensions]=pcr \
+ [filter]=flt \
+ [formula]="for" \
+ [fpicker]=fps \
+ [framework]=fwk \
+ [reportdesign]=rpt \
+ [sc]=sc \
+ [sd]=sd \
+ [sfx2]=sfx \
+ [starmath]=sm \
+ [svtools]=svt \
+ [svx]=svx \
+ [sw]=sw \
+ [uui]=uui \
+ [vcl]=vcl \
+ [writerperfect]=wpt \
+ [xmlsecurity]=xsc \
+)
+
+# Iterate the keys, i.e. modules with a uiconfig subdir
+for key in ${!modules[@]}; do
+ # Enumerate all .ui files in each module
+ for uifile in $(git ls-files ${key}/uiconfig/*\.ui); do
+ # Check that they contain the expected domain in double quotation marks, print the line if they don't
+ grep "\<interface domain=" $uifile | grep -v "\"${modules[${key}]}\"";
+ if [ "$?" -eq 0 ] ;
+ # Report the file name and the expected domain
+ then echo "^Problematic interface domain in file: $uifile ; should be: "${modules[${key}]}"";
+ fi
+ done
+done
diff --git a/bin/ui-translatable.sh b/bin/ui-translatable.sh
new file mode 100755
index 000000000..d8188778b
--- /dev/null
+++ b/bin/ui-translatable.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script prints lines from .ui files, where the translatable="yes" attribute
+# was not set -- presumably by mistake. It prints a few false positives though.
+
+for i in `git ls-files *.ui`; do
+ for j in "label" "title" "text" "format" "copyright" "comments" "preview_text" "tooltip" "message" ; do
+ grep -s "\<property name\=\"$j\"" $i | grep -v "translatable\=\"yes" | grep -v "translatable\=\"no" | grep -v gtk\- | grep ">.*[A-Za-z].*<";
+ if [ "$?" -eq 0 ] ;
+ then echo "Source: $i^";
+ fi
+ done
+ grep -s "<item" $i | grep -v "translatable\=\"yes" | grep -v "translatable\=\"no" | grep ">.*[A-Za-z].*<";
+ if [ "$?" -eq 0 ] ;
+ then echo "Source: $i^";
+ fi
+done
diff --git a/bin/unpack-sources b/bin/unpack-sources
new file mode 100755
index 000000000..2408eda1e
--- /dev/null
+++ b/bin/unpack-sources
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+usage()
+{
+ echo "Helper script to unpack the LO source tarballs"
+ echo
+ echo "Usage: ${0##*/} [--help] start-dir tarball..."
+ echo
+ echo "Options:"
+ echo
+ echo " --help this help"
+ echo " start-dir path where the sources are unpacked (bootstrap directory)"
+ echo " tarball list of LO source tarball that need to be unpacked"
+}
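+
+# Example invocation (hypothetical paths):
+#   ./bin/unpack-sources ~/libreoffice-core libreoffice-7.0.4.tar.xz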
+
+start_dir=
+tarballs=
+
+while test -n "$1" ; do
+ case "$1" in
+ --help)
+ usage
+ exit 0;
+ ;;
+ --download)
+ download="yes"
+ ;;
+ -*)
+ echo "Error: unknown option: $1"
+ exit 1;
+ ;;
+ *)
+ if test -z "$start_dir" ; then
+ start_dir="$1"
+ else
+ tarballs="$tarballs $1"
+ fi
+ ;;
+ esac
+ shift
+done
+
+if test -z "$start_dir" ; then
+ echo "Error: Please, define where to unpack sources, try --help"
+fi
+
+if ! test -f $start_dir/Repository.mk ; then
+ echo "Error: $start_dir is not a valid LibreOffice core source directory"
+ exit 1;
+fi
+
+if test ! -f $start_dir/sources.ver -o -d $start_dir/.git ; then
+ echo "Warning: sources are from git and not from tarball"
+ echo " Do nothing."
+ exit 0;
+fi
+
+lo_src_dir="$start_dir/src"
+mkdir -p "$lo_src_dir"
+
+for tarball in $tarballs ; do
+ tarname=`basename $tarball | sed -e "s/\.tar\..*//"`
+ if test -d $lo_src_dir/$tarname ; then
+ echo "Warning: $lo_src_dir/$tarname already exists => skipping"
+ continue;
+ fi
+
+ echo "Unpacking $tarname..."
+ echo mkdir -p "$lo_src_dir/$tarname"
+ if ! mkdir -p "$lo_src_dir/$tarname" ; then
+ echo "Error: could not create directory $lo_src_dir/$tarname"
+ fi
+ echo tar -xf "$tarball" -C "$lo_src_dir/$tarname" --strip-components=1
+ if ! tar -xf "$tarball" -C "$lo_src_dir/$tarname" --strip-components=1; then
+ echo "Error: could not unpack $tarname"
+ exit 1
+ fi
+
+ # create symlinks for module directories; ignore git-hooks directory
+ for dir in `find "$lo_src_dir/$tarname" -mindepth 1 -maxdepth 1 -type d -path $lo_src_dir/$tarname/git-hooks -o -printf "$tarname/%f\n"` ; do
+ ln -sf "src/$dir" "$start_dir"
+ done
+done
diff --git a/bin/update/common.sh b/bin/update/common.sh
new file mode 100644
index 000000000..5bba576c7
--- /dev/null
+++ b/bin/update/common.sh
@@ -0,0 +1,222 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# Code shared by update packaging scripts.
+# Author: Darin Fisher
+#
+
+# -----------------------------------------------------------------------------
+# By default just assume that these tools exist on our path
+MAR=${MAR:-mar}
+BZIP2=${BZIP2:-bzip2}
+MBSDIFF=${MBSDIFF:-mbsdiff}
+
+# -----------------------------------------------------------------------------
+# Helper routines
+
+notice() {
+ echo "$*" 1>&2
+}
+
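+# Print the size in bytes of a file; field 5 of "ls -ln" output is the size.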
+get_file_size() {
+ info=($(ls -ln "$1"))
+ echo ${info[4]}
+}
+
+check_externals() {
+
+ # check whether we can call the mar executable
+ "$MAR" --version > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid mar executable in the path or in the MAR environment variable"
+ exit 1
+ fi
+
+ # check whether we can access the bzip2 executable
+ "$BZIP2" --help > /dev/null 2>&1
+ if [ $? != 0 ]; then
+ notice "Could not find a valid bzip2 executable in the PATH or in the BZIP2 environment variable"
+ exit 1
+ fi
+}
+
+copy_perm() {
+ reference="$1"
+ target="$2"
+
+ if [ -x "$reference" ]; then
+ chmod 0755 "$target"
+ else
+ chmod 0644 "$target"
+ fi
+}
+
+make_add_instruction() {
+ f="$1"
+ filev2="$2"
+ # The third param will be an empty string when a file add instruction is only
+ # needed in the version 2 manifest. This only happens when the file has an
+ # add-if-not instruction in the version 3 manifest. This is due to the
+ # precomplete file prior to the version 3 manifest having a remove instruction
+ # for this file so the file is removed before applying a complete update.
+ filev3="$3"
+
+ # Used to log to the console
+ if [ $4 ]; then
+ forced=" (forced)"
+ else
+ forced=
+ fi
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " add-if \"$testdir\" \"$f\""
+ echo "add-if \"$testdir\" \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add-if \"$testdir\" \"$f\"" >> $filev3
+ fi
+ else
+ notice " add \"$f\"$forced"
+ echo "add \"$f\"" >> $filev2
+ if [ ! $filev3 = "" ]; then
+ echo "add \"$f\"" >> $filev3
+ fi
+ fi
+}
+
+check_for_add_if_not_update() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "channel-prefs.js" -o \
+ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+check_for_add_to_manifestv2() {
+ add_if_not_file_chk="$1"
+
+ if [ `basename $add_if_not_file_chk` = "update-settings.ini" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+make_add_if_not_instruction() {
+ f="$1"
+ filev3="$2"
+
+ notice " add-if-not \"$f\" \"$f\""
+ echo "add-if-not \"$f\" \"$f\"" >> $filev3
+}
+
+make_patch_instruction() {
+ f="$1"
+ filev2="$2"
+ filev3="$3"
+
+ is_extension=$(echo "$f" | grep -c 'distribution/extensions/.*/')
+ if [ $is_extension = "1" ]; then
+ # Use the subdirectory of the extensions folder as the file to test
+ # before performing this add instruction.
+ testdir=$(echo "$f" | sed 's/\(.*distribution\/extensions\/[^\/]*\)\/.*/\1/')
+ notice " patch-if \"$testdir\" \"$f.patch\" \"$f\""
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch-if \"$testdir\" \"$f.patch\" \"$f\"" >> $filev3
+ else
+ notice " patch \"$f.patch\" \"$f\""
+ echo "patch \"$f.patch\" \"$f\"" >> $filev2
+ echo "patch \"$f.patch\" \"$f\"" >> $filev3
+ fi
+}
+
+append_remove_instructions() {
+ dir="$1"
+ filev2="$2"
+ filev3="$3"
+
+ if [ -f "$dir/removed-files" ]; then
+ listfile="$dir/removed-files"
+ elif [ -f "$dir/Contents/Resources/removed-files" ]; then
+ listfile="$dir/Contents/Resources/removed-files"
+ fi
+ if [ -n "$listfile" ]; then
+ # Map spaces to pipes so that we correctly handle filenames with spaces.
+ files=($(cat "$listfile" | tr " " "|" | sort -r))
+ num_files=${#files[*]}
+ for ((i=0; $i<$num_files; i=$i+1)); do
+ # Map pipes back to whitespace and remove carriage returns
+ f=$(echo ${files[$i]} | tr "|" " " | tr -d '\r')
+ # Trim whitespace
+ f=$(echo $f)
+ # Exclude blank lines.
+ if [ -n "$f" ]; then
+ # Exclude comments
+ if [ ! $(echo "$f" | grep -c '^#') = 1 ]; then
+ if [ $(echo "$f" | grep -c '\/$') = 1 ]; then
+ notice " rmdir \"$f\""
+ echo "rmdir \"$f\"" >> $filev2
+ echo "rmdir \"$f\"" >> $filev3
+ elif [ $(echo "$f" | grep -c '\/\*$') = 1 ]; then
+ # Remove the *
+ f=$(echo "$f" | sed -e 's:\*$::')
+ notice " rmrfdir \"$f\""
+ echo "rmrfdir \"$f\"" >> $filev2
+ echo "rmrfdir \"$f\"" >> $filev3
+ else
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $filev2
+ echo "remove \"$f\"" >> $filev3
+ fi
+ fi
+ fi
+ done
+ fi
+}
+
+# List all files in the current directory, stripping leading "./"
+# Pass a variable name and it will be filled as an array.
+list_files() {
+ count=0
+
+ find . -type f \
+ ! -name "update.manifest" \
+ ! -name "updatev2.manifest" \
+ ! -name "updatev3.manifest" \
+ ! -name "temp-dirlist" \
+ ! -name "temp-filelist" \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-filelist"
+ while read file; do
+ eval "${1}[$count]=\"$file\""
+ (( count++ ))
+ done < "temp-filelist"
+ rm "temp-filelist"
+}
+
+# List all directories in the current directory, stripping leading "./"
+list_dirs() {
+ count=0
+
+ find . -type d \
+ ! -name "." \
+ ! -name ".." \
+ | sed 's/\.\/\(.*\)/\1/' \
+ | sort -r > "temp-dirlist"
+ while read dir; do
+ eval "${1}[$count]=\"$dir\""
+ (( count++ ))
+ done < "temp-dirlist"
+ rm "temp-dirlist"
+}
diff --git a/bin/update/config.py b/bin/update/config.py
new file mode 100644
index 000000000..0bc60a07f
--- /dev/null
+++ b/bin/update/config.py
@@ -0,0 +1,28 @@
+
+import configparser
+import os
+
+class Config(object):
+
+ def __init__(self):
+ self.certificate_path = None
+ self.certificate_name = None
+ self.channel = None
+ self.base_url = None
+ self.upload_url = None
+ self.server_url = None
+
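+# parse_config() expects an INI file with an [Updater] section; a minimal
+# sketch of the expected layout (all values below are placeholders):
+#
+# [Updater]
+# base-url = https://example.com/update/
+# certificate-name = example-cert
+# certificate-path = /path/to/certificates
+# channel = daily
+# upload-url = uploader@example.com:/srv/updates/
+# ServerURL = https://example.com/
+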
+def parse_config(config_file):
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(config_file))
+
+ data = Config()
+ updater_data = config['Updater']
+ data.base_url = updater_data['base-url']
+ data.certificate_name = updater_data['certificate-name']
+ data.certificate_path = updater_data['certificate-path']
+ data.channel = updater_data['channel']
+ data.upload_url = updater_data['upload-url']
+ data.server_url = updater_data["ServerURL"]
+
+ return data
diff --git a/bin/update/create_build_config.py b/bin/update/create_build_config.py
new file mode 100755
index 000000000..7cc8ac4be
--- /dev/null
+++ b/bin/update/create_build_config.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python3
+
+import json
+import sys
+import os
+
+from config import parse_config
+
+from tools import replace_variables_in_string
+
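+# The URLs in the build config may contain $(...) placeholders (see
+# tools.replace_variables_in_string); fill them in for the complete entry,
+# the per-language entries and any partial update entries.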
+def update_all_url_entries(data, **kwargs):
+ data['complete']['url'] = replace_variables_in_string(data['complete']['url'], **kwargs)
+
+ if sys.platform != "cygwin":
+ for language in data['languages']:
+ language['complete']['url'] = replace_variables_in_string(language['complete']['url'], **kwargs)
+
+ if 'partials' in data:
+ for partial in data['partials']:
+ partial['file']['url'] = replace_variables_in_string(partial['file']['url'], **kwargs)
+
+ if sys.platform == "cygwin":
+ continue
+
+ for lang, lang_file in partial['languages'].items():
+ lang_file['url'] = replace_variables_in_string(lang_file['url'], **kwargs)
+
+def main(argv):
+ if len(argv) < 7:
+ print("Usage: create_build_config.py $PRODUCTNAME $VERSION $BUILDID $PLATFORM $TARGETDIR $UPDATE_CONFIG")
+ sys.exit(1)
+
+ config = parse_config(argv[6])
+
+ data = { 'productName' : argv[1],
+ 'version' : argv[2],
+ 'buildNumber' : argv[3],
+ 'updateChannel' : config.channel,
+ 'platform' : argv[4]
+ }
+
+ extra_data_files = ['complete_info.json', 'partial_update_info.json']
+ if sys.platform != "cygwin":
+ extra_data_files.append('complete_lang_info.json')
+
+ for extra_file in extra_data_files:
+ extra_file_path = os.path.join(argv[5], extra_file)
+ if not os.path.exists(extra_file_path):
+ continue
+ with open(extra_file_path, "r") as f:
+ extra_data = json.load(f)
+ data.update(extra_data)
+
+ update_all_url_entries(data, channel=config.channel, platform=argv[4], buildid=argv[3], version=argv[2])
+
+ with open(os.path.join(argv[5], "build_config.json"), "w") as f:
+ json.dump(data, f, indent=4)
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/bin/update/create_full_mar.py b/bin/update/create_full_mar.py
new file mode 100755
index 000000000..48686be21
--- /dev/null
+++ b/bin/update/create_full_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info, make_complete_mar_name
+from config import parse_config
+from signing import sign_mar_file
+from path import UpdaterPath, convert_to_unix, convert_to_native
+
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+def main():
+ if len(sys.argv) < 5:
+ print("Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ if len(update_config) == 0:
+ print("missing update config")
+ sys.exit(1)
+
+ update_path = UpdaterPath(workdir)
+ update_path.ensure_dir_exist()
+
+ target_dir = update_path.get_update_dir()
+ temp_dir = update_path.get_current_build_dir()
+
+ config = parse_config(update_config)
+
+ tar_dir = os.path.join(update_path.get_workdir(), "installation", product_name, "archive", "install", "en-US")
+ tar_file = os.path.join(tar_dir, os.listdir(tar_dir)[0])
+
+ uncompress_dir = uncompress_file_to_dir(tar_file, temp_dir)
+
+ mar_file = make_complete_mar_name(target_dir, filename_prefix)
+ path = os.path.join(current_dir_path, 'make_full_update.sh')
+ subprocess.call([path, convert_to_native(mar_file), convert_to_native(uncompress_dir)])
+
+ sign_mar_file(target_dir, config, mar_file, filename_prefix)
+
+ file_info = { 'complete' : get_file_info(mar_file, config.base_url) }
+
+ with open(os.path.join(target_dir, 'complete_info.json'), "w") as complete_info_file:
+ json.dump(file_info, complete_info_file, indent = 4)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/create_full_mar_for_languages.py b/bin/update/create_full_mar_for_languages.py
new file mode 100755
index 000000000..039521dd1
--- /dev/null
+++ b/bin/update/create_full_mar_for_languages.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+
+import sys
+import os
+import subprocess
+import json
+
+from tools import uncompress_file_to_dir, get_file_info
+
+from config import parse_config
+from path import UpdaterPath
+from signing import sign_mar_file
+
+current_dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def make_complete_mar_name(target_dir, filename_prefix, language):
+ filename = filename_prefix + "_" + language + "_complete_langpack.mar"
+ return os.path.join(target_dir, filename)
+
+def create_lang_infos(mar_file_name, language, url):
+ data = {'lang' : language,
+ 'complete' : get_file_info(mar_file_name, url)
+ }
+ return data
+
+def main():
+ if len(sys.argv) < 5:
+ print("Usage: create_full_mar_for_languages.py $PRODUCTNAME $WORKDIR $TARGETDIR $TEMPDIR $FILENAMEPREFIX $UPDATE_CONFIG")
+ sys.exit(1)
+
+ update_config = sys.argv[4]
+ filename_prefix = sys.argv[3]
+ workdir = sys.argv[2]
+ product_name = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ target_dir = updater_path.get_update_dir()
+ temp_dir = updater_path.get_language_dir()
+
+ config = parse_config(update_config)
+
+ language_pack_dir = os.path.join(workdir, "installation", product_name + "_languagepack", "archive", "install")
+ language_packs = os.listdir(language_pack_dir)
+ lang_infos = []
+ for language in language_packs:
+ if language == 'log':
+ continue
+
+ language_dir = os.path.join(language_pack_dir, language)
+ language_file = os.path.join(language_dir, os.listdir(language_dir)[0])
+
+ directory = uncompress_file_to_dir(language_file, os.path.join(temp_dir, language))
+
+ mar_file_name = make_complete_mar_name(target_dir, filename_prefix, language)
+
+ subprocess.call([os.path.join(current_dir_path, 'make_full_update.sh'), mar_file_name, directory])
+
+ sign_mar_file(target_dir, config, mar_file_name, filename_prefix)
+
+ lang_infos.append(create_lang_infos(mar_file_name, language, config.base_url))
+
+ with open(os.path.join(target_dir, "complete_lang_info.json"), "w") as language_info_file:
+ json.dump({'languages' : lang_infos}, language_info_file, indent=4)
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/create_partial_update.py b/bin/update/create_partial_update.py
new file mode 100755
index 000000000..9412bcd6e
--- /dev/null
+++ b/bin/update/create_partial_update.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+import requests
+import json
+import sys
+import hashlib
+import os
+import subprocess
+import errno
+
+from config import parse_config
+from uncompress_mar import extract_mar
+from tools import get_file_info, get_hash
+from signing import sign_mar_file
+
+from path import UpdaterPath, mkdir_p, convert_to_unix, convert_to_native
+
+BUF_SIZE = 1024
+current_dir_path = os.path.dirname(os.path.realpath(convert_to_unix(__file__)))
+
+class InvalidFileException(Exception):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+def download_file(filepath, url, hash_string):
+ with open(filepath, "wb") as f:
+ response = requests.get(url, stream=True)
+
+ if not response.ok:
+ return
+
+ for block in response.iter_content(BUF_SIZE):
+ f.write(block)
+
+ file_hash = get_hash(filepath)
+
+ if file_hash != hash_string:
+ raise InvalidFileException("file hash does not match for file %s: Expected %s, Got: %s" % (url, hash_string, file_hash))
+
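+# Download and extract the mar file of every language pack; returns a map
+# from language code to the directory with the extracted content.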
+def handle_language(lang_entries, filedir):
+ langs = {}
+ for lang, data in lang_entries.items():
+ lang_dir = os.path.join(filedir, lang)
+ lang_file = os.path.join(lang_dir, "lang.mar")
+ mkdir_p(lang_dir)
+ download_file(lang_file , data["url"], data["hash"])
+ dir_path = os.path.join(lang_dir, "lang")
+ mkdir_p(dir_path)
+ extract_mar(lang_file, dir_path)
+ langs[lang] = dir_path
+
+ return langs
+
+def download_mar_for_update_channel_and_platform(config, platform, temp_dir):
+ base_url = config.server_url + "update/partial-targets/1/"
+ url = base_url + platform + "/" + config.channel
+ r = requests.get(url)
+ if r.status_code != 200:
+ print(r.content)
+ raise Exception("download failed")
+
+ update_info = json.loads(r.content.decode("utf-8"))
+ update_files = update_info['updates']
+ downloaded_updates = {}
+ for update_file in update_files:
+ build = update_file["build"]
+ filedir = os.path.join(temp_dir, build)
+
+ mkdir_p(filedir)
+
+ filepath = filedir + "/complete.mar"
+ url = update_file["update"]["url"]
+ expected_hash = update_file["update"]["hash"]
+ download_file(filepath, url, expected_hash)
+
+ dir_path = os.path.join(filedir, "complete")
+ mkdir_p(dir_path)
+ extract_mar(filepath, dir_path)
+
+ downloaded_updates[build] = {"complete": dir_path}
+
+ langs = handle_language(update_file["languages"], filedir)
+ downloaded_updates[build]["languages"] = langs
+
+ return downloaded_updates
+
+def generate_file_name(current_build_id, old_build_id, mar_name_prefix):
+ name = "%s_from_%s_partial.mar" %(mar_name_prefix, old_build_id)
+ return name
+
+def generate_lang_file_name(current_build_id, old_build_id, mar_name_prefix, lang):
+ name = "%s_%s_from_%s_partial.mar" %(mar_name_prefix, lang, old_build_id)
+ return name
+
+def add_single_dir(path):
+ dir_name = [os.path.join(path, name) for name in os.listdir(path) if os.path.isdir(os.path.join(path, name))]
+ return dir_name[0]
+
+def main():
+ workdir = sys.argv[1]
+
+ updater_path = UpdaterPath(workdir)
+ updater_path.ensure_dir_exist()
+
+ mar_name_prefix = sys.argv[2]
+ update_config = sys.argv[3]
+ platform = sys.argv[4]
+ build_id = sys.argv[5]
+
+ current_build_path = updater_path.get_current_build_dir()
+ mar_dir = updater_path.get_mar_dir()
+ temp_dir = updater_path.get_previous_build_dir()
+ update_dir = updater_path.get_update_dir()
+
+ current_build_path = add_single_dir(current_build_path)
+ if sys.platform == "cygwin":
+ current_build_path = add_single_dir(current_build_path)
+
+ config = parse_config(update_config)
+
+ updates = download_mar_for_update_channel_and_platform(config, platform, temp_dir)
+
+ data = {"partials": []}
+
+ for build, update in updates.items():
+ file_name = generate_file_name(build_id, build, mar_name_prefix)
+ mar_file = os.path.join(update_dir, file_name)
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(mar_file), convert_to_native(update["complete"]), convert_to_native(current_build_path)])
+ sign_mar_file(update_dir, config, mar_file, mar_name_prefix)
+
+ partial_info = {"file":get_file_info(mar_file, config.base_url), "from": build, "to": build_id, "languages": {}}
+
+ # on Windows we don't use language packs
+ if sys.platform != "cygwin":
+ for lang, lang_info in update["languages"].items():
+ lang_name = generate_lang_file_name(build_id, build, mar_name_prefix, lang)
+
+ # write the file into the final directory
+ lang_mar_file = os.path.join(update_dir, lang_name)
+
+ # the directory of the old language file is of the form
+ # workdir/mar/language/en-US/LibreOffice_<version>_<os>_archive_langpack_<lang>/
+ language_dir = add_single_dir(os.path.join(mar_dir, "language", lang))
+ subprocess.call([os.path.join(current_dir_path, 'make_incremental_update.sh'), convert_to_native(lang_mar_file), convert_to_native(lang_info), convert_to_native(language_dir)])
+ sign_mar_file(update_dir, config, lang_mar_file, mar_name_prefix)
+
+ # add the partial language info
+ partial_info["languages"][lang] = get_file_info(lang_mar_file, config.base_url)
+
+ data["partials"].append(partial_info)
+
+ with open(os.path.join(update_dir, "partial_update_info.json"), "w") as f:
+ json.dump(data, f)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update/get_update_channel.py b/bin/update/get_update_channel.py
new file mode 100755
index 000000000..f94507d64
--- /dev/null
+++ b/bin/update/get_update_channel.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import sys
+from config import parse_config
+
+def main():
+ if len(sys.argv) < 2:
+ sys.exit(1)
+
+ update_config = sys.argv[1]
+ config = parse_config(update_config)
+ print(config.channel)
+
+if __name__ == "__main__":
+ main()
+
+# vim:set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/update/make_full_update.sh b/bin/update/make_full_update.sh
new file mode 100755
index 000000000..cb7de49b2
--- /dev/null
+++ b/bin/update/make_full_update.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates full update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE DIRECTORY"
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+if [ "$1" = "-h" ]; then
+ print_usage
+ notice ""
+ notice "The contents of DIRECTORY will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice ""
+ exit 1
+fi
+
+check_externals
+# -----------------------------------------------------------------------------
+
+archive="$1"
+targetdir="$2"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$targetdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ targetdir=$(echo "$targetdir" | sed -e 's:\/$::')
+fi
+workdir="$targetdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+targetfiles="updatev2.manifest updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$targetdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_files files
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+> $updatemanifestv2
+> $updatemanifestv3
+notice ""
+notice "Adding type instruction to update manifests"
+notice " type complete"
+echo "type \"complete\"" >> $updatemanifestv2
+echo "type \"complete\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file add instructions to update manifests"
+num_files=${#files[*]}
+
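+# Every target file is stored bzip2-compressed inside the mar; copy_perm
+# keeps the executable bit on the compressed copy.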
+for ((i=0; $i<$num_files; i=$i+1)); do
+ f="${files[$i]}"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ if check_for_add_to_manifestv2 "$f"; then
+ make_add_instruction "$f" "$updatemanifestv2" "" 1
+ fi
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+ dir=$(dirname "$f")
+ mkdir -p "$workdir/$dir"
+ $BZIP2 -cz9 "$targetdir/$f" > "$workdir/$f"
+ copy_perm "$targetdir/$f" "$workdir/$f"
+
+ targetfiles="$targetfiles \"$f\""
+ echo $f >> $workdir/files.txt
+done
+
+# Append remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$targetdir" "$updatemanifestv2" "$updatemanifestv3"
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+eval "$MAR -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/make_incremental_update.sh b/bin/update/make_incremental_update.sh
new file mode 100755
index 000000000..e76f2159f
--- /dev/null
+++ b/bin/update/make_incremental_update.sh
@@ -0,0 +1,318 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool generates incremental update packages for the update system.
+# Author: Darin Fisher
+#
+
+. $(dirname "$0")/common.sh
+
+# -----------------------------------------------------------------------------
+
+print_usage() {
+ notice "Usage: $(basename $0) [OPTIONS] ARCHIVE FROMDIR TODIR"
+ notice ""
+ notice "The differences between FROMDIR and TODIR will be stored in ARCHIVE."
+ notice ""
+ notice "Options:"
+ notice " -h show this help text"
+ notice " -f clobber this file in the installation"
+ notice " Must be a path to a file to clobber in the partial update."
+ notice ""
+}
+
+check_for_forced_update() {
+ force_list="$1"
+ forced_file_chk="$2"
+
+ local f
+
+ if [ "$forced_file_chk" = "precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/precomplete" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "$forced_file_chk" = "Contents/Resources/removed-files" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ if [ "${forced_file_chk##*.}" = "chk" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+
+ for f in $force_list; do
+ #echo comparing $forced_file_chk to $f
+ if [ "$forced_file_chk" = "$f" ]; then
+ ## "true" *giggle*
+ return 0;
+ fi
+ done
+ ## 'false'... because this is bash. Oh yay!
+ return 1;
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit 1
+fi
+
+requested_forced_updates='Contents/MacOS/firefox'
+
+while getopts "hf:" flag
+do
+ case "$flag" in
+ h) print_usage; exit 0
+ ;;
+ f) requested_forced_updates="$requested_forced_updates $OPTARG"
+ ;;
+ ?) print_usage; exit 1
+ ;;
+ esac
+done
+
+# -----------------------------------------------------------------------------
+
+let arg_start=$OPTIND-1
+shift $arg_start
+
+archive="$1"
+olddir="$2"
+newdir="$3"
+# Prevent the workdir from being inside the targetdir so it isn't included in
+# the update mar.
+if [ $(echo "$newdir" | grep -c '\/$') = 1 ]; then
+ # Remove the /
+ newdir=$(echo "$newdir" | sed -e 's:\/$::')
+fi
+workdir="$newdir.work"
+updatemanifestv2="$workdir/updatev2.manifest"
+updatemanifestv3="$workdir/updatev3.manifest"
+
+mkdir -p "$workdir"
+echo "updatev2.manifest" >> $workdir/files.txt
+echo "updatev3.manifest" >> $workdir/files.txt
+
+# Generate a list of all files in the target directory.
+pushd "$olddir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+list_files oldfiles
+list_dirs olddirs
+
+popd
+
+pushd "$newdir"
+if test $? -ne 0 ; then
+ exit 1
+fi
+
+# if [ ! -f "precomplete" ]; then
+# if [ ! -f "Contents/Resources/precomplete" ]; then
+# notice "precomplete file is missing!"
+# exit 1
+# fi
+# fi
+
+list_dirs newdirs
+list_files newfiles
+
+popd
+
+# Add the type of update to the beginning of the update manifests.
+notice ""
+notice "Adding type instruction to update manifests"
+> $updatemanifestv2
+> $updatemanifestv3
+notice " type partial"
+echo "type \"partial\"" >> $updatemanifestv2
+echo "type \"partial\"" >> $updatemanifestv3
+
+notice ""
+notice "Adding file patch and add instructions to update manifests"
+
+num_oldfiles=${#oldfiles[*]}
+remove_array=
+num_removes=0
+
+for ((i=0; $i<$num_oldfiles; i=$i+1)); do
+ f="${oldfiles[$i]}"
+
+ # If this file exists in the new directory as well, then check if it differs.
+ if [ -f "$newdir/$f" ]; then
+
+ if check_for_add_if_not_update "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if check_for_forced_update "$requested_forced_updates" "$f"; then
+ # The full workdir may not exist yet, so create it if necessary.
+ mkdir -p `dirname "$workdir/$f"`
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3" 1
+ echo $f >> $workdir/files.txt
+ continue 1
+ fi
+
+ if ! diff "$olddir/$f" "$newdir/$f" > /dev/null; then
+ # Compute both the compressed binary diff and the compressed file, and
+ # compare the sizes. Then choose the smaller of the two to package.
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+ notice "diffing \"$f\""
+ # MBSDIFF_HOOK represents the communication interface with funsize and,
+ # if enabled, caches the intermediate patches for future use so they
+ # do not have to be recomputed
+ #
+ # An example of MBSDIFF_HOOK env variable could look like this:
+ # export MBSDIFF_HOOK="myscript.sh -A https://funsize/api -c /home/user"
+ # where myscript.sh has the following usage:
+ # myscript.sh -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] \
+ # PATH-FROM-URL PATH-TO-URL PATH-PATCH SERVER-URL
+ #
+ # Note: patches are bzipped and stashed in funsize to gain more speed
+
+ # if service is not enabled then default to old behavior
+ if [ -z "$MBSDIFF_HOOK" ]; then
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ else
+ # if service enabled then check patch existence for retrieval
+ if $MBSDIFF_HOOK -g "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"; then
+ notice "file \"$f\" found in funsize, diffing skipped"
+ else
+ # if not found already - compute it and cache it for future use
+ $MBSDIFF "$olddir/$f" "$newdir/$f" "$workdir/$f.patch"
+ $BZIP2 -z9 "$workdir/$f.patch"
+ $MBSDIFF_HOOK -u "$olddir/$f" "$newdir/$f" "$workdir/$f.patch.bz2"
+ fi
+ fi
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+ patchfile="$workdir/$f.patch.bz2"
+ patchsize=$(get_file_size "$patchfile")
+ fullsize=$(get_file_size "$workdir/$f")
+
+ if [ $patchsize -lt $fullsize ]; then
+ make_patch_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ mv -f "$patchfile" "$workdir/$f.patch"
+ rm -f "$workdir/$f"
+ echo $f.patch >> $workdir/files.txt
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ rm -f "$patchfile"
+ echo $f >> $workdir/files.txt
+ fi
+ fi
+ else
+ # remove instructions are added after add / patch instructions for
+ # consistency with make_incremental_updates.py
+ remove_array[$num_removes]=$f
+ (( num_removes++ ))
+ fi
+done
+
+# Newly added files
+notice ""
+notice "Adding file add instructions to update manifests"
+num_newfiles=${#newfiles[*]}
+
+for ((i=0; $i<$num_newfiles; i=$i+1)); do
+ f="${newfiles[$i]}"
+
+ # If we've already tested this file, then skip it
+ for ((j=0; $j<$num_oldfiles; j=$j+1)); do
+ if [ "$f" = "${oldfiles[j]}" ]; then
+ continue 2
+ fi
+ done
+
+ dir=$(dirname "$workdir/$f")
+ mkdir -p "$dir"
+
+ $BZIP2 -cz9 "$newdir/$f" > "$workdir/$f"
+ copy_perm "$newdir/$f" "$workdir/$f"
+
+ if check_for_add_if_not_update "$f"; then
+ make_add_if_not_instruction "$f" "$updatemanifestv3"
+ else
+ make_add_instruction "$f" "$updatemanifestv2" "$updatemanifestv3"
+ fi
+
+
+ echo $f >> $workdir/files.txt
+done
+
+notice ""
+notice "Adding file remove instructions to update manifests"
+for ((i=0; $i<$num_removes; i=$i+1)); do
+ f="${remove_array[$i]}"
+ notice " remove \"$f\""
+ echo "remove \"$f\"" >> $updatemanifestv2
+ echo "remove \"$f\"" >> $updatemanifestv3
+done
+
+# Add remove instructions for any dead files.
+notice ""
+notice "Adding file and directory remove instructions from file 'removed-files'"
+append_remove_instructions "$newdir" "$updatemanifestv2" "$updatemanifestv3"
+
+notice ""
+notice "Adding directory remove instructions for directories that no longer exist"
+num_olddirs=${#olddirs[*]}
+
+for ((i=0; $i<$num_olddirs; i=$i+1)); do
+ f="${olddirs[$i]}"
+ # If this dir doesn't exist in the new directory remove it.
+ if [ ! -d "$newdir/$f" ]; then
+ notice " rmdir $f/"
+ echo "rmdir \"$f/\"" >> $updatemanifestv2
+ echo "rmdir \"$f/\"" >> $updatemanifestv3
+ fi
+done
+
+$BZIP2 -z9 "$updatemanifestv2" && mv -f "$updatemanifestv2.bz2" "$updatemanifestv2"
+$BZIP2 -z9 "$updatemanifestv3" && mv -f "$updatemanifestv3.bz2" "$updatemanifestv3"
+
+mar_command="$MAR"
+if [[ -n $PRODUCT_VERSION ]]
+then
+ mar_command="$mar_command -V $PRODUCT_VERSION"
+fi
+if [[ -n $CHANNEL_ID ]]
+then
+ mar_command="$mar_command -H $CHANNEL_ID"
+fi
+mar_command="$mar_command -C \"$workdir\" -c output.mar -f $workdir/files.txt"
+eval "$mar_command"
+mv -f "$workdir/output.mar" "$archive"
+
+# cleanup
+rm -fr "$workdir"
+
+notice ""
+notice "Finished"
+notice ""
diff --git a/bin/update/path.py b/bin/update/path.py
new file mode 100644
index 000000000..0fe0fd5eb
--- /dev/null
+++ b/bin/update/path.py
@@ -0,0 +1,69 @@
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import os
+import errno
+import subprocess
+from sys import platform
+
+def mkdir_p(path):
+ try:
+ os.makedirs(path)
+ except OSError as exc: # Python >2.5
+ if exc.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else:
+ raise
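+
+# Note: on Python >= 3.2 the above is equivalent to
+# os.makedirs(path, exist_ok=True).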
+
+def convert_to_unix(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-u", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+def convert_to_native(path):
+ if platform == "cygwin":
+ return subprocess.check_output(["cygpath", "-m", path]).decode("utf-8", "strict").rstrip()
+ else:
+ return path
+
+class UpdaterPath(object):
+
+ def __init__(self, workdir):
+ self._workdir = convert_to_unix(workdir)
+
+ def get_workdir(self):
+ return self._workdir
+
+ def get_update_dir(self):
+ return os.path.join(self._workdir, "update-info")
+
+ def get_current_build_dir(self):
+ return os.path.join(self._workdir, "mar", "current-build")
+
+ def get_mar_dir(self):
+ return os.path.join(self._workdir, "mar")
+
+ def get_previous_build_dir(self):
+ return os.path.join(self._workdir, "mar", "previous-build")
+
+ def get_language_dir(self):
+ return os.path.join(self.get_mar_dir(), "language")
+
+ def ensure_dir_exist(self):
+ mkdir_p(self.get_update_dir())
+ mkdir_p(self.get_current_build_dir())
+ mkdir_p(self.get_mar_dir())
+ mkdir_p(self.get_previous_build_dir())
+ mkdir_p(self.get_language_dir())
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/update/signing.py b/bin/update/signing.py
new file mode 100644
index 000000000..c0b43ce91
--- /dev/null
+++ b/bin/update/signing.py
@@ -0,0 +1,12 @@
+from tools import make_complete_mar_name
+
+import os
+import subprocess
+import path
+
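+# Sign a mar file in place: the mar tool writes a signed copy under a
+# temporary name, which is then renamed over the unsigned file.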
+def sign_mar_file(target_dir, config, mar_file, filename_prefix):
+ signed_mar_file = make_complete_mar_name(target_dir, filename_prefix + '_signed')
+ mar_executable = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar_executable, '-C', path.convert_to_native(target_dir), '-d', path.convert_to_native(config.certificate_path), '-n', config.certificate_name, '-s', path.convert_to_native(mar_file), path.convert_to_native(signed_mar_file)])
+
+ os.rename(signed_mar_file, mar_file)
diff --git a/bin/update/tools.py b/bin/update/tools.py
new file mode 100644
index 000000000..8cd786635
--- /dev/null
+++ b/bin/update/tools.py
@@ -0,0 +1,64 @@
+import os
+import hashlib
+import zipfile
+import tarfile
+
+def uncompress_file_to_dir(compressed_file, uncompress_dir):
+ command = None
+ extension = os.path.splitext(compressed_file)[1]
+
+ try:
+ os.mkdir(uncompress_dir)
+ except FileExistsError as e:
+ pass
+
+ if extension == '.gz':
+ tar = tarfile.open(compressed_file)
+ tar.extractall(uncompress_dir)
+ tar.close()
+ elif extension == '.zip':
+ zip_file = zipfile.ZipFile(compressed_file)
+ zip_file.extractall(uncompress_dir)
+ zip_file.close()
+
+ uncompress_dir = os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+ if " " in os.listdir(uncompress_dir)[0]:
+ print("replacing whitespace in directory name")
+ os.rename(os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0]),
+ os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0].replace(" ", "_")))
+ else:
+ print("Error: unknown extension " + extension)
+
+ return os.path.join(uncompress_dir, os.listdir(uncompress_dir)[0])
+
+BUF_SIZE = 1048576
+
+def get_hash(file_path):
+ sha512 = hashlib.sha512()
+ with open(file_path, 'rb') as f:
+ while True:
+ data = f.read(BUF_SIZE)
+ if not data:
+ break
+ sha512.update(data)
+ return sha512.hexdigest()
+
+def get_file_info(mar_file, url):
+ filesize = os.path.getsize(mar_file)
+ data = { 'hash' : get_hash(mar_file),
+ 'hashFunction' : 'sha512',
+ 'size' : filesize,
+ 'url' : url + os.path.basename(mar_file)}
+
+ return data
+
+def replace_variables_in_string(string, **kwargs):
+ new_string = string
+ for key, val in kwargs.items():
+ new_string = new_string.replace('$(%s)'%key, val)
+
+ return new_string
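+
+# Illustrative example: replace_variables_in_string("a/$(channel)/b",
+# channel="daily") returns "a/daily/b".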
+
+def make_complete_mar_name(target_dir, filename_prefix):
+ filename = filename_prefix + "_complete.mar"
+ return os.path.join(target_dir, filename)
diff --git a/bin/update/uncompress_mar.py b/bin/update/uncompress_mar.py
new file mode 100755
index 000000000..0989c7e92
--- /dev/null
+++ b/bin/update/uncompress_mar.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Extract a mar file and uncompress the content
+
+import os
+import re
+import sys
+import subprocess
+from path import convert_to_native
+
+def uncompress_content(file_path):
+ bzip2 = os.environ.get('BZIP2', 'bzip2')
+ file_path_compressed = file_path + ".bz2"
+ os.rename(file_path, file_path_compressed)
+ subprocess.check_call(["bzip2", "-d", convert_to_native(file_path_compressed)])
+
+def extract_mar(mar_file, target_dir):
+ mar = os.environ.get('MAR', 'mar')
+ subprocess.check_call([mar, "-C", convert_to_native(target_dir), "-x", convert_to_native(mar_file)])
+ file_info = subprocess.check_output([mar, "-t", convert_to_native(mar_file)])
+ lines = file_info.splitlines()
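+ # "mar -t" prints two numeric columns followed by the file name; skip
+ # lines that do not match as well as the header row ("NAME") below.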
+ prog = re.compile("\d+\s+\d+\s+(.+)")
+ for line in lines:
+ match = prog.match(line.decode("utf-8", "strict"))
+ if match is None:
+ continue
+ info = match.groups()[0]
+ # ignore header line
+ if info == 'NAME':
+ continue
+
+ uncompress_content(os.path.join(target_dir, info))
+
+def main():
+ if len(sys.argv) != 3:
+ print("Help: This program takes exactly two arguments pointing to a mar file and a target location")
+ sys.exit(1)
+
+ mar_file = sys.argv[1]
+ target_dir = sys.argv[2]
+ extract_mar(mar_file, target_dir)
+
+if __name__ == "__main__":
+ main()
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/update/upload_build_config.py b/bin/update/upload_build_config.py
new file mode 100755
index 000000000..9a87661ee
--- /dev/null
+++ b/bin/update/upload_build_config.py
@@ -0,0 +1,42 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import configparser
+import requests
+
+dir_path = os.path.dirname(os.path.realpath(__file__))
+
+def main(argv):
+
+ updater_config = sys.argv[2]
+
+ config = configparser.ConfigParser()
+ config.read(os.path.expanduser(updater_config))
+
+ user = config["Updater"]["User"]
+ password = config["Updater"]["Password"]
+ base_address = config["Updater"]["ServerURL"]
+
+ login_url = base_address + "accounts/login/"
+
+ session = requests.session()
+ r1 = session.get(login_url)
+ csrftoken = session.cookies['csrftoken']
+
+ login_data = { 'username': user,'password': password,
+ 'csrfmiddlewaretoken': csrftoken }
+ r1 = session.post(login_url, data=login_data, headers={"Referer": login_url})
+
+ url = base_address + "update/upload/release"
+ data = {}
+ data['csrfmiddlewaretoken'] = csrftoken
+
+ build_config = os.path.join(sys.argv[1], "build_config.json")
+ r = session.post(url, files={'release_config': open(build_config, "r")}, data=data)
+ print(r.content)
+ if r.status_code != 200:
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/bin/update/upload_builds.py b/bin/update/upload_builds.py
new file mode 100755
index 000000000..210668e0d
--- /dev/null
+++ b/bin/update/upload_builds.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python3
+
+import sys
+import os
+import subprocess
+
+from config import parse_config
+from path import convert_to_unix
+
+from tools import replace_variables_in_string
+
+def main():
+ product_name = sys.argv[1]
+ buildid = sys.argv[2]
+ platform = sys.argv[3]
+ update_dir = sys.argv[4]
+ update_config = sys.argv[5]
+
+ config = parse_config(update_config)
+ upload_url = replace_variables_in_string(config.upload_url, channel=config.channel, buildid=buildid, platform=platform)
+
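+ # upload_url is an scp-style "host:directory" destination; create the
+ # remote directory first, then scp every .mar file into it.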
+ target_url, target_dir = upload_url.split(':')
+
+ command = "ssh %s 'mkdir -p %s'"%(target_url, target_dir)
+ print(command)
+ subprocess.call(command, shell=True)
+ for file in os.listdir(update_dir):
+ if file.endswith('.mar'):
+ subprocess.call(['scp', convert_to_unix(os.path.join(update_dir, file)), upload_url])
+
+if __name__ == '__main__':
+ main()
diff --git a/bin/update_pch b/bin/update_pch
new file mode 100755
index 000000000..00cd50681
--- /dev/null
+++ b/bin/update_pch
@@ -0,0 +1,1308 @@
+#! /usr/bin/env python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+"""
+This script generates precompiled headers for a given
+module and library.
+
+Given a gmake makefile that belongs to some LO module:
+1) Process the makefile to find source files (process_makefile).
+2) For every source file, find all includes (process_source).
+3) Uncommon and rare includes are filtered (remove_rare).
+4) Conflicting headers are excluded (filter_ignore).
+5) Local files to the source are excluded (Filter_Local).
+6) Fixup missing headers that sources expect (fixup).
+7) The resulting includes are sorted by category (sort_by_category).
+8) The pch file is generated (generate).
+"""
+
+import sys
+import re
+import os
+import unittest
+import glob
+
+CUTOFF = 1
+EXCLUDE_MODULE = False
+EXCLUDE_LOCAL = False
+EXCLUDE_SYSTEM = True
+SILENT = False
+WORKDIR = 'workdir'
+
+# System includes: oox, sal, sd, svl, vcl
+
+INCLUDE = False
+EXCLUDE = True
+DEFAULTS = \
+{
+# module.library : (min, system, module, local), best time
+ 'accessibility.acc' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 7.8
+ 'basctl.basctl' : ( 3, EXCLUDE, INCLUDE, EXCLUDE), # 11.9
+ 'basegfx.basegfx' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 3.8
+ 'basic.sb' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 10.7
+ 'chart2.chartcontroller' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 18.4
+ 'chart2.chartcore' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 22.5
+ 'comphelper.comphelper' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 7.6
+ 'configmgr.configmgr' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 6.0
+ 'connectivity.ado' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 6.4
+ 'connectivity.calc' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'connectivity.dbase' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 5.2
+ 'connectivity.dbpool2' : ( 5, EXCLUDE, INCLUDE, EXCLUDE), # 3.0
+ 'connectivity.dbtools' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 0.8
+ 'connectivity.file' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 5.1
+ 'connectivity.firebird_sdbc' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 5.1
+ 'connectivity.flat' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.6
+ 'connectivity.mysql' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 3.4
+ 'connectivity.odbc' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 5.0
+ 'connectivity.postgresql-sdbc-impl' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 6.7
+ 'cppcanvas.cppcanvas' : (11, EXCLUDE, INCLUDE, INCLUDE), # 4.8
+ 'cppuhelper.cppuhelper' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'cui.cui' : ( 8, EXCLUDE, INCLUDE, EXCLUDE), # 19.7
+ 'dbaccess.dba' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 13.8
+ 'dbaccess.dbaxml' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 6.5
+ 'dbaccess.dbu' : (12, EXCLUDE, EXCLUDE, EXCLUDE), # 23.6
+ 'dbaccess.sdbt' : ( 1, EXCLUDE, INCLUDE, EXCLUDE), # 2.9
+ 'desktop.deployment' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 6.1
+ 'desktop.deploymentgui' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 5.7
+ 'desktop.deploymentmisc' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 3.4
+ 'desktop.sofficeapp' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 6.5
+ 'drawinglayer.drawinglayer' : ( 4, EXCLUDE, EXCLUDE, EXCLUDE), # 7.4
+ 'editeng.editeng' : ( 5, EXCLUDE, INCLUDE, EXCLUDE), # 13.0
+ 'forms.frm' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 14.2
+ 'framework.fwe' : (10, EXCLUDE, INCLUDE, EXCLUDE), # 5.5
+ 'framework.fwi' : ( 9, EXCLUDE, INCLUDE, EXCLUDE), # 3.4
+ 'framework.fwk' : ( 7, EXCLUDE, INCLUDE, INCLUDE), # 14.8
+ 'framework.fwl' : ( 5, EXCLUDE, INCLUDE, INCLUDE), # 5.1
+ 'hwpfilter.hwp' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 6.0
+ 'lotuswordpro.lwpft' : ( 2, EXCLUDE, EXCLUDE, EXCLUDE), # 11.6
+ 'oox.oox' : ( 6, EXCLUDE, EXCLUDE, INCLUDE), # 28.2
+ 'package.package2' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 4.5
+ 'package.xstor' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 3.8
+ 'reportdesign.rpt' : ( 9, EXCLUDE, INCLUDE, INCLUDE), # 9.4
+ 'reportdesign.rptui' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 13.1
+ 'reportdesign.rptxml' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 7.6
+ 'sal.sal' : ( 2, EXCLUDE, EXCLUDE, INCLUDE), # 4.2
+ 'sc.sc' : (12, EXCLUDE, INCLUDE, INCLUDE), # 92.6
+ 'sc.scfilt' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 39.9
+ 'sc.scui' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 15.0
+ 'sc.vbaobj' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 17.3
+ 'sd.sd' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 47.4
+ 'sd.sdui' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 9.4
+ 'sdext.PresentationMinimizer' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.1
+ 'sdext.PresenterScreen' : ( 2, EXCLUDE, INCLUDE, EXCLUDE), # 7.1
+ 'sfx2.sfx' : ( 3, EXCLUDE, EXCLUDE, EXCLUDE), # 27.4
+ 'slideshow.slideshow' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 10.8
+ 'sot.sot' : ( 5, EXCLUDE, EXCLUDE, INCLUDE), # 3.1
+ 'starmath.sm' : ( 5, EXCLUDE, EXCLUDE, INCLUDE), # 10.9
+ 'svgio.svgio' : ( 8, EXCLUDE, EXCLUDE, INCLUDE), # 4.3
+ 'emfio.emfio' : ( 8, EXCLUDE, EXCLUDE, INCLUDE), # 4.3
+ 'svl.svl' : ( 6, EXCLUDE, EXCLUDE, EXCLUDE), # 7.6
+ 'svtools.svt' : ( 4, EXCLUDE, INCLUDE, EXCLUDE), # 17.6
+ 'svx.svx' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 20.7
+ 'svx.svxcore' : ( 7, EXCLUDE, INCLUDE, EXCLUDE), # 37.0
+ 'sw.msword' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 22.4
+ 'sw.sw' : ( 7, EXCLUDE, EXCLUDE, INCLUDE), # 129.6
+ 'sw.swui' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 26.1
+ 'sw.vbaswobj' : ( 4, EXCLUDE, INCLUDE, INCLUDE), # 13.1
+ 'tools.tl' : ( 5, EXCLUDE, EXCLUDE, EXCLUDE), # 4.2
+ 'unotools.utl' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 7.0
+ 'unoxml.unoxml' : ( 1, EXCLUDE, EXCLUDE, EXCLUDE), # 4.6
+ 'uui.uui' : ( 4, EXCLUDE, EXCLUDE, EXCLUDE), # 4.9
+ 'vbahelper.msforms' : ( 3, EXCLUDE, INCLUDE, INCLUDE), # 5.2
+ 'vbahelper.vbahelper' : ( 3, EXCLUDE, EXCLUDE, INCLUDE), # 7.0
+ 'vcl.vcl' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 35.7
+ 'writerfilter.writerfilter' : ( 5, EXCLUDE, EXCLUDE, EXCLUDE), # 19.7/27.3
+ 'xmloff.xo' : ( 7, EXCLUDE, INCLUDE, INCLUDE), # 22.1
+ 'xmloff.xof' : ( 1, EXCLUDE, EXCLUDE, INCLUDE), # 4.4
+ 'xmlscript.xmlscript' : ( 4, EXCLUDE, EXCLUDE, INCLUDE), # 3.6
+ 'xmlsecurity.xmlsecurity' : ( 6, EXCLUDE, INCLUDE, INCLUDE), # 5.1
+ 'xmlsecurity.xsec_xmlsec' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # 4.4
+ 'xmlsecurity.xsec_gpg' : ( 2, EXCLUDE, INCLUDE, INCLUDE), # ?
+}
+
+def remove_rare(raw, min_use=-1):
+ """ Remove headers not commonly included.
+ The minimum threshold is min_use.
+ """
+ # The minimum number of times a header
+ # must be included to be in the PCH.
+ min_use = min_use if min_use >= 0 else CUTOFF
+
+ out = []
+ if not raw or not len(raw):
+ return out
+
+ inc = sorted(raw)
+ last = inc[0]
+ count = 1
+ for x in range(1, len(inc)):
+ i = inc[x]
+ if i == last:
+ count += 1
+ else:
+ if count >= min_use:
+ out.append(last)
+ last = i
+ count = 1
+
+ # Last group.
+ if count >= min_use:
+ out.append(last)
+
+ return out
+
+def process_list(list, callable):
+ """ Given a list and callable
+ we pass each entry through
+ the callable and only add to
+ the output if not blank.
+ """
+ out = []
+ for i in list:
+ line = callable(i)
+ if line and len(line):
+ out.append(line)
+ return out
+
+def find_files(path, recurse=True):
+ list = []
+ for root, dir, files in os.walk(path):
+ list += map(lambda x: os.path.join(root, x), files)
+ return list
+
+def get_filename(line):
+ """ Strips the line from the
+ '#include' and angled brakets
+ and return the filename only.
+ """
+ if not len(line) or line[0] != '#':
+ return line
+ return re.sub(r'(.*#include\s*)<(.*)>(.*)', r'\2', line)
+
+def is_c_runtime(inc, root, module):
+ """ Heuristic-based detection of C/C++
+ runtime headers.
+ They are all-lowercase, with .h or
+ no extension, filename only.
+ Try to check that they are not LO headers.
+ """
+ inc = get_filename(inc)
+
+ if inc.endswith('.hxx') or inc.endswith('.hpp'):
+ return False
+
+ if inc.endswith('.h') and inc.startswith( 'config_' ):
+ return False
+
+ hasdot = False
+ for c in inc:
+ if c == '/':
+ return False
+ if c == '.' and not inc.endswith('.h'):
+ return False
+ if c == '.':
+ hasdot = True
+ if c.isupper():
+ return False
+ if not hasdot: # <memory> etc.
+ return True
+
+ if glob.glob(os.path.join(root, module, '**', inc), recursive=True):
+ return False
+
+ return True
+
+def sanitize(raw):
+ """ There are two forms of includes,
+ those with <> and "".
+ Technically, the difference is that
+ the compiler can use an internal
+ representation for an angled include,
+ such that it doesn't have to be a file.
+ For our purposes, there is no difference.
+ Here, we convert everything to angled.
+ """
+ if not raw or not len(raw):
+ return ''
+ raw = raw.strip()
+ if not len(raw):
+ return ''
+ return re.sub(r'(.*#include\s*)\"(.*)\"(.*)', r'#include <\2>', raw)
+
+class Filter_Local(object):
+ """ Filter headers local to a module.
+ allow_public: allows include/module/file.hxx
+ #include <module/file.hxx>
+ allow_module: allows module/inc/file.hxx
+ #include <file.hxx>
+ allow_locals: allows module/source/file.hxx and
+ module/source/inc/file.hxx
+ #include <file.hxx>
+ """
+ def __init__(self, root, module, allow_public=True, allow_module=True, allow_locals=True):
+ self.root = root
+ self.module = module
+ self.allow_public = allow_public
+ self.allow_module = allow_module
+ self.allow_locals = allow_locals
+ self.public_prefix = '<' + self.module + '/'
+
+ all = find_files(os.path.join(root, module))
+ self.module_includes = []
+ self.locals = []
+ mod_prefix = module + '/inc/'
+ for i in all:
+ if mod_prefix in i:
+ self.module_includes.append(i)
+ else:
+ self.locals.append(i)
+
+ def is_public(self, line):
+ return self.public_prefix in line
+
+ def is_module(self, line):
+ """ Returns True if in module/inc/... """
+ filename = get_filename(line)
+ for i in self.module_includes:
+ if i.endswith(filename):
+ return True
+ return False
+
+ def is_local(self, line):
+ """ Returns True if in module/source/... """
+ filename = get_filename(line)
+ for i in self.locals:
+ if i.endswith(filename):
+ return True
+ return False
+
+ def is_external(self, line):
+ return is_c_runtime(line, self.root, self.module) and \
+ not self.is_public(line) and \
+ not self.is_module(line) and \
+ not self.is_local(line)
+
+ def find_local_file(self, line):
+ """ Finds the header file in the module dir,
+ but doesn't validate.
+ """
+ filename = get_filename(line)
+ for i in self.locals:
+ if i.endswith(filename):
+ return i
+ for i in self.module_includes:
+ if i.endswith(filename):
+ return i
+ return None
+
+ def proc(self, line):
+ assert line and len(line)
+
+ if line[0] == '#':
+ if not SILENT:
+ sys.stderr.write('unhandled #include : {}\n'.format(line))
+ return ''
+
+ assert line[0] != '<' and line[0] != '#'
+
+ filename = get_filename(line)
+
+ # Local with relative path.
+ if filename.startswith('..'):
+ # Exclude for now as we don't have cxx path.
+ return ''
+
+ # Locals are included first (by the compiler).
+ if self.is_local(filename):
+ # Use only locals that are in some /inc/ directory (either in <module>/inc or
+ # somewhere under <module>/source/**/inc/); compilations use -I for these paths,
+ # and headers elsewhere would not be found when compiling the PCH.
+ if not self.allow_locals:
+ return ''
+ elif '/inc/' in filename:
+ return filename
+ elif glob.glob(os.path.join(self.root, self.module, '**', 'inc', filename), recursive=True):
+ return filename
+ else:
+ return ''
+
+ # Module headers are next.
+ if self.is_module(filename):
+ return line if self.allow_module else ''
+
+ # Public headers are last.
+ if self.is_public(line):
+ return line if self.allow_public else ''
+
+ # Leave out potentially unrelated files local
+ # to some other module we can't include directly.
+ if '/' not in filename and not self.is_external(filename):
+ return ''
+
+ # Unfiltered.
+ return line
+
+def filter_ignore(line, module):
+ """ Filters includes from known
+ problematic ones.
+ Expects sanitized input.
+ """
+ assert line and len(line)
+
+ # Always include files without extension.
+ if '.' not in line:
+ return line
+
+ # Extract filenames for ease of comparison.
+ line = get_filename(line)
+
+ # Filter out all files that are not normal headers.
+ if not line.endswith('.h') and \
+ not line.endswith('.hxx') and \
+ not line.endswith('.hpp') and \
+ not line.endswith('.hdl'):
+ return ''
+
+ ignore_list = [
+ 'LibreOfficeKit/LibreOfficeKitEnums.h', # Needs special directives
+ 'LibreOfficeKit/LibreOfficeKitTypes.h', # Needs special directives
+ 'jerror.h', # c++ unfriendly
+ 'jpeglib.h', # c++ unfriendly
+ 'boost/spirit/include/classic_core.hpp', # depends on BOOST_SPIRIT_DEBUG
+ 'svtools/editimplementation.hxx' # no direct include
+ ]
+
+ if module == 'accessibility':
+ ignore_list += [
+ # STR_SVT_ACC_LISTENTRY_SELCTED_STATE redefined from svtools.hrc
+ 'accessibility/extended/textwindowaccessibility.hxx',
+ ]
+ if module == 'basic':
+ ignore_list += [
+ 'basic/vbahelper.hxx',
+ ]
+ if module == 'connectivity':
+ ignore_list += [
+ 'com/sun/star/beans/PropertyAttribute.hpp', # OPTIONAL defined via objbase.h
+ 'com/sun/star/sdbcx/Privilege.hpp', # DELETE defined via objbase.h
+ 'ado/*', # some strange type conflict because of Windows' adoctint.h
+ 'adoint.h',
+ 'adoctint.h',
+ ]
+ if module == 'sc':
+ ignore_list += [
+ 'progress.hxx', # special directives
+ 'scslots.hxx', # special directives
+ ]
+ if module == 'sd':
+ ignore_list += [
+ 'sdgslots.hxx', # special directives
+ 'sdslots.hxx', # special directives
+ ]
+ if module == 'sfx2':
+ ignore_list += [
+ 'sfx2/recentdocsview.hxx', # Redefines ApplicationType defined in objidl.h
+ 'sfx2/sidebar/Sidebar.hxx',
+ 'sfx2/sidebar/UnoSidebar.hxx',
+ 'sfxslots.hxx', # externally defined types
+ ]
+ if module == 'sot':
+ ignore_list += [
+ 'sysformats.hxx', # Windows headers
+ ]
+ if module == 'vcl':
+ ignore_list += [
+ 'accmgr.hxx', # redefines ImplAccelList
+ 'image.h',
+ 'jobset.h',
+ 'opengl/gdiimpl.hxx',
+ 'opengl/salbmp.hxx',
+ 'openglgdiimpl', # ReplaceTextA
+ 'printdlg.hxx',
+ 'salinst.hxx', # GetDefaultPrinterA
+ 'salprn.hxx', # SetPrinterDataA
+ 'vcl/jobset.hxx',
+ 'vcl/oldprintadaptor.hxx',
+ 'vcl/opengl/OpenGLContext.hxx',
+ 'vcl/opengl/OpenGLHelper.hxx', # Conflicts with X header on *ix
+ 'vcl/print.hxx',
+ 'vcl/prntypes.hxx', # redefines Orientation from filter/jpeg/Exif.hxx
+ 'vcl/sysdata.hxx',
+ ]
+ if module == 'xmloff':
+ ignore_list += [
+ 'SchXMLExport.hxx', # SchXMLAutoStylePoolP.hxx not found
+ 'SchXMLImport.hxx', # enums redefined in draw\sdxmlimp_impl.hxx
+ 'XMLEventImportHelper.hxx', # NameMap redefined in XMLEventExport.hxx
+ 'xmloff/XMLEventExport.hxx', # enums redefined
+ ]
+ if module == 'xmlsecurity':
+ ignore_list += [
+ 'xmlsec/*',
+ 'xmlsecurity/xmlsec-wrapper.h',
+ ]
+ if module == 'external/pdfium':
+ ignore_list += [
+ 'third_party/freetype/include/pstables.h',
+ ]
+ if module == 'external/clucene':
+ ignore_list += [
+ '_bufferedstream.h',
+ '_condition.h',
+ '_gunichartables.h',
+ '_threads.h',
+ 'error.h',
+ 'CLucene/LuceneThreads.h',
+ 'CLucene/config/_threads.h',
+ ]
+ if module == 'external/skia':
+ ignore_list += [
+ 'skcms_internal.h',
+ 'zlib.h', # causes crc32 conflict
+ 'dirent.h', # unix-specific
+ 'pthread.h',
+ 'unistd.h',
+ 'sys/stat.h',
+ 'ft2build.h',
+ 'fontconfig/fontconfig.h',
+ 'GL/glx.h',
+ 'src/Transform_inl.h',
+ 'src/c/sk_c_from_to.h',
+ 'src/c/sk_types_priv.h',
+ 'src/core/SkBlitBWMaskTemplate.h',
+ 'src/sfnt/SkSFNTHeader.h',
+ 'src/opts/',
+ 'src/core/SkCubicSolver.h',
+ 'src/sksl/SkSLCPP.h',
+ 'src/gpu/vk/GrVkAMDMemoryAllocator.h',
+ 'src/gpu/GrUtil.h',
+ ]
+
+ for i in ignore_list:
+ if line.startswith(i):
+ return ''
+ if i[0] == '*' and line.endswith(i[1:]):
+ return ''
+ if i[-1] == '*' and line.startswith(i[:-1]):
+ return ''
+
+ return line
+
+def fixup(includes, module):
+ """ Here we add any headers
+ necessary in the pch.
+ These could be known to be very
+ common but for technical reasons
+ left out of the pch by this generator.
+ Or, they could be missing from the
+ source files where they are used
+ (probably because they had been
+ in the old pch, they were missed).
+ Also, these could be headers
+ that make the build faster but
+ aren't added automatically.
+ """
+ fixes = []
+ def append(inc):
+ # Append only once per header.
+ if inc not in fixes:
+ fixes.append(inc)
+
+ if module == 'basctl':
+ if 'basslots.hxx' in includes:
+ append('sfx2/msg.hxx')
+
+ #if module == 'sc':
+ # if 'scslots.hxx' in includes:
+ # append('sfx2/msg.hxx')
+ return fixes
+
+def sort_by_category(list, root, module, filter_local):
+ """ Move all 'system' headers first.
+ Core files of osl, rtl, sal, next.
+ Everything non-module-specific third.
+ Last, module-specific headers.
+ """
+ sys = []
+ boo = []
+ cor = []
+ rst = []
+ mod = []
+
+ prefix = '<' + module + '/'
+ for i in list:
+ if is_c_runtime(i, root, module):
+ sys.append(i)
+ elif '<boost/' in i:
+ boo.append(i)
+ elif prefix in i or '/' not in i:
+ mod.append(i)
+ elif '<sal/' in i or '<vcl/' in i:
+ cor.append(i)
+ elif '<osl/' in i or '<rtl/' in i:
+ if module == "sal": # osl and rtl are also part of sal
+ mod.append(i)
+ else:
+ cor.append(i)
+ # Headers from another module that is closely tied to the module.
+ elif module == 'sc' and '<formula' in i:
+ mod.append(i)
+ else:
+ rst.append(i)
+
+ out = []
+ out += [ "#if PCH_LEVEL >= 1" ]
+ out += sorted(sys)
+ out += sorted(boo)
+ out += [ "#endif // PCH_LEVEL >= 1" ]
+ out += [ "#if PCH_LEVEL >= 2" ]
+ out += sorted(cor)
+ out += [ "#endif // PCH_LEVEL >= 2" ]
+ out += [ "#if PCH_LEVEL >= 3" ]
+ out += sorted(rst)
+ out += [ "#endif // PCH_LEVEL >= 3" ]
+ out += [ "#if PCH_LEVEL >= 4" ]
+ out += sorted(mod)
+ out += [ "#endif // PCH_LEVEL >= 4" ]
+ return out
+
+def parse_makefile(groups, lines, lineno, lastif, ifstack):
+
+ inobjects = False
+ ingeneratedobjects = False
+ inelse = False
+ suffix = 'cxx'
+ os_cond_re = re.compile(r'(ifeq|ifneq)\s*\(\$\(OS\),(\w*)\)')
+
+ line = lines[lineno]
+ if line.startswith('if'):
+ lastif = line
+ if ifstack == 0:
+ # Correction if first line is an if.
+ lineno = parse_makefile(groups, lines, lineno, line, ifstack+1)
+ else:
+ lineno -= 1
+
+ while lineno + 1 < len(lines):
+ lineno += 1
+ line = lines[lineno].strip()
+ line = line.rstrip('\\').strip()
+ #print('line #{}: {}'.format(lineno, line))
+ if len(line) == 0:
+ continue
+
+ if line == '))':
+ inobjects = False
+ ingeneratedobjects = False
+ elif 'add_exception_objects' in line or \
+ 'add_cxxobject' in line:
+ inobjects = True
+ #print('inobjects')
+ #if ifstack and not SILENT:
+ #sys.stderr.write('Sources in a conditional, ignoring for now.\n')
+ elif 'add_generated_exception_objects' in line or \
+ 'add_generated_cxxobject' in line:
+ ingeneratedobjects = True
+ elif 'set_generated_cxx_suffix' in line:
+ suffix_re = re.compile('.*set_generated_cxx_suffix,[^,]*,([^)]*).*')
+ match = suffix_re.match(line)
+ if match:
+ suffix = match.group(1)
+ elif line.startswith('if'):
+ lineno = parse_makefile(groups, lines, lineno, line, ifstack+1)
+ continue
+ elif line.startswith('endif'):
+ if ifstack:
+ return lineno
+ continue
+ elif line.startswith('else'):
+ inelse = True
+ elif inobjects or ingeneratedobjects:
+ if EXCLUDE_SYSTEM and ifstack:
+ continue
+ file = line + '.' + suffix
+ if ',' in line or '(' in line or ')' in line or file.startswith('-'):
+ #print('passing: ' + line)
+ pass # $if() probably, or something similar
+ else:
+ osname = ''
+ if lastif:
+ if 'filter' in lastif:
+ # We can't grok filter, yet.
+ continue
+ match = os_cond_re.match(lastif)
+ if not match:
+ # We only support OS conditionals.
+ continue
+ in_out = match.group(1)
+ osname = match.group(2) if match else ''
+ if (in_out == 'ifneq' and not inelse) or \
+ (in_out == 'ifeq' and inelse):
+ osname = '!' + osname
+
+ if osname not in groups:
+ groups[osname] = []
+ if ingeneratedobjects:
+ file = WORKDIR + '/' + file
+ groups[osname].append(file)
+
+ return groups
+
+def process_makefile(root, module, libname):
+ """ Parse a gmake makefile and extract
+ source filenames from it.
+ """
+
+ makefile = 'Library_{}.mk'.format(libname)
+ filename = os.path.join(os.path.join(root, module), makefile)
+ if not os.path.isfile(filename):
+ makefile = 'StaticLibrary_{}.mk'.format(libname)
+ filename = os.path.join(os.path.join(root, module), makefile)
+ if not os.path.isfile(filename):
+ sys.stderr.write('Error: Module {} has no makefile at {}.\n'.format(module, filename))
+
+ groups = {'':[], 'ANDROID':[], 'iOS':[], 'WNT':[], 'LINUX':[], 'MACOSX':[]}
+
+ with open(filename, 'r') as f:
+ lines = f.readlines()
+ groups = parse_makefile(groups, lines, lineno=0, lastif=None, ifstack=0)
+
+ return groups
+
+def is_allowed_if(line, module):
+ """ Check whether the given #if condition
+ is allowed for the given module or whether
+ its block should be ignored.
+ """
+
+ # remove trailing comments
+ line = re.sub(r'(.*) *//.*', r'\1', line)
+ line = line.strip()
+
+ # Our sources always build with LIBO_INTERNAL_ONLY.
+ if line == "#if defined LIBO_INTERNAL_ONLY" or line == "#ifdef LIBO_INTERNAL_ONLY":
+ return True
+ if module == "external/skia":
+ # We always set these.
+ if line == "#ifdef SK_VULKAN" or line == "#if SK_SUPPORT_GPU":
+ return True
+ return False
+
+def process_source(root, module, filename, maxdepth=0):
+ """ Process a source file to extract
+ included headers.
+ For now, skip on compiler directives.
+ maxdepth is used when processing headers
+ which typically have protecting ifndef.
+ """
+
+ ifdepth = 0
+ lastif = ''
+ raw_includes = []
+ allowed_ifs = []
+ ifsallowed = 0
+ with open(filename, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith('#if'):
+ if is_allowed_if(line, module):
+ allowed_ifs.append(True)
+ ifsallowed += 1
+ else:
+ allowed_ifs.append(False)
+ lastif = line
+ ifdepth += 1
+ elif line.startswith('#endif'):
+ ifdepth -= 1
+ if allowed_ifs[ ifdepth ]:
+ ifsallowed -= 1
+ else:
+ lastif = '#if'
+ del allowed_ifs[ ifdepth ]
+ elif line.startswith('#include'):
+ if ifdepth - ifsallowed <= maxdepth:
+ line = sanitize(line)
+ if line:
+ line = get_filename(line)
+ if line and len(line):
+ raw_includes.append(line)
+ elif not SILENT:
+ sys.stderr.write('#include in {} : {}\n'.format(lastif, line))
+
+ return raw_includes
+
+def explode(root, module, includes, tree, filter_local, recurse):
+ incpath = os.path.join(root, 'include')
+
+ for inc in includes:
+ filename = get_filename(inc)
+ if filename in tree or len(filter_local.proc(filename)) == 0:
+ continue
+
+ try:
+ # Module or Local header.
+ filepath = filter_local.find_local_file(inc)
+ if filepath:
+ #print('trying loc: ' + filepath)
+ incs = process_source(root, module, filepath, maxdepth=1)
+ incs = map(get_filename, incs)
+ incs = process_list(incs, lambda x: filter_ignore(x, module))
+ incs = process_list(incs, filter_local.proc)
+ tree[filename] = incs
+ if recurse:
+ tree = explode(root, module, incs, tree, filter_local, recurse)
+ #print('{} => {}'.format(filepath, tree[filename]))
+ continue
+ except:
+ pass
+
+ try:
+ # Public header.
+ filepath = os.path.join(incpath, filename)
+ #print('trying pub: ' + filepath)
+ incs = process_source(root, module, filepath, maxdepth=1)
+ incs = map(get_filename, incs)
+ incs = process_list(incs, lambda x: filter_ignore(x, module))
+ incs = process_list(incs, filter_local.proc)
+ tree[filename] = incs
+ if recurse:
+ tree = explode(root, module, incs, tree, filter_local, recurse)
+ #print('{} => {}'.format(filepath, tree[filename]))
+ continue
+ except:
+ pass
+
+ # Failed, but remember to avoid searching again.
+ tree[filename] = []
+
+ return tree
+
+def make_command_line():
+ args = sys.argv[:]
+ # Remove command line flags and
+ # use internal flags.
+ for i in range(len(args)-1, 0, -1):
+ if args[i].startswith('--'):
+ args.pop(i)
+
+ args.append('--cutoff=' + str(CUTOFF))
+ if EXCLUDE_SYSTEM:
+ args.append('--exclude:system')
+ else:
+ args.append('--include:system')
+ if EXCLUDE_MODULE:
+ args.append('--exclude:module')
+ else:
+ args.append('--include:module')
+ if EXCLUDE_LOCAL:
+ args.append('--exclude:local')
+ else:
+ args.append('--include:local')
+
+ return ' '.join(args)
+
+def generate_includes(includes):
+ """Generates the include lines of the pch.
+ """
+ lines = []
+ for osname, group in includes.items():
+ if not len(group):
+ continue
+
+ if len(osname):
+ not_eq = ''
+ if osname[0] == '!':
+ not_eq = '!'
+ osname = osname[1:]
+ lines.append('')
+ lines.append('#if {}defined({})'.format(not_eq, osname))
+
+ for i in group:
+ lines.append(i)
+
+ if len(osname):
+ lines.append('#endif')
+
+ return lines
+
+def generate(includes, libname, filename, module):
+ header = \
+"""/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+/*
+ This file has been autogenerated by update_pch.sh. It is possible to edit it
+ manually (such as when an include file has been moved/renamed/removed). All such
+ manual changes will be rewritten by the next run of update_pch.sh (which presumably
+ also fixes all possible problems, so it's usually better to use it).
+"""
+
+ footer = \
+"""
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
+"""
+ import datetime
+
+ with open(filename, 'w') as f:
+ f.write(header)
+ f.write('\n Generated on {} using:\n {}\n'.format(
+ datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+ make_command_line()))
+ f.write('\n If the build fails after updating, use the following command to locate conflicting headers:\n ./bin/update_pch_bisect {} "make {}.build" --find-conflicts\n*/\n'.format(
+ filename, module))
+
+ # sal needs this for rand_s()
+ if module == 'sal' and libname == 'sal':
+ sal_define = """
+#if defined(_WIN32)
+#if !defined _CRT_RAND_S
+#define _CRT_RAND_S
+#endif
+#endif
+"""
+ f.write(sal_define)
+
+ # Dump the headers.
+ f.write('\n')
+ for i in includes:
+ f.write(i + '\n')
+
+ # Some libraries pull windows headers that aren't self contained.
+ if (module == 'connectivity' and libname == 'ado') or \
+ (module == 'xmlsecurity' and libname == 'xsec_xmlsec'):
+ ado_define = """
+// Cleanup windows header macro pollution.
+#if defined(_WIN32) && defined(WINAPI)
+#include <postwin.h>
+#undef RGB
+#endif
+"""
+ f.write(ado_define)
+
+ f.write(footer)
+
+def remove_from_tree(filename, tree):
+ # Remove this file, if top-level.
+ incs = tree.pop(filename, [])
+ for i in incs:
+ tree = remove_from_tree(i, tree)
+
+ # Also remove if included from another.
+ for (k, v) in tree.items():
+ if filename in v:
+ v.remove(filename)
+
+ return tree
+
+def tree_to_list(includes, filename, tree):
+ if filename in includes:
+ return includes
+ includes.append(filename)
+ #incs = tree.pop(filename, [])
+ incs = tree[filename] if filename in tree else []
+ for i in incs:
+ tree_to_list(includes, i, tree)
+
+ return includes
+
+def promote(includes):
+ """ Common library headers are heavily
+ referenced, even if they are included
+ from a few places.
+ Here we separate them to promote
+ their inclusion in the final pch.
+ """
+ promo = []
+ for inc in includes:
+ if inc.startswith('boost') or \
+ inc.startswith('sal') or \
+ inc.startswith('osl') or \
+ inc.startswith('rtl'):
+ promo.append(inc)
+ return promo
+
+def make_pch_filename(root, module, libname):
+ """ PCH files are stored here:
+ <root>/<module>/inc/pch/precompiled_<libname>.hxx
+ """
+
+ path = os.path.join(root, module)
+ path = os.path.join(path, 'inc')
+ path = os.path.join(path, 'pch')
+ path = os.path.join(path, 'precompiled_' + libname + '.hxx')
+ return path
+
+def main():
+
+ global CUTOFF
+ global EXCLUDE_MODULE
+ global EXCLUDE_LOCAL
+ global EXCLUDE_SYSTEM
+ global SILENT
+ global WORKDIR
+
+ if os.getenv('WORKDIR'):
+ WORKDIR = os.getenv('WORKDIR')
+
+ root = '.'
+ module = sys.argv[1]
+ libname = sys.argv[2]
+ header = make_pch_filename(root, module, libname)
+
+ if not os.path.exists(os.path.join(root, module)):
+ raise Exception('Error: module [{}] not found.'.format(module))
+
+ key = '{}.{}'.format(module, libname)
+ if key in DEFAULTS:
+ # Load the module-specific defaults.
+ CUTOFF = DEFAULTS[key][0]
+ EXCLUDE_SYSTEM = DEFAULTS[key][1]
+ EXCLUDE_MODULE = DEFAULTS[key][2]
+ EXCLUDE_LOCAL = DEFAULTS[key][3]
+
+ force_update = False
+ for x in range(3, len(sys.argv)):
+ i = sys.argv[x]
+ if i.startswith('--cutoff='):
+ CUTOFF = int(i.split('=')[1])
+ elif i.startswith('--exclude:'):
+ cat = i.split(':')[1]
+ if cat == 'module':
+ EXCLUDE_MODULE = True
+ elif cat == 'local':
+ EXCLUDE_LOCAL = True
+ elif cat == 'system':
+ EXCLUDE_SYSTEM = True
+ elif i.startswith('--include:'):
+ cat = i.split(':')[1]
+ if cat == 'module':
+ EXCLUDE_MODULE = False
+ elif cat == 'local':
+ EXCLUDE_LOCAL = False
+ elif cat == 'system':
+ EXCLUDE_SYSTEM = False
+ elif i == '--silent':
+ SILENT = True
+ elif i == '--force':
+ force_update = True
+ else:
+ sys.stderr.write('Unknown option [{}].'.format(i))
+ return 1
+
+ filter_local = Filter_Local(root, module,
+ allow_module=not EXCLUDE_MODULE,
+ allow_locals=not EXCLUDE_LOCAL)
+
+ # Read input.
+ groups = process_makefile(root, module, libname)
+
+ generic = []
+ for osname, group in groups.items():
+ if not len(group):
+ continue
+
+ includes = []
+ for filename in group:
+ includes += process_source(root, module, filename)
+
+ # Save unique top-level includes.
+ unique = set(includes)
+ promoted = promote(unique)
+
+ # Process includes.
+ includes = remove_rare(includes)
+ includes = process_list(includes, lambda x: filter_ignore(x, module))
+ includes = process_list(includes, filter_local.proc)
+
+ # Remove the already included ones.
+ for inc in includes:
+ unique.discard(inc)
+
+ # Explode the excluded ones.
+ tree = {i:[] for i in includes}
+ tree = explode(root, module, unique, tree, filter_local, not EXCLUDE_MODULE)
+
+ # Remove the already included ones from the tree.
+ for inc in includes:
+ filename = get_filename(inc)
+ tree = remove_from_tree(filename, tree)
+
+ extra = []
+ for (k, v) in tree.items():
+ extra += tree_to_list([], k, tree)
+
+ promoted += promote(extra)
+ promoted = process_list(promoted, lambda x: filter_ignore(x, module))
+ promoted = process_list(promoted, filter_local.proc)
+ promoted = set(promoted)
+ # If a promoted header includes others, remove the rest.
+ for (k, v) in tree.items():
+ if k in promoted:
+ for i in v:
+ promoted.discard(i)
+ includes += [x for x in promoted]
+
+ extra = remove_rare(extra)
+ extra = process_list(extra, lambda x: filter_ignore(x, module))
+ extra = process_list(extra, filter_local.proc)
+ includes += extra
+
+ includes = [x for x in set(includes)]
+ fixes = fixup(includes, module)
+ fixes = map(lambda x: '#include <' + x + '>', fixes)
+
+ includes = map(lambda x: '#include <' + x + '>', includes)
+ sorted = sort_by_category(includes, root, module, filter_local)
+ includes = list(fixes) + sorted
+
+ if len(osname):
+ for i in generic:
+ if i in includes:
+ includes.remove(i)
+
+ groups[osname] = includes
+ if not len(osname):
+ generic = includes
+
+ # Open the old pch and compare its contents
+ # with new includes.
+ # Clobber only if they are different.
+ with open(header, 'r') as f:
+ old_pch_lines = [x.strip() for x in f.readlines()]
+ new_lines = generate_includes(groups)
+ # Find the first include in the old pch.
+ start = -1
+ for i in range(len(old_pch_lines)):
+ if old_pch_lines[i].startswith('#include') or old_pch_lines[i].startswith('#if PCH_LEVEL'):
+ start = i
+ break
+ # Clobber if there is a mismatch.
+ if force_update or start < 0 or (len(old_pch_lines) - start < len(new_lines)):
+ generate(new_lines, libname, header, module)
+ return 0
+ else:
+ for i in range(len(new_lines)):
+ if new_lines[i] != old_pch_lines[start + i]:
+ generate(new_lines, libname, header, module)
+ return 0
+ else:
+ # Identical, but see if new pch removed anything.
+ for i in range(start + len(new_lines), len(old_pch_lines)):
+ if '#include' in old_pch_lines[i]:
+ generate(new_lines, libname, header, module)
+ return 0
+
+ # Didn't update.
+ return 1
+
+if __name__ == '__main__':
+ """ Process all the includes in a Module
+ to make into a PCH file.
+ Run without arguments for unittests,
+ and to see usage.
+ """
+
+ if len(sys.argv) >= 3:
+ status = main()
+ sys.exit(status)
+
+ print('Usage: {} <Module name> <Library name> [options]'.format(sys.argv[0]))
+ print(' Always run from the root of LO repository.\n')
+ print(' Options:')
+ print(' --cutoff=<count> - Threshold for excluding headers.')
+ print(' --exclude:<category> - Exclude category-specific headers.')
+ print(' --include:<category> - Include category-specific headers.')
+ print(' --force - Force updating the pch even when nothing changes.')
+ print(' Categories:')
+ print(' module - Headers in /inc directory of a module.')
+ print(' local - Headers local to a source file.')
+ print(' system - Platform-specific headers.')
+ print(' --silent - print only errors.')
+ print('\nRunning unit-tests...')
+
+
+class TestMethods(unittest.TestCase):
+
+ def test_sanitize(self):
+ self.assertEqual(sanitize('#include "blah/file.cxx"'),
+ '#include <blah/file.cxx>')
+ self.assertEqual(sanitize(' #include\t"blah/file.cxx" '),
+ '#include <blah/file.cxx>')
+ self.assertEqual(sanitize(' '),
+ '')
+
+ def test_filter_ignore(self):
+ self.assertEqual(filter_ignore('blah/file.cxx', 'mod'),
+ '')
+ self.assertEqual(filter_ignore('vector', 'mod'),
+ 'vector')
+ self.assertEqual(filter_ignore('file.cxx', 'mod'),
+ '')
+
+ def test_remove_rare(self):
+ self.assertEqual(remove_rare([]),
+ [])
+
+class TestMakefileParser(unittest.TestCase):
+
+ def setUp(self):
+ global EXCLUDE_SYSTEM
+ EXCLUDE_SYSTEM = False
+
+ def test_parse_singleline_eval(self):
+ source = "$(eval $(call gb_Library_Library,sal))"
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_eval(self):
+ source = """$(eval $(call gb_Library_set_include,sal,\\
+ $$(INCLUDE) \\
+ -I$(SRCDIR)/sal/inc \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_eval_with_if(self):
+ source = """$(eval $(call gb_Library_add_defs,sal,\\
+ $(if $(filter $(OS),iOS), \\
+ -DNO_CHILD_PROCESSES \\
+ ) \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_multiline_add_with_if(self):
+ source = """$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/time \\
+ $(if $(filter DESKTOP,$(BUILD_TYPE)), sal/osl/unx/salinit) \\
+))
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 1)
+ self.assertEqual(groups[''][0], 'sal/osl/unx/time.cxx')
+
+ def test_parse_if_else(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+endif
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 3)
+ self.assertEqual(len(groups['']), 0)
+ self.assertEqual(len(groups['MACOSX']), 1)
+ self.assertEqual(len(groups['!MACOSX']), 1)
+ self.assertEqual(groups['MACOSX'][0], 'sal/osl/mac/mac.cxx')
+ self.assertEqual(groups['!MACOSX'][0], 'sal/osl/unx/uunxapi.cxx')
+
+ def test_parse_nested_if(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+
+ifeq ($(OS),LINUX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/textenc/context \\
+))
+endif
+endif
+"""
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 4)
+ self.assertEqual(len(groups['']), 0)
+ self.assertEqual(len(groups['MACOSX']), 1)
+ self.assertEqual(len(groups['!MACOSX']), 1)
+ self.assertEqual(len(groups['LINUX']), 1)
+ self.assertEqual(groups['MACOSX'][0], 'sal/osl/mac/mac.cxx')
+ self.assertEqual(groups['!MACOSX'][0], 'sal/osl/unx/uunxapi.cxx')
+ self.assertEqual(groups['LINUX'][0], 'sal/textenc/context.cxx')
+
+ def test_parse_exclude_system(self):
+ source = """ifeq ($(OS),MACOSX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/mac/mac \\
+))
+else
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/uunxapi \\
+))
+
+ifeq ($(OS),LINUX)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/textenc/context \\
+))
+endif
+endif
+"""
+ global EXCLUDE_SYSTEM
+ EXCLUDE_SYSTEM = True
+
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+ def test_parse_filter(self):
+ source = """ifneq ($(filter $(OS),MACOSX iOS),)
+$(eval $(call gb_Library_add_exception_objects,sal,\\
+ sal/osl/unx/osxlocale \\
+))
+endif
+"""
+ # Filter is still unsupported.
+ lines = source.split('\n')
+ groups = {'':[]}
+ groups = parse_makefile(groups, lines, 0, None, 0)
+ self.assertEqual(len(groups), 1)
+ self.assertEqual(len(groups['']), 0)
+
+unittest.main()
+
+# vim: set et sw=4 ts=4 expandtab:
diff --git a/bin/update_pch.sh b/bin/update_pch.sh
new file mode 100755
index 000000000..78b4a47e6
--- /dev/null
+++ b/bin/update_pch.sh
@@ -0,0 +1,65 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Usage: update_pch.sh [<module>/inc/pch/precompiled_xxx.hxx]
+# Usage: update_pch.sh [<module>]
+# Invoke: make cmd cmd="./bin/update_pch.sh [..]"
+
+if test -n "$SRC_DIR"; then
+ root="$SRC_DIR"
+else
+ root=`dirname $0`
+ root=`cd $root/.. >/dev/null && pwd`
+fi
+root=`readlink -f $root`
+cd $root
+
+if test -z "$1"; then
+ headers=`ls ./*/inc/pch/precompiled_*.hxx`
+else
+ headers="$@"
+fi
+
+# Split the headers into an array.
+IFS=' ' read -r -a aheaders <<< "$headers"
+hlen=${#aheaders[@]};
+if [ $hlen -gt 1 ]; then
+ if [ -z "$PARALLELISM" ]; then
+ PARALLELISM=0 # Let xargs decide
+ fi
+ echo $headers | xargs -n 1 -P $PARALLELISM $0
+ exit $?
+fi
+
+for x in $headers; do
+ if [ -d "$x" ]; then
+ # We got a directory, find pch files to update.
+ headers=`find $root/$x/ -type f -iname "precompiled_*.hxx"`
+ if test -n "$headers"; then
+ $0 "$headers"
+ fi
+ else
+ header=$x
+ update_msg=`echo $header | sed -e s%$root/%%`
+ module=`readlink -f $header | sed -e s%$root/%% -e s%/.*%%`
+ if [ "$module" = "pch" ]; then
+ continue # PCHs in pch/inc/pch/ are handled manually
+ fi
+ echo updating $update_msg
+ if [ "$module" = "external" ]; then
+ module=external/`readlink -f $header | sed -e s%$root/external/%% -e s%/.*%%`
+ fi
+ libname=`echo $header | sed -e s/.*precompiled_// -e s/\.hxx//`
+
+ ./bin/update_pch "$module" "$libname"
+ fi
+done
+
+#echo Done.
+exit 0
diff --git a/bin/update_pch_autotune.sh b/bin/update_pch_autotune.sh
new file mode 100755
index 000000000..ab9b0a688
--- /dev/null
+++ b/bin/update_pch_autotune.sh
@@ -0,0 +1,229 @@
+#! /bin/bash
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+# Finds, per module and library, the optimal update_pch
+# settings that result in the fastest build time and the
+# smallest intermediate object files (.o/.obj).
+
+# Usage: update_pch_autotune.sh [<module1> <module2>]
+# Invoke: /opt/lo/bin/make cmd cmd="./bin/update_pch_autotune.sh [..]"
+
+# The resulting values may be entered in update_pch
+# to be used for generating PCHs in the future.
+# Run this script after major header changes.
+
+root=`dirname $0`
+root=`cd $root/.. && pwd`
+cd $root
+
+if test -z "$1"; then
+ modules=`ls ./*/inc/pch/precompiled_*.hxx | sed -e s%./%% -e s%/.*%% | uniq`
+else
+ modules="$@"
+fi
+
+if [[ "$OSTYPE" == "cygwin" ]]; then
+ MAKE=/opt/lo/bin/make
+else
+ MAKE=make
+fi
+
+function build()
+{
+ local START=$(date +%s.%N)
+
+ $MAKE -sr "$module" > /dev/null
+ status=$?
+ if [ $status -ne 0 ];
+ then
+ # Spurious failures happen.
+ $MAKE "$module.build" > /dev/null
+ status=$?
+ fi
+
+ local END=$(date +%s.%N)
+ build_time=$(printf %.1f $(echo "$END - $START" | bc))
+
+ size="FAILED"
+ score="FAILED"
+ if [ $status -eq 0 ];
+ then
+ # The total size of the object files.
+ size="$(du -s workdir/CxxObject/$module/ | awk '{print $1}')"
+ # Add the pch file size.
+ filename_rel="workdir/PrecompiledHeader/nodebug/$(basename $header)*"
+ filename_dbg="workdir/PrecompiledHeader/debug/$(basename $header)*"
+ if [[ $filename_rel -nt $filename_dbg ]]; then
+ pch_size="$(du -s $filename_rel | awk '{print $1}' | paste -sd+ | bc)"
+ else
+ pch_size="$(du -s $filename_dbg | awk '{print $1}' | paste -sd+ | bc)"
+ fi
+ size="$(echo "$pch_size + $size" | bc)"
+
+ # Compute a score based on the build time and size.
+ # The shorter the build time, and smaller disk usage, the higher the score.
+ score=$(printf %.2f $(echo "10000 / ($build_time * e($size/1048576))" | bc -l))
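+ # Worked example (illustrative): a 60s build with size=1048576
+ # (1 GiB in KiB du units) scores 10000 / (60 * e(1)) =~ 61.3.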
+ fi
+}
+
+function run()
+{
+ local msg="$module.$libname, ${@:3}, "
+ printf "$msg"
+ ./bin/update_pch "$module" "$libname" "${@:3}" --silent
+ status=$?
+
+ if [ $status -eq 0 ];
+ then
+ build
+
+ summary="$build_time, $size, $score"
+ if [ $status -eq 0 ];
+ then
+ new_best_for_cutoff=$(echo "$score > $best_score_for_cutoff" | bc -l)
+ if [ $new_best_for_cutoff -eq 1 ];
+ then
+ best_score_for_cutoff=$score
+ fi
+
+ new_best=$(echo "$score > $best_score" | bc -l)
+ if [ $new_best -eq 1 ];
+ then
+ best_score=$score
+ best_args="${@:3}"
+ best_time=$build_time
+ best_cutoff=$cutoff
+ summary="$build_time, $size, $score,*"
+ fi
+ fi
+ else
+ # Skip if pch is not updated.
+ summary="0, 0, 0"
+ fi
+
+ echo "$summary"
+}
+
+function args_to_table()
+{
+ local sys="EXCLUDE"
+ local mod="EXCLUDE"
+ local loc="EXCLUDE"
+ local cutoff=0
+ IFS=' ' read -r -a aargs <<< $best_args
+ for index in "${!aargs[@]}"
+ do
+ if [ "${aargs[index]}" = "--include:system" ];
+ then
+ sys="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:system" ];
+ then
+ sys="EXCLUDE"
+ elif [ "${aargs[index]}" = "--include:module" ];
+ then
+ mod="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:module" ];
+ then
+ mod="EXCLUDE"
+ elif [ "${aargs[index]}" = "--include:local" ];
+ then
+ loc="INCLUDE"
+ elif [ "${aargs[index]}" = "--exclude:local" ];
+ then
+ loc="EXCLUDE"
+ elif [[ "${aargs[index]}" == *"cutoff"* ]]
+ then
+ cutoff=$(echo "${aargs[index]}" | grep -Po '\-\-cutoff\=\K\d+')
+ fi
+ done
+
+ local key=$(printf "'%s.%s'" $module $libname)
+ echo "$(printf " %-36s: (%2d, %s, %s, %s), # %5.1f" $key $cutoff $sys $mod $loc $best_time)"
+}
+
+for module in $modules; do
+
+ # Build without pch includes as sanity check.
+ #run "$root" "$module" --cutoff=999
+
+ # Build before updating pch.
+ $MAKE "$module.build" > /dev/null
+ if [ $? -ne 0 ];
+ then
+ # Build with dependencies before updating pch.
+ echo "Failed to build $module, building known state with dependencies..."
+ ./bin/update_pch.sh "$module" > /dev/null
+ $MAKE "$module.clean" > /dev/null
+ $MAKE "$module.all" > /dev/null
+ if [ $? -ne 0 ];
+ then
+ # Build all!
+ echo "Failed to build $module with dependencies, building all..."
+ $MAKE build-nocheck > /dev/null
+ if [ $? -ne 0 ];
+ then
+ >&2 echo "Broken build. Please revert changes and try again."
+ exit 1
+ fi
+ fi
+ fi
+
+ # Find pch files in the module to update.
+ headers=`find $root/$module/ -type f -iname "precompiled_*.hxx"`
+
+ # Each pch belongs to a library.
+ for header in $headers; do
+ libname=`echo $header | sed -e s/.*precompiled_// -e s/\.hxx//`
+ #TODO: Backup the header and restore when last tune fails.
+
+ # Force update on first try below.
+ echo "Autotuning $module.$libname..."
+ ./bin/update_pch "$module" "$libname" --cutoff=999 --silent --force
+
+ best_score=0
+ best_args=""
+ best_time=0
+ best_cutoff=0
+ for i in {1..16}; do
+ cutoff=$i
+ best_score_for_cutoff=0
+ #run "$root" "$module" "--cutoff=$i" --include:system --exclude:module --exclude:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --exclude:module --exclude:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --include:module --exclude:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --include:module --exclude:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --exclude:module --include:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --exclude:module --include:local
+ #run "$root" "$module" "--cutoff=$i" --include:system --include:module --include:local
+ run "$root" "$module" "--cutoff=$i" --exclude:system --include:module --include:local
+
+ if [ $i -gt $((best_cutoff+2)) ];
+ then
+ score_too_low=$(echo "$best_score_for_cutoff < $best_score / 1.10" | bc -l)
+ if [ $score_too_low -eq 1 ];
+ then
+ echo "Score hit low of $best_score_for_cuttof, well below overall best of $best_score. Stopping."
+ break;
+ fi
+ fi
+ done
+
+ ./bin/update_pch "$module" "$libname" $best_args --force --silent
+ echo "> $module.$libname, $best_args, $best_time, $size, $score"
+ echo
+
+ table+=$'\n'
+ table+="$(args_to_table)"
+ done
+
+done
+
+echo "Update the relevant lines in ./bin/update_pch script:"
+>&2 echo "$table"
+
+exit 0
diff --git a/bin/update_pch_bisect b/bin/update_pch_bisect
new file mode 100755
index 000000000..8c86ac3cc
--- /dev/null
+++ b/bin/update_pch_bisect
@@ -0,0 +1,354 @@
+#!/usr/bin/env python3
+# -*- Mode: python; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+"""
+This script is to fix precompiled headers.
+
+This script runs in two modes.
+In one mode, it starts with a header
+that doesn't compile. It finds the
+minimum number of includes in the
+header to remove to get a successful
+run of the command (i.e. compile).
+
+In the second mode, it starts with a
+header that compiles fine but
+contains one or more required
+includes without which it wouldn't
+compile; the script identifies them.
+
+Usage: ./bin/update_pch_bisect ./vcl/inc/pch/precompiled_vcl.hxx "make vcl.build" --find-required --verbose
+"""
+
+import sys
+import re
+import os
+import unittest
+import subprocess
+
+SILENT = True
+FIND_CONFLICTS = True
+
+IGNORE = 0
+GOOD = 1
+TEST_ON = 2
+TEST_OFF = 3
+BAD = 4
+
+def run(command):
+ try:
+ cmd = command.split(' ', 1)
+ status = subprocess.call(cmd, stdout=open(os.devnull, 'w'),
+ stderr=subprocess.STDOUT, close_fds=True)
+ return status == 0
+ except Exception as e:
+ sys.stderr.write('Error: {}\n'.format(e))
+ return False
+
+def update_pch(filename, lines, marks):
+ with open(filename, 'w') as f:
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ f.write(lines[i])
+ else:
+ f.write('//' + lines[i])
+
+def log(*args, **kwargs):
+ global SILENT
+ if not SILENT:
+ print(*args, **kwargs)
+
+def bisect(lines, marks, min, max, update, command):
+ """ Disable half the includes and
+ calls the command.
+ Depending on the result,
+ recurse or return.
+ """
+ global FIND_CONFLICTS
+
+ log('Bisecting [{}, {}].'.format(min+1, max))
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_ON if FIND_CONFLICTS else TEST_OFF
+
+ assume_fail = False
+ if not FIND_CONFLICTS:
+ on_list = [x for x in marks if x in (TEST_ON, GOOD)]
+ assume_fail = (len(on_list) == 0)
+
+ update(lines, marks)
+ if assume_fail or not command():
+ # Failed
+ log('Failed [{}, {}].'.format(min+1, max))
+ if min >= max - 1:
+ if not FIND_CONFLICTS:
+ # Try with this one alone.
+ marks[min] = TEST_ON
+ update(lines, marks)
+ if command():
+ log(' Found @{}: {}'.format(min+1, lines[min].strip('\n')))
+ marks[min] = GOOD
+ return marks
+ else:
+ log(' Found @{}: {}'.format(min+1, lines[min].strip('\n')))
+ # Either way, this one is irrelevant.
+ marks[min] = BAD
+ return marks
+
+ # Bisect
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_OFF if FIND_CONFLICTS else TEST_ON
+
+ half = min + ((max - min) // 2)
+ marks = bisect(lines, marks, min, half, update, command)
+ marks = bisect(lines, marks, half, max, update, command)
+ else:
+ # Success
+ if FIND_CONFLICTS:
+ log(' Good [{}, {}].'.format(min+1, max))
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = GOOD
+
+ return marks
+
+def get_filename(line):
+ """ Strips the line from the
+ '#include' and angled brakets
+ and return the filename only.
+ """
+ return re.sub(r'(.*#include\s*)<(.*)>(.*)', r'\2', line)
+
+def get_marks(lines):
+ marks = []
+ min = -1
+ max = -1
+ for i in range(len(lines)):
+ line = lines[i]
+ if line.startswith('#include'):
+ marks.append(TEST_ON)
+ min = i if min < 0 else min
+ max = i
+ else:
+ marks.append(IGNORE)
+
+ return (marks, min, max+1)
+
+def main():
+
+ global FIND_CONFLICTS
+ global SILENT
+
+ filename = sys.argv[1]
+ command = sys.argv[2]
+
+ for i in range(3, len(sys.argv)):
+ opt = sys.argv[i]
+ if opt == '--find-conflicts':
+ FIND_CONFLICTS = True
+ elif opt == '--find-required':
+ FIND_CONFLICTS = False
+ elif opt == '--verbose':
+ SILENT = False
+ else:
+ sys.stderr.write('Error: Unknown option [{}].\n'.format(opt))
+ return 1
+
+ lines = []
+ with open(filename) as f:
+ lines = f.readlines()
+
+ (marks, min, max) = get_marks(lines)
+
+ # Test preconditions.
+ log('Validating all-excluded state...')
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_OFF
+ update_pch(filename, lines, marks)
+ res = run(command)
+
+ if FIND_CONFLICTS:
+ # Must build all excluded.
+ if not res:
+ sys.stderr.write("Error: broken state when all excluded, fix first and try again.")
+ return 1
+ else:
+ # If builds all excluded, we can't bisect.
+ if res:
+ sys.stderr.write("Done: in good state when all excluded, nothing to do.")
+ return 1
+
+ # Must build all included.
+ log('Validating all-included state...')
+ for i in range(min, max):
+ if marks[i] != IGNORE:
+ marks[i] = TEST_ON
+ update_pch(filename, lines, marks)
+ if not run(command):
+ sys.stderr.write("Error: broken state without modifying, fix first and try again.")
+ return 1
+
+ marks = bisect(lines, marks, min, max+1,
+ lambda l, m: update_pch(filename, l, m),
+ lambda: run(command))
+ if not FIND_CONFLICTS:
+ # Simplify further, as sometimes we can have
+ # false positives due to the benign nature
+ # of includes that are not absolutely required.
+ for i in range(len(marks)):
+ if marks[i] == GOOD:
+ marks[i] = TEST_OFF
+ update_pch(filename, lines, marks)
+ if not run(command):
+ # Revert.
+ marks[i] = GOOD
+ else:
+ marks[i] = BAD
+ elif marks[i] == TEST_OFF:
+ marks[i] = TEST_ON
+
+ update_pch(filename, lines, marks)
+
+ log('')
+ for i in range(len(marks)):
+ if marks[i] == (BAD if FIND_CONFLICTS else GOOD):
+ print("'{}',".format(get_filename(lines[i].strip('\n'))))
+
+ return 0
+
+if __name__ == '__main__':
+
+ if len(sys.argv) in (3, 4, 5):
+ status = main()
+ sys.exit(status)
+
+ print('Usage: {} <pch> <command> [--find-conflicts]|[--find-required] [--verbose]\n'.format(sys.argv[0]))
+ print(' --find-conflicts - Finds all conflicting includes. (Default)')
+ print(' Must compile without any includes.\n')
+ print(' --find-required - Finds all required includes.')
+ print(' Must compile with all includes.\n')
+ print(' --verbose - print noisy progress.')
+ print('Example: ./bin/update_pch_bisect ./vcl/inc/pch/precompiled_vcl.hxx "make vcl.build" --find-required --verbose')
+ print('\nRunning unit-tests...')
+
+
+class TestBisectConflict(unittest.TestCase):
+ TEST = """ /* Test header. */
+#include <memory>
+#include <set>
+#include <algorithm>
+#include <vector>
+/* blah blah */
+"""
+ BAD_LINE = "#include <bad>"
+
+ def setUp(self):
+ global FIND_CONFLICTS
+ FIND_CONFLICTS = True
+
+ def _update_func(self, lines, marks):
+ self.lines = []
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ self.lines.append(lines[i])
+ else:
+ self.lines.append('//' + lines[i])
+
+ def _test_func(self):
+ """ Command function called by bisect.
+ Returns True on Success, False on failure.
+ """
+ # If the bad line is still there, fail.
+ return self.BAD_LINE not in self.lines
+
+ def test_success(self):
+ lines = self.TEST.split('\n')
+ (marks, min, max) = get_marks(lines)
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ self.assertTrue(BAD not in marks)
+
+ def test_conflict(self):
+ lines = self.TEST.split('\n')
+ for pos in range(len(lines) + 1):
+ lines = self.TEST.split('\n')
+ lines.insert(pos, self.BAD_LINE)
+ (marks, min, max) = get_marks(lines)
+
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ for i in range(len(marks)):
+ if i == pos:
+ self.assertEqual(BAD, marks[i])
+ else:
+ self.assertNotEqual(BAD, marks[i])
+
+class TestBisectRequired(unittest.TestCase):
+ TEST = """#include <algorithm>
+#include <set>
+#include <map>
+#include <vector>
+"""
+ REQ_LINE = "#include <req>"
+
+ def setUp(self):
+ global FIND_CONFLICTS
+ FIND_CONFLICTS = False
+
+ def _update_func(self, lines, marks):
+ self.lines = []
+ for i in range(len(marks)):
+ mark = marks[i]
+ if mark <= TEST_ON:
+ self.lines.append(lines[i])
+ else:
+ self.lines.append('//' + lines[i])
+
+ def _test_func(self):
+ """ Command function called by bisect.
+ Returns True on Success, False on failure.
+ """
+ # If the required line is not there, fail.
+ found = self.REQ_LINE in self.lines
+ return found
+
+ def test_success(self):
+ lines = self.TEST.split('\n')
+ (marks, min, max) = get_marks(lines)
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ self.assertTrue(GOOD not in marks)
+
+ def test_required(self):
+ lines = self.TEST.split('\n')
+ for pos in range(len(lines) + 1):
+ lines = self.TEST.split('\n')
+ lines.insert(pos, self.REQ_LINE)
+ (marks, min, max) = get_marks(lines)
+
+ marks = bisect(lines, marks, min, max,
+ lambda l, m: self._update_func(l, m),
+ lambda: self._test_func())
+ for i in range(len(marks)):
+ if i == pos:
+ self.assertEqual(GOOD, marks[i])
+ else:
+ self.assertNotEqual(GOOD, marks[i])
+
+unittest.main()
+
+# vim: set et sw=4 ts=4 expandtab:
diff --git a/bin/upload_symbols.py b/bin/upload_symbols.py
new file mode 100755
index 000000000..277508da7
--- /dev/null
+++ b/bin/upload_symbols.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+# -*- tab-width: 4; indent-tabs-mode: nil; py-indent-offset: 4 -*-
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+import requests, sys
+import platform, configparser
+
+def detect_platform():
+ return platform.system()
+
+def main():
+ if len(sys.argv) < 4:
+ print(sys.argv)
+ print("Invalid number of parameters")
+ print("Usage: upload-symbols.py symbols.zip config.ini \"long explanation\" [--system]")
+ sys.exit(1)
+
+ base_url = "https://crashreport.libreoffice.org/"
+ upload_url = base_url + "upload/"
+ login_url = base_url + "accounts/login/"
+
+ config = configparser.ConfigParser()
+ config.read(sys.argv[2])
+
+ user = config["CrashReport"]["User"]
+ password = config["CrashReport"]["Password"]
+
+ system = detect_platform()
+ files = {'symbols': open(sys.argv[1], 'rb')}
+ data = {'version': sys.argv[3], 'platform': system}
+
+ if len(sys.argv) > 4 and sys.argv[4] == "--system":
+ data['system'] = True
+
+ session = requests.session()
+ session.get(login_url)
+ csrftoken = session.cookies['csrftoken']
+
+ login_data = { 'username': user,'password': password,
+ 'csrfmiddlewaretoken': csrftoken }
+ headers = { "Referer": base_url }
+ r1 = session.post(login_url, headers=headers, data=login_data)
+
+ data['csrfmiddlewaretoken'] = csrftoken
+
+ r = session.post(upload_url, headers=headers, files=files, data=data)
+
+if __name__ == "__main__":
+ main()
+
+# vim: set shiftwidth=4 softtabstop=4 expandtab:
diff --git a/bin/verify-custom-widgets-libs b/bin/verify-custom-widgets-libs
new file mode 100755
index 000000000..7fad02f17
--- /dev/null
+++ b/bin/verify-custom-widgets-libs
@@ -0,0 +1,30 @@
+#!/bin/sh
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# Run this from the source root dir of a completed build to
+# verify that all custom widgets used in our .ui files have
+# their factory method in the library they claim to be in.
+#
+# Under Linux, dlsym will search other locations and find
+# them if they exist elsewhere, but not under Windows, so
+# it's easy to put in the wrong lib name if developing
+# under Linux.
+
+ret=0
+FOO=`git grep -h -r lo- */uiconfig | sed -e "s/<object class=\"//g" | sed -e "s/\".*$//"| sed 's/^[ \t]*//;s/[ \t]*$//'|sort|uniq`
+for foo in $FOO; do
+ lib=$(echo $foo | cut -f1 -d-)
+ symbol=$(echo $foo | cut -f2 -d-)
+ nm -D instdir/program/lib$lib.so | grep make$symbol > /dev/null
+ if [ $? != 0 ]; then
+ echo "$foo exists in a .ui file, but make$symbol is missing from lib$lib.so, Windows will fail to find the symbol and crash"
+ echo " typically make$symbol is in a different library and $foo should have the prefix of that library instead"
+ ret=1
+ fi
+done
+exit $ret
diff --git a/binaryurp/CppunitTest_binaryurp_test-cache.mk b/binaryurp/CppunitTest_binaryurp_test-cache.mk
new file mode 100644
index 000000000..97f5b78f0
--- /dev/null
+++ b/binaryurp/CppunitTest_binaryurp_test-cache.mk
@@ -0,0 +1,22 @@
+# -*- Mode: makefile-gmake; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+$(eval $(call gb_CppunitTest_CppunitTest,binaryurp_test-cache))
+
+$(eval $(call gb_CppunitTest_use_external,binaryurp_test-cache,boost_headers))
+
+$(eval $(call gb_CppunitTest_use_libraries,binaryurp_test-cache,\
+ sal \
+))
+
+$(eval $(call gb_CppunitTest_add_exception_objects,binaryurp_test-cache,\
+ binaryurp/qa/test-cache \
+))
+
+# vim: set noet sw=4 ts=4:
diff --git a/binaryurp/CppunitTest_binaryurp_test-unmarshal.mk b/binaryurp/CppunitTest_binaryurp_test-unmarshal.mk
new file mode 100644
index 000000000..11c70f5b9
--- /dev/null
+++ b/binaryurp/CppunitTest_binaryurp_test-unmarshal.mk
@@ -0,0 +1,33 @@
+# -*- Mode: makefile-gmake; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+$(eval $(call gb_CppunitTest_CppunitTest,binaryurp_test-unmarshal))
+
+$(eval $(call gb_CppunitTest_use_external,binaryurp_test-unmarshal,boost_headers))
+
+$(eval $(call gb_CppunitTest_use_libraries,binaryurp_test-unmarshal,\
+ cppu \
+ cppuhelper \
+ sal \
+ salhelper \
+))
+
+$(eval $(call gb_CppunitTest_use_library_objects,binaryurp_test-unmarshal,\
+ binaryurp \
+))
+
+$(eval $(call gb_CppunitTest_use_api,binaryurp_test-unmarshal,\
+ udkapi \
+))
+
+$(eval $(call gb_CppunitTest_add_exception_objects,binaryurp_test-unmarshal,\
+ binaryurp/qa/test-unmarshal \
+))
+
+# vim: set noet sw=4 ts=4:
diff --git a/binaryurp/IwyuFilter_binaryurp.yaml b/binaryurp/IwyuFilter_binaryurp.yaml
new file mode 100644
index 000000000..e6bf80978
--- /dev/null
+++ b/binaryurp/IwyuFilter_binaryurp.yaml
@@ -0,0 +1,34 @@
+---
+assumeFilename: binaryurp/source/bridge.cxx
+blacklist:
+ binaryurp/source/marshal.hxx:
+ # Don't propose hxx -> h change in URE libs
+ - rtl/byteseq.hxx
+ binaryurp/source/reader.hxx:
+ # Don't propose hxx -> h change in URE libs
+ - rtl/byteseq.hxx
+ binaryurp/source/incomingrequest.hxx:
+ # Don't propose hxx -> h change in URE libs
+ - rtl/byteseq.hxx
+ binaryurp/source/lessoperators.cxx:
+ # Don't propose hxx -> h change in URE libs
+ - rtl/byteseq.hxx
+ binaryurp/source/outgoingrequests.cxx:
+ # Don't propose hxx -> h change in URE libs
+ - rtl/byteseq.hxx
+ # Actually needed
+ - lessoperators.hxx
+ binaryurp/source/bridgefactory.cxx:
+ # Actually needed
+ - com/sun/star/connection/XConnection.hpp
+ - com/sun/star/uno/XComponentContext.hpp
+ - com/sun/star/uno/XInterface.hpp
+ binaryurp/source/bridge.cxx:
+ # Actually needed
+ - com/sun/star/bridge/XInstanceProvider.hpp
+ - com/sun/star/connection/XConnection.hpp
+ - com/sun/star/lang/XEventListener.hpp
+ - com/sun/star/uno/XInterface.hpp
+ binaryurp/source/unmarshal.cxx:
+ # Actually used
+ - vector
diff --git a/binaryurp/Library_binaryurp.mk b/binaryurp/Library_binaryurp.mk
new file mode 100644
index 000000000..51efc35a5
--- /dev/null
+++ b/binaryurp/Library_binaryurp.mk
@@ -0,0 +1,40 @@
+# -*- Mode: makefile-gmake; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+$(eval $(call gb_Library_Library,binaryurp))
+
+$(eval $(call gb_Library_set_componentfile,binaryurp,binaryurp/source/binaryurp))
+
+$(eval $(call gb_Library_use_udk_api,binaryurp))
+
+$(eval $(call gb_Library_use_external,binaryurp,boost_headers))
+
+$(eval $(call gb_Library_use_libraries,binaryurp,\
+ cppu \
+ cppuhelper \
+ sal \
+ salhelper \
+))
+
+$(eval $(call gb_Library_add_exception_objects,binaryurp,\
+ binaryurp/source/binaryany \
+ binaryurp/source/bridge \
+ binaryurp/source/bridgefactory \
+ binaryurp/source/currentcontext \
+ binaryurp/source/incomingrequest \
+ binaryurp/source/lessoperators \
+ binaryurp/source/marshal \
+ binaryurp/source/outgoingrequests \
+ binaryurp/source/proxy \
+ binaryurp/source/reader \
+ binaryurp/source/unmarshal \
+ binaryurp/source/writer \
+))
+
+# vim: set noet sw=4 ts=4:
diff --git a/binaryurp/Makefile b/binaryurp/Makefile
new file mode 100644
index 000000000..ccb1c85a0
--- /dev/null
+++ b/binaryurp/Makefile
@@ -0,0 +1,7 @@
+# -*- Mode: makefile-gmake; tab-width: 4; indent-tabs-mode: t -*-
+
+module_directory:=$(dir $(realpath $(firstword $(MAKEFILE_LIST))))
+
+include $(module_directory)/../solenv/gbuild/partial_build.mk
+
+# vim: set noet sw=4 ts=4:
diff --git a/binaryurp/Module_binaryurp.mk b/binaryurp/Module_binaryurp.mk
new file mode 100644
index 000000000..fcfd303f0
--- /dev/null
+++ b/binaryurp/Module_binaryurp.mk
@@ -0,0 +1,21 @@
+# -*- Mode: makefile-gmake; tab-width: 4; indent-tabs-mode: t -*-
+#
+# This file is part of the LibreOffice project.
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+$(eval $(call gb_Module_Module,binaryurp))
+
+$(eval $(call gb_Module_add_targets,binaryurp,\
+ Library_binaryurp \
+))
+
+$(eval $(call gb_Module_add_check_targets,binaryurp,\
+ CppunitTest_binaryurp_test-cache \
+ CppunitTest_binaryurp_test-unmarshal \
+))
+
+# vim: set noet sw=4 ts=4:
diff --git a/binaryurp/README b/binaryurp/README
new file mode 100644
index 000000000..1f1e08894
--- /dev/null
+++ b/binaryurp/README
@@ -0,0 +1,9 @@
+UNO Remote Protocol (URP). A binary protocol.
+
+UNO provides a protocol called the UNO Remote Protocol (URP) that bridges
+UNO environments. This bridge allows processes and objects to send method
+calls and to receive return values. UNO objects in different environments
+are connected by way of this interprocess bridge. The underlying connection
+is made through a socket or pipe. Remote UNO objects are connected over
+TCP/IP, with URP as the high-level protocol.
+
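+As an illustrative sketch (not part of this module), a remote UNO context
+can be reached over URP from Python, assuming soffice was started with
+--accept="socket,host=localhost,port=2002;urp;":
+
+  import uno
+  local_ctx = uno.getComponentContext()
+  resolver = local_ctx.ServiceManager.createInstanceWithContext(
+      "com.sun.star.bridge.UnoUrlResolver", local_ctx)
+  # Resolve a remote component context via the URP bridge.
+  remote_ctx = resolver.resolve(
+      "uno:socket,host=localhost,port=2002;urp;StarOffice.ComponentContext")
+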
diff --git a/binaryurp/qa/test-cache.cxx b/binaryurp/qa/test-cache.cxx
new file mode 100644
index 000000000..989b103e2
--- /dev/null
+++ b/binaryurp/qa/test-cache.cxx
@@ -0,0 +1,77 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/types.h>
+#include <cppunit/TestAssert.h>
+#include <cppunit/TestFixture.h>
+#include <cppunit/extensions/HelperMacros.h>
+#include <cppunit/plugin/TestPlugIn.h>
+
+#include "../source/cache.hxx"
+
+namespace {
+
+class Test: public CppUnit::TestFixture {
+private:
+ CPPUNIT_TEST_SUITE(Test);
+ CPPUNIT_TEST(testNothingLostFromLruList);
+ CPPUNIT_TEST_SUITE_END();
+
+ void testNothingLostFromLruList();
+};
+
+// cf. jurt/test/com/sun/star/lib/uno/protocols/urp/Cache_Test.java:
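+// Exhaustively enumerates (odometer-style) all sequences a[0..i-1] over the
+// values 0..3, pre-fills a size-4 cache with each sequence, and then checks
+// that adding four fresh values yields slot indices summing to 6 (0+1+2+3),
+// i.e. that no slot was lost from the LRU list: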
+void Test::testNothingLostFromLruList() {
+ int a[8];
+ for (int i = 0; i != int(SAL_N_ELEMENTS(a)); ++i) {
+ for (int j = 0; j != i; ++j) {
+ a[j] = 0;
+ }
+ for (;;) {
+ binaryurp::Cache< int > c(4);
+ for (int k = 0; k != i; ++k) {
+ bool f;
+ c.add(a[k], &f);
+ }
+ bool f;
+ CPPUNIT_ASSERT_EQUAL(
+ 6,
+ c.add(-1, &f) + c.add(-2, &f) + c.add(-3, &f) + c.add(-4, &f));
+ int j = i - 1;
+ while (j >= 0 && a[j] == 3) {
+ --j;
+ }
+ if (j < 0) {
+ break;
+ }
+ ++a[j];
+ for (int k = j + 1; k != i; ++k) {
+ a[k] = 0;
+ }
+ }
+ }
+}
+
+CPPUNIT_TEST_SUITE_REGISTRATION(Test);
+
+}
+
+CPPUNIT_PLUGIN_IMPLEMENT();
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/qa/test-unmarshal.cxx b/binaryurp/qa/test-unmarshal.cxx
new file mode 100644
index 000000000..df3a96d8f
--- /dev/null
+++ b/binaryurp/qa/test-unmarshal.cxx
@@ -0,0 +1,98 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <com/sun/star/io/IOException.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+#include <cppu/unotype.hxx>
+#include <cppunit/TestAssert.h>
+#include <cppunit/TestFixture.h>
+#include <cppunit/extensions/HelperMacros.h>
+#include <cppunit/plugin/TestPlugIn.h>
+#include <rtl/ref.hxx>
+#include <rtl/string.h>
+#include <sal/types.h>
+#include <typelib/typedescription.hxx>
+
+#include "../source/bridge.hxx"
+#include "../source/cache.hxx"
+#include "../source/readerstate.hxx"
+#include "../source/unmarshal.hxx"
+
+namespace {
+
+class Test: public CppUnit::TestFixture {
+private:
+ CPPUNIT_TEST_SUITE(Test);
+ CPPUNIT_TEST(testTypeOfBooleanSequence);
+ CPPUNIT_TEST(testTypeOfVoidSequence);
+ CPPUNIT_TEST_SUITE_END();
+
+ void testTypeOfBooleanSequence();
+
+ void testTypeOfVoidSequence();
+};
+
+void Test::testTypeOfBooleanSequence() {
+ binaryurp::ReaderState state;
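+    // Hand-crafted URP type description: one byte holding the type class
+    // (20 = sequence) with the cache flag (0x80) set, a 16-bit big-endian
+    // cache index (cache::ignore, i.e. do not cache), and the type name as a
+    // length-prefixed ASCII string: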
+ css::uno::Sequence<sal_Int8> buf{
+ static_cast<sal_Int8>(static_cast<sal_uInt8>(20 | 0x80)),
+ // sequence type | cache flag
+ static_cast<sal_Int8>(
+ static_cast<sal_uInt8>(binaryurp::cache::ignore >> 8)),
+ static_cast<sal_Int8>(
+ static_cast<sal_uInt8>(binaryurp::cache::ignore & 0xFF)),
+ RTL_CONSTASCII_LENGTH("[]boolean"),
+ '[', ']', 'b', 'o', 'o', 'l', 'e', 'a', 'n' };
+ binaryurp::Unmarshal m(rtl::Reference< binaryurp::Bridge >(), state, buf);
+ css::uno::TypeDescription t(m.readType());
+ CPPUNIT_ASSERT(
+ t.equals(
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Sequence< bool > >::get())));
+ m.done();
+}
+
+void Test::testTypeOfVoidSequence() {
+ binaryurp::ReaderState state;
+ css::uno::Sequence<sal_Int8> buf{
+ static_cast<sal_Int8>(static_cast<sal_uInt8>(20 | 0x80)),
+ // sequence type | cache flag
+ static_cast<sal_Int8>(
+ static_cast<sal_uInt8>(binaryurp::cache::ignore >> 8)),
+ static_cast<sal_Int8>(
+ static_cast<sal_uInt8>(binaryurp::cache::ignore & 0xFF)),
+ RTL_CONSTASCII_LENGTH("[]void"), '[', ']', 'v', 'o', 'i', 'd' };
+ binaryurp::Unmarshal m(rtl::Reference< binaryurp::Bridge >(), state, buf);
+ try {
+ m.readType();
+ CPPUNIT_FAIL("exception expected");
+ } catch (const css::io::IOException &) {}
+}
+
+CPPUNIT_TEST_SUITE_REGISTRATION(Test);
+
+}
+
+CPPUNIT_PLUGIN_IMPLEMENT();
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/binaryany.cxx b/binaryurp/source/binaryany.cxx
new file mode 100644
index 000000000..af1493331
--- /dev/null
+++ b/binaryurp/source/binaryany.cxx
@@ -0,0 +1,116 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <utility>
+
+#include <typelib/typeclass.h>
+#include <typelib/typedescription.hxx>
+#include <uno/any2.h>
+
+#include "binaryany.hxx"
+
+namespace binaryurp {
+
+namespace {
+
+// Cf. com::sun::star::uno::detail::moveAnyInternals in
+// include/com/sun/star/uno/Any.hxx:
+void moveInternals(uno_Any & from, uno_Any & to) {
+ uno_any_construct(&to, nullptr, nullptr, nullptr);
+ std::swap(from.pType, to.pType);
+ std::swap(from.pData, to.pData);
+ std::swap(from.pReserved, to.pReserved);
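+    // If the value was stored inline in from.pReserved, pData still points
+    // into from's storage after the swap; redirect it into to's own storage: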
+ if (to.pData == &from.pReserved) {
+ to.pData = &to.pReserved;
+ }
+    // This leaves from.pData (where "from" is now VOID) dangling to somewhere (cf.
+ // CONSTRUCT_EMPTY_ANY, cppu/source/uno/prim.hxx), but what's relevant is
+ // only that it isn't a nullptr (as e.g. >>= -> uno_type_assignData ->
+ // _assignData takes a null pSource to mean "construct a default value").
+}
+
+}
+
+BinaryAny::BinaryAny() throw () {
+ uno_any_construct(&data_, nullptr, nullptr, nullptr);
+}
+
+BinaryAny::BinaryAny(css::uno::TypeDescription const & type, void * value)
+ throw ()
+{
+ assert(type.is());
+ uno_any_construct(&data_, value, type.get(), nullptr);
+}
+
+BinaryAny::BinaryAny(uno_Any const & raw) throw () {
+ assert(raw.pType != nullptr);
+ data_.pType = raw.pType;
+ typelib_typedescriptionreference_acquire(data_.pType);
+ data_.pData = raw.pData == &raw.pReserved ? &data_.pReserved : raw.pData;
+ data_.pReserved = raw.pReserved;
+}
+
+BinaryAny::BinaryAny(BinaryAny const & other) throw () {
+ uno_type_any_construct(&data_, other.data_.pData, other.data_.pType, nullptr);
+}
+
+BinaryAny::BinaryAny(BinaryAny && other) throw () {
+ moveInternals(other.data_, data_);
+}
+
+BinaryAny::~BinaryAny() throw () {
+ uno_any_destruct(&data_, nullptr);
+}
+
+BinaryAny & BinaryAny::operator =(BinaryAny const & other) throw () {
+ if (&other != this) {
+ uno_type_any_assign(&data_, other.data_.pData, other.data_.pType, nullptr, nullptr);
+ }
+ return *this;
+}
+
+BinaryAny & BinaryAny::operator =(BinaryAny && other) throw () {
+ uno_any_destruct(&data_, nullptr);
+ moveInternals(other.data_, data_);
+ return *this;
+}
+
+css::uno::TypeDescription BinaryAny::getType() const throw () {
+ return css::uno::TypeDescription(data_.pType);
+}
+
+void * BinaryAny::getValue(css::uno::TypeDescription const & type) const
+ throw ()
+{
+ assert(type.is());
+ assert(
+ type.get()->eTypeClass == typelib_TypeClass_ANY ||
+ type.equals(css::uno::TypeDescription(data_.pType)));
+ return type.get()->eTypeClass == typelib_TypeClass_ANY
+ ? &data_ : data_.pData;
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/binaryany.hxx b/binaryurp/source/binaryany.hxx
new file mode 100644
index 000000000..8422ca1ac
--- /dev/null
+++ b/binaryurp/source/binaryany.hxx
@@ -0,0 +1,69 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_BINARYANY_HXX
+#define INCLUDED_BINARYURP_SOURCE_BINARYANY_HXX
+
+#include <sal/config.h>
+
+#include <uno/any2.h>
+
+namespace com::sun::star::uno { class TypeDescription; }
+
+namespace binaryurp {
+
+class BinaryAny {
+public:
+ BinaryAny() throw ();
+
+ BinaryAny(com::sun::star::uno::TypeDescription const & type, void * value)
+ throw ();
+
+ explicit BinaryAny(uno_Any const & raw) throw ();
+ // takes over raw.pData (but copies raw.pType); raw must not be passed
+ // to uno_any_destruct
+
+ BinaryAny(BinaryAny const & other) throw ();
+
+ BinaryAny(BinaryAny && other) throw ();
+
+ ~BinaryAny() throw ();
+
+ BinaryAny & operator =(BinaryAny const & other) throw ();
+
+ BinaryAny & operator =(BinaryAny && other) throw ();
+
+ uno_Any& get() throw () { return data_; }
+
+ com::sun::star::uno::TypeDescription getType() const throw ();
+
+ void * getValue(com::sun::star::uno::TypeDescription const & type) const
+ throw ();
+
+private:
+ mutable uno_Any data_;
+ // mutable so that getValue() can return a non-const void *, as in turn
+ // required at various places in binary UNO
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/binaryurp.component b/binaryurp/source/binaryurp.component
new file mode 100644
index 000000000..1eff0c2c1
--- /dev/null
+++ b/binaryurp/source/binaryurp.component
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ -->
+
+<component loader="com.sun.star.loader.SharedLibrary" environment="@CPPU_ENV@"
+ prefix="binaryurp" xmlns="http://openoffice.org/2010/uno-components">
+ <implementation name="com.sun.star.comp.bridge.BridgeFactory">
+ <service name="com.sun.star.bridge.BridgeFactory"/>
+ </implementation>
+</component>
diff --git a/binaryurp/source/bridge.cxx b/binaryurp/source/bridge.cxx
new file mode 100644
index 000000000..1be59b933
--- /dev/null
+++ b/binaryurp/source/bridge.cxx
@@ -0,0 +1,1032 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <algorithm>
+#include <cassert>
+#include <cstddef>
+#include <limits>
+#include <memory>
+#include <vector>
+
+#include <com/sun/star/bridge/InvalidProtocolChangeException.hpp>
+#include <com/sun/star/bridge/XBridge.hpp>
+#include <com/sun/star/bridge/XInstanceProvider.hpp>
+#include <com/sun/star/bridge/XProtocolProperties.hpp>
+#include <com/sun/star/connection/XConnection.hpp>
+#include <com/sun/star/io/IOException.hpp>
+#include <com/sun/star/lang/DisposedException.hpp>
+#include <com/sun/star/lang/EventObject.hpp>
+#include <com/sun/star/lang/XEventListener.hpp>
+#include <com/sun/star/uno/Reference.hxx>
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+#include <com/sun/star/uno/XInterface.hpp>
+#include <cppuhelper/exc_hlp.hxx>
+#include <cppuhelper/weak.hxx>
+#include <osl/mutex.hxx>
+#include <osl/thread.hxx>
+#include <rtl/byteseq.hxx>
+#include <rtl/random.h>
+#include <rtl/ref.hxx>
+#include <rtl/string.h>
+#include <rtl/ustring.hxx>
+#include <sal/log.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+#include <uno/dispatcher.hxx>
+#include <uno/environment.hxx>
+#include <uno/lbnames.h>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "bridgefactory.hxx"
+#include "incomingreply.hxx"
+#include "lessoperators.hxx"
+#include "outgoingrequest.hxx"
+#include "outgoingrequests.hxx"
+#include "proxy.hxx"
+#include "reader.hxx"
+
+namespace binaryurp {
+
+namespace {
+
+sal_Int32 random() {
+ sal_Int32 n;
+ rtlRandomPool pool = rtl_random_createPool();
+ rtl_random_getBytes(pool, &n, sizeof n);
+ rtl_random_destroyPool(pool);
+ return n;
+}
+
+OUString toString(css::uno::TypeDescription const & type) {
+ typelib_TypeDescription * d = type.get();
+ assert(d != nullptr && d->pTypeName != nullptr);
+ return OUString(d->pTypeName);
+}
+
+extern "C" void freeProxyCallback(
+ SAL_UNUSED_PARAMETER uno_ExtEnvironment *, void * pProxy)
+{
+ assert(pProxy != nullptr);
+ static_cast< Proxy * >(pProxy)->do_free();
+}
+
+bool isThread(salhelper::Thread * thread) {
+ assert(thread != nullptr);
+ return osl::Thread::getCurrentIdentifier() == thread->getIdentifier();
+}
+
+class AttachThread {
+public:
+ explicit AttachThread(uno_ThreadPool threadPool);
+
+ ~AttachThread();
+
+ const rtl::ByteSequence& getTid() const throw () { return tid_;}
+
+private:
+ AttachThread(const AttachThread&) = delete;
+ AttachThread& operator=(const AttachThread&) = delete;
+
+ uno_ThreadPool threadPool_;
+ rtl::ByteSequence tid_;
+};
+
+AttachThread::AttachThread(uno_ThreadPool threadPool): threadPool_(threadPool) {
+ sal_Sequence * s = nullptr;
+ uno_getIdOfCurrentThread(&s);
+ tid_ = rtl::ByteSequence(s, rtl::BYTESEQ_NOACQUIRE);
+ uno_threadpool_attach(threadPool_);
+}
+
+AttachThread::~AttachThread() {
+ uno_threadpool_detach(threadPool_);
+ uno_releaseIdFromCurrentThread();
+}
+
+
+class PopOutgoingRequest {
+public:
+ PopOutgoingRequest(
+ OutgoingRequests & requests, rtl::ByteSequence const & tid,
+ OutgoingRequest const & request);
+
+ ~PopOutgoingRequest();
+
+ void clear();
+
+private:
+ PopOutgoingRequest(const PopOutgoingRequest&) = delete;
+ PopOutgoingRequest& operator=(const PopOutgoingRequest&) = delete;
+
+ OutgoingRequests & requests_;
+ rtl::ByteSequence tid_;
+ bool cleared_;
+};
+
+PopOutgoingRequest::PopOutgoingRequest(
+ OutgoingRequests & requests, rtl::ByteSequence const & tid,
+ OutgoingRequest const & request):
+ requests_(requests), tid_(tid), cleared_(false)
+{
+ requests_.push(tid_, request);
+}
+
+PopOutgoingRequest::~PopOutgoingRequest() {
+ if (!cleared_) {
+ requests_.pop(tid_);
+ }
+}
+
+void PopOutgoingRequest::clear() {
+ cleared_ = true;
+}
+
+}
+
+struct Bridge::SubStub {
+ com::sun::star::uno::UnoInterfaceReference object;
+
+ sal_uInt32 references;
+};
+
+Bridge::Bridge(
+ rtl::Reference< BridgeFactory > const & factory, OUString const & name,
+ css::uno::Reference< css::connection::XConnection > const & connection,
+ css::uno::Reference< css::bridge::XInstanceProvider > const & provider):
+ factory_(factory), name_(name), connection_(connection),
+ provider_(provider),
+ binaryUno_(UNO_LB_UNO),
+ cppToBinaryMapping_(CPPU_CURRENT_LANGUAGE_BINDING_NAME, UNO_LB_UNO),
+ binaryToCppMapping_(UNO_LB_UNO, CPPU_CURRENT_LANGUAGE_BINDING_NAME),
+ protPropTid_(
+ reinterpret_cast< sal_Int8 const * >(".UrpProtocolPropertiesTid"),
+ RTL_CONSTASCII_LENGTH(".UrpProtocolPropertiesTid")),
+ protPropOid_("UrpProtocolProperties"),
+ protPropType_(
+ cppu::UnoType<
+ css::uno::Reference< css::bridge::XProtocolProperties > >::get()),
+ protPropRequest_("com.sun.star.bridge.XProtocolProperties::requestChange"),
+ protPropCommit_("com.sun.star.bridge.XProtocolProperties::commitChange"),
+ state_(STATE_INITIAL), threadPool_(nullptr), currentContextMode_(false),
+ proxies_(0), calls_(0), normalCall_(false), activeCalls_(0),
+ mode_(MODE_REQUESTED)
+{
+ assert(factory.is() && connection.is());
+ if (!binaryUno_.is()) {
+ throw css::uno::RuntimeException("URP: no binary UNO environment");
+ }
+ if (!(cppToBinaryMapping_.is() && binaryToCppMapping_.is())) {
+ throw css::uno::RuntimeException("URP: no C++ UNO mapping");
+ }
+ passive_.set();
+}
+
+void Bridge::start() {
+    rtl::Reference< Reader > r(new Reader(this));
+    rtl::Reference< Writer > w(new Writer(this));
+ {
+ osl::MutexGuard g(mutex_);
+ assert(
+ state_ == STATE_INITIAL && threadPool_ == nullptr && !writer_.is() &&
+ !reader_.is());
+ threadPool_ = uno_threadpool_create();
+ assert(threadPool_ != nullptr);
+ reader_ = r;
+ writer_ = w;
+ state_ = STATE_STARTED;
+ }
+ // It is important to call reader_->launch() last here; both
+ // Writer::execute and Reader::execute can call Bridge::terminate, but
+ // Writer::execute is initially blocked in unblocked_.wait() until
+ // Reader::execute has called bridge_->sendRequestChangeRequest(), so
+ // effectively only reader_->launch() can lead to an early call to
+ // Bridge::terminate
+ w->launch();
+ r->launch();
+}
+
+void Bridge::terminate(bool final) {
+ uno_ThreadPool tp;
+ // Make sure function-local variables (Stubs s, etc.) are destroyed before
+ // the final uno_threadpool_destroy/threadPool_ = 0:
+ {
+ rtl::Reference< Reader > r;
+ rtl::Reference< Writer > w;
+ bool joinW;
+ Listeners ls;
+ {
+ osl::ClearableMutexGuard g(mutex_);
+ switch (state_) {
+ case STATE_INITIAL: // via ~Bridge -> dispose -> terminate
+ case STATE_FINAL:
+ return;
+ case STATE_STARTED:
+ break;
+ case STATE_TERMINATED:
+ if (final) {
+ g.clear();
+ terminated_.wait();
+ {
+ osl::MutexGuard g2(mutex_);
+ tp = threadPool_;
+ threadPool_ = nullptr;
+ if (reader_.is()) {
+ if (!isThread(reader_.get())) {
+ r = reader_;
+ }
+ reader_.clear();
+ }
+ if (writer_.is()) {
+ if (!isThread(writer_.get())) {
+ w = writer_;
+ }
+ writer_.clear();
+ }
+ state_ = STATE_FINAL;
+ }
+ assert(!(r.is() && w.is()));
+ if (r.is()) {
+ r->join();
+ } else if (w.is()) {
+ w->join();
+ }
+ if (tp != nullptr) {
+ uno_threadpool_destroy(tp);
+ }
+ }
+ return;
+ }
+ tp = threadPool_;
+ assert(!(final && isThread(reader_.get())));
+ if (!isThread(reader_.get())) {
+ std::swap(reader_, r);
+ }
+ w = writer_;
+ joinW = !isThread(writer_.get());
+ assert(!final || joinW);
+ if (joinW) {
+ writer_.clear();
+ }
+ ls.swap(listeners_);
+ state_ = final ? STATE_FINAL : STATE_TERMINATED;
+ }
+ try {
+ connection_->close();
+ } catch (const css::io::IOException & e) {
+ SAL_INFO("binaryurp", "caught IO exception '" << e << '\'');
+ }
+ assert(w.is());
+ w->stop();
+ if (r.is()) {
+ r->join();
+ }
+ if (joinW) {
+ w->join();
+ }
+ assert(tp != nullptr);
+ uno_threadpool_dispose(tp);
+ Stubs s;
+ {
+ osl::MutexGuard g(mutex_);
+ s.swap(stubs_);
+ }
+ for (auto & stub : s)
+ {
+ for (auto & item : stub.second)
+ {
+ SAL_INFO(
+ "binaryurp",
+ "stub '" << stub.first << "', '" << toString(item.first)
+ << "' still mapped at Bridge::terminate");
+ binaryUno_.get()->pExtEnv->revokeInterface(
+ binaryUno_.get()->pExtEnv, item.second.object.get());
+ }
+ }
+ factory_->removeBridge(this);
+ for (auto const& listener : ls)
+ {
+ try {
+ listener->disposing(
+ css::lang::EventObject(
+ static_cast< cppu::OWeakObject * >(this)));
+ } catch (const css::uno::RuntimeException & e) {
+ SAL_WARN("binaryurp", "caught " << e);
+ }
+ }
+ }
+ if (final) {
+ uno_threadpool_destroy(tp);
+ }
+ {
+ osl::MutexGuard g(mutex_);
+ if (final) {
+ threadPool_ = nullptr;
+ }
+ }
+ terminated_.set();
+}
+
+
+BinaryAny Bridge::mapCppToBinaryAny(css::uno::Any const & cppAny) {
+ css::uno::Any in(cppAny);
+ BinaryAny out;
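+    // Manually destroy the default-constructed VOID any, so that
+    // uno_copyAndConvertData can construct the converted value directly into
+    // its storage: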
+ out.~BinaryAny();
+ uno_copyAndConvertData(
+ &out.get(), &in,
+ css::uno::TypeDescription(cppu::UnoType< css::uno::Any >::get()).get(),
+ cppToBinaryMapping_.get());
+ return out;
+}
+
+uno_ThreadPool Bridge::getThreadPool() {
+ osl::MutexGuard g(mutex_);
+ checkDisposed();
+ assert(threadPool_ != nullptr);
+ return threadPool_;
+}
+
+rtl::Reference< Writer > Bridge::getWriter() {
+ osl::MutexGuard g(mutex_);
+ checkDisposed();
+ assert(writer_.is());
+ return writer_;
+}
+
+css::uno::UnoInterfaceReference Bridge::registerIncomingInterface(
+ OUString const & oid, css::uno::TypeDescription const & type)
+{
+ assert(type.is());
+ if (oid.isEmpty()) {
+ return css::uno::UnoInterfaceReference();
+ }
+ css::uno::UnoInterfaceReference obj(findStub(oid, type));
+ if (!obj.is()) {
+ binaryUno_.get()->pExtEnv->getRegisteredInterface(
+ binaryUno_.get()->pExtEnv,
+ reinterpret_cast< void ** >(&obj.m_pUnoI), oid.pData,
+ reinterpret_cast< typelib_InterfaceTypeDescription * >(type.get()));
+ if (obj.is()) {
+ makeReleaseCall(oid, type);
+ } else {
+ obj.set(new Proxy(this, oid, type), SAL_NO_ACQUIRE);
+ {
+ osl::MutexGuard g(mutex_);
+ assert(proxies_ < std::numeric_limits< std::size_t >::max());
+ ++proxies_;
+ }
+ binaryUno_.get()->pExtEnv->registerProxyInterface(
+ binaryUno_.get()->pExtEnv,
+ reinterpret_cast< void ** >(&obj.m_pUnoI), &freeProxyCallback,
+ oid.pData,
+ reinterpret_cast< typelib_InterfaceTypeDescription * >(
+ type.get()));
+ }
+ }
+ return obj;
+}
+
+OUString Bridge::registerOutgoingInterface(
+ css::uno::UnoInterfaceReference const & object,
+ css::uno::TypeDescription const & type)
+{
+ assert(type.is());
+ if (!object.is()) {
+ return OUString();
+ }
+ OUString oid;
+ if (!Proxy::isProxy(this, object, &oid)) {
+ binaryUno_.get()->pExtEnv->getObjectIdentifier(
+ binaryUno_.get()->pExtEnv, &oid.pData, object.get());
+ osl::MutexGuard g(mutex_);
+ Stubs::iterator i(stubs_.find(oid));
+ Stub newStub;
+ Stub * stub = i == stubs_.end() ? &newStub : &i->second;
+ Stub::iterator j(stub->find(type));
+ //TODO: Release sub-stub if it is not successfully sent to remote side
+ // (otherwise, stub will leak until terminate()):
+ if (j == stub->end()) {
+ j = stub->emplace(type, SubStub()).first;
+ if (stub == &newStub) {
+ i = stubs_.emplace(oid, Stub()).first;
+ std::swap(i->second, newStub);
+ j = i->second.find(type);
+ assert(j != i->second.end());
+ }
+ j->second.object = object;
+ j->second.references = 1;
+ binaryUno_.get()->pExtEnv->registerInterface(
+ binaryUno_.get()->pExtEnv,
+ reinterpret_cast< void ** >(&j->second.object.m_pUnoI),
+ oid.pData,
+ reinterpret_cast< typelib_InterfaceTypeDescription * >(
+ type.get()));
+ } else {
+ assert(stub != &newStub);
+ if (j->second.references == SAL_MAX_UINT32) {
+ throw css::uno::RuntimeException(
+ "URP: stub reference count overflow");
+ }
+ ++j->second.references;
+ }
+ }
+ return oid;
+}
+
+css::uno::UnoInterfaceReference Bridge::findStub(
+ OUString const & oid, css::uno::TypeDescription const & type)
+{
+ assert(!oid.isEmpty() && type.is());
+ osl::MutexGuard g(mutex_);
+ Stubs::iterator i(stubs_.find(oid));
+ if (i != stubs_.end()) {
+ Stub::iterator j(i->second.find(type));
+ if (j != i->second.end()) {
+ return j->second.object;
+ }
+ for (auto const& item : i->second)
+ {
+ if (typelib_typedescription_isAssignableFrom(
+ type.get(), item.first.get()))
+ {
+ return item.second.object;
+ }
+ }
+ }
+ return css::uno::UnoInterfaceReference();
+}
+
+void Bridge::releaseStub(
+ OUString const & oid, css::uno::TypeDescription const & type)
+{
+ assert(!oid.isEmpty() && type.is());
+ css::uno::UnoInterfaceReference obj;
+ bool unused;
+ {
+ osl::MutexGuard g(mutex_);
+ Stubs::iterator i(stubs_.find(oid));
+ if (i == stubs_.end()) {
+ throw css::uno::RuntimeException("URP: release unknown stub");
+ }
+ Stub::iterator j(i->second.find(type));
+ if (j == i->second.end()) {
+ throw css::uno::RuntimeException("URP: release unknown stub");
+ }
+ assert(j->second.references > 0);
+ --j->second.references;
+ if (j->second.references == 0) {
+ obj = j->second.object;
+ i->second.erase(j);
+ if (i->second.empty()) {
+ stubs_.erase(i);
+ }
+ }
+ unused = becameUnused();
+ }
+ if (obj.is()) {
+ binaryUno_.get()->pExtEnv->revokeInterface(
+ binaryUno_.get()->pExtEnv, obj.get());
+ }
+ terminateWhenUnused(unused);
+}
+
+void Bridge::resurrectProxy(Proxy & proxy) {
+ uno_Interface * p = &proxy;
+ binaryUno_.get()->pExtEnv->registerProxyInterface(
+ binaryUno_.get()->pExtEnv,
+ reinterpret_cast< void ** >(&p), &freeProxyCallback,
+ proxy.getOid().pData,
+ reinterpret_cast< typelib_InterfaceTypeDescription * >(
+ proxy.getType().get()));
+ assert(p == &proxy);
+}
+
+void Bridge::revokeProxy(Proxy & proxy) {
+ binaryUno_.get()->pExtEnv->revokeInterface(
+ binaryUno_.get()->pExtEnv, &proxy);
+}
+
+void Bridge::freeProxy(Proxy & proxy) {
+ try {
+ makeReleaseCall(proxy.getOid(), proxy.getType());
+ } catch (const css::uno::RuntimeException & e) {
+ SAL_INFO(
+ "binaryurp", "caught runtime exception '" << e << '\'');
+ } catch (const std::exception & e) {
+ SAL_WARN("binaryurp", "caught C++ exception '" << e.what() << '\'');
+ }
+ bool unused;
+ {
+ osl::MutexGuard g(mutex_);
+ assert(proxies_ > 0);
+ --proxies_;
+ unused = becameUnused();
+ }
+ terminateWhenUnused(unused);
+}
+
+void Bridge::incrementCalls(bool normalCall) throw () {
+ osl::MutexGuard g(mutex_);
+ assert(calls_ < std::numeric_limits< std::size_t >::max());
+ ++calls_;
+ normalCall_ |= normalCall;
+}
+
+void Bridge::decrementCalls() {
+ bool unused;
+ {
+ osl::MutexGuard g(mutex_);
+ assert(calls_ > 0);
+ --calls_;
+ unused = becameUnused();
+ }
+ terminateWhenUnused(unused);
+}
+
+void Bridge::incrementActiveCalls() throw () {
+ osl::MutexGuard g(mutex_);
+ assert(
+ activeCalls_ <= calls_ &&
+ activeCalls_ < std::numeric_limits< std::size_t >::max());
+ ++activeCalls_;
+ passive_.reset();
+}
+
+void Bridge::decrementActiveCalls() throw () {
+ osl::MutexGuard g(mutex_);
+ assert(activeCalls_ <= calls_ && activeCalls_ > 0);
+ --activeCalls_;
+ if (activeCalls_ == 0) {
+ passive_.set();
+ }
+}
+
+bool Bridge::makeCall(
+ OUString const & oid, css::uno::TypeDescription const & member,
+ bool setter, std::vector< BinaryAny > const & inArguments,
+ BinaryAny * returnValue, std::vector< BinaryAny > * outArguments)
+{
+ std::unique_ptr< IncomingReply > resp;
+ {
+ uno_ThreadPool tp = getThreadPool();
+ AttachThread att(tp);
+ PopOutgoingRequest pop(
+ outgoingRequests_, att.getTid(),
+ OutgoingRequest(OutgoingRequest::KIND_NORMAL, member, setter));
+ sendRequest(
+ att.getTid(), oid, css::uno::TypeDescription(), member,
+ inArguments);
+ pop.clear();
+ incrementCalls(true);
+ incrementActiveCalls();
+ void * job;
+ uno_threadpool_enter(tp, &job);
+ resp.reset(static_cast< IncomingReply * >(job));
+ decrementActiveCalls();
+ decrementCalls();
+ }
+ if (resp == nullptr)
+ {
+ throw css::lang::DisposedException(
+ "Binary URP bridge disposed during call",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ *returnValue = resp->returnValue;
+ if (!resp->exception) {
+ *outArguments = resp->outArguments;
+ }
+ return resp->exception;
+}
+
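+// Protocol-property negotiation (requestChange/commitChange), in short: both
+// sides send a requestChange request carrying a random sal_Int32.  A side
+// answers 1 if the peer's number is greater (the peer may commit), 0 if its
+// own number is greater (it will commit itself), and -1 on a tie (both sides
+// retry with fresh random numbers).  The side whose requestChange is answered
+// with 1 sends commitChange proposing the CurrentContext protocol property,
+// while the other side waits for that commitChange: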
+void Bridge::sendRequestChangeRequest() {
+ assert(mode_ == MODE_REQUESTED);
+ random_ = random();
+ std::vector< BinaryAny > a;
+ a.emplace_back(
+ css::uno::TypeDescription(cppu::UnoType< sal_Int32 >::get()),
+ &random_);
+ sendProtPropRequest(OutgoingRequest::KIND_REQUEST_CHANGE, a);
+}
+
+void Bridge::handleRequestChangeReply(
+ bool exception, BinaryAny const & returnValue)
+{
+ try {
+ throwException(exception, returnValue);
+ } catch (css::uno::RuntimeException & e) {
+ // Before OOo 2.2, Java URP would throw a RuntimeException when
+ // receiving a requestChange message (see i#35277 "Java URP: Support
+ // Manipulation of Protocol Properties"):
+ if (mode_ != MODE_REQUESTED) {
+ throw;
+ }
+ SAL_WARN(
+ "binaryurp",
+ "requestChange caught " << e << " in state 'requested'");
+ mode_ = MODE_NORMAL;
+ getWriter()->unblock();
+ decrementCalls();
+ return;
+ }
+ sal_Int32 n = *static_cast< sal_Int32 * >(
+ returnValue.getValue(
+ css::uno::TypeDescription(cppu::UnoType< sal_Int32 >::get())));
+ sal_Int32 exp = 0;
+ switch (mode_) {
+ case MODE_REQUESTED:
+ case MODE_REPLY_1:
+ exp = 1;
+ break;
+ case MODE_REPLY_MINUS1:
+ exp = -1;
+ mode_ = MODE_REQUESTED;
+ break;
+ case MODE_REPLY_0:
+ exp = 0;
+ mode_ = MODE_WAIT;
+ break;
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ if (n != exp) {
+ throw css::uno::RuntimeException(
+ "URP: requestChange reply with unexpected return value received",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ decrementCalls();
+ switch (exp) {
+ case -1:
+ sendRequestChangeRequest();
+ break;
+ case 0:
+ break;
+ case 1:
+ sendCommitChangeRequest();
+ break;
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+}
+
+void Bridge::handleCommitChangeReply(
+ bool exception, BinaryAny const & returnValue)
+{
+ bool bCcMode = true;
+ try {
+ throwException(exception, returnValue);
+ } catch (const css::bridge::InvalidProtocolChangeException &) {
+ bCcMode = false;
+ }
+ if (bCcMode) {
+ setCurrentContextMode();
+ }
+ assert(mode_ == MODE_REQUESTED || mode_ == MODE_REPLY_1);
+ mode_ = MODE_NORMAL;
+ getWriter()->unblock();
+ decrementCalls();
+}
+
+void Bridge::handleRequestChangeRequest(
+ rtl::ByteSequence const & tid, std::vector< BinaryAny > const & inArguments)
+{
+ assert(inArguments.size() == 1);
+ switch (mode_) {
+ case MODE_REQUESTED:
+ {
+ sal_Int32 n2 = *static_cast< sal_Int32 * >(
+ inArguments[0].getValue(
+ css::uno::TypeDescription(
+ cppu::UnoType< sal_Int32 >::get())));
+ sal_Int32 ret;
+ if (n2 > random_) {
+ ret = 1;
+ mode_ = MODE_REPLY_0;
+ } else if (n2 == random_) {
+ ret = -1;
+ mode_ = MODE_REPLY_MINUS1;
+ } else {
+ ret = 0;
+ mode_ = MODE_REPLY_1;
+ }
+ getWriter()->sendDirectReply(
+ tid, protPropRequest_, false,
+ BinaryAny(
+ css::uno::TypeDescription(
+ cppu::UnoType< sal_Int32 >::get()),
+ &ret),
+ std::vector< BinaryAny >());
+ break;
+ }
+ case MODE_NORMAL:
+ {
+ mode_ = MODE_NORMAL_WAIT;
+ sal_Int32 ret = 1;
+ getWriter()->queueReply(
+ tid, protPropRequest_, false, false,
+ BinaryAny(
+ css::uno::TypeDescription(
+ cppu::UnoType< sal_Int32 >::get()),
+ &ret),
+ std::vector< BinaryAny >(), false);
+ break;
+ }
+ default:
+ throw css::uno::RuntimeException(
+ "URP: unexpected requestChange request received",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+}
+
+void Bridge::handleCommitChangeRequest(
+ rtl::ByteSequence const & tid, std::vector< BinaryAny > const & inArguments)
+{
+ bool bCcMode = false;
+ bool bExc = false;
+ BinaryAny ret;
+ assert(inArguments.size() == 1);
+ css::uno::Sequence< css::bridge::ProtocolProperty > s;
+ [[maybe_unused]] bool ok = (mapBinaryToCppAny(inArguments[0]) >>= s);
+ assert(ok);
+ for (const auto & pp : std::as_const(s)) {
+ if (pp.Name == "CurrentContext") {
+ bCcMode = true;
+ } else {
+ bCcMode = false;
+ bExc = true;
+ ret = mapCppToBinaryAny(
+ css::uno::Any(
+ css::bridge::InvalidProtocolChangeException(
+ "InvalidProtocolChangeException",
+ css::uno::Reference< css::uno::XInterface >(), pp,
+ 1)));
+ break;
+ }
+ }
+ switch (mode_) {
+ case MODE_WAIT:
+ getWriter()->sendDirectReply(
+ tid, protPropCommit_, bExc, ret, std::vector< BinaryAny >());
+ if (bCcMode) {
+ setCurrentContextMode();
+ mode_ = MODE_NORMAL;
+ getWriter()->unblock();
+ } else {
+ mode_ = MODE_REQUESTED;
+ sendRequestChangeRequest();
+ }
+ break;
+ case MODE_NORMAL_WAIT:
+ getWriter()->queueReply(
+ tid, protPropCommit_, false, false, ret, std::vector< BinaryAny >(),
+ bCcMode);
+ mode_ = MODE_NORMAL;
+ break;
+ default:
+ throw css::uno::RuntimeException(
+ "URP: unexpected commitChange request received",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+}
+
+OutgoingRequest Bridge::lastOutgoingRequest(rtl::ByteSequence const & tid) {
+ OutgoingRequest req(outgoingRequests_.top(tid));
+ outgoingRequests_.pop(tid);
+ return req;
+}
+
+bool Bridge::isProtocolPropertiesRequest(
+ OUString const & oid, css::uno::TypeDescription const & type) const
+{
+ return oid == protPropOid_ && type.equals(protPropType_);
+}
+
+void Bridge::setCurrentContextMode() {
+ osl::MutexGuard g(mutex_);
+ currentContextMode_ = true;
+}
+
+bool Bridge::isCurrentContextMode() {
+ osl::MutexGuard g(mutex_);
+ return currentContextMode_;
+}
+
+Bridge::~Bridge() {
+#if OSL_DEBUG_LEVEL > 0
+ {
+ osl::MutexGuard g(mutex_);
+ SAL_WARN_IF(
+ state_ == STATE_STARTED || state_ == STATE_TERMINATED, "binaryurp",
+ "undisposed bridge, potential deadlock ahead");
+ }
+#endif
+ dispose();
+}
+
+css::uno::Reference< css::uno::XInterface > Bridge::getInstance(
+ OUString const & sInstanceName)
+{
+ if (sInstanceName.isEmpty()) {
+ throw css::uno::RuntimeException(
+ "XBridge::getInstance sInstanceName must be non-empty",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ for (sal_Int32 i = 0; i != sInstanceName.getLength(); ++i) {
+ if (sInstanceName[i] > 0x7F) {
+ throw css::uno::RuntimeException(
+ "XBridge::getInstance sInstanceName contains non-ASCII"
+ " character");
+ }
+ }
+ css::uno::TypeDescription ifc(cppu::UnoType<css::uno::XInterface>::get());
+ typelib_TypeDescription * p = ifc.get();
+ std::vector< BinaryAny > inArgs;
+ inArgs.emplace_back(
+ css::uno::TypeDescription(cppu::UnoType< css::uno::Type >::get()),
+ &p);
+ BinaryAny ret;
+ std::vector< BinaryAny> outArgs;
+ bool bExc = makeCall(
+ sInstanceName,
+ css::uno::TypeDescription(
+ "com.sun.star.uno.XInterface::queryInterface"),
+ false, inArgs, &ret, &outArgs);
+ throwException(bExc, ret);
+ return css::uno::Reference< css::uno::XInterface >(
+ static_cast< css::uno::XInterface * >(
+ binaryToCppMapping_.mapInterface(
+ *static_cast< uno_Interface ** >(ret.getValue(ifc)),
+ ifc.get())),
+ SAL_NO_ACQUIRE);
+}
+
+OUString Bridge::getName() {
+ return name_;
+}
+
+OUString Bridge::getDescription() {
+ OUString b = name_ + ":" + connection_->getDescription();
+ return b;
+}
+
+void Bridge::dispose() {
+ // For terminate(true) not to deadlock, an external protocol must ensure
+ // that dispose is not called from a thread pool worker thread (that dispose
+ // is never called from the reader or writer thread is already ensured
+ // internally):
+ terminate(true);
+ // OOo expects dispose to not return while there are still remote calls in
+ // progress; an external protocol must ensure that dispose is not called
+ // from within an incoming or outgoing remote call, as passive_.wait() would
+ // otherwise deadlock:
+ passive_.wait();
+}
+
+void Bridge::addEventListener(
+ css::uno::Reference< css::lang::XEventListener > const & xListener)
+{
+ assert(xListener.is());
+ {
+ osl::MutexGuard g(mutex_);
+ assert(state_ != STATE_INITIAL);
+ if (state_ == STATE_STARTED) {
+ listeners_.push_back(xListener);
+ return;
+ }
+ }
+ xListener->disposing(
+ css::lang::EventObject(static_cast< cppu::OWeakObject * >(this)));
+}
+
+void Bridge::removeEventListener(
+ css::uno::Reference< css::lang::XEventListener > const & aListener)
+{
+ osl::MutexGuard g(mutex_);
+ Listeners::iterator i(
+ std::find(listeners_.begin(), listeners_.end(), aListener));
+ if (i != listeners_.end()) {
+ listeners_.erase(i);
+ }
+}
+
+void Bridge::sendCommitChangeRequest() {
+ assert(mode_ == MODE_REQUESTED || mode_ == MODE_REPLY_1);
+ css::uno::Sequence< css::bridge::ProtocolProperty > s(1);
+ s[0].Name = "CurrentContext";
+ std::vector< BinaryAny > a;
+ a.push_back(mapCppToBinaryAny(css::uno::Any(s)));
+ sendProtPropRequest(OutgoingRequest::KIND_COMMIT_CHANGE, a);
+}
+
+void Bridge::sendProtPropRequest(
+ OutgoingRequest::Kind kind, std::vector< BinaryAny > const & inArguments)
+{
+ assert(
+ kind == OutgoingRequest::KIND_REQUEST_CHANGE ||
+ kind == OutgoingRequest::KIND_COMMIT_CHANGE);
+ incrementCalls(false);
+ css::uno::TypeDescription member(
+ kind == OutgoingRequest::KIND_REQUEST_CHANGE
+ ? protPropRequest_ : protPropCommit_);
+ PopOutgoingRequest pop(
+ outgoingRequests_, protPropTid_, OutgoingRequest(kind, member, false));
+ getWriter()->sendDirectRequest(
+ protPropTid_, protPropOid_, protPropType_, member, inArguments);
+ pop.clear();
+}
+
+void Bridge::makeReleaseCall(
+ OUString const & oid, css::uno::TypeDescription const & type)
+{
+ AttachThread att(getThreadPool());
+ sendRequest(
+ att.getTid(), oid, type,
+ css::uno::TypeDescription("com.sun.star.uno.XInterface::release"),
+ std::vector< BinaryAny >());
+}
+
+void Bridge::sendRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ css::uno::TypeDescription const & type,
+ css::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments)
+{
+ getWriter()->queueRequest(tid, oid, type, member, inArguments);
+}
+
+void Bridge::throwException(bool exception, BinaryAny const & value) {
+ if (exception) {
+ cppu::throwException(mapBinaryToCppAny(value));
+ }
+}
+
+css::uno::Any Bridge::mapBinaryToCppAny(BinaryAny const & binaryAny) {
+ BinaryAny in(binaryAny);
+ css::uno::Any out;
+ out.~Any();
+ uno_copyAndConvertData(
+ &out, &in.get(),
+ css::uno::TypeDescription(cppu::UnoType< css::uno::Any >::get()).get(),
+ binaryToCppMapping_.get());
+ return out;
+}
+
+bool Bridge::becameUnused() const {
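+    // normalCall_ ensures that a bridge across which no normal call has been
+    // made yet (only protocol-property handshake traffic) is not torn down as
+    // unused immediately after creation: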
+ return stubs_.empty() && proxies_ == 0 && calls_ == 0 && normalCall_;
+}
+
+void Bridge::terminateWhenUnused(bool unused) {
+ if (unused) {
+ // That the current thread considers the bridge unused implies that it
+ // is not within an incoming or outgoing remote call (so calling
+ // terminate cannot lead to deadlock):
+ terminate(false);
+ }
+}
+
+void Bridge::checkDisposed() {
+ assert(state_ != STATE_INITIAL);
+ if (state_ != STATE_STARTED) {
+ throw css::lang::DisposedException(
+ "Binary URP bridge already disposed",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/bridge.hxx b/binaryurp/source/bridge.hxx
new file mode 100644
index 000000000..d6528a3f1
--- /dev/null
+++ b/binaryurp/source/bridge.hxx
@@ -0,0 +1,283 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_BRIDGE_HXX
+#define INCLUDED_BINARYURP_SOURCE_BRIDGE_HXX
+
+#include <sal/config.h>
+
+#include <cstddef>
+#include <map>
+#include <vector>
+
+#include <com/sun/star/bridge/XBridge.hpp>
+#include <com/sun/star/lang/XComponent.hpp>
+#include <com/sun/star/uno/Reference.hxx>
+#include <cppuhelper/implbase.hxx>
+#include <osl/conditn.hxx>
+#include <osl/mutex.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <uno/environment.hxx>
+#include <uno/mapping.hxx>
+#include <uno/threadpool.h>
+
+#include "outgoingrequest.hxx"
+#include "outgoingrequests.hxx"
+#include "writer.hxx"
+
+namespace binaryurp {
+ class BinaryAny;
+ class BridgeFactory;
+ class Proxy;
+ class Reader;
+}
+namespace com::sun::star {
+ namespace bridge { class XInstanceProvider; }
+ namespace connection { class XConnection; }
+ namespace lang { class XEventListener; }
+ namespace uno {
+ class Any;
+ class TypeDescription;
+ class UnoInterfaceReference;
+ class XInterface;
+ }
+}
+namespace rtl { class ByteSequence; }
+
+namespace binaryurp {
+
+class Bridge:
+ public cppu::WeakImplHelper<
+ com::sun::star::bridge::XBridge, com::sun::star::lang::XComponent >
+{
+public:
+ Bridge(
+ rtl::Reference< BridgeFactory > const & factory,
+ OUString const & name,
+ com::sun::star::uno::Reference<
+ com::sun::star::connection::XConnection > const & connection,
+ com::sun::star::uno::Reference<
+ com::sun::star::bridge::XInstanceProvider > const & provider);
+
+ void start();
+
+ // Internally waits for all incoming and outgoing remote calls to terminate,
+ // so must not be called from within such a call; when final is true, also
+ // joins all remaining threads (reader, writer, and worker threads from the
+ // thread pool), so must not be called with final set to true from such a
+ // thread:
+ void terminate(bool final);
+
+ const com::sun::star::uno::Reference< com::sun::star::connection::XConnection >&
+ getConnection() const { return connection_;}
+
+ const com::sun::star::uno::Reference< com::sun::star::bridge::XInstanceProvider >&
+ getProvider() const { return provider_;}
+
+ com::sun::star::uno::Mapping & getCppToBinaryMapping() { return cppToBinaryMapping_;}
+
+ BinaryAny mapCppToBinaryAny(com::sun::star::uno::Any const & cppAny);
+
+ uno_ThreadPool getThreadPool();
+
+ rtl::Reference< Writer > getWriter();
+
+ com::sun::star::uno::UnoInterfaceReference registerIncomingInterface(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type);
+
+ OUString registerOutgoingInterface(
+ com::sun::star::uno::UnoInterfaceReference const & object,
+ com::sun::star::uno::TypeDescription const & type);
+
+ com::sun::star::uno::UnoInterfaceReference findStub(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type);
+
+ void releaseStub(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type);
+
+ void resurrectProxy(Proxy & proxy);
+
+ void revokeProxy(Proxy & proxy);
+
+ void freeProxy(Proxy & proxy);
+
+ void incrementCalls(bool normalCall) throw ();
+
+ void decrementCalls();
+
+ void incrementActiveCalls() throw ();
+
+ void decrementActiveCalls() throw ();
+
+ bool makeCall(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ std::vector< BinaryAny > const & inArguments, BinaryAny * returnValue,
+ std::vector< BinaryAny > * outArguments);
+
+ // Only called from reader_ thread:
+ void sendRequestChangeRequest();
+
+ // Only called from reader_ thread:
+ void handleRequestChangeReply(
+ bool exception, BinaryAny const & returnValue);
+
+ // Only called from reader_ thread:
+ void handleCommitChangeReply(bool exception, BinaryAny const & returnValue);
+
+ // Only called from reader_ thread:
+ void handleRequestChangeRequest(
+ rtl::ByteSequence const & tid,
+ std::vector< BinaryAny > const & inArguments);
+
+ // Only called from reader_ thread:
+ void handleCommitChangeRequest(
+ rtl::ByteSequence const & tid,
+ std::vector< BinaryAny > const & inArguments);
+
+ OutgoingRequest lastOutgoingRequest(rtl::ByteSequence const & tid);
+
+ bool isProtocolPropertiesRequest(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type) const;
+
+ void setCurrentContextMode();
+
+ bool isCurrentContextMode();
+
+private:
+ Bridge(const Bridge&) = delete;
+ Bridge& operator=(const Bridge&) = delete;
+
+ virtual ~Bridge() override;
+
+ virtual com::sun::star::uno::Reference< com::sun::star::uno::XInterface >
+ SAL_CALL getInstance(OUString const & sInstanceName) override;
+
+ virtual OUString SAL_CALL getName() override;
+
+ virtual OUString SAL_CALL getDescription() override;
+
+ virtual void SAL_CALL dispose() override;
+
+ virtual void SAL_CALL addEventListener(
+ com::sun::star::uno::Reference< com::sun::star::lang::XEventListener >
+ const & xListener) override;
+
+ virtual void SAL_CALL removeEventListener(
+ com::sun::star::uno::Reference< com::sun::star::lang::XEventListener >
+ const & aListener) override;
+
+ // Only called from reader_ thread:
+ void sendCommitChangeRequest();
+
+ // Only called from reader_ thread:
+ void sendProtPropRequest(
+ OutgoingRequest::Kind kind,
+ std::vector< BinaryAny > const & inArguments);
+
+ void makeReleaseCall(
+ OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type);
+
+ void sendRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type,
+ com::sun::star::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments);
+
+ void throwException(bool exception, BinaryAny const & value);
+
+ com::sun::star::uno::Any mapBinaryToCppAny(BinaryAny const & binaryAny);
+
+ bool becameUnused() const;
+
+ void terminateWhenUnused(bool unused);
+
+ // Must only be called with mutex_ locked:
+ void checkDisposed();
+
+ typedef
+ std::vector<
+ com::sun::star::uno::Reference<
+ com::sun::star::lang::XEventListener > >
+ Listeners;
+
+ struct SubStub;
+
+ typedef std::map< com::sun::star::uno::TypeDescription, SubStub > Stub;
+
+ typedef std::map< OUString, Stub > Stubs;
+
+ enum State { STATE_INITIAL, STATE_STARTED, STATE_TERMINATED, STATE_FINAL };
+
+ enum Mode {
+ MODE_REQUESTED, MODE_REPLY_MINUS1, MODE_REPLY_0, MODE_REPLY_1,
+ MODE_WAIT, MODE_NORMAL, MODE_NORMAL_WAIT };
+
+ rtl::Reference< BridgeFactory > factory_;
+ OUString name_;
+ com::sun::star::uno::Reference< com::sun::star::connection::XConnection >
+ connection_;
+ com::sun::star::uno::Reference< com::sun::star::bridge::XInstanceProvider >
+ provider_;
+ com::sun::star::uno::Environment binaryUno_;
+ com::sun::star::uno::Mapping cppToBinaryMapping_;
+ com::sun::star::uno::Mapping binaryToCppMapping_;
+ rtl::ByteSequence protPropTid_;
+ OUString protPropOid_;
+ com::sun::star::uno::TypeDescription protPropType_;
+ com::sun::star::uno::TypeDescription protPropRequest_;
+ com::sun::star::uno::TypeDescription protPropCommit_;
+ OutgoingRequests outgoingRequests_;
+ osl::Condition passive_;
+ // to guarantee that passive_ is eventually set (to avoid deadlock, see
+ // dispose), activeCalls_ only counts those calls for which it can be
+ // guaranteed that incrementActiveCalls is indeed followed by
+ // decrementActiveCalls, without an intervening exception
+ osl::Condition terminated_;
+
+ osl::Mutex mutex_;
+ State state_;
+ Listeners listeners_;
+ uno_ThreadPool threadPool_;
+ rtl::Reference< Writer > writer_;
+ rtl::Reference< Reader > reader_;
+ bool currentContextMode_;
+ Stubs stubs_;
+ std::size_t proxies_;
+ std::size_t calls_;
+ bool normalCall_;
+ std::size_t activeCalls_;
+
+ // Only accessed from reader_ thread:
+ Mode mode_;
+ sal_Int32 random_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/bridgefactory.cxx b/binaryurp/source/bridgefactory.cxx
new file mode 100644
index 000000000..be21fc9c6
--- /dev/null
+++ b/binaryurp/source/bridgefactory.cxx
@@ -0,0 +1,220 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <algorithm>
+#include <cassert>
+
+#include <com/sun/star/bridge/BridgeExistsException.hpp>
+#include <com/sun/star/connection/XConnection.hpp>
+#include <com/sun/star/lang/IllegalArgumentException.hpp>
+#include <com/sun/star/uno/Exception.hpp>
+#include <com/sun/star/uno/Reference.hxx>
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <com/sun/star/uno/XComponentContext.hpp>
+#include <com/sun/star/uno/XInterface.hpp>
+#include <cppuhelper/factory.hxx>
+#include <cppuhelper/implementationentry.hxx>
+#include <cppuhelper/supportsservice.hxx>
+#include <o3tl/safeint.hxx>
+#include <rtl/ref.hxx>
+#include <sal/log.hxx>
+#include <sal/types.h>
+
+#include "bridge.hxx"
+#include "bridgefactory.hxx"
+
+namespace binaryurp {
+
+css::uno::Reference< css::uno::XInterface > BridgeFactory::static_create(
+ css::uno::Reference< css::uno::XComponentContext > const & /*xContext*/)
+{
+ return static_cast< cppu::OWeakObject * >(new BridgeFactory);
+}
+
+OUString BridgeFactory::static_getImplementationName() {
+ return "com.sun.star.comp.bridge.BridgeFactory";
+}
+
+css::uno::Sequence< OUString >
+BridgeFactory::static_getSupportedServiceNames() {
+ return css::uno::Sequence<OUString>{ "com.sun.star.bridge.BridgeFactory" };
+}
+
+void BridgeFactory::removeBridge(
+ css::uno::Reference< css::bridge::XBridge > const & bridge)
+{
+ assert(bridge.is());
+ OUString n(bridge->getName());
+ osl::MutexGuard g(m_aMutex);
+ if (n.isEmpty())
+ {
+ unnamed_.erase(std::remove(unnamed_.begin(), unnamed_.end(), bridge), unnamed_.end());
+ }
+ else
+ {
+ BridgeMap::iterator i(named_.find(n));
+ if (i != named_.end() && i->second == bridge)
+ named_.erase(i);
+ }
+}
+
+BridgeFactory::BridgeFactory():
+ BridgeFactoryBase(m_aMutex)
+{
+}
+
+BridgeFactory::~BridgeFactory() {}
+
+OUString BridgeFactory::getImplementationName()
+{
+ return static_getImplementationName();
+}
+
+sal_Bool BridgeFactory::supportsService(OUString const & ServiceName)
+{
+ return cppu::supportsService(this, ServiceName);
+}
+
+css::uno::Sequence< OUString > BridgeFactory::getSupportedServiceNames()
+{
+ return static_getSupportedServiceNames();
+}
+
+css::uno::Reference< css::bridge::XBridge > BridgeFactory::createBridge(
+ OUString const & sName, OUString const & sProtocol,
+ css::uno::Reference< css::connection::XConnection > const & aConnection,
+ css::uno::Reference< css::bridge::XInstanceProvider > const &
+ anInstanceProvider)
+{
+ rtl::Reference< Bridge > b;
+ {
+ osl::MutexGuard g(m_aMutex);
+ if (rBHelper.bDisposed) {
+ throw css::lang::DisposedException(
+ "BridgeFactory disposed",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ if (named_.find(sName) != named_.end()) {
+ throw css::bridge::BridgeExistsException(
+ sName, static_cast< cppu::OWeakObject * >(this));
+ }
+ if (sProtocol != "urp" || !aConnection.is()) {
+ throw css::lang::IllegalArgumentException(
+ ("BridgeFactory::createBridge: sProtocol != urp ||"
+ " aConnection == null"),
+ static_cast< cppu::OWeakObject * >(this), -1);
+ }
+ b.set(new Bridge(this, sName, aConnection, anInstanceProvider));
+ if (sName.isEmpty()) {
+ unnamed_.emplace_back(b.get());
+ } else {
+ named_[sName] = b.get();
+ }
+ }
+ b->start();
+ return css::uno::Reference< css::bridge::XBridge >(b.get());
+}
+
+css::uno::Reference< css::bridge::XBridge > BridgeFactory::getBridge(
+ OUString const & sName)
+{
+ osl::MutexGuard g(m_aMutex);
+ BridgeMap::iterator i(named_.find(sName));
+ return i == named_.end()
+ ? css::uno::Reference< css::bridge::XBridge >() : i->second;
+}
+
+css::uno::Sequence< css::uno::Reference< css::bridge::XBridge > >
+BridgeFactory::getExistingBridges() {
+ osl::MutexGuard g(m_aMutex);
+ if (unnamed_.size() > SAL_MAX_INT32) {
+ throw css::uno::RuntimeException(
+ "BridgeFactory::getExistingBridges: too many",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ sal_Int32 n = static_cast< sal_Int32 >(unnamed_.size());
+ if (named_.size() > o3tl::make_unsigned(SAL_MAX_INT32 - n)) {
+ throw css::uno::RuntimeException(
+ "BridgeFactory::getExistingBridges: too many",
+ static_cast< cppu::OWeakObject * >(this));
+ }
+ n = static_cast< sal_Int32 >(n + named_.size());
+ css::uno::Sequence< css::uno::Reference< css::bridge::XBridge > > s(n);
+ sal_Int32 i = 0;
+ for (auto const& item : unnamed_)
+ s[i++] = item;
+
+ for (auto const& item : named_)
+ s[i++] = item.second;
+
+ return s;
+}
+
+void BridgeFactory::disposing() {
+ BridgeVector l1;
+ BridgeMap l2;
+ {
+ osl::MutexGuard g(m_aMutex);
+ l1.swap(unnamed_);
+ l2.swap(named_);
+ }
+ for (auto const& item : l1)
+ {
+ try {
+ css::uno::Reference<css::lang::XComponent>(
+ item, css::uno::UNO_QUERY_THROW)->dispose();
+ } catch (css::uno::Exception & e) {
+ SAL_WARN("binaryurp", "ignoring " << e);
+ }
+ }
+ for (auto const& item : l2)
+ {
+ try {
+ css::uno::Reference<css::lang::XComponent>(
+ item.second, css::uno::UNO_QUERY_THROW)->dispose();
+ } catch (css::uno::Exception & e) {
+ SAL_WARN("binaryurp", "ignoring " << e);
+ }
+ }
+}
+
+}
+
+namespace {
+
+static cppu::ImplementationEntry const services[] = {
+ { &binaryurp::BridgeFactory::static_create,
+ &binaryurp::BridgeFactory::static_getImplementationName,
+ &binaryurp::BridgeFactory::static_getSupportedServiceNames,
+ &cppu::createOneInstanceComponentFactory, nullptr, 0 },
+ { nullptr, nullptr, nullptr, nullptr, nullptr, 0 }
+};
+
+}
+
+extern "C" SAL_DLLPUBLIC_EXPORT void * binaryurp_component_getFactory(
+ char const * pImplName, void * pServiceManager, void * pRegistryKey)
+{
+ return cppu::component_getFactoryHelper(
+ pImplName, pServiceManager, pRegistryKey, services);
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/bridgefactory.hxx b/binaryurp/source/bridgefactory.hxx
new file mode 100644
index 000000000..5e49610ee
--- /dev/null
+++ b/binaryurp/source/bridgefactory.hxx
@@ -0,0 +1,128 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_BRIDGEFACTORY_HXX
+#define INCLUDED_BINARYURP_SOURCE_BRIDGEFACTORY_HXX
+
+#include <sal/config.h>
+
+#include <vector>
+#include <map>
+
+#include <com/sun/star/bridge/XBridgeFactory2.hpp>
+#include <com/sun/star/lang/XServiceInfo.hpp>
+#include <com/sun/star/uno/Reference.hxx>
+#include <cppuhelper/basemutex.hxx>
+#include <cppuhelper/compbase.hxx>
+#include <sal/types.h>
+
+namespace com::sun::star {
+ namespace connection { class XConnection; }
+ namespace uno {
+ class XComponentContext;
+ class XInterface;
+ }
+}
+
+namespace binaryurp {
+
+// That BridgeFactory derives from XComponent appears to be a historic mistake;
+// the implementation does not care about a disposed state:
+
+typedef
+ cppu::WeakComponentImplHelper<
+ com::sun::star::lang::XServiceInfo,
+ com::sun::star::bridge::XBridgeFactory2 >
+ BridgeFactoryBase;
+
+class BridgeFactory : private cppu::BaseMutex, public BridgeFactoryBase
+{
+public:
+ static com::sun::star::uno::Reference< com::sun::star::uno::XInterface >
+ SAL_CALL static_create(
+ com::sun::star::uno::Reference< com::sun::star::uno::XComponentContext >
+ const & xContext);
+
+ static OUString SAL_CALL static_getImplementationName();
+
+ static com::sun::star::uno::Sequence< OUString > SAL_CALL
+ static_getSupportedServiceNames();
+
+ void removeBridge(
+ com::sun::star::uno::Reference< com::sun::star::bridge::XBridge >
+ const & bridge);
+
+ using BridgeFactoryBase::acquire;
+ using BridgeFactoryBase::release;
+
+private:
+ BridgeFactory(const BridgeFactory&) = delete;
+ BridgeFactory& operator=(const BridgeFactory&) = delete;
+
+ BridgeFactory();
+
+ virtual ~BridgeFactory() override;
+
+ virtual OUString SAL_CALL getImplementationName() override;
+
+ virtual sal_Bool SAL_CALL supportsService(OUString const & ServiceName) override;
+
+ virtual com::sun::star::uno::Sequence< OUString > SAL_CALL
+ getSupportedServiceNames() override;
+
+ virtual com::sun::star::uno::Reference< com::sun::star::bridge::XBridge >
+ SAL_CALL createBridge(
+ OUString const & sName, OUString const & sProtocol,
+ com::sun::star::uno::Reference<
+ com::sun::star::connection::XConnection > const & aConnection,
+ com::sun::star::uno::Reference<
+ com::sun::star::bridge::XInstanceProvider > const &
+ anInstanceProvider) override;
+
+ virtual com::sun::star::uno::Reference< com::sun::star::bridge::XBridge >
+ SAL_CALL getBridge(
+ OUString const & sName) override;
+
+ virtual
+ com::sun::star::uno::Sequence<
+ com::sun::star::uno::Reference< com::sun::star::bridge::XBridge > >
+ SAL_CALL getExistingBridges() override;
+
+ void SAL_CALL disposing() override;
+
+ typedef
+ std::vector<
+ com::sun::star::uno::Reference< com::sun::star::bridge::XBridge > >
+ BridgeVector;
+
+ typedef
+ std::map<
+ OUString,
+ com::sun::star::uno::Reference< com::sun::star::bridge::XBridge > >
+ BridgeMap;
+
+ BridgeVector unnamed_;
+ BridgeMap named_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/cache.hxx b/binaryurp/source/cache.hxx
new file mode 100644
index 000000000..722e7494b
--- /dev/null
+++ b/binaryurp/source/cache.hxx
@@ -0,0 +1,98 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_CACHE_HXX
+#define INCLUDED_BINARYURP_SOURCE_CACHE_HXX
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <cstddef>
+#include <map>
+#include <list>
+
+#include <sal/types.h>
+
+namespace binaryurp {
+
+namespace cache {
+
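+// "ignore" (0xFFFF) is a reserved index meaning "not cached"; usable cache
+// sizes must stay strictly below it (see the assert in Cache::Cache).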
+enum { size = 256, ignore = 0xFFFF };
+
+}
+
+template< typename T > class Cache {
+public:
+ typedef sal_uInt16 IdxType;
+
+ explicit Cache(std::size_t size):
+ size_(size)
+ {
+ assert(size < cache::ignore);
+ }
+
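+    // Returns the index assigned to rContent and sets *pbFound to whether the
+    // entry was already cached; once the cache is full, the index of the least
+    // recently used entry is recycled for the new content.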
+ IdxType add( const T& rContent, bool* pbFound) {
+ assert( pbFound != nullptr);
+ if( !size_) {
+ *pbFound = false;
+ return cache::ignore;
+ }
+ // try to insert into the map
+ list_.push_front( rContent); // create a temp entry
+ auto const [it, inserted] = map_.emplace( list_.begin(), 0 );
+ *pbFound = !inserted;
+
+ if( !inserted) { // insertion not needed => found the entry
+ list_.pop_front(); // remove the temp entry
+ list_.splice( list_.begin(), list_, it->first); // the found entry is moved to front
+ return it->second;
+ }
+
+        // insertion succeeded => the entry is new, so keep it
+ IdxType n = static_cast<IdxType>( map_.size() - 1);
+ if( n >= size_) { // cache full => replace the LRU entry
+ // find the least recently used element in the map
+ typename LruItMap::iterator lru = map_.find( --list_.end());
+ n = lru->second;
+ map_.erase( lru); // remove it from the map
+ list_.pop_back(); // remove from the list
+ }
+ it->second = n;
+ return n;
+ }
+
+private:
+ Cache(const Cache&) = delete;
+ Cache& operator=(const Cache&) = delete;
+
+    typedef std::list<T> LruList; // least recently used (LRU) list
+ typedef typename LruList::iterator LruListIt;
+ struct CmpT{ bool operator()( const LruListIt& rA, const LruListIt& rB) const { return (*rA<*rB);}};
+    typedef std::map< LruListIt, IdxType, CmpT > LruItMap; // a map keyed by LruList iterators
+
+ std::size_t size_;
+ LruItMap map_;
+ LruList list_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/currentcontext.cxx b/binaryurp/source/currentcontext.cxx
new file mode 100644
index 000000000..acaf606d2
--- /dev/null
+++ b/binaryurp/source/currentcontext.cxx
@@ -0,0 +1,55 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <rtl/ustring.hxx>
+#include <uno/current_context.h>
+#include <uno/dispatcher.hxx>
+#include <uno/lbnames.h>
+
+#include "currentcontext.hxx"
+
+namespace binaryurp::current_context {
+
+css::uno::UnoInterfaceReference get() {
+ css::uno::UnoInterfaceReference cc;
+ if (!uno_getCurrentContext(
+ reinterpret_cast< void ** >(&cc.m_pUnoI),
+ OUString(UNO_LB_UNO).pData, nullptr))
+ {
+ throw css::uno::RuntimeException("uno_getCurrentContext failed");
+ }
+ return cc;
+}
+
+void set(css::uno::UnoInterfaceReference const & value) {
+ css::uno::UnoInterfaceReference cc(value);
+ if (!uno_setCurrentContext(
+ cc.m_pUnoI,
+ OUString(UNO_LB_UNO).pData, nullptr))
+ {
+ throw css::uno::RuntimeException("uno_setCurrentContext failed");
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/currentcontext.hxx b/binaryurp/source/currentcontext.hxx
new file mode 100644
index 000000000..95600e62d
--- /dev/null
+++ b/binaryurp/source/currentcontext.hxx
@@ -0,0 +1,37 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_CURRENTCONTEXT_HXX
+#define INCLUDED_BINARYURP_SOURCE_CURRENTCONTEXT_HXX
+
+#include <sal/config.h>
+
+namespace com::sun::star::uno { class UnoInterfaceReference; }
+
+namespace binaryurp::current_context {
+
+com::sun::star::uno::UnoInterfaceReference get();
+
+void set(com::sun::star::uno::UnoInterfaceReference const & value);
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/incomingreply.hxx b/binaryurp/source/incomingreply.hxx
new file mode 100644
index 000000000..058d0edf8
--- /dev/null
+++ b/binaryurp/source/incomingreply.hxx
@@ -0,0 +1,54 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_INCOMINGREPLY_HXX
+#define INCLUDED_BINARYURP_SOURCE_INCOMINGREPLY_HXX
+
+#include <sal/config.h>
+
+#include <vector>
+
+#include "binaryany.hxx"
+
+namespace binaryurp {
+
+struct IncomingReply {
+private:
+ IncomingReply(const IncomingReply&) = delete;
+ IncomingReply& operator=(const IncomingReply&) = delete;
+public:
+ IncomingReply(
+ bool theException, BinaryAny const & theReturnValue,
+ std::vector< BinaryAny > const & theOutArguments):
+ exception(theException), returnValue(theReturnValue),
+ outArguments(theOutArguments)
+ {}
+
+ bool exception;
+
+ BinaryAny returnValue;
+
+ std::vector< BinaryAny > outArguments;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/incomingrequest.cxx b/binaryurp/source/incomingrequest.cxx
new file mode 100644
index 000000000..eed6f7cb8
--- /dev/null
+++ b/binaryurp/source/incomingrequest.cxx
@@ -0,0 +1,285 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <vector>
+
+#include <com/sun/star/bridge/XInstanceProvider.hpp>
+#include <com/sun/star/container/NoSuchElementException.hpp>
+#include <cppuhelper/exc_hlp.hxx>
+#include <o3tl/runtimetooustring.hxx>
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/log.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.hxx>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "currentcontext.hxx"
+#include "incomingrequest.hxx"
+#include "specialfunctionids.hxx"
+
+namespace binaryurp {
+
+IncomingRequest::IncomingRequest(
+ rtl::Reference< Bridge > const & bridge, rtl::ByteSequence const & tid,
+ OUString const & oid, css::uno::UnoInterfaceReference const & object,
+ css::uno::TypeDescription const & type, sal_uInt16 functionId,
+ bool synchronous, css::uno::TypeDescription const & member, bool setter,
+ std::vector< BinaryAny > const & inArguments, bool currentContextMode,
+ css::uno::UnoInterfaceReference const & currentContext):
+ bridge_(bridge), tid_(tid), oid_(oid), object_(object), type_(type),
+ functionId_(functionId), synchronous_(synchronous), member_(member),
+ setter_(setter), inArguments_(inArguments),
+ currentContextMode_(currentContextMode), currentContext_(currentContext)
+{
+ assert(bridge.is());
+ assert(member.is());
+ assert(member.get()->bComplete);
+}
+
+IncomingRequest::~IncomingRequest() {}
+
+void IncomingRequest::execute() const {
+ BinaryAny ret;
+ std::vector< BinaryAny > outArgs;
+ bool isExc;
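+    // Two catch layers: the inner one maps any C++ or UNO exception raised by
+    // the call itself into a binary any for the reply; the outer one
+    // additionally covers restoring the caller's current context.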
+ try {
+ bool resetCc = false;
+ css::uno::UnoInterfaceReference oldCc;
+ if (currentContextMode_) {
+ oldCc = current_context::get();
+ current_context::set(currentContext_);
+ resetCc = true;
+ }
+ try {
+ try {
+ isExc = !execute_throw(&ret, &outArgs);
+ } catch (const std::exception & e) {
+ throw css::uno::RuntimeException(
+ "caught C++ exception: "
+ + o3tl::runtimeToOUString(e.what()));
+ }
+ } catch (const css::uno::RuntimeException &) {
+ css::uno::Any exc(cppu::getCaughtException());
+ ret = bridge_->mapCppToBinaryAny(exc);
+ isExc = true;
+ }
+ if (resetCc) {
+ current_context::set(oldCc);
+ }
+ } catch (const css::uno::RuntimeException &) {
+ css::uno::Any exc(cppu::getCaughtException());
+ ret = bridge_->mapCppToBinaryAny(exc);
+ isExc = true;
+ }
+ if (synchronous_) {
+ bridge_->decrementActiveCalls();
+ try {
+ bridge_->getWriter()->queueReply(
+ tid_, member_, setter_, isExc, ret, outArgs, false);
+ return;
+ } catch (const css::uno::RuntimeException & e) {
+ SAL_INFO("binaryurp", "caught " << e);
+ } catch (const std::exception & e) {
+ SAL_INFO("binaryurp", "caught C++ exception " << e.what());
+ }
+ bridge_->terminate(false);
+ } else {
+ if (isExc) {
+ SAL_INFO("binaryurp", "oneway method raised exception");
+ }
+ bridge_->decrementCalls();
+ }
+}
+
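+// Round a byte count up to the next multiple of sizeof(size_t); used below to
+// size the return-value and out-parameter buffers.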
+static size_t size_t_round(size_t val)
+{
+ return (val + (sizeof(size_t)-1)) & ~(sizeof(size_t)-1);
+}
+
+bool IncomingRequest::execute_throw(
+ BinaryAny * returnValue, std::vector< BinaryAny > * outArguments) const
+{
+ assert(returnValue != nullptr);
+ assert(
+ returnValue->getType().equals(
+ css::uno::TypeDescription(cppu::UnoType<void>::get())));
+ assert(outArguments != nullptr);
+ assert(outArguments->empty());
+ bool isExc = false;
+ switch (functionId_) {
+ case SPECIAL_FUNCTION_ID_RESERVED:
+ assert(false); // this cannot happen
+ break;
+ case SPECIAL_FUNCTION_ID_RELEASE:
+ bridge_->releaseStub(oid_, type_);
+ break;
+ case SPECIAL_FUNCTION_ID_QUERY_INTERFACE:
+ if (!object_.is()) {
+ css::uno::Reference< css::uno::XInterface > ifc;
+ css::uno::Reference< css::bridge::XInstanceProvider > prov(
+ bridge_->getProvider());
+ if (prov.is()) {
+ try {
+ ifc = prov->getInstance(oid_);
+ } catch (const css::container::NoSuchElementException & e) {
+ SAL_INFO("binaryurp", "initial element " << oid_ << ": " << e);
+ }
+ }
+ if (ifc.is()) {
+ css::uno::UnoInterfaceReference unoIfc(
+ static_cast< uno_Interface * >(
+ bridge_->getCppToBinaryMapping().mapInterface(
+ ifc.get(),
+ (css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference<
+ css::uno::XInterface > >::get()).
+ get()))),
+ SAL_NO_ACQUIRE);
+ *returnValue = BinaryAny(
+ css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference<
+ css::uno::XInterface > >::get()),
+ &unoIfc.m_pUnoI);
+ }
+ break;
+ }
+ [[fallthrough]];
+ default:
+ {
+ assert(object_.is());
+ css::uno::TypeDescription retType;
+ std::vector< std::vector< char > > outBufs;
+ std::vector< void * > args;
+ switch (member_.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ {
+ css::uno::TypeDescription t(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ member_.get())->
+ pAttributeTypeRef);
+ if (setter_) {
+ assert(inArguments_.size() == 1);
+ args.push_back(inArguments_[0].getValue(t));
+ } else {
+ assert(inArguments_.empty());
+ retType = t;
+ }
+ break;
+ }
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription * >(
+ member_.get());
+ retType = css::uno::TypeDescription(mtd->pReturnTypeRef);
+ std::vector< BinaryAny >::const_iterator i(
+ inArguments_.begin());
+ for (sal_Int32 j = 0; j != mtd->nParams; ++j) {
+ void * p;
+ if (mtd->pParams[j].bIn) {
+ p = i++->getValue(
+ css::uno::TypeDescription(
+ mtd->pParams[j].pTypeRef));
+ } else {
+ outBufs.emplace_back(size_t_round(
+ css::uno::TypeDescription(
+ mtd->pParams[j].pTypeRef).
+ get()->nSize));
+ p = outBufs.back().data();
+ }
+ args.push_back(p);
+ if (mtd->pParams[j].bOut) {
+ outArguments->push_back(BinaryAny());
+ }
+ }
+ assert(i == inArguments_.end());
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ size_t nSize = 0;
+ if (retType.is())
+ nSize = size_t_round(retType.get()->nSize);
+ std::vector< char > retBuf(nSize);
+ uno_Any exc;
+ uno_Any * pexc = &exc;
+ (*object_.get()->pDispatcher)(
+ object_.get(), member_.get(), retBuf.empty() ? nullptr : retBuf.data(),
+ args.empty() ? nullptr : args.data(), &pexc);
+ isExc = pexc != nullptr;
+ if (isExc) {
+ *returnValue = BinaryAny(
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Any >::get()),
+ &exc);
+ uno_any_destruct(&exc, nullptr);
+ } else {
+ if (!retBuf.empty()) {
+ *returnValue = BinaryAny(retType, retBuf.data());
+ uno_destructData(retBuf.data(), retType.get(), nullptr);
+ }
+ if (!outArguments->empty()) {
+ assert(
+ member_.get()->eTypeClass ==
+ typelib_TypeClass_INTERFACE_METHOD);
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription * >(
+ member_.get());
+ std::vector< BinaryAny >::iterator i(outArguments->begin());
+ std::vector< std::vector< char > >::iterator j(
+ outBufs.begin());
+ for (sal_Int32 k = 0; k != mtd->nParams; ++k) {
+ if (mtd->pParams[k].bOut) {
+ *i++ = BinaryAny(
+ css::uno::TypeDescription(
+ mtd->pParams[k].pTypeRef),
+ args[k]);
+ }
+ if (!mtd->pParams[k].bIn) {
+ uno_type_destructData(
+ (j++)->data(), mtd->pParams[k].pTypeRef, nullptr);
+ }
+ }
+ assert(i == outArguments->end());
+ assert(j == outBufs.end());
+ }
+ }
+ break;
+ }
+ }
+ return !isExc;
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/incomingrequest.hxx b/binaryurp/source/incomingrequest.hxx
new file mode 100644
index 000000000..d6ed7fbcc
--- /dev/null
+++ b/binaryurp/source/incomingrequest.hxx
@@ -0,0 +1,82 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_INCOMINGREQUEST_HXX
+#define INCLUDED_BINARYURP_SOURCE_INCOMINGREQUEST_HXX
+
+#include <sal/config.h>
+
+#include <vector>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.hxx>
+#include <uno/dispatcher.hxx>
+
+namespace binaryurp {
+ class BinaryAny;
+ class Bridge;
+}
+
+namespace binaryurp {
+
+class IncomingRequest {
+private:
+ IncomingRequest(const IncomingRequest&) = delete;
+ IncomingRequest& operator=(const IncomingRequest&) = delete;
+public:
+ IncomingRequest(
+ rtl::Reference< Bridge > const & bridge, rtl::ByteSequence const & tid,
+ OUString const & oid,
+ com::sun::star::uno::UnoInterfaceReference const & object,
+ com::sun::star::uno::TypeDescription const & type,
+ sal_uInt16 functionId, bool synchronous,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ std::vector< BinaryAny > const & inArguments, bool currentContextMode,
+ com::sun::star::uno::UnoInterfaceReference const & currentContext);
+
+ ~IncomingRequest();
+
+ void execute() const;
+
+private:
+ bool execute_throw(
+ BinaryAny * returnValue, std::vector< BinaryAny > * outArguments) const;
+
+ rtl::Reference< Bridge > bridge_;
+ rtl::ByteSequence tid_;
+ OUString oid_; // initial object queryInterface; release
+ com::sun::star::uno::UnoInterfaceReference object_;
+ com::sun::star::uno::TypeDescription type_;
+ sal_uInt16 functionId_;
+ bool synchronous_;
+ com::sun::star::uno::TypeDescription member_;
+ bool setter_;
+ std::vector< BinaryAny > inArguments_;
+ bool currentContextMode_;
+ com::sun::star::uno::UnoInterfaceReference currentContext_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/lessoperators.cxx b/binaryurp/source/lessoperators.cxx
new file mode 100644
index 000000000..acab81175
--- /dev/null
+++ b/binaryurp/source/lessoperators.cxx
@@ -0,0 +1,65 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <algorithm>
+#include <cassert>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typeclass.h>
+#include <typelib/typedescription.hxx>
+
+#include "lessoperators.hxx"
+
+namespace com::sun::star::uno {
+
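+// Strict weak ordering for TypeDescription: by type class first, then by type
+// name; this lets TypeDescription act as a key in the marshalling caches.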
+bool operator <(TypeDescription const & left, TypeDescription const & right) {
+ assert(left.is() && right.is());
+ typelib_TypeClass tc1 = left.get()->eTypeClass;
+ typelib_TypeClass tc2 = right.get()->eTypeClass;
+ return tc1 < tc2 ||
+ (tc1 == tc2 &&
+ (OUString::unacquired(&left.get()->pTypeName) <
+ OUString::unacquired(&right.get()->pTypeName)));
+}
+
+}
+
+namespace rtl {
+
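+// Byte-wise lexicographic ordering, with a proper prefix sorting first; this
+// lets ByteSequence TIDs act as keys in the marshalling caches.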
+bool operator <(ByteSequence const & left, ByteSequence const & right) {
+ const sal_Int32 nLen = std::min( left.getLength(), right.getLength());
+ for( sal_Int32 i = 0; i < nLen; ++i )
+ {
+ if (left[i] < right[i]) {
+ return true;
+ }
+ if (right[i] < left[i]) {
+ return false;
+ }
+ }
+ return left.getLength() < right.getLength();
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/lessoperators.hxx b/binaryurp/source/lessoperators.hxx
new file mode 100644
index 000000000..65f2c5366
--- /dev/null
+++ b/binaryurp/source/lessoperators.hxx
@@ -0,0 +1,42 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_LESSOPERATORS_HXX
+#define INCLUDED_BINARYURP_SOURCE_LESSOPERATORS_HXX
+
+#include <sal/config.h>
+
+namespace com::sun::star::uno { class TypeDescription; }
+namespace rtl { class ByteSequence; }
+
+namespace com::sun::star::uno {
+
+bool operator <(TypeDescription const & left, TypeDescription const & right);
+
+}
+
+namespace rtl {
+
+bool operator <(ByteSequence const & left, ByteSequence const & right);
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/marshal.cxx b/binaryurp/source/marshal.cxx
new file mode 100644
index 000000000..7d60cbf4d
--- /dev/null
+++ b/binaryurp/source/marshal.cxx
@@ -0,0 +1,300 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <vector>
+
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+#include <cppu/unotype.hxx>
+#include <rtl/byteseq.hxx>
+#include <rtl/string.hxx>
+#include <rtl/textcvt.h>
+#include <rtl/textenc.h>
+#include <rtl/ustring.h>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typeclass.h>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "cache.hxx"
+#include "lessoperators.hxx"
+#include "marshal.hxx"
+
+namespace binaryurp {
+
+namespace {
+
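+// Multi-byte values are written most significant byte first (big-endian wire
+// order).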
+void write64(std::vector< unsigned char > * buffer, sal_uInt64 value) {
+ Marshal::write8(buffer, value >> 56);
+ Marshal::write8(buffer, (value >> 48) & 0xFF);
+ Marshal::write8(buffer, (value >> 40) & 0xFF);
+ Marshal::write8(buffer, (value >> 32) & 0xFF);
+ Marshal::write8(buffer, (value >> 24) & 0xFF);
+ Marshal::write8(buffer, (value >> 16) & 0xFF);
+ Marshal::write8(buffer, (value >> 8) & 0xFF);
+ Marshal::write8(buffer, value & 0xFF);
+}
+
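+// "Compressed" numbers: values below 0xFF are written as a single byte; larger
+// values as an 0xFF marker byte followed by the full 32-bit value.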
+void writeCompressed(std::vector< unsigned char > * buffer, sal_uInt32 value) {
+ if (value < 0xFF) {
+ Marshal::write8(buffer, static_cast< sal_uInt8 >(value));
+ } else {
+ Marshal::write8(buffer, 0xFF);
+ Marshal::write32(buffer, value);
+ }
+}
+
+void writeString(
+ std::vector< unsigned char > * buffer, OUString const & value)
+{
+ assert(buffer != nullptr);
+ OString v;
+ if (!value.convertToString(
+ &v, RTL_TEXTENCODING_UTF8,
+ (RTL_UNICODETOTEXT_FLAGS_UNDEFINED_ERROR |
+ RTL_UNICODETOTEXT_FLAGS_INVALID_ERROR)))
+ {
+ throw css::uno::RuntimeException(
+ "UNO string contains invalid UTF-16 sequence");
+ }
+ writeCompressed(buffer, static_cast< sal_uInt32 >(v.getLength()));
+ buffer->insert(buffer->end(), v.getStr(), v.getStr() + v.getLength());
+}
+
+}
+
+Marshal::Marshal(rtl::Reference< Bridge > const & bridge, WriterState & state):
+ bridge_(bridge), state_(state)
+{
+ assert(bridge.is());
+}
+
+Marshal::~Marshal() {}
+
+void Marshal::write8(std::vector< unsigned char > * buffer, sal_uInt8 value) {
+ assert(buffer != nullptr);
+ buffer->push_back(value);
+}
+
+void Marshal::write16(std::vector< unsigned char > * buffer, sal_uInt16 value) {
+ write8(buffer, value >> 8);
+ write8(buffer, value & 0xFF);
+}
+
+void Marshal::write32(std::vector< unsigned char > * buffer, sal_uInt32 value) {
+ write8(buffer, value >> 24);
+ write8(buffer, (value >> 16) & 0xFF);
+ write8(buffer, (value >> 8) & 0xFF);
+ write8(buffer, value & 0xFF);
+}
+
+void Marshal::writeValue(
+ std::vector< unsigned char > * buffer,
+ css::uno::TypeDescription const & type, BinaryAny const & value)
+{
+ assert(
+ type.is() &&
+ (type.get()->eTypeClass == typelib_TypeClass_ANY ||
+ value.getType().equals(type)));
+ writeValue(buffer, type, value.getValue(type));
+}
+
+void Marshal::writeType(
+ std::vector< unsigned char > * buffer,
+ css::uno::TypeDescription const & value)
+{
+ value.makeComplete();
+ assert(value.is());
+ typelib_TypeClass tc = value.get()->eTypeClass;
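+    // Simple type classes (up to ANY) are encoded as a bare type-class byte;
+    // other types carry a 16-bit cache index, with bit 0x80 of the type-class
+    // byte set when the type is new and its name follows.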
+ if (tc <= typelib_TypeClass_ANY) {
+ write8(buffer, static_cast< sal_uInt8 >(tc));
+ } else {
+ bool found;
+ sal_uInt16 idx = state_.typeCache.add(value, &found);
+ if (found) {
+ write8(buffer, static_cast< sal_uInt8 >(tc));
+ write16(buffer, idx);
+ } else {
+ write8(buffer, static_cast< sal_uInt8 >(tc) | 0x80);
+ write16(buffer, idx);
+ writeString(buffer, OUString(value.get()->pTypeName));
+ }
+ }
+}
+
+void Marshal::writeOid(
+ std::vector< unsigned char > * buffer, OUString const & oid)
+{
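+    // A cached OID is sent as an empty string plus its cache index; an empty
+    // OID (null reference) uses the reserved index cache::ignore.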
+ bool found;
+ sal_uInt16 idx;
+ if ( oid.isEmpty() ) {
+ found = true;
+ idx = cache::ignore;
+ } else {
+ idx = state_.oidCache.add(oid, &found);
+ }
+ if (found) {
+ write8(buffer, 0);
+ } else {
+ writeString(buffer, oid);
+ }
+ write16(buffer, idx);
+}
+
+void Marshal::writeTid(
+ std::vector< unsigned char > * buffer, rtl::ByteSequence const & tid)
+{
+ bool found;
+ sal_uInt16 idx = state_.tidCache.add(tid, &found);
+ if (found) {
+ write8(buffer, 0);
+ } else {
+ sal_Sequence * p = tid.getHandle();
+ writeValue(
+ buffer,
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Sequence< sal_Int8 > >::get()), &p);
+ }
+ write16(buffer, idx);
+}
+
+void Marshal::writeValue(
+ std::vector< unsigned char > * buffer,
+ css::uno::TypeDescription const & type, void const * value)
+{
+ assert(buffer != nullptr && type.is());
+ type.makeComplete();
+ switch (type.get()->eTypeClass) {
+ case typelib_TypeClass_VOID:
+ break;
+ case typelib_TypeClass_BOOLEAN:
+ assert(*static_cast< sal_uInt8 const * >(value) <= 1);
+ [[fallthrough]];
+ case typelib_TypeClass_BYTE:
+ write8(buffer, *static_cast< sal_uInt8 const * >(value));
+ break;
+ case typelib_TypeClass_SHORT:
+ case typelib_TypeClass_UNSIGNED_SHORT:
+ case typelib_TypeClass_CHAR:
+ write16(buffer, *static_cast< sal_uInt16 const * >(value));
+ break;
+ case typelib_TypeClass_LONG:
+ case typelib_TypeClass_UNSIGNED_LONG:
+ case typelib_TypeClass_FLOAT:
+ case typelib_TypeClass_ENUM:
+ write32(buffer, *static_cast< sal_uInt32 const * >(value));
+ break;
+ case typelib_TypeClass_HYPER:
+ case typelib_TypeClass_UNSIGNED_HYPER:
+ case typelib_TypeClass_DOUBLE:
+ write64(buffer, *static_cast< sal_uInt64 const * >(value));
+ break;
+ case typelib_TypeClass_STRING:
+ writeString(
+ buffer,
+ OUString(*static_cast< rtl_uString * const * >(value)));
+ break;
+ case typelib_TypeClass_TYPE:
+ writeType(
+ buffer,
+ css::uno::TypeDescription(
+ *static_cast< typelib_TypeDescriptionReference * const * >(
+ value)));
+ break;
+ case typelib_TypeClass_ANY:
+ {
+ uno_Any const * p = static_cast< uno_Any const * >(value);
+ css::uno::TypeDescription t(p->pType);
+ writeType(buffer, t);
+ writeValue(buffer, t, p->pData);
+ break;
+ }
+ case typelib_TypeClass_SEQUENCE:
+ {
+ sal_Sequence * p = *static_cast< sal_Sequence * const * >(value);
+ writeCompressed(buffer, static_cast< sal_uInt32 >(p->nElements));
+ css::uno::TypeDescription ctd(
+ reinterpret_cast< typelib_IndirectTypeDescription * >(
+ type.get())->
+ pType);
+ assert(ctd.is());
+ if (ctd.get()->eTypeClass == typelib_TypeClass_BYTE) {
+ buffer->insert(
+ buffer->end(), p->elements, p->elements + p->nElements);
+ } else {
+ for (sal_Int32 i = 0; i != p->nElements; ++i) {
+ writeValue(buffer, ctd, p->elements + i * ctd.get()->nSize);
+ }
+ }
+ break;
+ }
+ case typelib_TypeClass_STRUCT:
+ case typelib_TypeClass_EXCEPTION:
+ writeMemberValues(buffer, type, value);
+ break;
+ case typelib_TypeClass_INTERFACE:
+ writeOid(
+ buffer,
+ bridge_->registerOutgoingInterface(
+ css::uno::UnoInterfaceReference(
+ *static_cast< uno_Interface * const * >(value)),
+ type));
+ break;
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+}
+
+void Marshal::writeMemberValues(
+ std::vector< unsigned char > * buffer,
+ css::uno::TypeDescription const & type, void const * aggregateValue)
+{
+ assert(
+ type.is() &&
+ (type.get()->eTypeClass == typelib_TypeClass_STRUCT ||
+ type.get()->eTypeClass == typelib_TypeClass_EXCEPTION) &&
+ aggregateValue != nullptr);
+ type.makeComplete();
+ typelib_CompoundTypeDescription * ctd =
+ reinterpret_cast< typelib_CompoundTypeDescription * >(type.get());
+ if (ctd->pBaseTypeDescription != nullptr) {
+ writeMemberValues(
+ buffer,
+ css::uno::TypeDescription(&ctd->pBaseTypeDescription->aBase),
+ aggregateValue);
+ }
+ for (sal_Int32 i = 0; i != ctd->nMembers; ++i) {
+ writeValue(
+ buffer, css::uno::TypeDescription(ctd->ppTypeRefs[i]),
+ (static_cast< char const * >(aggregateValue) +
+ ctd->pMemberOffsets[i]));
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/marshal.hxx b/binaryurp/source/marshal.hxx
new file mode 100644
index 000000000..333c8f90f
--- /dev/null
+++ b/binaryurp/source/marshal.hxx
@@ -0,0 +1,91 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_MARSHAL_HXX
+#define INCLUDED_BINARYURP_SOURCE_MARSHAL_HXX
+
+#include <sal/config.h>
+
+#include <vector>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.hxx>
+
+namespace binaryurp {
+ class BinaryAny;
+ class Bridge;
+ struct WriterState;
+}
+
+namespace binaryurp {
+
+class Marshal {
+public:
+ Marshal(rtl::Reference< Bridge > const & bridge, WriterState & state);
+
+ ~Marshal();
+
+ static void write8(std::vector< unsigned char > * buffer, sal_uInt8 value);
+
+ static void write16(
+ std::vector< unsigned char > * buffer, sal_uInt16 value);
+
+ static void write32(
+ std::vector< unsigned char > * buffer, sal_uInt32 value);
+
+ void writeValue(
+ std::vector< unsigned char > * buffer,
+ com::sun::star::uno::TypeDescription const & type,
+ BinaryAny const & value);
+
+ void writeType(
+ std::vector< unsigned char > * buffer,
+ com::sun::star::uno::TypeDescription const & value);
+
+ void writeOid(
+ std::vector< unsigned char > * buffer, OUString const & oid);
+
+ void writeTid(
+ std::vector< unsigned char > * buffer, rtl::ByteSequence const & tid);
+
+private:
+ Marshal(const Marshal&) = delete;
+ Marshal& operator=(const Marshal&) = delete;
+
+ void writeValue(
+ std::vector< unsigned char > * buffer,
+ com::sun::star::uno::TypeDescription const & type, void const * value);
+
+ void writeMemberValues(
+ std::vector< unsigned char > * buffer,
+ com::sun::star::uno::TypeDescription const & type,
+ void const * aggregateValue);
+
+ rtl::Reference< Bridge > bridge_;
+ WriterState & state_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/outgoingrequest.hxx b/binaryurp/source/outgoingrequest.hxx
new file mode 100644
index 000000000..aaf68e23f
--- /dev/null
+++ b/binaryurp/source/outgoingrequest.hxx
@@ -0,0 +1,49 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_OUTGOINGREQUEST_HXX
+#define INCLUDED_BINARYURP_SOURCE_OUTGOINGREQUEST_HXX
+
+#include <sal/config.h>
+
+#include <typelib/typedescription.hxx>
+
+namespace binaryurp {
+
+struct OutgoingRequest {
+ enum Kind { KIND_NORMAL, KIND_REQUEST_CHANGE, KIND_COMMIT_CHANGE };
+
+ OutgoingRequest(
+ Kind theKind, com::sun::star::uno::TypeDescription const & theMember,
+ bool theSetter):
+ kind(theKind), member(theMember), setter(theSetter)
+ {}
+
+ Kind kind;
+
+ com::sun::star::uno::TypeDescription member;
+
+ bool setter;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/outgoingrequests.cxx b/binaryurp/source/outgoingrequests.cxx
new file mode 100644
index 000000000..d18c01434
--- /dev/null
+++ b/binaryurp/source/outgoingrequests.cxx
@@ -0,0 +1,68 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <rtl/byteseq.hxx>
+#include <osl/mutex.hxx>
+
+#include "lessoperators.hxx"
+#include "outgoingrequest.hxx"
+#include "outgoingrequests.hxx"
+
+namespace binaryurp {
+
+OutgoingRequests::OutgoingRequests() {}
+
+OutgoingRequests::~OutgoingRequests() {}
+
+void OutgoingRequests::push(
+ rtl::ByteSequence const & tid, OutgoingRequest const & request)
+{
+ osl::MutexGuard g(mutex_);
+ map_[tid].push_back(request);
+}
+
+OutgoingRequest OutgoingRequests::top(rtl::ByteSequence const & tid) {
+ osl::MutexGuard g(mutex_);
+ Map::iterator i(map_.find(tid));
+ if (i == map_.end()) {
+ throw css::uno::RuntimeException(
+ "URP: reply for unknown TID");
+ }
+ assert(!i->second.empty());
+ return i->second.back();
+}
+
+void OutgoingRequests::pop(rtl::ByteSequence const & tid) noexcept {
+ osl::MutexGuard g(mutex_);
+ Map::iterator i(map_.find(tid));
+ assert(i != map_.end());
+ i->second.pop_back();
+ if (i->second.empty()) {
+ map_.erase(i);
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/outgoingrequests.hxx b/binaryurp/source/outgoingrequests.hxx
new file mode 100644
index 000000000..e085288bf
--- /dev/null
+++ b/binaryurp/source/outgoingrequests.hxx
@@ -0,0 +1,61 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_OUTGOINGREQUESTS_HXX
+#define INCLUDED_BINARYURP_SOURCE_OUTGOINGREQUESTS_HXX
+
+#include <sal/config.h>
+
+#include <map>
+#include <vector>
+
+#include <osl/mutex.hxx>
+
+namespace binaryurp { struct OutgoingRequest; }
+namespace rtl { class ByteSequence; }
+
+namespace binaryurp {
+
+class OutgoingRequests {
+public:
+ OutgoingRequests();
+
+ ~OutgoingRequests();
+
+ void push(rtl::ByteSequence const & tid, OutgoingRequest const & request);
+
+ OutgoingRequest top(rtl::ByteSequence const & tid);
+
+    void pop(rtl::ByteSequence const & tid) noexcept;
+
+private:
+ OutgoingRequests(const OutgoingRequests&) = delete;
+ OutgoingRequests& operator=(const OutgoingRequests&) = delete;
+
+ typedef std::map< rtl::ByteSequence, std::vector< OutgoingRequest > > Map;
+
+ osl::Mutex mutex_;
+ Map map_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/proxy.cxx b/binaryurp/source/proxy.cxx
new file mode 100644
index 000000000..0e5a92b68
--- /dev/null
+++ b/binaryurp/source/proxy.cxx
@@ -0,0 +1,238 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <exception>
+#include <vector>
+
+#include <cppuhelper/exc_hlp.hxx>
+#include <o3tl/runtimetooustring.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+#include <uno/any2.h>
+#include <uno/dispatcher.h>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "proxy.hxx"
+
+namespace binaryurp {
+
+namespace {
+
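+// extern "C" thunks installed into the uno_Interface function table by the
+// Proxy constructor; they forward to the corresponding Proxy member functions.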
+extern "C" void proxy_acquireInterface(uno_Interface * pInterface) {
+ assert(pInterface != nullptr);
+ static_cast< Proxy * >(pInterface)->do_acquire();
+}
+
+extern "C" void proxy_releaseInterface(uno_Interface * pInterface) {
+ assert(pInterface != nullptr);
+ static_cast< Proxy * >(pInterface)->do_release();
+}
+
+extern "C" void proxy_dispatchInterface(
+ uno_Interface * pUnoI, typelib_TypeDescription const * pMemberType,
+ void * pReturn, void ** pArgs, uno_Any ** ppException)
+{
+ assert(pUnoI != nullptr);
+ static_cast< Proxy * >(pUnoI)->do_dispatch(
+ pMemberType, pReturn, pArgs, ppException);
+}
+
+}
+
+Proxy::Proxy(
+ rtl::Reference< Bridge > const & bridge, OUString const & oid,
+ css::uno::TypeDescription const & type):
+ bridge_(bridge), oid_(oid), type_(type), references_(1)
+{
+ assert(bridge.is());
+ acquire = &proxy_acquireInterface;
+ release = &proxy_releaseInterface;
+ pDispatcher = &proxy_dispatchInterface;
+}
+
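+// Reference counting: do_release revokes the proxy at the bridge when the
+// count drops to zero, do_acquire resurrects it if the count rises from zero
+// again, and do_free finally deletes the proxy (after notifying the bridge via
+// freeProxy).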
+void Proxy::do_acquire() {
+ if (++references_ == 1) {
+ bridge_->resurrectProxy(*this);
+ }
+}
+
+void Proxy::do_release() {
+ if (--references_ == 0) {
+ bridge_->revokeProxy(*this);
+ }
+}
+
+void Proxy::do_free() {
+ bridge_->freeProxy(*this);
+ delete this;
+}
+
+void Proxy::do_dispatch(
+ typelib_TypeDescription const * member, void * returnValue,
+ void ** arguments, uno_Any ** exception) const
+{
+ try {
+ try {
+ do_dispatch_throw(member, returnValue, arguments, exception);
+ } catch (const std::exception & e) {
+ throw css::uno::RuntimeException(
+ "caught C++ exception: " + o3tl::runtimeToOUString(e.what()));
+ }
+ } catch (const css::uno::RuntimeException &) {
+ css::uno::Any exc(cppu::getCaughtException());
+ uno_copyAndConvertData(
+ *exception, &exc,
+ (css::uno::TypeDescription(cppu::UnoType< css::uno::Any >::get()).
+ get()),
+ bridge_->getCppToBinaryMapping().get());
+ }
+}
+
+bool Proxy::isProxy(
+ rtl::Reference< Bridge > const & bridge,
+ css::uno::UnoInterfaceReference const & object, OUString * oid)
+{
+ assert(object.is());
+ return object.m_pUnoI->acquire == &proxy_acquireInterface &&
+ static_cast< Proxy * >(object.m_pUnoI)->isProxy(bridge, oid);
+}
+
+Proxy::~Proxy() {}
+
+void Proxy::do_dispatch_throw(
+ typelib_TypeDescription const * member, void * returnValue,
+ void ** arguments, uno_Any ** exception) const
+{
+ //TODO: Optimize queryInterface:
+ assert(member != nullptr);
+ bool bSetter = false;
+ std::vector< BinaryAny > inArgs;
+ switch (member->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ bSetter = returnValue == nullptr;
+ if (bSetter) {
+ inArgs.emplace_back(
+ css::uno::TypeDescription(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription const * >(
+ member)->
+ pAttributeTypeRef),
+ arguments[0]);
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription const * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription const * >(member);
+ for (sal_Int32 i = 0; i != mtd->nParams; ++i) {
+ if (mtd->pParams[i].bIn) {
+ inArgs.emplace_back(
+ css::uno::TypeDescription(mtd->pParams[i].pTypeRef),
+ arguments[i]);
+ }
+ }
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ BinaryAny ret;
+ std::vector< BinaryAny > outArgs;
+ if (bridge_->makeCall(
+ oid_,
+ css::uno::TypeDescription(
+ const_cast< typelib_TypeDescription * >(member)),
+ bSetter, inArgs, &ret, &outArgs))
+ {
+ assert(ret.getType().get()->eTypeClass == typelib_TypeClass_EXCEPTION);
+ uno_any_construct(
+ *exception, ret.getValue(ret.getType()), ret.getType().get(), nullptr);
+ } else {
+ switch (member->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ if (!bSetter) {
+ css::uno::TypeDescription t(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription const * >(
+ member)->
+ pAttributeTypeRef);
+ uno_copyData(returnValue, ret.getValue(t), t.get(), nullptr);
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription const * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription const * >(
+ member);
+ css::uno::TypeDescription t(mtd->pReturnTypeRef);
+ if (t.get()->eTypeClass != typelib_TypeClass_VOID) {
+ uno_copyData(returnValue, ret.getValue(t), t.get(), nullptr);
+ }
+ std::vector< BinaryAny >::iterator i(outArgs.begin());
+ for (sal_Int32 j = 0; j != mtd->nParams; ++j) {
+ if (mtd->pParams[j].bOut) {
+ css::uno::TypeDescription pt(mtd->pParams[j].pTypeRef);
+ if (mtd->pParams[j].bIn) {
+ (void) uno_assignData(
+ arguments[j], pt.get(), i++->getValue(pt),
+ pt.get(), nullptr, nullptr, nullptr);
+ } else {
+ uno_copyData(
+ arguments[j], i++->getValue(pt), pt.get(), nullptr);
+ }
+ }
+ }
+ assert(i == outArgs.end());
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ *exception = nullptr;
+ }
+}
+
+bool Proxy::isProxy(
+ rtl::Reference< Bridge > const & bridge, OUString * oid) const
+{
+ assert(oid != nullptr);
+ if (bridge == bridge_) {
+ *oid = oid_;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/proxy.hxx b/binaryurp/source/proxy.hxx
new file mode 100644
index 000000000..d4a94cd02
--- /dev/null
+++ b/binaryurp/source/proxy.hxx
@@ -0,0 +1,88 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_PROXY_HXX
+#define INCLUDED_BINARYURP_SOURCE_PROXY_HXX
+
+#include <sal/config.h>
+
+#include <atomic>
+#include <cstddef>
+
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+#include <uno/any2.h>
+#include <uno/dispatcher.h>
+
+namespace binaryurp { class Bridge; }
+namespace com::sun::star::uno { class UnoInterfaceReference; }
+
+namespace binaryurp {
+
+class Proxy: public uno_Interface {
+public:
+ Proxy(
+ rtl::Reference< Bridge > const & bridge, OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type);
+
+ const OUString& getOid() const { return oid_;}
+
+ const com::sun::star::uno::TypeDescription& getType() const { return type_;}
+
+ void do_acquire();
+
+ void do_release();
+
+ void do_free();
+
+ void do_dispatch(
+ typelib_TypeDescription const * member, void * returnValue,
+ void ** arguments, uno_Any ** exception) const;
+
+ static bool isProxy(
+ rtl::Reference< Bridge > const & bridge,
+ com::sun::star::uno::UnoInterfaceReference const & object,
+ OUString * oid);
+
+private:
+ Proxy(const Proxy&) = delete;
+ Proxy& operator=(const Proxy&) = delete;
+
+ ~Proxy();
+
+ void do_dispatch_throw(
+ typelib_TypeDescription const * member, void * returnValue,
+ void ** arguments, uno_Any ** exception) const;
+
+ bool isProxy(rtl::Reference< Bridge > const & bridge, OUString * oid)
+ const;
+
+ rtl::Reference< Bridge > bridge_;
+ OUString oid_;
+ com::sun::star::uno::TypeDescription type_;
+ std::atomic<std::size_t> references_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/reader.cxx b/binaryurp/source/reader.cxx
new file mode 100644
index 000000000..b11b77ffe
--- /dev/null
+++ b/binaryurp/source/reader.cxx
@@ -0,0 +1,479 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <exception>
+#include <memory>
+#include <vector>
+
+#include <com/sun/star/connection/XConnection.hpp>
+#include <com/sun/star/io/IOException.hpp>
+#include <com/sun/star/uno/Any.hxx>
+#include <com/sun/star/uno/Exception.hpp>
+#include <com/sun/star/uno/Reference.hxx>
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+#include <com/sun/star/uno/Type.hxx>
+#include <com/sun/star/uno/XCurrentContext.hpp>
+#include <com/sun/star/uno/XInterface.hpp>
+#include <cppu/unotype.hxx>
+#include <rtl/byteseq.h>
+#include <rtl/ustring.hxx>
+#include <sal/log.hxx>
+#include <sal/types.h>
+#include <typelib/typeclass.h>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "incomingreply.hxx"
+#include "incomingrequest.hxx"
+#include "outgoingrequest.hxx"
+#include "reader.hxx"
+#include "specialfunctionids.hxx"
+#include "unmarshal.hxx"
+
+namespace binaryurp {
+
+namespace {
+
+css::uno::Sequence< sal_Int8 > read(
+ css::uno::Reference< css::connection::XConnection > const & connection,
+ sal_uInt32 size, bool eofOk)
+{
+ assert(connection.is());
+ if (size > SAL_MAX_INT32) {
+ throw css::uno::RuntimeException(
+ "binaryurp::Reader: block size too large");
+ }
+ css::uno::Sequence< sal_Int8 > buf;
+ sal_Int32 n = connection->read(buf, static_cast< sal_Int32 >(size));
+ if (n == 0 && eofOk) {
+ return css::uno::Sequence< sal_Int8 >();
+ }
+ if (n != static_cast< sal_Int32 >(size)) {
+ throw css::io::IOException(
+ "binaryurp::Reader: premature end of input");
+ }
+ assert(buf.getLength() == static_cast< sal_Int32 >(size));
+ return buf;
+}
+
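+// Thread pool callback: assumes ownership of the heap-allocated
+// IncomingRequest and executes it.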
+extern "C" void request(void * pThreadSpecificData) {
+ assert(pThreadSpecificData != nullptr);
+ std::unique_ptr< IncomingRequest >(
+ static_cast< IncomingRequest * >(pThreadSpecificData))->
+ execute();
+}
+
+}
+
+Reader::Reader(rtl::Reference< Bridge > const & bridge):
+ Thread("binaryurpReader"), bridge_(bridge)
+{
+ assert(bridge.is());
+}
+
+Reader::~Reader() {}
+
+void Reader::execute() {
+ try {
+ bridge_->sendRequestChangeRequest();
+ css::uno::Reference< css::connection::XConnection > con(
+ bridge_->getConnection());
+ for (;;) {
+ css::uno::Sequence< sal_Int8 > s(read(con, 8, true));
+ if (!s.hasElements()) {
+ break;
+ }
+ Unmarshal header(bridge_, state_, s);
+ sal_uInt32 size = header.read32();
+ sal_uInt32 count = header.read32();
+ header.done();
+ if (count == 0) {
+ throw css::io::IOException(
+ "binaryurp::Reader: block with zero message count received");
+ }
+ Unmarshal block(bridge_, state_, read(con, size, false));
+ for (sal_uInt32 i = 0; i != count; ++i) {
+ readMessage(block);
+ }
+ block.done();
+ }
+ } catch (const css::uno::Exception & e) {
+ SAL_WARN("binaryurp", "caught UNO exception '" << e << '\'');
+ } catch (const std::exception & e) {
+ SAL_WARN("binaryurp", "caught C++ exception '" << e.what() << '\'');
+ }
+ bridge_->terminate(false);
+ bridge_.clear();
+}
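+
+// Informative sketch of the block framing read in Reader::execute() (the
+// byte values are illustrative, not a normative restatement of the URP
+// specification): each block starts with an 8-byte header of two big-endian
+// 32-bit values,
+//
+//   00 00 00 10  00 00 00 01
+//   \___size___/ \__count__/   -> 16 payload bytes carrying one message
+//
+// where size counts only the payload following the header and count must be
+// non-zero, as enforced above.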
+
+void Reader::readMessage(Unmarshal & unmarshal) {
+ sal_uInt8 flags1 = unmarshal.read8();
+ bool newType;
+ bool newOid;
+ bool newTid;
+ bool forceSynchronous;
+ sal_uInt16 functionId;
+ if ((flags1 & 0x80) != 0) { // bit 7: LONGHEADER
+ if ((flags1 & 0x40) == 0) { // bit 6: REQUEST
+ readReplyMessage(unmarshal, flags1);
+ return;
+ }
+ newType = (flags1 & 0x20) != 0; // bit 5: NEWTYPE
+ newOid = (flags1 & 0x10) != 0; // bit 4: NEWOID
+ newTid = (flags1 & 0x08) != 0; // bit 3: NEWTID
+        if ((flags1 & 0x01) != 0) { // bit 0: MOREFLAGS
+ sal_uInt8 flags2 = unmarshal.read8();
+ forceSynchronous = (flags2 & 0x80) != 0; // bit 7: MUSTREPLY
+ if (((flags2 & 0x40) != 0) != forceSynchronous) {
+ // bit 6: SYNCHRONOUS
+ throw css::uno::RuntimeException(
+ "URP: request message with MUSTREPLY != SYNCHRONOUS"
+ " received");
+ }
+ } else {
+ forceSynchronous = false;
+ }
+ functionId = ((flags1 & 0x04) != 0) // bit 2: FUNCTIONID16
+ ? unmarshal.read16() : unmarshal.read8();
+ } else {
+ newType = false;
+ newOid = false;
+ newTid = false;
+ forceSynchronous = false;
+ functionId = ((flags1 & 0x40) != 0) // bit 6: FUNCTIONID14
+ ? ((flags1 & 0x3F) << 8) | unmarshal.read8() : flags1 & 0x3F;
+ }
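+    // Illustrative decoding of the two header forms above (byte values are
+    // examples only): flags1 = 0xF8 = LONGHEADER|REQUEST|NEWTYPE|NEWOID|
+    // NEWTID announces a request carrying a new type, OID, and TID, followed
+    // by a one-byte function ID; a repeated call with a small function ID
+    // shrinks to a single short-form byte, e.g. flags1 = 0x03 -> functionId
+    // = 3.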
+ css::uno::TypeDescription type;
+ if (newType) {
+ type = unmarshal.readType();
+ lastType_ = type;
+ } else {
+ if (!lastType_.is()) {
+ throw css::uno::RuntimeException(
+ "URP: request message with NEWTYPE received when last"
+ " interface type has not yet been set");
+ }
+ type = lastType_;
+ }
+ OUString oid;
+ if (newOid) {
+ oid = unmarshal.readOid();
+ if (oid.isEmpty()) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: empty OID");
+ }
+ lastOid_ = oid;
+ } else {
+ if (lastOid_.isEmpty()) {
+ throw css::uno::RuntimeException(
+ "URP: request message with NEWOID received when last OID has"
+ " not yet been set");
+ }
+ oid = lastOid_;
+ }
+ rtl::ByteSequence tid(getTid(unmarshal, newTid));
+ lastTid_ = tid;
+ type.makeComplete();
+ if (type.get()->eTypeClass != typelib_TypeClass_INTERFACE) {
+ throw css::uno::RuntimeException(
+ "URP: request message with non-interface interface type received");
+ }
+ typelib_InterfaceTypeDescription * itd =
+ reinterpret_cast< typelib_InterfaceTypeDescription * >(type.get());
+ if (functionId >= itd->nMapFunctionIndexToMemberIndex) {
+ throw css::uno::RuntimeException(
+ "URP: request message with unknown function ID received");
+ }
+ sal_Int32 memberId = itd->pMapFunctionIndexToMemberIndex[functionId];
+ css::uno::TypeDescription memberTd(itd->ppAllMembers[memberId]);
+ memberTd.makeComplete();
+ assert(memberTd.is());
+ bool protProps = bridge_->isProtocolPropertiesRequest(oid, type);
+ bool ccMode = !protProps && functionId != SPECIAL_FUNCTION_ID_RELEASE &&
+ bridge_->isCurrentContextMode();
+ css::uno::UnoInterfaceReference cc;
+ if (ccMode) {
+ css::uno::TypeDescription t(
+ cppu::UnoType<css::uno::XCurrentContext>::get());
+ cc.set(
+ *static_cast< uno_Interface ** >(
+ unmarshal.readValue(t).getValue(t)));
+ }
+ bool oneWay =
+ memberTd.get()->eTypeClass == typelib_TypeClass_INTERFACE_METHOD &&
+ (reinterpret_cast< typelib_InterfaceMethodTypeDescription * >(
+ memberTd.get())->
+ bOneWay);
+ SAL_INFO_IF(
+ !oneWay && forceSynchronous, "binaryurp",
+ ("superfluous MUSTREPLY/SYNCHRONOUS ignored in request message with"
+ " non-oneway function ID"));
+ bool synchronous = !oneWay || forceSynchronous;
+ bool bSetter = false;
+ std::vector< BinaryAny > inArgs;
+ switch (memberTd.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ bSetter = itd->pMapMemberIndexToFunctionIndex[memberId] != functionId;
+ // pMapMemberIndexToFunctionIndex contains function index of
+ // attribute getter
+ if (bSetter) {
+ inArgs.push_back(
+ unmarshal.readValue(
+ css::uno::TypeDescription(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ memberTd.get())->
+ pAttributeTypeRef)));
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast< typelib_InterfaceMethodTypeDescription * >(
+ memberTd.get());
+ for (sal_Int32 i = 0; i != mtd->nParams; ++i) {
+ if (mtd->pParams[i].bIn) {
+ inArgs.push_back(
+ unmarshal.readValue(
+ css::uno::TypeDescription(
+ mtd->pParams[i].pTypeRef)));
+ }
+ }
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ bridge_->incrementCalls(
+ !protProps && functionId != SPECIAL_FUNCTION_ID_RELEASE);
+ if (protProps) {
+ switch (functionId) {
+ case SPECIAL_FUNCTION_ID_REQUEST_CHANGE:
+ bridge_->handleRequestChangeRequest(tid, inArgs);
+ break;
+ case SPECIAL_FUNCTION_ID_COMMIT_CHANGE:
+ bridge_->handleCommitChangeRequest(tid, inArgs);
+ break;
+ default:
+ throw css::uno::RuntimeException(
+ "URP: request message with UrpProtocolProperties OID and"
+ " unknown function ID received");
+ }
+ } else {
+ css::uno::UnoInterfaceReference obj;
+ switch (functionId) {
+ case SPECIAL_FUNCTION_ID_QUERY_INTERFACE:
+ obj = bridge_->findStub(oid, type);
+ if (!obj.is()) {
+ assert(
+ inArgs.size() == 1
+ && inArgs[0].getType().equals(
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Type >::get())));
+ if (!(type.equals(
+ css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference<
+ css::uno::XInterface > >::get()))
+ && (css::uno::TypeDescription(
+ *static_cast<
+ typelib_TypeDescriptionReference ** >(
+ inArgs[0].getValue(inArgs[0].getType()))).
+ equals(
+ css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference<
+ css::uno::XInterface > >::get())))))
+ {
+ throw css::uno::RuntimeException(
+ "URP: queryInterface request message with unknown OID '"
+ + oid + "' received");
+ }
+ }
+ break;
+ case SPECIAL_FUNCTION_ID_RESERVED:
+ throw css::uno::RuntimeException(
+ "URP: request message with unknown function ID 1 received");
+ case SPECIAL_FUNCTION_ID_RELEASE:
+ break;
+ default:
+ obj = bridge_->findStub(oid, type);
+ if (!obj.is()) {
+ throw css::uno::RuntimeException(
+ "URP: request message with unknown OID received");
+ }
+ break;
+ }
+ std::unique_ptr< IncomingRequest > req(
+ new IncomingRequest(
+ bridge_, tid, oid, obj, type, functionId, synchronous, memberTd,
+ bSetter, inArgs, ccMode, cc));
+ if (synchronous) {
+ bridge_->incrementActiveCalls();
+ }
+ uno_threadpool_putJob(
+ bridge_->getThreadPool(), tid.getHandle(), req.get(), &request,
+ !synchronous);
+ req.release();
+ }
+}
+
+void Reader::readReplyMessage(Unmarshal & unmarshal, sal_uInt8 flags1) {
+ rtl::ByteSequence tid(getTid(unmarshal, (flags1 & 0x08) != 0));
+ // bit 3: NEWTID
+ lastTid_ = tid;
+ OutgoingRequest req(bridge_->lastOutgoingRequest(tid));
+ bool exc = (flags1 & 0x20) != 0; // bit 5: EXCEPTION
+ BinaryAny ret;
+ std::vector< BinaryAny > outArgs;
+ if (exc) {
+ ret = unmarshal.readValue(
+ css::uno::TypeDescription(cppu::UnoType< css::uno::Any >::get()));
+ if (!typelib_typedescription_isAssignableFrom(
+ (css::uno::TypeDescription(
+ cppu::UnoType< css::uno::RuntimeException >::get()).
+ get()),
+ ret.getType().get()))
+ {
+ sal_Int32 n = 0;
+ typelib_TypeDescriptionReference ** p = nullptr;
+ switch (req.member.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ {
+ typelib_InterfaceAttributeTypeDescription * atd =
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ req.member.get());
+ n = req.setter ? atd->nSetExceptions : atd->nGetExceptions;
+ p = req.setter
+ ? atd->ppSetExceptions : atd->ppGetExceptions;
+ break;
+ }
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription * >(
+ req.member.get());
+ n = mtd->nExceptions;
+ p = mtd->ppExceptions;
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ bool bOk = false;
+ for (sal_Int32 i = 0; i != n; ++i) {
+ if (typelib_typedescriptionreference_isAssignableFrom(
+ p[i],
+ reinterpret_cast< typelib_TypeDescriptionReference * >(
+ ret.getType().get())))
+ {
+ bOk = true;
+ break;
+ }
+ }
+ if (!bOk) {
+ throw css::uno::RuntimeException(
+ "URP: reply message with bad exception type received");
+ }
+ }
+ } else {
+ switch (req.member.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ if (!req.setter) {
+ ret = unmarshal.readValue(
+ css::uno::TypeDescription(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ req.member.get())->
+ pAttributeTypeRef));
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription * >(
+ req.member.get());
+ ret = unmarshal.readValue(
+ css::uno::TypeDescription(mtd->pReturnTypeRef));
+ for (sal_Int32 i = 0; i != mtd->nParams; ++i) {
+ if (mtd->pParams[i].bOut) {
+ outArgs.push_back(
+ unmarshal.readValue(
+ css::uno::TypeDescription(
+ mtd->pParams[i].pTypeRef)));
+ }
+ }
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ }
+ switch (req.kind) {
+ case OutgoingRequest::KIND_NORMAL:
+ {
+ std::unique_ptr< IncomingReply > resp(
+ new IncomingReply(exc, ret, outArgs));
+ uno_threadpool_putJob(
+ bridge_->getThreadPool(), tid.getHandle(), resp.get(), nullptr,
+ false);
+ resp.release();
+ break;
+ }
+ case OutgoingRequest::KIND_REQUEST_CHANGE:
+ assert(outArgs.empty());
+ bridge_->handleRequestChangeReply(exc, ret);
+ break;
+ case OutgoingRequest::KIND_COMMIT_CHANGE:
+ assert(outArgs.empty());
+ bridge_->handleCommitChangeReply(exc, ret);
+ break;
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+}
+
+rtl::ByteSequence Reader::getTid(Unmarshal & unmarshal, bool newTid) const {
+ if (newTid) {
+ return unmarshal.readTid();
+ }
+ if (lastTid_.getLength() == 0) {
+ throw css::uno::RuntimeException(
+ "URP: message with NEWTID received when last TID has not yet been"
+ " set");
+ }
+ return lastTid_;
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/reader.hxx b/binaryurp/source/reader.hxx
new file mode 100644
index 000000000..6510bbb30
--- /dev/null
+++ b/binaryurp/source/reader.hxx
@@ -0,0 +1,67 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_READER_HXX
+#define INCLUDED_BINARYURP_SOURCE_READER_HXX
+
+#include <sal/config.h>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <salhelper/thread.hxx>
+#include <typelib/typedescription.hxx>
+
+#include "readerstate.hxx"
+
+namespace binaryurp {
+ class Bridge;
+ class Unmarshal;
+}
+
+namespace binaryurp {
+
+class Reader: public salhelper::Thread {
+public:
+ explicit Reader(rtl::Reference< Bridge > const & bridge);
+
+private:
+ virtual ~Reader() override;
+
+ virtual void execute() override;
+
+ void readMessage(Unmarshal & unmarshal);
+
+ void readReplyMessage(Unmarshal & unmarshal, sal_uInt8 flags1);
+
+ rtl::ByteSequence getTid(Unmarshal & unmarshal, bool newTid) const;
+
+ rtl::Reference< Bridge > bridge_;
+ com::sun::star::uno::TypeDescription lastType_;
+ OUString lastOid_;
+ rtl::ByteSequence lastTid_;
+ ReaderState state_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/readerstate.hxx b/binaryurp/source/readerstate.hxx
new file mode 100644
index 000000000..5b401faec
--- /dev/null
+++ b/binaryurp/source/readerstate.hxx
@@ -0,0 +1,49 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_READERSTATE_HXX
+#define INCLUDED_BINARYURP_SOURCE_READERSTATE_HXX
+
+#include <sal/config.h>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ustring.hxx>
+#include <typelib/typedescription.hxx>
+
+#include "cache.hxx"
+
+namespace binaryurp {
+
+struct ReaderState {
+private:
+ ReaderState(const ReaderState&) = delete;
+ ReaderState& operator=(const ReaderState&) = delete;
+public:
+ ReaderState() {}
+
+ com::sun::star::uno::TypeDescription typeCache[cache::size];
+ OUString oidCache[cache::size];
+ rtl::ByteSequence tidCache[cache::size];
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/specialfunctionids.hxx b/binaryurp/source/specialfunctionids.hxx
new file mode 100644
index 000000000..ef147a326
--- /dev/null
+++ b/binaryurp/source/specialfunctionids.hxx
@@ -0,0 +1,43 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_SPECIALFUNCTIONIDS_HXX
+#define INCLUDED_BINARYURP_SOURCE_SPECIALFUNCTIONIDS_HXX
+
+#include <sal/config.h>
+
+namespace binaryurp {
+
+enum SpecialFunctionIds {
+ SPECIAL_FUNCTION_ID_QUERY_INTERFACE = 0,
+
+ SPECIAL_FUNCTION_ID_RESERVED = 1,
+
+ SPECIAL_FUNCTION_ID_RELEASE = 2,
+
+ SPECIAL_FUNCTION_ID_REQUEST_CHANGE = 4,
+
+ SPECIAL_FUNCTION_ID_COMMIT_CHANGE = 5
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/unmarshal.cxx b/binaryurp/source/unmarshal.cxx
new file mode 100644
index 000000000..8cf8676ed
--- /dev/null
+++ b/binaryurp/source/unmarshal.cxx
@@ -0,0 +1,488 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <cstdlib>
+#include <new>
+#include <vector>
+
+#include <com/sun/star/io/IOException.hpp>
+#include <com/sun/star/uno/RuntimeException.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+#include <cppu/unotype.hxx>
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/textcvt.h>
+#include <rtl/textenc.h>
+#include <rtl/ustring.h>
+#include <rtl/ustring.hxx>
+#include <sal/types.h>
+#include <typelib/typeclass.h>
+#include <typelib/typedescription.h>
+#include <typelib/typedescription.hxx>
+#include <uno/any2.h>
+#include <uno/data.h>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "cache.hxx"
+#include "readerstate.hxx"
+#include "unmarshal.hxx"
+
+namespace binaryurp {
+
+namespace {
+
+void * allocate(sal_Size size) {
+ void * p = std::malloc(size);
+ if (p == nullptr) {
+ throw std::bad_alloc();
+ }
+ return p;
+}
+
+std::vector< BinaryAny >::iterator copyMemberValues(
+ css::uno::TypeDescription const & type,
+    std::vector< BinaryAny >::iterator const & it, void * buffer) noexcept
+{
+ assert(
+ type.is() &&
+ (type.get()->eTypeClass == typelib_TypeClass_STRUCT ||
+ type.get()->eTypeClass == typelib_TypeClass_EXCEPTION) &&
+ buffer != nullptr);
+ type.makeComplete();
+ std::vector< BinaryAny >::iterator i(it);
+ typelib_CompoundTypeDescription * ctd =
+ reinterpret_cast< typelib_CompoundTypeDescription * >(type.get());
+ if (ctd->pBaseTypeDescription != nullptr) {
+ i = copyMemberValues(
+ css::uno::TypeDescription(&ctd->pBaseTypeDescription->aBase), i,
+ buffer);
+ }
+ for (sal_Int32 j = 0; j != ctd->nMembers; ++j) {
+ uno_type_copyData(
+ static_cast< char * >(buffer) + ctd->pMemberOffsets[j],
+ i++->getValue(css::uno::TypeDescription(ctd->ppTypeRefs[j])),
+ ctd->ppTypeRefs[j], nullptr);
+ }
+ return i;
+}
+
+}
+
+Unmarshal::Unmarshal(
+ rtl::Reference< Bridge > const & bridge, ReaderState & state,
+ css::uno::Sequence< sal_Int8 > const & buffer):
+ bridge_(bridge), state_(state), buffer_(buffer)
+{
+ data_ = reinterpret_cast< sal_uInt8 const * >(buffer_.getConstArray());
+ end_ = data_ + buffer_.getLength();
+}
+
+Unmarshal::~Unmarshal() {}
+
+sal_uInt8 Unmarshal::read8() {
+ check(1);
+ return *data_++;
+}
+
+sal_uInt16 Unmarshal::read16() {
+ check(2);
+ sal_uInt16 n = static_cast< sal_uInt16 >(*data_++) << 8;
+ return n | *data_++;
+}
+
+sal_uInt32 Unmarshal::read32() {
+ check(4);
+ sal_uInt32 n = static_cast< sal_uInt32 >(*data_++) << 24;
+ n |= static_cast< sal_uInt32 >(*data_++) << 16;
+ n |= static_cast< sal_uInt32 >(*data_++) << 8;
+ return n | *data_++;
+}
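+
+// All multi-byte integers on the wire are big-endian, as the shifts in
+// read16()/read32() (and read64() below) show; e.g. the bytes
+// 12 34 56 78 unmarshal via read32() to 0x12345678.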
+
+css::uno::TypeDescription Unmarshal::readType() {
+ sal_uInt8 flags = read8();
+ typelib_TypeClass tc = static_cast< typelib_TypeClass >(flags & 0x7F);
+ switch (tc) {
+ case typelib_TypeClass_VOID:
+ case typelib_TypeClass_BOOLEAN:
+ case typelib_TypeClass_BYTE:
+ case typelib_TypeClass_SHORT:
+ case typelib_TypeClass_UNSIGNED_SHORT:
+ case typelib_TypeClass_LONG:
+ case typelib_TypeClass_UNSIGNED_LONG:
+ case typelib_TypeClass_HYPER:
+ case typelib_TypeClass_UNSIGNED_HYPER:
+ case typelib_TypeClass_FLOAT:
+ case typelib_TypeClass_DOUBLE:
+ case typelib_TypeClass_CHAR:
+ case typelib_TypeClass_STRING:
+ case typelib_TypeClass_TYPE:
+ case typelib_TypeClass_ANY:
+ if ((flags & 0x80) != 0) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: cache flag of simple type is set");
+ }
+ return css::uno::TypeDescription(
+ *typelib_static_type_getByTypeClass(tc));
+ case typelib_TypeClass_SEQUENCE:
+ case typelib_TypeClass_ENUM:
+ case typelib_TypeClass_STRUCT:
+ case typelib_TypeClass_EXCEPTION:
+ case typelib_TypeClass_INTERFACE:
+ {
+ sal_uInt16 idx = readCacheIndex();
+ if ((flags & 0x80) == 0) {
+ if (idx == cache::ignore || !state_.typeCache[idx].is()) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: unknown type cache index");
+ }
+ return state_.typeCache[idx];
+ } else {
+ OUString const str(readString());
+ css::uno::TypeDescription t(str);
+ if (!t.is() || t.get()->eTypeClass != tc) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: type with unknown name: " + str);
+ }
+ for (css::uno::TypeDescription t2(t);
+ t2.get()->eTypeClass == typelib_TypeClass_SEQUENCE;)
+ {
+ t2.makeComplete();
+ t2 = css::uno::TypeDescription(
+ reinterpret_cast< typelib_IndirectTypeDescription * >(
+ t2.get())->pType);
+ if (!t2.is()) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: sequence type with unknown"
+ " component type");
+ }
+ switch (t2.get()->eTypeClass) {
+ case typelib_TypeClass_VOID:
+ case typelib_TypeClass_EXCEPTION:
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: sequence type with bad"
+ " component type");
+ default:
+ break;
+ }
+ }
+ if (idx != cache::ignore) {
+ state_.typeCache[idx] = t;
+ }
+ return t;
+ }
+ }
+ default:
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: type of unknown type class");
+ }
+}
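+
+// Informative summary of the type encoding handled above: one flags byte
+// whose low seven bits carry the type class, followed for non-simple types
+// by a 16-bit cache index; if the high bit of the flags byte is set, the
+// full type name follows as a string and is stored at that index, otherwise
+// the index refers back to a previously transmitted type.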
+
+OUString Unmarshal::readOid() {
+ OUString oid(readString());
+ for (sal_Int32 i = 0; i != oid.getLength(); ++i) {
+ if (oid[i] > 0x7F) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: OID contains non-ASCII character");
+ }
+ }
+ sal_uInt16 idx = readCacheIndex();
+ if (oid.isEmpty() && idx != cache::ignore) {
+ if (state_.oidCache[idx].isEmpty()) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: unknown OID cache index");
+ }
+ return state_.oidCache[idx];
+ }
+ if (idx != cache::ignore) {
+ state_.oidCache[idx] = oid;
+ }
+ return oid;
+}
+
+rtl::ByteSequence Unmarshal::readTid() {
+ rtl::ByteSequence tid(
+ *static_cast< sal_Sequence * const * >(
+ readSequence(
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Sequence< sal_Int8 > >::get())).
+ getValue(
+ css::uno::TypeDescription(
+ cppu::UnoType< css::uno::Sequence< sal_Int8 > >::get()))));
+ sal_uInt16 idx = readCacheIndex();
+ if (tid.getLength() == 0) {
+ if (idx == cache::ignore || state_.tidCache[idx].getLength() == 0) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: unknown TID cache index");
+ }
+ return state_.tidCache[idx];
+ }
+ if (idx != cache::ignore) {
+ state_.tidCache[idx] = tid;
+ }
+ return tid;
+}
+
+BinaryAny Unmarshal::readValue(css::uno::TypeDescription const & type) {
+ assert(type.is());
+ switch (type.get()->eTypeClass) {
+ default:
+ std::abort(); // this cannot happen
+ // pseudo fall-through to avoid compiler warnings
+ case typelib_TypeClass_VOID:
+ return BinaryAny();
+ case typelib_TypeClass_BOOLEAN:
+ {
+ sal_uInt8 v = read8();
+ if (v > 1) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: boolean of unknown value");
+ }
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_BYTE:
+ {
+ sal_uInt8 v = read8();
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_SHORT:
+ case typelib_TypeClass_UNSIGNED_SHORT:
+ case typelib_TypeClass_CHAR:
+ {
+ sal_uInt16 v = read16();
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_LONG:
+ case typelib_TypeClass_UNSIGNED_LONG:
+ case typelib_TypeClass_FLOAT:
+ {
+ sal_uInt32 v = read32();
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_HYPER:
+ case typelib_TypeClass_UNSIGNED_HYPER:
+ case typelib_TypeClass_DOUBLE:
+ {
+ sal_uInt64 v = read64();
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_STRING:
+ {
+ OUString v(readString());
+ return BinaryAny(type, &v.pData);
+ }
+ case typelib_TypeClass_TYPE:
+ {
+ css::uno::TypeDescription v(readType());
+ typelib_TypeDescription * p = v.get();
+ return BinaryAny(type, &p);
+ }
+ case typelib_TypeClass_ANY:
+ {
+ css::uno::TypeDescription t(readType());
+ if (t.get()->eTypeClass == typelib_TypeClass_ANY) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: any of type ANY");
+ }
+ return readValue(t);
+ }
+ case typelib_TypeClass_SEQUENCE:
+ type.makeComplete();
+ return readSequence(type);
+ case typelib_TypeClass_ENUM:
+ {
+ sal_Int32 v = static_cast< sal_Int32 >(read32());
+ type.makeComplete();
+ typelib_EnumTypeDescription * etd =
+ reinterpret_cast< typelib_EnumTypeDescription * >(type.get());
+ bool bFound = false;
+ for (sal_Int32 i = 0; i != etd->nEnumValues; ++i) {
+ if (etd->pEnumValues[i] == v) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: unknown enum value");
+ }
+ return BinaryAny(type, &v);
+ }
+ case typelib_TypeClass_STRUCT:
+ case typelib_TypeClass_EXCEPTION:
+ {
+ std::vector< BinaryAny > as;
+ readMemberValues(type, &as);
+ void * buf = allocate(type.get()->nSize);
+ copyMemberValues(type, as.begin(), buf);
+ uno_Any raw;
+ raw.pType = reinterpret_cast< typelib_TypeDescriptionReference * >(
+ type.get());
+ raw.pData = buf;
+ raw.pReserved = nullptr;
+ return BinaryAny(raw);
+ }
+ case typelib_TypeClass_INTERFACE:
+ {
+ css::uno::UnoInterfaceReference obj(
+ bridge_->registerIncomingInterface(readOid(), type));
+ return BinaryAny(type, &obj.m_pUnoI);
+ }
+ }
+}
+
+void Unmarshal::done() const {
+ if (data_ != end_) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: block contains excess data");
+ }
+}
+
+void Unmarshal::check(sal_Int32 size) const {
+ if (end_ - data_ < size) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: trying to read past end of block");
+ }
+}
+
+sal_uInt32 Unmarshal::readCompressed() {
+ sal_uInt8 n = read8();
+ return n == 0xFF ? read32() : n;
+}
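+
+// The compressed format above encodes values below 0xFF in a single byte
+// and escapes larger values with 0xFF followed by a full big-endian 32-bit
+// value; e.g. 2A -> 42, while FF 00 01 00 00 -> 65536.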
+
+sal_uInt16 Unmarshal::readCacheIndex() {
+ sal_uInt16 idx = read16();
+ if (idx >= cache::size && idx != cache::ignore) {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: cache index out of range");
+ }
+ return idx;
+}
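+
+// cache::size and cache::ignore are defined in cache.hxx (not shown here);
+// assuming the usual binaryurp values (size = 256, ignore = 0xFFFF), any
+// index in [0, 255] addresses a cache slot and 0xFFFF means "do not cache".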
+
+sal_uInt64 Unmarshal::read64() {
+ check(8);
+ sal_uInt64 n = static_cast< sal_uInt64 >(*data_++) << 56;
+ n |= static_cast< sal_uInt64 >(*data_++) << 48;
+ n |= static_cast< sal_uInt64 >(*data_++) << 40;
+ n |= static_cast< sal_uInt64 >(*data_++) << 32;
+ n |= static_cast< sal_uInt64 >(*data_++) << 24;
+ n |= static_cast< sal_uInt64 >(*data_++) << 16;
+ n |= static_cast< sal_uInt64 >(*data_++) << 8;
+ return n | *data_++;
+}
+
+OUString Unmarshal::readString() {
+ sal_uInt32 n = readCompressed();
+ if (n > SAL_MAX_INT32) {
+ throw css::uno::RuntimeException(
+ "binaryurp::Unmarshal: string size too large");
+ }
+ check(static_cast< sal_Int32 >(n));
+ OUString s;
+ if (!rtl_convertStringToUString(
+ &s.pData, reinterpret_cast< char const * >(data_),
+ static_cast< sal_Int32 >(n), RTL_TEXTENCODING_UTF8,
+ (RTL_TEXTTOUNICODE_FLAGS_UNDEFINED_ERROR |
+ RTL_TEXTTOUNICODE_FLAGS_MBUNDEFINED_ERROR |
+ RTL_TEXTTOUNICODE_FLAGS_INVALID_ERROR)))
+ {
+ throw css::io::IOException(
+ "binaryurp::Unmarshal: string does not contain UTF-8");
+ }
+ data_ += n;
+ return s;
+}
+
+BinaryAny Unmarshal::readSequence(css::uno::TypeDescription const & type) {
+ assert(type.is() && type.get()->eTypeClass == typelib_TypeClass_SEQUENCE);
+ sal_uInt32 n = readCompressed();
+ if (n > SAL_MAX_INT32) {
+ throw css::uno::RuntimeException(
+ "binaryurp::Unmarshal: sequence size too large");
+ }
+ if (n == 0) {
+ return BinaryAny(type, nullptr);
+ }
+ css::uno::TypeDescription ctd(
+ reinterpret_cast< typelib_IndirectTypeDescription * >(
+ type.get())->pType);
+ if (ctd.get()->eTypeClass == typelib_TypeClass_BYTE) {
+ check(static_cast< sal_Int32 >(n));
+ rtl::ByteSequence s(
+ reinterpret_cast< sal_Int8 const * >(data_),
+ static_cast< sal_Int32 >(n));
+ data_ += n;
+ sal_Sequence * p = s.getHandle();
+ return BinaryAny(type, &p);
+ }
+ std::vector< BinaryAny > as;
+ for (sal_uInt32 i = 0; i != n; ++i) {
+ as.push_back(readValue(ctd));
+ }
+ assert(ctd.get()->nSize >= 0);
+ sal_uInt64 size = static_cast< sal_uInt64 >(n) *
+ static_cast< sal_uInt64 >(ctd.get()->nSize);
+ // sal_uInt32 * sal_Int32 -> sal_uInt64 cannot overflow
+ if (size > SAL_MAX_SIZE - SAL_SEQUENCE_HEADER_SIZE) {
+ throw css::uno::RuntimeException(
+ "binaryurp::Unmarshal: sequence size too large");
+ }
+ void * buf = allocate(
+ SAL_SEQUENCE_HEADER_SIZE + static_cast< sal_Size >(size));
+ static_cast< sal_Sequence * >(buf)->nRefCount = 0;
+ static_cast< sal_Sequence * >(buf)->nElements =
+ static_cast< sal_Int32 >(n);
+ for (sal_uInt32 i = 0; i != n; ++i) {
+ uno_copyData(
+ static_cast< sal_Sequence * >(buf)->elements + i * ctd.get()->nSize,
+ as[i].getValue(ctd), ctd.get(), nullptr);
+ }
+ return BinaryAny(type, &buf);
+}
+
+void Unmarshal::readMemberValues(
+ css::uno::TypeDescription const & type, std::vector< BinaryAny > * values)
+{
+ assert(
+ type.is() &&
+ (type.get()->eTypeClass == typelib_TypeClass_STRUCT ||
+ type.get()->eTypeClass == typelib_TypeClass_EXCEPTION) &&
+ values != nullptr);
+ type.makeComplete();
+ typelib_CompoundTypeDescription * ctd =
+ reinterpret_cast< typelib_CompoundTypeDescription * >(type.get());
+ if (ctd->pBaseTypeDescription != nullptr) {
+ readMemberValues(
+ css::uno::TypeDescription(&ctd->pBaseTypeDescription->aBase),
+ values);
+ }
+ for (sal_Int32 i = 0; i != ctd->nMembers; ++i) {
+ values->push_back(
+ readValue(css::uno::TypeDescription(ctd->ppTypeRefs[i])));
+ }
+}
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/unmarshal.hxx b/binaryurp/source/unmarshal.hxx
new file mode 100644
index 000000000..6867d8b17
--- /dev/null
+++ b/binaryurp/source/unmarshal.hxx
@@ -0,0 +1,96 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_UNMARSHAL_HXX
+#define INCLUDED_BINARYURP_SOURCE_UNMARSHAL_HXX
+
+#include <sal/config.h>
+
+#include <vector>
+
+#include <com/sun/star/uno/Sequence.hxx>
+#include <rtl/ref.hxx>
+#include <sal/types.h>
+#include <typelib/typedescription.hxx>
+
+namespace binaryurp {
+ class BinaryAny;
+ class Bridge;
+ struct ReaderState;
+}
+namespace com::sun::star::uno { class TypeDescription; }
+
+namespace binaryurp {
+
+class Unmarshal {
+public:
+ Unmarshal(
+ rtl::Reference< Bridge > const & bridge, ReaderState & state,
+ com::sun::star::uno::Sequence< sal_Int8 > const & buffer);
+
+ ~Unmarshal();
+
+ sal_uInt8 read8();
+
+ sal_uInt16 read16();
+
+ sal_uInt32 read32();
+
+ com::sun::star::uno::TypeDescription readType();
+
+ OUString readOid();
+
+ rtl::ByteSequence readTid();
+
+ BinaryAny readValue(com::sun::star::uno::TypeDescription const & type);
+
+ void done() const;
+
+private:
+ Unmarshal(const Unmarshal&) = delete;
+ Unmarshal& operator=(const Unmarshal&) = delete;
+
+ void check(sal_Int32 size) const;
+
+ sal_uInt32 readCompressed();
+
+ sal_uInt16 readCacheIndex();
+
+ sal_uInt64 read64();
+
+ OUString readString();
+
+ BinaryAny readSequence(com::sun::star::uno::TypeDescription const & type);
+
+ void readMemberValues(
+ com::sun::star::uno::TypeDescription const & type,
+ std::vector< BinaryAny > * values);
+
+ rtl::Reference< Bridge > bridge_;
+ ReaderState & state_;
+ com::sun::star::uno::Sequence< sal_Int8 > buffer_;
+ sal_uInt8 const * data_;
+ sal_uInt8 const * end_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/writer.cxx b/binaryurp/source/writer.cxx
new file mode 100644
index 000000000..585f375d7
--- /dev/null
+++ b/binaryurp/source/writer.cxx
@@ -0,0 +1,454 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#include <sal/config.h>
+
+#include <cassert>
+#include <cstddef>
+#include <cstring>
+#include <exception>
+#include <limits>
+#include <vector>
+
+#include <com/sun/star/connection/XConnection.hpp>
+#include <com/sun/star/io/IOException.hpp>
+#include <com/sun/star/lang/WrappedTargetRuntimeException.hpp>
+#include <com/sun/star/uno/XCurrentContext.hpp>
+#include <cppuhelper/exc_hlp.hxx>
+#include <osl/mutex.hxx>
+#include <sal/log.hxx>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "bridge.hxx"
+#include "currentcontext.hxx"
+#include "specialfunctionids.hxx"
+#include "writer.hxx"
+
+namespace binaryurp {
+
+Writer::Item::Item()
+ : request(false)
+ , setter(false)
+ , exception(false)
+ , setCurrentContextMode(false)
+{}
+
+Writer::Item::Item(
+ rtl::ByteSequence const & theTid, OUString const & theOid,
+ css::uno::TypeDescription const & theType,
+ css::uno::TypeDescription const & theMember,
+ std::vector< BinaryAny > const & inArguments,
+ css::uno::UnoInterfaceReference const & theCurrentContext):
+ request(true), tid(theTid), oid(theOid), type(theType), member(theMember),
+ setter(false), arguments(inArguments), exception(false),
+ currentContext(theCurrentContext), setCurrentContextMode(false)
+{}
+
+Writer::Item::Item(
+ rtl::ByteSequence const & theTid,
+ css::uno::TypeDescription const & theMember, bool theSetter,
+ bool theException, BinaryAny const & theReturnValue,
+ std::vector< BinaryAny > const & outArguments,
+ bool theSetCurrentContextMode):
+ request(false), tid(theTid), member(theMember), setter(theSetter),
+ arguments(outArguments), exception(theException),
+ returnValue(theReturnValue), setCurrentContextMode(theSetCurrentContextMode)
+{}
+
+Writer::Writer(rtl::Reference< Bridge > const & bridge):
+ Thread("binaryurpWriter"), bridge_(bridge), marshal_(bridge, state_),
+ stop_(false)
+{
+ assert(bridge.is());
+}
+
+void Writer::sendDirectRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ css::uno::TypeDescription const & type,
+ css::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments)
+{
+ assert(!unblocked_.check());
+ sendRequest(
+ tid, oid, type, member, inArguments, false,
+ css::uno::UnoInterfaceReference());
+}
+
+void Writer::sendDirectReply(
+ rtl::ByteSequence const & tid, css::uno::TypeDescription const & member,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments)
+{
+ assert(!unblocked_.check());
+    sendReply(tid, member, false, exception, returnValue, outArguments);
+}
+
+void Writer::queueRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ css::uno::TypeDescription const & type,
+ css::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments)
+{
+ css::uno::UnoInterfaceReference cc(current_context::get());
+ osl::MutexGuard g(mutex_);
+ queue_.emplace_back(tid, oid, type, member, inArguments, cc);
+ items_.set();
+}
+
+void Writer::queueReply(
+ rtl::ByteSequence const & tid,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments, bool setCurrentContextMode)
+{
+ osl::MutexGuard g(mutex_);
+ queue_.emplace_back(
+ tid, member, setter, exception, returnValue, outArguments,
+ setCurrentContextMode);
+ items_.set();
+}
+
+void Writer::unblock() {
+ // Assumes that osl::Condition::set works as a memory barrier, so that
+ // changes made by preceding sendDirectRequest/Reply calls are visible to
+ // subsequent sendRequest/Reply calls:
+ unblocked_.set();
+}
+
+void Writer::stop() {
+ {
+ osl::MutexGuard g(mutex_);
+ stop_ = true;
+ }
+ unblocked_.set();
+ items_.set();
+}
+
+Writer::~Writer() {}
+
+void Writer::execute() {
+ try {
+ unblocked_.wait();
+ for (;;) {
+ items_.wait();
+ Item item;
+ {
+ osl::MutexGuard g(mutex_);
+ if (stop_) {
+ return;
+ }
+ assert(!queue_.empty());
+ item = queue_.front();
+ queue_.pop_front();
+ if (queue_.empty()) {
+ items_.reset();
+ }
+ }
+ if (item.request) {
+ sendRequest(
+ item.tid, item.oid, item.type, item.member, item.arguments,
+ (item.oid != "UrpProtocolProperties" &&
+ !item.member.equals(
+ css::uno::TypeDescription(
+ "com.sun.star.uno.XInterface::release")) &&
+ bridge_->isCurrentContextMode()),
+ item.currentContext);
+ } else {
+ sendReply(
+ item.tid, item.member, item.setter, item.exception,
+ item.returnValue, item.arguments);
+ if (item.setCurrentContextMode) {
+ bridge_->setCurrentContextMode();
+ }
+ }
+ }
+ } catch (const css::uno::Exception & e) {
+ SAL_INFO("binaryurp", "caught " << e);
+ } catch (const std::exception & e) {
+ SAL_INFO("binaryurp", "caught C++ exception " << e.what());
+ }
+ bridge_->terminate(false);
+ bridge_.clear();
+}
+
+void Writer::sendRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ css::uno::TypeDescription const & type,
+ css::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments, bool currentContextMode,
+ css::uno::UnoInterfaceReference const & currentContext)
+{
+ assert(tid.getLength() != 0);
+ assert(!oid.isEmpty());
+ assert(member.is());
+ css::uno::TypeDescription t(type);
+ sal_Int32 functionId = 0;
+ bool bForceSynchronous = false;
+ member.makeComplete();
+ switch (member.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ {
+ typelib_InterfaceAttributeTypeDescription * atd =
+ reinterpret_cast< typelib_InterfaceAttributeTypeDescription * >(
+ member.get());
+ assert(atd->pInterface != nullptr);
+ if (!t.is()) {
+ t = css::uno::TypeDescription(&atd->pInterface->aBase);
+ }
+ t.makeComplete();
+ functionId = atd->pInterface->pMapMemberIndexToFunctionIndex[
+ atd->aBase.nPosition];
+ if (!inArguments.empty()) { // setter
+ ++functionId;
+ }
+ break;
+ }
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast< typelib_InterfaceMethodTypeDescription * >(
+ member.get());
+ assert(mtd->pInterface != nullptr);
+ if (!t.is()) {
+ t = css::uno::TypeDescription(&mtd->pInterface->aBase);
+ }
+ t.makeComplete();
+ functionId = mtd->pInterface->pMapMemberIndexToFunctionIndex[
+ mtd->aBase.nPosition];
+ bForceSynchronous = mtd->bOneWay &&
+ functionId != SPECIAL_FUNCTION_ID_RELEASE;
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ assert(functionId >= 0);
+ if (functionId > SAL_MAX_UINT16) {
+ throw css::uno::RuntimeException("function ID too large for URP");
+ }
+ std::vector< unsigned char > buf;
+ bool newType = !(lastType_.is() && t.equals(lastType_));
+ bool newOid = oid != lastOid_;
+ bool newTid = tid != lastTid_;
+ if (newType || newOid || newTid || bForceSynchronous || functionId > 0x3FFF)
+ // > 14 bit function ID
+ {
+ Marshal::write8(
+ &buf,
+ (0xC0 | (newType ? 0x20 : 0) | (newOid ? 0x10 : 0) |
+ (newTid ? 0x08 : 0) | (functionId > 0xFF ? 0x04 : 0) |
+ (bForceSynchronous ? 0x01 : 0)));
+ // bit 7: LONGHEADER, bit 6: REQUEST, bit 5: NEWTYPE, bit 4: NEWOID,
+ // bit 3: NEWTID, bit 2: FUNCTIONID16, bit 0: MOREFLAGS
+ if (bForceSynchronous) {
+ Marshal::write8(&buf, 0xC0); // bit 7: MUSTREPLY, bit 6: SYNCHRONOUS
+ }
+ if (functionId <= 0xFF) {
+ Marshal::write8(&buf, static_cast< sal_uInt8 >(functionId));
+ } else {
+ Marshal::write16(&buf, static_cast< sal_uInt16 >(functionId));
+ }
+ if (newType) {
+ marshal_.writeType(&buf, t);
+ }
+ if (newOid) {
+ marshal_.writeOid(&buf, oid);
+ }
+ if (newTid) {
+ marshal_.writeTid(&buf, tid);
+ }
+ } else if (functionId <= 0x3F) { // <= 6 bit function ID
+ Marshal::write8(&buf, static_cast< sal_uInt8 >(functionId));
+ // bit 7: !LONGHEADER, bit 6: !FUNCTIONID14
+ } else {
+ Marshal::write8(
+ &buf, static_cast< sal_uInt8 >(0x40 | (functionId >> 8)));
+ // bit 7: !LONGHEADER, bit 6: FUNCTIONID14
+ Marshal::write8(&buf, functionId & 0xFF);
+ }
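+    // For illustration (byte values are examples, not normative): a repeated
+    // call with functionId = 3 and no new type/OID/TID is emitted as the
+    // single short-form byte 0x03, while functionId = 0x1234 under the same
+    // conditions becomes the two FUNCTIONID14 bytes 0x52 0x34
+    // (0x40 | (0x1234 >> 8), then 0x1234 & 0xFF).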
+ if (currentContextMode) {
+ css::uno::UnoInterfaceReference cc(currentContext);
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference< css::uno::XCurrentContext > >::get()),
+ BinaryAny(
+ css::uno::TypeDescription(
+ cppu::UnoType<
+ css::uno::Reference<
+ css::uno::XCurrentContext > >::get()),
+ &cc.m_pUnoI));
+ }
+ switch (member.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ if (!inArguments.empty()) { // setter
+ assert(inArguments.size() == 1);
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ member.get())->
+ pAttributeTypeRef),
+ inArguments.front());
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast< typelib_InterfaceMethodTypeDescription * >(
+ member.get());
+ std::vector< BinaryAny >::const_iterator i(inArguments.begin());
+ for (sal_Int32 j = 0; j != mtd->nParams; ++j) {
+ if (mtd->pParams[j].bIn) {
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(mtd->pParams[j].pTypeRef),
+ *i++);
+ }
+ }
+ assert(i == inArguments.end());
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ sendMessage(buf);
+ lastType_ = t;
+ lastOid_ = oid;
+ lastTid_ = tid;
+}
+
+void Writer::sendReply(
+ rtl::ByteSequence const & tid,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments)
+{
+ assert(tid.getLength() != 0);
+ assert(member.is());
+ assert(member.get()->bComplete);
+ std::vector< unsigned char > buf;
+ bool newTid = tid != lastTid_;
+ Marshal::write8(&buf, 0x80 | (exception ? 0x20 : 0) | (newTid ? 0x08 : 0));
+ // bit 7: LONGHEADER; bit 6: !REQUEST; bit 5: EXCEPTION; bit 3: NEWTID
+ if (newTid) {
+ marshal_.writeTid(&buf, tid);
+ }
+ if (exception) {
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(cppu::UnoType< css::uno::Any >::get()),
+ returnValue);
+ } else {
+ switch (member.get()->eTypeClass) {
+ case typelib_TypeClass_INTERFACE_ATTRIBUTE:
+ if (!setter) {
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(
+ reinterpret_cast<
+ typelib_InterfaceAttributeTypeDescription * >(
+ member.get())->
+ pAttributeTypeRef),
+ returnValue);
+ }
+ break;
+ case typelib_TypeClass_INTERFACE_METHOD:
+ {
+ typelib_InterfaceMethodTypeDescription * mtd =
+ reinterpret_cast<
+ typelib_InterfaceMethodTypeDescription * >(
+ member.get());
+ marshal_.writeValue(
+ &buf, css::uno::TypeDescription(mtd->pReturnTypeRef),
+ returnValue);
+ std::vector< BinaryAny >::const_iterator i(
+ outArguments.begin());
+ for (sal_Int32 j = 0; j != mtd->nParams; ++j) {
+ if (mtd->pParams[j].bOut) {
+ marshal_.writeValue(
+ &buf,
+ css::uno::TypeDescription(mtd->pParams[j].pTypeRef),
+ *i++);
+ }
+ }
+ assert(i == outArguments.end());
+ break;
+ }
+ default:
+ assert(false); // this cannot happen
+ break;
+ }
+ }
+ sendMessage(buf);
+ lastTid_ = tid;
+ bridge_->decrementCalls();
+}
+
+void Writer::sendMessage(std::vector< unsigned char > const & buffer) {
+ std::vector< unsigned char > header;
+ if (buffer.size() > SAL_MAX_UINT32) {
+ throw css::uno::RuntimeException(
+ "message too large for URP");
+ }
+ Marshal::write32(&header, static_cast< sal_uInt32 >(buffer.size()));
+ Marshal::write32(&header, 1);
+ assert(!buffer.empty());
+ unsigned char const * p = buffer.data();
+ std::vector< unsigned char >::size_type n = buffer.size();
+ assert(header.size() <= SAL_MAX_INT32);
+    static_assert(SAL_MAX_INT32 <= std::numeric_limits<std::size_t>::max());
+ std::size_t k = SAL_MAX_INT32 - header.size();
+ if (n < k) {
+ k = n;
+ }
+ css::uno::Sequence<sal_Int8> s(header.size() + k);
+ assert(!header.empty());
+ std::memcpy(s.getArray(), header.data(), header.size());
+ for (;;) {
+ std::memcpy(s.getArray() + s.getLength() - k, p, k);
+ try {
+ bridge_->getConnection()->write(s);
+ } catch (const css::io::IOException & e) {
+ css::uno::Any exc(cppu::getCaughtException());
+ throw css::lang::WrappedTargetRuntimeException(
+ "Binary URP write raised IO exception: " + e.Message,
+ css::uno::Reference< css::uno::XInterface >(), exc);
+ }
+ n -= k;
+ if (n == 0) {
+ break;
+ }
+ p += k;
+ k = SAL_MAX_INT32;
+ if (n < k) {
+ k = n;
+ }
+ s.realloc(k);
+ }
+}
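+
+// Note on the loop above: a single XConnection::write() call is capped at
+// SAL_MAX_INT32 bytes (the sequence length limit), so the first chunk
+// carries the 8-byte header plus as much payload as fits, and any remainder
+// goes out in follow-up chunks after s.realloc(k).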
+
+}
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/writer.hxx b/binaryurp/source/writer.hxx
new file mode 100644
index 000000000..42ea687ce
--- /dev/null
+++ b/binaryurp/source/writer.hxx
@@ -0,0 +1,165 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_WRITER_HXX
+#define INCLUDED_BINARYURP_SOURCE_WRITER_HXX
+
+#include <sal/config.h>
+
+#include <deque>
+#include <vector>
+
+#include <osl/conditn.hxx>
+#include <osl/mutex.hxx>
+#include <rtl/byteseq.hxx>
+#include <rtl/ref.hxx>
+#include <rtl/ustring.hxx>
+#include <salhelper/thread.hxx>
+#include <typelib/typedescription.hxx>
+#include <uno/dispatcher.hxx>
+
+#include "binaryany.hxx"
+#include "marshal.hxx"
+#include "writerstate.hxx"
+
+namespace binaryurp { class Bridge; }
+
+namespace binaryurp {
+
+class Writer: public salhelper::Thread
+{
+public:
+ explicit Writer(rtl::Reference< Bridge > const & bridge);
+
+ // Only called from Bridge::reader_ thread, and only before Bridge::writer_
+ // thread is unblocked:
+ void sendDirectRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type,
+ com::sun::star::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments);
+
+ // Only called from Bridge::reader_ thread, and only before Bridge::writer_
+ // thread is unblocked:
+ void sendDirectReply(
+ rtl::ByteSequence const & tid,
+ com::sun::star::uno::TypeDescription const & member,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments);
+
+ void queueRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type,
+ com::sun::star::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments);
+
+ void queueReply(
+ rtl::ByteSequence const & tid,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments,
+ bool setCurrentContextMode);
+
+ void unblock();
+
+ void stop();
+
+private:
+ virtual ~Writer() override;
+
+ virtual void execute() override;
+
+ void sendRequest(
+ rtl::ByteSequence const & tid, OUString const & oid,
+ com::sun::star::uno::TypeDescription const & type,
+ com::sun::star::uno::TypeDescription const & member,
+ std::vector< BinaryAny > const & inArguments, bool currentContextMode,
+ com::sun::star::uno::UnoInterfaceReference const & currentContext);
+
+ void sendReply(
+ rtl::ByteSequence const & tid,
+ com::sun::star::uno::TypeDescription const & member, bool setter,
+ bool exception, BinaryAny const & returnValue,
+ std::vector< BinaryAny > const & outArguments);
+
+ void sendMessage(std::vector< unsigned char > const & buffer);
+
+ struct Item {
+ Item();
+
+ // Request:
+ Item(
+ rtl::ByteSequence const & theTid, OUString const & theOid,
+ com::sun::star::uno::TypeDescription const & theType,
+ com::sun::star::uno::TypeDescription const & theMember,
+ std::vector< BinaryAny > const & inArguments,
+ com::sun::star::uno::UnoInterfaceReference const &
+ theCurrentContext);
+
+ // Reply:
+ Item(
+ rtl::ByteSequence const & theTid,
+ com::sun::star::uno::TypeDescription const & theMember,
+ bool theSetter, bool theException, BinaryAny const & theReturnValue,
+ std::vector< BinaryAny > const & outArguments,
+ bool theSetCurrentContextMode);
+
+ bool request;
+
+ rtl::ByteSequence tid; // request + reply
+
+ OUString oid; // request
+
+ com::sun::star::uno::TypeDescription type; // request
+
+ com::sun::star::uno::TypeDescription member; // request + reply
+
+ bool setter; // reply
+
+ std::vector< BinaryAny > arguments;
+ // request: inArguments; reply: outArguments
+
+ bool exception; // reply
+
+ BinaryAny returnValue; // reply
+
+ com::sun::star::uno::UnoInterfaceReference currentContext; // request
+
+ bool setCurrentContextMode; // reply
+ };
+
+ rtl::Reference< Bridge > bridge_;
+ WriterState state_;
+ Marshal marshal_;
+ com::sun::star::uno::TypeDescription lastType_;
+ OUString lastOid_;
+ rtl::ByteSequence lastTid_;
+ osl::Condition unblocked_;
+ osl::Condition items_;
+
+ osl::Mutex mutex_;
+ std::deque< Item > queue_;
+ bool stop_;
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
diff --git a/binaryurp/source/writerstate.hxx b/binaryurp/source/writerstate.hxx
new file mode 100644
index 000000000..3386ef943
--- /dev/null
+++ b/binaryurp/source/writerstate.hxx
@@ -0,0 +1,52 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * This file incorporates work covered by the following license notice:
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed
+ * with this work for additional information regarding copyright
+ * ownership. The ASF licenses this file to you under the Apache
+ * License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of
+ * the License at http://www.apache.org/licenses/LICENSE-2.0 .
+ */
+
+#ifndef INCLUDED_BINARYURP_SOURCE_WRITERSTATE_HXX
+#define INCLUDED_BINARYURP_SOURCE_WRITERSTATE_HXX
+
+#include <sal/config.h>
+
+#include <rtl/byteseq.hxx>
+#include <rtl/ustring.hxx>
+#include <typelib/typedescription.hxx>
+
+#include "cache.hxx"
+
+namespace binaryurp {
+
+struct WriterState {
+private:
+ WriterState(const WriterState&) = delete;
+ WriterState& operator=(const WriterState&) = delete;
+public:
+ WriterState():
+ typeCache(cache::size), oidCache(cache::size), tidCache(cache::size) {}
+
+ Cache< com::sun::star::uno::TypeDescription > typeCache;
+
+ Cache< OUString > oidCache;
+
+ Cache< rtl::ByteSequence > tidCache;
+};
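+
+// Unlike ReaderState (readerstate.hxx), which indexes plain arrays with
+// cache indices chosen by the remote side, WriterState uses the Cache
+// template from cache.hxx (not shown here): the writer has to pick and
+// recycle indices itself when marshalling outgoing types, OIDs, and TIDs.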
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */