path: root/sax/source/tools/CachedOutputStream.hxx
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:06:44 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 09:06:44 +0000
commit    ed5640d8b587fbcfed7dd7967f3de04b37a76f26 (patch)
tree      7a5f7c6c9d02226d7471cb3cc8fbbf631b415303 /sax/source/tools/CachedOutputStream.hxx
parent    Initial commit. (diff)
Adding upstream version 4:7.4.7.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'sax/source/tools/CachedOutputStream.hxx')
-rw-r--r--  sax/source/tools/CachedOutputStream.hxx  118
1 file changed, 118 insertions, 0 deletions
diff --git a/sax/source/tools/CachedOutputStream.hxx b/sax/source/tools/CachedOutputStream.hxx
new file mode 100644
index 000000000..7d9e514c3
--- /dev/null
+++ b/sax/source/tools/CachedOutputStream.hxx
@@ -0,0 +1,118 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
+/*
+ * This file is part of the LibreOffice project.
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+#ifndef INCLUDED_SAX_SOURCE_TOOLS_CACHEDOUTPUTSTREAM_HXX
+#define INCLUDED_SAX_SOURCE_TOOLS_CACHEDOUTPUTSTREAM_HXX
+
+#include <sal/types.h>
+
+#include <com/sun/star/io/XOutputStream.hpp>
+#include <com/sun/star/uno/Sequence.hxx>
+
+#include <cstring>
+#include <memory>
+
+namespace sax_fastparser {
+
+class ForMergeBase
+{
+public:
+    virtual ~ForMergeBase() {}
+    virtual void append( const css::uno::Sequence<sal_Int8>& rWhat ) = 0;
+};
+
+class CachedOutputStream
+{
+    /// When buffer hits this size, it's written to mxOutputStream
+    static const sal_Int32 mnMaximumSize = 0x100000; // 1Mbyte
+
+    /// ForMerge structure is used for sorting elements in Writer
+    std::shared_ptr< ForMergeBase > mpForMerge;
+    const css::uno::Sequence<sal_Int8> mpCache;
+    /// Output stream, usually writing data into files.
+    css::uno::Reference< css::io::XOutputStream > mxOutputStream;
+    uno_Sequence *pSeq;
+    sal_Int32 mnCacheWrittenSize;
+    bool mbWriteToOutStream;
+
+public:
+    CachedOutputStream() : mpCache(mnMaximumSize)
+                         , pSeq(mpCache.get())
+                         , mnCacheWrittenSize(0)
+                         , mbWriteToOutStream(true)
+    {}
+
+    const css::uno::Reference< css::io::XOutputStream >& getOutputStream() const
+    {
+        return mxOutputStream;
+    }
+
+    void setOutputStream( const css::uno::Reference< css::io::XOutputStream >& xOutputStream )
+    {
+        mxOutputStream = xOutputStream;
+    }
+
+    void setOutput( std::shared_ptr< ForMergeBase > pForMerge )
+    {
+        flush();
+        mbWriteToOutStream = false;
+        mpForMerge = pForMerge;
+    }
+
+    void resetOutputToStream()
+    {
+        flush();
+        mbWriteToOutStream = true;
+        mpForMerge.reset();
+    }
+
+    /// cache string and if limit is hit, flush
+    void writeBytes( const sal_Int8* pStr, sal_Int32 nLen )
+    {
+        // Write when the buffer gets big enough
+        if (mnCacheWrittenSize + nLen > mnMaximumSize)
+        {
+            flush();
+
+            // Writer does some elements sorting, so it can accumulate
+            // pretty big strings in FastSaxSerializer::ForMerge.
+            // In that case, just flush data and write immediately.
+            if (nLen > mnMaximumSize)
+            {
+                if (mbWriteToOutStream)
+                    mxOutputStream->writeBytes( css::uno::Sequence<sal_Int8>(pStr, nLen) );
+                else
+                    mpForMerge->append( css::uno::Sequence<sal_Int8>(pStr, nLen) );
+                return;
+            }
+        }
+
+        memcpy(pSeq->elements + mnCacheWrittenSize, pStr, nLen);
+        mnCacheWrittenSize += nLen;
+    }
+
+    /// immediately write buffer into mxOutputStream and clear
+    void flush()
+    {
+        // resize the Sequence to written size
+        pSeq->nElements = mnCacheWrittenSize;
+        if (mbWriteToOutStream)
+            mxOutputStream->writeBytes( mpCache );
+        else
+            mpForMerge->append( mpCache );
+        // and next time write to the beginning
+        mnCacheWrittenSize = 0;
+    }
+};
+
+}
+
+#endif
+
+/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
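For reference, a minimal usage sketch (not part of this commit): the names CountingOutputStream, CollectingMerge and exampleUsage below are hypothetical, and the sketch assumes a working LibreOffice/UNO build environment with cppuhelper available. It illustrates the intended flow: small writes accumulate in the 1 MB cache, flush() pushes the cached bytes to the XOutputStream, and setOutput()/resetOutputToStream() temporarily redirect output into a ForMergeBase collector, the way Writer's element sorting does.

#include <vector>
#include <com/sun/star/io/XOutputStream.hpp>
#include <com/sun/star/uno/Sequence.hxx>
#include <cppuhelper/implbase.hxx>
#include "CachedOutputStream.hxx"

namespace
{

// Hypothetical sink: counts the bytes that CachedOutputStream flushes to it.
class CountingOutputStream : public cppu::WeakImplHelper<css::io::XOutputStream>
{
    sal_Int64 mnBytes = 0;
public:
    virtual void SAL_CALL writeBytes( const css::uno::Sequence<sal_Int8>& rData ) override
    {
        mnBytes += rData.getLength();
    }
    virtual void SAL_CALL flush() override {}
    virtual void SAL_CALL closeOutput() override {}
    sal_Int64 getBytesWritten() const { return mnBytes; }
};

// Hypothetical ForMergeBase subclass: collects flushed chunks in memory,
// similar in spirit to FastSaxSerializer::ForMerge in Writer.
class CollectingMerge : public sax_fastparser::ForMergeBase
{
public:
    std::vector< css::uno::Sequence<sal_Int8> > maChunks;
    virtual void append( const css::uno::Sequence<sal_Int8>& rWhat ) override
    {
        maChunks.push_back(rWhat);
    }
};

void exampleUsage()
{
    sax_fastparser::CachedOutputStream aCached;
    aCached.setOutputStream( new CountingOutputStream );

    // Small writes are memcpy'd into the 1 MB cache; nothing reaches the
    // XOutputStream until the cache fills up or flush() is called.
    const char aXml[] = "<w:p/>";
    aCached.writeBytes( reinterpret_cast<const sal_Int8*>(aXml), sizeof(aXml) - 1 );
    aCached.flush();

    // Redirect the cache into an in-memory ForMerge collector, then back.
    auto pMerge = std::make_shared<CollectingMerge>();
    aCached.setOutput( pMerge );
    aCached.writeBytes( reinterpret_cast<const sal_Int8*>(aXml), sizeof(aXml) - 1 );
    aCached.resetOutputToStream(); // flushes the pending bytes into pMerge
}

}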