/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
 * This file is part of the LibreOffice project.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */

#ifndef INCLUDED_SAX_SOURCE_TOOLS_CACHEDOUTPUTSTREAM_HXX
#define INCLUDED_SAX_SOURCE_TOOLS_CACHEDOUTPUTSTREAM_HXX

#include <sal/types.h>

#include <com/sun/star/io/XOutputStream.hpp>
#include <com/sun/star/uno/Sequence.hxx>

#include <cstring>
#include <memory>

namespace sax_fastparser {

/// Receiver for flushed data when output is redirected away from the stream;
/// used for element sorting in Writer (see FastSaxSerializer::ForMerge).
class ForMergeBase
{
public:
    virtual ~ForMergeBase() {}
    virtual void append( const css::uno::Sequence<sal_Int8>& rWhat ) = 0;
};
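
// Illustrative sketch only (not part of this header): a trivial ForMergeBase
// implementation could simply concatenate the flushed chunks. The real
// consumer in LibreOffice is FastSaxSerializer::ForMerge.
//
//   class ConcatMerge : public ForMergeBase
//   {
//       std::vector<sal_Int8> maData;
//   public:
//       virtual void append( const css::uno::Sequence<sal_Int8>& rWhat ) override
//       {
//           maData.insert( maData.end(), rWhat.begin(), rWhat.end() );
//       }
//   };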

class CachedOutputStream
{
    /// When buffer hits this size, it's written to mxOutputStream
    static const sal_Int32 mnMaximumSize = 0x100000; // 1Mbyte

    /// ForMerge structure is used for sorting elements in Writer
    std::shared_ptr< ForMergeBase > mpForMerge;
    const css::uno::Sequence<sal_Int8> mpCache;
    /// Output stream, usually writing data into files.
    css::uno::Reference< css::io::XOutputStream > mxOutputStream;
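    /// Raw pointer to mpCache's internal buffer; lets writeBytes() fill it
    /// and flush() patch its element count.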
    uno_Sequence *pSeq;
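    /// Number of bytes currently stored in the cache.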
    sal_Int32 mnCacheWrittenSize;
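    /// True: flush to mxOutputStream; false: hand cached data to mpForMerge.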
    bool mbWriteToOutStream;

public:
    CachedOutputStream() : mpCache(mnMaximumSize)
                         , pSeq(mpCache.get())
                         , mnCacheWrittenSize(0)
                         , mbWriteToOutStream(true)
    {}

    const css::uno::Reference< css::io::XOutputStream >& getOutputStream() const
    {
        return mxOutputStream;
    }

    void setOutputStream( const css::uno::Reference< css::io::XOutputStream >& xOutputStream )
    {
        mxOutputStream = xOutputStream;
    }

    /// Flush pending data, then redirect further output to the given ForMerge consumer.
    void setOutput(const std::shared_ptr<ForMergeBase>& pForMerge)
    {
        flush();
        mbWriteToOutStream = false;
        mpForMerge = pForMerge;
    }

    /// Flush pending data, then write further output to mxOutputStream again.
    void resetOutputToStream()
    {
        flush();
        mbWriteToOutStream = true;
        mpForMerge.reset();
    }

    /// Cache the passed bytes; flush to the current target when the size limit is hit.
    void writeBytes( const sal_Int8* pStr, sal_Int32 nLen )
    {
        // Flush the cache if appending nLen bytes would exceed the limit
        if (mnCacheWrittenSize + nLen > mnMaximumSize)
        {
            flush();

            // Writer does some elements sorting, so it can accumulate
            // pretty big strings in FastSaxSerializer::ForMerge.
            // In that case, just flush data and write immediately.
            if (nLen > mnMaximumSize)
            {
                if (mbWriteToOutStream)
                    mxOutputStream->writeBytes( css::uno::Sequence<sal_Int8>(pStr, nLen) );
                else
                    mpForMerge->append( css::uno::Sequence<sal_Int8>(pStr, nLen) );
                return;
            }
        }

        std::memcpy(pSeq->elements + mnCacheWrittenSize, pStr, nLen);
        mnCacheWrittenSize += nLen;
    }

    /// immediately write buffer into mxOutputStream and clear
    void flush()
    {
        // resize the Sequence to written size
        pSeq->nElements = mnCacheWrittenSize;
        if (mbWriteToOutStream)
            mxOutputStream->writeBytes( mpCache );
        else
            mpForMerge->append( mpCache );
        // and next time write to the beginning
        mnCacheWrittenSize = 0;
    }
};
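
// Minimal usage sketch (illustrative only; xOut stands for whatever
// css::io::XOutputStream the caller already owns):
//
//   CachedOutputStream aCachedStream;
//   aCachedStream.setOutputStream( xOut );
//   const char pTag[] = "<doc/>";
//   aCachedStream.writeBytes( reinterpret_cast<const sal_Int8*>(pTag),
//                             static_cast<sal_Int32>(std::strlen(pTag)) );
//   // ... many small writes are coalesced in the 1 MiB cache ...
//   aCachedStream.flush(); // push the remaining cached bytes to xOut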

} // namespace sax_fastparser

#endif

/* vim:set shiftwidth=4 softtabstop=4 expandtab: */