author     Noel Grandin <noel.grandin@collabora.co.uk>   2019-03-14 12:55:25 +0200
committer  Noel Grandin <noel.grandin@collabora.co.uk>   2019-03-15 12:56:09 +0200
commit     6c46fdd75de8cec4f62f9fed02212a2d1e0b71b5 (patch)
tree       4e1a3c1ebfc9d9b87dbc0810827686df4192c1f8
parent     22de481db334fcce404cdcf88183cd544cd19271 (diff)
new pahole-all-classes script, and update a couple of sc/ structs
Create a new script to scan our codebase for holes in our structures.

Implemented a couple of the things I found:

ScSortKeyState 12 bytes -> 8 bytes
sc::MultiDataCellState 12 bytes -> 8 bytes

Change-Id: I139dda36aedf02b7f19be121eb312e5552142b87
-rwxr-xr-x  compilerplugins/clang/pahole-all-classes.py | 123
-rw-r--r--  compilerplugins/clang/pahole.results        |  66
-rw-r--r--  sc/inc/sortparam.hxx                        |   2
-rw-r--r--  sc/inc/types.hxx                            |   7
-rw-r--r--  sc/source/core/data/types.cxx               |   6
-rw-r--r--  sc/source/ui/dbgui/tpsort.cxx               |   2
6 files changed, 196 insertions, 10 deletions
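To see why field reordering alone accounts for the ScSortKeyState change below (12 bytes down to 8), here is a minimal standalone sketch. It assumes SCCOLROW is a 32-bit integer and a typical 64-bit ABI; the struct names are illustrative, not taken from the commit.

// Illustrative only, not part of the commit: mirrors the field order change
// made in sc/inc/sortparam.hxx below.
#include <cstdint>

using SCCOLROW = std::int32_t; // stand-in for the sc/ typedef

struct SortKeyStateBefore
{
    bool     bDoSort;    // offset 0, then 3 bytes of padding so nField is 4-byte aligned
    SCCOLROW nField;     // offset 4
    bool     bAscending; // offset 8, then 3 bytes of tail padding
};                       // sizeof == 12

struct SortKeyStateAfter
{
    SCCOLROW nField;     // offset 0
    bool     bDoSort;    // offset 4
    bool     bAscending; // offset 5, then 2 bytes of tail padding
};                       // sizeof == 8

static_assert(sizeof(SortKeyStateBefore) == 12, "layout assumption");
static_assert(sizeof(SortKeyStateAfter) == 8, "layout assumption");

The sc::MultiDataCellState change below uses the same trick and additionally narrows its enum to sal_uInt8, so that SCROW (4 bytes), SCCOL (2 bytes) and the one-byte enum pack into 8 bytes instead of 12.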
diff --git a/compilerplugins/clang/pahole-all-classes.py b/compilerplugins/clang/pahole-all-classes.py
new file mode 100755
index 000000000000..9fda73c8789f
--- /dev/null
+++ b/compilerplugins/clang/pahole-all-classes.py
@@ -0,0 +1,123 @@
+#!/usr/bin/python3
+#
+# Find holes in structures, so that we can pack them and improve our memory density.
+#
+# In order to make this work, you need to
+# (1) be operating in a workspace where you have a debug build of LibreOffice
+# (2) first run the unusedfields loplugin to generate a log file
+# (3) install pahole support into your gdb; I used this one: https://github.com/PhilArmstrong/pahole-gdb
+# (4) ./compilerplugins/clang/pahole-all-classes.py > ./compilerplugins/clang/pahole.results
+# Warning: running this script will make GDB soak up about 8G of RAM
+#
+
+import _thread
+import io
+import os
+import subprocess
+import time
+import re
+
+# search for all the class names in the file produced by the unusedfields loplugin
+# (step (2) above leaves it in workdir/loplugin.unusedfields.log)
+a = subprocess.Popen("grep 'definition:' workdir/loplugin.unusedfields.log | sort -u", stdout=subprocess.PIPE, shell=True)
+
+classSourceLocDict = dict()
+classSet = set()
+with a.stdout as txt:
+ for line in txt:
+ tokens = line.decode('utf8').strip().split("\t")
+ className = tokens[2].strip()
+ srcLoc = tokens[5].strip()
+ if "anonymous" in className: continue
+ # for now, just check the stuff in /sc/inc
+ if not srcLoc.startswith("sc/inc/"):
+ continue
+ if className in classSet: continue
+ classSourceLocDict[srcLoc] = className
+ classSet.add(className)
+a.terminate()
+
+gdbProc = subprocess.Popen("gdb", stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+
+stdin = io.TextIOWrapper(gdbProc.stdin, 'utf-8')
+
+# make gdb load all the debugging info
+stdin.write("set confirm off\n")
+for filename in sorted(os.listdir('instdir/program')):
+ if filename.endswith(".so"):
+ stdin.write("add-symbol-file instdir/program/" + filename + "\n")
+stdin.flush()
+
+
+# Some of the pahole commands are going to fail, and I cannot read the error stream separately or
+# interleave writing commands with reading output, because Python has no (easy) way of doing a
+# non-blocking read. So I have to write the commands out using a background thread, and then read
+# the entire resulting stream out below.
+def write_pahole_commands():
+ for srcLoc in sorted(classSourceLocDict.keys()):
+ className = classSourceLocDict[srcLoc]
+ stdin.write("echo " + className + " " + srcLoc + "\n")
+ stdin.write("pahole " + className + "\n")
+ stdin.flush()
+ stdin.write("echo all-done\n")
+ stdin.flush()
+ stdin.close() # only way to make it flush the last echo command
+
+_thread.start_new_thread( write_pahole_commands, () )
+
+time.sleep(2)
+
+# Use a generator because lines often end up merged together in gdb's output and need to be
+# split up; doing that inline would create a mess in the parsing logic.
+def read_generator():
+ while True:
+ line = gdbProc.stdout.readline().decode('utf8').strip()
+ for split in line.split("(gdb)"):
+ split = split.strip()
+ if len(split) == 0: continue
+ if "all-done" in split: return
+ yield split
+
+firstLineRegex = re.compile(r"/\*\s+(\d+)\s+\*/ struct")
+fieldLineRegex = re.compile(r"/\*\s+\d+\s+(\d+)\s+\*/ ")
+holeLineRegex = re.compile(r"/\* XXX (\d+) bit hole, try to pack \*/")
+structLines = list()
+foundHole = False
+cumulativeHoleBits = 0
+structSize = 0
+found8ByteField = False
+for line in read_generator():
+ structLines.append(line)
+ firstLineMatch = firstLineRegex.match(line)
+ if firstLineMatch:
+ structSize = int(firstLineMatch.group(1))
+ holeLineMatch = holeLineRegex.match(line)
+ if holeLineMatch:
+ foundHole = True
+ cumulativeHoleBits += int(holeLineMatch.group(1))
+ fieldLineMatch = fieldLineRegex.match(line)
+ if fieldLineMatch:
+ fieldSize = int(fieldLineMatch.group(1))
+ if fieldSize == 8:
+ found8ByteField = True
+ if line == "}":
+ # Ignore very large structs, packing those is not going to help much, and
+ # re-organising them can make them much less readable.
+ if foundHole and len(structLines) < 12 and structSize < 100:
+ # If we have an 8-byte field, then the whole structure must be 8-byte aligned, otherwise
+ # it must be 4-byte aligned. (that's my approximation of the rules, the real ones are probably
+ # more complicated). So check if removing the holes will remove enough space to actually shrink
+ # this structure.
+ alignBytes = 4
+ if found8ByteField: alignBytes = 8
+ if (cumulativeHoleBits / 8) >= alignBytes:
+ # print("Found one " + str(structSize) + " " + str(cumulativeHoleBits/8) + " " + str(newStructSize%4))
+ for line in structLines:
+ print(line)
+ structLines.clear()
+ foundHole = False
+ cumulativeHoleBits = 0
+ structSize = 0
+ found8ByteField = False
+
+gdbProc.terminate()
\ No newline at end of file
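The alignment check near the end of the script above only reports a struct once its accumulated holes reach the struct's alignment (8 bytes if any field is 8 bytes wide, otherwise 4). A small sketch of why smaller holes are not worth reporting, using hypothetical structs and assuming a typical 64-bit ABI:

// Hypothetical structs, not from the commit: the 3-byte hole after 'flag' is
// smaller than the 8-byte alignment forced by the double member, so no
// field order can shrink the struct; this is exactly the case the script skips.
#include <cstdint>

struct SmallHoleOnly
{
    std::int32_t id;    // offset 0
    bool         flag;  // offset 4, followed by a 3-byte hole
    double       value; // offset 8, alignof(double) == 8 on typical 64-bit ABIs
};

struct SmallHoleReordered
{
    double       value; // offset 0
    std::int32_t id;    // offset 8
    bool         flag;  // offset 12, followed by 3 bytes of tail padding
};

// Both layouts round up to a multiple of 8, so the hole cannot be recovered.
static_assert(sizeof(SmallHoleOnly) == 16, "layout assumption");
static_assert(sizeof(SmallHoleReordered) == 16, "layout assumption");

The structs listed in pahole.results below all pass this check; ScColorScaleEntry, for example, accumulates two 32-bit holes in a 48-byte struct with 8-byte members.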
diff --git a/compilerplugins/clang/pahole.results b/compilerplugins/clang/pahole.results
new file mode 100644
index 000000000000..5d7129eb31a5
--- /dev/null
+++ b/compilerplugins/clang/pahole.results
@@ -0,0 +1,66 @@
+ScColorScaleEntry sc/inc/colorscale.hxx:46
+/* 48 */ struct ScColorScaleEntry {
+/* 0 8 */ double mnVal
+/* 8 4 */ class Color maColor
+/* XXX 32 bit hole, try to pack */
+/* 16 8 */ class std::unique_ptr<ScFormulaCell, std::default_delete<ScFormulaCell> > mpCell
+/* 24 8 */ class std::unique_ptr<ScFormulaListener, std::default_delete<ScFormulaListener> > mpListener
+/* 32 4 */ enum ScColorScaleEntryType meType
+/* XXX 32 bit hole, try to pack */
+/* 40 8 */ class ScConditionalFormat * mpFormat
+}
+ScDetOpList sc/inc/detdata.hxx:66
+/* 64 */ struct ScDetOpList {
+/* 0 1 */ bool bHasAddError
+/* XXX 56 bit hole, try to pack */
+/* 8 56 */ class std::__debug::vector<std::unique_ptr<ScDetOpData, std::default_delete<ScDetOpData> >, std::allocator<std::unique_ptr<ScDetOpData, std::default_delete<ScDetOpData> > > > aDetOpDataVector
+}
+ScDocRowHeightUpdater::TabRanges sc/inc/dociter.hxx:569
+/* 24 */ struct ScDocRowHeightUpdater::TabRanges {
+/* 0 2 */ short mnTab
+/* XXX 48 bit hole, try to pack */
+/* 8 16 */ class std::shared_ptr<ScFlatBoolRowSegments> mpRanges
+}
+ScPivotField sc/inc/pivot.hxx:122
+/* 56 */ struct ScPivotField {
+/* 0 2 */ short nCol
+/* XXX 48 bit hole, try to pack */
+/* 8 8 */ long mnOriginalDim
+/* 16 4 */ enum PivotFunc nFuncMask
+/* 20 1 */ unsigned char mnDupCount
+/* XXX 24 bit hole, try to pack */
+/* 24 32 */ struct com::sun::star::sheet::DataPilotFieldReference maFieldRef
+}
+ScPivotFuncData sc/inc/pivot.hxx:164
+/* 56 */ struct ScPivotFuncData {
+/* 0 2 */ short mnCol
+/* XXX 48 bit hole, try to pack */
+/* 8 8 */ long mnOriginalDim
+/* 16 4 */ enum PivotFunc mnFuncMask
+/* 20 1 */ unsigned char mnDupCount
+/* XXX 24 bit hole, try to pack */
+/* 24 32 */ struct com::sun::star::sheet::DataPilotFieldReference maFieldRef
+}
+ScExternalSingleRefToken sc/inc/token.hxx:131
+/* 56 */ struct ScExternalSingleRefToken {
+/* 0 16 */ class formula::FormulaToken formula::FormulaToken
+/* 16 2 */ const unsigned short mnFileId
+/* XXX 48 bit hole, try to pack */
+/* 24 16 */ const class svl::SharedString maTabName
+/* 40 12 */ struct ScSingleRefData maSingleRef
+}
+ScExternalDoubleRefToken sc/inc/token.hxx:155
+/* 64 */ struct ScExternalDoubleRefToken {
+/* 0 16 */ class formula::FormulaToken formula::FormulaToken
+/* 16 2 */ const unsigned short mnFileId
+/* XXX 48 bit hole, try to pack */
+/* 24 16 */ const class svl::SharedString maTabName
+/* 40 24 */ struct ScComplexRefData maDoubleRef
+}
+ScExternalNameToken sc/inc/token.hxx:182
+/* 40 */ struct ScExternalNameToken {
+/* 0 16 */ class formula::FormulaToken formula::FormulaToken
+/* 16 2 */ const unsigned short mnFileId
+/* XXX 48 bit hole, try to pack */
+/* 24 16 */ const class svl::SharedString maName
+}
diff --git a/sc/inc/sortparam.hxx b/sc/inc/sortparam.hxx
index 6eba07e64487..58f6b38e7db1 100644
--- a/sc/inc/sortparam.hxx
+++ b/sc/inc/sortparam.hxx
@@ -33,8 +33,8 @@ struct ScQueryParam;
struct ScSortKeyState
{
- bool bDoSort;
SCCOLROW nField;
+ bool bDoSort;
bool bAscending;
};
diff --git a/sc/inc/types.hxx b/sc/inc/types.hxx
index 2cfcb00eab21..c4e62ae63452 100644
--- a/sc/inc/types.hxx
+++ b/sc/inc/types.hxx
@@ -97,12 +97,11 @@ struct RangeMatrix
struct MultiDataCellState
{
- enum StateType { Invalid = 0, Empty, HasOneCell, HasMultipleCells };
+ enum StateType : sal_uInt8 { Invalid = 0, Empty, HasOneCell, HasMultipleCells };
- StateType meState;
-
- SCCOL mnCol1; //< first non-empty column
SCROW mnRow1; //< first non-empty row
+ SCCOL mnCol1; //< first non-empty column
+ StateType meState;
MultiDataCellState();
MultiDataCellState( StateType eState );
diff --git a/sc/source/core/data/types.cxx b/sc/source/core/data/types.cxx
index 919a7e62fee4..e5f97c92daf2 100644
--- a/sc/source/core/data/types.cxx
+++ b/sc/source/core/data/types.cxx
@@ -23,11 +23,9 @@ bool RangeMatrix::isRangeValid() const
}
MultiDataCellState::MultiDataCellState() :
- meState(StateType::Invalid),
- mnCol1(-1), mnRow1(-1) {}
+ mnRow1(-1), mnCol1(-1), meState(StateType::Invalid) {}
MultiDataCellState::MultiDataCellState( StateType eState ) :
- meState(eState),
- mnCol1(-1), mnRow1(-1) {}
+ mnRow1(-1), mnCol1(-1), meState(eState) {}
}
diff --git a/sc/source/ui/dbgui/tpsort.cxx b/sc/source/ui/dbgui/tpsort.cxx
index 2fa7ad90747d..9b6c72ecdb22 100644
--- a/sc/source/ui/dbgui/tpsort.cxx
+++ b/sc/source/ui/dbgui/tpsort.cxx
@@ -404,7 +404,7 @@ sal_uInt16 ScTabPageSortFields::GetFieldSelPos( SCCOLROW nField )
void ScTabPageSortFields::SetLastSortKey( sal_uInt16 nItem )
{
// Extend local SortParam copy
- const ScSortKeyState atempKeyState = { false, 0, true };
+ const ScSortKeyState atempKeyState = { 0, false, true };
aSortData.maKeyState.push_back( atempKeyState );
// Add Sort Key Item