}
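
// Re-reads each row from the streamed data file, rebuilds it through the compaction row code
// (PrecompactedRow / LazilyCompactedRow), and writes the row plus a fresh index entry to the new files.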
@Override
protected long doIndexing() throws IOException
{
    EstimatedHistogram rowSizes = SSTable.defaultRowHistogram();
    EstimatedHistogram columnCounts = SSTable.defaultColumnHistogram();
    long rows = 0L;
    DecoratedKey key;
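
    // a basic compaction controller drives the PrecompactedRow / LazilyCompactedRow merge below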
    CompactionController controller = CompactionController.getBasicController(true);

    long dfileLength = dfile.length();
    while (!dfile.isEOF())
    {
        // read key
        key = SSTableReader.decodeKey(StorageService.getPartitioner(), desc, ByteBufferUtil.readWithShortLength(dfile));

        // read the data size; the row's bloom filter and column index are skipped when the iterator is constructed
        long dataSize = SSTableReader.readRowSize(dfile, desc);
        SSTableIdentityIterator iter = new SSTableIdentityIterator(cfs.metadata, dfile, key, dfile.getFilePointer(), dataSize, true);
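
        // rows larger than the in-memory compaction limit are rebuilt incrementally to bound memory use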
        AbstractCompactedRow row;
        if (dataSize > DatabaseDescriptor.getInMemoryCompactionLimit())
        {
            logger.info(String.format("Rebuilding post-streaming large counter row %s (%d bytes) incrementally", ByteBufferUtil.bytesToHex(key.key), dataSize));
            row = new LazilyCompactedRow(controller, Collections.singletonList(iter));
        }
        else
        {
            row = new PrecompactedRow(controller, Collections.singletonList(iter));
        }
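
        // update the cached entry for this key with the rebuilt row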
        updateCache(key, dataSize, row);

        rowSizes.add(dataSize);
        columnCounts.add(row.columnCount());

        // update index writer
        iwriter.afterAppend(key, writerDfile.getFilePointer());
        // write key and row
        ByteBufferUtil.writeWithShortLength(key.key, writerDfile);