  /**
   * Writes NUM_TEST_BLOCKS randomly typed blocks to the given path and
   * records each block's start offset, the offset of the previous block of
   * the same type, its type, and (when requested) its uncompressed contents
   * in the caller-supplied lists.
   *
   * @return the total on-disk size, in bytes, of all blocks written
   */
  private long writeBlocks(Random rand, Compression.Algorithm compressAlgo,
      Path path, List<Long> expectedOffsets, List<Long> expectedPrevOffsets,
      List<BlockType> expectedTypes, List<ByteBuffer> expectedContents
  ) throws IOException {
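    // A non-null contents list means the caller wants each block's
    // uncompressed bytes captured, mirroring cache-on-write behavior.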
boolean cacheOnWrite = expectedContents != null;
FSDataOutputStream os = fs.create(path);
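    // Build the block writer with no data block encoder (null), the default
    // checksum settings, and what is presumably the newest minor block
    // format version the reader supports.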
HFileBlock.Writer hbw = new HFileBlock.Writer(compressAlgo, null,
includesMemstoreTS,
HFileReaderV2.MAX_MINOR_VERSION,
HFile.DEFAULT_CHECKSUM_TYPE,
HFile.DEFAULT_BYTES_PER_CHECKSUM);
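    // Remembers the start offset of the last block written for each type,
    // so the expected "previous offset of the same type" can be recorded.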
Map<BlockType, Long> prevOffsetByType = new HashMap<BlockType, Long>();
long totalSize = 0;
for (int i = 0; i < NUM_TEST_BLOCKS; ++i) {
long pos = os.getPos();
int blockTypeOrdinal = rand.nextInt(BlockType.values().length);
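      // ENCODED_DATA blocks come from a data block encoder (this writer was
      // created without one), so substitute a plain DATA block.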
if (blockTypeOrdinal == BlockType.ENCODED_DATA.ordinal()) {
blockTypeOrdinal = BlockType.DATA.ordinal();
}
BlockType bt = BlockType.values()[blockTypeOrdinal];
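      // startWriting returns a stream that buffers the block's uncompressed
      // payload inside the writer; nothing hits the file until
      // writeHeaderAndData below.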
DataOutputStream dos = hbw.startWriting(bt);
int size = rand.nextInt(500);
for (int j = 0; j < size; ++j) {
// This might compress well.
dos.writeShort(i + 1);
dos.writeInt(j + 1);
}
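      // The payload is still buffered in hbw, so os.getPos() is unchanged
      // and still points at this block's start offset.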
if (expectedOffsets != null)
expectedOffsets.add(os.getPos());
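      // Expect -1 for the first block of a given type, otherwise the start
      // offset of the previous block of the same type.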
if (expectedPrevOffsets != null) {
Long prevOffset = prevOffsetByType.get(bt);
expectedPrevOffsets.add(prevOffset != null ? prevOffset : -1);
prevOffsetByType.put(bt, os.getPos());
}
      // Guard like the other expected* lists so callers may pass null.
      if (expectedTypes != null)
        expectedTypes.add(bt);
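      // Flush the header and the possibly-compressed payload to the file
      // and tally the block's full on-disk footprint.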
hbw.writeHeaderAndData(os);
totalSize += hbw.getOnDiskSizeWithHeader();
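      // Capture the uncompressed block (header included), the same view of
      // the block that cache-on-write stores, for later content checks.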
if (cacheOnWrite)
expectedContents.add(hbw.getUncompressedBufferWithHeader());
if (detailedLogging) {
LOG.info("Written block #" + i + " of type " + bt
+ ", uncompressed size " + hbw.getUncompressedSizeWithoutHeader()
+ " at offset " + pos);
}
}
os.close();
LOG.info("Created a temporary file at " + path + ", "
+ fs.getFileStatus(path).getLen() + " byte, compression=" +
compressAlgo);
return totalSize;
}