do {
  pageHeader = dataReader.readPageHeader();
  // A dictionary page, if present, precedes the data pages in the column chunk;
  // decode it before moving on to the data pages.
  if (pageHeader.getType() == PageType.DICTIONARY_PAGE) {
    // TODO: Handle buffer allocation exception
    BytesInput bytesIn;
    ByteBuf uncompressedData = allocateBuffer(pageHeader.getUncompressed_page_size());
    // Remember the buffer so it can be released with the other dictionary buffers.
    allocatedDictionaryBuffers.add(uncompressedData);
    if (parentColumnReader.columnChunkMetaData.getCodec() == CompressionCodecName.UNCOMPRESSED) {
      // Page is stored uncompressed, so it can be read straight into the target buffer.
      dataReader.getPageAsBytesBuf(uncompressedData, pageHeader.getCompressed_page_size());
      bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
          .getBytesInput(uncompressedData, pageHeader.getUncompressed_page_size());
    } else {
      // Read the compressed page into a scratch buffer, decompress it into
      // uncompressedData, then release the scratch buffer.
      ByteBuf compressedData = allocateBuffer(pageHeader.getCompressed_page_size());
      dataReader.getPageAsBytesBuf(compressedData, pageHeader.getCompressed_page_size());
      bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
          .decompress(parentColumnReader.columnChunkMetaData.getCodec(),
              compressedData,
              uncompressedData,
              pageHeader.getCompressed_page_size(),
              pageHeader.getUncompressed_page_size());
      compressedData.release();
    }
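    // Wrap the dictionary bytes in a DictionaryPage and let the encoding
    // build the in-memory dictionary from it.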
    DictionaryPage page = new DictionaryPage(
        bytesIn,
        pageHeader.getUncompressed_page_size(),
        pageHeader.getDictionary_page_header().getNum_values(),
        parquet.column.Encoding.valueOf(pageHeader.getDictionary_page_header().getEncoding().name())
    );
    this.dictionary = page.getEncoding().initDictionary(parentColumnReader.columnDescriptor, page);
  }
} while (pageHeader.getType() == PageType.DICTIONARY_PAGE);
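// The loop above stops at the first non-dictionary header, so pageHeader now
// describes a data page; its bytes are read the same way as the dictionary page.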
// TODO: Handle buffer allocation exception
BytesInput bytesIn;
ByteBuf uncompressedData = allocateBuffer(pageHeader.getUncompressed_page_size());
allocatedBuffers.add(uncompressedData);
if (parentColumnReader.columnChunkMetaData.getCodec() == CompressionCodecName.UNCOMPRESSED) {
  dataReader.getPageAsBytesBuf(uncompressedData, pageHeader.getCompressed_page_size());
  bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer()
      .getBytesInput(uncompressedData, pageHeader.getUncompressed_page_size());