@Override
public void tryCompress(final ClientContext context) throws InsertException {
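// Start with the uncompressed original as the best candidate; a codec is only kept if it beats this.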
long origSize = origData.size();
COMPRESSOR_TYPE bestCodec = null;
RandomAccessBucket bestCompressedData = origData;
long bestCompressedDataSize = origSize;
HashResult[] hashes = null;
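// Hashes of the original data, generated once during the first full pass through a codec.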
if(logMINOR) Logger.minor(this, "Attempt to compress the data");
// Try to compress the data.
// Try each algorithm, starting with the fastest and weakest.
// Stop when we run out of algorithms, or when the compressed data fits in a single block.
try {
COMPRESSOR_TYPE[] comps = COMPRESSOR_TYPE.getCompressorsArray(compressorDescriptor, pre1254);
boolean first = true;
for (final COMPRESSOR_TYPE comp : comps) {
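// If still true in the finally block below, this iteration's result was not kept and must be freed.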
boolean shouldFreeOnFinally = true;
RandomAccessBucket result = null;
try {
if(logMINOR)
Logger.minor(this, "Attempt to compress using " + comp);
// Tell the inserter which codec we are about to try.
if(persistent) {
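// Assumption: the persistent job runner delivers the callback on its own (database) thread, so we queue rather than call inline.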
context.jobRunner.queue(new PersistentJob() {
@Override
public boolean run(ClientContext context) {
inserter.onStartCompression(comp, context);
return false;
}
}, NativeThread.NORM_PRIORITY+1);
} else {
try {
inserter.onStartCompression(comp, context);
} catch (Throwable t) {
Logger.error(this, "Transient insert callback threw "+t, t);
}
}
InputStream is = null;
OutputStream os = null;
MultiHashInputStream hasher = null;
try {
is = origData.getInputStream();
result = bucketFactory.makeBucket(-1);
os = result.getOutputStream();
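// Cap the output at the best size so far: a codec that cannot beat it aborts early with CompressionOutputSizeException.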
long maxOutputSize = bestCompressedDataSize;
if(first && generateHashes != 0) {
if(logMINOR) Logger.minor(this, "Generating hashes: "+generateHashes);
is = hasher = new MultiHashInputStream(is, generateHashes);
}
try {
comp.compress(is, os, origSize, maxOutputSize);
} catch (RuntimeException e) {
// ArithmeticException has been seen in bzip2 codec.
Logger.error(this, "Compression failed with codec "+comp+" : "+e, e);
// Try the next codec.
// After an arbitrary RuntimeException the stream state is suspect, so let's not trust the hasher.
continue;
} catch (CompressionOutputSizeException e) {
if(hasher != null) {
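// Drain the rest of the input so the hasher digests the complete original data before we read the results.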
is.skip(Long.MAX_VALUE);
hashes = hasher.getResults();
first = false;
}
continue; // try next compressor type
}
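// Compression finished within the cap; if this was the hashing pass, collect the hashes now.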
if(hasher != null) {
hashes = hasher.getResults();
first = false;
}
} finally {
Closer.close(is);
Closer.close(os);
}
long resultSize = result.size();
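// Small enough for a single block: compressing further cannot help, so keep this result and stop.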
// minSize is {SSKBlock,CHKBlock}.MAX_COMPRESSED_DATA_LENGTH
if(resultSize <= minSize) {
if(logMINOR)
Logger.minor(this, "New size "+resultSize+" smaller then minSize "+minSize);
bestCodec = comp;
if(bestCompressedData != null && bestCompressedData != origData)
// No need to removeFrom(): we haven't stored it.
bestCompressedData.free();
bestCompressedData = result;
bestCompressedDataSize = resultSize;
shouldFreeOnFinally = false;
break;
}
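// Not down to a single block yet, but keep the smallest result so far and try the next codec.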
if(resultSize < bestCompressedDataSize) {
if(logMINOR)
Logger.minor(this, "New size "+resultSize+" better than old best "+bestCompressedDataSize);
if(bestCompressedData != null && bestCompressedData != origData)
bestCompressedData.free();
bestCompressedData = result;
bestCompressedDataSize = resultSize;
bestCodec = comp;
shouldFreeOnFinally = false;
}
} catch (PersistenceDisabledException e) {
// Persistence went away under us, typically because the node is shutting down.
if(!context.jobRunner.shuttingDown())
Logger.error(this, "Database disabled while compressing data", new Exception("error"));
shouldFreeOnFinally = true;
if(bestCompressedData != null && bestCompressedData != origData && bestCompressedData != result)
bestCompressedData.free();
// Fall back to the original data so later iterations and the final CompressionOutput never see a freed bucket.
bestCompressedData = origData;
bestCompressedDataSize = origSize;
bestCodec = null;
} finally {
if(shouldFreeOnFinally && (result != null) && result != origData)
result.free();
}
}
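// If no codec produced anything smaller than the original, bestCodec is still null and bestCompressedData is still origData.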
final CompressionOutput output = new CompressionOutput(bestCompressedData, bestCodec, hashes);