// If we have multiple values for this md5, that is a collision:
// throw an exception--either the data itself contains duplicate keys,
// there are trillions of keys, or someone is attempting something
// malicious. (We obviously expect collisions when we save keys, so
// the check is skipped in that case.)
if(!getSaveKeys() && numTuples > 1)
throw new VoldemortException("Duplicate keys detected for md5 sum "
+ ByteUtils.toHexString(ByteUtils.copy(key.get(),
0,
key.getSize())));
}
if(numTuples < 0) {
// Overflow: numTuples is a short, so more than Short.MAX_VALUE
// tuples for a single md5 wraps around to a negative value
throw new VoldemortException("Found too many collisions: chunk " + chunkId
+ " has exceeded " + Short.MAX_VALUE + " collisions.");
} else if(numTuples > 1) {
// Update the total number of collisions and the maximum number of
// keys seen for a single collision
reporter.incrCounter(CollisionCounter.NUM_COLLISIONS, 1);
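// Raise the MAX_COLLISIONS counter up to numTuples by adding the
// difference to its current value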
long numCollisions = reporter.getCounter(CollisionCounter.MAX_COLLISIONS).getCounter();
if(numTuples > numCollisions) {
reporter.incrCounter(CollisionCounter.MAX_COLLISIONS, numTuples - numCollisions);
}
}
// Flush the value stream so its backing byte buffer holds every
// byte written for this key
valueStream.flush();
byte[] value = stream.toByteArray();
// Start writing to file now
// First, if the save-keys flag is set, write the number of keys
// (numTuples) for this md5 as a short
if(getSaveKeys()) {
this.valueFileStream[chunkId].writeShort(numTuples);
this.position[chunkId] += ByteUtils.SIZE_OF_SHORT;
if(this.checkSumDigestValue[chunkId] != null) {
this.checkSumDigestValue[chunkId].update(numTuples);
}
}
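// Write the serialized value bytes themselves, advance the chunk
// offset, and keep the chunk's checksum digest in sync with what
// was written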
this.valueFileStream[chunkId].write(value);
this.position[chunkId] += value.length;
if(this.checkSumDigestValue[chunkId] != null) {
this.checkSumDigestValue[chunkId].update(value);
}
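// position is a per-chunk int offset; a negative value means the
// chunk file has grown past Integer.MAX_VALUE bytes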
if(this.position[chunkId] < 0)
throw new VoldemortException("Chunk overflow exception: chunk " + chunkId
+ " has exceeded " + Integer.MAX_VALUE + " bytes.");
}