// End of preceding branch: allocate the backing map sized to the expected
// entry count. Raw (non-generic) ConcurrentHashMap matches this file's
// pre-generics style — presumably kept for store-migration compatibility.
recordMap = new ConcurrentHashMap(mSize);
}
// Deserialize each persisted record into a key/value pair.
// ClassNotFoundException is rethrown to the caller (the enclosing method
// presumably declares it — not visible here); any other per-entry failure is
// captured and chained into a PHashMapLoadException so that a single bad
// record does not abort loading the remaining entries.
Iterator iter = entries.iterator();
while (iter.hasNext()) {
VRecordMap record = (VRecordMap)iter.next();
Object key = null;
Object value = null;
Throwable kex = null; // failure while deserializing the key (non-CNFE)
Throwable vex = null; // failure while deserializing the value (non-CNFE)
Throwable ex = null; // I/O failure while reading this record
try {
// Copy the record's bytes out of its ByteBuffer so they can be fed
// through an ObjectInputStream.
ByteBuffer buffer = record.getBuffer();
buffer.position(0);
int limit = buffer.limit();
byte[] data = new byte[limit];
buffer.get(data);
ByteArrayInputStream bais = new ByteArrayInputStream(data);
// Use our version of ObjectInputStream so we can load old
// serialized object from an old store, i.e. store migration
ObjectInputStream ois = new MQObjectInputStream(bais);
try {
key = ois.readObject();
} catch (Throwable e) {
// A missing class is fatal for the whole load; propagate it.
// Anything else is recorded per-entry and reported later.
if (e instanceof ClassNotFoundException) {
throw (ClassNotFoundException)e;
} else {
kex = e;
}
}
try {
value = ois.readObject();
} catch (Throwable e) {
if (e instanceof ClassNotFoundException) {
throw (ClassNotFoundException)e;
} else {
vex = e;
}
}
// Mark client data starting position
if (maxClientDataSize > 0) {
// Since we've read in all data in the buffer, we need to
// reset the position back to client data starting position
// (bais.available() is whatever ObjectInputStream left unread,
// i.e. the trailing client-data region of the record).
int pos = limit - bais.available();
buffer.position(pos);
buffer.mark();
}
// NOTE(review): streams are not closed if an exception is thrown
// above; harmless for in-memory streams, but try-with-resources
// would be the safer modern idiom.
ois.close();
bais.close();
} catch (IOException e) {
ex = e;
}
// If anything went wrong for this record, wrap whatever was recovered
// (possibly-null key/value plus the causes) into a load exception and
// link it onto the running chain via setNextException.
if (kex != null || vex != null || ex != null) {
PHashMapLoadException le = new PHashMapLoadException(
"Failed to load data in [" + record.toString() + "]");
le.setKey(key);
le.setValue(value);
le.setKeyCause(kex);
le.setValueCause(vex);
le.setNextException(loadException);