}
else
{
// Try to load the whole docid->offset lookup file into memory; if the heap
// cannot hold it, fall back to reading the lookup directly from disk.
try{
DataInputStream dis = new DataInputStream(Files.openFileStream(indexFilename));
try {
// RandomDataInputMemory consumes the stream contents into memory here,
// so the stream can be closed as soon as construction completes.
offsetLookup = new OnDiskDocid2OffsetLookup(new RandomDataInputAccessor(new RandomDataInputMemory(dis, indexFileLength)),length, dataFileLength);
} finally {
// FIX: previously dis was only closed on success, leaking the file
// handle whenever construction threw (notably the OutOfMemoryError
// handled below).
dis.close();
}
}
catch (OutOfMemoryError oome) {
//logger.warn("OutOfMemoryError: Structure "+ structureName + " reading lookup file directly from disk");
//logger.debug("Metadata lookup will be read directly from disk: "+ length +" entries, size "+ dataFileLength + " bytes");
// Disk-backed fallback: prefer the FileChannel-based accessor when the
// underlying handle is a plain RandomAccessFile.
RandomDataInput rfi = Files.openFileRandom(indexFilename);
offsetLookup = new OnDiskDocid2OffsetLookup(
rfi instanceof RandomAccessFile
? new ChannelByteAccessor((RandomAccessFile)rfi)
: new RandomDataInputAccessor(rfi),
length, dataFileLength
);
}
}
// In-memory loading was not selected (condition is in the unseen enclosing
// 'if'): read the docid->offset lookup directly from disk on every access.
// Slower per lookup, but no heap cost.
} else {
//logger.warn("Structure "+ structureName + " reading lookup file directly from disk (SLOW)");
//logger.debug("Metadata lookup will be read directly from disk: "+ length +" entries, size "+ dataFileLength + " bytes");
// Prefer the FileChannel-backed ChannelByteAccessor when the handle is a
// plain RandomAccessFile; otherwise use the generic accessor wrapper.
RandomDataInput rfi = Files.openFileRandom(indexFilename);
offsetLookup = new OnDiskDocid2OffsetLookup(
rfi instanceof RandomAccessFile
? new ChannelByteAccessor((RandomAccessFile)rfi)
: new RandomDataInputAccessor(rfi),
length, dataFileLength
);
}
//debug log lookups using a wrapper class
if (logger.isDebugEnabled())
offsetLookup = new LoggingDocid2OffsetLookup(offsetLookup);
//now build the keyname and lengths into 2 maps:
// keyname -> length & keyname -> offsets
keyCount = keyNames.length;
key2bytelength = new TObjectIntHashMap<String>(keyCount);
// key name -> length in characters (local only; used below to size the
// FixedSizeTextFactory for each reverse-lookup key)
TObjectIntHashMap<String> key2stringlength = new TObjectIntHashMap<String>(keyCount);
key2byteoffset = new TObjectIntHashMap<String>(keyCount);
// valueByteOffsets[i] mirrors key2byteoffset, indexed by key position
valueByteOffsets = new int[keyCount];
// running byte offset of each value within a metadata record: values are
// laid out contiguously in keyNames order
int cumulativeOffset = 0;
for(i=0;i<keyCount;i++)
{
key2stringlength.put(keyNames[i], valueCharLengths[i]);
key2bytelength.put(keyNames[i], valueByteLengths[i]);
key2byteoffset.put(keyNames[i], cumulativeOffset);
valueByteOffsets[i] = cumulativeOffset;
cumulativeOffset += valueByteLengths[i];
}
// key name -> (1 + position) of its reverse lookup map; presumably 0 (the
// trove no-entry default) means "no reverse lookup for this key" — TODO confirm
key2forwardOffset = new TObjectIntHashMap<String>(2);
// NOTE(review): the property is named "reverse-key-names" while the local
// variables say "forward" — these appear to be the same concept in this
// codebase; verify against the index property documentation.
final String[] forwardKeys = index.getIndexProperty("index."+structureName+".reverse-key-names", "").split("\\s*,\\s*");
forwardMetaMaps = (Map<Text,IntWritable>[])new Map[forwardKeys.length];
keyFactories = (FixedSizeWriteableFactory<Text>[])new FixedSizeWriteableFactory[forwardKeys.length];
i=0;
final FixedSizeIntWritableFactory valueFactory = new FixedSizeIntWritableFactory();
// For each reverse-lookup key, open its value -> docid map file, honouring a
// per-key property choosing how (or whether) to hold it in memory.
for(String keyName : forwardKeys)
{
// an empty property value splits to a single empty token: skip it
if (keyName.trim().equals(""))
continue;
// NOTE(review): the untrimmed keyName is stored/queried although the
// emptiness check trims — harmless given the \s*,\s* split, but confirm
// surrounding whitespace cannot survive at the ends of the property value.
key2forwardOffset.put(keyName, 1+i);
logger.debug("Forward key "+ keyName +", length="+ key2bytelength.get(keyName));
// key factory sized by the key's length in characters
keyFactories[i] = new FixedSizeTextFactory(key2stringlength.get(keyName));
String filename = path+ApplicationSetup.FILE_SEPARATOR+prefix+"."+structureName+"-"+i+FSOrderedMapFile.USUAL_EXTENSION;
String loadFormat = index.getIndexProperty("index."+structureName+".reverse."+keyName+".in-mem", "false");
// "hashmap": parse the whole map file into an in-memory java Map
if (loadFormat.equals("hashmap"))
{
//logger.info("Structure "+ structureName + " reading reverse map for key "+ keyName + " into memory as hashmap");
forwardMetaMaps[i] = new FSOrderedMapFile.MapFileInMemory<Text, IntWritable>(
filename,
keyFactories[i],
valueFactory);
}
// "mapfileinmem": keep the map file's raw bytes in memory, but use the
// normal FSOrderedMapFile binary-search access on top of them.
else if (loadFormat.equals("mapfileinmem"))
{
final long revDataFileLength = Files.length(filename);
// NOTE(review): RandomDataInputMemory backs the data with byte arrays;
// removed dead commented-out code suggested files larger than
// Integer.MAX_VALUE were once redirected to the on-disk path — confirm
// very large reverse map files are still handled upstream.
DataInputStream dis = new DataInputStream(Files.openFileStream(filename));
try {
// RandomDataInputMemory consumes the stream contents here, so the
// stream can be closed once construction completes.
forwardMetaMaps[i] = new FSOrderedMapFile<Text, IntWritable>(
new RandomDataInputMemory(dis, revDataFileLength),
filename,
keyFactories[i],
valueFactory);
} finally {
// FIX: dis was never closed (the close() call was commented out),
// leaking one file handle per reverse-lookup key. The sibling
// in-memory lookup path closes its stream the same way.
dis.close();
}
}