QNameTerm key = entry.getKey();
OccurrenceList occurrencesList = entry.getValue();
occurrencesList.sort();
os.clear();
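// the entry is rewritten in place, so the index file is write-locked for the duration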
Lock lock = index.db.getLock();
try {
lock.acquire(Lock.WRITE_LOCK);
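// the key addresses all data stored for this term and qname within the current collection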
NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname,
index.getBrokerPool().getSymbols(), key.term);
boolean changed = false;
VariableByteInput is = index.db.getAsStream(value);
if (is == null)
continue;
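// Each chunk in the stream is laid out as:
//   docId (int) | name type (byte) | occurrence count (int) | block length (fixed int),
//   followed by the block of delta-encoded node IDs, frequencies and offsets.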
while (is.available() > 0) {
int storedDocId = is.readInt();
byte nameType = is.readByte();
int occurrences = is.readInt();
// Read the byte length of the variable-length block of node IDs + frequencies + offsets
int length = is.readFixedInt();
if (storedDocId != currentDoc.getDocId()) {
// the entry belongs to another document:
// copy it over unchanged
os.writeInt(storedDocId);
os.writeByte(nameType);
os.writeInt(occurrences);
os.writeFixedInt(length);
is.copyRaw(os, length);
} else {
// the entry belongs to the current document:
if (mode == StreamListener.REMOVE_ALL_NODES) {
// all nodes of the document are being removed: skip its data
is.skipBytes(length);
} else {
// removing selected nodes: filter out the node IDs to be removed
// and collect the remaining occurrences in a new list
NodeId previous = null;
OccurrenceList newOccurrences = new OccurrenceList();
for (int m = 0; m < occurrences; m++) {
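// node IDs are stored as deltas, so each one is decoded relative to the previous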
NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is);
previous = nodeId;
int freq = is.readInt();
// add the node to the new list if it is not
// in the list of removed nodes
if (!occurrencesList.contains(nodeId)) {
for (int n = 0; n < freq; n++) {
newOccurrences.add(nodeId, is.readInt());
}
} else {
is.skip(freq);
}
}
// append the data from the new list
if (newOccurrences.getSize() > 0) {
// the list must be sorted before it is written out
newOccurrences.sort();
os.writeInt(currentDoc.getDocId());
os.writeByte(nameType);
os.writeInt(newOccurrences.getTermCount());
// remember the position of the length field
int lenOffset = os.position();
// write a placeholder; the actual length is patched in below
os.writeFixedInt(0);
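// reset the delta-encoding base before writing the new node ID sequence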
previous = null;
for (int m = 0; m < newOccurrences.getSize(); ) {
previous = newOccurrences.getNode(m).write(previous, os);
int freq = newOccurrences.getOccurrences(m);
os.writeInt(freq);
for (int n = 0; n < freq; n++) {
os.writeInt(newOccurrences.getOffset(m + n));
}
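// a node with frequency 'freq' occupies freq consecutive slots in the list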
m += freq;
}
// patch the placeholder with the actual block length (excluding the 4-byte length field itself)
os.writeFixedInt(lenOffset, os.position() - lenOffset - 4);
}
}
changed = true;
}
}
// store the rewritten entry, if anything changed
if (changed) {
// nothing left to store: remove the existing entry
if (os.data().size() == 0) {
index.db.remove(value);
} else {
if (index.db.put(value, os.data()) == BFile.UNKNOWN_ADDRESS) {
LOG.error("Could not put index data for token '" + key.term + "' in '" +
index.db.getFile().getName() + "'");
}
}
}
} catch (LockException e) {
LOG.warn("Failed to acquire lock for file " + index.db.getFile().getName(), e);
} catch (IOException e) {
LOG.warn("IO error for file " + index.db.getFile().getName(), e);
} catch (ReadOnlyException e) {
LOG.warn("Read-only error for file " + index.db.getFile().getName(), e);
} finally {
lock.release(Lock.WRITE_LOCK);
os.clear();
}
}
ngrams.clear();
}