// Snapshot what the nodes cache currently holds for every id in this chunk,
// *before* the batched DB update runs. Entries may map to null:
// Cache.getIfPresent() returns null for keys that are not cached, and that
// null is used below (after the update) as the "was not cached" signal.
Map<String, NodeDocument> cachedDocs = Collections.emptyMap();
if (collection == Collection.NODES) {
cachedDocs = new HashMap<String, NodeDocument>();
for (String key : chunkedIds) {
cachedDocs.put(key, nodesCache.getIfPresent(new StringValue(key)));
}
}
Connection connection = null;
String tableName = getTable(collection);
boolean success = false;
try {
connection = getConnection();
// One batched UPDATE that appends the same change to all ids in this chunk.
success = dbBatchedAppendingUpdate(connection, tableName, chunkedIds, modified, appendData);
connection.commit();
} catch (SQLException ex) {
// NOTE(review): the SQLException is swallowed here -- not logged, cause
// discarded. The caller only observes the failure via success == false;
// presumably it falls back to per-document updates -- confirm, and
// consider at least debug-logging ex so batch failures are diagnosable.
success = false;
} finally {
closeConnection(connection);
}
if (success) {
// The DB write succeeded; bring the cache in line with what was written
// instead of re-reading the documents from the database.
for (Entry<String, NodeDocument> entry : cachedDocs.entrySet()) {
if (entry.getValue() == null) {
// The document was not cached when we took the snapshot, but it may
// have been loaded concurrently while the update ran; invalidate so
// a stale pre-update copy cannot survive in the cache.
nodesCache.invalidate(new StringValue(entry.getKey()));
} else {
// Replay the update on the cached copy to produce the post-update
// document. NOTE(review): the unchecked (T) casts assume that when
// cachedDocs is non-empty, collection == Collection.NODES and hence
// T is NodeDocument (see the snapshot guard above) -- the invariant
// is not enforced by the compiler here.
T oldDoc = (T)(entry.getValue());
T newDoc = applyChanges(collection, (T)(entry.getValue()), update, true);
applyToCache((NodeDocument) oldDoc, (NodeDocument) newDoc);
}