*/
private IndexInfo validateXMLResourceInternal(final Txn transaction, final DBBroker broker, final XmldbURI docUri, final CollectionConfiguration config, final ValidateBlock doValidate) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {
// Perform the necessary operations if we are processing a collection configuration document
checkConfigurationDocument(transaction, broker, docUri);
final Database db = broker.getBrokerPool();
if (db.isReadOnly()) {
throw new PermissionDeniedException("Database is read-only");
}
DocumentImpl oldDoc = null;
boolean oldDocLocked = false;
try {
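// Register the validation as a job with the process monitor and take the collection's write lock for the duration of the operation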
db.getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, docUri);
getLock().acquire(Lock.WRITE_LOCK);
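// Create the new document object and look up any existing document with the same name; permissions and name conflicts with sub-collections are checked before going further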
DocumentImpl document = new DocumentImpl((BrokerPool) db, this, docUri);
oldDoc = documents.get(docUri.getRawCollectionPath());
checkPermissionsForAddDocument(broker, oldDoc);
checkCollectionConflict(docUri);
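// Carry over document information from the previous version (if any) and set up an indexer in validating mode; the IndexInfo records the configuration, whether this is a create or an update, and the old document's permissions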
manageDocumentInformation(oldDoc, document);
final Indexer indexer = new Indexer(broker, transaction);
final IndexInfo info = new IndexInfo(indexer, config);
info.setCreating(oldDoc == null);
info.setOldDocPermissions(oldDoc != null ? oldDoc.getPermissions() : null);
indexer.setDocument(document, config);
addObserversToIndexer(broker, indexer);
indexer.setValidating(true);
if(CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI.equals(docUri)) {
// We are updating collection.xconf: temporarily disable the collection configuration while it is being replaced
//CollectionConfigurationManager confMgr = broker.getBrokerPool().getConfigurationManager();
//confMgr.invalidateAll(getURI());
setCollectionConfigEnabled(false);
}
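// Set up the document triggers (the collection configuration is only passed in if triggers are enabled) and fire the 'before' phase: beforeCreateDocument for a new resource, beforeUpdateDocument for a replacement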
final DocumentTriggers trigger = new DocumentTriggers(broker, indexer, this, isTriggersEnabled() ? config : null);
trigger.setValidating(true);
info.setTriggers(trigger);
if(oldDoc == null) {
trigger.beforeCreateDocument(broker, transaction, getURI().append(docUri));
} else {
trigger.beforeUpdateDocument(broker, transaction, oldDoc);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning document " + getURI().append(docUri));
}
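// Run the caller-supplied validation block; the indexer is still in validating mode, so the content is parsed and validated but nothing is persisted yet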
doValidate.run(info);
// The new document is valid: remove the old document (if any)
if (oldDoc != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("removing old document " + oldDoc.getFileURI());
}
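// Lock the old document for writing before it is removed; the finally block releases this lock again unless the branch below transfers it to the new document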
oldDoc.getUpdateLock().acquire(Lock.WRITE_LOCK);
oldDocLocked = true;
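// The old resource was binary: remove it outright, then lock the new document, assign it a fresh resource id and add it to the collection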
if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
// TODO: use a more elaborate method? No triggers are fired here...
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
documents.remove(oldDoc.getFileURI().getRawCollectionPath());
// This lock is released in storeXMLInternal()
// TODO: verify that execution always reaches that point, so the lock is guaranteed to be released
// if (transaction != null)
// transaction.acquireLock(document.getUpdateLock(), Lock.WRITE_LOCK);
// else
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
} else {
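// The old resource was an XML document: remove its contents, reuse the existing document object for the new version, and keep its write lock held (oldDocLocked is reset so the finally block does not release it)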
// TODO: use a more elaborate method? No triggers are fired here...
broker.removeXMLResource(transaction, oldDoc, false);
oldDoc.copyOf(document, true);
indexer.setDocumentObject(oldDoc);
// the old document object now represents the new document
document = oldDoc;
oldDocLocked = false;
}
if (LOG.isDebugEnabled()) {
LOG.debug("removed old document " + oldDoc.getFileURI());
}
} else {
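// No previous document exists: lock the new document, assign it a fresh resource id and add it to the collection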
// This lock is released in storeXMLInternal()
// TODO: verify that execution always reaches that point, so the lock is guaranteed to be released
// if (transaction != null)
// transaction.acquireLock(document.getUpdateLock(), Lock.WRITE_LOCK);
// else
document.getUpdateLock().acquire(Lock.WRITE_LOCK);
document.setDocId(broker.getNextResourceId(transaction, this));
addDocument(transaction, broker, document);
}
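// Validation has passed: switch the triggers out of validating mode and return the prepared IndexInfo to the caller for the actual store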
trigger.setValidating(false);
return info;
} finally {
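// Always release the old document's lock if it is still held, then the collection lock, and end the monitored job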
if (oldDoc != null && oldDocLocked) {
oldDoc.getUpdateLock().release(Lock.WRITE_LOCK);
}
getLock().release(Lock.WRITE_LOCK);
db.getProcessMonitor().endJob();
}
}