Use of org.exist.Indexer in project exist by eXist-db.
The snippet below comes from the class MutableCollection, method validateXMLResourceInternal.
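This internal method sits behind the Collection's two-phase store API: callers first validate a resource (which produces the IndexInfo used below) and then store it. A minimal caller-side sketch, assuming the classic validateXMLResource/store entry points as found in eXist 4.x (method names, lock handling, and broker/transaction plumbing differ between eXist versions, so treat the names here as illustrative rather than the exact current API):

import java.util.Optional;

import org.exist.collections.Collection;
import org.exist.collections.IndexInfo;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.storage.txn.Txn;
import org.exist.xmldb.XmldbURI;

public class StoreExample {

    // Hypothetical embedded-mode caller; assumes an already started and configured BrokerPool.
    public static void storeSample(final BrokerPool pool) throws Exception {
        final String xml = "<sample>hello</sample>";

        try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()));
                final Txn transaction = pool.getTransactionManager().beginTransaction()) {

            final Collection collection = broker.getOrCreateCollection(transaction, XmldbURI.create("/db/example"));
            broker.saveCollection(transaction, collection);

            // Phase 1: validation - internally this reaches validateXMLResourceInternal,
            // which returns an IndexInfo holding a write lock on the new document.
            final IndexInfo info = collection.validateXMLResource(transaction, broker, XmldbURI.create("sample.xml"), xml);

            // Phase 2: storage - consumes the IndexInfo and eventually releases the document lock.
            collection.store(transaction, broker, info, xml);

            transaction.commit();
        }
    }
}

The internal method that the validation phase eventually reaches is shown below.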
/**
 * Validates an XML document and prepares it for further storage. Launches the prepare and postValidate triggers.
 * Since the process depends on the collection configuration, the collection acquires a write lock during
 * the process.
 *
 * @param transaction The database transaction
 * @param broker The database broker
 * @param name the name (without path) of the document
 * @param config the collection configuration
 * @param validator A function which validates the document or throws an Exception
 *
 * @return An {@link IndexInfo} with a write lock on the document.
 */
private IndexInfo validateXMLResourceInternal(final Txn transaction, final DBBroker broker, final XmldbURI name,
        final CollectionConfiguration config, final Consumer2E<IndexInfo, SAXException, EXistException> validator)
        throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {

    // Make the necessary operations if we process a collection configuration document
    checkConfigurationDocument(transaction, broker, name);

    final Database db = broker.getBrokerPool();
    if (db.isReadOnly()) {
        throw new IOException("Database is read-only");
    }

    ManagedDocumentLock documentWriteLock = null;
    DocumentImpl oldDoc = null;

    db.getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, name);
    try {
        try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) {

            // acquire the WRITE_LOCK on the Document, this lock is released in storeXMLInternal via IndexInfo
            documentWriteLock = lockManager.acquireDocumentWriteLock(getURI().append(name.lastSegment()));

            oldDoc = documents.get(name.lastSegmentString());
            checkPermissionsForAddDocument(broker, oldDoc);

            // NOTE: the new `document` object actually gets discarded in favour of the `oldDoc` below
            // if there is an oldDoc and it is XML (so we can use -1 as the docId because it will never be used)
            final int docId = (oldDoc != null && oldDoc.getResourceType() == DocumentImpl.XML_FILE)
                    ? -1 : broker.getNextResourceId(transaction);
            DocumentImpl document = new DocumentImpl((BrokerPool) db, this, docId, name);

            checkCollectionConflict(name);
            manageDocumentInformation(oldDoc, document);

            final Indexer indexer = new Indexer(broker, transaction);
            final IndexInfo info = new IndexInfo(indexer, config, documentWriteLock);
            info.setCreating(oldDoc == null);
            info.setOldDocPermissions(oldDoc != null ? oldDoc.getPermissions() : null);
            indexer.setDocument(document, config);
            indexer.setValidating(true);

            final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, indexer, this,
                    broker.isTriggersEnabled() ? config : null);
            trigger.setValidating(true);
            info.setTriggers(trigger);

            if (oldDoc == null) {
                trigger.beforeCreateDocument(broker, transaction, getURI().append(name));
            } else {
                trigger.beforeUpdateDocument(broker, transaction, oldDoc);
            }

            if (LOG.isDebugEnabled()) {
                LOG.debug("Scanning document {}", getURI().append(name));
            }

            validator.accept(info);

            // new document is valid: remove old document
            if (oldDoc != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("removing old document {}", oldDoc.getFileURI());
                }

                updateModificationTime(document);

                /**
                 * Matching {@link StreamListener#endReplaceDocument(Txn)} call is in
                 * {@link #storeXMLInternal(Txn, DBBroker, IndexInfo, Consumer2E)}
                 */
                final StreamListener listener = broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT);
                listener.startReplaceDocument(transaction);

                if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
                    // TODO : use a more elaborate method? No triggers...
                    broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
                    documents.remove(oldDoc.getFileURI().lastSegmentString());
                    addDocument(transaction, broker, document);
                } else {
                    // TODO : use a more elaborate method? No triggers...
                    broker.removeXMLResource(transaction, oldDoc, false);
                    oldDoc.copyOf(broker, document, oldDoc);
                    indexer.setDocumentObject(oldDoc);
                    // old has become new at this point
                    document = oldDoc;
                }

                if (LOG.isDebugEnabled()) {
                    LOG.debug("removed old document {}", oldDoc.getFileURI());
                }
            } else {
                addDocument(transaction, broker, document);
            }

            trigger.setValidating(false);

            return info;
        }
    } catch (final EXistException | PermissionDeniedException | SAXException | LockException | IOException e) {
        // if there is an exception and we hold the document WRITE_LOCK we must release it
        if (documentWriteLock != null) {
            documentWriteLock.close();
        }
        throw e;
    } finally {
        db.getProcessMonitor().endJob();
    }
}
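The validator argument is what actually drives the parse: while the Indexer is still in validating mode, the caller-supplied callback feeds the document through a SAX reader with the Indexer acting as the ContentHandler. A rough sketch of such a callback, with a plain JAXP reader standing in for eXist's pooled XMLReader (the real callers also wire lexical and error handlers through the IndexInfo, so this is a simplified, assumption-laden illustration rather than the code eXist itself uses):

import java.io.IOException;
import java.io.StringReader;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;

import org.exist.EXistException;
import org.exist.collections.IndexInfo;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;

import com.evolvedbinary.j8fu.function.Consumer2E;

public class ValidatorExample {

    // Hypothetical validator callback of the shape expected by validateXMLResourceInternal.
    static Consumer2E<IndexInfo, SAXException, EXistException> validatorFor(final String xml) {
        return info -> {
            try {
                final SAXParserFactory factory = SAXParserFactory.newInstance();
                factory.setNamespaceAware(true);
                final XMLReader reader = factory.newSAXParser().getXMLReader();

                // The Indexer acts as the ContentHandler; while setValidating(true) is in effect
                // it only checks and collects structure, without writing to the persistent DOM yet.
                reader.setContentHandler(info.getIndexer());
                reader.parse(new InputSource(new StringReader(xml)));
            } catch (final ParserConfigurationException | IOException e) {
                throw new EXistException(e);
            }
        };
    }
}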