Use of org.exist.indexing.StreamListener in project exist by eXist-db: class MutableCollection, method removeBinaryResource.
@Override
public void removeBinaryResource(final Txn transaction, final DBBroker broker, final DocumentImpl doc) throws PermissionDeniedException, LockException, TriggerException {
    if (doc == null) {
        // TODO should throw an exception!!! Otherwise we dont know if the document was removed
        return;
    }
    broker.getBrokerPool().getProcessMonitor().startJob(ProcessMonitor.ACTION_REMOVE_BINARY, doc.getFileURI());
    try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) {
        if (!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.WRITE)) {
            throw new PermissionDeniedException("Permission denied to write collection: " + path);
        }
        if (doc.getResourceType() != DocumentImpl.BINARY_FILE) {
            throw new PermissionDeniedException("document " + doc.getFileURI() + " is not a binary object");
        }
        try (final ManagedDocumentLock docUpdateLock = lockManager.acquireDocumentWriteLock(doc.getURI())) {
            try {
                final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, null, this, broker.isTriggersEnabled() ? getConfiguration(broker) : null);
                trigger.beforeDeleteDocument(broker, transaction, doc);
                final IndexController indexController = broker.getIndexController();
                final StreamListener listener = indexController.getStreamListener(doc, StreamListener.ReindexMode.REMOVE_BINARY);
                try {
                    indexController.startIndexDocument(transaction, listener);
                    try {
                        broker.removeBinaryResource(transaction, (BinaryDocument) doc);
                    } catch (final IOException ex) {
                        throw new PermissionDeniedException("Cannot delete file: " + doc.getURI().toString() + ": " + ex.getMessage(), ex);
                    }
                    documents.remove(doc.getFileURI().lastSegmentString());
                } finally {
                    indexController.endIndexDocument(transaction, listener);
                }
                trigger.afterDeleteDocument(broker, transaction, doc.getURI());
            } finally {
                broker.getBrokerPool().getProcessMonitor().endJob();
            }
            // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme
            collectionLock.close();
        }
    }
}
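The index-relevant part of this method is the bracket around the physical removal: the IndexController hands out a StreamListener in REMOVE_BINARY mode, startIndexDocument opens the pass, and the finally block guarantees endIndexDocument runs even if the removal fails. A minimal sketch of just that bracket, extracted for clarity (the standalone helper and its name are hypothetical; the broker and IndexController calls are the ones used above):

// Hypothetical helper isolating the index-notification bracket from removeBinaryResource().
private void removeBinaryWithIndexNotification(final DBBroker broker, final Txn transaction, final BinaryDocument doc) throws IOException {
    final IndexController indexController = broker.getIndexController();
    // REMOVE_BINARY tells index plugins that a binary document is being removed
    final StreamListener listener = indexController.getStreamListener(doc, StreamListener.ReindexMode.REMOVE_BINARY);
    indexController.startIndexDocument(transaction, listener);
    try {
        // the removal happens between start and end so the plugins can react to it
        broker.removeBinaryResource(transaction, doc);
    } finally {
        // always close the index pass, even when the removal throws
        indexController.endIndexDocument(transaction, listener);
    }
}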
Use of org.exist.indexing.StreamListener in project exist by eXist-db: class MutableCollection, method addBinaryResource.
private BinaryDocument addBinaryResource(final Database db, final Txn transaction, final DBBroker broker, final BinaryDocument blob, final InputStream is, final String mimeType, @Deprecated final long size, final Date created, final Date modified, @Nullable final Permission permission, final DBBroker.PreserveType preserve, final DocumentImpl oldDoc, final ManagedCollectionLock collectionLock) throws EXistException, PermissionDeniedException, LockException, TriggerException, IOException {
    final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, null, this, broker.isTriggersEnabled() ? getConfiguration(broker) : null);
    final XmldbURI docUri = blob.getFileURI();
    try {
        db.getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_BINARY, docUri);
        checkPermissionsForAddDocument(broker, oldDoc);
        checkCollectionConflict(docUri);
        // manageDocumentInformation(oldDoc, blob);
        if (!broker.preserveOnCopy(preserve)) {
            blob.copyOf(broker, blob, oldDoc);
        }
        blob.setMimeType(mimeType == null ? MimeType.BINARY_TYPE.getName() : mimeType);
        if (created != null) {
            blob.setCreated(created.getTime());
        }
        if (modified != null) {
            blob.setLastModified(modified.getTime());
        }
        if (oldDoc == null) {
            trigger.beforeCreateDocument(broker, transaction, blob.getURI());
        } else {
            trigger.beforeUpdateDocument(broker, transaction, oldDoc);
        }
        if (oldDoc != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("removing old document db entry{}", oldDoc.getFileURI());
            }
            if (!broker.preserveOnCopy(preserve)) {
                updateModificationTime(blob);
            }
            // remove the old document
            broker.removeResource(transaction, oldDoc);
        }
        if (permission != null) {
            blob.setPermissions(permission);
        }
        // store the binary content (create/replace)
        broker.storeBinaryResource(transaction, blob, is);
        addDocument(transaction, broker, blob, oldDoc);
        final IndexController indexController = broker.getIndexController();
        final StreamListener listener = indexController.getStreamListener(blob, StreamListener.ReindexMode.STORE);
        indexController.startIndexDocument(transaction, listener);
        try {
            broker.storeXMLResource(transaction, blob);
        } finally {
            indexController.endIndexDocument(transaction, listener);
        }
        if (oldDoc == null) {
            trigger.afterCreateDocument(broker, transaction, blob);
        } else {
            trigger.afterUpdateDocument(broker, transaction, blob);
        }
        // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme
        collectionLock.close();
        return blob;
    } finally {
        broker.getBrokerPool().getProcessMonitor().endJob();
    }
}
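For the store case the ordering matters: the raw content is written with storeBinaryResource first, and the StreamListener, obtained in STORE mode, only brackets broker.storeXMLResource, which persists the document record that the index plugins see. A condensed, hypothetical view of just those steps, using the variable names from the method above (the surrounding triggers, locking, and permission checks are omitted):

broker.storeBinaryResource(transaction, blob, is);        // write the raw binary content
addDocument(transaction, broker, blob, oldDoc);           // register the document in the collection
final IndexController indexController = broker.getIndexController();
final StreamListener listener = indexController.getStreamListener(blob, StreamListener.ReindexMode.STORE);
indexController.startIndexDocument(transaction, listener);
try {
    broker.storeXMLResource(transaction, blob);           // persist the document record; index plugins are notified
} finally {
    indexController.endIndexDocument(transaction, listener);
}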
Use of org.exist.indexing.StreamListener in project exist by eXist-db: class NativeBroker, method copyXMLResource.
private void copyXMLResource(final Txn transaction, @EnsureLocked(mode = LockMode.READ_LOCK) final DocumentImpl oldDoc, @EnsureLocked(mode = LockMode.WRITE_LOCK) final DocumentImpl newDoc) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Copying document {} to {}", oldDoc.getFileURI(), newDoc.getURI());
    }
    final long start = System.currentTimeMillis();
    final StreamListener listener = getIndexController().getStreamListener(newDoc, ReindexMode.STORE);
    final NodeList nodes = oldDoc.getChildNodes();
    for (int i = 0; i < nodes.getLength(); i++) {
        final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
        try (final INodeIterator iterator = getNodeIterator(node)) {
            iterator.next();
            copyNodes(transaction, iterator, node, new NodePath2(), newDoc, false, listener);
        }
    }
    flush();
    closeDocument();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Copy took {} ms.", (System.currentTimeMillis() - start));
    }
}
Use of org.exist.indexing.StreamListener in project exist by eXist-db: class NativeBroker, method defragXMLResource.
@Override
public void defragXMLResource(final Txn transaction, final DocumentImpl doc) {
    // TODO : use dedicated function in XmldbURI
    if (LOG.isDebugEnabled())
        LOG.debug("============> Defragmenting document {}", doc.getURI());
    final long start = System.currentTimeMillis();
    try {
        final long firstChild = doc.getFirstChildAddress();
        // dropping old structure index
        dropIndex(transaction, doc);
        // dropping dom index
        final NodeRef ref = new NodeRef(doc.getDocId());
        final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) {
            @Override
            public Object start() {
                try {
                    domDb.remove(transaction, idx, null);
                    domDb.flush();
                } catch (final IOException | DBException e) {
                    LOG.error("start() - " + "error while removing doc", e);
                } catch (final TerminatedException e) {
                    LOG.error("method terminated", e);
                }
                return null;
            }
        }.run();
        // create a copy of the old doc to copy the nodes into it
        final DocumentImpl tempDoc = new DocumentImpl(pool, doc.getCollection(), doc.getDocId(), doc.getFileURI());
        tempDoc.copyOf(this, doc, doc);
        final StreamListener listener = getIndexController().getStreamListener(doc, ReindexMode.STORE);
        // copy the nodes
        final NodeList nodes = doc.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
            try (final INodeIterator iterator = getNodeIterator(node)) {
                iterator.next();
                copyNodes(transaction, iterator, node, new NodePath2(), tempDoc, true, listener);
            }
        }
        flush();
        // remove the old nodes
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) {
            @Override
            public Object start() {
                domDb.removeAll(transaction, firstChild);
                try {
                    domDb.flush();
                } catch (final DBException e) {
                    LOG.error("start() - error while removing doc", e);
                }
                return null;
            }
        }.run();
        doc.copyChildren(tempDoc);
        doc.setSplitCount(0);
        doc.setPageCount(tempDoc.getPageCount());
        storeXMLResource(transaction, doc);
        closeDocument();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Defragmentation took {} ms.", (System.currentTimeMillis() - start));
        }
    } catch (final PermissionDeniedException | IOException e) {
        LOG.error(e);
    }
}
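Defragmentation involves two index passes: dropIndex (shown next) streams the existing nodes through a REMOVE_ALL_NODES listener to clear the old entries, and the copy loop above then streams the rewritten nodes through a STORE listener so the indexes are rebuilt against the compacted storage. A hypothetical outline of that pairing; only the getStreamListener calls are taken from the code, the rest is placeholder:

// pass 1: clear the existing index entries for the document (see dropIndex below)
final StreamListener remove = getIndexController().getStreamListener(doc, ReindexMode.REMOVE_ALL_NODES);
// ... stream the current nodes through the remove listener ...
// pass 2: re-add entries while the nodes are copied into the compacted document
final StreamListener store = getIndexController().getStreamListener(doc, ReindexMode.STORE);
// ... stream the copied nodes through the store listener ...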
Use of org.exist.indexing.StreamListener in project exist by eXist-db: class NativeBroker, method dropIndex.
private void dropIndex(final Txn transaction, @EnsureLocked(mode = LockMode.WRITE_LOCK) final DocumentImpl document) {
    final StreamListener listener = getIndexController().getStreamListener(document, ReindexMode.REMOVE_ALL_NODES);
    listener.startIndexDocument(transaction);
    final NodeList nodes = document.getChildNodes();
    for (int i = 0; i < nodes.getLength(); i++) {
        final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
        try (final INodeIterator iterator = getNodeIterator(node)) {
            iterator.next();
            scanNodes(transaction, iterator, node, new NodePath2(), IndexMode.REMOVE, listener);
        } catch (final IOException ioe) {
            LOG.error("Unable to close node iterator", ioe);
        }
    }
    listener.endIndexDocument(transaction);
    notifyDropIndex(document);
    getIndexController().flush();
}
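Unlike the collection methods above, dropIndex drives the listener directly with listener.startIndexDocument and listener.endIndexDocument instead of going through IndexController.startIndexDocument/endIndexDocument. A minimal sketch of that direct bracket; the try/finally is an addition for illustration (the original calls endIndexDocument unconditionally after the loop), and the placeholder comment stands in for the scanNodes walk:

final StreamListener listener = getIndexController().getStreamListener(document, ReindexMode.REMOVE_ALL_NODES);
listener.startIndexDocument(transaction);
try {
    // walk the document's nodes and report them to the listener (scanNodes in the code above)
} finally {
    listener.endIndexDocument(transaction);
}
notifyDropIndex(document);
getIndexController().flush();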