Use of org.exist.util.LockException in project exist by eXist-db.
The snippet below is from the class HasIndex, method eval.
@Override
public Sequence eval(final Sequence[] args, final Sequence contextSequence) throws XPathException {
final String id = args[0].getStringValue();
final SortIndexWorker index = (SortIndexWorker) context.getBroker().getIndexController().getWorkerByIndexId(SortIndex.ID);
try {
return BooleanValue.valueOf(index.hasIndex(id));
} catch (final EXistException e) {
throw new XPathException(this, e.getMessage(), e);
} catch (final LockException e) {
throw new XPathException(this, "Caught lock error while searching index. Giving up.", e);
}
}
Use of org.exist.util.LockException in project exist by eXist-db.
The snippet below is from the class MutableCollection, method getDescendants.
@Override
public List<Collection> getDescendants(final DBBroker broker, final Subject user) throws PermissionDeniedException {
final ArrayList<Collection> collectionList = new ArrayList<>();
final Iterator<XmldbURI> i;
try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionReadLock(path)) {
if (!getPermissionsNoLock().validate(broker.getCurrentSubject(), Permission.READ)) {
throw new PermissionDeniedException("Permission to list sub-collections denied on " + this.getURI());
}
collectionList.ensureCapacity(subCollections.size());
i = stableIterator(subCollections);
} catch (final LockException e) {
LOG.error(e.getMessage(), e);
return Collections.emptyList();
}
while (i.hasNext()) {
final XmldbURI childName = i.next();
// TODO : resolve URI !
final Collection child = broker.getCollection(path.append(childName));
if (getPermissions().validate(user, Permission.READ)) {
collectionList.add(child);
if (child.getChildCollectionCount(broker) > 0) {
// Recursive call
collectionList.addAll(child.getDescendants(broker, user));
}
}
}
return collectionList;
}
Use of org.exist.util.LockException in project exist by eXist-db.
The snippet below is from the class MutableCollection, method validateXMLResourceInternal.
/**
* Validates an XML document et prepares it for further storage. Launches prepare and postValidate triggers.
* Since the process is dependant from the collection configuration, the collection acquires a write lock during
* the process.
*
* @param transaction The database transaction
* @param broker The database broker
* @param name the name (without path) of the document
* @param validator A function which validates the document of throws an Exception
*
* @return An {@link IndexInfo} with a write lock on the document.
*/
private IndexInfo validateXMLResourceInternal(final Txn transaction, final DBBroker broker, final XmldbURI name, final CollectionConfiguration config, final Consumer2E<IndexInfo, SAXException, EXistException> validator) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {
// Make the necessary operations if we process a collection configuration document
checkConfigurationDocument(transaction, broker, name);
final Database db = broker.getBrokerPool();
if (db.isReadOnly()) {
throw new IOException("Database is read-only");
}
ManagedDocumentLock documentWriteLock = null;
DocumentImpl oldDoc = null;
db.getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, name);
try {
try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) {
// acquire the WRITE_LOCK on the Document, this lock is released in storeXMLInternal via IndexInfo
documentWriteLock = lockManager.acquireDocumentWriteLock(getURI().append(name.lastSegment()));
oldDoc = documents.get(name.lastSegmentString());
checkPermissionsForAddDocument(broker, oldDoc);
// NOTE: the new `document` object actually gets discarded in favour of the `oldDoc` below if there is an oldDoc and it is XML (so we can use -1 as the docId because it will never be used)
final int docId = (oldDoc != null && oldDoc.getResourceType() == DocumentImpl.XML_FILE) ? -1 : broker.getNextResourceId(transaction);
DocumentImpl document = new DocumentImpl((BrokerPool) db, this, docId, name);
checkCollectionConflict(name);
manageDocumentInformation(oldDoc, document);
final Indexer indexer = new Indexer(broker, transaction);
final IndexInfo info = new IndexInfo(indexer, config, documentWriteLock);
info.setCreating(oldDoc == null);
info.setOldDocPermissions(oldDoc != null ? oldDoc.getPermissions() : null);
indexer.setDocument(document, config);
indexer.setValidating(true);
final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, indexer, this, broker.isTriggersEnabled() ? config : null);
trigger.setValidating(true);
info.setTriggers(trigger);
if (oldDoc == null) {
trigger.beforeCreateDocument(broker, transaction, getURI().append(name));
} else {
trigger.beforeUpdateDocument(broker, transaction, oldDoc);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning document {}", getURI().append(name));
}
validator.accept(info);
// new document is valid: remove old document
if (oldDoc != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("removing old document {}", oldDoc.getFileURI());
}
updateModificationTime(document);
/**
* Matching {@link StreamListener#endReplaceDocument(Txn)} call is in
* {@link #storeXMLInternal(Txn, DBBroker, IndexInfo, Consumer2E)}
*/
final StreamListener listener = broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT);
listener.startReplaceDocument(transaction);
if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
// TODO : use a more elaborated method ? No triggers...
broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
documents.remove(oldDoc.getFileURI().lastSegmentString());
addDocument(transaction, broker, document);
} else {
// TODO : use a more elaborated method ? No triggers...
broker.removeXMLResource(transaction, oldDoc, false);
oldDoc.copyOf(broker, document, oldDoc);
indexer.setDocumentObject(oldDoc);
// old has become new at this point
document = oldDoc;
}
if (LOG.isDebugEnabled()) {
LOG.debug("removed old document {}", oldDoc.getFileURI());
}
} else {
addDocument(transaction, broker, document);
}
trigger.setValidating(false);
return info;
}
} catch (final EXistException | PermissionDeniedException | SAXException | LockException | IOException e) {
// if there is an exception and we hold the document WRITE_LOCK we must release it
if (documentWriteLock != null) {
documentWriteLock.close();
}
throw e;
} finally {
db.getProcessMonitor().endJob();
}
}
Use of org.exist.util.LockException in project exist by eXist-db.
The snippet below is from the class MutableCollection, method storeXMLInternal.
/**
* Stores an XML document in the database. {@link #validateXMLResourceInternal(Txn, DBBroker, XmldbURI,
* CollectionConfiguration, Consumer2E)}should have been called previously in order to acquire a write lock
* for the document. Launches the finish trigger.
*
* @param transaction The database transaction
* @param broker The database broker
* @param info Tracks information between validate and store phases
* @param parserFn A function which parses the XML document
*/
private void storeXMLInternal(final Txn transaction, final DBBroker broker, final IndexInfo info, final Consumer2E<IndexInfo, EXistException, SAXException> parserFn) throws EXistException, SAXException, PermissionDeniedException {
final DocumentImpl document = info.getIndexer().getDocument();
final Database db = broker.getBrokerPool();
try {
if (LOG.isDebugEnabled()) {
LOG.debug("storing document {} ...", document.getDocId());
}
// Sanity check
if (!lockManager.isDocumentLockedForWrite(document.getURI())) {
LOG.warn("document is not locked for write !");
}
db.getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_DOC, document.getFileURI());
parserFn.accept(info);
broker.storeXMLResource(transaction, document);
broker.flush();
broker.closeDocument();
// broker.checkTree(document);
LOG.debug("document stored.");
} finally {
// This lock has been acquired in validateXMLResourceInternal()
info.getDocumentLock().close();
broker.getBrokerPool().getProcessMonitor().endJob();
}
if (info.isCreating()) {
info.getTriggers().afterCreateDocument(broker, transaction, document);
} else {
final StreamListener listener = broker.getIndexController().getStreamListener();
listener.endReplaceDocument(transaction);
info.getTriggers().afterUpdateDocument(broker, transaction, document);
}
db.getNotificationService().notifyUpdate(document, (info.isCreating() ? UpdateListener.ADD : UpdateListener.UPDATE));
// Is it a collection configuration file ?
final XmldbURI docName = document.getFileURI();
// TODO: *resolve* URI against CollectionConfigurationManager.CONFIG_COLLECTION_URI
if (getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI) && docName.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI)) {
broker.sync(Sync.MAJOR);
final CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
if (manager != null) {
try {
manager.invalidate(getURI(), broker.getBrokerPool());
manager.loadConfiguration(broker, this);
} catch (final PermissionDeniedException | LockException pde) {
throw new EXistException(pde.getMessage(), pde);
} catch (final CollectionConfigurationException e) {
// DIZ: should this exception really been thrown? bugid=1807744
throw new EXistException("Error while reading new collection configuration: " + e.getMessage(), e);
}
}
}
}
Use of org.exist.util.LockException in project exist by eXist-db.
The snippet below is from the class STXTransformerTrigger, method configure.
@Override
public void configure(DBBroker broker, Txn transaction, Collection parent, Map<String, List<?>> parameters) throws TriggerException {
super.configure(broker, transaction, parent, parameters);
final String stylesheet = (String) parameters.get("src").get(0);
if (stylesheet == null) {
throw new TriggerException("STXTransformerTrigger requires an attribute 'src'");
}
/*
String origProperty = System.getProperty("javax.xml.transform.TransformerFactory");
System.setProperty("javax.xml.transform.TransformerFactory", "net.sf.joost.trax.TransformerFactoryImpl");
factory = (SAXTransformerFactory)TransformerFactory.newInstance();
// reset property to previous setting
if(origProperty != null) {
System.setProperty("javax.xml.transform.TransformerFactory", origProperty);
}
*/
/*ServiceLoader<TransformerFactory> loader = ServiceLoader.load(TransformerFactory.class);
for(TransformerFactory transformerFactory : loader) {
if(transformerFactory.getClass().getName().equals("net.sf.joost.trax.TransformerFactoryImpl")) {
factory = transformerFactory.ne
}
}*/
XmldbURI stylesheetUri = null;
try {
stylesheetUri = XmldbURI.xmldbUriFor(stylesheet);
} catch (final URISyntaxException e) {
}
// TODO: allow full XmldbURIs to be used as well.
if (stylesheetUri == null || stylesheet.indexOf(':') == Constants.STRING_NOT_FOUND) {
stylesheetUri = parent.getURI().resolveCollectionPath(stylesheetUri);
DocumentImpl doc;
try {
doc = (DocumentImpl) broker.getXMLResource(stylesheetUri);
if (doc == null) {
throw new TriggerException("stylesheet " + stylesheetUri + " not found in database");
}
if (doc instanceof BinaryDocument) {
throw new TriggerException("stylesheet " + stylesheetUri + " must be stored as an xml document and not a binary document!");
}
handler = factory.newTransformerHandler(STXTemplatesCache.getInstance().getOrUpdateTemplate(broker, doc));
} catch (final TransformerConfigurationException | PermissionDeniedException | SAXException | LockException e) {
throw new TriggerException(e.getMessage(), e);
}
} else {
try {
LOG.debug("compiling stylesheet {}", stylesheet);
final Templates template = factory.newTemplates(new StreamSource(stylesheet));
handler = factory.newTransformerHandler(template);
} catch (final TransformerConfigurationException e) {
throw new TriggerException(e.getMessage(), e);
}
}
}
Aggregations