Usage of org.exist.dom.persistent.IStoredNode in the eXist-db project.
The scanNode method of the IndexUtils class.
/**
 * Recursively walks the subtree rooted at {@code node}, reporting elements,
 * text nodes and attributes to the given {@link StreamListener}.
 *
 * Children are pulled from {@code iterator} in document order; the iterator is
 * assumed to be positioned just after {@code node}. {@code currentPath} is
 * extended with each element's QName before descending and restored afterwards.
 *
 * @param transaction the active transaction, passed through to the listener
 * @param iterator supplies the stored nodes following {@code node} in document order
 * @param node the node currently being scanned
 * @param listener receiver of the traversal events; may be null (traversal still
 *                 consumes the iterator so the caller's position stays correct)
 * @param currentPath the node path of {@code node}, mutated and restored in place
 */
private static void scanNode(Txn transaction, INodeIterator iterator, IStoredNode node, StreamListener listener, NodePath currentPath) {
    final short type = node.getNodeType();
    if (type == Node.ELEMENT_NODE) {
        if (listener != null) {
            listener.startElement(transaction, (ElementImpl) node, currentPath);
        }
        // Attributes are stored as children too, hence the hasAttributes() check.
        if (node.hasChildNodes() || node.hasAttributes()) {
            final int children = node.getChildCount();
            for (int idx = 0; idx < children; idx++) {
                final IStoredNode descendant = iterator.next();
                final boolean isElement = descendant.getNodeType() == Node.ELEMENT_NODE;
                // Only element children contribute a component to the path.
                if (isElement) {
                    currentPath.addComponent(descendant.getQName());
                }
                scanNode(transaction, iterator, descendant, listener, currentPath);
                if (isElement) {
                    currentPath.removeLastComponent();
                }
            }
        }
        if (listener != null) {
            listener.endElement(transaction, (ElementImpl) node, currentPath);
        }
    } else if (type == Node.TEXT_NODE) {
        if (listener != null) {
            listener.characters(transaction, (TextImpl) node, currentPath);
        }
    } else if (type == Node.ATTRIBUTE_NODE) {
        if (listener != null) {
            listener.attribute(transaction, (AttrImpl) node, currentPath);
        }
    }
}
Usage of org.exist.dom.persistent.IStoredNode in the eXist-db project.
The next method of the NodeIterator class.
/**
 * Returns the next node in document order.
 *
 * Reads the raw tuple at the current (page, offset) position in the DOM file,
 * following page chains, skipping link tuples, and resolving relocated and
 * overflow records as needed. Advances {@code offset} (and possibly
 * {@code page}/{@code pageNum}) past the consumed tuple so repeated calls
 * stream nodes sequentially.
 *
 * @return the deserialized node, or {@code null} if positioning fails, the
 *         page chain is broken, the record is unreadable, or the read lock
 *         cannot be acquired
 */
@Override
public IStoredNode next() {
    // Hold the b-tree read lock for the whole read; released by try-with-resources.
    try (final ManagedLock<ReentrantLock> domFileLock = lockManager.acquireBtreeReadLock(db.getLockName())) {
        db.setOwnerObject(broker);
        IStoredNode nextNode = null;
        if (gotoNextPosition()) {
            // Original storage address of a relocated node; only meaningful when
            // ItemId.isRelocated(lastTupleID) is true for the tuple just read.
            long backLink = 0;
            // Loop until a real node is materialized; link tuples make us continue.
            do {
                final DOMFile.DOMFilePageHeader pageHeader = page.getPageHeader();
                // Next value larger than length of the current page?
                if (offset >= pageHeader.getDataLength()) {
                    // Load next page in chain
                    long nextPageNum = pageHeader.getNextDataPage();
                    if (nextPageNum == Page.NO_PAGE) {
                        // Broken page chain: we expected more data but there is no successor page.
                        SanityCheck.TRACE("bad link to next " + page.page.getPageInfo() + "; previous: " + pageHeader.getPreviousDataPage() + "; offset = " + offset + "; lastTupleID = " + lastTupleID);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(db.debugPageContents(page));
                        }
                        // TODO : throw exception here ? -pb
                        return null;
                    }
                    pageNum = nextPageNum;
                    page = db.getDOMPage(nextPageNum);
                    db.addToBuffer(page);
                    offset = 0;
                }
                // Extract the tuple ID
                lastTupleID = ByteConversion.byteToShort(page.data, offset);
                offset += DOMFile.LENGTH_TID;
                // Check if this is just a link to a relocated node
                if (ItemId.isLink(lastTupleID)) {
                    // Skip this
                    offset += DOMFile.LENGTH_FORWARD_LOCATION;
                    // Continue the iteration
                    continue;
                }
                // Read data length
                short vlen = ByteConversion.byteToShort(page.data, offset);
                offset += DOMFile.LENGTH_DATA_LENGTH;
                if (vlen < 0) {
                    // Corrupt length field; logged but deliberately not fatal here.
                    LOG.error("Got negative length{} at offset {}!!!", vlen, offset);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(db.debugPageContents(page));
                    }
                    // TODO : throw an exception right now ?
                }
                if (ItemId.isRelocated(lastTupleID)) {
                    // Found a relocated node. Read the original address
                    backLink = ByteConversion.byteToLong(page.data, offset);
                    offset += DOMFile.LENGTH_ORIGINAL_LOCATION;
                }
                // Overflow page? Load the overflow value
                if (vlen == DOMFile.OVERFLOW) {
                    // In-page payload is just a pointer to the overflow chain.
                    vlen = DOMFile.LENGTH_OVERFLOW_LOCATION;
                    final long overflow = ByteConversion.byteToLong(page.data, offset);
                    offset += DOMFile.LENGTH_OVERFLOW_LOCATION;
                    try {
                        final byte[] overflowValue = db.getOverflowValue(overflow);
                        nextNode = StoredNode.deserialize(overflowValue, 0, overflowValue.length, doc, useNodePool);
                    } catch (final Exception e) {
                        LOG.warn("Exception while loading overflow value: {}; originating page: {}", e.getMessage(), page.page.getPageInfo());
                        // TODO : rethrow exception ? -pb
                    }
                // Normal node
                } else {
                    try {
                        nextNode = StoredNode.deserialize(page.data, offset, vlen, doc, useNodePool);
                        offset += vlen;
                    } catch (final Exception e) {
                        LOG.error("Error while deserializing node: {}", e.getMessage(), e);
                        LOG.error("Reading from offset: {}; len = {}", offset, vlen);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(db.debugPageContents(page));
                        }
                        throw new RuntimeException(e);
                    }
                }
                if (nextNode == null) {
                    // Deserialization yielded nothing (e.g. swallowed overflow failure above).
                    LOG.error("illegal node on page {}; tid = {}; next = {}; prev = {}; offset = {}; len = {}", page.getPageNum(), ItemId.getId(lastTupleID), page.getPageHeader().getNextDataPage(), page.getPageHeader().getPreviousDataPage(), offset - vlen, page.getPageHeader().getDataLength());
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(db.debugPageContents(page));
                    }
                    // TODO : throw an exception here ? -pb
                    return null;
                }
                // Relocated nodes keep their original address; others get a fresh
                // pointer built from the current page and tuple id.
                if (ItemId.isRelocated(lastTupleID)) {
                    nextNode.setInternalAddress(backLink);
                } else {
                    nextNode.setInternalAddress(StorageAddress.createPointer((int) pageNum, ItemId.getId(lastTupleID)));
                }
                nextNode.setOwnerDocument(doc);
            } while (nextNode == null);
        }
        return nextNode;
    } catch (final LockException e) {
        LOG.warn("Failed to acquire read lock on {}", FileUtils.fileName(db.getFile()));
        // TODO : throw exception here ? -pb
        return null;
    } catch (final BTreeException | IOException e) {
        LOG.error(e.getMessage(), e);
        // TODO : re-throw exception ? -pb
    }
    return null;
}
Usage of org.exist.dom.persistent.IStoredNode in the eXist-db project.
The walkAncestor method of the LuceneMatchListener class.
/**
 * Appends {@code node} and all of its stored ancestors to {@code path},
 * ordered from the topmost ancestor down to {@code node} itself.
 *
 * @param node the starting node; a null node is a no-op (recursion base case)
 * @param path the path to extend, mutated in place
 */
private static void walkAncestor(final IStoredNode node, final NodePath2 path) {
    if (node != null) {
        // Recurse to the root first so ancestors are added before descendants.
        walkAncestor(node.getParentStoredNode(), path);
        path.addNode(node);
    }
}
Usage of org.exist.dom.persistent.IStoredNode in the eXist-db project.
The getReindexRoot method of the NGramIndexWorker class.
/**
 * Determines the topmost ancestor (or self) that must be reindexed by the
 * n-gram index after a change at {@code node}.
 *
 * Returns null when no n-gram configuration applies to any QName on the node's
 * path, when the collection has no index configuration, or for attribute nodes
 * (attributes never act as reindex roots here).
 *
 * @param node the changed node
 * @param path the node path of {@code node}
 * @param insert whether the change is an insertion (unused by this worker)
 * @param includeSelf whether {@code node} itself may match (if false, an
 *                    element's own path component is excluded from the scan)
 * @return the highest ancestor-or-self whose QName is n-gram indexed, or null
 */
@Override
public <T extends IStoredNode> IStoredNode getReindexRoot(final IStoredNode<T> node, final NodePath path, final boolean insert, final boolean includeSelf) {
    if (node.getNodeType() == Node.ATTRIBUTE_NODE) {
        return null;
    }
    final IndexSpec indexConf = node.getOwnerDocument().getCollection().getIndexConfiguration(broker);
    if (indexConf == null) {
        return null;
    }
    final Map<?, ?> config = (Map<?, ?>) indexConf.getCustomIndexSpec(NGramIndex.ID);
    if (config == null) {
        return null;
    }
    // Scan the path components for any QName with an n-gram configuration.
    // For elements, the node's own component is skipped unless includeSelf is set.
    final int scanLength = node.getNodeType() == Node.ELEMENT_NODE && !includeSelf ? path.length() - 1 : path.length();
    boolean configured = false;
    for (int i = 0; i < scanLength && !configured; i++) {
        configured = config.get(path.getComponent(i)) != null;
    }
    if (!configured) {
        return null;
    }
    // Walk up the ancestor chain, remembering the highest configured node.
    IStoredNode highest = null;
    IStoredNode<T> current = node;
    while (current != null) {
        if (config.get(current.getQName()) != null) {
            highest = current;
        }
        // Never climb above level 2 inside the temporary collection.
        if (current.getOwnerDocument().getCollection().isTempCollection() && current.getNodeId().getTreeLevel() == 2) {
            break;
        }
        current = current.getParentStoredNode();
    }
    return highest;
}
Usage of org.exist.dom.persistent.IStoredNode in the eXist-db project.
The getReindexRoot method of the RangeIndexWorker class.
/**
 * Determines the topmost ancestor (or self) that must be reindexed by the
 * range index after a change at {@code node}.
 *
 * First probes whether any prefix of {@code path} matches a range-index
 * configuration; if so, walks the ancestor chain and returns the highest
 * node whose path still matches. Returns null when no configuration is
 * loaded or nothing on the path is range indexed.
 *
 * @param node the changed node
 * @param path the node path of {@code node} (expected to be a NodePath2)
 * @param insert whether the change is an insertion (unused by this worker)
 * @param includeSelf whether {@code node} itself may match (if false, an
 *                    element's own component is dropped before probing)
 * @return the highest matching ancestor-or-self, or null
 */
@Override
public <T extends IStoredNode> IStoredNode getReindexRoot(IStoredNode<T> node, NodePath path, boolean insert, boolean includeSelf) {
    if (config == null) {
        return null;
    }
    // Probe pass: shrink a copy of the path from the tail until it matches
    // a configured range-index path, or it is exhausted.
    NodePath2 probe = new NodePath2((NodePath2) path);
    if (node.getNodeType() == Node.ELEMENT_NODE && !includeSelf) {
        probe.removeLastNode();
    }
    boolean matched = false;
    while (probe.length() > 0) {
        if (config.matches(probe)) {
            matched = true;
            break;
        }
        probe.removeLastNode();
    }
    if (!matched) {
        return null;
    }
    // Walk pass: climb the ancestor chain with a fresh copy of the full path,
    // remembering the highest node whose path still matches the configuration.
    final NodePath2 walk = new NodePath2((NodePath2) path);
    IStoredNode topMost = null;
    IStoredNode current = node;
    // Non-element nodes cannot be reindex roots; start from the parent element.
    if (current.getNodeType() != Node.ELEMENT_NODE) {
        current = current.getParentStoredNode();
    }
    while (current != null) {
        if (config.matches(walk)) {
            topMost = current;
        }
        current = current.getParentStoredNode();
        walk.removeLastNode();
    }
    return topMost;
}
Aggregations