Use of org.exist.storage.dom.DOMFile in project exist by eXist-db.
The class ConsistencyCheck, method checkXMLTree.
/**
 * Check the persistent DOM of a document. The method traverses the entire node tree and checks it
 * for consistency, including node relationships, child and attribute counts etc.
 *
 * @param doc the document to check
 * @return null if the document is consistent, an error report otherwise.
 */
public ErrorReport checkXMLTree(final DocumentImpl doc) {
    final DOMFile domDb = ((NativeBroker) broker).getDOMFile();
    return new DOMTransaction<ErrorReport>(this, domDb, () -> broker.getBrokerPool().getLockManager().acquireBtreeWriteLock(domDb.getLockName()), doc) {

        public ErrorReport start() {
            EmbeddedXMLStreamReader reader = null;
            try {
                final Node root = doc.getFirstChild();
                reader = (EmbeddedXMLStreamReader) broker.getXMLStreamReader((NodeHandle) root, true);
                boolean attribsAllowed = false;
                int expectedAttribs = 0;
                int attributeCount = 0;
                while (reader.hasNext()) {
                    final int status = reader.next();
                    final NodeId nodeId = (NodeId) reader.getProperty(EmbeddedXMLStreamReader.PROPERTY_NODE_ID);
                    if ((status != XMLStreamReader.END_ELEMENT) && !elementStack.isEmpty()) {
                        final ElementNode parent = elementStack.peek();
                        parent.childCount++;
                        // test parent-child relation
                        if (!nodeId.isChildOf(parent.elem.getNodeId())) {
                            return new ErrorReport.ResourceError(ErrorReport.NODE_HIERARCHY,
                                    "Node " + nodeId + " is not a child of " + parent.elem.getNodeId());
                        }
                        // test sibling relation
                        if ((parent.prevSibling != null) && !(nodeId.isSiblingOf(parent.prevSibling) && (nodeId.compareTo(parent.prevSibling) > 0))) {
                            return new ErrorReport.ResourceError(ErrorReport.INCORRECT_NODE_ID,
                                    "Node " + nodeId + " is not a sibling of " + parent.prevSibling);
                        }
                        parent.prevSibling = nodeId;
                    }
                    switch (status) {
                        case XMLStreamReader.ATTRIBUTE: {
                            attributeCount++;
                            break;
                        }
                        case XMLStreamReader.END_ELEMENT: {
                            if (elementStack.isEmpty()) {
                                return new org.exist.backup.ErrorReport.ResourceError(ErrorReport.NODE_HIERARCHY,
                                        "Error in node hierarchy: received END_ELEMENT event but stack was empty!");
                            }
                            final ElementNode lastElem = elementStack.pop();
                            if (lastElem.childCount != lastElem.elem.getChildCount()) {
                                return new ErrorReport.ResourceError(org.exist.backup.ErrorReport.NODE_HIERARCHY,
                                        "Element reports incorrect child count: expected " + lastElem.elem.getChildCount() + " but found " + lastElem.childCount);
                            }
                            break;
                        }
                        case XMLStreamReader.START_ELEMENT: {
                            if (nodeId.getTreeLevel() <= defaultIndexDepth) {
                                // check dom.dbx btree, which maps the node id to the node's storage address:
                                // look up the node id and check if the returned storage address is correct
                                final NativeBroker.NodeRef nodeRef = new NativeBroker.NodeRef(doc.getDocId(), nodeId);
                                try {
                                    final long p = domDb.findValue(nodeRef);
                                    if (p != reader.getCurrentPosition()) {
                                        final Value v = domDb.get(p);
                                        if (v == null) {
                                            return new ErrorReport.IndexError(ErrorReport.DOM_INDEX,
                                                    "Failed to access node " + nodeId + " through dom.dbx index. Wrong storage address. Expected: " + p + "; got: " + reader.getCurrentPosition() + " - ", doc.getDocId());
                                        }
                                    }
                                } catch (final Exception e) {
                                    e.printStackTrace();
                                    return new ErrorReport.IndexError(ErrorReport.DOM_INDEX,
                                            "Failed to access node " + nodeId + " through dom.dbx index.", e, doc.getDocId());
                                }
                            }
                            final IStoredNode node = reader.getNode();
                            if (node.getNodeType() != Node.ELEMENT_NODE) {
                                return new org.exist.backup.ErrorReport.ResourceError(ErrorReport.INCORRECT_NODE_TYPE,
                                        "Expected an element node, received node of type " + node.getNodeType());
                            }
                            elementStack.push(new ElementNode((ElementImpl) node));
                            attribsAllowed = true;
                            attributeCount = 0;
                            expectedAttribs = reader.getAttributeCount();
                            break;
                        }
                        default: {
                            if (attribsAllowed) {
                                if (attributeCount != expectedAttribs) {
                                    return new org.exist.backup.ErrorReport.ResourceError(ErrorReport.INCORRECT_NODE_TYPE,
                                            "Wrong number of attributes. Expected: " + expectedAttribs + "; found: " + attributeCount);
                                }
                            }
                            attribsAllowed = false;
                            break;
                        }
                    }
                }
                if (!elementStack.isEmpty()) {
                    return new org.exist.backup.ErrorReport.ResourceError(ErrorReport.NODE_HIERARCHY,
                            "Error in node hierarchy: reached end of tree but stack was not empty!");
                }
                return null;
            } catch (final IOException | XMLStreamException e) {
                e.printStackTrace();
                return new org.exist.backup.ErrorReport.ResourceError(ErrorReport.RESOURCE_ACCESS_FAILED, e.getMessage(), e);
            } finally {
                elementStack.clear();
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final XMLStreamException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }.run();
}
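For context, a minimal caller sketch (not taken from the eXist sources) of how the value returned by checkXMLTree might be handled; how the ConsistencyCheck instance and the DocumentImpl are obtained is assumed here:

// Hypothetical caller sketch: checkXMLTree returns null for a consistent
// document, otherwise an ErrorReport describing the first inconsistency found.
final ErrorReport report = consistencyCheck.checkXMLTree(doc);
if (report == null) {
    // the persistent DOM of the document is consistent
} else {
    // the report carries an error code (e.g. ErrorReport.NODE_HIERARCHY)
    // and a human-readable message
    System.err.println(report.toString());
}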
Use of org.exist.storage.dom.DOMFile in project exist by eXist-db.
The class RecoveryTest2, method store.
@Test
public void store() throws DatabaseConfigurationException, EXistException, PermissionDeniedException, IOException, SAXException, BTreeException, LockException {
    BrokerPool.FORCE_CORRUPTION = true;
    final BrokerPool pool = startDb();
    final TransactionManager transact = pool.getTransactionManager();
    try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()));
            final Txn transaction = transact.beginTransaction()) {

        Collection root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI);
        assertNotNull(root);
        broker.saveCollection(transaction, root);

        Collection test2 = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI2);
        assertNotNull(test2);
        broker.saveCollection(transaction, test2);

        DOMFile domDb = ((NativeBroker) broker).getDOMFile();
        assertNotNull(domDb);
        try (final Writer writer = new StringWriter()) {
            domDb.dump(writer);
        }

        // store some documents. Will be replaced below
        final Path dir = Paths.get(xmlDir);
        final List<Path> docs = FileUtils.list(dir);
        for (final Path f : docs) {
            broker.storeDocument(transaction, XmldbURI.create(FileUtils.fileName(f)), new InputSource(f.toUri().toASCIIString()), MimeType.XML_TYPE, test2);
        }

        transact.commit(transaction);
    }
}
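Because store() runs with BrokerPool.FORCE_CORRUPTION = true, the pool is meant to skip a clean shutdown so that the journal has to be replayed when the database is next started. A hedged sketch of what the paired read phase might look like; the document name is a placeholder and startDb() is the same helper used in store() above:

// Hypothetical read phase: restarting the pool triggers journal recovery,
// after which the documents stored above should be readable again.
BrokerPool.FORCE_CORRUPTION = false;
final BrokerPool pool = startDb();
try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()));
        final LockedDocument doc = broker.getXMLResource(
                TestConstants.TEST_COLLECTION_URI2.append("sample.xml"), LockMode.READ_LOCK)) {   // placeholder name
    assertNotNull(doc);   // the document survived recovery
}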
Use of org.exist.storage.dom.DOMFile in project exist by eXist-db.
The class DOMFileRecoverTest, method add.
@Test
public void add() throws EXistException, ReadOnlyException, TerminatedException, IOException, BTreeException {
    BrokerPool.FORCE_CORRUPTION = false;
    final BrokerPool pool = existEmbeddedServer.getBrokerPool();
    final NodeIdFactory idFact = pool.getNodeFactory();
    try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) {
        // Add some random data and force db corruption
        broker.flush();
        final DOMFile domDb = ((NativeBroker) broker).getDOMFile();
        domDb.setOwnerObject(this);
        final TransactionManager mgr = pool.getTransactionManager();
        long firstToRemove = -1;

        try (final Txn txn = mgr.beginTransaction()) {
            // put 10000 values into the btree
            for (int i = 1; i <= 10000; i++) {
                byte[] data = ("Value" + i).getBytes();
                NodeId id = idFact.createInstance(i);
                long addr = domDb.put(txn, new NativeBroker.NodeRef(500, id), data);
                // TODO : test addr ?
                if (i == 1) {
                    firstToRemove = addr;
                }
            }
            domDb.closeDocument();

            // remove all
            NativeBroker.NodeRef ref = new NativeBroker.NodeRef(500);
            assertNotNull(ref);
            IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
            assertNotNull(idx);
            domDb.remove(txn, idx, null);
            domDb.removeAll(txn, firstToRemove);

            // put some more
            for (int i = 1; i <= 10000; i++) {
                byte[] data = ("Value" + i).getBytes();
                @SuppressWarnings("unused")
                long addr = domDb.put(txn, new NativeBroker.NodeRef(500, idFact.createInstance(i)), data);
                // TODO : test addr ?
            }
            domDb.closeDocument();
            mgr.commit(txn);
        }

        try (final Txn txn = mgr.beginTransaction()) {
            // put 1000 new values into the btree
            for (int i = 1; i <= 1000; i++) {
                byte[] data = ("Value" + i).getBytes();
                long addr = domDb.put(txn, new NativeBroker.NodeRef(501, idFact.createInstance(i)), data);
                // TODO : test addr ?
                if (i == 1) {
                    firstToRemove = addr;
                }
            }
            domDb.closeDocument();
            mgr.commit(txn);
        }

        // the following transaction is not committed and will be rolled back during recovery
        try (final Txn txn = mgr.beginTransaction()) {
            for (int i = 1; i <= 200; i++) {
                domDb.remove(txn, new NativeBroker.NodeRef(500, idFact.createInstance(i)));
            }
            final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, new NativeBroker.NodeRef(501));
            domDb.remove(txn, idx, null);
            domDb.removeAll(txn, firstToRemove);
            // Don't commit...
            mgr.commit(txn);
        }

        pool.getJournalManager().get().flush(true, false);
        Writer writer = new StringWriter();
        domDb.dump(writer);
    }
}
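The test exercises the dom.dbx lookup path only indirectly. As a minimal sketch, the read-back pattern already shown in ConsistencyCheck above (findValue followed by get) could be used to inspect a key afterwards; the docId 500 and node id 1 mirror the values used in the test, everything else is assumed, and whether a given key is still present depends on which of the transactions above were committed:

// Hypothetical lookup sketch: resolve a NodeId to its storage address via the
// dom.dbx btree, then fetch the stored value at that address (same pattern as
// domDb.findValue(...) / domDb.get(...) in ConsistencyCheck above).
final NativeBroker.NodeRef key = new NativeBroker.NodeRef(500, idFact.createInstance(1));
final long addr = domDb.findValue(key);      // assumed to be negative when the key is absent
if (addr >= 0) {
    final Value v = domDb.get(addr);         // the raw bytes written by domDb.put(...) above
}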
Use of org.exist.storage.dom.DOMFile in project exist by eXist-db.
The class RecoveryTest, method verify.
private void verify(final BrokerPool pool) throws EXistException, PermissionDeniedException, SAXException, XPathException, IOException, BTreeException, LockException {
    try (final DBBroker broker = pool.get(Optional.of(pool.getSecurityManager().getSystemSubject()))) {
        final Serializer serializer = broker.borrowSerializer();
        try {
            try (final LockedDocument lockedDoc = broker.getXMLResource(XmldbURI.ROOT_COLLECTION_URI.append("test/test2/hamlet.xml"), LockMode.READ_LOCK)) {
                assertNotNull("Document '" + XmldbURI.ROOT_COLLECTION + "/test/test2/hamlet.xml' should not be null", lockedDoc);
                final String data = serializer.serialize(lockedDoc.getDocument());
                assertNotNull(data);
            }

            try (final LockedDocument lockedDoc = broker.getXMLResource(XmldbURI.ROOT_COLLECTION_URI.append("test/test2/test_string.xml"), LockMode.READ_LOCK)) {
                assertNotNull("Document '" + XmldbURI.ROOT_COLLECTION + "/test/test2/test_string.xml' should not be null", lockedDoc);
                final String data = serializer.serialize(lockedDoc.getDocument());
                assertNotNull(data);
            }

            final String lastSampleName = SAMPLES.getShakespeareXmlSampleNames()[SAMPLES.getShakespeareXmlSampleNames().length - 1];
            try (final LockedDocument lockedDoc = broker.getXMLResource(TestConstants.TEST_COLLECTION_URI2.append(lastSampleName), LockMode.READ_LOCK)) {
                assertNull("Document '" + XmldbURI.ROOT_COLLECTION + "/test/test2/" + lastSampleName + "' should not exist anymore", lockedDoc);
            }

            final XQuery xquery = pool.getXQueryService();
            assertNotNull(xquery);
            final Sequence seq = xquery.execute(broker, "//SPEECH[contains(LINE, 'king')]", null);
            assertNotNull(seq);
            for (final SequenceIterator i = seq.iterate(); i.hasNext(); ) {
                final Item next = i.nextItem();
                final String value = serializer.serialize((NodeValue) next);
            }
        } finally {
            broker.returnSerializer(serializer);
        }

        try (final LockedDocument lockedBinDoc = broker.getXMLResource(TestConstants.TEST_COLLECTION_URI2.append(TestConstants.TEST_BINARY_URI), LockMode.READ_LOCK)) {
            assertNotNull("Binary document is null", lockedBinDoc);
            final BinaryDocument binDoc = (BinaryDocument) lockedBinDoc.getDocument();
            try (final InputStream is = broker.getBinaryResource(binDoc)) {
                final byte[] bdata = new byte[(int) binDoc.getContentLength()];
                is.read(bdata);
                final String data = new String(bdata);
                assertNotNull(data);
            }
        }

        final DOMFile domDb = ((NativeBroker) broker).getDOMFile();
        assertNotNull(domDb);
        try (final Writer writer = new StringWriter()) {
            domDb.dump(writer);
        }

        final TransactionManager transact = pool.getTransactionManager();
        try (final Txn transaction = transact.beginTransaction()) {
            try (final Collection root = broker.openCollection(TestConstants.TEST_COLLECTION_URI, LockMode.WRITE_LOCK)) {
                assertNotNull(root);
                transaction.acquireCollectionLock(() -> broker.getBrokerPool().getLockManager().acquireCollectionWriteLock(root.getURI()));
                broker.removeCollection(transaction, root);
            }
            transact.commit(transaction);
        }
    }
}
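verify() only makes sense once the database has been restarted so that journal recovery has run. A sketch of how such a driver test might look, assuming a restart helper analogous to startDb() in RecoveryTest2 above; this is not the actual RecoveryTest code:

// Hypothetical driver: restart the pool (triggering recovery of the journal
// written by the earlier store phase), then check the recovered state.
@Test
public void read() throws Exception {
    BrokerPool.FORCE_CORRUPTION = false;
    final BrokerPool pool = startDb();   // assumed restart helper
    verify(pool);
}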
Use of org.exist.storage.dom.DOMFile in project exist by eXist-db.
The class Statistics, method generateIndexStatistics.
/**
 * Generate index statistics.
 *
 * @param conf the configuration to read the index files from
 * @param indexStats the map to which the index statistics are added, keyed by index file name
 */
public static void generateIndexStatistics(Configuration conf, Map<String, IndexStats> indexStats) {
    final DOMFile dom = (DOMFile) conf.getProperty(DOMFile.CONFIG_KEY_FOR_FILE);
    if (dom != null) {
        indexStats.put(DOMFile.FILE_NAME, new IndexStats(dom));
    }

    BFile db = (BFile) conf.getProperty(CollectionStore.FILE_KEY_IN_CONFIG);
    if (db != null) {
        indexStats.put(CollectionStore.FILE_NAME, new IndexStats(db));
    }

    db = (BFile) conf.getProperty(NativeValueIndex.FILE_KEY_IN_CONFIG);
    if (db != null) {
        indexStats.put(NativeValueIndex.FILE_NAME, new IndexStats(db));
    }
}
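A minimal invocation sketch (assumed, not from the eXist sources); pool.getConfiguration() is an assumption about where the Configuration comes from, and imports are omitted as in the snippets above:

// Hypothetical usage: collect per-file index statistics into a map keyed by file name.
final Map<String, IndexStats> stats = new HashMap<>();
Statistics.generateIndexStatistics(pool.getConfiguration(), stats);
final IndexStats domStats = stats.get(DOMFile.FILE_NAME);   // statistics for dom.dbx, if configured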