Use of org.sirix.page.UnorderedKeyValuePage in the sirix project (by sirixdb).
Class CacheTestHelper, method setUp.
/**
 * Sets up the cache for testing: opens a page-read transaction, fills the static
 * {@code PAGES} matrix with freshly created record pages, and puts the first page of
 * each row into the given cache.
 *
 * <p>Row layout of {@code PAGES}: slot 0 holds the "current" page, slots
 * 1..VERSIONSTORESTORE hold additional page versions for the same record-page key.
 *
 * @param cache cache to fill
 * @throws SirixException if setting up Sirix session fails
 */
public static void setUp(final Cache<Long, PageContainer> cache) throws SirixException {
  PAGE_READ_TRX = Holder.openResourceManager().getResourceManager().beginPageReadTrx();
  PAGES = new UnorderedKeyValuePage[LRUCache.CACHE_CAPACITY + 1][ResourceConfiguration.VERSIONSTORESTORE + 1];
  for (int i = 0; i < PAGES.length; i++) {
    // Current page for record-page key i, stored at slot 0 below.
    final UnorderedKeyValuePage page =
        new UnorderedKeyValuePage(i, PageKind.RECORDPAGE, Constants.NULL_ID_LONG, PAGE_READ_TRX);
    // Older versions fill slots 1..VERSIONSTORESTORE.
    // (The previous implementation also copied these references into a local `revs`
    // array that was never read; that dead code has been removed.)
    for (int j = 0; j < ResourceConfiguration.VERSIONSTORESTORE; j++) {
      PAGES[i][j + 1] =
          new UnorderedKeyValuePage(i, PageKind.RECORDPAGE, Constants.NULL_ID_LONG, PAGE_READ_TRX);
    }
    PAGES[i][0] = page;
    // The container references the same page as both complete and modified part.
    cache.put((long) i, new PageContainer(page, page));
  }
}
Use of org.sirix.page.UnorderedKeyValuePage in the sirix project (by sirixdb).
Class XdmNodeWriterTrxImpl, method rollingUpdate.
/**
 * Adapts the rolling hash of every ancestor after the current node was updated.
 *
 * <p>Walks from the current node up to the root. For the updated node itself the old hash
 * contribution is exchanged for the new one 1:1; for every ancestor the contributions are
 * weighted with {@code PRIME} before being exchanged.
 *
 * @param oldHash hash value the node had before the update, to be removed from the chain
 * @throws SirixIOException if anything weird happened
 */
private void rollingUpdate(final long oldHash) throws SirixIOException {
  final ImmutableNode newNode = getCurrentNode();
  // int hashCode() widened to long.
  final long hash = newNode.hashCode();
  final long newNodeHash = hash;
  long resultNew = hash;
  // go the path to the root
  do {
    // Fetch a modifiable copy of the node currently under the cursor.
    final Node node = (Node) getPageTransaction().prepareEntryForModification(mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1, Optional.<UnorderedKeyValuePage>empty());
    if (node.getNodeKey() == newNode.getNodeKey()) {
      // The updated node itself: swap old contribution for the new one, unweighted.
      resultNew = node.getHash() - oldHash;
      resultNew = resultNew + newNodeHash;
    } else {
      // Ancestor: contributions are weighted by PRIME. NOTE(review): operator precedence
      // means this computes node.getHash() - (oldHash * PRIME) — presumably intended to
      // mirror the + newNodeHash * PRIME below; confirm against the hashing scheme.
      resultNew = node.getHash() - oldHash * PRIME;
      resultNew = resultNew + newNodeHash * PRIME;
    }
    node.setHash(resultNew);
    // Ascend until the root has no parent to move to.
  } while (moveTo(mNodeReader.getCurrentNode().getParentKey()).hasMoved());
  // Restore the cursor to the node that was updated.
  mNodeReader.setCurrentNode(newNode);
}
Use of org.sirix.page.UnorderedKeyValuePage in the sirix project (by sirixdb).
Class XdmNodeWriterTrxImpl, method setValue.
/**
 * Replaces the value of the current node, keeping indexes and ancestor hashes in sync.
 * An empty value removes the node entirely (an empty text value is not legal per XDM).
 *
 * @param value the new value; must not be {@code null}
 * @return this transaction, for call chaining
 * @throws SirixException if the current node is not a value node or the update fails
 */
@Override
public XdmNodeWriteTrx setValue(final String value) throws SirixException {
  checkNotNull(value);
  acquireLock();
  try {
    // Only nodes carrying a value (text, attribute, ...) may be updated here.
    if (!(getCurrentNode() instanceof ValueNode)) {
      throw new SirixUsageException("setValue(String) is not allowed if current node is not an IValNode implementation!");
    }
    checkAccessAndCommit();
    // An empty value means the node must go away entirely (see XDM).
    if (value.isEmpty()) {
      remove();
      return this;
    }
    // Remove old value from indexes.
    mIndexController.notifyChange(ChangeType.DELETE, getNode(), getPathNodeKey());
    final long hashBeforeUpdate = mNodeReader.getCurrentNode().hashCode();
    final byte[] rawValue = getBytes(value);
    // Fetch a modifiable copy of the node and write the new value into it.
    final ValueNode valueNode = (ValueNode) getPageTransaction().prepareEntryForModification(
        mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1,
        Optional.<UnorderedKeyValuePage>empty());
    valueNode.setValue(rawValue);
    mNodeReader.setCurrentNode(valueNode);
    // Propagate the changed hash up the ancestor chain.
    adaptHashedWithUpdate(hashBeforeUpdate);
    // Index new value.
    mIndexController.notifyChange(ChangeType.INSERT, getNode(), getPathNodeKey());
    return this;
  } finally {
    unLock();
  }
}
Use of org.sirix.page.UnorderedKeyValuePage in the sirix project (by sirixdb).
Class XdmNodeWriterTrxImpl, method addHashAndDescendantCount.
/**
 * Adds the hash of a freshly inserted node (and updates descendant counts), dispatching
 * on the configured hash kind: ROLLING adapts the node and its parent in place,
 * POSTORDER delegates to {@link #postorderAdd()}, any other kind is a no-op.
 */
private void addHashAndDescendantCount() throws SirixIOException {
  switch(mHashKind) {
    case ROLLING:
      // Setup.
      final ImmutableNode startNode = getCurrentNode();
      final long oldDescendantCount = mNodeReader.getStructuralNode().getDescendantCount();
      // A node with no descendants contributes 1; otherwise count the new node on top.
      final long descendantCount = oldDescendantCount == 0 ? 1 : oldDescendantCount + 1;
      // Set start node.
      final long hashToAdd = mHash.hashLong(startNode.hashCode()).asLong();
      Node node = (Node) getPageTransaction().prepareEntryForModification(mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1, Optional.<UnorderedKeyValuePage>empty());
      node.setHash(hashToAdd);
      // Set parent node: fold the child's hash in, weighted by PRIME, and bump counts.
      if (startNode.hasParent()) {
        moveToParent();
        node = (Node) getPageTransaction().prepareEntryForModification(mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1, Optional.<UnorderedKeyValuePage>empty());
        node.setHash(node.getHash() + hashToAdd * PRIME);
        setAddDescendants(startNode, node, descendantCount);
      }
      // Restore the cursor to the inserted node.
      mNodeReader.setCurrentNode(startNode);
      break;
    case POSTORDER:
      postorderAdd();
      break;
    default:
      // Hashing disabled: nothing to do.
  }
}
Use of org.sirix.page.UnorderedKeyValuePage in the sirix project (by sirixdb).
Class XdmNodeWriterTrxImpl, method postorderAdd.
/**
 * Adapting the structure with a postorder hash for all ancestors after an insert.
 *
 * <p>Recomputes the hash of every node on the path from the current node to the root,
 * folding in the hashes of attributes, namespaces, and direct children with {@code PRIME}
 * weighting. The cursor is restored to the start node afterwards.
 *
 * @throws SirixIOException if anything weird happened
 */
private void postorderAdd() throws SirixIOException {
  // start with hash to add
  final ImmutableNode startNode = getCurrentNode();
  // long for adapting the hash of the parent
  long hashCodeForParent = 0;
  // adapting the parent if the current node is no structural one.
  if (!(startNode instanceof StructNode)) {
    // Non-structural node (e.g. attribute/namespace): hash it directly, then ascend so the
    // loop below starts at its structural parent.
    final Node node = (Node) getPageTransaction().prepareEntryForModification(mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1, Optional.<UnorderedKeyValuePage>empty());
    node.setHash(mHash.hashLong(mNodeReader.getCurrentNode().hashCode()).asLong());
    moveTo(mNodeReader.getCurrentNode().getParentKey());
  }
  // Cursor to root
  StructNode cursorToRoot;
  do {
    // Modifiable copy of the node whose hash is being recomputed in this iteration.
    cursorToRoot = (StructNode) getPageTransaction().prepareEntryForModification(mNodeReader.getCurrentNode().getNodeKey(), PageKind.RECORDPAGE, -1, Optional.<UnorderedKeyValuePage>empty());
    hashCodeForParent = mNodeReader.getCurrentNode().hashCode() + hashCodeForParent * PRIME;
    // Caring about attributes and namespaces if node is an element.
    if (cursorToRoot.getKind() == Kind.ELEMENT) {
      final ElementNode currentElement = (ElementNode) cursorToRoot;
      // setting the attributes and namespaces
      final int attCount = ((ElementNode) cursorToRoot).getAttributeCount();
      for (int i = 0; i < attCount; i++) {
        moveTo(currentElement.getAttributeKey(i));
        hashCodeForParent = mNodeReader.getCurrentNode().hashCode() + hashCodeForParent * PRIME;
      }
      final int nspCount = ((ElementNode) cursorToRoot).getNamespaceCount();
      for (int i = 0; i < nspCount; i++) {
        moveTo(currentElement.getNamespaceKey(i));
        hashCodeForParent = mNodeReader.getCurrentNode().hashCode() + hashCodeForParent * PRIME;
      }
      // Return the cursor to the element after visiting its attributes/namespaces.
      moveTo(cursorToRoot.getNodeKey());
    }
    // Caring about the children of a node: fold in each child's already-computed hash.
    if (moveTo(mNodeReader.getStructuralNode().getFirstChildKey()).hasMoved()) {
      do {
        hashCodeForParent = mNodeReader.getCurrentNode().getHash() + hashCodeForParent * PRIME;
      } while (moveTo(mNodeReader.getStructuralNode().getRightSiblingKey()).hasMoved());
      moveTo(mNodeReader.getStructuralNode().getParentKey());
    }
    // setting hash and resetting hash
    cursorToRoot.setHash(hashCodeForParent);
    hashCodeForParent = 0;
    // Ascend; the loop ends once the root (which has no parent) is processed.
  } while (moveTo(cursorToRoot.getParentKey()).hasMoved());
  // Restore the cursor to where the insert happened.
  mNodeReader.setCurrentNode(startNode);
}
Aggregations