Use of org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor in project asterixdb by Apache.
The class LSMRTreeWithAntiMatterTuples, method createMergeOperation.
@Override
protected ILSMIOOperation createMergeOperation(AbstractLSMIndexOperationContext opCtx, List<ILSMComponent> mergingComponents,
        LSMComponentFileReferences mergeFileRefs, ILSMIOOperationCallback callback) throws HyracksDataException {
    // Keep (anti-matter) deleted tuples only when the merge excludes the oldest disk component,
    // since older, unmerged components may still hold records that those tuples cancel out.
    boolean returnDeletedTuples = false;
    if (mergingComponents.get(mergingComponents.size() - 1) != diskComponents.get(diskComponents.size() - 1)) {
        returnDeletedTuples = true;
    }
    ITreeIndexCursor cursor = new LSMRTreeWithAntiMatterTuplesSearchCursor(opCtx, returnDeletedTuples);
    ILSMIndexAccessor accessor = new LSMTreeIndexAccessor(getLsmHarness(), opCtx, cursorFactory);
    return new MergeOperation(accessor, mergeFileRefs.getInsertIndexFileReference(), callback, fileManager.getBaseDir(),
            mergingComponents, cursor);
}
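The returnDeletedTuples flag above encodes a single rule: deleted (anti-matter) tuples must survive the merge whenever the merge leaves out the oldest disk component. A minimal standalone sketch of that check follows; the class and method names and the use of raw java.util.List are illustrative only and not part of the AsterixDB code.

import java.util.List;

final class MergePolicyHints {
    private MergePolicyHints() {
    }

    // True when the merge excludes the oldest disk component; in that case the merged
    // component must retain deleted/anti-matter tuples so they can still cancel matching
    // records in the older components that are not part of this merge.
    static boolean mustReturnDeletedTuples(List<?> mergingComponents, List<?> diskComponents) {
        return mergingComponents.get(mergingComponents.size() - 1) != diskComponents.get(diskComponents.size() - 1);
    }
}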
Use of org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor in project asterixdb by Apache.
The class LSMBTreeTestWorker, method performOp.
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    LSMTreeIndexAccessor accessor = (LSMTreeIndexAccessor) indexAccessor;
    IIndexCursor searchCursor = accessor.createSearchCursor(false);
    LSMBTreeOpContext concreteCtx = (LSMBTreeOpContext) accessor.getCtx();
    MultiComparator cmp = concreteCtx.getCmp();
    RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
    switch (op) {
        case INSERT:
            try {
                accessor.insert(tuple);
            } catch (HyracksDataException e) {
                // Ignore duplicate keys, since we get random tuples.
                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                    throw e;
                }
            }
            break;
        case DELETE:
            // Create a tuple reference with only key fields.
            deleteTb.reset();
            for (int i = 0; i < numKeyFields; i++) {
                deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
            }
            deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
            try {
                accessor.delete(deleteTuple);
            } catch (HyracksDataException e) {
                // Ignore non-existent keys, since we get random tuples.
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                    throw e;
                }
            }
            break;
        case UPDATE:
            try {
                accessor.update(tuple);
            } catch (HyracksDataException e) {
                // Ignore non-existent keys and the not-updatable exception raised when numKeys == numFields.
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
                        && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
                    throw e;
                }
            }
            break;
        case POINT_SEARCH:
            searchCursor.reset();
            rangePred.setLowKey(tuple, true);
            rangePred.setHighKey(tuple, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case SCAN:
            searchCursor.reset();
            rangePred.setLowKey(null, true);
            rangePred.setHighKey(null, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case MERGE:
            accessor.scheduleMerge(NoOpIOOperationCallbackFactory.INSTANCE.createIoOpCallback(),
                    lsmBTree.getImmutableComponents());
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
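Both search cases drain the cursor through consumeCursorTuples, which the snippet does not show. A plausible sketch of that helper, assuming only the standard IIndexCursor iteration methods (hasNext, next, close); the exact body in the test worker may differ.

private void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException {
    try {
        // Iterate to the end so the search path is fully exercised; the tuples themselves
        // are discarded because the worker only checks that operations complete without error.
        while (cursor.hasNext()) {
            cursor.next();
        }
    } finally {
        cursor.close();
    }
}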
Use of org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor in project asterixdb by Apache.
The class ExternalBTree, method scheduleMerge.
// The only reason to override the following method is that it uses a different context object.
// In addition, determining whether or not to keep deleted tuples is different here.
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException {
    ExternalBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, -1);
    opCtx.setOperation(IndexOperation.MERGE);
    List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
    boolean returnDeletedTuples = false;
    if (version == 0) {
        if (ctx.getComponentHolder().get(ctx.getComponentHolder().size() - 1) != diskComponents.get(diskComponents.size() - 1)) {
            returnDeletedTuples = true;
        }
    } else {
        if (ctx.getComponentHolder().get(ctx.getComponentHolder().size() - 1) != secondDiskComponents.get(secondDiskComponents.size() - 1)) {
            returnDeletedTuples = true;
        }
    }
    ITreeIndexCursor cursor = new LSMBTreeRangeSearchCursor(opCtx, returnDeletedTuples);
    BTree firstBTree = ((LSMBTreeDiskComponent) mergingComponents.get(0)).getBTree();
    BTree lastBTree = ((LSMBTreeDiskComponent) mergingComponents.get(mergingComponents.size() - 1)).getBTree();
    FileReference firstFile = firstBTree.getFileReference();
    FileReference lastFile = lastBTree.getFileReference();
    LSMComponentFileReferences relMergeFileRefs =
            fileManager.getRelMergeFileReference(firstFile.getFile().getName(), lastFile.getFile().getName());
    ILSMIndexAccessor accessor = new LSMTreeIndexAccessor(getLsmHarness(), opCtx, cursorFactory);
    ioScheduler.scheduleOperation(new LSMBTreeMergeOperation(accessor, mergingComponents, cursor,
            relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getBloomFilterFileReference(), callback,
            fileManager.getBaseDir()));
}
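ExternalBTree repeats the oldest-component test from the first snippet, but runs it against one of two disk-component lists selected by version. A version-aware variant of the earlier helper sketch (names again illustrative, not from the AsterixDB code):

import java.util.List;

final class ExternalMergeHints {
    private ExternalMergeHints() {
    }

    // An external index keeps two disk-component lists (one per version), so the
    // "does the merge include the oldest component?" test must be run against the
    // list that the given version actually reads from.
    static boolean mustReturnDeletedTuples(int version, List<?> mergingComponents, List<?> diskComponents,
            List<?> secondDiskComponents) {
        List<?> active = (version == 0) ? diskComponents : secondDiskComponents;
        return mergingComponents.get(mergingComponents.size() - 1) != active.get(active.size() - 1);
    }
}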
Use of org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor in project asterixdb by Apache.
The class LSMRTreeWithAntiMatterTuplesTestWorker, method performOp.
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    LSMTreeIndexAccessor accessor = (LSMTreeIndexAccessor) indexAccessor;
    ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
    LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) accessor.getCtx();
    MultiComparator cmp = concreteCtx.getCurrentRTreeOpContext().getCmp();
    SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
    switch (op) {
        case INSERT:
            rearrangeTuple(tuple, cmp);
            accessor.insert(rearrangedTuple);
            break;
        case DELETE:
            rearrangeTuple(tuple, cmp);
            accessor.delete(rearrangedTuple);
            break;
        case SCAN:
            searchCursor.reset();
            rangePred.setSearchKey(null);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case MERGE:
            accessor.scheduleMerge(NoOpIOOperationCallbackFactory.INSTANCE.createIoOpCallback(),
                    ((AbstractLSMRTree) lsmRTree).getImmutableComponents());
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
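For context, a hypothetical driver for either test worker: worker, tupleGen, and numOps are placeholders for whatever harness constructs the worker and supplies random tuples; only performOp and TestOperation come from the snippets above.

TestOperation[] mix = { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN, TestOperation.MERGE };
for (int i = 0; i < numOps; i++) {
    ITupleReference tuple = tupleGen.next();      // hypothetical source of random tuples
    worker.performOp(tuple, mix[i % mix.length]); // round-robin over the operations handled above
}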
Use of org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor in project asterixdb by Apache.
The class ExternalBTreeWithBuddy, method scheduleMerge.
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException {
    ILSMIndexOperationContext bctx = createOpContext(NoOpOperationCallback.INSTANCE, 0);
    bctx.setOperation(IndexOperation.MERGE);
    List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
    ITreeIndexCursor cursor = new LSMBTreeWithBuddySortedCursor(bctx, buddyBTreeFields);
    LSMComponentFileReferences relMergeFileRefs = getMergeTargetFileName(mergingComponents);
    ILSMIndexAccessor accessor = new LSMTreeIndexAccessor(getLsmHarness(), bctx,
            opCtx -> new LSMBTreeWithBuddySearchCursor(opCtx, buddyBTreeFields));
    // Since we have two lists of components, we need to know which list to check against
    // to tell whether we need to keep deleted tuples, and we need to synchronize for this.
    boolean keepDeleteTuples = false;
    if (version == 0) {
        keepDeleteTuples = mergingComponents.get(mergingComponents.size() - 1) != diskComponents.get(diskComponents.size() - 1);
    } else {
        keepDeleteTuples = mergingComponents.get(mergingComponents.size() - 1)
                != secondDiskComponents.get(secondDiskComponents.size() - 1);
    }
    ioScheduler.scheduleOperation(new LSMBTreeWithBuddyMergeOperation(accessor, mergingComponents, cursor,
            relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getDeleteIndexFileReference(),
            relMergeFileRefs.getBloomFilterFileReference(), callback, fileManager.getBaseDir(), keepDeleteTuples));
}
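Unlike the other snippets, which pass a cursorFactory field, this one supplies the accessor's cursor factory inline as a lambda. A brief sketch of the same construction with the factory bound to a local variable; the nested ICursorFactory type name is an assumption inferred from the lambda's target interface, not confirmed by the snippet.

// Assumed factory type; equivalent to the inline lambda above, pulled out for readability.
LSMTreeIndexAccessor.ICursorFactory buddyCursorFactory =
        opCtx -> new LSMBTreeWithBuddySearchCursor(opCtx, buddyBTreeFields);
ILSMIndexAccessor accessor = new LSMTreeIndexAccessor(getLsmHarness(), bctx, buddyCursorFactory);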