Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by apache.
Class LSMRTreeWithAntiMatterTuplesSearchCursor, method open().
@Override
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
    LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
    cmp = lsmInitialState.getHilbertCmp();
    btreeCmp = lsmInitialState.getBTreeCmp();
    lsmHarness = lsmInitialState.getLSMHarness();
    comparatorFields = lsmInitialState.getComparatorFields();
    operationalComponents = lsmInitialState.getOperationalComponents();
    rtreeSearchPredicate = (SearchPredicate) searchPred;
    searchCallback = lsmInitialState.getSearchOperationCallback();
    includeMutableComponent = false;
    numMutableComponents = 0;
    int numImmutableComponents = 0;
    for (ILSMComponent component : operationalComponents) {
        if (component.getType() == LSMComponentType.MEMORY) {
            includeMutableComponent = true;
            numMutableComponents++;
        } else {
            numImmutableComponents++;
        }
    }
    if (includeMutableComponent) {
        btreeRangePredicate = new RangePredicate(null, null, true, true, btreeCmp, btreeCmp);
    }
    mutableRTreeCursors = new RTreeSearchCursor[numMutableComponents];
    mutableRTreeAccessors = new ITreeIndexAccessor[numMutableComponents];
    btreeCursors = new BTreeRangeSearchCursor[numMutableComponents];
    btreeAccessors = new ITreeIndexAccessor[numMutableComponents];
    for (int i = 0; i < numMutableComponents; i++) {
        ILSMComponent component = operationalComponents.get(i);
        RTree rtree = ((LSMRTreeMemoryComponent) component).getRTree();
        BTree btree = ((LSMRTreeMemoryComponent) component).getBTree();
        mutableRTreeCursors[i] = new RTreeSearchCursor(
                (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
                (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
        btreeCursors[i] = new BTreeRangeSearchCursor(
                (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame(), false);
        btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        mutableRTreeAccessors[i] = rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    }
    rangeCursors = new RTreeSearchCursor[numImmutableComponents];
    ITreeIndexAccessor[] immutableRTreeAccessors = new ITreeIndexAccessor[numImmutableComponents];
    int j = 0;
    for (int i = numMutableComponents; i < operationalComponents.size(); i++) {
        ILSMComponent component = operationalComponents.get(i);
        rangeCursors[j] = new RTreeSearchCursor(
                (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
                (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
        RTree rtree = ((LSMRTreeDiskComponent) component).getRTree();
        immutableRTreeAccessors[j] = rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        immutableRTreeAccessors[j].search(rangeCursors[j], searchPred);
        j++;
    }
    searchNextCursor();
    setPriorityQueueComparator();
    initPriorityQueue();
    open = true;
}
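The RangePredicate built above is deliberately open-ended (null low and high keys, both inclusive); the cursor narrows it later when it probes the in-memory buddy BTrees for anti-matter entries. What follows is only a hedged sketch of such a probe, not the class's actual code; it assumes a candidate RTree tuple named rtreeTuple and reuses the fields initialized in open().

// Hedged sketch (not the real implementation): turn the open-ended predicate into a
// point predicate by setting both keys to the candidate tuple, then probe component i.
btreeRangePredicate.setLowKey(rtreeTuple, true);
btreeRangePredicate.setHighKey(rtreeTuple, true);
btreeCursors[i].reset();
btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
try {
    if (btreeCursors[i].hasNext()) {
        // Assumption: a hit in the buddy BTree marks the RTree tuple as deleted (anti-matter).
    }
} finally {
    btreeCursors[i].close();
}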
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by apache.
Class LSMRTree, method flush().
@Override
public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
    LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
    LSMRTreeMemoryComponent flushingComponent = (LSMRTreeMemoryComponent) flushOp.getFlushingComponent();
    // The renaming order is critical because we assume this ordering when we read the
    // file names while opening the tree. The RTree should be renamed before the BTree.
    // Scan the in-memory RTree.
    ITreeIndexAccessor memRTreeAccessor =
            flushingComponent.getRTree().createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    RTreeSearchCursor rtreeScanCursor = (RTreeSearchCursor) memRTreeAccessor.createSearchCursor(false);
    SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
    memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
    LSMRTreeDiskComponent component = createDiskComponent(componentFactory, flushOp.getTarget(),
            flushOp.getBTreeTarget(), flushOp.getBloomFilterTarget(), true);
    // Count the number of tuples in the buddy BTree.
    ITreeIndexAccessor memBTreeAccessor =
            flushingComponent.getBTree().createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
    IIndexCursor btreeCountingCursor = ((BTreeAccessor) memBTreeAccessor).createCountingSearchCursor();
    memBTreeAccessor.search(btreeCountingCursor, btreeNullPredicate);
    long numBTreeTuples = 0L;
    try {
        while (btreeCountingCursor.hasNext()) {
            btreeCountingCursor.next();
            ITupleReference countTuple = btreeCountingCursor.getTuple();
            numBTreeTuples = IntegerPointable.getInteger(countTuple.getFieldData(0), countTuple.getFieldStart(0));
        }
    } finally {
        btreeCountingCursor.close();
    }
    ILSMDiskComponentBulkLoader componentBulkLoader =
            createComponentBulkLoader(component, 1.0f, false, numBTreeTuples, false, false);
    ITreeIndexCursor cursor;
    IBinaryComparatorFactory[] linearizerArray = { linearizer };
    TreeTupleSorter rTreeTupleSorter = new TreeTupleSorter(flushingComponent.getRTree().getFileId(), linearizerArray,
            rtreeLeafFrameFactory.createFrame(), rtreeLeafFrameFactory.createFrame(),
            flushingComponent.getRTree().getBufferCache(), comparatorFields);
    // Bulk-load the tuples from the in-memory tree into the new disk RTree.
    boolean isEmpty = true;
    try {
        while (rtreeScanCursor.hasNext()) {
            isEmpty = false;
            rtreeScanCursor.next();
            rTreeTupleSorter.insertTupleEntry(rtreeScanCursor.getPageId(), rtreeScanCursor.getTupleOffset());
        }
    } finally {
        rtreeScanCursor.close();
    }
    rTreeTupleSorter.sort();
    cursor = rTreeTupleSorter;
    if (!isEmpty) {
        try {
            while (cursor.hasNext()) {
                cursor.next();
                ITupleReference frameTuple = cursor.getTuple();
                componentBulkLoader.add(frameTuple);
            }
        } finally {
            cursor.close();
        }
    }
    // Scan the in-memory BTree.
    IIndexCursor btreeScanCursor = memBTreeAccessor.createSearchCursor(false);
    memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
    try {
        while (btreeScanCursor.hasNext()) {
            btreeScanCursor.next();
            ITupleReference frameTuple = btreeScanCursor.getTuple();
            componentBulkLoader.delete(frameTuple);
        }
    } finally {
        btreeScanCursor.close();
    }
    if (component.getLSMComponentFilter() != null) {
        List<ITupleReference> filterTuples = new ArrayList<>();
        filterTuples.add(flushingComponent.getLSMComponentFilter().getMinTuple());
        filterTuples.add(flushingComponent.getLSMComponentFilter().getMaxTuple());
        getFilterManager().updateFilter(component.getLSMComponentFilter(), filterTuples);
        getFilterManager().writeFilter(component.getLSMComponentFilter(), component.getRTree());
    }
    // Note: if we change the filter to write to the metadata object, the if block above is no longer needed.
    flushingComponent.getMetadata().copy(component.getMetadata());
    componentBulkLoader.end();
    return component;
}
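flush() uses the key-less RangePredicate twice: first with a counting cursor to estimate the number of buddy-BTree tuples for the component bulk loader, then with a regular scan to replay the deleted keys against the new disk component. Below is a minimal hedged sketch of the counting pattern in isolation; btreeAccessor is an assumed BTreeAccessor obtained as in the method above.

// Hedged sketch: count all entries of a BTree with a key-less RangePredicate, mirroring
// the buddy-BTree count in flush(); `btreeAccessor` is an assumed BTreeAccessor instance.
RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
IIndexCursor countingCursor = btreeAccessor.createCountingSearchCursor();
btreeAccessor.search(countingCursor, nullPred);
long count = 0L;
try {
    while (countingCursor.hasNext()) {
        countingCursor.next();
        ITupleReference t = countingCursor.getTuple();
        // As in flush(), the count is read from the first field of the returned tuple.
        count = IntegerPointable.getInteger(t.getFieldData(0), t.getFieldStart(0));
    }
} finally {
    countingCursor.close();
}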
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by apache.
Class OrderedIndexExamplesTest, method rangeSearch().
protected void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
        ISerializerDeserializer[] fieldSerdes, ITupleReference lowKey, ITupleReference highKey,
        ITupleReference minFilterTuple, ITupleReference maxFilterTuple) throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
        String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
        LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
    }
    ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
    MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, lowKey);
    MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, highKey);
    RangePredicate rangePred;
    if (minFilterTuple != null && maxFilterTuple != null) {
        rangePred = new RangePredicate(lowKey, highKey, true, true, lowKeySearchCmp, highKeySearchCmp,
                minFilterTuple, maxFilterTuple);
    } else {
        rangePred = new RangePredicate(lowKey, highKey, true, true, lowKeySearchCmp, highKeySearchCmp);
    }
    indexAccessor.search(rangeCursor, rangePred);
    try {
        while (rangeCursor.hasNext()) {
            rangeCursor.next();
            ITupleReference frameTuple = rangeCursor.getTuple();
            String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(rec);
            }
        }
    } finally {
        rangeCursor.close();
    }
}
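The two boolean arguments passed as true, true above are the low-key and high-key inclusiveness flags, giving a closed interval (the setLowKey(tuple, true) calls in the worker snippet further down point to the same meaning). As a hedged variation on the helper, reusing its parameter names, an exclusive range search would only flip those flags:

// Hedged sketch: the same search as rangeSearch() but with exclusive bounds (tuples
// strictly between lowKey and highKey); all names come from the method's parameters.
MultiComparator lowCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, lowKey);
MultiComparator highCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, highKey);
RangePredicate exclusivePred = new RangePredicate(lowKey, highKey, false, false, lowCmp, highCmp);
IIndexCursor cursor = indexAccessor.createSearchCursor(false);
indexAccessor.search(cursor, exclusivePred);
try {
    while (cursor.hasNext()) {
        cursor.next();
        LOGGER.info(TupleUtils.printTuple(cursor.getTuple(), fieldSerdes));
    }
} finally {
    cursor.close();
}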
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by apache.
Class OrderedIndexExamplesTest, method orderedScan().
protected void orderedScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Ordered Scan:");
    }
    IIndexCursor scanCursor = indexAccessor.createSearchCursor(false);
    RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
    indexAccessor.search(scanCursor, nullPred);
    try {
        while (scanCursor.hasNext()) {
            scanCursor.next();
            ITupleReference frameTuple = scanCursor.getTuple();
            String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(rec);
            }
        }
    } finally {
        scanCursor.close();
    }
}
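The same key-less predicate works unchanged against an LSM-based accessor. A hedged sketch, assuming an LSMTreeIndexAccessor named lsmAccessor as in the LSMBTreeTestWorker snippet that follows:

// Hedged sketch: the identical null-key RangePredicate drives a full ordered scan over
// an LSM BTree; `lsmAccessor` is an assumed LSMTreeIndexAccessor (see the next snippet).
RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
IIndexCursor scanCursor = lsmAccessor.createSearchCursor(false);
lsmAccessor.search(scanCursor, nullPred);
try {
    while (scanCursor.hasNext()) {
        scanCursor.next();
        ITupleReference tuple = scanCursor.getTuple();
        // Process tuple; the LSM cursor merges results across memory and disk components.
    }
} finally {
    scanCursor.close();
}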
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by apache.
Class LSMBTreeTestWorker, method performOp().
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    LSMTreeIndexAccessor accessor = (LSMTreeIndexAccessor) indexAccessor;
    IIndexCursor searchCursor = accessor.createSearchCursor(false);
    LSMBTreeOpContext concreteCtx = (LSMBTreeOpContext) accessor.getCtx();
    MultiComparator cmp = concreteCtx.getCmp();
    RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
    switch (op) {
        case INSERT:
            try {
                accessor.insert(tuple);
            } catch (HyracksDataException e) {
                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                    // Ignore duplicate keys, since we get random tuples.
                    throw e;
                }
            }
            break;
        case DELETE:
            // Create a tuple reference with only key fields.
            deleteTb.reset();
            for (int i = 0; i < numKeyFields; i++) {
                deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
            }
            deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
            try {
                accessor.delete(deleteTuple);
            } catch (HyracksDataException e) {
                // Ignore non-existent keys, since we get random tuples.
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                    throw e;
                }
            }
            break;
        case UPDATE:
            try {
                accessor.update(tuple);
            } catch (HyracksDataException e) {
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
                        && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
                    // Ignore the index-not-updatable exception, which occurs when numKeys == numFields.
                    throw e;
                }
            }
            break;
        case POINT_SEARCH:
            searchCursor.reset();
            rangePred.setLowKey(tuple, true);
            rangePred.setHighKey(tuple, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case SCAN:
            searchCursor.reset();
            rangePred.setLowKey(null, true);
            rangePred.setHighKey(null, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case MERGE:
            accessor.scheduleMerge(NoOpIOOperationCallbackFactory.INSTANCE.createIoOpCallback(),
                    lsmBTree.getImmutableComponents());
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
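The consumeCursorTuples helper is not shown in this snippet; a minimal hedged sketch of what such a drain-and-close helper typically looks like (the actual LSMBTreeTestWorker method may differ) is:

// Hedged sketch of a cursor-draining helper like the consumeCursorTuples(...) call above;
// it simply iterates over the results and closes the cursor.
private void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException {
    try {
        while (cursor.hasNext()) {
            cursor.next();
        }
    } finally {
        cursor.close();
    }
}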