Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by Apache.
The class ExternalFileIndexAccessor, method open().
public void open() throws HyracksDataException {
    // Open the index and get the instance
    indexDataflowHelper.open();
    index = (ExternalBTree) indexDataflowHelper.getIndexInstance();
    // Create search key and search predicate objects
    searchKey = new ArrayTupleReference();
    searchKeyTupleBuilder = new ArrayTupleBuilder(FilesIndexDescription.FILE_KEY_SIZE);
    searchKeyTupleBuilder.reset();
    searchKeyTupleBuilder.addField(intSerde, currentFileNumber);
    searchKey.reset(searchKeyTupleBuilder.getFieldEndOffsets(), searchKeyTupleBuilder.getByteArray());
    MultiComparator searchCmp = BTreeUtils.getSearchMultiComparator(index.getComparatorFactories(), searchKey);
    searchPredicate = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
    // Create the accessor and the cursor using the passed version
    ISearchOperationCallback searchCallback =
            searchCallbackFactory.createSearchOperationCallback(indexDataflowHelper.getResource().getId(), ctx, null);
    fileIndexAccessor = index.createAccessor(searchCallback, version);
    fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false);
}
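For context, a minimal sketch of how the accessor, cursor, and equality predicate prepared in open() are typically driven; the method name lookupCurrentFile is hypothetical, and the search/iterate/close pattern mirrors the MetadataNode example further down this page.
// Hypothetical sketch (not the project's actual lookup method): run the point lookup
// prepared in open() and iterate the matching file entries.
public void lookupCurrentFile() throws HyracksDataException {
    fileIndexAccessor.search(fileIndexSearchCursor, searchPredicate);
    try {
        while (fileIndexSearchCursor.hasNext()) {
            fileIndexSearchCursor.next();
            ITupleReference fileTuple = fileIndexSearchCursor.getTuple();
            // deserialize or otherwise consume the matching file tuple here
        }
    } finally {
        fileIndexSearchCursor.close();
    }
}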
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by Apache.
The class BTreeSearchOperatorNodePushable, method createSearchPredicate().
@Override
protected ISearchPredicate createSearchPredicate() {
    ITreeIndex treeIndex = (ITreeIndex) index;
    lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), lowKey);
    highKeySearchCmp = BTreeUtils.getSearchMultiComparator(treeIndex.getComparatorFactories(), highKey);
    return new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
            highKeySearchCmp, minFilterKey, maxFilterKey);
}
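For reference, the constructor calls on this page suggest the argument order (lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp), with the extended form above appending min/max filter tuples; a small illustrative sketch reusing searchKey and searchCmp from the first example:
// Illustrative only: two common RangePredicate shapes seen in these call sites.
// Point lookup: the same tuple as low and high key, both bounds inclusive.
RangePredicate pointLookup = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
// Full scan: null keys leave the range unbounded on both ends.
RangePredicate fullScan = new RangePredicate(null, null, true, true, null, null);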
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by Apache.
The class MetadataNode, method initializeDatasetIdFactory().
@Override
public void initializeDatasetIdFactory(JobId jobId) throws MetadataException, RemoteException {
    int mostRecentDatasetId = MetadataIndexImmutableProperties.FIRST_AVAILABLE_USER_DATASET_ID;
    try {
        String resourceName = MetadataPrimaryIndexes.DATASET_DATASET.getFile().getRelativePath();
        IIndex indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        try {
            IIndexAccessor indexAccessor =
                    indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
            IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
            DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
            IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
            RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
            indexAccessor.search(rangeCursor, rangePred);
            int datasetId;
            try {
                while (rangeCursor.hasNext()) {
                    rangeCursor.next();
                    final ITupleReference ref = rangeCursor.getTuple();
                    final Dataset ds = valueExtractor.getValue(jobId, ref);
                    datasetId = ds.getDatasetId();
                    if (mostRecentDatasetId < datasetId) {
                        mostRecentDatasetId = datasetId;
                    }
                }
            } finally {
                rangeCursor.close();
            }
        } finally {
            datasetLifecycleManager.close(resourceName);
        }
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
    DatasetIdFactory.initialize(mostRecentDatasetId);
}
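The open/search/iterate/close sequence above recurs in several call sites on this page; a hypothetical helper (names assumed, not part of MetadataNode, with java.util.function.Consumer standing in for the per-tuple work) that factors out the full-scan pattern might look like this:
// Hypothetical helper, for illustration only: full scan over an index with a null-keyed
// RangePredicate, iterating the cursor and always closing it.
private static void scanAll(IIndexAccessor accessor, IIndexCursor cursor, Consumer<ITupleReference> consumer)
        throws HyracksDataException {
    RangePredicate scanAllPred = new RangePredicate(null, null, true, true, null, null);
    accessor.search(cursor, scanAllPred);
    try {
        while (cursor.hasNext()) {
            cursor.next();
            consumer.accept(cursor.getTuple()); // consumer is assumed to finish with the tuple before the next call
        }
    } finally {
        cursor.close();
    }
}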
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by Apache.
The class LSMRTreeDeletedKeysBTreeMergeCursor, method open().
@Override
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
    LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
    cmp = lsmInitialState.getBTreeCmp();
    operationalComponents = lsmInitialState.getOperationalComponents();
    // We intentionally set the lsmHarness to null so that we don't call lsmHarness.endSearch(),
    // because we already do that when we merge r-trees.
    lsmHarness = null;
    int numBTrees = operationalComponents.size();
    rangeCursors = new IIndexCursor[numBTrees];
    RangePredicate btreePredicate = new RangePredicate(null, null, true, true, cmp, cmp);
    IIndexAccessor[] btreeAccessors = new ITreeIndexAccessor[numBTrees];
    for (int i = 0; i < numBTrees; i++) {
        ILSMComponent component = operationalComponents.get(i);
        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame();
        rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
        BTree btree = ((LSMRTreeDiskComponent) component).getBTree();
        btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        btreeAccessors[i].search(rangeCursors[i], btreePredicate);
    }
    setPriorityQueueComparator();
    initPriorityQueue();
}
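For orientation, a hypothetical consumer of the merged cursor once it has been opened; only the IIndexCursor contract (hasNext/next/getTuple/close) already used elsewhere on this page is assumed, and consumeDeletedKeys is an illustrative name.
// Hypothetical sketch: drain the merged stream of deleted keys coming from the component B-trees.
private void consumeDeletedKeys(IIndexCursor mergeCursor) throws HyracksDataException {
    try {
        while (mergeCursor.hasNext()) {
            mergeCursor.next();
            ITupleReference deletedKey = mergeCursor.getTuple();
            // process the deleted key here
        }
    } finally {
        mergeCursor.close();
    }
}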
Use of org.apache.hyracks.storage.am.btree.impls.RangePredicate in project asterixdb by Apache.
The class LSMRTreeWithAntiMatterTuples, method flush().
@Override
public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
    LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
    // Renaming order is critical because we assume that ordering when we read the file names
    // when we open the tree.
    // The RTree should be renamed before the BTree.
    LSMRTreeMemoryComponent flushingComponent = (LSMRTreeMemoryComponent) flushOp.getFlushingComponent();
    ITreeIndexAccessor memRTreeAccessor = flushingComponent.getRTree()
            .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    RTreeSearchCursor rtreeScanCursor = (RTreeSearchCursor) memRTreeAccessor.createSearchCursor(false);
    SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
    memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
    LSMRTreeDiskComponent component = createDiskComponent(componentFactory, flushOp.getTarget(), null, null, true);
    ILSMDiskComponentBulkLoader componentBulkLoader =
            createComponentBulkLoader(component, 1.0f, false, 0L, false, false);
    // Since the LSM-RTree is used as a secondary index, the primary key will be
    // the last comparator in the BTree comparators.
    TreeTupleSorter rTreeTupleSorter = new TreeTupleSorter(flushingComponent.getRTree().getFileId(),
            linearizerArray, rtreeLeafFrameFactory.createFrame(), rtreeLeafFrameFactory.createFrame(),
            flushingComponent.getRTree().getBufferCache(), comparatorFields);
    boolean isEmpty = true;
    try {
        while (rtreeScanCursor.hasNext()) {
            isEmpty = false;
            rtreeScanCursor.next();
            rTreeTupleSorter.insertTupleEntry(rtreeScanCursor.getPageId(), rtreeScanCursor.getTupleOffset());
        }
    } finally {
        rtreeScanCursor.close();
    }
    if (!isEmpty) {
        rTreeTupleSorter.sort();
    }
    // Scan the memory BTree
    ITreeIndexAccessor memBTreeAccessor = flushingComponent.getBTree()
            .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    BTreeRangeSearchCursor btreeScanCursor = (BTreeRangeSearchCursor) memBTreeAccessor.createSearchCursor(false);
    RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
    memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
    TreeTupleSorter bTreeTupleSorter = new TreeTupleSorter(flushingComponent.getBTree().getFileId(),
            linearizerArray, btreeLeafFrameFactory.createFrame(), btreeLeafFrameFactory.createFrame(),
            flushingComponent.getBTree().getBufferCache(), comparatorFields);
    isEmpty = true;
    try {
        while (btreeScanCursor.hasNext()) {
            isEmpty = false;
            btreeScanCursor.next();
            bTreeTupleSorter.insertTupleEntry(btreeScanCursor.getPageId(), btreeScanCursor.getTupleOffset());
        }
    } finally {
        btreeScanCursor.close();
    }
    if (!isEmpty) {
        bTreeTupleSorter.sort();
    }
    LSMRTreeWithAntiMatterTuplesFlushCursor cursor = new LSMRTreeWithAntiMatterTuplesFlushCursor(rTreeTupleSorter,
            bTreeTupleSorter, comparatorFields, linearizerArray);
    cursor.open(null, null);
    try {
        while (cursor.hasNext()) {
            cursor.next();
            ITupleReference frameTuple = cursor.getTuple();
            componentBulkLoader.add(frameTuple);
        }
    } finally {
        cursor.close();
    }
    if (component.getLSMComponentFilter() != null) {
        List<ITupleReference> filterTuples = new ArrayList<>();
        filterTuples.add(flushingComponent.getLSMComponentFilter().getMinTuple());
        filterTuples.add(flushingComponent.getLSMComponentFilter().getMaxTuple());
        getFilterManager().updateFilter(component.getLSMComponentFilter(), filterTuples);
        getFilterManager().writeFilter(component.getLSMComponentFilter(), component.getRTree());
    }
    flushingComponent.getMetadata().copy(component.getMetadata());
    componentBulkLoader.end();
    return component;
}
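As an aside, the flush above uses two different "match everything" predicates, one per tree type; shown side by side for clarity (illustrative only, both constructor calls copied from the method body):
// R-tree scan of the memory component: no search area, no comparator.
SearchPredicate rtreeScanAll = new SearchPredicate(null, null);
// B-tree scan of the memory component: unbounded, inclusive range with no comparators.
RangePredicate btreeScanAll = new RangePredicate(null, null, true, true, null, null);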