Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
Class ExternalBTreeWithBuddy, method scheduleMerge:
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
        throws HyracksDataException {
    ILSMIndexOperationContext bctx = createOpContext(NoOpOperationCallback.INSTANCE, 0);
    bctx.setOperation(IndexOperation.MERGE);
    List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
    ITreeIndexCursor cursor = new LSMBTreeWithBuddySortedCursor(bctx, buddyBTreeFields);
    LSMComponentFileReferences relMergeFileRefs = getMergeTargetFileName(mergingComponents);
    ILSMIndexAccessor accessor = new LSMTreeIndexAccessor(getLsmHarness(), bctx,
            opCtx -> new LSMBTreeWithBuddySearchCursor(opCtx, buddyBTreeFields));
    // Since there are two lists of disk components, deciding whether deleted tuples must be
    // kept requires knowing which list to check against, and that check must be synchronized.
    boolean keepDeleteTuples = false;
    if (version == 0) {
        keepDeleteTuples = mergingComponents.get(mergingComponents.size() - 1)
                != diskComponents.get(diskComponents.size() - 1);
    } else {
        keepDeleteTuples = mergingComponents.get(mergingComponents.size() - 1)
                != secondDiskComponents.get(secondDiskComponents.size() - 1);
    }
    ioScheduler.scheduleOperation(new LSMBTreeWithBuddyMergeOperation(accessor, mergingComponents, cursor,
            relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getDeleteIndexFileReference(),
            relMergeFileRefs.getBloomFilterFileReference(), callback, fileManager.getBaseDir(),
            keepDeleteTuples));
}
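The version check above is easy to misread, so here is a minimal sketch of the same decision factored into a helper. This is an illustrative refactoring, not code from the project; it assumes access to the same version, diskComponents, and secondDiskComponents fields used by scheduleMerge.

// Illustrative sketch only: mirrors the keepDeleteTuples logic above.
// Deleted tuples must survive the merge unless the merge reaches the oldest
// component of whichever disk-component list the current version points at.
private boolean keepDeletedTuples(List<ILSMComponent> mergingComponents) {
    List<ILSMComponent> target = (version == 0) ? diskComponents : secondDiskComponents;
    return mergingComponents.get(mergingComponents.size() - 1) != target.get(target.size() - 1);
}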
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
Class LSMBTree, method createMergeOperation:
@Override
protected ILSMIOOperation createMergeOperation(AbstractLSMIndexOperationContext opCtx,
        List<ILSMComponent> mergingComponents, LSMComponentFileReferences mergeFileRefs,
        ILSMIOOperationCallback callback) {
    boolean returnDeletedTuples = false;
    ILSMIndexAccessor accessor = createAccessor(opCtx);
    if (mergingComponents.get(mergingComponents.size() - 1) != diskComponents.get(diskComponents.size() - 1)) {
        returnDeletedTuples = true;
    }
    ITreeIndexCursor cursor = new LSMBTreeRangeSearchCursor(opCtx, returnDeletedTuples);
    return new LSMBTreeMergeOperation(accessor, mergingComponents, cursor,
            mergeFileRefs.getInsertIndexFileReference(), mergeFileRefs.getBloomFilterFileReference(),
            callback, fileManager.getBaseDir());
}
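The same comparison as in the previous example decides whether the merge cursor must surface deleted tuples. A small worked illustration may help; the components c1 (oldest) through c3 (newest) are hypothetical, and the snippet assumes java.util.Arrays plus existing ILSMComponent instances.

// Hypothetical components, ordered newest first, purely for illustration.
List<ILSMComponent> disk = Arrays.asList(c3, c2, c1);
// Partial merge of the two newest components: the oldest merged component (c2)
// is not the oldest on disk (c1), so deletes must be returned because c1 may
// still hold older versions of the deleted keys.
List<ILSMComponent> partialMerge = Arrays.asList(c3, c2);
boolean returnDeleted = partialMerge.get(partialMerge.size() - 1) != disk.get(disk.size() - 1); // true
// Full merge down to c1: nothing older remains, so deleted tuples can be dropped.
List<ILSMComponent> fullMerge = Arrays.asList(c3, c2, c1);
boolean dropDeleted = fullMerge.get(fullMerge.size() - 1) == disk.get(disk.size() - 1); // true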
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
Class LSMRTreeWithAntiMatterTuplesTestWorker, method performOp:
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    LSMTreeIndexAccessor accessor = (LSMTreeIndexAccessor) indexAccessor;
    ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
    LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) accessor.getCtx();
    MultiComparator cmp = concreteCtx.getCurrentRTreeOpContext().getCmp();
    SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
    switch (op) {
        case INSERT:
            rearrangeTuple(tuple, cmp);
            accessor.insert(rearrangedTuple);
            break;
        case DELETE:
            rearrangeTuple(tuple, cmp);
            accessor.delete(rearrangedTuple);
            break;
        case SCAN:
            searchCursor.reset();
            rangePred.setSearchKey(null);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case MERGE:
            accessor.scheduleMerge(NoOpIOOperationCallbackFactory.INSTANCE.createIoOpCallback(),
                    ((AbstractLSMRTree) lsmRTree).getImmutableComponents());
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
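consumeCursorTuples is called in the SCAN case but not shown on this page. A plausible shape for such a helper, assembled only from the cursor calls used elsewhere in these examples, is sketched below; it is an assumption, not the worker's actual implementation.

// Assumed shape of a consumeCursorTuples-style helper: exhaust the cursor,
// discarding the tuples, and always close it.
private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
    try {
        while (cursor.hasNext()) {
            cursor.next();
        }
    } finally {
        cursor.close();
    }
}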
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
Class RTreeSearchCursorTest, method rangeSearchTest:
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void rangeSearchTest() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
    }
    IBufferCache bufferCache = harness.getBufferCache();
    // Declare fields.
    int fieldCount = 5;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    typeTraits[3] = IntegerPointable.TYPE_TRAITS;
    typeTraits[4] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    // Declare keys.
    int keyFieldCount = 4;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // Create value providers.
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            RTreeUtils.createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
    RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
    IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    RTree rtree = new RTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference(), false);
    rtree.create();
    rtree.activate();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    int numInserts = 10000;
    ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<>();
    for (int i = 0; i < numInserts; i++) {
        int p1x = rnd.nextInt();
        int p1y = rnd.nextInt();
        int p2x = rnd.nextInt();
        int p2y = rnd.nextInt();
        int pk = rnd.nextInt();
        TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
                Math.max(p1y, p2y), pk);
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
        RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
        checkTuple.appendField(Math.min(p1x, p2x));
        checkTuple.appendField(Math.min(p1y, p2y));
        checkTuple.appendField(Math.max(p1x, p2x));
        checkTuple.appendField(Math.max(p1y, p2y));
        checkTuple.appendField(pk);
        checkTuples.add(checkTuple);
    }
    // Build key.
    ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
    ArrayTupleReference key = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
    MultiComparator cmp = MultiComparator.create(cmpFactories);
    ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
    SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
    RTreeCheckTuple keyCheck =
            (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes, keyFieldCount);
    HashMultiSet<RTreeCheckTuple> expectedResult =
            rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
    indexAccessor.search(searchCursor, searchPredicate);
    rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
    rtree.deactivate();
    rtree.destroy();
}
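As a follow-on usage sketch (not part of the test above): a point lookup against the same tree is just a degenerate query window whose lower and upper bounds coincide. The coordinates are arbitrary, and the construction reuses only the cursor, predicate, and tuple-building calls already shown in the test.

// Hedged usage sketch: point query at (50, 50) expressed as a zero-area window.
ArrayTupleBuilder pointTb = new ArrayTupleBuilder(keyFieldCount);
ArrayTupleReference pointKey = new ArrayTupleReference();
TupleUtils.createIntegerTuple(pointTb, pointKey, 50, 50, 50, 50);
ITreeIndexCursor pointCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
indexAccessor.search(pointCursor, new SearchPredicate(pointKey, cmp));
try {
    while (pointCursor.hasNext()) {
        pointCursor.next();
        // pointCursor.getTuple() is a stored rectangle that contains the point (50, 50).
    }
} finally {
    pointCursor.close();
}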
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
Class BTreeSearchCursorTest, method performSearches:
public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
        IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
        boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
    ArrayList<Integer> results = new ArrayList<>();
    ArrayList<Integer> expectedResults = new ArrayList<>();
    for (int i = minKey; i < maxKey; i++) {
        for (int j = minKey; j < maxKey; j++) {
            results.clear();
            expectedResults.clear();
            int lowKey = i;
            int highKey = j;
            ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
            RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive);
            ITreeIndexAccessor indexAccessor =
                    btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
            indexAccessor.search(rangeCursor, rangePred);
            try {
                while (rangeCursor.hasNext()) {
                    rangeCursor.next();
                    ITupleReference frameTuple = rangeCursor.getTuple();
                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0),
                            frameTuple.getFieldStart(0), frameTuple.getFieldLength(0));
                    DataInput dataIn = new DataInputStream(inStream);
                    Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
                    results.add(res);
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                rangeCursor.close();
            }
            getExpectedResults(expectedResults, keys, lowKey, highKey, lowKeyInclusive, highKeyInclusive);
            if (printExpectedResults) {
                if (expectedResults.size() > 0) {
                    char l, u;
                    if (lowKeyInclusive) {
                        l = '[';
                    } else {
                        l = '(';
                    }
                    if (highKeyInclusive) {
                        u = ']';
                    } else {
                        u = ')';
                    }
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
                    }
                    StringBuilder strBuilder = new StringBuilder();
                    for (Integer r : expectedResults) {
                        strBuilder.append(r + " ");
                    }
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info(strBuilder.toString());
                    }
                }
            }
            if (results.size() == expectedResults.size()) {
                for (int k = 0; k < results.size(); k++) {
                    if (!results.get(k).equals(expectedResults.get(k))) {
                        if (LOGGER.isLoggable(Level.INFO)) {
                            LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
                            LOGGER.info(results.get(k) + " " + expectedResults.get(k));
                        }
                        return false;
                    }
                }
            } else {
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
                    LOGGER.info("RESULTS: " + results.size());
                    LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
                }
                return false;
            }
        }
    }
    return true;
}
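getExpectedResults is called above but not shown on this page. A plausible sketch of such a helper follows; it is an assumption about the shape of the test utility, not the test's actual code, and it presumes keys is sorted ascending so the expected list lines up positionally with the cursor's output order.

// Assumed shape of a getExpectedResults-style helper: keep every key that falls
// inside the requested range, honoring the inclusiveness flags.
private void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lowKey,
        int highKey, boolean lowKeyInclusive, boolean highKeyInclusive) {
    for (Integer key : keys) {
        boolean aboveLow = lowKeyInclusive ? key >= lowKey : key > lowKey;
        boolean belowHigh = highKeyInclusive ? key <= highKey : key < highKey;
        if (aboveLow && belowHigh) {
            expectedResults.add(key);
        }
    }
}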