Use of org.apache.hyracks.storage.common.MultiComparator in project asterixdb by Apache.
The class LSMInvertedIndexDeletedKeysBTreeMergeCursor, method open.
@Override
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
    LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState =
            (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
    cmp = lsmInitialState.getOriginalKeyComparator();
    operationalComponents = lsmInitialState.getOperationalComponents();
    // We intentionally set the lsmHarness to null so that we don't call lsmHarness.endSearch(),
    // because we already do that when we merge the inverted indexes.
    lsmHarness = null;
    int numBTrees = operationalComponents.size();
    rangeCursors = new IIndexCursor[numBTrees];
    MultiComparator keyCmp = lsmInitialState.getKeyComparator();
    RangePredicate btreePredicate = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
    ArrayList<IIndexAccessor> btreeAccessors = lsmInitialState.getDeletedKeysBTreeAccessors();
    for (int i = 0; i < numBTrees; i++) {
        rangeCursors[i] = btreeAccessors.get(i).createSearchCursor(false);
        btreeAccessors.get(i).search(rangeCursors[i], btreePredicate);
    }
    setPriorityQueueComparator();
    initPriorityQueue();
}
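This method only opens one scan cursor per component (via an open range predicate with null low and high keys) and seeds the priority queue; the merged stream is drained by the caller. A minimal hedged sketch of such a drain loop follows; the helper name and the tuple processing are illustrative, not part of the class above:

    // Hedged sketch: drain an IIndexCursor such as the per-component cursors
    // opened above. Only hasNext()/next()/getTuple()/close() are used, all of
    // which belong to the cursor interface in this codebase.
    private static void drain(IIndexCursor cursor) throws HyracksDataException {
        try {
            while (cursor.hasNext()) {
                cursor.next();
                ITupleReference tuple = cursor.getTuple();
                // Process the current tuple here.
            }
        } finally {
            cursor.close();
        }
    }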
Use of org.apache.hyracks.storage.common.MultiComparator in project asterixdb by Apache.
The class LSMRTreeWithAntiMatterTuplesTestWorker, method performOp.
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    LSMTreeIndexAccessor accessor = (LSMTreeIndexAccessor) indexAccessor;
    ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
    LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) accessor.getCtx();
    MultiComparator cmp = concreteCtx.getCurrentRTreeOpContext().getCmp();
    SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
    switch (op) {
        case INSERT:
            rearrangeTuple(tuple, cmp);
            accessor.insert(rearrangedTuple);
            break;
        case DELETE:
            rearrangeTuple(tuple, cmp);
            accessor.delete(rearrangedTuple);
            break;
        case SCAN:
            searchCursor.reset();
            // A null search key turns the predicate into a full scan.
            rangePred.setSearchKey(null);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case MERGE:
            accessor.scheduleMerge(NoOpIOOperationCallbackFactory.INSTANCE.createIoOpCallback(),
                    ((AbstractLSMRTree) lsmRTree).getImmutableComponents());
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
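The MultiComparator pulled from the op context orders tuples field by field over the key fields. As a hedged illustration of that contract, reusing TupleUtils.createIntegerTuple from elsewhere on this page (the key values are made up):

    // Hedged sketch: MultiComparator compares tuples field by field.
    ITupleReference a = TupleUtils.createIntegerTuple(false, 1, 2);
    ITupleReference b = TupleUtils.createIntegerTuple(false, 1, 3);
    int order = cmp.compare(a, b); // negative: a sorts before b on the second field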
Use of org.apache.hyracks.storage.common.MultiComparator in project asterixdb by Apache.
The class RTreeSearchCursorTest, method rangeSearchTest.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void rangeSearchTest() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
    }
    IBufferCache bufferCache = harness.getBufferCache();
    // Declare fields.
    int fieldCount = 5;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    typeTraits[3] = IntegerPointable.TYPE_TRAITS;
    typeTraits[4] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    // Declare keys: the first four fields form the MBR (x1, y1, x2, y2); the fifth is the payload.
    int keyFieldCount = 4;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // Create value providers.
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            RTreeUtils.createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
    RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
    IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    RTree rtree = new RTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference(), false);
    rtree.create();
    rtree.activate();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    int numInserts = 10000;
    ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<>();
    for (int i = 0; i < numInserts; i++) {
        int p1x = rnd.nextInt();
        int p1y = rnd.nextInt();
        int p2x = rnd.nextInt();
        int p2y = rnd.nextInt();
        int pk = rnd.nextInt();
        // Normalize so the rectangle is stored as (minX, minY, maxX, maxY).
        TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
                Math.max(p1y, p2y), pk);
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
        RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
        checkTuple.appendField(Math.min(p1x, p2x));
        checkTuple.appendField(Math.min(p1y, p2y));
        checkTuple.appendField(Math.max(p1x, p2x));
        checkTuple.appendField(Math.max(p1y, p2y));
        checkTuple.appendField(pk);
        checkTuples.add(checkTuple);
    }
    // Build the search key: a query rectangle from (-1000, -1000) to (1000, 1000).
    ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
    ArrayTupleReference key = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
    MultiComparator cmp = MultiComparator.create(cmpFactories);
    ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
    SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
    RTreeCheckTuple keyCheck =
            (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes, keyFieldCount);
    HashMultiSet<RTreeCheckTuple> expectedResult =
            rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
    indexAccessor.search(searchCursor, searchPredicate);
    rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
    rtree.deactivate();
    rtree.destroy();
}
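For reference, a hedged sketch of draining the search cursor by hand instead of delegating to rTreeTestUtils.checkExpectedResults; TupleUtils.printTuple is an assumption here, borrowed from the Hyracks test utilities:

    indexAccessor.search(searchCursor, searchPredicate);
    try {
        while (searchCursor.hasNext()) {
            searchCursor.next();
            // Deserialize and print each match using the declared serdes
            // (assumption: TupleUtils.printTuple(tuple, serdes) is available).
            System.out.println(TupleUtils.printTuple(searchCursor.getTuple(), fieldSerdes));
        }
    } finally {
        searchCursor.close();
    }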
Use of org.apache.hyracks.storage.common.MultiComparator in project asterixdb by Apache.
The class BTreeSearchCursorTest, method createRangePredicate.
public RangePredicate createRangePredicate(int lk, int hk, boolean lowKeyInclusive, boolean highKeyInclusive)
        throws HyracksDataException {
    // Create tuple references for the search keys.
    ITupleReference lowKey = TupleUtils.createIntegerTuple(false, lk);
    ITupleReference highKey = TupleUtils.createIntegerTuple(false, hk);
    IBinaryComparator[] searchCmps = new IBinaryComparator[1];
    searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    MultiComparator searchCmp = new MultiComparator(searchCmps);
    return new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp, searchCmp);
}
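A hedged usage sketch: searching for keys in the inclusive range [3, 7]. The accessor and cursor names are assumptions standing in for fields of the surrounding test fixture:

    // Hedged sketch; indexAccessor and rangeCursor are assumed fixture fields.
    RangePredicate rangePred = createRangePredicate(3, 7, true, true);
    indexAccessor.search(rangeCursor, rangePred);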
Use of org.apache.hyracks.storage.common.MultiComparator in project asterixdb by Apache.
The class FieldPrefixNSMTest, method test01.
@Test
public void test01() throws Exception {
    // Declare fields.
    int fieldCount = 3;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    // Declare keys.
    int keyFieldCount = 3;
    IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
    cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    MultiComparator cmp = new MultiComparator(cmps);
    // Serdes are only used for printing frame contents.
    @SuppressWarnings("rawtypes")
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    Random rnd = new Random();
    rnd.setSeed(50);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fileMapProvider = harness.getFileMapProvider();
    bufferCache.createFile(harness.getFileReference());
    int btreeFileId = fileMapProvider.lookupFileId(harness.getFileReference());
    bufferCache.openFile(btreeFileId);
    IHyracksTaskContext ctx = harness.getHyracksTaskContext();
    ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), true);
    try {
        ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
        BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
        frame.setPage(page);
        frame.initBuffer((byte) 0);
        frame.setMultiComparator(cmp);
        frame.setPrefixTupleCount(0);
        String before = "";
        String after = "";
        int compactFreq = 5;
        int compressFreq = 5;
        int smallMax = 10;
        int numRecords = 1000;
        int[][] savedFields = new int[numRecords][3];
        // Insert records with random calls to compact and compress.
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
                }
            }
            int a = rnd.nextInt() % smallMax;
            int b = rnd.nextInt() % smallMax;
            int c = i;
            ITupleReference tuple = createTuple(ctx, a, b, c, false);
            try {
                int targetTupleIndex = frame.findInsertTupleIndex(tuple);
                frame.insert(tuple, targetTupleIndex);
            } catch (Exception e) {
                e.printStackTrace();
            }
            savedFields[i][0] = a;
            savedFields[i][1] = b;
            savedFields[i][2] = c;
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
        // Delete records with random calls to compact and compress.
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
                }
            }
            ITupleReference tuple =
                    createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
            try {
                int tupleIndex = frame.findDeleteTupleIndex(tuple);
                frame.delete(tuple, tupleIndex);
            } catch (Exception e) {
                // Ignore failed deletes; frame consistency is verified by the
                // compact/compress round-trips below.
            }
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
    } finally {
        bufferCache.unpin(page);
        bufferCache.closeFile(btreeFileId);
        bufferCache.close();
    }
}
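Note the two construction paths for MultiComparator on this page: the direct constructor takes ready-made IBinaryComparator instances (as in this test and BTreeSearchCursorTest), while the static factory builds them from IBinaryComparatorFactory instances (as in RTreeSearchCursorTest). A minimal side-by-side sketch for a single integer key field:

    // Direct constructor: comparator instances already created.
    MultiComparator fromInstances = new MultiComparator(new IBinaryComparator[] {
            PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator() });
    // Static factory: comparators created from the factories themselves.
    MultiComparator fromFactories = MultiComparator.create(new IBinaryComparatorFactory[] {
            PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) });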