Use of org.apache.hyracks.storage.common.IIndexAccessor in project asterixdb by apache.
The class MetadataNode, method printMetadata.
// Debugging method: dumps the contents of the Dataverse, Dataset, and Index metadata indexes.
public String printMetadata() {
    StringBuilder sb = new StringBuilder();
    try {
        // Scan the Dataverse metadata index (one string key field).
        IMetadataIndex index = MetadataPrimaryIndexes.DATAVERSE_DATASET;
        String resourceName = index.getFile().toString();
        IIndex indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        IIndexAccessor indexAccessor =
                indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        // An all-null, inclusive range predicate matches every tuple in the index.
        RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
                        new ISerializerDeserializer[] { SerializerDeserializerProvider.INSTANCE
                                .getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);

        // Scan the Dataset metadata index (two string key fields).
        index = MetadataPrimaryIndexes.DATASET_DATASET;
        // Refresh the resource name so the newly selected index file is the one opened.
        resourceName = index.getFile().toString();
        indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        indexAccessor =
                indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
                        new ISerializerDeserializer[] {
                                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                                SerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);

        // Scan the Index metadata index (three string key fields).
        index = MetadataPrimaryIndexes.INDEX_DATASET;
        resourceName = index.getFile().toString();
        indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        indexAccessor =
                indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
                        new ISerializerDeserializer[] {
                                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                                SerializerDeserializerProvider.INSTANCE
                                        .getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);
    } catch (Exception e) {
        // Debugging method; just report the failure.
        e.printStackTrace();
    }
    return sb.toString();
}
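Each of the three scans above repeats the same IIndexAccessor/cursor pattern; only the target metadata index and the serde array change. Below is a minimal sketch of that shared pattern pulled into a helper. The helper name printIndex and its parameter list are illustrative only, not part of MetadataNode, and it assumes the index instance has already been fetched and opened through the datasetLifecycleManager as shown above.

// Hypothetical helper (not in the AsterixDB source): appends every tuple of an already-opened index to sb.
private static void printIndex(IIndex indexInstance, ISerializerDeserializer[] serdes, StringBuilder sb)
        throws Exception {
    IIndexAccessor accessor =
            indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    ITreeIndexCursor cursor = (ITreeIndexCursor) accessor.createSearchCursor(false);
    // An all-null, inclusive RangePredicate selects the full key range.
    RangePredicate fullScan = new RangePredicate(null, null, true, true, null, null);
    accessor.search(cursor, fullScan);
    try {
        while (cursor.hasNext()) {
            cursor.next();
            sb.append(TupleUtils.printTuple(cursor.getTuple(), serdes));
        }
    } finally {
        cursor.close();
    }
}

With such a helper, printMetadata would reduce to three calls that differ only in the index constant and the serdes passed in.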
Use of org.apache.hyracks.storage.common.IIndexAccessor in project asterixdb by apache.
The class AbstractLSMRTreeExamplesTest, method additionalFilteringingExample.
/**
 * Test the LSM component filters.
 */
@Test
public void additionalFilteringingExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Testing LSMRTree or LSMRTreeWithAntiMatterTuples component filters.");
    }
    // Declare fields.
    int fieldCount = 6;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    typeTraits[3] = IntegerPointable.TYPE_TRAITS;
    typeTraits[4] = IntegerPointable.TYPE_TRAITS;
    typeTraits[5] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE };
    // Declare RTree keys.
    int rtreeKeyFieldCount = 4;
    IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
    rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // Declare BTree keys; the btreeFields array is only used for LSMRTree.
    int btreeKeyFieldCount;
    IBinaryComparatorFactory[] btreeCmpFactories;
    int[] btreeFields = null;
    if (rTreeType == RTreeType.LSMRTREE) {
        // The BTree parameters differ between LSMRTree and LSMRTreeWithAntiMatterTuples.
        btreeKeyFieldCount = 1;
        btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeFields = new int[btreeKeyFieldCount];
        for (int i = 0; i < btreeKeyFieldCount; i++) {
            btreeFields[i] = rtreeKeyFieldCount + i;
        }
    } else {
        btreeKeyFieldCount = 6;
        btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    }
    // Create value providers.
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
    int[] rtreeFields = { 0, 1, 2, 3, 4 };
    // Field 5 carries the component filter value.
    ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
    IBinaryComparatorFactory[] filterCmpFactories =
            { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
    int[] filterFields = { 5 };
    ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
            RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits, filterCmpFactories, filterFields);
    treeIndex.create();
    treeIndex.activate();
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Inserting into tree...");
    }
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    IIndexAccessor indexAccessor =
            treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    int numInserts = 10000;
    for (int i = 0; i < numInserts; i++) {
        int p1x = rnd.nextInt();
        int p1y = rnd.nextInt();
        int p2x = rnd.nextInt();
        int p2y = rnd.nextInt();
        int pk = 5;
        int filter = i;
        TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
                Math.max(p1y, p2y), pk, filter);
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
    long end = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
    }
    scan(indexAccessor, fieldSerdes);
    diskOrderScan(indexAccessor, fieldSerdes);
    // Build the spatial query key.
    ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
    ArrayTupleReference key = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
    // Build min filter key.
    ArrayTupleBuilder minFilterTb = new ArrayTupleBuilder(filterFields.length);
    ArrayTupleReference minTuple = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(minFilterTb, minTuple, 400);
    // Build max filter key.
    ArrayTupleBuilder maxFilterTb = new ArrayTupleBuilder(filterFields.length);
    ArrayTupleReference maxTuple = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(maxFilterTb, maxTuple, 500);
    rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key, minTuple, maxTuple);
    treeIndex.deactivate();
    treeIndex.destroy();
}
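The filter window in the final search (field 5 between 400 and 500) is what exercises the component filters: LSM components whose recorded filter range does not intersect the window can be skipped entirely. To probe a different window, only the min/max filter tuples change. A short sketch that reuses the test-harness helper and the variables from the method above (illustrative, not a standalone test):

// Hypothetical second probe: same spatial query, filter values restricted to [9000, 9500].
ArrayTupleBuilder minFilterTb2 = new ArrayTupleBuilder(filterFields.length);
ArrayTupleReference minTuple2 = new ArrayTupleReference();
TupleUtils.createIntegerTuple(minFilterTb2, minTuple2, 9000);
ArrayTupleBuilder maxFilterTb2 = new ArrayTupleBuilder(filterFields.length);
ArrayTupleReference maxTuple2 = new ArrayTupleReference();
TupleUtils.createIntegerTuple(maxFilterTb2, maxTuple2, 9500);
rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key, minTuple2, maxTuple2);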
Use of org.apache.hyracks.storage.common.IIndexAccessor in project asterixdb by apache.
The class LSMInvertedIndexDeletedKeysBTreeMergeCursor, method open.
@Override
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
    LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState =
            (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
    cmp = lsmInitialState.getOriginalKeyComparator();
    operationalComponents = lsmInitialState.getOperationalComponents();
    // We intentionally set lsmHarness to null so that we don't call lsmHarness.endSearch(),
    // because we already do that when we merge the inverted indexes.
    lsmHarness = null;
    int numBTrees = operationalComponents.size();
    rangeCursors = new IIndexCursor[numBTrees];
    MultiComparator keyCmp = lsmInitialState.getKeyComparator();
    RangePredicate btreePredicate = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
    ArrayList<IIndexAccessor> btreeAccessors = lsmInitialState.getDeletedKeysBTreeAccessors();
    // Open one full-range cursor per operational component's deleted-keys BTree.
    for (int i = 0; i < numBTrees; i++) {
        rangeCursors[i] = btreeAccessors.get(i).createSearchCursor(false);
        btreeAccessors.get(i).search(rangeCursors[i], btreePredicate);
    }
    setPriorityQueueComparator();
    initPriorityQueue();
}
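Once open() returns, the merge cursor is consumed like any other IIndexCursor: the priority queue initialized at the end of the method draws from the per-component BTree cursors so that hasNext()/next() walk the deleted keys of all operational components in key order. A minimal consumption sketch, with the surrounding merge-driver code assumed rather than taken from the AsterixDB source:

// Assumed caller of the merge cursor; only the cursor calls shown here come from the API used above.
cursor.open(initialState, searchPred);
try {
    while (cursor.hasNext()) {
        cursor.next();
        ITupleReference deletedKey = cursor.getTuple();
        // ... hand deletedKey to the component being built by the merge ...
    }
} finally {
    cursor.close();
}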