Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
The class BTreeUpdateSearchTest, method test01.
// Update scan test on fixed-length tuples.
@Test
public void test01() throws Exception {
    IBufferCache bufferCache = harness.getBufferCache();
    // declare fields
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    // declare keys
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    @SuppressWarnings("rawtypes")
    ISerializerDeserializer[] recDescSers =
            { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();
    Random rnd = new Random();
    rnd.setSeed(50);
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("INSERTING INTO TREE");
    }
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference insertTuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    int numInserts = 10000;
    for (int i = 0; i < numInserts; i++) {
        int f0 = rnd.nextInt() % 10000;
        int f1 = 5;
        TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 10000 == 0) {
                long end = System.currentTimeMillis();
                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
            }
        }
        try {
            indexAccessor.insert(insertTuple);
        } catch (HyracksDataException hde) {
            if (hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                hde.printStackTrace();
                throw hde;
            }
        }
    }
    long end = System.currentTimeMillis();
    long duration = end - start;
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("DURATION: " + duration);
    }
    // Update scan.
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("UPDATE SCAN:");
    }
    // Set the cursor to X latch nodes.
    ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
    RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
    indexAccessor.search(updateScanCursor, nullPred);
    try {
        while (updateScanCursor.hasNext()) {
            updateScanCursor.next();
            ITupleReference tuple = updateScanCursor.getTuple();
            // Change the value field.
            IntegerPointable.setInteger(tuple.getFieldData(1), tuple.getFieldStart(1), 10);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        updateScanCursor.close();
    }
    // Ordered scan to verify the values.
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("ORDERED SCAN:");
    }
    // Set the cursor to X latch nodes.
    ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, true);
    indexAccessor.search(scanCursor, nullPred);
    try {
        while (scanCursor.hasNext()) {
            scanCursor.next();
            ITupleReference tuple = scanCursor.getTuple();
            String rec = TupleUtils.printTuple(tuple, recDescSers);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(rec);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        scanCursor.close();
    }
    btree.deactivate();
    btree.destroy();
}
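The test drains each cursor inside try/finally so that pages are released even if iteration fails. That pattern generalizes; below is a minimal sketch of the same drain-and-close discipline as a reusable helper (drainCursor is an illustrative name, not part of the Hyracks API), using only the cursor methods shown above:

    // Illustrative helper: exhaust a cursor and always release its resources,
    // mirroring the try/finally pattern in test01 above.
    private static void drainCursor(ITreeIndexCursor cursor) throws HyracksDataException {
        try {
            while (cursor.hasNext()) {
                cursor.next();
                ITupleReference tuple = cursor.getTuple();
                // Process the tuple here; test01 either mutates it in place or prints it.
            }
        } finally {
            // Closing releases any pages the cursor still pins or latches.
            cursor.close();
        }
    }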
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
The class BTreeTestWorker, method performOp.
@Override
public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
    BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
    ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
    ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
    MultiComparator cmp = accessor.getOpContext().getCmp();
    RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
    switch (op) {
        case INSERT:
            try {
                accessor.insert(tuple);
            } catch (HyracksDataException e) {
                // Ignore duplicate keys, since we get random tuples.
                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                    throw e;
                }
            }
            break;
        case DELETE:
            // Create a tuple reference with only key fields.
            deleteTb.reset();
            for (int i = 0; i < numKeyFields; i++) {
                deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
            }
            deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
            try {
                accessor.delete(deleteTuple);
            } catch (HyracksDataException e) {
                // Ignore non-existent keys, since we get random tuples.
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                    throw e;
                }
            }
            break;
        case UPDATE:
            try {
                accessor.update(tuple);
            } catch (HyracksDataException e) {
                // Ignore non-existent keys, since we get random tuples, and ignore
                // the not-updatable exception due to numKeys == numFields.
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
                        && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
                    throw e;
                }
            }
            break;
        case UPSERT:
            // Upsert should not throw. If it does, it is a bigger problem and the
            // test should fail.
            accessor.upsert(tuple);
            break;
        case POINT_SEARCH:
            searchCursor.reset();
            rangePred.setLowKey(tuple, true);
            rangePred.setHighKey(tuple, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case SCAN:
            searchCursor.reset();
            rangePred.setLowKey(null, true);
            rangePred.setHighKey(null, true);
            accessor.search(searchCursor, rangePred);
            consumeCursorTuples(searchCursor);
            break;
        case DISKORDER_SCAN:
            diskOrderScanCursor.reset();
            accessor.diskOrderScan(diskOrderScanCursor);
            consumeCursorTuples(diskOrderScanCursor);
            break;
        default:
            throw new HyracksDataException("Op " + op.toString() + " not supported.");
    }
}
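consumeCursorTuples is called by the point search, scan, and disk-order scan cases above but is not included in this excerpt. A minimal sketch, assuming the helper simply exhausts the cursor and closes it:

    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
        try {
            // The worker only exercises the index, so tuples are read and discarded.
            while (cursor.hasNext()) {
                cursor.next();
            }
        } finally {
            cursor.close();
        }
    }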
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
The class LSMRTree, method createMergeOperation.
@Override
protected ILSMIOOperation createMergeOperation(AbstractLSMIndexOperationContext opCtx,
        List<ILSMComponent> mergingComponents, LSMComponentFileReferences mergeFileRefs,
        ILSMIOOperationCallback callback) throws HyracksDataException {
    ITreeIndexCursor cursor = new LSMRTreeSortedCursor(opCtx, linearizer, buddyBTreeFields);
    ILSMIndexAccessor accessor = new LSMRTreeAccessor(getLsmHarness(), opCtx, buddyBTreeFields);
    return new LSMRTreeMergeOperation(accessor, mergingComponents, cursor,
            mergeFileRefs.getInsertIndexFileReference(), mergeFileRefs.getDeleteIndexFileReference(),
            mergeFileRefs.getBloomFilterFileReference(), callback, fileManager.getBaseDir());
}
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
The class MetadataNode, method searchIndex.
private <ResultType> void searchIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
        IValueExtractor<ResultType> valueExtractor, List<ResultType> results)
        throws MetadataException, HyracksDataException, RemoteException {
    IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
    if (index.getFile() == null) {
        throw new MetadataException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
    }
    String resourceName = index.getFile().getRelativePath();
    IIndex indexInstance = datasetLifecycleManager.get(resourceName);
    datasetLifecycleManager.open(resourceName);
    IIndexAccessor indexAccessor =
            indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
    MultiComparator searchCmp = null;
    if (searchKey != null) {
        IBinaryComparator[] searchCmps = new IBinaryComparator[searchKey.getFieldCount()];
        for (int i = 0; i < searchKey.getFieldCount(); i++) {
            searchCmps[i] = comparatorFactories[i].createBinaryComparator();
        }
        searchCmp = new MultiComparator(searchCmps);
    }
    RangePredicate rangePred = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
    indexAccessor.search(rangeCursor, rangePred);
    try {
        while (rangeCursor.hasNext()) {
            rangeCursor.next();
            ResultType result = valueExtractor.getValue(jobId, rangeCursor.getTuple());
            if (result != null) {
                results.add(result);
            }
        }
    } finally {
        rangeCursor.close();
    }
    datasetLifecycleManager.close(resourceName);
}
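One fragility worth noting: datasetLifecycleManager.close(resourceName) sits outside the try/finally, so an exception during the search would leave the resource open. A hedged sketch of a more defensive shape, using only the calls already shown above:

    datasetLifecycleManager.open(resourceName);
    try {
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                ResultType result = valueExtractor.getValue(jobId, rangeCursor.getTuple());
                if (result != null) {
                    results.add(result);
                }
            }
        } finally {
            rangeCursor.close();
        }
    } finally {
        // Always release the metadata resource, even if the search throws.
        datasetLifecycleManager.close(resourceName);
    }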
Use of org.apache.hyracks.storage.am.common.api.ITreeIndexCursor in project asterixdb by apache.
The class MetadataNode, method printMetadata.
// Debugging method.
public String printMetadata() {
    StringBuilder sb = new StringBuilder();
    try {
        IMetadataIndex index = MetadataPrimaryIndexes.DATAVERSE_DATASET;
        String resourceName = index.getFile().toString();
        IIndex indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        IIndexAccessor indexAccessor =
                indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);
        index = MetadataPrimaryIndexes.DATASET_DATASET;
        // Refresh the resource name for the new index; the original excerpt reused
        // the stale DATAVERSE_DATASET resource name here and below.
        resourceName = index.getFile().toString();
        indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);
        index = MetadataPrimaryIndexes.INDEX_DATASET;
        resourceName = index.getFile().toString();
        indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
        rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
        rangePred = new RangePredicate(null, null, true, true, null, null);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
            }
        } finally {
            rangeCursor.close();
        }
        datasetLifecycleManager.close(resourceName);
    } catch (Exception e) {
        // Debugging method; just report the failure.
        e.printStackTrace();
    }
    return sb.toString();
}
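The three scan blocks in printMetadata differ only in the index being scanned and the serializer list used for printing. A sketch of a helper that factors out that repetition (printIndex is an illustrative name, not part of MetadataNode):

    // Illustrative refactor: one full-scan-and-print loop shared by all three
    // metadata indexes, with open/close guarded by try/finally.
    private void printIndex(IMetadataIndex index, ISerializerDeserializer[] serdes, StringBuilder sb)
            throws Exception {
        String resourceName = index.getFile().toString();
        IIndex indexInstance = datasetLifecycleManager.get(resourceName);
        datasetLifecycleManager.open(resourceName);
        try {
            IIndexAccessor accessor =
                    indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
            ITreeIndexCursor cursor = (ITreeIndexCursor) accessor.createSearchCursor(false);
            // Null low/high keys with inclusive bounds select every tuple.
            accessor.search(cursor, new RangePredicate(null, null, true, true, null, null));
            try {
                while (cursor.hasNext()) {
                    cursor.next();
                    sb.append(TupleUtils.printTuple(cursor.getTuple(), serdes));
                }
            } finally {
                cursor.close();
            }
        } finally {
            datasetLifecycleManager.close(resourceName);
        }
    }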