Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class RTreeSearchCursorTest, method rangeSearchTest:
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void rangeSearchTest() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
    }
    IBufferCache bufferCache = harness.getBufferCache();
    // Declare fields.
    int fieldCount = 5;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    typeTraits[3] = IntegerPointable.TYPE_TRAITS;
    typeTraits[4] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    // Declare keys.
    int keyFieldCount = 4;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // Create value providers.
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            RTreeUtils.createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
    RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
            valueProviderFactories, RTreePolicyType.RTREE, false);
    IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
    IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    RTree rtree = new RTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference(), false);
    rtree.create();
    rtree.activate();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    int numInserts = 10000;
    ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<>();
    for (int i = 0; i < numInserts; i++) {
        int p1x = rnd.nextInt();
        int p1y = rnd.nextInt();
        int p2x = rnd.nextInt();
        int p2y = rnd.nextInt();
        int pk = rnd.nextInt();
        TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
                Math.max(p1y, p2y), pk);
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
        RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
        checkTuple.appendField(Math.min(p1x, p2x));
        checkTuple.appendField(Math.min(p1y, p2y));
        checkTuple.appendField(Math.max(p1x, p2x));
        checkTuple.appendField(Math.max(p1y, p2y));
        checkTuple.appendField(pk);
        checkTuples.add(checkTuple);
    }
    // Build key.
    ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
    ArrayTupleReference key = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
    MultiComparator cmp = MultiComparator.create(cmpFactories);
    ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
    SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
    RTreeCheckTuple keyCheck =
            (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes, keyFieldCount);
    HashMultiSet<RTreeCheckTuple> expectedResult =
            rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
    indexAccessor.search(searchCursor, searchPredicate);
    rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
    rtree.deactivate();
    rtree.destroy();
}
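The essential ArrayTupleBuilder usage in this test is small and worth isolating. A minimal sketch, reusing the accessor and tuple names from the snippet above; the coordinate and key values are illustrative only:

// Build a 5-field integer tuple (4 MBR key fields + 1 primary key) and insert it.
// TupleUtils.createIntegerTuple resets the builder, serializes each int into it,
// and points the ArrayTupleReference at the builder's buffer.
ArrayTupleBuilder tb = new ArrayTupleBuilder(5);
ArrayTupleReference tuple = new ArrayTupleReference();
TupleUtils.createIntegerTuple(tb, tuple, 0, 0, 100, 100, 42);
indexAccessor.insert(tuple);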
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class RTreeSecondaryIndexInsertOperatorTest, method searchUpdatedSecondaryIndexTest:
@Test
public void searchUpdatedSecondaryIndexTest() throws Exception {
    JobSpecification spec = new JobSpecification();
    // build tuple
    ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
    DataOutput dos = tb.getDataOutput();
    tb.reset();
    DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
    tb.addFieldEndOffset();
    ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE };
    RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
    ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
            keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
    int[] keyFields = { 0, 1, 2, 3 };
    RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
            keyFields, true, true, secondaryHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
    // fifth field from the tuples coming from secondary index
    int[] primaryLowKeyFields = { 4 };
    // fifth field from the tuples coming from secondary index
    int[] primaryHighKeyFields = { 4 };
    // search primary index
    BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
            primaryLowKeyFields, primaryHighKeyFields, true, true, primaryHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
    IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
    IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
    spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
    spec.addRoot(printer);
    runTest(spec);
}
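When TupleUtils has no ready-made helper (here the key fields are doubles), the test falls back to the manual builder protocol: serialize into the builder's DataOutput, then call addFieldEndOffset() to close the field. A minimal sketch of the same four-field MBR build written as a loop; the array name is hypothetical:

// Same build as above, collapsed into a loop. The builder accumulates raw
// bytes; addFieldEndOffset() records where each field ends.
double[] mbr = { 61.2894, -149.624, 61.8894, -149.024 };
ArrayTupleBuilder tb = new ArrayTupleBuilder(mbr.length);
DataOutput dos = tb.getDataOutput();
tb.reset();
for (double d : mbr) {
    DoubleSerializerDeserializer.INSTANCE.serialize(d, dos);
    tb.addFieldEndOffset();
}
// tb.getFieldEndOffsets(), tb.getByteArray(), and tb.getSize() then feed
// the ConstantTupleSourceOperatorDescriptor, as in the job above.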
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class RTreeSecondaryIndexScanOperatorTest, method scanPrimaryIndexTest:
@Test
public void scanPrimaryIndexTest() throws Exception {
    JobSpecification spec = new JobSpecification();
    // build dummy tuple
    ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
    DataOutput dos = tb.getDataOutput();
    tb.reset();
    DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
    tb.addFieldEndOffset();
    DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
    tb.addFieldEndOffset();
    ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
            DoubleSerializerDeserializer.INSTANCE };
    RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
    ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
            keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
    int[] keyFields = null;
    RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
            keyFields, true, true, secondaryHelperFactory, false, false, null,
            NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
    IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
    IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
    spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, printer, 0);
    spec.addRoot(printer);
    runTest(spec);
}
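The job is wired like the range query in the previous test, with one difference: keyFields is null, which (judging from these two tests) switches the RTreeSearchOperatorDescriptor from an MBR range query to a full scan, so the all-zero dummy key tuple is never interpreted. A sketch of the contrast, with hypothetical variable names:

// Range query: name the fields of the incoming key tuple that form the MBR.
int[] rangeQueryKeyFields = { 0, 1, 2, 3 };
// Full scan (this test): no key fields; every secondary-index entry is returned.
int[] fullScanKeyFields = null;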
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class BTreeStatsTest, method test01:
@Test
public void test01() throws Exception {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fmp = harness.getFileMapProvider();
    // declare fields
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    // declare keys
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
    ITreeIndexMetadataFrame metaFrame = metaFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory,
            cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();
    Random rnd = new Random();
    rnd.setSeed(50);
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("INSERTING INTO TREE");
    }
    IFrame frame = new VSizeFrame(ctx);
    FrameTupleAppender appender = new FrameTupleAppender();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    DataOutput dos = tb.getDataOutput();
    ISerializerDeserializer[] recDescSers =
            { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
    IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    accessor.reset(frame.getBuffer());
    FrameTupleReference tuple = new FrameTupleReference();
    ITreeIndexAccessor indexAccessor =
            btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    // Insert 100,000 records with random keys.
    for (int i = 0; i < 100000; i++) {
        int f0 = rnd.nextInt() % 100000;
        int f1 = 5;
        tb.reset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
        tb.addFieldEndOffset();
        appender.reset(frame, true);
        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
        tuple.reset(accessor, 0);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 10000 == 0) {
                long end = System.currentTimeMillis();
                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
            }
        }
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                e.printStackTrace();
                throw e;
            }
        }
    }
    int fileId = fmp.lookupFileId(harness.getFileReference());
    TreeIndexStatsGatherer statsGatherer =
            new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
    TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("\n" + stats.toString());
    }
    TreeIndexBufferCacheWarmup bufferCacheWarmup =
            new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
    bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
    btree.deactivate();
    btree.destroy();
    bufferCache.close();
}
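Unlike the other tests, this one routes the built tuple through a frame before inserting: the builder's bytes are appended into a frame, and a FrameTupleReference is pointed at the appended tuple through an accessor. A minimal sketch of that round trip, reusing the names from the snippet above:

// ArrayTupleBuilder -> frame -> FrameTupleReference round trip.
appender.reset(frame, true); // clear the frame before appending
appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
tuple.reset(accessor, 0);    // view tuple 0 of the frame through the accessor
indexAccessor.insert(tuple); // insert via the reference, without an extra copy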
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class BTreeUpdateSearchTest, method test01:
// Update scan test on fixed-length tuples.
@Test
public void test01() throws Exception {
    IBufferCache bufferCache = harness.getBufferCache();
    // declare fields
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    // declare keys
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    @SuppressWarnings("rawtypes")
    ISerializerDeserializer[] recDescSers =
            { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();
    Random rnd = new Random();
    rnd.setSeed(50);
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("INSERTING INTO TREE");
    }
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference insertTuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    int numInserts = 10000;
    for (int i = 0; i < numInserts; i++) {
        int f0 = rnd.nextInt() % 10000;
        int f1 = 5;
        TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 10000 == 0) {
                long end = System.currentTimeMillis();
                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
            }
        }
        try {
            indexAccessor.insert(insertTuple);
        } catch (HyracksDataException hde) {
            if (hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                hde.printStackTrace();
                throw hde;
            }
        }
    }
    long end = System.currentTimeMillis();
    long duration = end - start;
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("DURATION: " + duration);
    }
    // Update scan.
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("UPDATE SCAN:");
    }
    // Set the cursor to X latch nodes.
    ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
    RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
    indexAccessor.search(updateScanCursor, nullPred);
    try {
        while (updateScanCursor.hasNext()) {
            updateScanCursor.next();
            ITupleReference tuple = updateScanCursor.getTuple();
            // Change the value field.
            IntegerPointable.setInteger(tuple.getFieldData(1), tuple.getFieldStart(1), 10);
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        updateScanCursor.close();
    }
    // Ordered scan to verify the values.
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("ORDERED SCAN:");
    }
    // Set the cursor to X latch nodes.
    ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, true);
    indexAccessor.search(scanCursor, nullPred);
    try {
        while (scanCursor.hasNext()) {
            scanCursor.next();
            ITupleReference tuple = scanCursor.getTuple();
            String rec = TupleUtils.printTuple(tuple, recDescSers);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(rec);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        scanCursor.close();
    }
    btree.deactivate();
    btree.destroy();
}
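Both scans above follow the same cursor discipline, which is the part worth copying: open the cursor with search(), iterate with hasNext()/next(), and close it in a finally block so the cursor's resources are released even on failure. A minimal sketch; it assumes, as is typical for these cursors, that getTuple() returns a reference that is only valid until the next call to next():

ITreeIndexCursor cursor = new BTreeRangeSearchCursor(leafFrame, true);
indexAccessor.search(cursor, nullPred); // null predicate bounds = full ordered scan
try {
    while (cursor.hasNext()) {
        cursor.next();
        ITupleReference t = cursor.getTuple();
        // consume t here; do not hold the reference across iterations
    }
} finally {
    cursor.close();
}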