Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class FieldPrefixNSMTest, method createTuple.
private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
        throws HyracksDataException {
    if (print) {
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
        }
    }
    IFrame buf = new VSizeFrame(ctx);
    FrameTupleAppender appender = new FrameTupleAppender(buf);
    ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
    DataOutput dos = tb.getDataOutput();
    @SuppressWarnings("rawtypes")
    ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
    IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    accessor.reset(buf.getBuffer());
    FrameTupleReference tuple = new FrameTupleReference();
    tb.reset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
    tb.addFieldEndOffset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
    tb.addFieldEndOffset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
    tb.addFieldEndOffset();
    appender.reset(buf, true);
    appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
    tuple.reset(accessor, 0);
    return tuple;
}
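When only an ITupleReference is needed, rather than a tuple that lives inside a frame, the same three integers can be packed more compactly. A minimal sketch, assuming the TupleUtils helper and ArrayTupleReference used in the snippets further down this page are available to the test; the helper name is illustrative and not part of FieldPrefixNSMTest:

// Hypothetical helper: builds the same three-integer tuple without a frame,
// using ArrayTupleBuilder's backing array directly.
private ITupleReference createTupleCompact(int f0, int f1, int f2) throws HyracksDataException {
    ArrayTupleBuilder tb = new ArrayTupleBuilder(3); // three integer fields
    ArrayTupleReference tuple = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2); // serializes each int and records the field end offsets
    return tuple; // valid as long as tb's buffer is not reset or rebuilt
}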
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class BTreeSearchCursorTest, method nonUniqueFieldPrefixIndexTest.
@Test
public void nonUniqueFieldPrefixIndexTest() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
    }
    IBufferCache bufferCache = harness.getBufferCache();
    // declare keys
    int keyFieldCount = 2;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, harness.getFileMapProvider(), freePageManager, interiorFrameFactory,
            leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();
    ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    ITreeIndexAccessor indexAccessor =
            btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    // generate keys
    int numKeys = 50;
    int maxKey = 10;
    ArrayList<Integer> keys = new ArrayList<>();
    for (int i = 0; i < numKeys; i++) {
        int k = rnd.nextInt() % maxKey;
        keys.add(k);
    }
    Collections.sort(keys);
    // insert keys into btree
    for (int i = 0; i < keys.size(); i++) {
        TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
        try {
            indexAccessor.insert(tuple);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    int minSearchKey = -100;
    int maxSearchKey = 100;
    // forward searches
    Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey,
            true, true, false));
    Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey,
            false, true, false));
    Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey,
            true, false, false));
    Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey,
            true, true, false));
    btree.deactivate();
    btree.destroy();
}
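The insert loop above swallows every failure via printStackTrace(). A hedged variant of the same loop, following the duplicate-key handling used in the LSM RTree example later on this page; it assumes HyracksDataException.getErrorCode() and ErrorCode.DUPLICATE_KEY are available to this test:

// Sketch only: tolerate duplicate keys, surface everything else.
for (int i = 0; i < keys.size(); i++) {
    TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
    try {
        indexAccessor.insert(tuple);
    } catch (HyracksDataException e) {
        if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
            throw e;
        }
    }
}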
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class LSMBTreeFilterMergeTestDriver, method filterToMinMax.
private Pair<ITupleReference, ITupleReference> filterToMinMax(ILSMComponentFilter f) throws HyracksDataException {
    ArrayTupleBuilder builder = new ArrayTupleBuilder(1);
    builder.addField(f.getMinTuple().getFieldData(0), f.getMinTuple().getFieldStart(0),
            f.getMinTuple().getFieldLength(0));
    ArrayTupleReference minCopy = new ArrayTupleReference();
    minCopy.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    builder = new ArrayTupleBuilder(1);
    builder.addField(f.getMaxTuple().getFieldData(0), f.getMaxTuple().getFieldStart(0),
            f.getMaxTuple().getFieldLength(0));
    ArrayTupleReference maxCopy = new ArrayTupleReference();
    maxCopy.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    builder.reset();
    return Pair.of(minCopy, maxCopy);
}
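Both copies above follow the same pattern: pull one field out of a tuple owned by the filter and re-materialize it in a buffer the caller controls, so it survives component merges. A generalized sketch of that pattern; the helper name is illustrative and not part of the test driver:

// Copies field fieldIdx of src into a standalone ArrayTupleReference so the copy
// outlives the source tuple's underlying buffer.
private static ArrayTupleReference copyField(ITupleReference src, int fieldIdx) throws HyracksDataException {
    ArrayTupleBuilder builder = new ArrayTupleBuilder(1);
    builder.addField(src.getFieldData(fieldIdx), src.getFieldStart(fieldIdx), src.getFieldLength(fieldIdx));
    ArrayTupleReference copy = new ArrayTupleReference();
    copy.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    return copy;
}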
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class ExternalBTreeSearchOperatorNodePushable, method open.
// We override the open function to search a specific version of the index
@Override
public void open() throws HyracksDataException {
    writer.open();
    accessor = new FrameTupleAccessor(inputRecDesc);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    if (retainMissing) {
        int fieldCount = getFieldCount();
        nonMatchTupleBuild = new ArrayTupleBuilder(fieldCount);
        DataOutput out = nonMatchTupleBuild.getDataOutput();
        for (int i = 0; i < fieldCount; i++) {
            try {
                nonMatchWriter.writeMissing(out);
            } catch (IOException e) {
                e.printStackTrace();
            }
            nonMatchTupleBuild.addFieldEndOffset();
        }
    } else {
        nonMatchTupleBuild = null;
    }
    ExternalBTreeWithBuddy externalIndex = (ExternalBTreeWithBuddy) index;
    try {
        searchPred = createSearchPredicate();
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx));
        ISearchOperationCallback searchCallback =
                searchCallbackFactory.createSearchOperationCallback(indexHelper.getResource().getId(), ctx, null);
        // The next line is the reason we override this method
        indexAccessor = externalIndex.createAccessor(searchCallback, version);
        cursor = createCursor();
        if (retainInput) {
            frameTuple = new FrameTupleReference();
        }
    } catch (Throwable th) {
        throw new HyracksDataException(th);
    }
}
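The retainMissing branch builds a single ArrayTupleBuilder whose every field is a "missing" value, to be appended for probe tuples that find no match. A hedged sketch of that construction as a standalone helper which rethrows instead of calling printStackTrace(); it assumes nonMatchWriter is the IMissingWriter produced by the operator's missing-writer factory and relies on the HyracksDataException(Throwable) constructor already used at the end of open():

// Hypothetical extraction of the loop above: one "missing" value per output field.
private static ArrayTupleBuilder buildMissingTuple(int fieldCount, IMissingWriter nonMatchWriter)
        throws HyracksDataException {
    ArrayTupleBuilder missingTuple = new ArrayTupleBuilder(fieldCount);
    DataOutput out = missingTuple.getDataOutput();
    for (int i = 0; i < fieldCount; i++) {
        try {
            nonMatchWriter.writeMissing(out);
        } catch (IOException e) {
            throw new HyracksDataException(e); // surface the failure instead of swallowing it
        }
        missingTuple.addFieldEndOffset();
    }
    return missingTuple;
}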
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class AbstractLSMRTreeExamplesTest, method additionalFilteringingExample.
/**
 * Test the LSM component filters.
 */
@Test
public void additionalFilteringingExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Testing LSMRTree or LSMRTreeWithAntiMatterTuples component filters.");
    }
    // Declare fields.
    int fieldCount = 6;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    typeTraits[3] = IntegerPointable.TYPE_TRAITS;
    typeTraits[4] = IntegerPointable.TYPE_TRAITS;
    typeTraits[5] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE };
    // Declare RTree keys.
    int rtreeKeyFieldCount = 4;
    IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
    rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // Declare BTree keys; these are only used for the LSMRTree.
    int btreeKeyFieldCount;
    IBinaryComparatorFactory[] btreeCmpFactories;
    int[] btreeFields = null;
    if (rTreeType == RTreeType.LSMRTREE) {
        // Parameters differ between LSMRTree and LSMRTreeWithAntiMatterTuples.
        btreeKeyFieldCount = 1;
        btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeFields = new int[btreeKeyFieldCount];
        for (int i = 0; i < btreeKeyFieldCount; i++) {
            btreeFields[i] = rtreeKeyFieldCount + i;
        }
    } else {
        btreeKeyFieldCount = 6;
        btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
        btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    }
    // create value providers
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
    int[] rtreeFields = { 0, 1, 2, 3, 4 };
    ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
    IBinaryComparatorFactory[] filterCmpFactories =
            { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
    int[] filterFields = { 5 };
    ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
            valueProviderFactories, RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits,
            filterCmpFactories, filterFields);
    treeIndex.create();
    treeIndex.activate();
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Inserting into tree...");
    }
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    IIndexAccessor indexAccessor =
            treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    int numInserts = 10000;
    for (int i = 0; i < numInserts; i++) {
        int p1x = rnd.nextInt();
        int p1y = rnd.nextInt();
        int p2x = rnd.nextInt();
        int p2y = rnd.nextInt();
        int pk = 5;
        int filter = i;
        TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
                Math.max(p1y, p2y), pk, filter);
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
    long end = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
    }
    scan(indexAccessor, fieldSerdes);
    diskOrderScan(indexAccessor, fieldSerdes);
    // Build key.
    ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
    ArrayTupleReference key = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
    // Build min filter key.
    ArrayTupleBuilder minFilterTb = new ArrayTupleBuilder(filterFields.length);
    ArrayTupleReference minTuple = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(minFilterTb, minTuple, 400);
    // Build max filter key.
    ArrayTupleBuilder maxFilterTb = new ArrayTupleBuilder(filterFields.length);
    ArrayTupleReference maxTuple = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(maxFilterTb, maxTuple, 500);
    rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key, minTuple, maxTuple);
    treeIndex.deactivate();
    treeIndex.destroy();
}
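The search key and the two filter bounds at the end are all built with the same two-line ArrayTupleBuilder plus TupleUtils pattern. A small hedged helper sketch (the name and placement are illustrative only) that packs a single integer bound into a one-field tuple, as used for the min and max filter keys:

// Builds a one-field integer tuple, e.g. a component-filter bound.
private static ArrayTupleReference buildFilterKey(int value) throws HyracksDataException {
    ArrayTupleBuilder filterTb = new ArrayTupleBuilder(1); // single filter field
    ArrayTupleReference filterKey = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(filterTb, filterKey, value); // serializes the bound and resets filterKey
    return filterKey; // backed by filterTb's buffer
}

With such a helper, the final call could read rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key, buildFilterKey(400), buildFilterKey(500)).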