Search in sources:

Example 1 with ITreeIndexTupleWriter

Use of org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter in project asterixdb by apache.

From the class FieldPrefixNSMTest, method test01.
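
The test below exercises the prefix-compressing B-tree leaf frame: a TypeAwareTupleWriter (an ITreeIndexTupleWriter implementation) is wired into a BTreeFieldPrefixNSMLeafFrame backed by a pinned buffer-cache page, 1000 three-integer tuples are inserted and then deleted, and after randomly triggered compact() and compress() calls the test asserts that the printed frame contents are unchanged.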

@Test
public void test01() throws Exception {
    // declare fields
    int fieldCount = 3;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    // declare keys
    int keyFieldCount = 3;
    IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
    cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    MultiComparator cmp = new MultiComparator(cmps);
    // just for printing
    @SuppressWarnings("rawtypes") ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    Random rnd = new Random();
    rnd.setSeed(50);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fileMapProvider = harness.getFileMapProvider();
    bufferCache.createFile(harness.getFileReference());
    int btreeFileId = fileMapProvider.lookupFileId(harness.getFileReference());
    bufferCache.openFile(btreeFileId);
    IHyracksTaskContext ctx = harness.getHyracksTaskContext();
    ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), true);
    try {
        ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
        BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
        frame.setPage(page);
        frame.initBuffer((byte) 0);
        frame.setMultiComparator(cmp);
        frame.setPrefixTupleCount(0);
        String before = "";
        String after = "";
        int compactFreq = 5;
        int compressFreq = 5;
        int smallMax = 10;
        int numRecords = 1000;
        int[][] savedFields = new int[numRecords][3];
        // insert records with random calls to compact and compress
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
                }
            }
            int a = rnd.nextInt() % smallMax;
            int b = rnd.nextInt() % smallMax;
            int c = i;
            ITupleReference tuple = createTuple(ctx, a, b, c, false);
            try {
                int targetTupleIndex = frame.findInsertTupleIndex(tuple);
                frame.insert(tuple, targetTupleIndex);
            } catch (Exception e) {
                e.printStackTrace();
            }
            savedFields[i][0] = a;
            savedFields[i][1] = b;
            savedFields[i][2] = c;
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
        // delete records with random calls to compact and compress
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
                }
            }
            ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
            try {
                int tupleIndex = frame.findDeleteTupleIndex(tuple);
                frame.delete(tuple, tupleIndex);
            } catch (Exception e) {
                // delete failures are ignored
            }
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
    } finally {
        bufferCache.unpin(page);
        bufferCache.closeFile(btreeFileId);
        bufferCache.close();
    }
}
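
For reference, the insert-then-verify pattern used inside the loops above can be distilled into a small helper. This is only a sketch assembled from the calls already shown in test01 (the frame, tuple, and fieldSerdes are assumed to be set up exactly as in the test); it is not part of the original class.

// Sketch: insert one tuple, then assert that compact() preserves the frame's
// logical contents, mirroring the check performed in test01's loops.
private static void insertAndCheckCompact(BTreeFieldPrefixNSMLeafFrame frame, ITupleReference tuple,
        ISerializerDeserializer[] fieldSerdes) throws Exception {
    int targetTupleIndex = frame.findInsertTupleIndex(tuple);
    frame.insert(tuple, targetTupleIndex);
    String before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
    frame.compact();
    String after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
    Assert.assertEquals(before, after);
}

Also note that the try/finally block in test01 guarantees the pinned page is unpinned and the file and buffer cache are closed even if an insertion, deletion, or assertion fails.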
Also used:
ICachedPage (org.apache.hyracks.storage.common.buffercache.ICachedPage)
ITreeIndexTupleWriter (org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter)
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits)
MultiComparator (org.apache.hyracks.storage.common.MultiComparator)
IBinaryComparator (org.apache.hyracks.api.dataflow.value.IBinaryComparator)
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer)
BTreeFieldPrefixNSMLeafFrame (org.apache.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
IFileMapProvider (org.apache.hyracks.storage.common.file.IFileMapProvider)
Random (java.util.Random)
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext)
ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference)
IBufferCache (org.apache.hyracks.storage.common.buffercache.IBufferCache)
TypeAwareTupleWriter (org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter)
Test (org.junit.Test)
AbstractBTreeTest (org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest)
