Search in sources :

Example 6 with IFileMapProvider

use of org.apache.hyracks.storage.common.file.IFileMapProvider in project asterixdb by apache.

The class BufferCacheRegressionTest, method flushBehaviorTest.

/**
 * Exercises BufferCache flush behavior when a file's slot is reclaimed by a
 * second file created under the same name.
 *
 * When {@code deleteFile} is true, the dirty page of the deleted first file
 * must NOT be lazily flushed during slot reclamation (the new file's on-disk
 * bytes must stay clean). When false, the dirty page of the properly closed
 * first file MUST be flushed so its contents survive the reclamation.
 *
 * @param deleteFile whether to delete the first file before its slot is reclaimed
 * @throws IOException if file resolution or raw I/O fails
 */
private void flushBehaviorTest(boolean deleteFile) throws IOException {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, 10, 1);
    IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
    IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
    IOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
    FileReference firstFileRef = ioManager.resolve(fileName);
    bufferCache.createFile(firstFileRef);
    int firstFileId = fmp.lookupFileId(firstFileRef);
    bufferCache.openFile(firstFileId);
    // Fill the first page with known data and make it dirty by write-latching it.
    ICachedPage writePage = bufferCache.pin(BufferedFileHandle.getDiskPageId(firstFileId, 0), true);
    writePage.acquireWriteLatch();
    try {
        ByteBuffer buf = writePage.getBuffer();
        for (int i = 0; i < buf.capacity(); i++) {
            buf.put(Byte.MAX_VALUE);
        }
    } finally {
        writePage.releaseWriteLatch(true);
        bufferCache.unpin(writePage);
    }
    bufferCache.closeFile(firstFileId);
    if (deleteFile) {
        bufferCache.deleteFile(firstFileId, false);
    }
    // Create a file with the same name.
    FileReference secondFileRef = ioManager.resolve(fileName);
    bufferCache.createFile(secondFileRef);
    int secondFileId = fmp.lookupFileId(secondFileRef);
    // This open replaces firstFileRef's slot in the BufferCache, causing its
    // pages to be cleaned up. Those dirty pages must not be flushed to disk
    // when the file was declared as deleted, because somebody might already
    // be using the same filename again (having been assigned a different
    // fileId).
    bufferCache.openFile(secondFileId);
    // Manually open the file and inspect its contents. We cannot simply ask
    // the BufferCache to pin the page, because it would return the same
    // physical memory again, and for performance reasons pages are never
    // reset with 0's.
    FileReference testFileRef = ioManager.resolve(fileName);
    IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY, FileSyncMode.METADATA_SYNC_DATA_SYNC);
    try {
        ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE + BufferCache.RESERVED_HEADER_BYTES);
        ioManager.syncRead(testFileHandle, 0, testBuffer);
        for (int i = BufferCache.RESERVED_HEADER_BYTES; i < testBuffer.capacity(); i++) {
            if (deleteFile) {
                // We deleted the file. We expect to see a clean buffer.
                if (testBuffer.get(i) == Byte.MAX_VALUE) {
                    fail("Page 0 of deleted file was lazily flushed in openFile(), " + "corrupting the data of a newly created file with the same name.");
                }
            } else {
                // The file was closed properly, so the dirty page should have
                // been flushed: every payload byte must be Byte.MAX_VALUE.
                if (testBuffer.get(i) != Byte.MAX_VALUE) {
                    fail("Page 0 of closed file was not flushed properly when reclaiming the file slot of fileId 0 in the BufferCache.");
                }
            }
        }
    } finally {
        // Release the raw handle even if an assertion above fires.
        ioManager.close(testFileHandle);
    }
    bufferCache.closeFile(secondFileId);
    if (deleteFile) {
        bufferCache.deleteFile(secondFileId, false);
    }
    bufferCache.close();
}
Also used : IFileMapProvider(org.apache.hyracks.storage.common.file.IFileMapProvider) ICachedPage(org.apache.hyracks.storage.common.buffercache.ICachedPage) IFileHandle(org.apache.hyracks.api.io.IFileHandle) IOManager(org.apache.hyracks.control.nc.io.IOManager) FileReference(org.apache.hyracks.api.io.FileReference) ByteBuffer(java.nio.ByteBuffer) IBufferCache(org.apache.hyracks.storage.common.buffercache.IBufferCache)

Example 7 with IFileMapProvider

use of org.apache.hyracks.storage.common.file.IFileMapProvider in project asterixdb by apache.

The class BufferCacheTest, method simpleMaxOpenFilesTest.

/**
 * Verifies that the BufferCache enforces MAX_OPEN_FILES: once the limit is
 * reached, opening another file fails with HyracksDataException; after
 * closing one file, a subsequent open succeeds again.
 *
 * @throws HyracksException if setup or file creation fails unexpectedly
 */
@Test
public void simpleMaxOpenFilesTest() throws HyracksException {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
    IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
    IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
    IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
    List<Integer> fileIds = new ArrayList<>();
    // Open the maximum allowed number of files.
    for (int i = 0; i < MAX_OPEN_FILES; i++) {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
        fileIds.add(fileId);
    }
    boolean exceptionThrown = false;
    // since all files are open, next open should fail
    try {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
    } catch (HyracksDataException e) {
        exceptionThrown = true;
    }
    Assert.assertTrue(exceptionThrown);
    // Close a random file. Use nextInt(bound) instead of
    // Math.abs(nextInt()) % size: Math.abs(Integer.MIN_VALUE) is still
    // negative and would produce a negative index.
    int ix = rnd.nextInt(fileIds.size());
    bufferCache.closeFile(fileIds.get(ix));
    fileIds.remove(ix);
    // now open should succeed again
    exceptionThrown = false;
    try {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
        fileIds.add(fileId);
    } catch (HyracksDataException e) {
        exceptionThrown = true;
    }
    Assert.assertFalse(exceptionThrown);
    // Clean up: close every remaining open file, then the cache itself.
    for (Integer i : fileIds) {
        bufferCache.closeFile(i.intValue());
    }
    bufferCache.close();
}
Also used : IFileMapProvider(org.apache.hyracks.storage.common.file.IFileMapProvider) ArrayList(java.util.ArrayList) FileReference(org.apache.hyracks.api.io.FileReference) IIOManager(org.apache.hyracks.api.io.IIOManager) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IBufferCache(org.apache.hyracks.storage.common.buffercache.IBufferCache) Test(org.junit.Test)

Example 8 with IFileMapProvider

use of org.apache.hyracks.storage.common.file.IFileMapProvider in project asterixdb by apache.

The class BufferCacheTest, method contentCheckingMaxOpenFilesTest.

/**
 * Verifies MAX_OPEN_FILES enforcement together with content preservation:
 * pages written before a file is closed must be readable unchanged after the
 * file is reopened, even when its slot was reclaimed in between.
 *
 * @throws HyracksException if setup or file creation fails unexpectedly
 */
@Test
public void contentCheckingMaxOpenFilesTest() throws HyracksException {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
    IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
    IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
    IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
    List<Integer> fileIds = new ArrayList<>();
    // fileId -> the random ints written into that file's first page.
    Map<Integer, ArrayList<Integer>> pageContents = new HashMap<>();
    int num = 10;
    int testPageId = 0;
    // open max number of files and write some stuff into their first page
    for (int i = 0; i < MAX_OPEN_FILES; i++) {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
        fileIds.add(fileId);
        ICachedPage page = null;
        page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
        page.acquireWriteLatch();
        try {
            ArrayList<Integer> values = new ArrayList<>();
            for (int j = 0; j < num; j++) {
                int x = Math.abs(rnd.nextInt());
                page.getBuffer().putInt(j * 4, x);
                values.add(x);
            }
            pageContents.put(fileId, values);
        } finally {
            page.releaseWriteLatch(true);
            bufferCache.unpin(page);
        }
    }
    boolean exceptionThrown = false;
    // since all files are open, next open should fail
    try {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
    } catch (HyracksDataException e) {
        exceptionThrown = true;
    }
    Assert.assertTrue(exceptionThrown);
    // Close a few random files. Use nextInt(bound) instead of
    // Math.abs(nextInt()) % size, which can yield a negative index when
    // nextInt() returns Integer.MIN_VALUE.
    ArrayList<Integer> closedFileIds = new ArrayList<>();
    int filesToClose = 5;
    for (int i = 0; i < filesToClose; i++) {
        int ix = rnd.nextInt(fileIds.size());
        bufferCache.closeFile(fileIds.get(ix));
        closedFileIds.add(fileIds.get(ix));
        fileIds.remove(ix);
    }
    // now open a few new files
    for (int i = 0; i < filesToClose; i++) {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
        fileIds.add(fileId);
    }
    // Since all slots are taken again, the next open should fail. Reset the
    // flag first so this assertion cannot pass vacuously on the value left
    // over from the previous expected failure.
    exceptionThrown = false;
    try {
        String fileName = getFileName();
        FileReference file = ioManager.resolve(fileName);
        bufferCache.createFile(file);
        int fileId = fmp.lookupFileId(file);
        bufferCache.openFile(fileId);
    } catch (HyracksDataException e) {
        exceptionThrown = true;
    }
    Assert.assertTrue(exceptionThrown);
    // close a few random files again
    for (int i = 0; i < filesToClose; i++) {
        int ix = rnd.nextInt(fileIds.size());
        bufferCache.closeFile(fileIds.get(ix));
        closedFileIds.add(fileIds.get(ix));
        fileIds.remove(ix);
    }
    // now open those closed files again and verify their contents
    for (int i = 0; i < filesToClose; i++) {
        int closedFileId = closedFileIds.get(i);
        bufferCache.openFile(closedFileId);
        fileIds.add(closedFileId);
        // pin first page and verify contents
        ICachedPage page = null;
        page = bufferCache.pin(BufferedFileHandle.getDiskPageId(closedFileId, testPageId), false);
        page.acquireReadLatch();
        try {
            ArrayList<Integer> values = pageContents.get(closedFileId);
            for (int j = 0; j < values.size(); j++) {
                Assert.assertEquals(values.get(j).intValue(), page.getBuffer().getInt(j * 4));
            }
        } finally {
            page.releaseReadLatch();
            bufferCache.unpin(page);
        }
    }
    // Clean up: close every remaining open file, then the cache itself.
    for (Integer i : fileIds) {
        bufferCache.closeFile(i.intValue());
    }
    bufferCache.close();
}
Also used : ICachedPage(org.apache.hyracks.storage.common.buffercache.ICachedPage) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IIOManager(org.apache.hyracks.api.io.IIOManager) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IFileMapProvider(org.apache.hyracks.storage.common.file.IFileMapProvider) FileReference(org.apache.hyracks.api.io.FileReference) IBufferCache(org.apache.hyracks.storage.common.buffercache.IBufferCache) Test(org.junit.Test)

Example 9 with IFileMapProvider

use of org.apache.hyracks.storage.common.file.IFileMapProvider in project asterixdb by apache.

The class BTreeStatsTest, method test01.

/**
 * Builds a two-field integer BTree, bulk-inserts 100,000 random keys
 * (tolerating duplicate-key errors), then gathers and logs tree statistics
 * and exercises the buffer-cache warmup utility.
 *
 * @throws Exception on any unexpected failure during insert or stats gathering
 */
@Test
public void test01() throws Exception {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fmp = harness.getFileMapProvider();
    // declare fields
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    // declare keys (only the first field is a key)
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
    ITreeIndexMetadataFrame metaFrame = metaFrameFactory.createFrame();
    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();
    // Fixed seed keeps the inserted keys reproducible across runs.
    Random rnd = new Random();
    rnd.setSeed(50);
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("INSERTING INTO TREE");
    }
    IFrame frame = new VSizeFrame(ctx);
    FrameTupleAppender appender = new FrameTupleAppender();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    DataOutput dos = tb.getDataOutput();
    ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
    IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    accessor.reset(frame.getBuffer());
    FrameTupleReference tuple = new FrameTupleReference();
    ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    // Insert 100,000 tuples; key f0 is random (may repeat), payload f1 is constant.
    for (int i = 0; i < 100000; i++) {
        int f0 = rnd.nextInt() % 100000;
        int f1 = 5;
        tb.reset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
        tb.addFieldEndOffset();
        appender.reset(frame, true);
        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
        tuple.reset(accessor, 0);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 10000 == 0) {
                long end = System.currentTimeMillis();
                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
            }
        }
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            // Duplicate keys are expected with random f0 values; anything
            // else is a real failure. The rethrow preserves the stack trace,
            // so no printStackTrace() is needed here.
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
    int fileId = fmp.lookupFileId(harness.getFileReference());
    TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
    TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("\n" + stats.toString());
    }
    TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
    bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
    btree.deactivate();
    btree.destroy();
    bufferCache.close();
}
Also used : DataOutput(java.io.DataOutput) IBTreeInteriorFrame(org.apache.hyracks.storage.am.btree.api.IBTreeInteriorFrame) IFrame(org.apache.hyracks.api.comm.IFrame) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) TreeIndexStatsGatherer(org.apache.hyracks.storage.am.common.util.TreeIndexStatsGatherer) BTree(org.apache.hyracks.storage.am.btree.impls.BTree) ITreeIndexMetadataFrame(org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame) BTreeNSMInteriorFrameFactory(org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory) ITreeIndexFrameFactory(org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory) BTreeNSMLeafFrameFactory(org.apache.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory) ITreeIndexMetadataFrameFactory(org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory) LIFOMetaDataFrameFactory(org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory) Random(java.util.Random) IBTreeLeafFrame(org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame) FrameTupleAppender(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender) IFrameTupleAccessor(org.apache.hyracks.api.comm.IFrameTupleAccessor) ITypeTraits(org.apache.hyracks.api.dataflow.value.ITypeTraits) TypeAwareTupleWriterFactory(org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) FrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference) ArrayTupleBuilder(org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder) TreeIndexStats(org.apache.hyracks.storage.am.common.util.TreeIndexStats) IMetadataPageManager(org.apache.hyracks.storage.am.common.api.IMetadataPageManager) LinkedMetaDataPageManager(org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager) VSizeFrame(org.apache.hyracks.api.comm.VSizeFrame) 
ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) ITreeIndexAccessor(org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IFileMapProvider(org.apache.hyracks.storage.common.file.IFileMapProvider) TreeIndexBufferCacheWarmup(org.apache.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup) IBufferCache(org.apache.hyracks.storage.common.buffercache.IBufferCache) FrameTupleAccessor(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor) IFrameTupleAccessor(org.apache.hyracks.api.comm.IFrameTupleAccessor) Test(org.junit.Test) AbstractBTreeTest(org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest)

Example 10 with IFileMapProvider

use of org.apache.hyracks.storage.common.file.IFileMapProvider in project asterixdb by apache.

The class FieldPrefixNSMTest, method test01.

/**
 * Stress-tests the field-prefix-compressed BTree leaf frame: inserts then
 * deletes 1000 random 3-int tuples, interleaving random compact() and
 * compress() calls, and asserts that neither operation changes the frame's
 * logical tuple contents (verified by comparing printed tuples before/after).
 *
 * @throws Exception on any unexpected failure
 */
@Test
public void test01() throws Exception {
    // declare fields
    int fieldCount = 3;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    // declare keys (all three fields are keys)
    int keyFieldCount = 3;
    IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
    cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
    MultiComparator cmp = new MultiComparator(cmps);
    // just for printing
    @SuppressWarnings("rawtypes") ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    // Fixed seed keeps the generated tuples reproducible across runs.
    Random rnd = new Random();
    rnd.setSeed(50);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fileMapProvider = harness.getFileMapProvider();
    bufferCache.createFile(harness.getFileReference());
    int btreeFileId = fileMapProvider.lookupFileId(harness.getFileReference());
    bufferCache.openFile(btreeFileId);
    IHyracksTaskContext ctx = harness.getHyracksTaskContext();
    ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), true);
    try {
        ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
        BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
        frame.setPage(page);
        frame.initBuffer((byte) 0);
        frame.setMultiComparator(cmp);
        frame.setPrefixTupleCount(0);
        String before = "";
        String after = "";
        int compactFreq = 5;
        int compressFreq = 5;
        int smallMax = 10;
        int numRecords = 1000;
        // Remember each inserted tuple's fields so the delete phase can
        // reconstruct the exact same tuples.
        int[][] savedFields = new int[numRecords][3];
        // insert records with random calls to compact and compress
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
                }
            }
            // a and b repeat frequently (to exercise prefix compression);
            // c is unique per record.
            int a = rnd.nextInt() % smallMax;
            int b = rnd.nextInt() % smallMax;
            int c = i;
            ITupleReference tuple = createTuple(ctx, a, b, c, false);
            try {
                int targetTupleIndex = frame.findInsertTupleIndex(tuple);
                frame.insert(tuple, targetTupleIndex);
            } catch (Exception e) {
                // NOTE(review): insert failures are logged but deliberately
                // not rethrown, so the loop continues with remaining tuples.
                e.printStackTrace();
            }
            savedFields[i][0] = a;
            savedFields[i][1] = b;
            savedFields[i][2] = c;
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
        // delete records with random calls to compact and compress
        for (int i = 0; i < numRecords; i++) {
            if (LOGGER.isLoggable(Level.INFO)) {
                if ((i + 1) % 100 == 0) {
                    LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
                }
            }
            ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
            try {
                int tupleIndex = frame.findDeleteTupleIndex(tuple);
                frame.delete(tuple, tupleIndex);
            } catch (Exception e) {
                // NOTE(review): deliberately ignored — a tuple whose insert
                // failed above will not be found here.
            }
            if (rnd.nextInt() % compactFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compact();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
            if (rnd.nextInt() % compressFreq == 0) {
                before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                frame.compress();
                after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                Assert.assertEquals(before, after);
            }
        }
    } finally {
        bufferCache.unpin(page);
        bufferCache.closeFile(btreeFileId);
        bufferCache.close();
    }
}
Also used : ICachedPage(org.apache.hyracks.storage.common.buffercache.ICachedPage) ITreeIndexTupleWriter(org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter) ITypeTraits(org.apache.hyracks.api.dataflow.value.ITypeTraits) MultiComparator(org.apache.hyracks.storage.common.MultiComparator) IBinaryComparator(org.apache.hyracks.api.dataflow.value.IBinaryComparator) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) BTreeFieldPrefixNSMLeafFrame(org.apache.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IFileMapProvider(org.apache.hyracks.storage.common.file.IFileMapProvider) Random(java.util.Random) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) IBufferCache(org.apache.hyracks.storage.common.buffercache.IBufferCache) TypeAwareTupleWriter(org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter) Test(org.junit.Test) AbstractBTreeTest(org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest)

Aggregations

IFileMapProvider (org.apache.hyracks.storage.common.file.IFileMapProvider)10 IBufferCache (org.apache.hyracks.storage.common.buffercache.IBufferCache)9 FileReference (org.apache.hyracks.api.io.FileReference)6 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)5 IIOManager (org.apache.hyracks.api.io.IIOManager)5 Test (org.junit.Test)5 ICachedPage (org.apache.hyracks.storage.common.buffercache.ICachedPage)4 DataOutput (java.io.DataOutput)2 ArrayList (java.util.ArrayList)2 Random (java.util.Random)2 VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame)2 IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext)2 ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer)2 ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits)2 ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder)2 FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender)2 AbstractBTreeTest (org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest)2 ITreeIndexMetadataFrameFactory (org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory)2 ByteBuffer (java.nio.ByteBuffer)1 HashMap (java.util.HashMap)1