Search in sources :

Example 21 with MemoryBuffer

Use of org.apache.hadoop.hive.common.io.encoded.MemoryBuffer in the Apache Hive project.

From the class EncodedReaderImpl, the method releaseInitialRefcounts:

/**
 * Releases the initial reference counts taken on cached buffers for every
 * chunk in the given disk range list. A ProcCacheChunk that still carries its
 * original data has a buffer that was never published to the cache, so that
 * buffer is deallocated directly; ordinary CacheChunk buffers are released
 * back through the cache wrapper and detached from their chunk.
 *
 * @param current head of the disk range list to walk; may be null
 */
private void releaseInitialRefcounts(DiskRangeList current) {
    for (DiskRangeList node = current; node != null; ) {
        DiskRangeList chunk = node;
        // Advance before touching the chunk, since releasing may detach it.
        node = node.next;
        boolean deallocatedDirectly = false;
        if (chunk instanceof ProcCacheChunk) {
            ProcCacheChunk proc = (ProcCacheChunk) chunk;
            if (proc.originalData != null) {
                // The buffer was never handed to the cache; deallocate it
                // directly rather than decrementing a refcount.
                MemoryBuffer procBuf = proc.getBuffer();
                if (procBuf != null) {
                    cacheWrapper.getAllocator().deallocate(procBuf);
                }
                deallocatedDirectly = true;
            }
        }
        if (deallocatedDirectly || !(chunk instanceof CacheChunk)) {
            continue;
        }
        CacheChunk cached = (CacheChunk) chunk;
        MemoryBuffer buf = cached.getBuffer();
        if (buf != null) {
            cacheWrapper.releaseBuffer(buf);
            cached.setBuffer(null);
        }
    }
}
Also used : MemoryBuffer(org.apache.hadoop.hive.common.io.encoded.MemoryBuffer) DiskRangeList(org.apache.hadoop.hive.common.io.DiskRangeList)

Example 22 with MemoryBuffer

Use of org.apache.hadoop.hive.common.io.encoded.MemoryBuffer in the Apache Hive project.

From the class TestLowLevelCacheImpl, the method testCacheMetrics:

@Test
public void testCacheMetrics() {
    // Adjacent ranges added with merge=true collapse into a single entry.
    CreateHelper list = new CreateHelper();
    for (int off = 0; off < 500; off += 100) {
        list.addOrMerge(off, off + 100, true, false);
    }
    assertEquals(1, list.get().listSize());
    assertEquals(500, list.get().getTotalLength());

    // With merge=false every range stays a separate entry.
    list = new CreateHelper();
    for (int off = 0; off < 500; off += 100) {
        list.addOrMerge(off, off + 100, false, false);
    }
    assertEquals(5, list.get().listSize());
    assertEquals(500, list.get().getTotalLength());

    // A single non-merging range in the middle splits the list in two.
    boolean[] mergeFlags = { true, true, false, true, true };
    list = new CreateHelper();
    for (int i = 0; i < mergeFlags.length; ++i) {
        list.addOrMerge(i * 100, (i + 1) * 100, mergeFlags[i], false);
    }
    assertEquals(2, list.get().listSize());
    assertEquals(500, list.get().getTotalLength());

    // Populate a cache with 500 bytes of data spread over three ranges,
    // then verify the requested/hit byte counters after each read.
    LlapDaemonCacheMetrics metrics = LlapDaemonCacheMetrics.create("test", "1");
    LowLevelCacheImpl cache = new LowLevelCacheImpl(metrics, new DummyCachePolicy(),
        new DummyAllocator(), true, /* no cleanup thread */ -1);
    long fn = 1;
    MemoryBuffer[] fakes = new MemoryBuffer[] { fb(), fb(), fb() };
    cache.putFileData(fn, new DiskRange[] { dr(0, 100), dr(300, 500), dr(800, 1000) },
        fakes, 0, Priority.NORMAL, null, null);
    assertEquals(0, metrics.getCacheRequestedBytes());
    assertEquals(0, metrics.getCacheHitBytes());

    // Each row: requested offset pairs, then cumulative requested/hit bytes.
    int[][] requests = {
        { 0, 1000 },
        { 0, 100 },
        { 0, 100, 300, 500, 800, 1000 },
        { 300, 500, 1000, 2000 }
    };
    long[] expectedRequested = { 1000, 1100, 1600, 2800 };
    long[] expectedHit = { 500, 600, 1100, 1300 };
    for (int i = 0; i < requests.length; ++i) {
        CreateHelper req = new CreateHelper();
        int[] bounds = requests[i];
        for (int j = 0; j < bounds.length; j += 2) {
            req.addOrMerge(bounds[j], bounds[j + 1], true, false);
        }
        cache.getFileData(fn, req.get(), 0, testFactory, null, null);
        assertEquals(expectedRequested[i], metrics.getCacheRequestedBytes());
        assertEquals(expectedHit[i], metrics.getCacheHitBytes());
    }
}
Also used : CreateHelper(org.apache.hadoop.hive.common.io.DiskRangeList.CreateHelper) MemoryBuffer(org.apache.hadoop.hive.common.io.encoded.MemoryBuffer) LlapDaemonCacheMetrics(org.apache.hadoop.hive.llap.metrics.LlapDaemonCacheMetrics) Test(org.junit.Test)

Aggregations

MemoryBuffer (org.apache.hadoop.hive.common.io.encoded.MemoryBuffer)22 ByteBuffer (java.nio.ByteBuffer)12 DiskRangeList (org.apache.hadoop.hive.common.io.DiskRangeList)10 DiskRange (org.apache.hadoop.hive.common.io.DiskRange)6 IOException (java.io.IOException)5 CreateHelper (org.apache.hadoop.hive.common.io.DiskRangeList.CreateHelper)4 ColumnStreamData (org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData)4 OrcProto (org.apache.orc.OrcProto)4 BufferChunk (org.apache.orc.impl.BufferChunk)4 CodedInputStream (com.google.protobuf.CodedInputStream)2 InputStream (java.io.InputStream)2 ArrayList (java.util.ArrayList)2 IdentityHashMap (java.util.IdentityHashMap)2 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)2 MutateHelper (org.apache.hadoop.hive.common.io.DiskRangeList.MutateHelper)2 LlapBufferOrBuffers (org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers)2 CacheChunk (org.apache.hadoop.hive.ql.io.orc.encoded.CacheChunk)2 Stream (org.apache.orc.OrcProto.Stream)2 Kind (org.apache.orc.OrcProto.Stream.Kind)2 InStream (org.apache.orc.impl.InStream)2