Use of org.apache.hadoop.hive.common.io.encoded.MemoryBuffer in project hive by apache.
From the class EncodedReaderImpl, method releaseInitialRefcounts:
private void releaseInitialRefcounts(DiskRangeList current) {
  while (current != null) {
    DiskRangeList toFree = current;
    current = current.next;
    if (toFree instanceof ProcCacheChunk) {
      ProcCacheChunk pcc = (ProcCacheChunk) toFree;
      if (pcc.originalData != null) {
        // This can only happen in case of failure - we read some data, but didn't decompress
        // it. Deallocate the buffer directly, do not decref.
        if (pcc.getBuffer() != null) {
          cacheWrapper.getAllocator().deallocate(pcc.getBuffer());
        }
        continue;
      }
    }
    if (!(toFree instanceof CacheChunk)) {
      continue;
    }
    CacheChunk cc = (CacheChunk) toFree;
    if (cc.getBuffer() == null) {
      continue;
    }
    MemoryBuffer buffer = cc.getBuffer();
    cacheWrapper.releaseBuffer(buffer);
    cc.setBuffer(null);
  }
}
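
The method distinguishes two cleanup paths: a ProcCacheChunk that still holds originalData never made it into the cache (the read failed before decompression), so its buffer goes straight back to the allocator, while every other CacheChunk buffer carries an initial cache refcount that is dropped via releaseBuffer. A minimal, self-contained sketch of that walk-and-release pattern; all types and method names below are simplified stand-ins for illustration, not the actual Hive classes:

import java.util.concurrent.atomic.AtomicInteger;

class ReleaseSketch {
  static class Buffer { final AtomicInteger refCount = new AtomicInteger(1); }

  static class Chunk {
    Chunk next;          // singly linked, like DiskRangeList
    Buffer buffer;       // cached data, if any
    byte[] originalData; // non-null only if decompression never happened
  }

  // Stand-in for cacheWrapper.getAllocator().deallocate(...).
  static void deallocate(Buffer b) { /* return memory to the allocator */ }

  // Stand-in for cacheWrapper.releaseBuffer(...): drop one refcount,
  // freeing the buffer once nobody references it.
  static void releaseBuffer(Buffer b) {
    if (b.refCount.decrementAndGet() == 0) deallocate(b);
  }

  static void releaseInitialRefcounts(Chunk current) {
    while (current != null) {
      Chunk toFree = current;
      current = current.next; // advance before touching toFree
      if (toFree.originalData != null) {
        // Failure path: raw data was read but never decompressed into the
        // cache, so the buffer was never refcounted; free it directly.
        if (toFree.buffer != null) deallocate(toFree.buffer);
        continue;
      }
      if (toFree.buffer == null) continue;
      // Normal path: drop the initial refcount taken when the buffer
      // entered the cache, and unlink it from the chunk.
      releaseBuffer(toFree.buffer);
      toFree.buffer = null;
    }
  }
}

Note that advancing current before processing toFree is what lets the loop release nodes without losing its place in the list.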
Use of org.apache.hadoop.hive.common.io.encoded.MemoryBuffer in project hive by apache.
From the class TestLowLevelCacheImpl, method testCacheMetrics:
@Test
public void testCacheMetrics() {
  CreateHelper list = new CreateHelper();
  list.addOrMerge(0, 100, true, false);
  list.addOrMerge(100, 200, true, false);
  list.addOrMerge(200, 300, true, false);
  list.addOrMerge(300, 400, true, false);
  list.addOrMerge(400, 500, true, false);
  assertEquals(1, list.get().listSize());
  assertEquals(500, list.get().getTotalLength());

  list = new CreateHelper();
  list.addOrMerge(0, 100, false, false);
  list.addOrMerge(100, 200, false, false);
  list.addOrMerge(200, 300, false, false);
  list.addOrMerge(300, 400, false, false);
  list.addOrMerge(400, 500, false, false);
  assertEquals(5, list.get().listSize());
  assertEquals(500, list.get().getTotalLength());

  list = new CreateHelper();
  list.addOrMerge(0, 100, true, false);
  list.addOrMerge(100, 200, true, false);
  list.addOrMerge(200, 300, false, false);
  list.addOrMerge(300, 400, true, false);
  list.addOrMerge(400, 500, true, false);
  assertEquals(2, list.get().listSize());
  assertEquals(500, list.get().getTotalLength());
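
  // Note on the asserts above: the third addOrMerge argument (doMerge) controls
  // coalescing with an adjacent predecessor. Five contiguous merging ranges
  // collapse into a single [0, 500) node; five non-merging ranges stay as five
  // nodes; and one non-merging range at [200, 300) breaks the chain into
  // [0, 200) and [200, 500), i.e. two nodes. Total length is 500 in every case.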
  LlapDaemonCacheMetrics metrics = LlapDaemonCacheMetrics.create("test", "1");
  LowLevelCacheImpl cache = new LowLevelCacheImpl(metrics, new DummyCachePolicy(),
      new DummyAllocator(), true, -1); // -1: no cleanup thread
  long fn = 1;
  MemoryBuffer[] fakes = new MemoryBuffer[] { fb(), fb(), fb() };
  cache.putFileData(fn, new DiskRange[] { dr(0, 100), dr(300, 500), dr(800, 1000) },
      fakes, 0, Priority.NORMAL, null, null);
  assertEquals(0, metrics.getCacheRequestedBytes());
  assertEquals(0, metrics.getCacheHitBytes());

  list = new CreateHelper();
  list.addOrMerge(0, 1000, true, false);
  cache.getFileData(fn, list.get(), 0, testFactory, null, null);
  assertEquals(1000, metrics.getCacheRequestedBytes());
  assertEquals(500, metrics.getCacheHitBytes());

  list = new CreateHelper();
  list.addOrMerge(0, 100, true, false);
  cache.getFileData(fn, list.get(), 0, testFactory, null, null);
  assertEquals(1100, metrics.getCacheRequestedBytes());
  assertEquals(600, metrics.getCacheHitBytes());

  list = new CreateHelper();
  list.addOrMerge(0, 100, true, false);
  list.addOrMerge(300, 500, true, false);
  list.addOrMerge(800, 1000, true, false);
  cache.getFileData(fn, list.get(), 0, testFactory, null, null);
  assertEquals(1600, metrics.getCacheRequestedBytes());
  assertEquals(1100, metrics.getCacheHitBytes());

  list = new CreateHelper();
  list.addOrMerge(300, 500, true, false);
  list.addOrMerge(1000, 2000, true, false);
  cache.getFileData(fn, list.get(), 0, testFactory, null, null);
  assertEquals(2800, metrics.getCacheRequestedBytes());
  assertEquals(1300, metrics.getCacheHitBytes());
}
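
The expected numbers follow directly from what was cached for file 1: [0, 100), [300, 500), and [800, 1000), i.e. 500 cached bytes. Each getFileData call adds the full request size to cacheRequestedBytes and only the overlap with cached ranges to cacheHitBytes. Working through the four requests: [0, 1000) adds 1000 requested and 500 hit (totals 1000/500); [0, 100) adds 100/100 (1100/600); requesting exactly the three cached ranges adds 500/500 (1600/1100); and [300, 500) plus the entirely uncached [1000, 2000) adds 1200 requested but only 200 hit (2800/1300).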