Example use of com.facebook.presto.common.Page in the Presto project (prestodb): class TestAggregationOperator, method testMemoryTracking.
/**
 * Verifies that the aggregation operator charges its memory to exactly one pool
 * (system or user, depending on {@code useSystemMemory}) while running, and that
 * all memory is released once the operator is closed.
 */
private void testMemoryTracking(boolean useSystemMemory)
        throws Exception
{
    // Single input page with 100 sequential BIGINT values.
    Page inputPage = getOnlyElement(rowPagesBuilder(BIGINT).addSequencePage(100, 0).build());

    OperatorFactory factory = new AggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            Step.SINGLE,
            ImmutableList.of(LONG_SUM.bind(ImmutableList.of(0), Optional.empty())),
            useSystemMemory);
    DriverContext context = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
            .addPipelineContext(0, true, true, false)
            .addDriverContext();

    try (Operator operator = factory.createOperator(context)) {
        assertTrue(operator.needsInput());
        operator.addInput(inputPage);

        // Exactly one of the two pools should show the operator's usage.
        if (useSystemMemory) {
            assertThat(context.getSystemMemoryUsage()).isGreaterThan(0);
            assertEquals(context.getMemoryUsage(), 0);
        }
        else {
            assertEquals(context.getSystemMemoryUsage(), 0);
            assertThat(context.getMemoryUsage()).isGreaterThan(0);
        }
        toPages(operator, emptyIterator());
    }

    // Closing the operator must release everything in both pools.
    assertEquals(context.getSystemMemoryUsage(), 0);
    assertEquals(context.getMemoryUsage(), 0);
}
Example use of com.facebook.presto.common.Page in the Presto project (prestodb): class TestFileFragmentResultCacheManager, method testBasic.
/**
 * End-to-end sanity check of the file-backed fragment result cache: miss on an
 * empty cache, hit on an empty page list, hit on real pages, misses on
 * plan/split mismatches, and full invalidation. The repeated miss/hit/entries
 * triplet is factored into {@link #assertCacheStats} to remove duplication.
 */
@Test(timeOut = 30_000)
public void testBasic()
        throws Exception
{
    URI cacheDirectory = getNewCacheDirectory("testBasic");
    FragmentCacheStats stats = new FragmentCacheStats();
    FileFragmentResultCacheManager cacheManager = fileFragmentResultCacheManager(stats, cacheDirectory);

    // Test fetching new fragment. Current cache status: empty
    assertFalse(cacheManager.get(SERIALIZED_PLAN_FRAGMENT_1, SPLIT_1).isPresent());
    assertCacheStats(stats, 1, 0, 0);
    assertEquals(stats.getCacheSizeInBytes(), 0);

    // Test empty page. Current cache status: empty
    cacheManager.put(SERIALIZED_PLAN_FRAGMENT_1, SPLIT_1, ImmutableList.of()).get();
    Optional<Iterator<Page>> result = cacheManager.get(SERIALIZED_PLAN_FRAGMENT_1, SPLIT_1);
    assertTrue(result.isPresent());
    assertFalse(result.get().hasNext());
    assertCacheStats(stats, 1, 1, 1);
    // An empty page list occupies no bytes on disk.
    assertEquals(stats.getCacheSizeInBytes(), 0);

    // Test non-empty page. Current cache status: { (plan1, split1) -> [] }
    List<Page> pages = ImmutableList.of(new Page(createStringsBlock("plan-1-split-2")));
    cacheManager.put(SERIALIZED_PLAN_FRAGMENT_2, SPLIT_2, pages).get();
    result = cacheManager.get(SERIALIZED_PLAN_FRAGMENT_2, SPLIT_2);
    assertTrue(result.isPresent());
    assertPagesEqual(result.get(), pages.iterator());
    assertCacheStats(stats, 1, 2, 2);
    assertEquals(stats.getCacheSizeInBytes(), getCachePhysicalSize(cacheDirectory));

    // Test cache miss for plan mismatch and split mismatch.
    // Current cache status: { (plan1, split1) -> [], (plan2, split2) -> ["plan-1-split-2"] }
    cacheManager.get(SERIALIZED_PLAN_FRAGMENT_1, SPLIT_2);
    assertCacheStats(stats, 2, 2, 2);
    cacheManager.get(SERIALIZED_PLAN_FRAGMENT_2, SPLIT_1);
    assertCacheStats(stats, 3, 2, 2);
    assertEquals(stats.getCacheSizeInBytes(), getCachePhysicalSize(cacheDirectory));

    // Test cache invalidation: both entries are removed and the size drops to zero.
    cacheManager.invalidateAllCache();
    assertCacheStats(stats, 3, 2, 0);
    assertEquals(stats.getCacheRemoval(), 2);
    assertEquals(stats.getCacheSizeInBytes(), 0);

    cleanupCacheDirectory(cacheDirectory);
}

/**
 * Asserts the miss/hit/entries counters in one call to cut down on repetition.
 */
private static void assertCacheStats(FragmentCacheStats stats, long expectedMiss, long expectedHit, long expectedEntries)
{
    assertEquals(stats.getCacheMiss(), expectedMiss);
    assertEquals(stats.getCacheHit(), expectedHit);
    assertEquals(stats.getCacheEntries(), expectedEntries);
}
Example use of com.facebook.presto.common.Page in the Presto project (prestodb): class TestGroupByHash, method testForceRehash.
/**
 * Verifies that a group-by hash created with a tiny expected size still retains
 * every group after being forced to rehash by a much larger input page.
 */
@Test
public void testForceRehash()
{
    // Build a page whose position count (100) far exceeds the hash's expected size.
    Block values = BlockAssertions.createStringSequenceBlock(0, 100);
    Block hashes = TypeUtils.getHashBlock(ImmutableList.of(VARCHAR), values);
    Page page = new Page(values, hashes);

    // Expected size of 4 guarantees at least one rehash while adding 100 groups.
    GroupByHash groupByHash = createGroupByHash(TEST_SESSION, ImmutableList.of(VARCHAR), new int[] {0}, Optional.of(1), 4, JOIN_COMPILER);
    groupByHash.getGroupIds(page).process();

    // Every position must still be present after rehashing.
    for (int position = 0; position < values.getPositionCount(); position++) {
        assertTrue(groupByHash.contains(position, page, CONTAINS_CHANNELS));
    }
}
Example use of com.facebook.presto.common.Page in the Presto project (prestodb): class TestGroupByHash, method testAppendToMultipleTuplesPerGroup.
/**
 * Verifies that when several input rows map to the same group, appending each
 * group's values back out produces exactly one row per distinct group.
 */
@Test
public void testAppendToMultipleTuplesPerGroup()
{
    // 100 input values covering 50 distinct groups (each group appears twice).
    List<Long> inputValues = new ArrayList<>();
    for (long value = 0; value < 100; value++) {
        inputValues.add(value % 50);
    }
    Block valuesBlock = BlockAssertions.createLongsBlock(inputValues);
    Block hashBlock = TypeUtils.getHashBlock(ImmutableList.of(BIGINT), valuesBlock);

    GroupByHash groupByHash = createGroupByHash(TEST_SESSION, ImmutableList.of(BIGINT), new int[] {0}, Optional.of(1), 100, JOIN_COMPILER);
    groupByHash.getGroupIds(new Page(valuesBlock, hashBlock)).process();
    assertEquals(groupByHash.getGroupCount(), 50);

    // Append each group's value into a page builder, one position per group.
    PageBuilder pageBuilder = new PageBuilder(groupByHash.getTypes());
    for (int groupId = 0; groupId < groupByHash.getGroupCount(); groupId++) {
        pageBuilder.declarePosition();
        groupByHash.appendValuesTo(groupId, pageBuilder, 0);
    }

    // The output must be exactly the 50 distinct group values, in group-id order.
    Page output = pageBuilder.build();
    assertEquals(output.getPositionCount(), 50);
    BlockAssertions.assertBlockEquals(BIGINT, output.getBlock(0), BlockAssertions.createLongSequenceBlock(0, 50));
}
Example use of com.facebook.presto.common.Page in the Presto project (prestodb): class TestGroupByHash, method testNullGroup.
/**
 * Verifies that a null group survives a rehash and is not confused with the
 * value zero: after inserting null plus enough values to force a rehash,
 * a lookup for 0 (never inserted) must miss.
 */
@Test
public void testNullGroup()
{
    GroupByHash groupByHash = createGroupByHash(TEST_SESSION, ImmutableList.of(BIGINT), new int[] {0}, Optional.of(1), 100, JOIN_COMPILER);

    // Insert a single null value as the first group.
    Block nullBlock = createLongsBlock((Long) null);
    groupByHash.addPage(new Page(nullBlock, getHashBlock(ImmutableList.of(BIGINT), nullBlock))).process();

    // Add enough values to force a rehash.
    Block sequenceBlock = createLongSequenceBlock(1, 132748);
    groupByHash.addPage(new Page(sequenceBlock, getHashBlock(ImmutableList.of(BIGINT), sequenceBlock))).process();

    // Zero was never inserted, so the null group must not match it.
    Block zeroBlock = createLongsBlock(0);
    Page zeroPage = new Page(zeroBlock, getHashBlock(ImmutableList.of(BIGINT), zeroBlock));
    assertFalse(groupByHash.contains(0, zeroPage, CONTAINS_CHANNELS));
}
Aggregations