Use of com.facebook.presto.RowPagesBuilder in project presto by prestodb.
From the class TestHashAggregationOperator, method testSpillerFailure.
@Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".* Failed to spill")
public void testSpillerFailure()
{
    MetadataManager metadata = MetadataManager.createTestMetadataManager();
    InternalAggregationFunction maxVarcharColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation(
            new Signature("max", AGGREGATE, parseTypeSignature(StandardTypes.VARCHAR), parseTypeSignature(StandardTypes.VARCHAR)));
    List<Integer> hashChannels = Ints.asList(1);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(false, hashChannels, VARCHAR, BIGINT, VARCHAR, BIGINT);
    List<Page> input = rowPagesBuilder
            .addSequencePage(10, 100, 0, 100, 0)
            .addSequencePage(10, 100, 0, 200, 0)
            .addSequencePage(10, 100, 0, 300, 0)
            .build();
    DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(10, Unit.BYTE))
            .addPipelineContext(0, true, true)
            .addDriverContext();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            hashChannels,
            ImmutableList.of(),
            Step.SINGLE,
            ImmutableList.of(
                    COUNT.bind(ImmutableList.of(0), Optional.empty()),
                    LONG_SUM.bind(ImmutableList.of(3), Optional.empty()),
                    LONG_AVERAGE.bind(ImmutableList.of(3), Optional.empty()),
                    maxVarcharColumn.bind(ImmutableList.of(2), Optional.empty())),
            rowPagesBuilder.getHashChannel(),
            Optional.empty(),
            100_000,
            new DataSize(16, MEGABYTE),
            true,
            succinctBytes(8),
            succinctBytes(Integer.MAX_VALUE),
            new FailingSpillerFactory(),
            joinCompiler);
    toPages(operatorFactory, driverContext, input);
}
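For context, the three sequence pages above produce overlapping group-by keys, and the deliberately tiny memory limits force the aggregation to spill, which the FailingSpillerFactory then rejects. Below is a minimal sketch of the generated input; it reuses the test class's static imports and assumes addSequencePage(count, start...) fills each channel with count consecutive values starting at the given initial value, rendered as decimal strings for VARCHAR channels.

// Assumed row shapes (not asserted by the original test):
//   page 1: ("100", 0, "100", 0) .. ("109", 9, "109", 9)
//   page 2: ("100", 0, "200", 0) .. ("109", 9, "209", 9)
//   page 3: ("100", 0, "300", 0) .. ("109", 9, "309", 9)
// Grouping on hash channel 1 therefore yields 10 groups of 3 rows each.
List<Page> sketchInput = rowPagesBuilder(false, Ints.asList(1), VARCHAR, BIGINT, VARCHAR, BIGINT)
        .addSequencePage(10, 100, 0, 100, 0)
        .addSequencePage(10, 100, 0, 200, 0)
        .addSequencePage(10, 100, 0, 300, 0)
        .build();
assertEquals(sketchInput.size(), 3);
assertEquals(sketchInput.get(0).getPositionCount(), 10);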
Use of com.facebook.presto.RowPagesBuilder in project presto by prestodb.
From the class TestHashAggregationOperator, method testMultiSliceAggregationOutput.
@Test(dataProvider = "hashEnabled")
public void testMultiSliceAggregationOutput(boolean hashEnabled)
{
    // estimate the number of entries required to create 1.5 pages of results
    int fixedWidthSize = SIZE_OF_LONG + SIZE_OF_DOUBLE + SIZE_OF_DOUBLE;
    int multiSlicePositionCount = (int) (1.5 * PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES / fixedWidthSize);
    multiSlicePositionCount = Math.min((int) (1.5 * DEFAULT_MAX_BLOCK_SIZE_IN_BYTES / SIZE_OF_DOUBLE), multiSlicePositionCount);
    List<Integer> hashChannels = Ints.asList(1);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT, BIGINT);
    List<Page> input = rowPagesBuilder.addSequencePage(multiSlicePositionCount, 0, 0).build();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            hashChannels,
            ImmutableList.of(),
            Step.SINGLE,
            ImmutableList.of(
                    COUNT.bind(ImmutableList.of(0), Optional.empty()),
                    LONG_AVERAGE.bind(ImmutableList.of(1), Optional.empty())),
            rowPagesBuilder.getHashChannel(),
            Optional.empty(),
            100_000,
            new DataSize(16, MEGABYTE),
            joinCompiler);
    assertEquals(toPages(operatorFactory, driverContext, input).size(), 2);
}
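Working through the estimate with the usual Presto defaults (assumed here: 8-byte longs and doubles, a 1 MB PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES, and the 64 KB DEFAULT_MAX_BLOCK_SIZE_IN_BYTES mentioned in the next example) gives 12,288 input positions. That is enough to fill roughly 1.5 maximum-size output blocks, which is why the assertion expects the operator to emit exactly 2 output pages.

// Hypothetical arithmetic under the assumed defaults; not taken from the original test.
int fixedWidthBytes = 8 + 8 + 8;                              // 24 bytes of fixed-width output per group
int byPageLimit = (int) (1.5 * 1_048_576 / fixedWidthBytes);  // 65_536 positions fill 1.5 maximum-size pages
int byBlockLimit = (int) (1.5 * 65_536 / 8);                  // 12_288 positions fill 1.5 maximum-size blocks
int positions = Math.min(byPageLimit, byBlockLimit);          // 12_288, the multiSlicePositionCount used above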
Use of com.facebook.presto.RowPagesBuilder in project presto by prestodb.
From the class TestHashAggregationOperator, method testHashBuilderResizeLimit.
@Test(dataProvider = "hashEnabled", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded local memory limit of 3MB")
public void testHashBuilderResizeLimit(boolean hashEnabled)
{
    BlockBuilder builder = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1, DEFAULT_MAX_BLOCK_SIZE_IN_BYTES);
    // this must be larger than DEFAULT_MAX_BLOCK_SIZE, 64K
    VARCHAR.writeSlice(builder, Slices.allocate(5_000_000));
    builder.build();
    List<Integer> hashChannels = Ints.asList(0);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR);
    List<Page> input = rowPagesBuilder
            .addSequencePage(10, 100)
            .addBlocksPage(builder.build())
            .addSequencePage(10, 100)
            .build();
    DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(3, MEGABYTE))
            .addPipelineContext(0, true, true)
            .addDriverContext();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(VARCHAR),
            hashChannels,
            ImmutableList.of(),
            Step.SINGLE,
            ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty())),
            rowPagesBuilder.getHashChannel(),
            Optional.empty(),
            100_000,
            new DataSize(16, MEGABYTE),
            joinCompiler);
    toPages(operatorFactory, driverContext, input);
}
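The middle input page is what breaks the limit: it carries a single value of roughly 5 MB, so copying it into the group-by hash pushes the operator past the 3 MB limit set on the task context, and toPages is expected to fail with ExceededMemoryLimitException. A minimal sketch of just that oversized page, assuming Slices.allocate returns a zero-filled slice of the requested length and reusing the test's static imports:

// Hypothetical standalone sketch of the ~5 MB single-value page (not part of the original test).
BlockBuilder oversized = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1, DEFAULT_MAX_BLOCK_SIZE_IN_BYTES);
VARCHAR.writeSlice(oversized, Slices.allocate(5_000_000)); // one value, far beyond the 64 KB default block size
Page oversizedPage = new Page(oversized.build());
assertEquals(oversizedPage.getPositionCount(), 1);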
Use of com.facebook.presto.RowPagesBuilder in project presto by prestodb.
From the class TestHashJoinOperator, method testInnerJoin.
@Test(dataProvider = "hashEnabledValues")
public void testInnerJoin(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled)
        throws Exception
{
    TaskContext taskContext = createTaskContext();

    // build
    RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    LookupSourceFactory lookupSourceFactory = buildHash(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.empty());

    // probe
    RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT));
    List<Page> probeInput = probePages.addSequencePage(1000, 0, 1000, 2000).build();
    OperatorFactory joinOperatorFactory = LOOKUP_JOIN_OPERATORS.innerJoin(
            0,
            new PlanNodeId("test"),
            lookupSourceFactory,
            probePages.getTypes(),
            Ints.asList(0),
            probePages.getHashChannel(),
            Optional.empty());

    // expected
    MaterializedResult expected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probePages.getTypes(), buildPages.getTypes()))
            .row("20", 1020L, 2020L, "20", 30L, 40L)
            .row("21", 1021L, 2021L, "21", 31L, 41L)
            .row("22", 1022L, 2022L, "22", 32L, 42L)
            .row("23", 1023L, 2023L, "23", 33L, 43L)
            .row("24", 1024L, 2024L, "24", 34L, 44L)
            .row("25", 1025L, 2025L, "25", 35L, 45L)
            .row("26", 1026L, 2026L, "26", 36L, 46L)
            .row("27", 1027L, 2027L, "27", 37L, 47L)
            .row("28", 1028L, 2028L, "28", 38L, 48L)
            .row("29", 1029L, 2029L, "29", 39L, 49L)
            .build();

    assertOperatorEquals(
            joinOperatorFactory,
            taskContext.addPipelineContext(0, true, true).addDriverContext(),
            probeInput,
            expected,
            true,
            getHashChannels(probePages, buildPages));
}
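The ten expected rows follow directly from the two key ranges. Assuming addSequencePage renders the VARCHAR key channel as decimal strings of the sequence (which the expected values above rely on), the build side holds keys "20" through "29" while the probe side covers "0" through "999", so only ten probe rows find a match. A reduced sketch of the two inputs with hashing disabled, reusing the test's helpers:

RowPagesBuilder build = rowPagesBuilder(false, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
        .addSequencePage(10, 20, 30, 40);        // build rows ("20", 30, 40) .. ("29", 39, 49)
List<Page> probe = rowPagesBuilder(false, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
        .addSequencePage(1000, 0, 1000, 2000)    // probe rows ("0", 1000, 2000) .. ("999", 1999, 2999)
        .build();
// Inner join on channel 0: only probe keys "20".."29" match, producing the 10 expected rows above.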
Use of com.facebook.presto.RowPagesBuilder in project presto by prestodb.
From the class TestHashJoinOperator, method testOuterJoinWithNullOnBothSides.
@Test(dataProvider = "hashEnabledValues")
public void testOuterJoinWithNullOnBothSides(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled)
        throws Exception
{
    TaskContext taskContext = createTaskContext();

    // build
    RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR))
            .row("a")
            .row((String) null)
            .row((String) null)
            .row("a")
            .row("b");
    LookupSourceFactory lookupSourceFactory = buildHash(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.empty());

    // probe
    List<Type> probeTypes = ImmutableList.of(VARCHAR);
    RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = probePages.row("a").row("b").row((String) null).row("c").build();
    OperatorFactory joinOperatorFactory = LOOKUP_JOIN_OPERATORS.probeOuterJoin(
            0,
            new PlanNodeId("test"),
            lookupSourceFactory,
            probePages.getTypes(),
            Ints.asList(0),
            probePages.getHashChannel(),
            Optional.empty());

    // expected
    MaterializedResult expected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
            .row("a", "a")
            .row("a", "a")
            .row("b", "b")
            .row(null, null)
            .row("c", null)
            .build();

    assertOperatorEquals(
            joinOperatorFactory,
            taskContext.addPipelineContext(0, true, true).addDriverContext(),
            probeInput,
            expected,
            true,
            getHashChannels(probePages, buildPages));
}
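The expected rows encode standard SQL join semantics for NULL keys: a NULL never matches anything, but a probe-outer join still emits every probe row, padding the build side with NULLs. A commented restatement of the expected result under that assumption:

// Derivation of the expected output (probe column first, then build column).
MaterializedResult expectedSketch = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildPages.getTypes()))
        .row("a", "a")      // probe "a" matches the first build "a"
        .row("a", "a")      // probe "a" matches the second build "a"
        .row("b", "b")      // probe "b" matches build "b"
        .row(null, null)    // probe NULL matches nothing; build side padded with NULL
        .row("c", null)     // probe "c" has no matching build key; build side padded with NULL
        .build();
// The two NULL build rows never match any probe key and therefore do not appear in the output.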