
Example 11 with RowPagesBuilder

Use of io.trino.RowPagesBuilder in project trino by trinodb.

From class TestStreamingAggregationOperator, method testSinglePage.

@Test
public void testSinglePage() {
    RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(BOOLEAN, VARCHAR, BIGINT);
    List<Page> input = rowPagesBuilder.row(false, "a", 5).build();
    MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT).row("a", 1L, 5L).build();
    assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
Also used: RowPagesBuilder (io.trino.RowPagesBuilder), Page (io.trino.spi.Page), MaterializedResult (io.trino.testing.MaterializedResult), Test (org.testng.annotations.Test)
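
The single-row case above is the smallest possible input. As a rough sketch of how the same builder is commonly chained to produce multi-page input in other Trino operator tests (the column types, values, and the pageBreak() call here are illustrative assumptions, not taken from the example above):

// Hypothetical multi-page input over (VARCHAR, BIGINT) columns;
// pageBreak() closes the current page so the next row() starts a new one.
List<Page> pages = RowPagesBuilder.rowPagesBuilder(VARCHAR, BIGINT)
        .row("a", 1L)
        .row("a", 2L)
        .pageBreak()
        .row("b", 3L)
        .build();
// pages now holds two Page objects, consumed by the operator in order.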

Example 12 with RowPagesBuilder

Use of io.trino.RowPagesBuilder in project trino by trinodb.

From class TestStreamingAggregationOperator, method testLargeInputPage.

@Test
public void testLargeInputPage() {
    RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(BOOLEAN, VARCHAR, BIGINT);
    List<Page> input = rowPagesBuilder.addSequencePage(1_000_000, 0, 0, 1).build();
    MaterializedResult.Builder expectedBuilder = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT);
    for (int i = 0; i < 1_000_000; ++i) {
        expectedBuilder.row(String.valueOf(i), 1L, i + 1L);
    }
    assertOperatorEquals(operatorFactory, driverContext, input, expectedBuilder.build());
}
Also used: RowPagesBuilder (io.trino.RowPagesBuilder), Page (io.trino.spi.Page), MaterializedResult (io.trino.testing.MaterializedResult), Test (org.testng.annotations.Test)
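
For reference, addSequencePage(rowCount, initialValues...) appends a single page whose channels are ascending sequences starting at the given per-channel offsets; the VARCHAR channel receives the string form of each value, which is why the expected result above uses String.valueOf(i). A smaller, hypothetical call showing the shape of that data:

// Illustrative only: three rows over (VARCHAR, BIGINT, BIGINT),
// produced as ("0", 0, 1), ("1", 1, 2), ("2", 2, 3).
List<Page> smallInput = RowPagesBuilder.rowPagesBuilder(VARCHAR, BIGINT, BIGINT)
        .addSequencePage(3, 0, 0, 1)
        .build();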

Example 13 with RowPagesBuilder

Use of io.trino.RowPagesBuilder in project trino by trinodb.

From class TestHashAggregationOperator, method testHashAggregationWithGlobals.

@Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
public void testHashAggregationWithGlobals(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory) {
    TestingAggregationFunction countVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction(QualifiedName.of("count"), fromTypes(VARCHAR));
    TestingAggregationFunction countBooleanColumn = FUNCTION_RESOLUTION.getAggregateFunction(QualifiedName.of("count"), fromTypes(BOOLEAN));
    TestingAggregationFunction maxVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction(QualifiedName.of("max"), fromTypes(VARCHAR));
    Optional<Integer> groupIdChannel = Optional.of(1);
    List<Integer> groupByChannels = Ints.asList(1, 2);
    List<Integer> globalAggregationGroupIds = Ints.asList(42, 49);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, groupByChannels, VARCHAR, VARCHAR, VARCHAR, BIGINT, BIGINT, BOOLEAN);
    List<Page> input = rowPagesBuilder.build();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT), groupByChannels, globalAggregationGroupIds, SINGLE, true,
            ImmutableList.of(
                    COUNT.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty()),
                    LONG_MIN.createAggregatorFactory(SINGLE, ImmutableList.of(4), OptionalInt.empty()),
                    LONG_AVERAGE.createAggregatorFactory(SINGLE, ImmutableList.of(4), OptionalInt.empty()),
                    maxVarcharColumn.createAggregatorFactory(SINGLE, ImmutableList.of(2), OptionalInt.empty()),
                    countVarcharColumn.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty()),
                    countBooleanColumn.createAggregatorFactory(SINGLE, ImmutableList.of(5), OptionalInt.empty())),
            rowPagesBuilder.getHashChannel(), groupIdChannel, 100_000, Optional.of(DataSize.of(16, MEGABYTE)),
            spillEnabled, succinctBytes(memoryLimitForMerge), succinctBytes(memoryLimitForMergeWithMemory),
            spillerFactory, joinCompiler, blockTypeOperators, Optional.empty());
    DriverContext driverContext = createDriverContext(memoryLimitForMerge);
    // With no input rows, only the two global aggregation group IDs (42 and 49) produce output rows
    MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT)
            .row(null, 42L, 0L, null, null, null, 0L, 0L)
            .row(null, 49L, 0L, null, null, null, 0L, 0L)
            .build();
    assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected, hashEnabled, Optional.of(groupByChannels.size()), revokeMemoryWhenAddingPages);
}
Also used: PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), RowPagesBuilder (io.trino.RowPagesBuilder), Page (io.trino.spi.Page), MaterializedResult (io.trino.testing.MaterializedResult), OperatorAssertion.toMaterializedResult (io.trino.operator.OperatorAssertion.toMaterializedResult), TestingAggregationFunction (io.trino.operator.aggregation.TestingAggregationFunction), HashAggregationOperatorFactory (io.trino.operator.HashAggregationOperator.HashAggregationOperatorFactory), Test (org.testng.annotations.Test)
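
The test is parameterized through a TestNG data provider that is not shown on this page. A minimal sketch of what a provider with that name and shape could look like; the concrete parameter combinations below are assumptions, not the ones actually used by TestHashAggregationOperator:

// Hypothetical provider: each row supplies (hashEnabled, spillEnabled,
// revokeMemoryWhenAddingPages, memoryLimitForMerge, memoryLimitForMergeWithMemory).
@DataProvider(name = "hashEnabledAndMemoryLimitForMergeValues")
public static Object[][] hashEnabledAndMemoryLimitForMergeValues() {
    return new Object[][] {
            {true, true, true, 8L, (long) Integer.MAX_VALUE},
            {true, false, false, 0L, 0L},
            {false, true, true, 8L, (long) Integer.MAX_VALUE},
            {false, false, false, 0L, 0L},
    };
}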

Example 14 with RowPagesBuilder

Use of io.trino.RowPagesBuilder in project trino by trinodb.

From class TestHashAggregationOperator, method testSpillerFailure.

@Test
public void testSpillerFailure() {
    TestingAggregationFunction maxVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction(QualifiedName.of("max"), fromTypes(VARCHAR));
    List<Integer> hashChannels = Ints.asList(1);
    ImmutableList<Type> types = ImmutableList.of(VARCHAR, BIGINT, VARCHAR, BIGINT);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(false, hashChannels, types);
    List<Page> input = rowPagesBuilder.addSequencePage(10, 100, 0, 100, 0).addSequencePage(10, 100, 0, 200, 0).addSequencePage(10, 100, 0, 300, 0).build();
    DriverContext driverContext = TestingTaskContext.builder(executor, scheduledExecutor, TEST_SESSION)
            .setQueryMaxMemory(DataSize.valueOf("7MB"))
            .setMemoryPoolSize(DataSize.valueOf("1GB"))
            .build()
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    // An 8-byte spill threshold forces a spill on the first pages, so the
    // FailingSpillerFactory surfaces the expected "Failed to spill" error
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0, new PlanNodeId("test"), ImmutableList.of(BIGINT), hashChannels, ImmutableList.of(), SINGLE, false,
            ImmutableList.of(
                    COUNT.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty()),
                    LONG_MIN.createAggregatorFactory(SINGLE, ImmutableList.of(3), OptionalInt.empty()),
                    LONG_AVERAGE.createAggregatorFactory(SINGLE, ImmutableList.of(3), OptionalInt.empty()),
                    maxVarcharColumn.createAggregatorFactory(SINGLE, ImmutableList.of(2), OptionalInt.empty())),
            rowPagesBuilder.getHashChannel(), Optional.empty(), 100_000, Optional.of(DataSize.of(16, MEGABYTE)),
            true, succinctBytes(8), succinctBytes(Integer.MAX_VALUE), new FailingSpillerFactory(),
            joinCompiler, blockTypeOperators, Optional.empty());
    assertThatThrownBy(() -> toPages(operatorFactory, driverContext, input)).isInstanceOf(RuntimeException.class).hasCauseInstanceOf(IOException.class).hasMessageEndingWith("Failed to spill");
}
Also used: RowPagesBuilder (io.trino.RowPagesBuilder), Page (io.trino.spi.Page), IOException (java.io.IOException), TestingAggregationFunction (io.trino.operator.aggregation.TestingAggregationFunction), PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), Type (io.trino.spi.type.Type), HashAggregationOperatorFactory (io.trino.operator.HashAggregationOperator.HashAggregationOperatorFactory), Test (org.testng.annotations.Test)

Example 15 with RowPagesBuilder

Use of io.trino.RowPagesBuilder in project trino by trinodb.

From class TestHashAggregationOperator, method testHashAggregationMemoryReservation.

@Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
public void testHashAggregationMemoryReservation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory) {
    TestingAggregationFunction arrayAggColumn = FUNCTION_RESOLUTION.getAggregateFunction(QualifiedName.of("array_agg"), fromTypes(BIGINT));
    List<Integer> hashChannels = Ints.asList(1);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT, BIGINT);
    List<Page> input = rowPagesBuilder.addSequencePage(10, 100, 0).addSequencePage(10, 200, 0).addSequencePage(10, 300, 0).build();
    DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, DataSize.of(11, Unit.MEGABYTE)).addPipelineContext(0, true, true, false).addDriverContext();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0, new PlanNodeId("test"), ImmutableList.of(BIGINT), hashChannels, ImmutableList.of(), SINGLE, true,
            ImmutableList.of(arrayAggColumn.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty())),
            rowPagesBuilder.getHashChannel(), Optional.empty(), 100_000, Optional.of(DataSize.of(16, MEGABYTE)),
            spillEnabled, succinctBytes(memoryLimitForMerge), succinctBytes(memoryLimitForMergeWithMemory),
            spillerFactory, joinCompiler, blockTypeOperators, Optional.empty());
    Operator operator = operatorFactory.createOperator(driverContext);
    toPages(operator, input.iterator(), revokeMemoryWhenAddingPages);
    // TODO (https://github.com/trinodb/trino/issues/10596): it should be 0, since operator is finished
    assertEquals(getOnlyElement(operator.getOperatorContext().getNestedOperatorStats()).getUserMemoryReservation().toBytes(), spillEnabled && revokeMemoryWhenAddingPages ? 5_322_192 : 0);
    assertEquals(getOnlyElement(operator.getOperatorContext().getNestedOperatorStats()).getRevocableMemoryReservation().toBytes(), 0);
}
Also used: PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), RowPagesBuilder (io.trino.RowPagesBuilder), Page (io.trino.spi.Page), TestingAggregationFunction (io.trino.operator.aggregation.TestingAggregationFunction), HashAggregationOperatorFactory (io.trino.operator.HashAggregationOperator.HashAggregationOperatorFactory), Test (org.testng.annotations.Test)

Aggregations

RowPagesBuilder (io.trino.RowPagesBuilder): 84
Test (org.testng.annotations.Test): 81
Page (io.trino.spi.Page): 71
MaterializedResult (io.trino.testing.MaterializedResult): 64
PlanNodeId (io.trino.sql.planner.plan.PlanNodeId): 47
TaskContext (io.trino.operator.TaskContext): 46
TestingTaskContext (io.trino.testing.TestingTaskContext): 46
Type (io.trino.spi.type.Type): 38
OperatorFactory (io.trino.operator.OperatorFactory): 29
BuildSideSetup (io.trino.operator.join.JoinTestUtils.BuildSideSetup): 26
WorkProcessorOperatorFactory (io.trino.operator.WorkProcessorOperatorFactory): 25
ValuesOperatorFactory (io.trino.operator.ValuesOperator.ValuesOperatorFactory): 24
PageBufferOperatorFactory (io.trino.operator.index.PageBufferOperator.PageBufferOperatorFactory): 24
JoinTestUtils.innerJoinOperatorFactory (io.trino.operator.join.JoinTestUtils.innerJoinOperatorFactory): 24
DriverContext (io.trino.operator.DriverContext): 14
Operator (io.trino.operator.Operator): 12
HashAggregationOperatorFactory (io.trino.operator.HashAggregationOperator.HashAggregationOperatorFactory): 11
WorkProcessorOperator (io.trino.operator.WorkProcessorOperator): 11
NestedLoopJoinOperatorFactory (io.trino.operator.join.NestedLoopJoinOperator.NestedLoopJoinOperatorFactory): 10
StageId (io.trino.execution.StageId): 8
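
Taken together, the examples above share one shape: build input pages with RowPagesBuilder, describe the expected output as a MaterializedResult, and run the operator through the assertion helpers. A condensed sketch of that pattern, assuming the surrounding test class supplies operatorFactory and driverContext (as the examples above do in their setup methods):

// Skeleton of the recurring test pattern; operatorFactory and driverContext
// come from the test class setup and are not constructed here.
RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(VARCHAR, BIGINT);
List<Page> input = rowPagesBuilder
        .row("a", 1L)
        .row("b", 2L)
        .build();
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT)
        .row("a", 1L)   // expected rows depend on the operator under test
        .row("b", 2L)
        .build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);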