Use of io.trino.sql.gen.JoinCompiler in project trino by trinodb.
From class TestHashSemiJoinOperator, method testMemoryLimit.
@Test(dataProvider = "hashEnabledValues", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node memory limit of.*")
public void testMemoryLimit(boolean hashEnabled)
{
    // Task context with a 100-byte memory limit, so building the semi-join set is expected to exceed it
    DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, DataSize.ofBytes(100))
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName());

    // Build side: 10000 sequential BIGINT values
    List<Type> buildTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes);
    Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder.addSequencePage(10000, 20).build());

    SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
            1,
            new PlanNodeId("test"),
            buildTypes.get(0),
            0,
            rowPagesBuilder.getHashChannel(),
            10,
            new JoinCompiler(typeOperators),
            blockTypeOperators);
    Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext);

    // Drive the build side to completion; the per-node memory limit should be exceeded along the way
    Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator);
    while (!driver.isFinished()) {
        driver.process();
    }
}
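The test above refers to a hashEnabledValues data provider and to typeOperators/blockTypeOperators fields that are outside this excerpt. A minimal sketch of that supporting scaffolding, assuming TestNG and the constructors visible in the other snippets on this page (the provider values and the import paths are assumptions, not the project's verbatim code):

import io.trino.spi.type.TypeOperators;
import io.trino.type.BlockTypeOperators;
import org.testng.annotations.DataProvider;

// Fields assumed by the test body above (fragment of the enclosing test class, not the original source).
private final TypeOperators typeOperators = new TypeOperators();
private final BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);

// Assumed shape of the "hashEnabledValues" provider: run each test with and without a precomputed hash channel.
@DataProvider(name = "hashEnabledValues")
public static Object[][] hashEnabledValues()
{
    return new Object[][] {{true}, {false}};
}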
Use of io.trino.sql.gen.JoinCompiler in project trino by trinodb.
From class TestHashSemiJoinOperator, method testSemiJoinMemoryReservationYield.
@Test(dataProvider = "dataType")
public void testSemiJoinMemoryReservationYield(Type type)
{
    // We only need the first column, so the pages are created with hashEnabled = false
    List<Page> input = createPagesWithDistinctHashKeys(type, 5_000, 500);

    // Create the set-build operator
    SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
            1,
            new PlanNodeId("test"),
            type,
            0,
            Optional.of(1),
            10,
            new JoinCompiler(typeOperators),
            blockTypeOperators);

    // Run the test: the operator should yield repeatedly while the group-by hash grows its memory reservation
    GroupByHashYieldAssertion.GroupByHashYieldResult result = finishOperatorWithYieldingGroupByHash(
            input,
            type,
            setBuilderOperatorFactory,
            operator -> ((SetBuilderOperator) operator).getCapacity(),
            1_400_000);
    assertGreaterThanOrEqual(result.getYieldCount(), 5);
    assertGreaterThan(result.getMaxReservedBytes(), 20L << 20);
    assertEquals(result.getOutput().stream().mapToInt(Page::getPositionCount).sum(), 0);
}
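The dataType provider referenced above is likewise not part of the excerpt. A plausible minimal version that cycles the element type of the semi-join set (the concrete types chosen here are an assumption):

import org.testng.annotations.DataProvider;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

// Assumed provider: exercise the yield path with both a fixed-width and a variable-width type.
@DataProvider(name = "dataType")
public static Object[][] dataType()
{
    return new Object[][] {{BIGINT}, {VARCHAR}};
}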
Use of io.trino.sql.gen.JoinCompiler in project trino by trinodb.
From class TaskTestUtils, method createTestingPlanner.
public static LocalExecutionPlanner createTestingPlanner()
{
    PageSourceManager pageSourceManager = new PageSourceManager();
    pageSourceManager.addConnectorPageSourceProvider(CONNECTOR_ID, new TestingPageSourceProvider());

    // We don't start the finalizer, so nothing will be collected, which is OK for a test
    FinalizerService finalizerService = new FinalizerService();

    BlockTypeOperators blockTypeOperators = new BlockTypeOperators(PLANNER_CONTEXT.getTypeOperators());
    NodeScheduler nodeScheduler = new NodeScheduler(new UniformNodeSelectorFactory(
            new InMemoryNodeManager(),
            new NodeSchedulerConfig().setIncludeCoordinator(true),
            new NodeTaskMap(finalizerService)));
    NodePartitioningManager nodePartitioningManager = new NodePartitioningManager(nodeScheduler, blockTypeOperators);
    PageFunctionCompiler pageFunctionCompiler = new PageFunctionCompiler(PLANNER_CONTEXT.getFunctionManager(), 0);

    return new LocalExecutionPlanner(
            PLANNER_CONTEXT,
            createTestingTypeAnalyzer(PLANNER_CONTEXT),
            Optional.empty(),
            pageSourceManager,
            new IndexManager(),
            nodePartitioningManager,
            new PageSinkManager(),
            new MockDirectExchangeClientSupplier(),
            new ExpressionCompiler(PLANNER_CONTEXT.getFunctionManager(), pageFunctionCompiler),
            pageFunctionCompiler,
            new JoinFilterFunctionCompiler(PLANNER_CONTEXT.getFunctionManager()),
            new IndexJoinLookupStats(),
            new TaskManagerConfig(),
            // Spilling is never exercised by these tests, so every spiller factory just throws
            new GenericSpillerFactory((types, spillContext, memoryContext) -> {
                throw new UnsupportedOperationException();
            }),
            (types, spillContext, memoryContext) -> {
                throw new UnsupportedOperationException();
            },
            (types, partitionFunction, spillContext, memoryContext) -> {
                throw new UnsupportedOperationException();
            },
            new PagesIndex.TestingFactory(false),
            new JoinCompiler(PLANNER_CONTEXT.getTypeOperators()),
            new TrinoOperatorFactories(),
            new OrderingCompiler(PLANNER_CONTEXT.getTypeOperators()),
            new DynamicFilterConfig(),
            blockTypeOperators,
            new TableExecuteContextManager(),
            new ExchangeManagerRegistry(new ExchangeHandleResolver()));
}
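The three spill-related arguments are inline lambdas that simply throw. For readability they could be pulled out into named constants; a sketch assuming the factory interfaces implied by the lambda shapes (the constant names and import paths are hypothetical, not the project's code):

// Hypothetical extraction of the throwing spiller factories used above; assumes Trino's
// SingleStreamSpillerFactory and PartitioningSpillerFactory functional interfaces.
private static final SingleStreamSpillerFactory THROWING_SINGLE_STREAM_SPILLER_FACTORY =
        (types, spillContext, memoryContext) -> {
            throw new UnsupportedOperationException();
        };

private static final PartitioningSpillerFactory THROWING_PARTITIONING_SPILLER_FACTORY =
        (types, partitionFunction, spillContext, memoryContext) -> {
            throw new UnsupportedOperationException();
        };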
Use of io.trino.sql.gen.JoinCompiler in project trino by trinodb.
From class TestTopNRankingOperator, method setUp.
@BeforeMethod
public void setUp()
{
    executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
    scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
    driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    joinCompiler = new JoinCompiler(typeOperators);
}
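A matching teardown is not shown in the excerpt; tests that create executors per method typically release them afterwards. A sketch of what that could look like (an assumption, not the project's verbatim code):

// Hypothetical companion to the setUp above: shut down the executors created for each test.
@AfterMethod(alwaysRun = true)
public void tearDown()
{
    executor.shutdownNow();
    scheduledExecutor.shutdownNow();
}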
Use of io.trino.sql.gen.JoinCompiler in project trino by trinodb.
From class TestStreamingAggregationOperator, method test.
@Test
public void test()
{
    // Group by the DOUBLE column (channel 1), computing count over channel 0 and sum over channel 2
    OperatorFactory operatorFactory = StreamingAggregationOperator.createOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BOOLEAN, DOUBLE, BIGINT),
            ImmutableList.of(DOUBLE),
            ImmutableList.of(1),
            ImmutableList.of(
                    COUNT.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty()),
                    LONG_SUM.createAggregatorFactory(SINGLE, ImmutableList.of(2), OptionalInt.empty())),
            new JoinCompiler(new TypeOperators()));

    RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(BOOLEAN, DOUBLE, BIGINT);
    List<Page> input = rowPagesBuilder
            .addSequencePage(3, 0, 0, 1)
            .row(true, 3.0, 4)
            .row(false, 3.0, 5)
            .pageBreak()
            .row(true, 3.0, 6)
            .row(false, 4.0, 7)
            .row(true, 4.0, 8)
            .row(false, 4.0, 9)
            .row(true, 4.0, 10)
            .pageBreak()
            .row(false, 5.0, 11)
            .row(true, 5.0, 12)
            .row(false, 5.0, 13)
            .row(true, 5.0, 14)
            .row(false, 5.0, 15)
            .pageBreak()
            .addSequencePage(3, 0, 6, 16)
            .row(false, Double.NaN, 1)
            .row(false, Double.NaN, 10)
            .row(false, null, 2)
            .row(false, null, 20)
            .build();

    MaterializedResult expected = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
            .row(0.0, 1L, 1L)
            .row(1.0, 1L, 2L)
            .row(2.0, 1L, 3L)
            .row(3.0, 3L, 15L)
            .row(4.0, 4L, 34L)
            .row(5.0, 5L, 65L)
            .row(6.0, 1L, 16L)
            .row(7.0, 1L, 17L)
            .row(8.0, 1L, 18L)
            .row(Double.NaN, 2L, 11L)
            .row(null, 2L, 22L)
            .build();

    assertOperatorEquals(operatorFactory, driverContext, input, expected);
}