Example usage of io.trino.spi.Page from the trinodb/trino project: class TestMergeHashSort, method testBinaryMergeIteratorOverEmptyPage.
// A single merge source consisting of one empty page must finish without
// ever producing an output page.
@Test
public void testBinaryMergeIteratorOverEmptyPage() {
    Page emptyPage = new Page(0, BIGINT.createFixedSizeBlockBuilder(0).build());
    ImmutableList<WorkProcessor<Page>> sources = ImmutableList.of(ImmutableList.of(emptyPage).iterator()).stream()
            .map(WorkProcessor::fromIterator)
            .collect(toImmutableList());
    WorkProcessor<Page> merged = new MergeHashSort(newSimpleAggregatedMemoryContext(), blockTypeOperators)
            .merge(ImmutableList.of(BIGINT), ImmutableList.of(BIGINT), sources, new DriverYieldSignal());
    assertFinishes(merged);
}
Example usage of io.trino.spi.Page from the trinodb/trino project: class TestMergeHashSort, method testBinaryMergeIteratorOverPageWith.
// Merging a source that yields an empty page followed by a single-row page
// must emit exactly that one row and then finish.
// NOTE(review): this body is identical to testBinaryMergeIteratorOverEmptyPageAndNonEmptyPage
// and the method name looks truncated — confirm the intended coverage.
@Test
public void testBinaryMergeIteratorOverPageWith() {
    Page emptyPage = new Page(0, BIGINT.createFixedSizeBlockBuilder(0).build());
    Page dataPage = rowPagesBuilder(BIGINT).row(42).build().get(0);
    ImmutableList<WorkProcessor<Page>> sources = ImmutableList.of(ImmutableList.of(emptyPage, dataPage).iterator()).stream()
            .map(WorkProcessor::fromIterator)
            .collect(toImmutableList());
    WorkProcessor<Page> merged = new MergeHashSort(newSimpleAggregatedMemoryContext(), blockTypeOperators)
            .merge(ImmutableList.of(BIGINT), ImmutableList.of(BIGINT), sources, new DriverYieldSignal());

    assertTrue(merged.process());
    Page result = merged.getResult();
    assertEquals(result.getPositionCount(), 1);
    assertEquals(result.getChannelCount(), 1);
    assertEquals(result.getBlock(0).getLong(0, 0), 42);
    assertFinishes(merged);
}
Example usage of io.trino.spi.Page from the trinodb/trino project: class TestMergeHashSort, method testBinaryMergeIteratorOverEmptyPageAndNonEmptyPage.
// An empty page interleaved with a one-row page in the same source should
// not affect the merged output: exactly one row comes out, then the
// processor finishes.
@Test
public void testBinaryMergeIteratorOverEmptyPageAndNonEmptyPage() {
    Page emptyPage = new Page(0, BIGINT.createFixedSizeBlockBuilder(0).build());
    Page page = rowPagesBuilder(BIGINT).row(42).build().get(0);
    WorkProcessor<Page> mergedPage = new MergeHashSort(newSimpleAggregatedMemoryContext(), blockTypeOperators).merge(
            ImmutableList.of(BIGINT),
            ImmutableList.of(BIGINT),
            ImmutableList.of(ImmutableList.of(emptyPage, page).iterator()).stream().map(WorkProcessor::fromIterator).collect(toImmutableList()),
            new DriverYieldSignal());

    assertTrue(mergedPage.process());
    Page actualPage = mergedPage.getResult();
    // Exactly one row (value 42) in a single bigint channel.
    assertEquals(actualPage.getChannelCount(), 1);
    assertEquals(actualPage.getPositionCount(), 1);
    assertEquals(actualPage.getBlock(0).getLong(0, 0), 42);
    assertFinishes(mergedPage);
}
Example usage of io.trino.spi.Page from the trinodb/trino project: class TestFilterAndProjectOperator, method testMergeOutput.
// Each of the four 100-row input pages contains exactly one row matching the
// filter (column 1 == 10). With a 64kB minimum output size and a minimum
// output row count of 2, the operator should merge the tiny filtered pages
// rather than emit one page per input page.
@Test
public void testMergeOutput() {
    List<Page> input = rowPagesBuilder(VARCHAR, BIGINT)
            .addSequencePage(100, 0, 0)
            .addSequencePage(100, 0, 0)
            .addSequencePage(100, 0, 0)
            .addSequencePage(100, 0, 0)
            .build();

    TestingFunctionResolution functionResolution = new TestingFunctionResolution();
    RowExpression filter = call(
            functionResolution.resolveOperator(EQUAL, ImmutableList.of(BIGINT, BIGINT)),
            field(1, BIGINT),
            constant(10L, BIGINT));
    Supplier<PageProcessor> processor = functionResolution.getExpressionCompiler()
            .compilePageProcessor(Optional.of(filter), ImmutableList.of(field(1, BIGINT)));

    OperatorFactory operatorFactory = FilterAndProjectOperator.createOperatorFactory(
            0,
            new PlanNodeId("test"),
            processor,
            ImmutableList.of(BIGINT),
            DataSize.of(64, KILOBYTE),
            2);

    List<Page> expected = rowPagesBuilder(BIGINT)
            .row(10L)
            .row(10L)
            .row(10L)
            .row(10L)
            .build();

    assertOperatorEquals(operatorFactory, ImmutableList.of(BIGINT), driverContext, input, expected);
}
Example usage of io.trino.spi.Page from the trinodb/trino project: class TestGroupByHash, method testProperWorkTypesSelected.
// Verifies that GroupByHash selects the Work implementation specialized for
// the input page's block layout (flat, dictionary, run-length encoded) and
// for the cardinality of dictionary-encoded inputs.
@Test
public void testProperWorkTypesSelected() {
    Block bigintBlock = BlockAssertions.createLongsBlock(1, 2, 3, 4, 5, 6, 7, 8);
    Block bigintDictionaryBlock = BlockAssertions.createLongDictionaryBlock(0, 8);
    Block bigintRleBlock = BlockAssertions.createRLEBlock(42, 8);
    Block varcharBlock = BlockAssertions.createStringsBlock("1", "2", "3", "4", "5", "6", "7", "8");
    Block varcharDictionaryBlock = BlockAssertions.createStringDictionaryBlock(1, 8);
    Block varcharRleBlock = new RunLengthEncodedBlock(new VariableWidthBlock(1, Slices.EMPTY_SLICE, new int[] { 0, 1 }, Optional.empty()), 8);
    Block bigintBigDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 8, 1000);
    Block bigintSingletonDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 500000, 1);
    // Dictionary cardinality above Short.MAX_VALUE
    Block bigintHugeDictionaryBlock = BlockAssertions.createLongDictionaryBlock(1, 500000, 66000);

    // Single bigint channel: BigintGroupByHash specialization per block encoding.
    Page singleBigintPage = new Page(bigintBlock);
    assertGroupByHashWork(singleBigintPage, ImmutableList.of(BIGINT), BigintGroupByHash.GetGroupIdsWork.class);
    Page singleBigintDictionaryPage = new Page(bigintDictionaryBlock);
    assertGroupByHashWork(singleBigintDictionaryPage, ImmutableList.of(BIGINT), BigintGroupByHash.GetDictionaryGroupIdsWork.class);
    Page singleBigintRlePage = new Page(bigintRleBlock);
    assertGroupByHashWork(singleBigintRlePage, ImmutableList.of(BIGINT), BigintGroupByHash.GetRunLengthEncodedGroupIdsWork.class);

    // Single varchar channel: MultiChannelGroupByHash specialization per block encoding.
    Page singleVarcharPage = new Page(varcharBlock);
    assertGroupByHashWork(singleVarcharPage, ImmutableList.of(VARCHAR), MultiChannelGroupByHash.GetNonDictionaryGroupIdsWork.class);
    Page singleVarcharDictionaryPage = new Page(varcharDictionaryBlock);
    assertGroupByHashWork(singleVarcharDictionaryPage, ImmutableList.of(VARCHAR), MultiChannelGroupByHash.GetDictionaryGroupIdsWork.class);
    Page singleVarcharRlePage = new Page(varcharRleBlock);
    assertGroupByHashWork(singleVarcharRlePage, ImmutableList.of(VARCHAR), MultiChannelGroupByHash.GetRunLengthEncodedGroupIdsWork.class);

    // Two dictionary channels with small combined cardinality use the
    // low-cardinality fast path.
    Page lowCardinalityDictionaryPage = new Page(bigintDictionaryBlock, varcharDictionaryBlock);
    assertGroupByHashWork(lowCardinalityDictionaryPage, ImmutableList.of(BIGINT, VARCHAR), MultiChannelGroupByHash.GetLowCardinalityDictionaryGroupIdsWork.class);

    // High combined cardinality falls back to the non-dictionary path.
    // BUG FIX: this page holds two bigint blocks, but the declared hash types
    // were previously (BIGINT, VARCHAR); declare types matching the blocks,
    // consistent with the huge-dictionary case below.
    Page highCardinalityDictionaryPage = new Page(bigintDictionaryBlock, bigintBigDictionaryBlock);
    assertGroupByHashWork(highCardinalityDictionaryPage, ImmutableList.of(BIGINT, BIGINT), MultiChannelGroupByHash.GetNonDictionaryGroupIdsWork.class);

    // Cardinality above Short.MAX_VALUE also bypasses the dictionary paths.
    Page lowCardinalityHugeDictionaryPage = new Page(bigintSingletonDictionaryBlock, bigintHugeDictionaryBlock);
    assertGroupByHashWork(lowCardinalityHugeDictionaryPage, ImmutableList.of(BIGINT, BIGINT), MultiChannelGroupByHash.GetNonDictionaryGroupIdsWork.class);
}
Aggregations