Example usage of io.trino.spi.type.TypeOperators in the Trino project (trinodb): the testConflictingScalarAggregation method of the TestGlobalFunctionCatalog class.
@Test
public void testConflictingScalarAggregation() {
    // The system bundle already registers an aggregation named "sum";
    // adding a scalar function with the same name must be rejected.
    TypeOperators operators = new TypeOperators();
    GlobalFunctionCatalog catalog = new GlobalFunctionCatalog();
    catalog.addFunctions(SystemFunctionBundle.create(
            new FeaturesConfig(),
            operators,
            new BlockTypeOperators(operators),
            NodeVersion.UNKNOWN));

    FunctionBundle conflictingScalar = extractFunctions(ScalarSum.class);
    assertThatThrownBy(() -> catalog.addFunctions(conflictingScalar))
            .isInstanceOf(IllegalStateException.class)
            .hasMessage("'sum' is both an aggregation and a scalar function");
}
Example usage of io.trino.spi.type.TypeOperators in the Trino project (trinodb): the testParquetTupleDomainMap method of the TestParquetPredicateUtils class.
@Test
public void testParquetTupleDomainMap() {
    // A map-typed Hive column cannot be converted into a Parquet predicate,
    // so the resulting tuple domain must degenerate to "all".
    MapType mapType = new MapType(INTEGER, INTEGER, new TypeOperators());
    HiveColumnHandle mapColumn = createBaseColumn(
            "my_map",
            0,
            HiveType.valueOf("map<int,int>"),
            mapType,
            REGULAR,
            Optional.empty());
    TupleDomain<HiveColumnHandle> effectivePredicate =
            withColumnDomains(ImmutableMap.of(mapColumn, Domain.notNull(mapType)));

    // Parquet representation of map<int,int>: optional group wrapping a repeated key/value group
    GroupType mapGroup = new GroupType(
            REPEATED,
            "map",
            new PrimitiveType(REQUIRED, INT32, "key"),
            new PrimitiveType(OPTIONAL, INT32, "value"));
    MessageType parquetSchema = new MessageType("hive_schema", new GroupType(OPTIONAL, "my_map", mapGroup));

    Map<List<String>, RichColumnDescriptor> descriptorsByPath = getDescriptors(parquetSchema, parquetSchema);
    TupleDomain<ColumnDescriptor> parquetPredicate =
            getParquetTupleDomain(descriptorsByPath, effectivePredicate, parquetSchema, true);
    assertTrue(parquetPredicate.isAll());
}
Example usage of io.trino.spi.type.TypeOperators in the Trino project (trinodb): the testCompareSortChannelPositionsWithMapType method of the TestSimplePagesHashStrategy class.
@Test
public void testCompareSortChannelPositionsWithMapType() {
    // MapType is comparable but not orderable, so sort-position comparison must fail.
    MapType mapType = new MapType(INTEGER, INTEGER, new TypeOperators());
    IntArrayBlock keys = new IntArrayBlock(1, Optional.empty(), new int[] { 1234 });
    IntArrayBlock values = new IntArrayBlock(1, Optional.empty(), new int[] { 5678 });
    Block mapBlock = mapType.createBlockFromKeyValue(Optional.empty(), new int[] { 0, 1 }, keys, values);

    SimplePagesHashStrategy hashStrategy = createSimplePagesHashStrategy(mapType, ImmutableList.of(mapBlock));

    // This fails because MapType is not orderable.
    assertThatThrownBy(() -> hashStrategy.compareSortChannelPositions(0, 0, 0, 0))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("type is not orderable");
}
Example usage of io.trino.spi.type.TypeOperators in the Trino project (trinodb): the testRowEqualsRowWithMapType method of the TestSimplePagesHashStrategy class.
@Test
public void testRowEqualsRowWithMapType() {
    // Map types are not orderable but are comparable, so row equality checks
    // across pages must still work for map-typed channels.
    MapType mapType = new MapType(INTEGER, INTEGER, new TypeOperators());
    SimplePagesHashStrategy hashStrategy = createSimplePagesHashStrategy(mapType, ImmutableList.of());

    // Each page holds a single-entry map block built from the given key/value pair.
    Page left = singleEntryMapPage(mapType, 1234, 5678);
    Page matching = singleEntryMapPage(mapType, 1234, 5678);
    Page differing = singleEntryMapPage(mapType, 1234, 1234);

    // This works because MapType is comparable.
    assertTrue(hashStrategy.rowEqualsRow(0, left, 0, matching));
    assertFalse(hashStrategy.rowEqualsRow(0, left, 0, differing));
}

// Builds a one-row page containing a single map entry {key -> value} of the given map type.
private static Page singleEntryMapPage(MapType mapType, int key, int value) {
    return new Page(mapType.createBlockFromKeyValue(
            Optional.empty(),
            new int[] { 0, 1 },
            new IntArrayBlock(1, Optional.empty(), new int[] { key }),
            new IntArrayBlock(1, Optional.empty(), new int[] { value })));
}
Example usage of io.trino.spi.type.TypeOperators in the Trino project (trinodb): the test method of the TestStreamingAggregationOperator class.
@Test
public void test() {
    // Streaming aggregation grouped on the DOUBLE channel (index 1), computing
    // count(channel 0) and sum(channel 2) per group; input must arrive pre-sorted
    // on the grouping channel for streaming aggregation to produce correct groups.
    OperatorFactory operatorFactory = StreamingAggregationOperator.createOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BOOLEAN, DOUBLE, BIGINT),
            ImmutableList.of(DOUBLE),
            ImmutableList.of(1),
            ImmutableList.of(
                    COUNT.createAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.empty()),
                    LONG_SUM.createAggregatorFactory(SINGLE, ImmutableList.of(2), OptionalInt.empty())),
            new JoinCompiler(new TypeOperators()));

    RowPagesBuilder pages = RowPagesBuilder.rowPagesBuilder(BOOLEAN, DOUBLE, BIGINT);
    List<Page> input = pages
            .addSequencePage(3, 0, 0, 1)
            .row(true, 3.0, 4)
            .row(false, 3.0, 5)
            .pageBreak()
            .row(true, 3.0, 6)
            .row(false, 4.0, 7)
            .row(true, 4.0, 8)
            .row(false, 4.0, 9)
            .row(true, 4.0, 10)
            .pageBreak()
            .row(false, 5.0, 11)
            .row(true, 5.0, 12)
            .row(false, 5.0, 13)
            .row(true, 5.0, 14)
            .row(false, 5.0, 15)
            .pageBreak()
            .addSequencePage(3, 0, 6, 16)
            // NaN keys group together; null keys group together.
            .row(false, Double.NaN, 1)
            .row(false, Double.NaN, 10)
            .row(false, null, 2)
            .row(false, null, 20)
            .build();

    MaterializedResult expected = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
            .row(0.0, 1L, 1L)
            .row(1.0, 1L, 2L)
            .row(2.0, 1L, 3L)
            .row(3.0, 3L, 15L)
            .row(4.0, 4L, 34L)
            .row(5.0, 5L, 65L)
            .row(6.0, 1L, 16L)
            .row(7.0, 1L, 17L)
            .row(8.0, 1L, 18L)
            .row(Double.NaN, 2L, 11L)
            .row(null, 2L, 22L)
            .build();

    assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
Aggregations