Use of io.trino.spi.type.MapType in project trino by trinodb: class IcebergParquetColumnIOConverter, method constructField.
public static Optional<Field> constructField(FieldContext context, ColumnIO columnIO) {
    requireNonNull(context, "context is null");
    if (columnIO == null) {
        return Optional.empty();
    }
    boolean required = columnIO.getType().getRepetition() != OPTIONAL;
    int repetitionLevel = columnRepetitionLevel(columnIO);
    int definitionLevel = columnDefinitionLevel(columnIO);
    Type type = context.getType();
    if (type instanceof RowType) {
        RowType rowType = (RowType) type;
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        ImmutableList.Builder<Optional<Field>> fieldsBuilder = ImmutableList.builder();
        List<RowType.Field> fields = rowType.getFields();
        boolean structHasParameters = false;
        for (int i = 0; i < fields.size(); i++) {
            RowType.Field rowField = fields.get(i);
            ColumnIdentity fieldIdentity = subColumns.get(i);
            Optional<Field> field = constructField(
                    new FieldContext(rowField.getType(), fieldIdentity),
                    lookupColumnById(groupColumnIO, fieldIdentity.getId()));
            structHasParameters |= field.isPresent();
            fieldsBuilder.add(field);
        }
        if (structHasParameters) {
            return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, fieldsBuilder.build()));
        }
        return Optional.empty();
    }
    if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        GroupColumnIO keyValueColumnIO = getMapKeyValueColumn(groupColumnIO);
        if (keyValueColumnIO.getChildrenCount() != 2) {
            return Optional.empty();
        }
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        checkArgument(subColumns.size() == 2, "Not a map: %s", context);
        ColumnIdentity keyIdentity = subColumns.get(0);
        ColumnIdentity valueIdentity = subColumns.get(1);
        // TODO validate column ID
        Optional<Field> keyField = constructField(new FieldContext(mapType.getKeyType(), keyIdentity), keyValueColumnIO.getChild(0));
        // TODO validate column ID
        Optional<Field> valueField = constructField(new FieldContext(mapType.getValueType(), valueIdentity), keyValueColumnIO.getChild(1));
        return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, ImmutableList.of(keyField, valueField)));
    }
    if (type instanceof ArrayType) {
        ArrayType arrayType = (ArrayType) type;
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        if (groupColumnIO.getChildrenCount() != 1) {
            return Optional.empty();
        }
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        checkArgument(subColumns.size() == 1, "Not an array: %s", context);
        ColumnIdentity elementIdentity = getOnlyElement(subColumns);
        // TODO validate column ID
        Optional<Field> field = constructField(new FieldContext(arrayType.getElementType(), elementIdentity), getArrayElementColumn(groupColumnIO.getChild(0)));
        return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, ImmutableList.of(field)));
    }
    PrimitiveColumnIO primitiveColumnIO = (PrimitiveColumnIO) columnIO;
    RichColumnDescriptor column = new RichColumnDescriptor(primitiveColumnIO.getColumnDescriptor(), columnIO.getType().asPrimitiveType());
    return Optional.of(new PrimitiveField(type, repetitionLevel, definitionLevel, required, column, primitiveColumnIO.getId()));
}
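In the map branch above, context.getType() is an io.trino.spi.type.MapType, and its key and value types drive the two recursive constructField calls. For reference, a minimal sketch of building such a type directly through the SPI; the varchar/bigint choice and the standalone TypeOperators instance are illustrative assumptions, not part of the snippet:

// Illustrative only: a map(varchar, bigint) type and the accessors used above.
MapType mapType = new MapType(VARCHAR, BIGINT, new TypeOperators());
Type keyType = mapType.getKeyType();     // VARCHAR
Type valueType = mapType.getValueType(); // BIGINT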
Use of io.trino.spi.type.MapType in project trino by trinodb: class MaterializedResult, method writeValue.
private static void writeValue(Type type, BlockBuilder blockBuilder, Object value) {
    if (value == null) {
        blockBuilder.appendNull();
    } else if (BIGINT.equals(type)) {
        type.writeLong(blockBuilder, (Long) value);
    } else if (INTEGER.equals(type)) {
        type.writeLong(blockBuilder, (Integer) value);
    } else if (SMALLINT.equals(type)) {
        type.writeLong(blockBuilder, (Short) value);
    } else if (TINYINT.equals(type)) {
        type.writeLong(blockBuilder, (Byte) value);
    } else if (REAL.equals(type)) {
        type.writeLong(blockBuilder, floatToRawIntBits(((Float) value)));
    } else if (DOUBLE.equals(type)) {
        type.writeDouble(blockBuilder, (Double) value);
    } else if (BOOLEAN.equals(type)) {
        type.writeBoolean(blockBuilder, (Boolean) value);
    } else if (JSON.equals(type)) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (type instanceof VarcharType) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (type instanceof CharType) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (VARBINARY.equals(type)) {
        type.writeSlice(blockBuilder, Slices.wrappedBuffer((byte[]) value));
    } else if (DATE.equals(type)) {
        int days = ((SqlDate) value).getDays();
        type.writeLong(blockBuilder, days);
    } else if (type instanceof TimeType) {
        SqlTime time = (SqlTime) value;
        type.writeLong(blockBuilder, time.getPicos());
    } else if (type instanceof TimeWithTimeZoneType) {
        long nanos = roundDiv(((SqlTimeWithTimeZone) value).getPicos(), PICOSECONDS_PER_NANOSECOND);
        int offsetMinutes = ((SqlTimeWithTimeZone) value).getOffsetMinutes();
        type.writeLong(blockBuilder, packTimeWithTimeZone(nanos, offsetMinutes));
    } else if (type instanceof TimestampType) {
        long micros = ((SqlTimestamp) value).getEpochMicros();
        if (((TimestampType) type).getPrecision() <= TimestampType.MAX_SHORT_PRECISION) {
            type.writeLong(blockBuilder, micros);
        } else {
            type.writeObject(blockBuilder, new LongTimestamp(micros, ((SqlTimestamp) value).getPicosOfMicros()));
        }
    } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
        long millisUtc = ((SqlTimestampWithTimeZone) value).getMillisUtc();
        TimeZoneKey timeZoneKey = ((SqlTimestampWithTimeZone) value).getTimeZoneKey();
        type.writeLong(blockBuilder, packDateTimeWithZone(millisUtc, timeZoneKey));
    } else if (type instanceof ArrayType) {
        List<?> list = (List<?>) value;
        Type elementType = ((ArrayType) type).getElementType();
        BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
        for (Object element : list) {
            writeValue(elementType, arrayBlockBuilder, element);
        }
        blockBuilder.closeEntry();
    } else if (type instanceof MapType) {
        Map<?, ?> map = (Map<?, ?>) value;
        Type keyType = ((MapType) type).getKeyType();
        Type valueType = ((MapType) type).getValueType();
        BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
        for (Entry<?, ?> entry : map.entrySet()) {
            writeValue(keyType, mapBlockBuilder, entry.getKey());
            writeValue(valueType, mapBlockBuilder, entry.getValue());
        }
        blockBuilder.closeEntry();
    } else if (type instanceof RowType) {
        List<?> row = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
        for (int field = 0; field < row.size(); field++) {
            writeValue(fieldTypes.get(field), rowBlockBuilder, row.get(field));
        }
        blockBuilder.closeEntry();
    } else {
        throw new IllegalArgumentException("Unsupported type " + type);
    }
}
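For map values, writeValue relies on the MapType block-builder contract: beginBlockEntry() opens one map entry on the parent builder and alternating key/value writes fill it. A minimal sketch of materializing a single map value using the same (older) BlockBuilder API as the snippet; the map(varchar, bigint) type and the standalone TypeOperators are assumptions for illustration:

MapType mapType = new MapType(VARCHAR, BIGINT, new TypeOperators());
BlockBuilder blockBuilder = mapType.createBlockBuilder(null, 1);
BlockBuilder entryBuilder = blockBuilder.beginBlockEntry();
VARCHAR.writeSlice(entryBuilder, Slices.utf8Slice("a")); // key
BIGINT.writeLong(entryBuilder, 1L);                      // value
blockBuilder.closeEntry();
Block mapBlock = blockBuilder.build(); // a single-position map block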
Use of io.trino.spi.type.MapType in project trino by trinodb: class TestMapUnionAggregation, method testSimpleWithNulls.
@Test
public void testSimpleWithNulls() {
    MapType mapType = mapType(DOUBLE, VARCHAR);
    Map<Object, Object> expected = mapOf(23.0, "aaa", 33.0, null, 43.0, "ccc", 53.0, "ddd");
    assertAggregation(
            FUNCTION_RESOLUTION,
            QualifiedName.of(MapUnionAggregation.NAME),
            fromTypes(mapType),
            expected,
            arrayBlockOf(
                    mapType,
                    mapBlockOf(DOUBLE, VARCHAR, mapOf(23.0, "aaa", 33.0, null, 53.0, "ddd")),
                    null,
                    mapBlockOf(DOUBLE, VARCHAR, mapOf(43.0, "ccc", 53.0, "ddd"))));
}
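mapType, mapOf, mapBlockOf and arrayBlockOf are test helpers; conceptually the assertion feeds three map(double, varchar) inputs (the middle one NULL) to map_union and expects the merged map. A hypothetical spelling-out of that expected result, with the standalone TypeOperators instance as an illustrative assumption:

// Illustrative only: the argument type and the merged map expected above.
MapType argumentType = new MapType(DOUBLE, VARCHAR, new TypeOperators());
Map<Object, Object> merged = new HashMap<>();
merged.put(23.0, "aaa");
merged.put(33.0, null);  // null values survive the union
merged.put(43.0, "ccc");
merged.put(53.0, "ddd"); // present in both inputs with the same value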
Use of io.trino.spi.type.MapType in project trino by trinodb: class TestDeltaLakeFileStatistics, method testStatisticsValues.
private static void testStatisticsValues(DeltaLakeFileStatistics fileStatistics) {
    assertEquals(fileStatistics.getNumRecords(), Optional.of(1L));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("byt", TINYINT, REGULAR)), Optional.of(42L));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("dat", DATE, REGULAR)), Optional.of(LocalDate.parse("5000-01-01").toEpochDay()));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("dec_long", DecimalType.createDecimalType(25, 3), REGULAR)), Optional.of(encodeScaledValue(new BigDecimal("999999999999.123"), 3)));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("dec_short", DecimalType.createDecimalType(5, 1), REGULAR)), Optional.of(new BigDecimal("10.1").unscaledValue().longValueExact()));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("dou", DoubleType.DOUBLE, REGULAR)), Optional.of(0.321));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("fl", REAL, REGULAR)), Optional.of((long) floatToIntBits(0.123f)));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("in", INTEGER, REGULAR)), Optional.of(20000000L));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("l", BIGINT, REGULAR)), Optional.of(10000000L));
    Type rowType = RowType.rowType(RowType.field("s1", INTEGER), RowType.field("s3", VarcharType.createUnboundedVarcharType()));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("row", rowType, REGULAR)), Optional.empty());
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("arr", new ArrayType(INTEGER), REGULAR)), Optional.empty());
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("m", new MapType(INTEGER, VarcharType.createUnboundedVarcharType(), new TypeOperators()), REGULAR)), Optional.empty());
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("sh", SMALLINT, REGULAR)), Optional.of(123L));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("str", VarcharType.createUnboundedVarcharType(), REGULAR)), Optional.of(utf8Slice("a")));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("ts", TIMESTAMP_TZ_MILLIS, REGULAR)), Optional.of(packDateTimeWithZone(LocalDateTime.parse("2960-10-31T01:00:00.000").toInstant(UTC).toEpochMilli(), UTC_KEY)));
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("bool", BOOLEAN, REGULAR)), Optional.empty());
    assertEquals(fileStatistics.getMinColumnValue(new DeltaLakeColumnHandle("bin", VARBINARY, REGULAR)), Optional.empty());
}
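The map column is handled like the primitive ones, just with a MapType in the column handle; as the assertions suggest, min/max file statistics are only returned for the simpler column types, so the nested and binary columns resolve to Optional.empty(). Condensed to just the map case, using the same names as the snippet:

DeltaLakeColumnHandle mapColumn = new DeltaLakeColumnHandle(
        "m",
        new MapType(INTEGER, VarcharType.createUnboundedVarcharType(), new TypeOperators()),
        REGULAR);
// no min/max value is recorded for nested types
assertEquals(fileStatistics.getMinColumnValue(mapColumn), Optional.empty());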
Use of io.trino.spi.type.MapType in project trino by trinodb: class AbstractTestHive, method assertValueTypes.
private static void assertValueTypes(MaterializedRow row, List<ColumnMetadata> schema) {
    for (int columnIndex = 0; columnIndex < schema.size(); columnIndex++) {
        ColumnMetadata column = schema.get(columnIndex);
        Object value = row.getField(columnIndex);
        if (value != null) {
            if (BOOLEAN.equals(column.getType())) {
                assertInstanceOf(value, Boolean.class);
            } else if (TINYINT.equals(column.getType())) {
                assertInstanceOf(value, Byte.class);
            } else if (SMALLINT.equals(column.getType())) {
                assertInstanceOf(value, Short.class);
            } else if (INTEGER.equals(column.getType())) {
                assertInstanceOf(value, Integer.class);
            } else if (BIGINT.equals(column.getType())) {
                assertInstanceOf(value, Long.class);
            } else if (DOUBLE.equals(column.getType())) {
                assertInstanceOf(value, Double.class);
            } else if (REAL.equals(column.getType())) {
                assertInstanceOf(value, Float.class);
            } else if (column.getType() instanceof VarcharType) {
                assertInstanceOf(value, String.class);
            } else if (column.getType() instanceof CharType) {
                assertInstanceOf(value, String.class);
            } else if (VARBINARY.equals(column.getType())) {
                assertInstanceOf(value, SqlVarbinary.class);
            } else if (TIMESTAMP_MILLIS.equals(column.getType())) {
                assertInstanceOf(value, SqlTimestamp.class);
            } else if (TIMESTAMP_WITH_TIME_ZONE.equals(column.getType())) {
                assertInstanceOf(value, SqlTimestampWithTimeZone.class);
            } else if (DATE.equals(column.getType())) {
                assertInstanceOf(value, SqlDate.class);
            } else if (column.getType() instanceof ArrayType || column.getType() instanceof RowType) {
                assertInstanceOf(value, List.class);
            } else if (column.getType() instanceof MapType) {
                assertInstanceOf(value, Map.class);
            } else {
                fail("Unknown primitive type " + columnIndex);
            }
        }
    }
}
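The chain above pins down the Java classes that MaterializedRow exposes per Trino type: arrays and rows come back as java.util.List, maps as java.util.Map. A minimal, hypothetical follow-up showing how a map value could be inspected once its type is known:

if (column.getType() instanceof MapType && value != null) {
    MapType mapType = (MapType) column.getType();
    Map<?, ?> map = (Map<?, ?>) value;
    // keys and values use the Java representations of mapType.getKeyType()
    // and mapType.getValueType(), e.g. String for varchar keys
}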