Use of io.trino.spi.type.RowType in project trino by trinodb.
The class PartitionTable, method buildRecordCursor. The method turns per-partition Iceberg statistics into in-memory records, using RowType block builders to materialize the partition key and the column-level metrics:
private RecordCursor buildRecordCursor(Map<StructLikeWrapper, IcebergStatistics> partitionStatistics, List<PartitionField> partitionFields) {
    List<Type> partitionTypes = partitionTypes(partitionFields);
    List<? extends Class<?>> partitionColumnClass = partitionTypes.stream().map(type -> type.typeId().javaClass()).collect(toImmutableList());
    ImmutableList.Builder<List<Object>> records = ImmutableList.builder();
    for (Map.Entry<StructLikeWrapper, IcebergStatistics> partitionEntry : partitionStatistics.entrySet()) {
        StructLikeWrapper partitionStruct = partitionEntry.getKey();
        IcebergStatistics icebergStatistics = partitionEntry.getValue();
        List<Object> row = new ArrayList<>();
        // add data for partition columns
        partitionColumnType.ifPresent(partitionColumnType -> {
            BlockBuilder partitionRowBlockBuilder = partitionColumnType.createBlockBuilder(null, 1);
            BlockBuilder partitionBlockBuilder = partitionRowBlockBuilder.beginBlockEntry();
            for (int i = 0; i < partitionColumnTypes.size(); i++) {
                io.trino.spi.type.Type trinoType = partitionColumnType.getFields().get(i).getType();
                Object value = convertIcebergValueToTrino(partitionTypes.get(i), partitionStruct.get().get(i, partitionColumnClass.get(i)));
                writeNativeValue(trinoType, partitionBlockBuilder, value);
            }
            partitionRowBlockBuilder.closeEntry();
            row.add(partitionColumnType.getObject(partitionRowBlockBuilder, 0));
        });
        // add the top level metrics.
        row.add(icebergStatistics.getRecordCount());
        row.add(icebergStatistics.getFileCount());
        row.add(icebergStatistics.getSize());
        // add column level metrics
        dataColumnType.ifPresent(dataColumnType -> {
            BlockBuilder dataRowBlockBuilder = dataColumnType.createBlockBuilder(null, 1);
            BlockBuilder dataBlockBuilder = dataRowBlockBuilder.beginBlockEntry();
            for (int i = 0; i < columnMetricTypes.size(); i++) {
                Integer fieldId = nonPartitionPrimitiveColumns.get(i).fieldId();
                Object min = icebergStatistics.getMinValues().get(fieldId);
                Object max = icebergStatistics.getMaxValues().get(fieldId);
                Long nullCount = icebergStatistics.getNullCounts().get(fieldId);
                if (min == null && max == null && nullCount == null) {
                    row.add(null);
                    return;
                }
                RowType columnMetricType = columnMetricTypes.get(i);
                columnMetricType.writeObject(dataBlockBuilder, getColumnMetricBlock(columnMetricType, min, max, nullCount));
            }
            dataRowBlockBuilder.closeEntry();
            row.add(dataColumnType.getObject(dataRowBlockBuilder, 0));
        });
        records.add(row);
    }
    return new InMemoryRecordSet(resultTypes, records.build()).cursor();
}
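
For reference, a minimal standalone sketch of the same block-building pattern outside the PartitionTable class. The class and values here are invented; the sketch relies only on the SPI calls already visible above (createBlockBuilder, beginBlockEntry, writeNativeValue, closeEntry, getObject):

import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slices;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.RowType;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.TypeUtils.writeNativeValue;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class RowValueSketch {
    // Build a single row(id bigint, name varchar) value and return it as a Block,
    // the same shape buildRecordCursor above adds for the partition column.
    public static Block singleRow() {
        RowType rowType = RowType.from(ImmutableList.of(
                RowType.field("id", BIGINT),
                RowType.field("name", VARCHAR)));

        BlockBuilder rowBlockBuilder = rowType.createBlockBuilder(null, 1);
        BlockBuilder entryBuilder = rowBlockBuilder.beginBlockEntry();
        writeNativeValue(BIGINT, entryBuilder, 42L);
        writeNativeValue(VARCHAR, entryBuilder, Slices.utf8Slice("answer"));
        rowBlockBuilder.closeEntry();

        // The row value is read back as a nested Block at position 0.
        return (Block) rowType.getObject(rowBlockBuilder, 0);
    }
}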
Use of io.trino.spi.type.RowType in project trino by trinodb.
The class TypeConverter, method fromRow. The method maps a Trino RowType to an Iceberg struct type, requiring every row field to be named:
private static org.apache.iceberg.types.Type fromRow(RowType type) {
    List<Types.NestedField> fields = new ArrayList<>();
    for (RowType.Field field : type.getFields()) {
        String name = field.getName().orElseThrow(() -> new TrinoException(NOT_SUPPORTED, "Row type field does not have a name: " + type.getDisplayName()));
        fields.add(Types.NestedField.optional(fields.size() + 1, name, toIcebergType(field.getType())));
    }
    return Types.StructType.of(fields);
}
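
A hedged usage sketch of this conversion. It assumes the single-argument TypeConverter.toIcebergType seen above is the public entry point that dispatches to fromRow, and the struct rendering in the comments is approximate:

import com.google.common.collect.ImmutableList;
import io.trino.plugin.iceberg.TypeConverter;
import io.trino.spi.type.RowType;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class RowConversionSketch {
    public static void main(String[] args) {
        // A named Trino row converts to an Iceberg struct with field IDs assigned
        // sequentially, roughly struct<1: id: optional long, 2: name: optional string>.
        RowType named = RowType.from(ImmutableList.of(
                RowType.field("id", BIGINT),
                RowType.field("name", VARCHAR)));
        System.out.println(TypeConverter.toIcebergType(named));

        // An anonymous row has unnamed fields, so the conversion fails with
        // TrinoException(NOT_SUPPORTED, "Row type field does not have a name: ...").
        RowType anonymous = RowType.anonymous(ImmutableList.of(BIGINT, VARCHAR));
        TypeConverter.toIcebergType(anonymous);
    }
}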
Use of io.trino.spi.type.RowType in project trino by trinodb.
The class IcebergParquetColumnIOConverter, method constructField. The method recursively builds Parquet reader fields, pairing RowType fields with their ColumnIdentity children by position:
public static Optional<Field> constructField(FieldContext context, ColumnIO columnIO) {
    requireNonNull(context, "context is null");
    if (columnIO == null) {
        return Optional.empty();
    }
    boolean required = columnIO.getType().getRepetition() != OPTIONAL;
    int repetitionLevel = columnRepetitionLevel(columnIO);
    int definitionLevel = columnDefinitionLevel(columnIO);
    Type type = context.getType();
    if (type instanceof RowType) {
        RowType rowType = (RowType) type;
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        ImmutableList.Builder<Optional<Field>> fieldsBuilder = ImmutableList.builder();
        List<RowType.Field> fields = rowType.getFields();
        boolean structHasParameters = false;
        for (int i = 0; i < fields.size(); i++) {
            RowType.Field rowField = fields.get(i);
            ColumnIdentity fieldIdentity = subColumns.get(i);
            Optional<Field> field = constructField(new FieldContext(rowField.getType(), fieldIdentity), lookupColumnById(groupColumnIO, fieldIdentity.getId()));
            structHasParameters |= field.isPresent();
            fieldsBuilder.add(field);
        }
        if (structHasParameters) {
            return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, fieldsBuilder.build()));
        }
        return Optional.empty();
    }
    if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        GroupColumnIO keyValueColumnIO = getMapKeyValueColumn(groupColumnIO);
        if (keyValueColumnIO.getChildrenCount() != 2) {
            return Optional.empty();
        }
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        checkArgument(subColumns.size() == 2, "Not a map: %s", context);
        ColumnIdentity keyIdentity = subColumns.get(0);
        ColumnIdentity valueIdentity = subColumns.get(1);
        // TODO validate column ID
        Optional<Field> keyField = constructField(new FieldContext(mapType.getKeyType(), keyIdentity), keyValueColumnIO.getChild(0));
        // TODO validate column ID
        Optional<Field> valueField = constructField(new FieldContext(mapType.getValueType(), valueIdentity), keyValueColumnIO.getChild(1));
        return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, ImmutableList.of(keyField, valueField)));
    }
    if (type instanceof ArrayType) {
        ArrayType arrayType = (ArrayType) type;
        GroupColumnIO groupColumnIO = (GroupColumnIO) columnIO;
        if (groupColumnIO.getChildrenCount() != 1) {
            return Optional.empty();
        }
        List<ColumnIdentity> subColumns = context.getColumnIdentity().getChildren();
        checkArgument(subColumns.size() == 1, "Not an array: %s", context);
        ColumnIdentity elementIdentity = getOnlyElement(subColumns);
        // TODO validate column ID
        Optional<Field> field = constructField(new FieldContext(arrayType.getElementType(), elementIdentity), getArrayElementColumn(groupColumnIO.getChild(0)));
        return Optional.of(new GroupField(type, repetitionLevel, definitionLevel, required, ImmutableList.of(field)));
    }
    PrimitiveColumnIO primitiveColumnIO = (PrimitiveColumnIO) columnIO;
    RichColumnDescriptor column = new RichColumnDescriptor(primitiveColumnIO.getColumnDescriptor(), columnIO.getType().asPrimitiveType());
    return Optional.of(new PrimitiveField(type, repetitionLevel, definitionLevel, required, column, primitiveColumnIO.getId()));
}
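
A hypothetical setup for the RowType branch above. This is a sketch, not Trino code: it assumes a ColumnIdentity constructor taking an id, a name, a type category, and child identities, and it leaves the Parquet ColumnIO as a parameter (it would normally come from parquet-mr's ColumnIOFactory for the file schema):

import java.util.Optional;

import com.google.common.collect.ImmutableList;
import org.apache.parquet.io.ColumnIO;

import io.trino.parquet.Field;
import io.trino.plugin.iceberg.ColumnIdentity;
import io.trino.plugin.iceberg.ColumnIdentity.TypeCategory;
import io.trino.plugin.iceberg.IcebergParquetColumnIOConverter;
import io.trino.plugin.iceberg.IcebergParquetColumnIOConverter.FieldContext;
import io.trino.spi.type.RowType;

import static io.trino.spi.type.DoubleType.DOUBLE;

public class ConstructFieldSketch {
    public static Optional<Field> locationField(ColumnIO columnIO) {
        RowType rowType = RowType.from(ImmutableList.of(
                RowType.field("latitude", DOUBLE),
                RowType.field("longitude", DOUBLE)));

        // The ColumnIdentity children must line up positionally with the RowType fields;
        // each child carries the Iceberg field ID used by lookupColumnById above.
        ColumnIdentity identity = new ColumnIdentity(1, "location", TypeCategory.STRUCT, ImmutableList.of(
                new ColumnIdentity(2, "latitude", TypeCategory.PRIMITIVE, ImmutableList.of()),
                new ColumnIdentity(3, "longitude", TypeCategory.PRIMITIVE, ImmutableList.of())));

        // Returns Optional.empty() if none of the struct's children exist in the Parquet file.
        return IcebergParquetColumnIOConverter.constructField(new FieldContext(rowType, identity), columnIO);
    }
}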
Use of io.trino.spi.type.RowType in project trino by trinodb.
The class MaterializedResult, method writeValue. The method writes a Java value into a BlockBuilder for the given type, recursing into array, map, and row types:
private static void writeValue(Type type, BlockBuilder blockBuilder, Object value) {
    if (value == null) {
        blockBuilder.appendNull();
    } else if (BIGINT.equals(type)) {
        type.writeLong(blockBuilder, (Long) value);
    } else if (INTEGER.equals(type)) {
        type.writeLong(blockBuilder, (Integer) value);
    } else if (SMALLINT.equals(type)) {
        type.writeLong(blockBuilder, (Short) value);
    } else if (TINYINT.equals(type)) {
        type.writeLong(blockBuilder, (Byte) value);
    } else if (REAL.equals(type)) {
        type.writeLong(blockBuilder, floatToRawIntBits(((Float) value)));
    } else if (DOUBLE.equals(type)) {
        type.writeDouble(blockBuilder, (Double) value);
    } else if (BOOLEAN.equals(type)) {
        type.writeBoolean(blockBuilder, (Boolean) value);
    } else if (JSON.equals(type)) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (type instanceof VarcharType) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (type instanceof CharType) {
        type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
    } else if (VARBINARY.equals(type)) {
        type.writeSlice(blockBuilder, Slices.wrappedBuffer((byte[]) value));
    } else if (DATE.equals(type)) {
        int days = ((SqlDate) value).getDays();
        type.writeLong(blockBuilder, days);
    } else if (type instanceof TimeType) {
        SqlTime time = (SqlTime) value;
        type.writeLong(blockBuilder, time.getPicos());
    } else if (type instanceof TimeWithTimeZoneType) {
        long nanos = roundDiv(((SqlTimeWithTimeZone) value).getPicos(), PICOSECONDS_PER_NANOSECOND);
        int offsetMinutes = ((SqlTimeWithTimeZone) value).getOffsetMinutes();
        type.writeLong(blockBuilder, packTimeWithTimeZone(nanos, offsetMinutes));
    } else if (type instanceof TimestampType) {
        long micros = ((SqlTimestamp) value).getEpochMicros();
        if (((TimestampType) type).getPrecision() <= TimestampType.MAX_SHORT_PRECISION) {
            type.writeLong(blockBuilder, micros);
        } else {
            type.writeObject(blockBuilder, new LongTimestamp(micros, ((SqlTimestamp) value).getPicosOfMicros()));
        }
    } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
        long millisUtc = ((SqlTimestampWithTimeZone) value).getMillisUtc();
        TimeZoneKey timeZoneKey = ((SqlTimestampWithTimeZone) value).getTimeZoneKey();
        type.writeLong(blockBuilder, packDateTimeWithZone(millisUtc, timeZoneKey));
    } else if (type instanceof ArrayType) {
        List<?> list = (List<?>) value;
        Type elementType = ((ArrayType) type).getElementType();
        BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
        for (Object element : list) {
            writeValue(elementType, arrayBlockBuilder, element);
        }
        blockBuilder.closeEntry();
    } else if (type instanceof MapType) {
        Map<?, ?> map = (Map<?, ?>) value;
        Type keyType = ((MapType) type).getKeyType();
        Type valueType = ((MapType) type).getValueType();
        BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
        for (Entry<?, ?> entry : map.entrySet()) {
            writeValue(keyType, mapBlockBuilder, entry.getKey());
            writeValue(valueType, mapBlockBuilder, entry.getValue());
        }
        blockBuilder.closeEntry();
    } else if (type instanceof RowType) {
        List<?> row = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
        for (int field = 0; field < row.size(); field++) {
            writeValue(fieldTypes.get(field), rowBlockBuilder, row.get(field));
        }
        blockBuilder.closeEntry();
    } else {
        throw new IllegalArgumentException("Unsupported type " + type);
    }
}
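
The array, map, and row branches above nest beginBlockEntry/closeEntry calls as they recurse. A minimal sketch of that nesting (invented class name; only SPI calls already used in the method above), building a single row(id bigint, tags array(varchar)) value by hand:

import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slices;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.ArrayType;
import io.trino.spi.type.RowType;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

public class NestedRowSketch {
    public static Block rowWithArray() {
        RowType rowType = RowType.from(ImmutableList.of(
                RowType.field("id", BIGINT),
                RowType.field("tags", new ArrayType(VARCHAR))));

        BlockBuilder topBuilder = rowType.createBlockBuilder(null, 1);
        BlockBuilder rowEntry = topBuilder.beginBlockEntry();

        // field "id": the primitive write completes the first row field
        BIGINT.writeLong(rowEntry, 7L);

        // field "tags": open a nested entry on the row entry builder, as the
        // ArrayType branch of writeValue does, then close it on the same builder
        BlockBuilder arrayEntry = rowEntry.beginBlockEntry();
        VARCHAR.writeSlice(arrayEntry, Slices.utf8Slice("a"));
        VARCHAR.writeSlice(arrayEntry, Slices.utf8Slice("b"));
        rowEntry.closeEntry();

        topBuilder.closeEntry();
        return topBuilder.build();
    }
}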
Use of io.trino.spi.type.RowType in project trino by trinodb.
The class TestMultimapAggAggregation, method testDoubleRowMap. The test aggregates DOUBLE keys into a multimap of row(f1 bigint, f2 double) values:
@Test
public void testDoubleRowMap() {
    RowType innerRowType = RowType.from(ImmutableList.of(RowType.field("f1", BIGINT), RowType.field("f2", DOUBLE)));
    testMultimapAgg(DOUBLE, ImmutableList.of(1.0, 2.0, 3.0), innerRowType, ImmutableList.of(ImmutableList.of(1L, 1.0), ImmutableList.of(2L, 2.0), ImmutableList.of(3L, 3.0)));
}
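
For context, a hypothetical sketch of the grouping this test expects from the aggregation: each DOUBLE key maps to the list of row(f1 bigint, f2 double) values aggregated under it, expressed here as a plain Guava multimap of field-value lists (class name invented):

import java.util.List;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ListMultimap;

public class ExpectedMultimapSketch {
    // Each key occurs once in the test input, so each key maps to a single row value.
    public static ListMultimap<Double, List<Object>> expected() {
        return ImmutableListMultimap.<Double, List<Object>>builder()
                .put(1.0, ImmutableList.<Object>of(1L, 1.0))
                .put(2.0, ImmutableList.<Object>of(2L, 2.0))
                .put(3.0, ImmutableList.<Object>of(3L, 3.0))
                .build();
    }
}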