Use of org.apache.iceberg.types.Type in the project presto by prestodb.
The class FilesTable, method buildPages.
/**
 * Builds the result pages for the $files metadata table: one row per data file in the
 * table's current (or requested) snapshot, carrying file-level statistics.
 *
 * @param tableMetadata metadata describing the $files table layout, used to size the page builder
 * @param session the connector session (currently unused, kept for interface compatibility)
 * @param icebergTable the Iceberg table whose data files are listed
 * @param snapshotId optional snapshot to scan; empty means the current snapshot
 * @return the pages containing one row per data file
 */
private static List<Page> buildPages(ConnectorTableMetadata tableMetadata, ConnectorSession session, Table icebergTable, Optional<Long> snapshotId) {
PageListBuilder pagesBuilder = forTable(tableMetadata);
// includeColumnStats() is required so column sizes, value counts and bounds are populated on each DataFile
TableScan tableScan = getTableScan(TupleDomain.all(), snapshotId, icebergTable).includeColumnStats();
Map<Integer, Type> idToTypeMap = getIdToTypeMap(icebergTable.schema());
tableScan.planFiles().forEach(fileScanTask -> {
DataFile dataFile = fileScanTask.file();
pagesBuilder.beginRow();
pagesBuilder.appendVarchar(dataFile.path().toString());
pagesBuilder.appendVarchar(dataFile.format().name());
pagesBuilder.appendBigint(dataFile.recordCount());
pagesBuilder.appendBigint(dataFile.fileSizeInBytes());
// checkNonNull appends a NULL cell and returns false when the statistic is absent
if (checkNonNull(dataFile.columnSizes(), pagesBuilder)) {
pagesBuilder.appendIntegerBigintMap(dataFile.columnSizes());
}
if (checkNonNull(dataFile.valueCounts(), pagesBuilder)) {
pagesBuilder.appendIntegerBigintMap(dataFile.valueCounts());
}
if (checkNonNull(dataFile.nullValueCounts(), pagesBuilder)) {
pagesBuilder.appendIntegerBigintMap(dataFile.nullValueCounts());
}
if (checkNonNull(dataFile.lowerBounds(), pagesBuilder)) {
pagesBuilder.appendIntegerVarcharMap(toHumanReadableBounds(dataFile.lowerBounds(), idToTypeMap));
}
if (checkNonNull(dataFile.upperBounds(), pagesBuilder)) {
pagesBuilder.appendIntegerVarcharMap(toHumanReadableBounds(dataFile.upperBounds(), idToTypeMap));
}
if (checkNonNull(dataFile.keyMetadata(), pagesBuilder)) {
pagesBuilder.appendVarbinary(Slices.wrappedBuffer(dataFile.keyMetadata()));
}
if (checkNonNull(dataFile.splitOffsets(), pagesBuilder)) {
pagesBuilder.appendBigintArray(dataFile.splitOffsets());
}
pagesBuilder.endRow();
});
return pagesBuilder.build();
}

/**
 * Converts per-column bound values from their serialized {@link ByteBuffer} form to
 * human-readable strings, keyed by field id. Shared by the lower- and upper-bounds columns.
 *
 * @param bounds map from field id to the serialized single-value bound
 * @param idToTypeMap map from field id to the Iceberg type used for deserialization
 * @return map from field id to the human-readable bound value
 */
private static Map<Integer, String> toHumanReadableBounds(Map<Integer, ByteBuffer> bounds, Map<Integer, Type> idToTypeMap) {
return bounds.entrySet().stream().collect(toImmutableMap(
Map.Entry::getKey,
entry -> Transforms.identity(idToTypeMap.get(entry.getKey()))
.toHumanString(Conversions.fromByteBuffer(idToTypeMap.get(entry.getKey()), entry.getValue()))));
}
Use of org.apache.iceberg.types.Type in the project presto by prestodb.
The class PartitionTable, method partitionTypes.
/**
 * Resolves the Iceberg result type of each partition field by applying the field's
 * transform to the type of its source column (looked up via {@code idToTypeMapping}).
 *
 * @param partitionFields the partition spec fields, in spec order
 * @return the result types, in the same order as {@code partitionFields}
 */
private List<Type> partitionTypes(List<PartitionField> partitionFields) {
ImmutableList.Builder<Type> resultTypes = ImmutableList.builder();
partitionFields.forEach(field ->
resultTypes.add(field.transform().getResultType(idToTypeMapping.get(field.sourceId()))));
return resultTypes.build();
}
Use of org.apache.iceberg.types.Type in the project hive by apache.
The class HiveSchemaConverter, method convertType.
/**
 * Converts a Hive {@link TypeInfo} to the corresponding Iceberg {@link Type}.
 * Primitive, struct, map and list categories are supported; BYTE/SHORT and
 * CHAR/VARCHAR are widened to INTEGER/STRING only when {@code autoConvert} is set.
 * Nested map/list conversion consumes fresh field ids from the instance counter {@code id}.
 *
 * @param typeInfo the Hive type to convert
 * @return the equivalent Iceberg type
 * @throws IllegalArgumentException if the Hive type has no Iceberg equivalent,
 *         or requires auto-conversion while {@code autoConvert} is disabled
 */
Type convertType(TypeInfo typeInfo) {
switch(typeInfo.getCategory()) {
case PRIMITIVE:
// Cast once; every primitive branch below needs the primitive category.
PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
switch(primitiveTypeInfo.getPrimitiveCategory()) {
case FLOAT:
return Types.FloatType.get();
case DOUBLE:
return Types.DoubleType.get();
case BOOLEAN:
return Types.BooleanType.get();
case BYTE:
case SHORT:
Preconditions.checkArgument(autoConvert, "Unsupported Hive type: %s, use integer instead", primitiveTypeInfo.getPrimitiveCategory());
LOG.debug("Using auto conversion from SHORT/BYTE to INTEGER");
return Types.IntegerType.get();
case INT:
return Types.IntegerType.get();
case LONG:
return Types.LongType.get();
case BINARY:
return Types.BinaryType.get();
case CHAR:
case VARCHAR:
Preconditions.checkArgument(autoConvert, "Unsupported Hive type: %s, use string instead", primitiveTypeInfo.getPrimitiveCategory());
LOG.debug("Using auto conversion from CHAR/VARCHAR to STRING");
return Types.StringType.get();
case STRING:
return Types.StringType.get();
case TIMESTAMP:
return Types.TimestampType.withoutZone();
case DATE:
return Types.DateType.get();
case DECIMAL:
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
return Types.DecimalType.of(decimalTypeInfo.precision(), decimalTypeInfo.scale());
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
default:
// special case for Timestamp with Local TZ which is only available in Hive3
if ("TIMESTAMPLOCALTZ".equalsIgnoreCase(primitiveTypeInfo.getPrimitiveCategory().name())) {
return Types.TimestampType.withZone();
}
throw new IllegalArgumentException("Unsupported Hive type (" + primitiveTypeInfo.getPrimitiveCategory() + ") for Iceberg tables.");
}
case STRUCT:
StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
List<Types.NestedField> fields = convertInternal(structTypeInfo.getAllStructFieldNames(), structTypeInfo.getAllStructFieldTypeInfos(), Collections.emptyList());
return Types.StructType.of(fields);
case MAP:
MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
Type keyType = convertType(mapTypeInfo.getMapKeyTypeInfo());
Type valueType = convertType(mapTypeInfo.getMapValueTypeInfo());
// Key id must be allocated before value id to keep assignment order stable.
int keyId = id++;
int valueId = id++;
return Types.MapType.ofOptional(keyId, valueId, keyType, valueType);
case LIST:
ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
Type listType = convertType(listTypeInfo.getListElementTypeInfo());
return Types.ListType.ofOptional(id++, listType);
case UNION:
default:
throw new IllegalArgumentException("Unknown type " + typeInfo.getCategory());
}
}
Use of org.apache.iceberg.types.Type in the project hive by apache.
The class TestHiveIcebergComplexTypeWrites, method insertQueryForComplexType.
/**
 * Builds an INSERT ... SELECT statement for a table whose second column is a complex
 * type, selecting constant values through the given dummy table.
 *
 * @param tableName the target table to insert into
 * @param dummyTableName a single-row table used as the SELECT source
 * @param schema the target table's schema; field 1 is the complex column
 * @param record the record providing the literal values to insert
 * @return the complete INSERT query string
 */
private String insertQueryForComplexType(String tableName, String dummyTableName, Schema schema, Record record) {
// The second schema field holds the complex type under test.
Type complexType = schema.asStruct().fields().get(1).type();
StringBuilder sql = new StringBuilder();
sql.append("INSERT INTO TABLE ").append(tableName);
sql.append(" SELECT ").append(record.get(0)).append(", ");
sql.append(buildComplexTypeInnerQuery(record.get(1), complexType));
// Drop the trailing separator character emitted by buildComplexTypeInnerQuery.
sql.deleteCharAt(sql.length() - 1);
sql.append(" FROM ").append(dummyTableName).append(" LIMIT 1");
return sql.toString();
}
Use of org.apache.iceberg.types.Type in the project hive by apache.
The class TestHiveIcebergStorageHandlerNoScan, method testCreateTableWithNotSupportedTypes.
@Test
public void testCreateTableWithNotSupportedTypes() {
TableIdentifier identifier = TableIdentifier.of("default", "not_supported_types");
// Can not create INTERVAL types from normal create table, so leave them out from this test
Map<String, Type> notSupportedTypes = ImmutableMap.of(
"TINYINT", Types.IntegerType.get(),
"SMALLINT", Types.IntegerType.get(),
"VARCHAR(1)", Types.StringType.get(),
"CHAR(1)", Types.StringType.get());
for (Map.Entry<String, Type> entry : notSupportedTypes.entrySet()) {
String hiveTypeName = entry.getKey();
// Each unsupported Hive type must be rejected by the storage handler at CREATE time.
AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class, "Unsupported Hive type",
() -> shell.executeStatement("CREATE EXTERNAL TABLE not_supported_types " + "(not_supported " + hiveTypeName + ") " + "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) + testTables.propertiesForCreateTableSQL(ImmutableMap.of())));
}
}
Aggregations