Use of io.trino.plugin.hive.HiveType in project trino by trinodb.
The class HiveBucketing, method getHiveBuckets.
private static Optional<Set<Integer>> getHiveBuckets(HiveBucketProperty hiveBucketProperty, List<Column> dataColumns, Map<ColumnHandle, List<NullableValue>> bindings) {
    if (bindings.isEmpty()) {
        return Optional.empty();
    }
    // Get bucket column names
    List<String> bucketColumns = hiveBucketProperty.getBucketedBy();
    // Verify the bucket column types are supported
    Map<String, HiveType> hiveTypes = new HashMap<>();
    for (Column column : dataColumns) {
        hiveTypes.put(column.getName(), column.getType());
    }
    for (String column : bucketColumns) {
        if (!SUPPORTED_TYPES_FOR_BUCKET_FILTER.contains(hiveTypes.get(column))) {
            return Optional.empty();
        }
    }
    // Get bindings for bucket columns
    Map<String, List<NullableValue>> bucketBindings = new HashMap<>();
    for (Entry<ColumnHandle, List<NullableValue>> entry : bindings.entrySet()) {
        HiveColumnHandle columnHandle = (HiveColumnHandle) entry.getKey();
        if (bucketColumns.contains(columnHandle.getName())) {
            bucketBindings.put(columnHandle.getName(), entry.getValue());
        }
    }
    // Check that we have bindings for all bucket columns
    if (bucketBindings.size() != bucketColumns.size()) {
        return Optional.empty();
    }
    // Order the bucket column bindings according to the order of the bucket columns
    List<List<NullableValue>> orderedBindings = bucketColumns.stream().map(bucketBindings::get).collect(toImmutableList());
    // Get TypeInfos for bucket columns
    List<TypeInfo> typeInfos = bucketColumns.stream().map(name -> hiveTypes.get(name).getTypeInfo()).collect(toImmutableList());
    return getHiveBuckets(hiveBucketProperty.getBucketingVersion(), hiveBucketProperty.getBucketCount(), typeInfos, orderedBindings);
}
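The ordered bindings are handed to another getHiveBuckets overload that maps combinations of bound values to bucket numbers. A minimal stand-alone sketch of that cross-product idea, in plain Java with a placeholder hash in place of Trino's bucketing code (all names below are illustrative, not Trino API):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class BucketExpansionSketch {
    // Placeholder for the real bucket hash; any deterministic hash of the value combination works here
    static int hashCombination(List<Object> values) {
        return values.hashCode();
    }

    // Expand per-column value lists into every combination and collect the bucket each one maps to
    static Set<Integer> candidateBuckets(int bucketCount, List<List<Object>> orderedBindings) {
        Set<Integer> buckets = new HashSet<>();
        collect(bucketCount, orderedBindings, 0, new ArrayList<>(), buckets);
        return buckets;
    }

    private static void collect(int bucketCount, List<List<Object>> bindings, int column,
            List<Object> current, Set<Integer> buckets) {
        if (column == bindings.size()) {
            buckets.add(Math.floorMod(hashCombination(current), bucketCount));
            return;
        }
        for (Object value : bindings.get(column)) {
            current.add(value);
            collect(bucketCount, bindings, column + 1, current, buckets);
            current.remove(current.size() - 1);
        }
    }
}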
Use of io.trino.plugin.hive.HiveType in project trino by trinodb.
The class AbstractFileFormat, method createSchema.
static Properties createSchema(HiveStorageFormat format, List<String> columnNames, List<Type> columnTypes) {
    Properties schema = new Properties();
    schema.setProperty(SERIALIZATION_LIB, format.getSerde());
    schema.setProperty(FILE_INPUT_FORMAT, format.getInputFormat());
    schema.setProperty(META_TABLE_COLUMNS, join(",", columnNames));
    schema.setProperty(META_TABLE_COLUMN_TYPES, columnTypes.stream().map(HiveType::toHiveType).map(HiveType::getHiveTypeName).map(HiveTypeName::toString).collect(joining(":")));
    return schema;
}
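For reference, a minimal stand-alone sketch of the schema layout this produces, with plain strings in place of HiveStorageFormat and Type. The literal property keys and the ORC serde class name below are assumptions based on the usual Hive metastore constants, not taken from the snippet:

import java.util.Properties;

class SchemaSketch {
    public static void main(String[] args) {
        Properties schema = new Properties();
        // assumed literal values of SERIALIZATION_LIB and the serde class
        schema.setProperty("serialization.lib", "org.apache.hadoop.hive.ql.io.orc.OrcSerde");
        // column names are comma-separated, matching join(",", columnNames) above
        schema.setProperty("columns", String.join(",", "id", "name"));
        // Hive type names are colon-separated, matching the joining(":") collector above
        schema.setProperty("columns.types", String.join(":", "bigint", "varchar(20)"));
        System.out.println(schema);
    }
}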
Use of io.trino.plugin.hive.HiveType in project trino by trinodb.
The class TestHiveBucketing, method computeTrino.
private static int computeTrino(BucketingVersion bucketingVersion, List<String> hiveTypeStrings, List<Object> hiveValues, List<HiveType> hiveTypes, List<TypeInfo> hiveTypeInfos) {
    ImmutableList.Builder<Block> blockListBuilder = ImmutableList.builder();
    Object[] nativeContainerValues = new Object[hiveValues.size()];
    for (int i = 0; i < hiveTypeStrings.size(); i++) {
        Object hiveValue = hiveValues.get(i);
        Type type = hiveTypes.get(i).getType(TESTING_TYPE_MANAGER);
        BlockBuilder blockBuilder = type.createBlockBuilder(null, 3);
        // prepend 2 nulls to make sure the position argument is respected by the bucketing function
        blockBuilder.appendNull();
        blockBuilder.appendNull();
        appendToBlockBuilder(type, hiveValue, blockBuilder);
        Block block = blockBuilder.build();
        blockListBuilder.add(block);
        nativeContainerValues[i] = toNativeContainerValue(type, hiveValue);
    }
    ImmutableList<Block> blockList = blockListBuilder.build();
    int result1 = bucketingVersion.getBucketHashCode(hiveTypeInfos, new Page(blockList.toArray(new Block[blockList.size()])), 2);
    int result2 = bucketingVersion.getBucketHashCode(hiveTypeInfos, nativeContainerValues);
    assertEquals(result1, result2, "overloads of getBucketHashCode produced different result");
    return result1;
}
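The key trick in this test is computing the same bucket hash through two overloads, one positional (reading a page at position 2, past the prepended nulls) and one value-based, and asserting they agree. A stripped-down sketch of that cross-check pattern in plain Java, where hashAtPosition and hashValue are stand-ins rather than Trino API:

import java.util.Arrays;
import java.util.List;

class HashCrossCheckSketch {
    // Stand-in for the positional overload: hash the value at a given position in a column
    static int hashAtPosition(List<Object> column, int position) {
        Object value = column.get(position);
        return value == null ? 0 : value.hashCode();
    }

    // Stand-in for the value-based overload: hash a raw value directly
    static int hashValue(Object value) {
        return value == null ? 0 : value.hashCode();
    }

    public static void main(String[] args) {
        // two leading nulls mirror the test above; the real value sits at position 2
        List<Object> column = Arrays.asList(null, null, "abc");
        int fromPosition = hashAtPosition(column, 2);
        int fromValue = hashValue("abc");
        if (fromPosition != fromValue) {
            throw new AssertionError("overloads produced different results");
        }
    }
}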
Use of io.trino.plugin.hive.HiveType in project trino by trinodb.
The class HiveCoercionPolicy, method canCoerce.
private boolean canCoerce(HiveType fromHiveType, HiveType toHiveType) {
    Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
    Type toType = typeManager.getType(toHiveType.getTypeSignature());
    if (fromType instanceof VarcharType) {
        return toType instanceof VarcharType || toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (toType instanceof VarcharType) {
        return fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_BYTE)) {
        return toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_SHORT)) {
        return toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_INT)) {
        return toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_FLOAT)) {
        return toHiveType.equals(HIVE_DOUBLE) || toType instanceof DecimalType;
    }
    if (fromHiveType.equals(HIVE_DOUBLE)) {
        return toHiveType.equals(HIVE_FLOAT) || toType instanceof DecimalType;
    }
    if (fromType instanceof DecimalType) {
        return toType instanceof DecimalType || toHiveType.equals(HIVE_FLOAT) || toHiveType.equals(HIVE_DOUBLE);
    }
    return canCoerceForList(fromHiveType, toHiveType) || canCoerceForMap(fromHiveType, toHiveType) || canCoerceForStruct(fromHiveType, toHiveType);
}
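For the integer and floating-point cases above, the policy amounts to a small widening matrix. A self-contained sketch with an illustrative enum standing in for Trino's HiveType constants (varchar, decimal, and nested types omitted; the enum and method names are not Trino API):

enum PrimitiveKind { BYTE, SHORT, INT, LONG, FLOAT, DOUBLE }

class CoercionSketch {
    static boolean canWiden(PrimitiveKind from, PrimitiveKind to) {
        switch (from) {
            case BYTE:
                return to == PrimitiveKind.SHORT || to == PrimitiveKind.INT || to == PrimitiveKind.LONG;
            case SHORT:
                return to == PrimitiveKind.INT || to == PrimitiveKind.LONG;
            case INT:
                return to == PrimitiveKind.LONG;
            case FLOAT:
                return to == PrimitiveKind.DOUBLE;
            case DOUBLE:
                // the policy above also allows the narrowing double -> float coercion
                return to == PrimitiveKind.FLOAT;
            default:
                return false;
        }
    }
}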
Use of io.trino.plugin.hive.HiveType in project trino by trinodb.
The class HiveUtil, method getPartitionKeyColumnHandles.
public static List<HiveColumnHandle> getPartitionKeyColumnHandles(Table table, TypeManager typeManager) {
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();
    List<Column> partitionKeys = table.getPartitionColumns();
    for (Column field : partitionKeys) {
        HiveType hiveType = field.getType();
        if (!hiveType.isSupportedType(table.getStorage().getStorageFormat())) {
            throw new TrinoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName()));
        }
        columns.add(createBaseColumn(field.getName(), -1, hiveType, hiveType.getType(typeManager), PARTITION_KEY, field.getComment()));
    }
    return columns.build();
}
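The method follows a validate-then-map pattern: reject any partition key whose Hive type is unsupported for the table's storage format, otherwise build one handle per partition column. A minimal sketch of that pattern with plain Java stand-ins (PartitionColumn, ColumnHandle, and isSupported are hypothetical types, not Trino API):

import java.util.ArrayList;
import java.util.List;

class PartitionKeySketch {
    record PartitionColumn(String name, String hiveType) {}

    record ColumnHandle(String name, String hiveType) {}

    // stand-in for HiveType.isSupportedType(storageFormat)
    static boolean isSupported(String hiveType) {
        return !hiveType.startsWith("union");
    }

    static List<ColumnHandle> partitionKeyHandles(List<PartitionColumn> partitionKeys, String tableName) {
        List<ColumnHandle> handles = new ArrayList<>();
        for (PartitionColumn column : partitionKeys) {
            if (!isSupported(column.hiveType())) {
                throw new IllegalArgumentException(
                        "Unsupported Hive type " + column.hiveType() + " found in partition keys of table " + tableName);
            }
            handles.add(new ColumnHandle(column.name(), column.hiveType()));
        }
        return handles;
    }
}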