Use of io.trino.plugin.hive.metastore.Column in the project trino by trinodb.
Example from the class HiveMetadata, method getInsertLayout.
/**
 * Returns the preferred write layout for inserts into a Hive table: bucketed tables get a
 * {@link HivePartitioningHandle} over the bucket (then partition) columns, unbucketed
 * partitioned tables get a layout over the partition column names, and unbucketed
 * unpartitioned tables get no preferred layout.
 */
@Override
public Optional<ConnectorTableLayout> getInsertLayout(ConnectorSession session, ConnectorTableHandle tableHandle) {
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    SchemaTableName schemaTableName = handle.getSchemaTableName();
    // The handle does not carry storage details, so resolve the table from the metastore.
    Table table = metastore.getTable(schemaTableName.getSchemaName(), schemaTableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(schemaTableName));
    if (table.getStorage().getBucketProperty().isPresent()) {
        if (!isSupportedBucketing(table)) {
            throw new TrinoException(NOT_SUPPORTED, "Cannot write to a table bucketed on an unsupported type");
        }
    }
    // Note: we cannot use hiveTableHandle.isInAcidTransaction() here as transaction is not yet set in HiveTableHandle when getInsertLayout is called
    else if (isFullAcidTable(table.getParameters())) {
        // Full ACID tables are implicitly bucketed; synthesize a single-bucket property for the write.
        table = Table.builder(table)
                .withStorage(storage -> storage.setBucketProperty(Optional.of(
                        new HiveBucketProperty(ImmutableList.of(), HiveBucketing.BucketingVersion.BUCKETING_V2, 1, ImmutableList.of()))))
                .build();
    }
    Optional<HiveBucketHandle> bucketHandle = getHiveBucketHandle(session, table, typeManager);
    List<Column> partitionColumns = table.getPartitionColumns();
    if (bucketHandle.isEmpty()) {
        // Unbucketed table: the preferred layout, if any, partitions by the partition columns.
        if (partitionColumns.isEmpty()) {
            return Optional.empty();
        }
        List<String> partitionColumnNames = partitionColumns.stream()
                .map(Column::getName)
                .collect(toImmutableList());
        return Optional.of(new ConnectorTableLayout(partitionColumnNames));
    }
    HiveBucketProperty bucketProperty = table.getStorage().getBucketProperty()
            .orElseThrow(() -> new NoSuchElementException("Bucket property should be set"));
    if (!bucketProperty.getSortedBy().isEmpty() && !isSortedWritingEnabled(session)) {
        throw new TrinoException(NOT_SUPPORTED, "Writing to bucketed sorted Hive tables is disabled");
    }
    HiveBucketHandle bucketing = bucketHandle.get();
    // Partitioning column order matters: bucket columns first, then partition columns.
    ImmutableList.Builder<String> partitioningColumns = ImmutableList.builder();
    for (HiveColumnHandle column : bucketing.getColumns()) {
        partitioningColumns.add(column.getName());
    }
    for (Column column : partitionColumns) {
        partitioningColumns.add(column.getName());
    }
    HivePartitioningHandle partitioningHandle = new HivePartitioningHandle(
            bucketing.getBucketingVersion(),
            bucketing.getTableBucketCount(),
            bucketing.getColumns().stream()
                    .map(HiveColumnHandle::getHiveType)
                    .collect(toImmutableList()),
            OptionalInt.of(bucketing.getTableBucketCount()),
            !partitionColumns.isEmpty() && isParallelPartitionedBucketedWrites(session));
    return Optional.of(new ConnectorTableLayout(partitioningHandle, partitioningColumns.build()));
}
Use of io.trino.plugin.hive.metastore.Column in the project trino by trinodb.
Example from the class GlueHiveMetastore, method dropColumn.
/**
 * Drops a data column from the named table by replacing the table definition with one
 * whose data-column list omits the column.
 *
 * @throws ColumnNotFoundException if the table has no column with the given name
 */
@Override
public void dropColumn(String databaseName, String tableName, String columnName) {
    // Rejects dropping partition columns and other disallowed cases before any lookup.
    verifyCanDropColumn(this, databaseName, tableName, columnName);
    Table oldTable = getExistingTable(databaseName, tableName);
    if (oldTable.getColumn(columnName).isEmpty()) {
        SchemaTableName name = new SchemaTableName(databaseName, tableName);
        throw new ColumnNotFoundException(name, columnName);
    }
    // Rebuild the data-column list without the dropped column. A plain loop (rather than
    // stream().forEach(builder::add), which builds results via side effects) keeps this
    // consistent with FileHiveMetastore.dropColumn.
    ImmutableList.Builder<Column> newDataColumns = ImmutableList.builder();
    for (Column column : oldTable.getDataColumns()) {
        if (!column.getName().equals(columnName)) {
            newDataColumns.add(column);
        }
    }
    Table newTable = Table.builder(oldTable).setDataColumns(newDataColumns.build()).build();
    replaceTable(databaseName, tableName, newTable, null);
}
Use of io.trino.plugin.hive.metastore.Column in the project trino by trinodb.
Example from the class GlueHiveMetastore, method renameColumn.
/**
 * Renames a data column of the named table by replacing the table definition with one
 * whose data-column list carries the new name (type and comment are preserved).
 *
 * @throws TrinoException with NOT_SUPPORTED if the column is a partition column
 */
@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName) {
    Table existingTable = getExistingTable(databaseName, tableName);
    // Partition columns define the table's physical layout and cannot be renamed in place.
    boolean isPartitionColumn = existingTable.getPartitionColumns().stream()
            .anyMatch(column -> column.getName().equals(oldColumnName));
    if (isPartitionColumn) {
        throw new TrinoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
    }
    // Copy the data columns, substituting a renamed Column where the old name matches.
    ImmutableList.Builder<Column> updatedColumns = ImmutableList.builder();
    for (Column column : existingTable.getDataColumns()) {
        updatedColumns.add(column.getName().equals(oldColumnName)
                ? new Column(newColumnName, column.getType(), column.getComment())
                : column);
    }
    Table updatedTable = Table.builder(existingTable).setDataColumns(updatedColumns.build()).build();
    replaceTable(databaseName, tableName, updatedTable, null);
}
Use of io.trino.plugin.hive.metastore.Column in the project trino by trinodb.
Example from the class FileHiveMetastore, method dropColumn.
/**
 * Drops a data column from the named table, persisting the change through
 * {@code alterTable}'s read-modify-write cycle.
 *
 * @throws ColumnNotFoundException if the table has no column with the given name
 */
@Override
public synchronized void dropColumn(String databaseName, String tableName, String columnName) {
    alterTable(databaseName, tableName, table -> {
        // Rejects dropping partition columns and other disallowed cases.
        verifyCanDropColumn(this, databaseName, tableName, columnName);
        if (table.getColumn(columnName).isEmpty()) {
            throw new ColumnNotFoundException(new SchemaTableName(databaseName, tableName), columnName);
        }
        // Keep every data column except the one being dropped.
        ImmutableList.Builder<Column> remainingColumns = ImmutableList.builder();
        for (Column column : table.getDataColumns()) {
            if (column.getName().equals(columnName)) {
                continue;
            }
            remainingColumns.add(column);
        }
        return table.withDataColumns(currentVersion, remainingColumns.build());
    });
}
Use of io.trino.plugin.hive.metastore.Column in the project trino by trinodb.
Example from the class GlueStatConverter, method toGlueColumnStatisticsData.
/**
 * Converts Trino {@code HiveColumnStatistics} for one primitive-typed column into the AWS Glue
 * {@code ColumnStatisticsData} representation, dispatching on the Hive primitive category.
 * Only statistics actually present on the input are set on the Glue payload; the row count is
 * used (together with null count and total size) to derive average length for BINARY and
 * string-like columns.
 *
 * @throws IllegalArgumentException if {@code columnType} is not a primitive type
 * @throws TrinoException with HIVE_INVALID_METADATA for a primitive category with no Glue mapping
 */
private static ColumnStatisticsData toGlueColumnStatisticsData(HiveColumnStatistics statistics, HiveType columnType, OptionalLong rowCount) {
TypeInfo typeInfo = columnType.getTypeInfo();
checkArgument(typeInfo.getCategory() == PRIMITIVE, "Unsupported statistics type: %s", columnType);
ColumnStatisticsData catalogColumnStatisticsData = new ColumnStatisticsData();
switch(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
    // BOOLEAN: null count plus true/false counts; no distinct-value count in the Glue model.
    case BOOLEAN:
        {
            BooleanColumnStatisticsData data = new BooleanColumnStatisticsData();
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            statistics.getBooleanStatistics().ifPresent(booleanStatistics -> {
                booleanStatistics.getFalseCount().ifPresent(data::setNumberOfFalses);
                booleanStatistics.getTrueCount().ifPresent(data::setNumberOfTrues);
            });
            catalogColumnStatisticsData.setType(ColumnStatisticsType.BOOLEAN.toString());
            catalogColumnStatisticsData.setBooleanColumnStatisticsData(data);
            break;
        }
    // BINARY: null count plus max/average byte lengths; lengths default to 0 when unknown.
    case BINARY:
        {
            BinaryColumnStatisticsData data = new BinaryColumnStatisticsData();
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            data.setMaximumLength(statistics.getMaxValueSizeInBytes().orElse(0));
            data.setAverageLength(getAverageColumnLength(statistics.getTotalSizeInBytes(), rowCount, statistics.getNullsCount()).orElse(0));
            catalogColumnStatisticsData.setType(ColumnStatisticsType.BINARY.toString());
            catalogColumnStatisticsData.setBinaryColumnStatisticsData(data);
            break;
        }
    // DATE: min/max converted from LocalDate, plus null and distinct counts.
    case DATE:
        {
            DateColumnStatisticsData data = new DateColumnStatisticsData();
            statistics.getDateStatistics().ifPresent(dateStatistics -> {
                dateStatistics.getMin().ifPresent(value -> data.setMinimumValue(localDateToDate(value)));
                dateStatistics.getMax().ifPresent(value -> data.setMaximumValue(localDateToDate(value)));
            });
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            toMetastoreDistinctValuesCount(statistics.getDistinctValuesCount(), statistics.getNullsCount()).ifPresent(data::setNumberOfDistinctValues);
            catalogColumnStatisticsData.setType(ColumnStatisticsType.DATE.toString());
            catalogColumnStatisticsData.setDateColumnStatisticsData(data);
            break;
        }
    // DECIMAL: min/max converted from BigDecimal to the Glue decimal struct, plus null/distinct counts.
    case DECIMAL:
        {
            DecimalColumnStatisticsData data = new DecimalColumnStatisticsData();
            statistics.getDecimalStatistics().ifPresent(decimalStatistics -> {
                decimalStatistics.getMin().ifPresent(value -> data.setMinimumValue(bigDecimalToGlueDecimal(value)));
                decimalStatistics.getMax().ifPresent(value -> data.setMaximumValue(bigDecimalToGlueDecimal(value)));
            });
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            toMetastoreDistinctValuesCount(statistics.getDistinctValuesCount(), statistics.getNullsCount()).ifPresent(data::setNumberOfDistinctValues);
            catalogColumnStatisticsData.setType(ColumnStatisticsType.DECIMAL.toString());
            catalogColumnStatisticsData.setDecimalColumnStatisticsData(data);
            break;
        }
    // FLOAT and DOUBLE both map onto Glue's DOUBLE statistics.
    case FLOAT:
    case DOUBLE:
        {
            DoubleColumnStatisticsData data = new DoubleColumnStatisticsData();
            statistics.getDoubleStatistics().ifPresent(doubleStatistics -> {
                doubleStatistics.getMin().ifPresent(data::setMinimumValue);
                doubleStatistics.getMax().ifPresent(data::setMaximumValue);
            });
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            toMetastoreDistinctValuesCount(statistics.getDistinctValuesCount(), statistics.getNullsCount()).ifPresent(data::setNumberOfDistinctValues);
            catalogColumnStatisticsData.setType(ColumnStatisticsType.DOUBLE.toString());
            catalogColumnStatisticsData.setDoubleColumnStatisticsData(data);
            break;
        }
    // All integer-like categories (and TIMESTAMP) map onto Glue's LONG statistics,
    // sourced from the integer statistics on the Trino side.
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case TIMESTAMP:
        {
            LongColumnStatisticsData data = new LongColumnStatisticsData();
            statistics.getIntegerStatistics().ifPresent(stats -> {
                stats.getMin().ifPresent(data::setMinimumValue);
                stats.getMax().ifPresent(data::setMaximumValue);
            });
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            toMetastoreDistinctValuesCount(statistics.getDistinctValuesCount(), statistics.getNullsCount()).ifPresent(data::setNumberOfDistinctValues);
            catalogColumnStatisticsData.setType(ColumnStatisticsType.LONG.toString());
            catalogColumnStatisticsData.setLongColumnStatisticsData(data);
            break;
        }
    // String-like categories map onto Glue's STRING statistics: null/distinct counts
    // plus max/average lengths (lengths default to 0 when unknown).
    case VARCHAR:
    case CHAR:
    case STRING:
        {
            StringColumnStatisticsData data = new StringColumnStatisticsData();
            statistics.getNullsCount().ifPresent(data::setNumberOfNulls);
            toMetastoreDistinctValuesCount(statistics.getDistinctValuesCount(), statistics.getNullsCount()).ifPresent(data::setNumberOfDistinctValues);
            data.setMaximumLength(statistics.getMaxValueSizeInBytes().orElse(0));
            data.setAverageLength(getAverageColumnLength(statistics.getTotalSizeInBytes(), rowCount, statistics.getNullsCount()).orElse(0));
            catalogColumnStatisticsData.setType(ColumnStatisticsType.STRING.toString());
            catalogColumnStatisticsData.setStringColumnStatisticsData(data);
            break;
        }
    default:
        throw new TrinoException(HIVE_INVALID_METADATA, "Invalid column statistics type: " + ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
}
return catalogColumnStatisticsData;
}
Aggregations