
Example 1 with WriteInfo

Use of io.trino.plugin.hive.LocationService.WriteInfo in project trino by trinodb.

From class HiveMetadata, method beginOptimize.

private BeginTableExecuteResult<ConnectorTableExecuteHandle, ConnectorTableHandle> beginOptimize(ConnectorSession session, ConnectorTableExecuteHandle tableExecuteHandle, ConnectorTableHandle sourceTableHandle) {
    HiveTableExecuteHandle hiveExecuteHandle = (HiveTableExecuteHandle) tableExecuteHandle;
    HiveTableHandle hiveSourceTableHandle = (HiveTableHandle) sourceTableHandle;
    WriteInfo writeInfo = locationService.getQueryWriteInfo(hiveExecuteHandle.getLocationHandle());
    String writeDeclarationId = metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), hiveExecuteHandle.getSchemaTableName());
    return new BeginTableExecuteResult<>(
            hiveExecuteHandle.withWriteDeclarationId(writeDeclarationId),
            hiveSourceTableHandle
                    .withMaxScannedFileSize(hiveExecuteHandle.getMaxScannedFileSize())
                    .withRecordScannedFiles(true));
}
Also used : WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) BeginTableExecuteResult(io.trino.spi.connector.BeginTableExecuteResult)
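
In each of these examples the WriteInfo returned by LocationService.getQueryWriteInfo is read only through its getWriteMode() and getWritePath() accessors before the intention to write is declared to the metastore. As a rough mental model only (a hedged sketch, not the actual Trino class, which may carry additional fields such as a separate target path), it can be pictured as a small immutable value object:

import io.trino.plugin.hive.LocationHandle.WriteMode;
import org.apache.hadoop.fs.Path;

import static java.util.Objects.requireNonNull;

// Illustrative sketch only -- not the real io.trino.plugin.hive.LocationService.WriteInfo.
// It models just the two accessors exercised in the examples on this page.
public class WriteInfoSketch {
    private final Path writePath;      // where this query stages or writes its files
    private final WriteMode writeMode; // e.g. DIRECT_TO_TARGET_EXISTING_DIRECTORY

    public WriteInfoSketch(Path writePath, WriteMode writeMode) {
        this.writePath = requireNonNull(writePath, "writePath is null");
        this.writeMode = requireNonNull(writeMode, "writeMode is null");
    }

    public Path getWritePath() {
        return writePath;
    }

    public WriteMode getWriteMode() {
        return writeMode;
    }
}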

Example 2 with WriteInfo

Use of io.trino.plugin.hive.LocationService.WriteInfo in project trino by trinodb.

From class HiveMetadata, method beginDelete.

@Override
public ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTableHandle tableHandle, RetryMode retryMode) {
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = handle.getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
    ensureTableSupportsDelete(table);
    if (retryMode != NO_RETRIES) {
        throw new TrinoException(NOT_SUPPORTED, "Deleting from Hive tables is not supported with query retries enabled");
    }
    if (!autoCommit) {
        throw new TrinoException(NOT_SUPPORTED, "Deleting from Hive transactional tables is not supported in explicit transactions (use autocommit mode)");
    }
    if (isSparkBucketedTable(table)) {
        throw new TrinoException(NOT_SUPPORTED, "Deleting from Spark bucketed tables is not supported");
    }
    LocationHandle locationHandle = locationService.forExistingTable(metastore, session, table);
    AcidTransaction transaction = metastore.beginDelete(session, table);
    WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
    metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), handle.getSchemaTableName());
    return handle.withTransaction(transaction);
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) AcidTransaction.forCreateTable(io.trino.plugin.hive.acid.AcidTransaction.forCreateTable) SystemTables.getSourceTableNameFromSystemTable(io.trino.plugin.hive.util.SystemTables.getSourceTableNameFromSystemTable) Table(io.trino.plugin.hive.metastore.Table) AcidUtils.isTransactionalTable(org.apache.hadoop.hive.ql.io.AcidUtils.isTransactionalTable) HiveUtil.isIcebergTable(io.trino.plugin.hive.util.HiveUtil.isIcebergTable) HiveUtil.isSparkBucketedTable(io.trino.plugin.hive.util.HiveUtil.isSparkBucketedTable) HiveUtil.isDeltaLakeTable(io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable) AcidUtils.isFullAcidTable(org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable) SystemTable(io.trino.spi.connector.SystemTable) AcidTransaction(io.trino.plugin.hive.acid.AcidTransaction) WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) TrinoException(io.trino.spi.TrinoException) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName)

Example 3 with WriteInfo

Use of io.trino.plugin.hive.LocationService.WriteInfo in project trino by trinodb.

From class HiveMetadata, method finishDelete.

@Override
public void finishDelete(ConnectorSession session, ConnectorTableHandle tableHandle, Collection<Slice> fragments) {
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    checkArgument(handle.isAcidDelete(), "handle should be a delete handle, but is %s", handle);
    requireNonNull(fragments, "fragments is null");
    SchemaTableName tableName = handle.getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
    ensureTableSupportsDelete(table);
    List<PartitionAndStatementId> partitionAndStatementIds = fragments.stream()
            .map(Slice::getBytes)
            .map(PartitionAndStatementId.CODEC::fromJson)
            .collect(toImmutableList());
    HdfsContext context = new HdfsContext(session);
    for (PartitionAndStatementId ps : partitionAndStatementIds) {
        createOrcAcidVersionFile(context, new Path(ps.getDeleteDeltaDirectory()));
    }
    LocationHandle locationHandle = locationService.forExistingTable(metastore, session, table);
    WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
    metastore.finishRowLevelDelete(session, table.getDatabaseName(), table.getTableName(), writeInfo.getWritePath(), partitionAndStatementIds);
}
Also used : Path(org.apache.hadoop.fs.Path) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) AcidTransaction.forCreateTable(io.trino.plugin.hive.acid.AcidTransaction.forCreateTable) SystemTables.getSourceTableNameFromSystemTable(io.trino.plugin.hive.util.SystemTables.getSourceTableNameFromSystemTable) Table(io.trino.plugin.hive.metastore.Table) AcidUtils.isTransactionalTable(org.apache.hadoop.hive.ql.io.AcidUtils.isTransactionalTable) HiveUtil.isIcebergTable(io.trino.plugin.hive.util.HiveUtil.isIcebergTable) HiveUtil.isSparkBucketedTable(io.trino.plugin.hive.util.HiveUtil.isSparkBucketedTable) HiveUtil.isDeltaLakeTable(io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable) AcidUtils.isFullAcidTable(org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable) SystemTable(io.trino.spi.connector.SystemTable) Slice(io.airlift.slice.Slice) WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName)
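
createOrcAcidVersionFile is referenced here but not shown in this excerpt. Based on the OrcAcidVersion.writeVersionFile static import listed under Example 4, a plausible shape is sketched below; the hdfsEnvironment field and the exact error handling are assumptions, so the real implementation may differ:

// Hedged sketch of the helper used above; assumes an HdfsEnvironment field named hdfsEnvironment
// and the static import org.apache.hadoop.hive.ql.io.AcidUtils.OrcAcidVersion.writeVersionFile.
private void createOrcAcidVersionFile(HdfsContext context, Path deleteDeltaDirectory) {
    try {
        // Mark the delete-delta directory with the ORC ACID version so readers know its layout.
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(context, deleteDeltaDirectory);
        writeVersionFile(deleteDeltaDirectory, fileSystem);
    }
    catch (IOException e) {
        throw new TrinoException(HIVE_FILESYSTEM_ERROR, "Exception writing ORC ACID version file", e);
    }
}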

Example 4 with WriteInfo

Use of io.trino.plugin.hive.LocationService.WriteInfo in project trino by trinodb.

From class HiveMetadata, method beginInsert.

@Override
public HiveInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle, List<ColumnHandle> columns, RetryMode retryMode) {
    SchemaTableName tableName = ((HiveTableHandle) tableHandle).getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
    checkTableIsWritable(table, writesToNonManagedTablesEnabled);
    for (Column column : table.getDataColumns()) {
        if (!isWritableType(column.getType())) {
            throw new TrinoException(NOT_SUPPORTED, format("Inserting into Hive table %s with column type %s not supported", tableName, column.getType()));
        }
    }
    boolean isTransactional = isTransactionalTable(table.getParameters());
    if (isTransactional && retryMode != NO_RETRIES) {
        throw new TrinoException(NOT_SUPPORTED, "Inserting into Hive transactional tables is not supported with query retries enabled");
    }
    if (isTransactional && !autoCommit) {
        throw new TrinoException(NOT_SUPPORTED, "Inserting into Hive transactional tables is not supported in explicit transactions (use autocommit mode)");
    }
    if (isSparkBucketedTable(table)) {
        throw new TrinoException(NOT_SUPPORTED, "Inserting into Spark bucketed tables is not supported");
    }
    List<HiveColumnHandle> handles = hiveColumnHandles(table, typeManager, getTimestampPrecision(session)).stream()
            .filter(columnHandle -> !columnHandle.isHidden())
            .collect(toImmutableList());
    HiveStorageFormat tableStorageFormat = extractHiveStorageFormat(table);
    Optional.ofNullable(table.getParameters().get(SKIP_HEADER_COUNT_KEY)).map(Integer::parseInt).ifPresent(headerSkipCount -> {
        if (headerSkipCount > 1) {
            throw new TrinoException(NOT_SUPPORTED, format("Inserting into Hive table with value of %s property greater than 1 is not supported", SKIP_HEADER_COUNT_KEY));
        }
    });
    if (table.getParameters().containsKey(SKIP_FOOTER_COUNT_KEY)) {
        throw new TrinoException(NOT_SUPPORTED, format("Inserting into Hive table with %s property not supported", SKIP_FOOTER_COUNT_KEY));
    }
    LocationHandle locationHandle = locationService.forExistingTable(metastore, session, table);
    AcidTransaction transaction = isTransactional ? metastore.beginInsert(session, table) : NO_ACID_TRANSACTION;
    HiveInsertTableHandle result = new HiveInsertTableHandle(
            tableName.getSchemaName(),
            tableName.getTableName(),
            handles,
            metastore.generatePageSinkMetadata(tableName),
            locationHandle,
            table.getStorage().getBucketProperty(),
            tableStorageFormat,
            isRespectTableFormat(session) ? tableStorageFormat : getHiveStorageFormat(session),
            transaction,
            retryMode != NO_RETRIES);
    WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
    if (getInsertExistingPartitionsBehavior(session) == InsertExistingPartitionsBehavior.OVERWRITE && writeInfo.getWriteMode() == DIRECT_TO_TARGET_EXISTING_DIRECTORY) {
        if (isTransactional) {
            throw new TrinoException(NOT_SUPPORTED, "Overwriting existing partition in transactional tables doesn't support DIRECT_TO_TARGET_EXISTING_DIRECTORY write mode");
        }
        // Partition overwrite is not atomic, so it cannot and should not be used outside of an autocommit context.
        if (!autoCommit) {
            throw new TrinoException(NOT_SUPPORTED, "Overwriting existing partition in non auto commit context doesn't support DIRECT_TO_TARGET_EXISTING_DIRECTORY write mode");
        }
    }
    metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), tableName);
    return result;
}
Also used : HiveTableProperties.getBucketProperty(io.trino.plugin.hive.HiveTableProperties.getBucketProperty) InsertExistingPartitionsBehavior(io.trino.plugin.hive.HiveSessionProperties.InsertExistingPartitionsBehavior) HiveSessionProperties.getQueryPartitionFilterRequiredSchemas(io.trino.plugin.hive.HiveSessionProperties.getQueryPartitionFilterRequiredSchemas) ORC_BLOOM_FILTER_FPP(io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_FPP) FileSystem(org.apache.hadoop.fs.FileSystem) HiveBucketing.isSupportedBucketing(io.trino.plugin.hive.util.HiveBucketing.isSupportedBucketing) ANALYZE_COLUMNS_PROPERTY(io.trino.plugin.hive.HiveTableProperties.ANALYZE_COLUMNS_PROPERTY) TypeUtils.isFloatingPointNaN(io.trino.spi.type.TypeUtils.isFloatingPointNaN) ColumnStatisticType(io.trino.spi.statistics.ColumnStatisticType) HiveApplyProjectionUtil.replaceWithNewVariables(io.trino.plugin.hive.HiveApplyProjectionUtil.replaceWithNewVariables) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) HiveTableProperties.getFooterSkipCount(io.trino.plugin.hive.HiveTableProperties.getFooterSkipCount) TABLE_NOT_FOUND(io.trino.spi.StandardErrorCode.TABLE_NOT_FOUND) Map(java.util.Map) ViewNotFoundException(io.trino.spi.connector.ViewNotFoundException) HiveSessionProperties.isBucketExecutionEnabled(io.trino.plugin.hive.HiveSessionProperties.isBucketExecutionEnabled) AcidTransaction(io.trino.plugin.hive.acid.AcidTransaction) Domain(io.trino.spi.predicate.Domain) HiveSessionProperties.isStatisticsEnabled(io.trino.plugin.hive.HiveSessionProperties.isStatisticsEnabled) NULL_FORMAT_PROPERTY(io.trino.plugin.hive.HiveTableProperties.NULL_FORMAT_PROPERTY) HiveUtil.verifyPartitionTypeSupported(io.trino.plugin.hive.util.HiveUtil.verifyPartitionTypeSupported) MANAGED_TABLE(org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE) Collectors.joining(java.util.stream.Collectors.joining) Stream(java.util.stream.Stream) AcidUtils.deltaSubdir(org.apache.hadoop.hive.ql.io.AcidUtils.deltaSubdir) TrinoPrincipal(io.trino.spi.security.TrinoPrincipal) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) HIVE_CONCURRENT_MODIFICATION_DETECTED(io.trino.plugin.hive.HiveErrorCode.HIVE_CONCURRENT_MODIFICATION_DETECTED) HIVE_UNKNOWN_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_UNKNOWN_ERROR) TableScanRedirectApplicationResult(io.trino.spi.connector.TableScanRedirectApplicationResult) TableColumnsMetadata(io.trino.spi.connector.TableColumnsMetadata) HiveSessionProperties.isSortedWritingEnabled(io.trino.plugin.hive.HiveSessionProperties.isSortedWritingEnabled) Joiner(com.google.common.base.Joiner) Partition(io.trino.plugin.hive.metastore.Partition) HiveColumnHandle.updateRowIdColumnHandle(io.trino.plugin.hive.HiveColumnHandle.updateRowIdColumnHandle) HiveUtil(io.trino.plugin.hive.util.HiveUtil) HiveWriterFactory.computeNonTransactionalBucketedFilename(io.trino.plugin.hive.HiveWriterFactory.computeNonTransactionalBucketedFilename) BUCKETED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.BUCKETED_BY_PROPERTY) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) INVALID_ANALYZE_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) HiveBasicStatistics.createEmptyStatistics(io.trino.plugin.hive.HiveBasicStatistics.createEmptyStatistics) Variable(io.trino.spi.expression.Variable) 
StorageFormat.fromHiveStorageFormat(io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat) Supplier(java.util.function.Supplier) HiveTableProperties.getSingleCharacterProperty(io.trino.plugin.hive.HiveTableProperties.getSingleCharacterProperty) TimestampType(io.trino.spi.type.TimestampType) ADD(io.trino.plugin.hive.util.Statistics.ReduceOperator.ADD) HiveSessionProperties.getCompressionCodec(io.trino.plugin.hive.HiveSessionProperties.getCompressionCodec) OptionalLong(java.util.OptionalLong) HIVE_INVALID_METADATA(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA) PATH_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.PATH_COLUMN_NAME) GrantInfo(io.trino.spi.security.GrantInfo) MaterializedViewFreshness(io.trino.spi.connector.MaterializedViewFreshness) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) INVALID_TABLE_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY) HiveWriteUtils(io.trino.plugin.hive.util.HiveWriteUtils) HiveAnalyzeProperties.getColumnNames(io.trino.plugin.hive.HiveAnalyzeProperties.getColumnNames) MapType(io.trino.spi.type.MapType) HIVE_UNSUPPORTED_FORMAT(io.trino.plugin.hive.HiveErrorCode.HIVE_UNSUPPORTED_FORMAT) HiveSessionProperties.getTimestampPrecision(io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision) IOException(java.io.IOException) Iterables.getOnlyElement(com.google.common.collect.Iterables.getOnlyElement) HiveTableProperties.getPartitionedBy(io.trino.plugin.hive.HiveTableProperties.getPartitionedBy) CatalogName(io.trino.plugin.base.CatalogName) HiveSessionProperties.isCollectColumnStatisticsOnWrite(io.trino.plugin.hive.HiveSessionProperties.isCollectColumnStatisticsOnWrite) AVRO_SCHEMA_URL(io.trino.plugin.hive.HiveTableProperties.AVRO_SCHEMA_URL) HiveBucketing.getHiveBucketHandle(io.trino.plugin.hive.util.HiveBucketing.getHiveBucketHandle) HiveUtil.isHiveSystemSchema(io.trino.plugin.hive.util.HiveUtil.isHiveSystemSchema) HiveTableProperties.getHeaderSkipCount(io.trino.plugin.hive.HiveTableProperties.getHeaderSkipCount) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) Boolean.parseBoolean(java.lang.Boolean.parseBoolean) HiveWriteUtils.initializeSerializer(io.trino.plugin.hive.util.HiveWriteUtils.initializeSerializer) HiveSessionProperties.isCreateEmptyBucketFiles(io.trino.plugin.hive.HiveSessionProperties.isCreateEmptyBucketFiles) TableStatisticsMetadata(io.trino.spi.statistics.TableStatisticsMetadata) WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) HiveSessionProperties.isNonTransactionalOptimizeEnabled(io.trino.plugin.hive.HiveSessionProperties.isNonTransactionalOptimizeEnabled) PARTITION_KEY(io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY) HivePartitionManager.extractPartitionValues(io.trino.plugin.hive.HivePartitionManager.extractPartitionValues) HiveTableProperties.getHiveStorageFormat(io.trino.plugin.hive.HiveTableProperties.getHiveStorageFormat) HiveTableProperties.getAvroSchemaUrl(io.trino.plugin.hive.HiveTableProperties.getAvroSchemaUrl) CompressionConfigUtil.configureCompression(io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression) HiveUtil.toPartitionValues(io.trino.plugin.hive.util.HiveUtil.toPartitionValues) Database(io.trino.plugin.hive.metastore.Database) DIRECT_TO_TARGET_EXISTING_DIRECTORY(io.trino.plugin.hive.LocationHandle.WriteMode.DIRECT_TO_TARGET_EXISTING_DIRECTORY) NO_RETRIES(io.trino.spi.connector.RetryMode.NO_RETRIES) 
AcidOperation(io.trino.plugin.hive.acid.AcidOperation) ConnectorMaterializedViewDefinition(io.trino.spi.connector.ConnectorMaterializedViewDefinition) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) MetadataProvider(io.trino.spi.connector.MetadataProvider) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) Collectors.toMap(java.util.stream.Collectors.toMap) Block(io.trino.spi.block.Block) ViewReaderUtil.encodeViewData(io.trino.plugin.hive.ViewReaderUtil.encodeViewData) ConnectorViewDefinition(io.trino.spi.connector.ConnectorViewDefinition) Statistics.reduce(io.trino.plugin.hive.util.Statistics.reduce) ImmutableSet(com.google.common.collect.ImmutableSet) SemiTransactionalHiveMetastore.cleanExtraOutputFiles(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore.cleanExtraOutputFiles) Collection(java.util.Collection) VIEW_STORAGE_FORMAT(io.trino.plugin.hive.metastore.StorageFormat.VIEW_STORAGE_FORMAT) ComputedStatistics(io.trino.spi.statistics.ComputedStatistics) ROW_COUNT(io.trino.spi.statistics.TableStatisticType.ROW_COUNT) Constraint.alwaysTrue(io.trino.spi.connector.Constraint.alwaysTrue) PRESTO_VIEW_FLAG(io.trino.plugin.hive.ViewReaderUtil.PRESTO_VIEW_FLAG) INVALID_SCHEMA_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY) BUCKET_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.BUCKET_COLUMN_NAME) BIGINT(io.trino.spi.type.BigintType.BIGINT) StandardErrorCode(io.trino.spi.StandardErrorCode) SORTED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.SORTED_BY_PROPERTY) BeginTableExecuteResult(io.trino.spi.connector.BeginTableExecuteResult) AcidTransaction.forCreateTable(io.trino.plugin.hive.acid.AcidTransaction.forCreateTable) ProjectedColumnRepresentation(io.trino.plugin.hive.HiveApplyProjectionUtil.ProjectedColumnRepresentation) TableStatisticType(io.trino.spi.statistics.TableStatisticType) JsonCodec(io.airlift.json.JsonCodec) Constraint(io.trino.spi.connector.Constraint) HiveSessionProperties.getHiveStorageFormat(io.trino.plugin.hive.HiveSessionProperties.getHiveStorageFormat) AUTO_PURGE(io.trino.plugin.hive.HiveTableProperties.AUTO_PURGE) NANOSECONDS(io.trino.plugin.hive.HiveTimestampPrecision.NANOSECONDS) CompletableFuture(java.util.concurrent.CompletableFuture) HiveSessionProperties.isProjectionPushdownEnabled(io.trino.plugin.hive.HiveSessionProperties.isProjectionPushdownEnabled) Statistics.createComputedStatisticsToPartitionMap(io.trino.plugin.hive.util.Statistics.createComputedStatisticsToPartitionMap) OptionalInt(java.util.OptionalInt) Function(java.util.function.Function) HiveTableProperties.isTransactional(io.trino.plugin.hive.HiveTableProperties.isTransactional) HashSet(java.util.HashSet) ViewReaderUtil.createViewReader(io.trino.plugin.hive.ViewReaderUtil.createViewReader) OpenCSVSerde(org.apache.hadoop.hive.serde2.OpenCSVSerde) ImmutableList(com.google.common.collect.ImmutableList) TEXTFILE_FIELD_SEPARATOR_ESCAPE(io.trino.plugin.hive.HiveTableProperties.TEXTFILE_FIELD_SEPARATOR_ESCAPE) TableStatistics(io.trino.spi.statistics.TableStatistics) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) NoSuchElementException(java.util.NoSuchElementException) VIRTUAL_VIEW(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW) RetryMode(io.trino.spi.connector.RetryMode) NEW(io.trino.plugin.hive.PartitionUpdate.UpdateMode.NEW) HIVE_VIEW_TRANSLATION_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_VIEW_TRANSLATION_ERROR) 
HiveUtil.getRegularColumnHandles(io.trino.plugin.hive.util.HiveUtil.getRegularColumnHandles) HiveWriteUtils.isFileCreatedByQuery(io.trino.plugin.hive.util.HiveWriteUtils.isFileCreatedByQuery) HIVE_STRING(io.trino.plugin.hive.HiveType.HIVE_STRING) JobConf(org.apache.hadoop.mapred.JobConf) HiveTableProperties.getNullFormat(io.trino.plugin.hive.HiveTableProperties.getNullFormat) HiveTableProperties.getOrcBloomFilterFpp(io.trino.plugin.hive.HiveTableProperties.getOrcBloomFilterFpp) OrcAcidVersion.writeVersionFile(org.apache.hadoop.hive.ql.io.AcidUtils.OrcAcidVersion.writeVersionFile) PrincipalPrivileges(io.trino.plugin.hive.metastore.PrincipalPrivileges) HiveWriterFactory.computeTransactionalBucketedFilename(io.trino.plugin.hive.HiveWriterFactory.computeTransactionalBucketedFilename) TypeManager(io.trino.spi.type.TypeManager) SystemTables.getSourceTableNameFromSystemTable(io.trino.plugin.hive.util.SystemTables.getSourceTableNameFromSystemTable) HiveTableProperties.getExternalLocation(io.trino.plugin.hive.HiveTableProperties.getExternalLocation) NO_ACID_TRANSACTION(io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION) ColumnStatisticMetadata(io.trino.spi.statistics.ColumnStatisticMetadata) ConnectorTableExecuteHandle(io.trino.spi.connector.ConnectorTableExecuteHandle) HiveAnalyzeProperties.getPartitionList(io.trino.plugin.hive.HiveAnalyzeProperties.getPartitionList) Column(io.trino.plugin.hive.metastore.Column) FILE_MODIFIED_TIME_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.FILE_MODIFIED_TIME_COLUMN_NAME) ConnectorOutputTableHandle(io.trino.spi.connector.ConnectorOutputTableHandle) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) ProjectionApplicationResult(io.trino.spi.connector.ProjectionApplicationResult) PARTITION_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.PARTITION_COLUMN_NAME) ORC_BLOOM_FILTER_COLUMNS(io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_COLUMNS) Table(io.trino.plugin.hive.metastore.Table) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) PARTITIONED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.PARTITIONED_BY_PROPERTY) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) Set(java.util.Set) MetastoreUtil.getHiveSchema(io.trino.plugin.hive.metastore.MetastoreUtil.getHiveSchema) SchemaTableName(io.trino.spi.connector.SchemaTableName) SortingProperty(io.trino.spi.connector.SortingProperty) HIVE_COLUMN_ORDER_MISMATCH(io.trino.plugin.hive.HiveErrorCode.HIVE_COLUMN_ORDER_MISMATCH) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) CSV_QUOTE(io.trino.plugin.hive.HiveTableProperties.CSV_QUOTE) SchemaTablePrefix(io.trino.spi.connector.SchemaTablePrefix) MetastoreUtil.getProtectMode(io.trino.plugin.hive.metastore.MetastoreUtil.getProtectMode) HIVE_INVALID_VIEW_DATA(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_VIEW_DATA) AcidUtils.isTransactionalTable(org.apache.hadoop.hive.ql.io.AcidUtils.isTransactionalTable) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) RemoteIterator(org.apache.hadoop.fs.RemoteIterator) HivePrincipal(io.trino.plugin.hive.metastore.HivePrincipal) Iterables(com.google.common.collect.Iterables) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorInsertTableHandle(io.trino.spi.connector.ConnectorInsertTableHandle) Slice(io.airlift.slice.Slice) NullableValue(io.trino.spi.predicate.NullableValue) 
ConfigurationUtils.toJobConf(io.trino.plugin.hive.util.ConfigurationUtils.toJobConf) NON_TRANSACTIONAL_OPTIMIZE_ENABLED(io.trino.plugin.hive.HiveSessionProperties.NON_TRANSACTIONAL_OPTIMIZE_ENABLED) HiveUtil.columnExtraInfo(io.trino.plugin.hive.util.HiveUtil.columnExtraInfo) ArrayList(java.util.ArrayList) PrincipalPrivileges.fromHivePrivilegeInfos(io.trino.plugin.hive.metastore.PrincipalPrivileges.fromHivePrivilegeInfos) ColumnHandle(io.trino.spi.connector.ColumnHandle) HiveUtil.hiveColumnHandles(io.trino.plugin.hive.util.HiveUtil.hiveColumnHandles) HiveType.toHiveType(io.trino.plugin.hive.HiveType.toHiveType) STORAGE_FORMAT_PROPERTY(io.trino.plugin.hive.HiveTableProperties.STORAGE_FORMAT_PROPERTY) ViewReaderUtil.isPrestoView(io.trino.plugin.hive.ViewReaderUtil.isPrestoView) HiveUtil.isIcebergTable(io.trino.plugin.hive.util.HiveUtil.isIcebergTable) HiveSessionProperties.isRespectTableFormat(io.trino.plugin.hive.HiveSessionProperties.isRespectTableFormat) RetryDriver.retry(io.trino.plugin.hive.util.RetryDriver.retry) ConstraintApplicationResult(io.trino.spi.connector.ConstraintApplicationResult) Properties(java.util.Properties) HiveWriteUtils.checkTableIsWritable(io.trino.plugin.hive.util.HiveWriteUtils.checkTableIsWritable) EXTERNAL_LOCATION_PROPERTY(io.trino.plugin.hive.HiveTableProperties.EXTERNAL_LOCATION_PROPERTY) TEXTFILE_FIELD_SEPARATOR(io.trino.plugin.hive.HiveTableProperties.TEXTFILE_FIELD_SEPARATOR) ConnectorSession(io.trino.spi.connector.ConnectorSession) HiveStatisticsProvider(io.trino.plugin.hive.statistics.HiveStatisticsProvider) RoleGrant(io.trino.spi.security.RoleGrant) File(java.io.File) HiveSessionProperties.isDelegateTransactionalManagedTableLocationToMetastore(io.trino.plugin.hive.HiveSessionProperties.isDelegateTransactionalManagedTableLocationToMetastore) HiveUtil.isSparkBucketedTable(io.trino.plugin.hive.util.HiveUtil.isSparkBucketedTable) DiscretePredicates(io.trino.spi.connector.DiscretePredicates) Sets.intersection(com.google.common.collect.Sets.intersection) TableType(org.apache.hadoop.hive.metastore.TableType) HiveSessionProperties.isParallelPartitionedBucketedWrites(io.trino.plugin.hive.HiveSessionProperties.isParallelPartitionedBucketedWrites) ViewReaderUtil.isHiveOrPrestoView(io.trino.plugin.hive.ViewReaderUtil.isHiveOrPrestoView) HiveSessionProperties.isQueryPartitionFilterRequired(io.trino.plugin.hive.HiveSessionProperties.isQueryPartitionFilterRequired) HIVE_WRITER_CLOSE_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_CLOSE_ERROR) URL(java.net.URL) HiveUtil.isDeltaLakeTable(io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) HiveBucketing(io.trino.plugin.hive.util.HiveBucketing) Iterables.concat(com.google.common.collect.Iterables.concat) CatalogSchemaName(io.trino.spi.connector.CatalogSchemaName) Path(org.apache.hadoop.fs.Path) ErrorType(io.trino.spi.ErrorType) HIVE_FILESYSTEM_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR) Splitter(com.google.common.base.Splitter) StorageFormat(io.trino.plugin.hive.metastore.StorageFormat) OVERWRITE(io.trino.plugin.hive.PartitionUpdate.UpdateMode.OVERWRITE) SKIP_HEADER_LINE_COUNT(io.trino.plugin.hive.HiveTableProperties.SKIP_HEADER_LINE_COUNT) RowType(io.trino.spi.type.RowType) ImmutableMap(com.google.common.collect.ImmutableMap) AccessControlMetadata(io.trino.plugin.hive.security.AccessControlMetadata) 
HiveWriteUtils.isS3FileSystem(io.trino.plugin.hive.util.HiveWriteUtils.isS3FileSystem) AcidUtils.isFullAcidTable(org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable) LocatedFileStatus(org.apache.hadoop.fs.LocatedFileStatus) HiveTableProperties.getAnalyzeColumns(io.trino.plugin.hive.HiveTableProperties.getAnalyzeColumns) TrinoException(io.trino.spi.TrinoException) ArrayType(io.trino.spi.type.ArrayType) Statistics.fromComputedStatistics(io.trino.plugin.hive.util.Statistics.fromComputedStatistics) HiveApplyProjectionUtil.find(io.trino.plugin.hive.HiveApplyProjectionUtil.find) ConnectorOutputMetadata(io.trino.spi.connector.ConnectorOutputMetadata) Sets(com.google.common.collect.Sets) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) DataSize(io.airlift.units.DataSize) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) List(java.util.List) MetastoreUtil.buildInitialPrivilegeSet(io.trino.plugin.hive.metastore.MetastoreUtil.buildInitialPrivilegeSet) MetastoreUtil.verifyOnline(io.trino.plugin.hive.metastore.MetastoreUtil.verifyOnline) FILE_SIZE_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.FILE_SIZE_COLUMN_NAME) Assignment(io.trino.spi.connector.Assignment) Function.identity(java.util.function.Function.identity) Optional(java.util.Optional) MoreObjects.firstNonNull(com.google.common.base.MoreObjects.firstNonNull) SystemTable(io.trino.spi.connector.SystemTable) CSV_SEPARATOR(io.trino.plugin.hive.HiveTableProperties.CSV_SEPARATOR) HiveWriteUtils.isWritableType(io.trino.plugin.hive.util.HiveWriteUtils.isWritableType) HiveSessionProperties.isOptimizedMismatchedBucketCount(io.trino.plugin.hive.HiveSessionProperties.isOptimizedMismatchedBucketCount) Logger(io.airlift.log.Logger) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) HashMap(java.util.HashMap) HiveBasicStatistics.createZeroStatistics(io.trino.plugin.hive.HiveBasicStatistics.createZeroStatistics) AcidSchema(io.trino.plugin.hive.acid.AcidSchema) HiveColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics) Verify.verify(com.google.common.base.Verify.verify) SemiTransactionalHiveMetastore(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore) Objects.requireNonNull(java.util.Objects.requireNonNull) Suppliers(com.google.common.base.Suppliers) HiveTableProperties.getOrcBloomFilterColumns(io.trino.plugin.hive.HiveTableProperties.getOrcBloomFilterColumns) HiveApplyProjectionUtil.extractSupportedProjectedColumns(io.trino.plugin.hive.HiveApplyProjectionUtil.extractSupportedProjectedColumns) Privilege(io.trino.spi.security.Privilege) VerifyException(com.google.common.base.VerifyException) APPEND(io.trino.plugin.hive.PartitionUpdate.UpdateMode.APPEND) HiveSessionProperties.getInsertExistingPartitionsBehavior(io.trino.plugin.hive.HiveSessionProperties.getInsertExistingPartitionsBehavior) TupleDomain.withColumnDomains(io.trino.spi.predicate.TupleDomain.withColumnDomains) NO_PRIVILEGES(io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES) MalformedURLException(java.net.MalformedURLException) HiveSessionProperties.isPropagateTableScanSortingProperties(io.trino.plugin.hive.HiveSessionProperties.isPropagateTableScanSortingProperties) Statistics.createEmptyPartitionStatistics(io.trino.plugin.hive.util.Statistics.createEmptyPartitionStatistics) TupleDomain(io.trino.spi.predicate.TupleDomain) Maps(com.google.common.collect.Maps) 
OptimizeTableProcedure(io.trino.plugin.hive.procedure.OptimizeTableProcedure) SKIP_FOOTER_LINE_COUNT(io.trino.plugin.hive.HiveTableProperties.SKIP_FOOTER_LINE_COUNT) SYNTHESIZED(io.trino.plugin.hive.HiveColumnHandle.ColumnType.SYNTHESIZED) LocalProperty(io.trino.spi.connector.LocalProperty) BUCKET_COUNT_PROPERTY(io.trino.plugin.hive.HiveTableProperties.BUCKET_COUNT_PROPERTY) SortingColumn(io.trino.plugin.hive.metastore.SortingColumn) VisibleForTesting(com.google.common.annotations.VisibleForTesting) HiveUtil.getPartitionKeyColumnHandles(io.trino.plugin.hive.util.HiveUtil.getPartitionKeyColumnHandles) Collections(java.util.Collections) CSV_ESCAPE(io.trino.plugin.hive.HiveTableProperties.CSV_ESCAPE) REGULAR(io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR)
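
beginOptimize, beginDelete, and beginInsert all end in the same three-step sequence: resolve a LocationHandle for the existing table, ask the LocationService for the query's WriteInfo, and declare the intention to write to the metastore. A hypothetical helper (not part of HiveMetadata or the Trino API; the name is invented for illustration) could express the shared pattern:

// Hypothetical helper, for illustration only -- HiveMetadata does not define this method.
private WriteInfo declareWriteForExistingTable(ConnectorSession session, Table table, SchemaTableName tableName) {
    LocationHandle locationHandle = locationService.forExistingTable(metastore, session, table);
    WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
    metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), tableName);
    return writeInfo;
}

Note that beginOptimize also keeps the declaration id returned by declareIntentionToWrite, so it would need that return value rather than only the WriteInfo.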

Example 5 with WriteInfo

Use of io.trino.plugin.hive.LocationService.WriteInfo in project trino by trinodb.

From class HiveMetadata, method finishUpdate.

@Override
public void finishUpdate(ConnectorSession session, ConnectorTableHandle tableHandle, Collection<Slice> fragments) {
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    checkArgument(handle.isAcidUpdate(), "handle should be an update handle, but is %s", handle);
    requireNonNull(fragments, "fragments is null");
    SchemaTableName tableName = handle.getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
    List<PartitionAndStatementId> partitionAndStatementIds = fragments.stream()
            .map(Slice::getBytes)
            .map(PartitionAndStatementId.CODEC::fromJson)
            .collect(toImmutableList());
    HdfsContext context = new HdfsContext(session);
    for (PartitionAndStatementId ps : partitionAndStatementIds) {
        createOrcAcidVersionFile(context, new Path(ps.getDeleteDeltaDirectory()));
    }
    LocationHandle locationHandle = locationService.forExistingTable(metastore, session, table);
    WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
    metastore.finishUpdate(session, table.getDatabaseName(), table.getTableName(), writeInfo.getWritePath(), partitionAndStatementIds);
}
Also used : Path(org.apache.hadoop.fs.Path) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) AcidTransaction.forCreateTable(io.trino.plugin.hive.acid.AcidTransaction.forCreateTable) SystemTables.getSourceTableNameFromSystemTable(io.trino.plugin.hive.util.SystemTables.getSourceTableNameFromSystemTable) Table(io.trino.plugin.hive.metastore.Table) AcidUtils.isTransactionalTable(org.apache.hadoop.hive.ql.io.AcidUtils.isTransactionalTable) HiveUtil.isIcebergTable(io.trino.plugin.hive.util.HiveUtil.isIcebergTable) HiveUtil.isSparkBucketedTable(io.trino.plugin.hive.util.HiveUtil.isSparkBucketedTable) HiveUtil.isDeltaLakeTable(io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable) AcidUtils.isFullAcidTable(org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable) SystemTable(io.trino.spi.connector.SystemTable) Slice(io.airlift.slice.Slice) WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName)

Aggregations

WriteInfo (io.trino.plugin.hive.LocationService.WriteInfo): 12 usages
SchemaTableName (io.trino.spi.connector.SchemaTableName): 8 usages
Path (org.apache.hadoop.fs.Path): 8 usages
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 7 usages
Table (io.trino.plugin.hive.metastore.Table): 7 usages
TrinoException (io.trino.spi.TrinoException): 7 usages
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName): 7 usages
AcidTransaction (io.trino.plugin.hive.acid.AcidTransaction): 6 usages
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 4 usages
ImmutableList (com.google.common.collect.ImmutableList): 4 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 4 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 4 usages
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap): 4 usages
ImmutableSet (com.google.common.collect.ImmutableSet): 4 usages
Sets (com.google.common.collect.Sets): 4 usages
Slice (io.airlift.slice.Slice): 4 usages
AcidTransaction.forCreateTable (io.trino.plugin.hive.acid.AcidTransaction.forCreateTable): 4 usages
Column (io.trino.plugin.hive.metastore.Column): 4 usages
SortingColumn (io.trino.plugin.hive.metastore.SortingColumn): 4 usages
StorageFormat.fromHiveStorageFormat (io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat): 4 usages