
Example 1 with ParquetFileWriter

Use of io.trino.plugin.hive.parquet.ParquetFileWriter in project trino by trinodb.

From the class DeltaLakePageSink, the method createParquetFileWriter. It reads the writer block size, page size, and compression codec from the session, maps TIMESTAMP WITH TIME ZONE columns (millisecond precision only) to Parquet TIMESTAMP_MILLIS, builds an identity column mapping, and registers a rollback action that deletes the partially written file on failure:

private FileWriter createParquetFileWriter(Path path)
{
    ParquetWriterOptions parquetWriterOptions = ParquetWriterOptions.builder()
            .setMaxBlockSize(getParquetWriterBlockSize(session))
            .setMaxPageSize(getParquetWriterPageSize(session))
            .build();
    CompressionCodecName compressionCodecName = getCompressionCodec(session).getParquetCompressionCodec();
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getIdentity(), path, conf);
        // on failure, delete the partially written file
        Callable<Void> rollbackAction = () -> {
            fileSystem.delete(path, false);
            return null;
        };
        List<Type> parquetTypes = dataColumnTypes.stream()
                .map(type -> {
                    if (type instanceof TimestampWithTimeZoneType) {
                        // Delta Lake supports only millisecond precision; store as TIMESTAMP_MILLIS in Parquet
                        verify(((TimestampWithTimeZoneType) type).getPrecision() == 3, "Unsupported type: %s", type);
                        return TIMESTAMP_MILLIS;
                    }
                    return type;
                })
                .collect(toImmutableList());
        // we use identity column mapping; input page already contains only data columns per
        // DeltaLakePageSink.getDataPage()
        int[] identityMapping = new int[dataColumnTypes.size()];
        for (int i = 0; i < identityMapping.length; ++i) {
            identityMapping[i] = i;
        }
        ParquetSchemaConverter schemaConverter = new ParquetSchemaConverter(parquetTypes, dataColumnNames);
        return new ParquetFileWriter(
                fileSystem.create(path),
                rollbackAction,
                parquetTypes,
                schemaConverter.getMessageType(),
                schemaConverter.getPrimitiveTypes(),
                parquetWriterOptions,
                identityMapping,
                compressionCodecName,
                trinoVersion);
    }
    catch (IOException e) {
        throw new TrinoException(DELTA_LAKE_BAD_WRITE, "Error creating Parquet file", e);
    }
}
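
For context, here is a minimal sketch of how a writer produced by this method is typically driven, assuming the appendRows/commit/rollback contract of Trino's io.trino.plugin.hive.FileWriter interface. The writePage helper and targetPath parameter are hypothetical names for illustration, not part of DeltaLakePageSink:

import io.trino.plugin.hive.FileWriter;
import io.trino.spi.Page;
import org.apache.hadoop.fs.Path;

// Hypothetical helper for illustration only.
private void writePage(Page dataPage, Path targetPath)
{
    FileWriter writer = createParquetFileWriter(targetPath);
    try {
        // Buffer the page into the Parquet writer; columns are consumed
        // positionally, matching the identity mapping built above.
        writer.appendRows(dataPage);
        // Flush row groups and the Parquet footer, closing the output stream.
        writer.commit();
    }
    catch (RuntimeException e) {
        // Invokes the rollbackAction registered above, deleting the partial file.
        writer.rollback();
        throw e;
    }
}

The rollback path matters here: fileSystem.create(path) has already created the output file by the time rows are appended, so a failure without rollback would leave a partial Parquet file visible in the Delta Lake table directory.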