Example 1 with RcFileEncoding

Use of io.trino.rcfile.RcFileEncoding in project trino by trinodb.

From the class RcFileFileWriterFactory, the method createFileWriter:

@Override
public Optional<FileWriter> createFileWriter(
        Path path,
        List<String> inputColumnNames,
        StorageFormat storageFormat,
        Properties schema,
        JobConf configuration,
        ConnectorSession session,
        OptionalInt bucketNumber,
        AcidTransaction transaction,
        boolean useAcidSchema,
        WriterKind writerKind) {
    if (!RCFileOutputFormat.class.getName().equals(storageFormat.getOutputFormat())) {
        return Optional.empty();
    }
    RcFileEncoding rcFileEncoding;
    if (LazyBinaryColumnarSerDe.class.getName().equals(storageFormat.getSerde())) {
        rcFileEncoding = new BinaryRcFileEncoding(timeZone);
    } else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerde())) {
        rcFileEncoding = createTextVectorEncoding(schema);
    } else {
        return Optional.empty();
    }
    Optional<String> codecName = Optional.ofNullable(configuration.get(FileOutputFormat.COMPRESS_CODEC));
    // existing tables and partitions may have columns in a different order than the writer is providing, so build
    // an index to rearrange columns in the proper order
    List<String> fileColumnNames = getColumnNames(schema);
    List<Type> fileColumnTypes = getColumnTypes(schema).stream()
            .map(hiveType -> hiveType.getType(typeManager, getTimestampPrecision(session)))
            .collect(toList());
    int[] fileInputColumnIndexes = fileColumnNames.stream().mapToInt(inputColumnNames::indexOf).toArray();
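    // e.g., fileColumnNames [c1, c2, c3] with inputColumnNames [c3, c1, c2]
    // yields [1, 2, 0]; a file column absent from the input maps to -1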
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getIdentity(), path, configuration);
        OutputStream outputStream = fileSystem.create(path, false);
        Optional<Supplier<RcFileDataSource>> validationInputFactory = Optional.empty();
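        // When validation is enabled, supply a data source that re-reads the
        // written file so the writer's output can be verified afterwards.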
        if (isRcfileOptimizedWriterValidate(session)) {
            validationInputFactory = Optional.of(() -> {
                try {
                    return new HdfsRcFileDataSource(path.toString(), fileSystem.open(path), fileSystem.getFileStatus(path).getLen(), stats);
                } catch (IOException e) {
                    throw new TrinoException(HIVE_WRITE_VALIDATION_FAILED, e);
                }
            });
        }
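        // If the write fails, this rollback action deletes the partially written file.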
        Callable<Void> rollbackAction = () -> {
            fileSystem.delete(path, false);
            return null;
        };
        return Optional.of(new RcFileFileWriter(
                outputStream,
                rollbackAction,
                rcFileEncoding,
                fileColumnTypes,
                codecName,
                fileInputColumnIndexes,
                ImmutableMap.<String, String>builder()
                        .put(PRESTO_VERSION_NAME, nodeVersion.toString())
                        .put(PRESTO_QUERY_ID_NAME, session.getQueryId())
                        .buildOrThrow(),
                validationInputFactory));
    } catch (Exception e) {
        throw new TrinoException(HIVE_WRITER_OPEN_ERROR, "Error creating RCFile file", e);
    }
}
Also used :

HdfsRcFileDataSource (io.trino.plugin.hive.rcfile.HdfsRcFileDataSource)
BinaryRcFileEncoding (io.trino.rcfile.binary.BinaryRcFileEncoding)
DateTimeZone (org.joda.time.DateTimeZone)
FileSystem (org.apache.hadoop.fs.FileSystem)
LazyBinaryColumnarSerDe (org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe)
Type (io.trino.spi.type.Type)
Callable (java.util.concurrent.Callable)
RcFileDataSource (io.trino.rcfile.RcFileDataSource)
OptionalInt (java.util.OptionalInt)
Supplier (java.util.function.Supplier)
Inject (javax.inject.Inject)
HiveUtil.getColumnNames (io.trino.plugin.hive.util.HiveUtil.getColumnNames)
HiveSessionProperties.isRcfileOptimizedWriterValidate (io.trino.plugin.hive.HiveSessionProperties.isRcfileOptimizedWriterValidate)
Objects.requireNonNull (java.util.Objects.requireNonNull)
Path (org.apache.hadoop.fs.Path)
PRESTO_QUERY_ID_NAME (io.trino.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME)
StorageFormat (io.trino.plugin.hive.metastore.StorageFormat)
OutputStream (java.io.OutputStream)
HIVE_WRITER_OPEN_ERROR (io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR)
AcidTransaction (io.trino.plugin.hive.acid.AcidTransaction)
HiveUtil.getColumnTypes (io.trino.plugin.hive.util.HiveUtil.getColumnTypes)
Properties (java.util.Properties)
ImmutableMap (com.google.common.collect.ImmutableMap)
RCFileOutputFormat (org.apache.hadoop.hive.ql.io.RCFileOutputFormat)
HiveSessionProperties.getTimestampPrecision (io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision)
TrinoException (io.trino.spi.TrinoException)
IOException (java.io.IOException)
ConnectorSession (io.trino.spi.connector.ConnectorSession)
RcFileEncoding (io.trino.rcfile.RcFileEncoding)
ColumnarSerDe (org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe)
JobConf (org.apache.hadoop.mapred.JobConf)
FileOutputFormat (org.apache.hadoop.mapreduce.lib.output.FileOutputFormat)
List (java.util.List)
RcFilePageSourceFactory.createTextVectorEncoding (io.trino.plugin.hive.rcfile.RcFilePageSourceFactory.createTextVectorEncoding)
Collectors.toList (java.util.stream.Collectors.toList)
HIVE_WRITE_VALIDATION_FAILED (io.trino.plugin.hive.HiveErrorCode.HIVE_WRITE_VALIDATION_FAILED)
Optional (java.util.Optional)
PRESTO_VERSION_NAME (io.trino.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME)
TypeManager (io.trino.spi.type.TypeManager)
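
Both examples choose the encoding the same way: the Hive serde class name on the storage format selects a binary or text encoding, and any other serde yields Optional.empty() so a different factory can handle the format. Below is a minimal, self-contained sketch of that dispatch; the EncodingKind enum and encodingFor method are hypothetical stand-ins for illustration, not Trino API (the real code returns a BinaryRcFileEncoding or the result of createTextVectorEncoding(schema)).

import java.util.Optional;

public final class SerdeDispatchSketch {
    // Hypothetical stand-in for RcFileEncoding.
    enum EncodingKind { BINARY, TEXT }

    static Optional<EncodingKind> encodingFor(String serdeClassName) {
        if ("org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe".equals(serdeClassName)) {
            return Optional.of(EncodingKind.BINARY);
        }
        if ("org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe".equals(serdeClassName)) {
            return Optional.of(EncodingKind.TEXT);
        }
        // Unsupported serde: signal "not handled" instead of throwing,
        // so the caller can fall back to another writer or reader.
        return Optional.empty();
    }
}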

Example 2 with RcFileEncoding

Use of io.trino.rcfile.RcFileEncoding in project trino by trinodb.

From the class RcFilePageSourceFactory, the method createPageSource:

@Override
public Optional<ReaderPageSource> createPageSource(
        Configuration configuration,
        ConnectorSession session,
        Path path,
        long start,
        long length,
        long estimatedFileSize,
        Properties schema,
        List<HiveColumnHandle> columns,
        TupleDomain<HiveColumnHandle> effectivePredicate,
        Optional<AcidInfo> acidInfo,
        OptionalInt bucketNumber,
        boolean originalFile,
        AcidTransaction transaction) {
    RcFileEncoding rcFileEncoding;
    String deserializerClassName = getDeserializerClassName(schema);
    if (deserializerClassName.equals(LazyBinaryColumnarSerDe.class.getName())) {
        rcFileEncoding = new BinaryRcFileEncoding(timeZone);
    } else if (deserializerClassName.equals(ColumnarSerDe.class.getName())) {
        rcFileEncoding = createTextVectorEncoding(schema);
    } else {
        return Optional.empty();
    }
    checkArgument(acidInfo.isEmpty(), "Acid is not supported");
    List<HiveColumnHandle> projectedReaderColumns = columns;
    Optional<ReaderColumns> readerProjections = projectBaseColumns(columns);
    if (readerProjections.isPresent()) {
        projectedReaderColumns = readerProjections.get().get().stream()
                .map(HiveColumnHandle.class::cast)
                .collect(toImmutableList());
    }
    RcFileDataSource dataSource;
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(session.getIdentity(), path, configuration);
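        // Open the file as the session user (doAs) so HDFS impersonation is honored.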
        FSDataInputStream inputStream = hdfsEnvironment.doAs(session.getIdentity(), () -> fileSystem.open(path));
        if (estimatedFileSize < BUFFER_SIZE.toBytes()) {
            // Handle potentially imprecise file lengths by reading the footer
            try {
                FSDataInputStreamTail fileTail = FSDataInputStreamTail.readTail(path.toString(), estimatedFileSize, inputStream, toIntExact(BUFFER_SIZE.toBytes()));
                dataSource = new MemoryRcFileDataSource(new RcFileDataSourceId(path.toString()), fileTail.getTailSlice());
            } finally {
                inputStream.close();
            }
        } else {
            long fileSize = hdfsEnvironment.doAs(session.getIdentity(), () -> fileSystem.getFileStatus(path).getLen());
            dataSource = new HdfsRcFileDataSource(path.toString(), inputStream, fileSize, stats);
        }
    } catch (Exception e) {
        if (nullToEmpty(e.getMessage()).trim().equals("Filesystem closed") || e instanceof FileNotFoundException) {
            throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, e);
        }
        throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, splitError(e, path, start, length), e);
    }
    length = min(dataSource.getSize() - start, length);
    // Split may be empty now that the correct file size is known
    if (length <= 0) {
        return Optional.of(noProjectionAdaptation(new EmptyPageSource()));
    }
    try {
        ImmutableMap.Builder<Integer, Type> readColumns = ImmutableMap.builder();
        HiveTimestampPrecision timestampPrecision = getTimestampPrecision(session);
        for (HiveColumnHandle column : projectedReaderColumns) {
            readColumns.put(column.getBaseHiveColumnIndex(), column.getHiveType().getType(typeManager, timestampPrecision));
        }
        RcFileReader rcFileReader = new RcFileReader(
                dataSource,
                rcFileEncoding,
                readColumns.buildOrThrow(),
                new AircompressorCodecFactory(new HadoopCodecFactory(configuration.getClassLoader())),
                start,
                length,
                BUFFER_SIZE);
        ConnectorPageSource pageSource = new RcFilePageSource(rcFileReader, projectedReaderColumns);
        return Optional.of(new ReaderPageSource(pageSource, readerProjections));
    } catch (Throwable e) {
        try {
            dataSource.close();
        } catch (IOException ignored) {
        }
        if (e instanceof TrinoException) {
            throw (TrinoException) e;
        }
        String message = splitError(e, path, start, length);
        if (e instanceof RcFileCorruptionException) {
            throw new TrinoException(HIVE_BAD_DATA, message, e);
        }
        if (e instanceof BlockMissingException) {
            throw new TrinoException(HIVE_MISSING_DATA, message, e);
        }
        throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, message, e);
    }
}
Also used :

FileNotFoundException (java.io.FileNotFoundException)
BinaryRcFileEncoding (io.trino.rcfile.binary.BinaryRcFileEncoding)
TextRcFileEncoding (io.trino.rcfile.text.TextRcFileEncoding)
RcFileEncoding (io.trino.rcfile.RcFileEncoding)
ConnectorPageSource (io.trino.spi.connector.ConnectorPageSource)
RcFileCorruptionException (io.trino.rcfile.RcFileCorruptionException)
EmptyPageSource (io.trino.spi.connector.EmptyPageSource)
FileSystem (org.apache.hadoop.fs.FileSystem)
LazyBinaryColumnarSerDe (org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe)
BlockMissingException (org.apache.hadoop.hdfs.BlockMissingException)
HiveColumnHandle (io.trino.plugin.hive.HiveColumnHandle)
RcFileDataSourceId (io.trino.rcfile.RcFileDataSourceId)
HiveTimestampPrecision (io.trino.plugin.hive.HiveTimestampPrecision)
IOException (java.io.IOException)
FSDataInputStreamTail (io.trino.plugin.hive.util.FSDataInputStreamTail)
RcFileReader (io.trino.rcfile.RcFileReader)
TrinoException (io.trino.spi.TrinoException)
ImmutableMap (com.google.common.collect.ImmutableMap)
Type (io.trino.spi.type.Type)
MemoryRcFileDataSource (io.trino.rcfile.MemoryRcFileDataSource)
HadoopCodecFactory (io.trino.rcfile.HadoopCodecFactory)
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)
ReaderPageSource (io.trino.plugin.hive.ReaderPageSource)
ReaderColumns (io.trino.plugin.hive.ReaderColumns)
AircompressorCodecFactory (io.trino.rcfile.AircompressorCodecFactory)
RcFileDataSource (io.trino.rcfile.RcFileDataSource)
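
The catch (Throwable e) block in createPageSource follows a cleanup-and-classify pattern: close the data source while suppressing any secondary IOException, rethrow a TrinoException unchanged, and map known causes (file corruption, missing HDFS blocks) to specific error codes. Here is a compact, self-contained sketch of the same shape; ClassifiedException and its string codes are hypothetical stand-ins for TrinoException and the HIVE_* error codes:

import java.io.Closeable;
import java.io.IOException;

public final class CleanupAndClassifySketch {
    // Hypothetical stand-in for TrinoException carrying an error code.
    static final class ClassifiedException extends RuntimeException {
        final String code;

        ClassifiedException(String code, String message, Throwable cause) {
            super(message, cause);
            this.code = code;
        }
    }

    static RuntimeException classify(Closeable dataSource, Throwable e, String splitContext) {
        try {
            dataSource.close();
        } catch (IOException ignored) {
            // Best-effort cleanup: the original failure is the one worth reporting.
        }
        if (e instanceof ClassifiedException) {
            return (ClassifiedException) e; // already carries an error code
        }
        // In the real code, RcFileCorruptionException maps to HIVE_BAD_DATA and
        // BlockMissingException maps to HIVE_MISSING_DATA; everything else
        // becomes HIVE_CANNOT_OPEN_SPLIT.
        return new ClassifiedException("CANNOT_OPEN_SPLIT", splitContext, e);
    }
}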
