Search in sources :

Example 21 with OrcColumnId

use of io.trino.orc.metadata.OrcColumnId in project trino by trinodb.

In the class OrcWriter, method bufferStripeData:

/**
 * Collect the data for the stripe.  This is not the actual data, but
 * instead a list of functions that know how to write the data.  The
 * outputs are ordered exactly as they must appear in the file: index
 * streams, bloom filters, data streams, then the stripe footer.
 *
 * @param stripeStartOffset file offset at which this stripe will begin
 * @param flushReason why the stripe is being flushed; must be {@code CLOSED}
 *        when the stripe contains no rows
 * @return ordered outputs for the stripe, or an empty list for an empty
 *         stripe flushed at close
 */
private List<OrcDataOutput> bufferStripeData(long stripeStartOffset, FlushReason flushReason) throws IOException {
    if (stripeRowCount == 0) {
        verify(flushReason == CLOSED, "An empty stripe is not allowed");
        // column writers must be closed or the reset call will fail
        columnWriters.forEach(ColumnWriter::close);
        return ImmutableList.of();
    }
    if (rowGroupRowCount > 0) {
        finishRowGroup();
    }
    // convert any dictionary encoded column with a low compression ratio to direct
    dictionaryCompressionOptimizer.finalOptimize(bufferedBytes);
    // closing flushes any remaining buffered values into the writers' streams
    columnWriters.forEach(ColumnWriter::close);
    List<OrcDataOutput> outputData = new ArrayList<>();
    List<Stream> allStreams = new ArrayList<>(columnWriters.size() * 3);
    // get index streams
    long indexLength = 0;
    for (ColumnWriter columnWriter : columnWriters) {
        for (StreamDataOutput indexStream : columnWriter.getIndexStreams(metadataWriter)) {
            // The ordering is critical because the stream only contains a length with no offset.
            outputData.add(indexStream);
            allStreams.add(indexStream.getStream());
            indexLength += indexStream.size();
        }
        for (StreamDataOutput bloomFilter : columnWriter.getBloomFilters(metadataWriter)) {
            outputData.add(bloomFilter);
            allStreams.add(bloomFilter.getStream());
            indexLength += bloomFilter.size();
        }
    }
    // data streams (sorted by size)
    long dataLength = 0;
    List<StreamDataOutput> dataStreams = new ArrayList<>(columnWriters.size() * 2);
    for (ColumnWriter columnWriter : columnWriters) {
        List<StreamDataOutput> streams = columnWriter.getDataStreams();
        dataStreams.addAll(streams);
        dataLength += streams.stream().mapToLong(StreamDataOutput::size).sum();
    }
    Collections.sort(dataStreams);
    // add data streams
    for (StreamDataOutput dataStream : dataStreams) {
        // The ordering is critical because the stream only contains a length with no offset.
        outputData.add(dataStream);
        allStreams.add(dataStream.getStream());
    }
    // collect per-column encodings and stripe statistics from every writer
    Map<OrcColumnId, ColumnEncoding> columnEncodings = new HashMap<>();
    columnWriters.forEach(columnWriter -> columnEncodings.putAll(columnWriter.getColumnEncodings()));
    Map<OrcColumnId, ColumnStatistics> columnStatistics = new HashMap<>();
    columnWriters.forEach(columnWriter -> columnStatistics.putAll(columnWriter.getColumnStripeStatistics()));
    // the 0th column is a struct column for the whole row
    columnEncodings.put(ROOT_COLUMN, new ColumnEncoding(DIRECT, 0));
    columnStatistics.put(ROOT_COLUMN, new ColumnStatistics((long) stripeRowCount, 0, null, null, null, null, null, null, null, null, null));
    // add footer
    StripeFooter stripeFooter = new StripeFooter(allStreams, toColumnMetadata(columnEncodings, orcTypes.size()), ZoneId.of("UTC"));
    Slice footer = metadataWriter.writeStripeFooter(stripeFooter);
    outputData.add(createDataOutput(footer));
    // create final stripe statistics
    StripeStatistics statistics = new StripeStatistics(toColumnMetadata(columnStatistics, orcTypes.size()));
    recordValidation(validation -> validation.addStripeStatistics(stripeStartOffset, statistics));
    StripeInformation stripeInformation = new StripeInformation(stripeRowCount, stripeStartOffset, indexLength, dataLength, footer.length());
    // remember the stripe so the file footer can be written at close, and
    // account for the retained memory of the closed stripe metadata
    ClosedStripe closedStripe = new ClosedStripe(stripeInformation, statistics);
    closedStripes.add(closedStripe);
    closedStripesRetainedBytes += closedStripe.getRetainedSizeInBytes();
    recordValidation(validation -> validation.addStripe(stripeInformation.getNumberOfRows()));
    stats.recordStripeWritten(flushReason, stripeInformation.getTotalLength(), stripeInformation.getNumberOfRows(), dictionaryCompressionOptimizer.getDictionaryMemoryBytes());
    return outputData;
}
Also used : ColumnStatistics(io.trino.orc.metadata.statistics.ColumnStatistics) OrcColumnId(io.trino.orc.metadata.OrcColumnId) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) StripeStatistics(io.trino.orc.metadata.statistics.StripeStatistics) ColumnWriters.createColumnWriter(io.trino.orc.writer.ColumnWriters.createColumnWriter) ColumnWriter(io.trino.orc.writer.ColumnWriter) SliceDictionaryColumnWriter(io.trino.orc.writer.SliceDictionaryColumnWriter) StreamDataOutput(io.trino.orc.stream.StreamDataOutput) OrcDataOutput(io.trino.orc.stream.OrcDataOutput) ColumnEncoding(io.trino.orc.metadata.ColumnEncoding) StripeFooter(io.trino.orc.metadata.StripeFooter) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) Slice(io.airlift.slice.Slice) Stream(io.trino.orc.metadata.Stream) StripeInformation(io.trino.orc.metadata.StripeInformation)

Example 22 with OrcColumnId

use of io.trino.orc.metadata.OrcColumnId in project trino by trinodb.

In the class OrcWriter, method toFileStats:

/**
 * Merges per-stripe column statistics into file-level statistics by
 * combining, for each column id, the statistics of that column across
 * all stripes.
 *
 * @param stripes one statistics entry per stripe; every entry must cover
 *        the same number of columns
 * @return merged file statistics, or {@link Optional#empty()} when there
 *         are no stripes
 * @throws IllegalArgumentException if the stripes disagree on column count
 */
private static Optional<ColumnMetadata<ColumnStatistics>> toFileStats(List<ColumnMetadata<ColumnStatistics>> stripes) {
    if (stripes.isEmpty()) {
        return Optional.empty();
    }
    int columnCount = stripes.get(0).size();
    // include the mismatching sizes in the failure message for diagnosability
    checkArgument(
            stripes.stream().allMatch(stripe -> columnCount == stripe.size()),
            "Expected all stripes to have %s columns: %s",
            columnCount,
            stripes.stream().map(ColumnMetadata::size).collect(toList()));
    ImmutableList.Builder<ColumnStatistics> fileStats = ImmutableList.builder();
    for (int i = 0; i < columnCount; i++) {
        OrcColumnId columnId = new OrcColumnId(i);
        fileStats.add(ColumnStatistics.mergeColumnStatistics(stripes.stream().map(stripe -> stripe.get(columnId)).collect(toList())));
    }
    return Optional.of(new ColumnMetadata<>(fileStats.build()));
}
Also used : ColumnEncoding(io.trino.orc.metadata.ColumnEncoding) DICTIONARY_FULL(io.trino.orc.OrcWriterStats.FlushReason.DICTIONARY_FULL) DIRECT(io.trino.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT) OrcWriteValidationBuilder(io.trino.orc.OrcWriteValidation.OrcWriteValidationBuilder) Stream(io.trino.orc.metadata.Stream) StreamDataOutput(io.trino.orc.stream.StreamDataOutput) FlushReason(io.trino.orc.OrcWriterStats.FlushReason) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) BloomFilterBuilder(io.trino.orc.metadata.statistics.BloomFilterBuilder) Slices(io.airlift.slice.Slices) Map(java.util.Map) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) MAGIC(io.trino.orc.metadata.PostScript.MAGIC) ROOT_COLUMN(io.trino.orc.metadata.OrcColumnId.ROOT_COLUMN) OrcMetadataWriter(io.trino.orc.metadata.OrcMetadataWriter) ImmutableSet(com.google.common.collect.ImmutableSet) OrcDataOutput.createDataOutput(io.trino.orc.stream.OrcDataOutput.createDataOutput) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ColumnWriters.createColumnWriter(io.trino.orc.writer.ColumnWriters.createColumnWriter) ColumnWriter(io.trino.orc.writer.ColumnWriter) Collectors(java.util.stream.Collectors) ZoneId(java.time.ZoneId) Preconditions.checkState(com.google.common.base.Preconditions.checkState) Metadata(io.trino.orc.metadata.Metadata) List(java.util.List) ClassLayout(org.openjdk.jol.info.ClassLayout) Entry(java.util.Map.Entry) Optional(java.util.Optional) ColumnStatistics(io.trino.orc.metadata.statistics.ColumnStatistics) Slice(io.airlift.slice.Slice) OrcWriteValidationMode(io.trino.orc.OrcWriteValidation.OrcWriteValidationMode) SliceDictionaryColumnWriter(io.trino.orc.writer.SliceDictionaryColumnWriter) OrcDataOutput(io.trino.orc.stream.OrcDataOutput) Type(io.trino.spi.type.Type) Page(io.trino.spi.Page) HashMap(java.util.HashMap) CLOSED(io.trino.orc.OrcWriterStats.FlushReason.CLOSED) OptionalInt(java.util.OptionalInt) 
Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) Utf8BloomFilterBuilder(io.trino.orc.metadata.statistics.Utf8BloomFilterBuilder) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) Objects.requireNonNull(java.util.Objects.requireNonNull) StripeInformation(io.trino.orc.metadata.StripeInformation) Math.toIntExact(java.lang.Math.toIntExact) Nullable(javax.annotation.Nullable) MAX_ROWS(io.trino.orc.OrcWriterStats.FlushReason.MAX_ROWS) StripeFooter(io.trino.orc.metadata.StripeFooter) Integer.min(java.lang.Integer.min) OrcType(io.trino.orc.metadata.OrcType) UnsignedBytes(com.google.common.primitives.UnsignedBytes) MAX_BYTES(io.trino.orc.OrcWriterStats.FlushReason.MAX_BYTES) IOException(java.io.IOException) ColumnMetadata(io.trino.orc.metadata.ColumnMetadata) CompressionKind(io.trino.orc.metadata.CompressionKind) Footer(io.trino.orc.metadata.Footer) Consumer(java.util.function.Consumer) Collectors.toList(java.util.stream.Collectors.toList) StripeStatistics(io.trino.orc.metadata.statistics.StripeStatistics) Closeable(java.io.Closeable) NoOpBloomFilterBuilder(io.trino.orc.metadata.statistics.NoOpBloomFilterBuilder) OrcReader.validateFile(io.trino.orc.OrcReader.validateFile) Collections(java.util.Collections) CompressedMetadataWriter(io.trino.orc.metadata.CompressedMetadataWriter) OrcColumnId(io.trino.orc.metadata.OrcColumnId) ColumnStatistics(io.trino.orc.metadata.statistics.ColumnStatistics) OrcColumnId(io.trino.orc.metadata.OrcColumnId) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ImmutableList(com.google.common.collect.ImmutableList)

Example 23 with OrcColumnId

use of io.trino.orc.metadata.OrcColumnId in project trino by trinodb.

In the class OrcReader, method createOrcColumn:

/**
 * Recursively builds an {@code OrcColumn} for the given column id, descending
 * into struct fields, list items, map keys/values, and union fields.  The
 * dotted path is formed by appending the field name to the parent stream name.
 */
private static OrcColumn createOrcColumn(String parentStreamName, String fieldName, OrcColumnId columnId, ColumnMetadata<OrcType> types, OrcDataSourceId orcDataSourceId) {
    String streamPath = fieldName.isEmpty() ? parentStreamName : parentStreamName + "." + fieldName;
    OrcType orcType = types.get(columnId);
    // build nested columns for compound types; leaf types get no children
    List<OrcColumn> children;
    switch (orcType.getOrcTypeKind()) {
        case STRUCT:
            children = IntStream.range(0, orcType.getFieldCount())
                    .mapToObj(field -> createOrcColumn(streamPath, orcType.getFieldName(field), orcType.getFieldTypeIndex(field), types, orcDataSourceId))
                    .collect(toImmutableList());
            break;
        case LIST:
            children = ImmutableList.of(
                    createOrcColumn(streamPath, "item", orcType.getFieldTypeIndex(0), types, orcDataSourceId));
            break;
        case MAP:
            children = ImmutableList.of(
                    createOrcColumn(streamPath, "key", orcType.getFieldTypeIndex(0), types, orcDataSourceId),
                    createOrcColumn(streamPath, "value", orcType.getFieldTypeIndex(1), types, orcDataSourceId));
            break;
        case UNION:
            children = IntStream.range(0, orcType.getFieldCount())
                    .mapToObj(field -> createOrcColumn(streamPath, "field" + field, orcType.getFieldTypeIndex(field), types, orcDataSourceId))
                    .collect(toImmutableList());
            break;
        default:
            children = ImmutableList.of();
    }
    return new OrcColumn(streamPath, columnId, fieldName, orcType.getOrcTypeKind(), orcDataSourceId, children, orcType.getAttributes());
}
Also used : IntStream(java.util.stream.IntStream) DateTimeZone(org.joda.time.DateTimeZone) Slice(io.airlift.slice.Slice) ExceptionWrappingMetadataReader(io.trino.orc.metadata.ExceptionWrappingMetadataReader) Logger(io.airlift.log.Logger) AggregatedMemoryContext.newSimpleAggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext) Type(io.trino.spi.type.Type) Page(io.trino.spi.Page) Throwables.throwIfUnchecked(com.google.common.base.Throwables.throwIfUnchecked) OrcTypeKind(io.trino.orc.metadata.OrcType.OrcTypeKind) Function(java.util.function.Function) SIZE_OF_BYTE(io.airlift.slice.SizeOf.SIZE_OF_BYTE) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) Objects.requireNonNull(java.util.Objects.requireNonNull) Collectors.mapping(java.util.stream.Collectors.mapping) AggregatedMemoryContext(io.trino.memory.context.AggregatedMemoryContext) Math.toIntExact(java.lang.Math.toIntExact) PostScript(io.trino.orc.metadata.PostScript) OrcMetadataReader(io.trino.orc.metadata.OrcMetadataReader) MAGIC(io.trino.orc.metadata.PostScript.MAGIC) ENGLISH(java.util.Locale.ENGLISH) OrcChunkLoader(io.trino.orc.stream.OrcChunkLoader) ROOT_COLUMN(io.trino.orc.metadata.OrcColumnId.ROOT_COLUMN) OrcType(io.trino.orc.metadata.OrcType) OrcDecompressor.createOrcDecompressor(io.trino.orc.OrcDecompressor.createOrcDecompressor) ImmutableMap(com.google.common.collect.ImmutableMap) HiveWriterVersion(io.trino.orc.metadata.PostScript.HiveWriterVersion) Predicate(java.util.function.Predicate) OrcInputStream(io.trino.orc.stream.OrcInputStream) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) IOException(java.io.IOException) ColumnMetadata(io.trino.orc.metadata.ColumnMetadata) Math.min(java.lang.Math.min) UTC(org.joda.time.DateTimeZone.UTC) Collectors(java.util.stream.Collectors) CompressionKind(io.trino.orc.metadata.CompressionKind) Footer(io.trino.orc.metadata.Footer) 
Metadata(io.trino.orc.metadata.Metadata) DataSize(io.airlift.units.DataSize) List(java.util.List) Collectors.toList(java.util.stream.Collectors.toList) Optional(java.util.Optional) Collections(java.util.Collections) Joiner(com.google.common.base.Joiner) OrcColumnId(io.trino.orc.metadata.OrcColumnId) InputStream(java.io.InputStream) OrcType(io.trino.orc.metadata.OrcType)

Example 24 with OrcColumnId

use of io.trino.orc.metadata.OrcColumnId in project trino by trinodb.

In the class StripeReader, method createDictionaryStreamSources:

/**
 * Builds checkpointed input-stream sources for every dictionary-encoded
 * stream in the stripe.  Non-dictionary streams and streams without any
 * backing data are skipped.
 */
private InputStreamSources createDictionaryStreamSources(Map<StreamId, Stream> streams, Map<StreamId, ValueInputStream<?>> valueStreams, ColumnMetadata<ColumnEncoding> columnEncodings) {
    ImmutableMap.Builder<StreamId, InputStreamSource<?>> dictionarySources = ImmutableMap.builder();
    streams.forEach((streamId, stream) -> {
        ColumnEncodingKind encodingKind = columnEncodings.get(stream.getColumnId()).getColumnEncodingKind();
        // only process dictionary streams
        if (!isDictionary(stream, encodingKind)) {
            return;
        }
        // skip streams without data
        ValueInputStream<?> valueStream = valueStreams.get(streamId);
        if (valueStream == null) {
            return;
        }
        OrcTypeKind columnType = types.get(stream.getColumnId()).getOrcTypeKind();
        StreamCheckpoint checkpoint = getDictionaryStreamCheckpoint(streamId, columnType, encodingKind);
        dictionarySources.put(streamId, createCheckpointStreamSource(valueStream, checkpoint));
    });
    return new InputStreamSources(dictionarySources.buildOrThrow());
}
Also used : OrcColumnId(io.trino.orc.metadata.OrcColumnId) OrcTypeKind(io.trino.orc.metadata.OrcType.OrcTypeKind) ImmutableMap(com.google.common.collect.ImmutableMap) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) ValueInputStreamSource(io.trino.orc.stream.ValueInputStreamSource) InputStreamSource(io.trino.orc.stream.InputStreamSource) InputStreamSources(io.trino.orc.stream.InputStreamSources) Stream(io.trino.orc.metadata.Stream) OrcInputStream(io.trino.orc.stream.OrcInputStream) ValueInputStream(io.trino.orc.stream.ValueInputStream) InputStream(java.io.InputStream) ColumnEncodingKind(io.trino.orc.metadata.ColumnEncoding.ColumnEncodingKind) StreamCheckpoint(io.trino.orc.checkpoint.StreamCheckpoint) Checkpoints.getDictionaryStreamCheckpoint(io.trino.orc.checkpoint.Checkpoints.getDictionaryStreamCheckpoint)

Aggregations

OrcColumnId (io.trino.orc.metadata.OrcColumnId)24 ImmutableMap (com.google.common.collect.ImmutableMap)10 Stream (io.trino.orc.metadata.Stream)9 ColumnStatistics (io.trino.orc.metadata.statistics.ColumnStatistics)9 ArrayList (java.util.ArrayList)9 OrcType (io.trino.orc.metadata.OrcType)8 List (java.util.List)8 ImmutableList (com.google.common.collect.ImmutableList)7 Slice (io.airlift.slice.Slice)5 CompressionKind (io.trino.orc.metadata.CompressionKind)5 Map (java.util.Map)5 ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList)4 Footer (io.trino.orc.metadata.Footer)4 OrcInputStream (io.trino.orc.stream.OrcInputStream)4 Page (io.trino.spi.Page)4 IOException (java.io.IOException)4 InputStream (java.io.InputStream)4 ByteBuffer (java.nio.ByteBuffer)3 Configuration (org.apache.hadoop.conf.Configuration)3 Path (org.apache.hadoop.fs.Path)3