Use of io.trino.orc.metadata.OrcType.OrcTypeKind in project trino by trinodb.
The class StripeReader, method createValueStreams:
private Map<StreamId, ValueInputStream<?>> createValueStreams(Map<StreamId, Stream> streams, Map<StreamId, OrcChunkLoader> streamsData, ColumnMetadata<ColumnEncoding> columnEncodings)
{
    ImmutableMap.Builder<StreamId, ValueInputStream<?>> valueStreams = ImmutableMap.builder();
    for (Entry<StreamId, Stream> entry : streams.entrySet()) {
        StreamId streamId = entry.getKey();
        Stream stream = entry.getValue();
        ColumnEncodingKind columnEncoding = columnEncodings.get(stream.getColumnId()).getColumnEncodingKind();

        // skip index and empty streams
        if (isIndexStream(stream) || stream.getLength() == 0) {
            continue;
        }

        OrcChunkLoader chunkLoader = streamsData.get(streamId);
        OrcTypeKind columnType = types.get(stream.getColumnId()).getOrcTypeKind();
        valueStreams.put(streamId, ValueStreams.createValueStreams(streamId, chunkLoader, columnType, columnEncoding));
    }
    return valueStreams.buildOrThrow();
}
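The loop above is a filter-and-build pattern: skip streams that carry no row data, look up the column's OrcTypeKind and encoding, and collect the surviving entries in a Guava ImmutableMap.Builder. Below is a minimal, self-contained sketch of the same pattern; StreamKey, StreamInfo, and buildValueStreams are hypothetical stand-ins for illustration, not Trino types.

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class FilterAndBuildSketch
{
    // hypothetical stand-ins for Trino's StreamId and Stream
    record StreamKey(int columnId, String kind) {}
    record StreamInfo(long length, boolean isIndex) {}

    static Map<StreamKey, String> buildValueStreams(Map<StreamKey, StreamInfo> streams)
    {
        ImmutableMap.Builder<StreamKey, String> builder = ImmutableMap.builder();
        for (Map.Entry<StreamKey, StreamInfo> entry : streams.entrySet()) {
            StreamInfo info = entry.getValue();
            // skip index and empty streams, mirroring the check above
            if (info.isIndex() || info.length() == 0) {
                continue;
            }
            builder.put(entry.getKey(), "value stream for column " + entry.getKey().columnId());
        }
        // buildOrThrow() fails fast if the same key was added twice
        return builder.buildOrThrow();
    }

    public static void main(String[] args)
    {
        Map<StreamKey, StreamInfo> streams = Map.of(
                new StreamKey(1, "DATA"), new StreamInfo(100, false),
                new StreamKey(1, "ROW_INDEX"), new StreamInfo(50, true),
                new StreamKey(2, "DATA"), new StreamInfo(0, false));
        // only column 1's DATA stream survives the filter
        System.out.println(buildValueStreams(streams));
    }
}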
Use of io.trino.orc.metadata.OrcType.OrcTypeKind in project trino by trinodb.
The class Checkpoints, method getStreamCheckpoints:
public static Map<StreamId, StreamCheckpoint> getStreamCheckpoints(Set<OrcColumnId> columns, ColumnMetadata<OrcType> columnTypes, boolean compressed, int rowGroupId, ColumnMetadata<ColumnEncoding> columnEncodings, Map<StreamId, Stream> streams, Map<StreamId, List<RowGroupIndex>> columnIndexes)
        throws InvalidCheckpointException
{
    ImmutableSetMultimap.Builder<OrcColumnId, StreamKind> streamKindsBuilder = ImmutableSetMultimap.builder();
    for (Stream stream : streams.values()) {
        streamKindsBuilder.put(stream.getColumnId(), stream.getStreamKind());
    }
    SetMultimap<OrcColumnId, StreamKind> streamKinds = streamKindsBuilder.build();

    ImmutableMap.Builder<StreamId, StreamCheckpoint> checkpoints = ImmutableMap.builder();
    for (Map.Entry<StreamId, List<RowGroupIndex>> entry : columnIndexes.entrySet()) {
        OrcColumnId columnId = entry.getKey().getColumnId();
        if (!columns.contains(columnId)) {
            continue;
        }

        List<Integer> positionsList = entry.getValue().get(rowGroupId).getPositions();
        ColumnEncodingKind columnEncoding = columnEncodings.get(columnId).getColumnEncodingKind();
        OrcTypeKind columnType = columnTypes.get(columnId).getOrcTypeKind();
        Set<StreamKind> availableStreams = streamKinds.get(columnId);
        ColumnPositionsList columnPositionsList = new ColumnPositionsList(columnId, columnType, positionsList);
        switch (columnType) {
            case BOOLEAN:
                checkpoints.putAll(getBooleanColumnCheckpoints(columnId, compressed, availableStreams, columnPositionsList));
                break;
            case BYTE:
                checkpoints.putAll(getByteColumnCheckpoints(columnId, compressed, availableStreams, columnPositionsList));
                break;
            case SHORT:
            case INT:
            case LONG:
            case DATE:
                checkpoints.putAll(getLongColumnCheckpoints(columnId, columnEncoding, compressed, availableStreams, columnPositionsList));
                break;
            case FLOAT:
                checkpoints.putAll(getFloatColumnCheckpoints(columnId, compressed, availableStreams, columnPositionsList));
                break;
            case DOUBLE:
                checkpoints.putAll(getDoubleColumnCheckpoints(columnId, compressed, availableStreams, columnPositionsList));
                break;
            case TIMESTAMP:
            case TIMESTAMP_INSTANT:
                checkpoints.putAll(getTimestampColumnCheckpoints(columnId, columnEncoding, compressed, availableStreams, columnPositionsList));
                break;
            case BINARY:
            case STRING:
            case VARCHAR:
            case CHAR:
                checkpoints.putAll(getSliceColumnCheckpoints(columnId, columnEncoding, compressed, availableStreams, columnPositionsList));
                break;
            case LIST:
            case MAP:
                checkpoints.putAll(getListOrMapColumnCheckpoints(columnId, columnEncoding, compressed, availableStreams, columnPositionsList));
                break;
            case STRUCT:
                checkpoints.putAll(getStructColumnCheckpoints(columnId, compressed, availableStreams, columnPositionsList));
                break;
            case DECIMAL:
                checkpoints.putAll(getDecimalColumnCheckpoints(columnId, columnEncoding, compressed, availableStreams, columnPositionsList));
                break;
            default:
                throw new IllegalArgumentException("Unsupported column type " + columnType);
        }
    }
    return checkpoints.buildOrThrow();
}
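The switch above dispatches on OrcTypeKind to a per-type checkpoint factory, grouping kinds that share a checkpoint layout and rejecting anything unknown. The following is a minimal sketch of that dispatch shape using a hypothetical TypeKind enum and Checkpoint record; it is an illustration of the pattern, not Trino's Checkpoints API.

import java.util.List;

public class CheckpointDispatchSketch
{
    // hypothetical subset of type kinds, standing in for OrcTypeKind
    enum TypeKind { BOOLEAN, LONG, DOUBLE, STRING, STRUCT }

    record Checkpoint(String description) {}

    static List<Checkpoint> checkpointsFor(TypeKind kind, List<Integer> positions)
    {
        // exhaustive switch expression: kinds that share a checkpoint shape share a branch;
        // the original code adds a default branch that throws for unsupported kinds
        return switch (kind) {
            case BOOLEAN, LONG, DOUBLE -> List.of(new Checkpoint(kind + " checkpoint at " + positions));
            case STRING, STRUCT -> List.of(new Checkpoint(kind + " checkpoint (encoding-dependent) at " + positions));
        };
    }

    public static void main(String[] args)
    {
        System.out.println(checkpointsFor(TypeKind.LONG, List.of(0, 16)));
        System.out.println(checkpointsFor(TypeKind.STRING, List.of(0, 16, 32)));
    }
}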
Use of io.trino.orc.metadata.OrcType.OrcTypeKind in project trino by trinodb.
The class StripeReader, method createDictionaryStreamSources:
private InputStreamSources createDictionaryStreamSources(Map<StreamId, Stream> streams, Map<StreamId, ValueInputStream<?>> valueStreams, ColumnMetadata<ColumnEncoding> columnEncodings)
{
    ImmutableMap.Builder<StreamId, InputStreamSource<?>> dictionaryStreamBuilder = ImmutableMap.builder();
    for (Entry<StreamId, Stream> entry : streams.entrySet()) {
        StreamId streamId = entry.getKey();
        Stream stream = entry.getValue();
        OrcColumnId column = stream.getColumnId();

        // only process dictionary streams
        ColumnEncodingKind columnEncoding = columnEncodings.get(column).getColumnEncodingKind();
        if (!isDictionary(stream, columnEncoding)) {
            continue;
        }

        // skip streams without data
        ValueInputStream<?> valueStream = valueStreams.get(streamId);
        if (valueStream == null) {
            continue;
        }

        OrcTypeKind columnType = types.get(stream.getColumnId()).getOrcTypeKind();
        StreamCheckpoint streamCheckpoint = getDictionaryStreamCheckpoint(streamId, columnType, columnEncoding);
        InputStreamSource<?> streamSource = createCheckpointStreamSource(valueStream, streamCheckpoint);
        dictionaryStreamBuilder.put(streamId, streamSource);
    }
    return new InputStreamSources(dictionaryStreamBuilder.buildOrThrow());
}
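Here each surviving dictionary stream is paired with a checkpoint so the stream can be repositioned whenever a reader opens it through the resulting stream source. The sketch below is a hypothetical analogue of that wrapping step; SeekableStream, Checkpoint, and checkpointSource are illustrative names, not Trino's InputStreamSource API.

import java.util.function.Supplier;

public class CheckpointStreamSourceSketch
{
    // hypothetical stand-in for a value stream that can be repositioned
    interface SeekableStream
    {
        void seekTo(long offset);
    }

    record Checkpoint(long offset) {}

    static Supplier<SeekableStream> checkpointSource(SeekableStream stream, Checkpoint checkpoint)
    {
        return () -> {
            // reposition to the dictionary checkpoint before handing the stream out
            stream.seekTo(checkpoint.offset());
            return stream;
        };
    }

    public static void main(String[] args)
    {
        SeekableStream stream = offset -> System.out.println("seek to " + offset);
        checkpointSource(stream, new Checkpoint(42)).get(); // prints "seek to 42"
    }
}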