Example 21 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

From class DeltaLakeMetadata, method finishOptimize:

private void finishOptimize(ConnectorSession session, DeltaLakeTableExecuteHandle executeHandle, Collection<Slice> fragments, List<Object> splitSourceInfo) {
    DeltaTableOptimizeHandle optimizeHandle = (DeltaTableOptimizeHandle) executeHandle.getProcedureHandle();
    long readVersion = optimizeHandle.getCurrentVersion().orElseThrow(() -> new IllegalArgumentException("currentVersion not set"));
    Optional<Long> checkpointInterval = optimizeHandle.getMetadataEntry().getCheckpointInterval();
    String tableLocation = executeHandle.getTableLocation();
    // paths to be deleted
    Set<Path> scannedPaths = splitSourceInfo.stream().map(file -> new Path((String) file)).collect(toImmutableSet());
    // files to be added
    List<DataFileInfo> dataFileInfos = fragments.stream().map(Slice::getBytes).map(dataFileInfoCodec::fromJson).collect(toImmutableList());
    boolean writeCommitted = false;
    try {
        TransactionLogWriter transactionLogWriter = transactionLogWriterFactory.newWriter(session, tableLocation);
        long createdTime = Instant.now().toEpochMilli();
        long commitVersion = readVersion + 1;
        transactionLogWriter.appendCommitInfoEntry(new CommitInfoEntry(commitVersion, createdTime, session.getUser(), session.getUser(), OPTIMIZE_OPERATION, ImmutableMap.of("queryId", session.getQueryId()), null, null, "trino-" + nodeVersion + "-" + nodeId, readVersion, ISOLATION_LEVEL, true));
        // TODO: Delta writes another field "operationMetrics" that I haven't
        // seen before. It contains delete/update metrics. Investigate/include it.
        long writeTimestamp = Instant.now().toEpochMilli();
        for (Path scannedPath : scannedPaths) {
            String relativePath = new Path(tableLocation).toUri().relativize(scannedPath.toUri()).toString();
            transactionLogWriter.appendRemoveFileEntry(new RemoveFileEntry(relativePath, writeTimestamp, false));
        }
        // Note: during writes we want to preserve original case of partition columns
        List<String> partitionColumns = optimizeHandle.getMetadataEntry().getOriginalPartitionColumns();
        appendAddFileEntries(transactionLogWriter, dataFileInfos, partitionColumns, false);
        transactionLogWriter.flush();
        writeCommitted = true;
        writeCheckpointIfNeeded(session, executeHandle.getSchemaTableName(), checkpointInterval, commitVersion);
    } catch (Exception e) {
        if (!writeCommitted) {
            // TODO perhaps it should happen in a background thread
            cleanupFailedWrite(session, tableLocation, dataFileInfos);
        }
        throw new TrinoException(DELTA_LAKE_BAD_WRITE, "Failed to write Delta Lake transaction log entry", e);
    }
}
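
The remove entries above record paths relative to the table root; that relative path comes from URI relativization. A minimal standalone sketch of the same step, using plain java.net.URI instead of Hadoop's Path, with a hypothetical s3 location:

import java.net.URI;

public class RelativizeDemo {
    public static void main(String[] args) {
        // Hypothetical table root and an absolute file path inside it
        URI tableLocation = URI.create("s3://bucket/warehouse/events/");
        URI scannedFile = URI.create("s3://bucket/warehouse/events/part-00000.parquet");
        // relativize() strips the common prefix, yielding the table-relative
        // path that the RemoveFileEntry stores in the transaction log
        String relativePath = tableLocation.relativize(scannedFile).toString();
        System.out.println(relativePath); // prints "part-00000.parquet"
    }
}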

Example 22 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

From class TupleDomainParquetPredicate, method getDomain:

/**
 * Get a domain for the ranges defined by each pair of elements from {@code minimums} and {@code maximums}.
 * Both arrays must have the same length.
 */
private static Domain getDomain(ColumnDescriptor column, Type type, List<Object> minimums, List<Object> maximums, boolean hasNullValue, DateTimeZone timeZone) {
    checkArgument(minimums.size() == maximums.size(), "Expected minimums and maximums to have the same size");
    if (type.equals(BOOLEAN)) {
        boolean hasTrueValues = minimums.stream().anyMatch(value -> (boolean) value) || maximums.stream().anyMatch(value -> (boolean) value);
        boolean hasFalseValues = minimums.stream().anyMatch(value -> !(boolean) value) || maximums.stream().anyMatch(value -> !(boolean) value);
        if (hasTrueValues && hasFalseValues) {
            return Domain.all(type);
        }
        if (hasTrueValues) {
            return Domain.create(ValueSet.of(type, true), hasNullValue);
        }
        if (hasFalseValues) {
            return Domain.create(ValueSet.of(type, false), hasNullValue);
        }
        // All nulls case is handled earlier
        throw new VerifyException("Impossible boolean statistics");
    }
    if (type.equals(BIGINT) || type.equals(INTEGER) || type.equals(DATE) || type.equals(SMALLINT) || type.equals(TINYINT)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            long min = asLong(minimums.get(i));
            long max = asLong(maximums.get(i));
            if (isStatisticsOverflow(type, min, max)) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            ranges.add(Range.range(type, min, true, max, true));
        }
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        List<Range> ranges = new ArrayList<>();
        if (decimalType.isShort()) {
            for (int i = 0; i < minimums.size(); i++) {
                Object min = minimums.get(i);
                Object max = maximums.get(i);
                long minValue = min instanceof Binary ? getShortDecimalValue(((Binary) min).getBytes()) : asLong(min);
                long maxValue = max instanceof Binary ? getShortDecimalValue(((Binary) max).getBytes()) : asLong(max);
                if (isStatisticsOverflow(type, minValue, maxValue)) {
                    return Domain.create(ValueSet.all(type), hasNullValue);
                }
                ranges.add(Range.range(type, minValue, true, maxValue, true));
            }
        } else {
            for (int i = 0; i < minimums.size(); i++) {
                Int128 min = Int128.fromBigEndian(((Binary) minimums.get(i)).getBytes());
                Int128 max = Int128.fromBigEndian(((Binary) maximums.get(i)).getBytes());
                ranges.add(Range.range(type, min, true, max, true));
            }
        }
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type.equals(REAL)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            Float min = (Float) minimums.get(i);
            Float max = (Float) maximums.get(i);
            if (min.isNaN() || max.isNaN()) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            ranges.add(Range.range(type, (long) floatToRawIntBits(min), true, (long) floatToRawIntBits(max), true));
        }
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type.equals(DOUBLE)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            Double min = (Double) minimums.get(i);
            Double max = (Double) maximums.get(i);
            if (min.isNaN() || max.isNaN()) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            ranges.add(Range.range(type, min, true, max, true));
        }
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type instanceof VarcharType) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            Slice min = Slices.wrappedBuffer(((Binary) minimums.get(i)).toByteBuffer());
            Slice max = Slices.wrappedBuffer(((Binary) maximums.get(i)).toByteBuffer());
            ranges.add(Range.range(type, min, true, max, true));
        }
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type instanceof TimestampType) {
        if (column.getPrimitiveType().getPrimitiveTypeName().equals(INT96)) {
            TrinoTimestampEncoder<?> timestampEncoder = createTimestampEncoder((TimestampType) type, timeZone);
            List<Object> values = new ArrayList<>();
            for (int i = 0; i < minimums.size(); i++) {
                Object min = minimums.get(i);
                Object max = maximums.get(i);
                // INT96 timestamp statistics are only usable when min and max are identical
                // Binary values; deserialized timestamps are only available and valid in that special case
                if (!(min instanceof Binary) || !(max instanceof Binary) || !min.equals(max)) {
                    return Domain.create(ValueSet.all(type), hasNullValue);
                }
                values.add(timestampEncoder.getTimestamp(decodeInt96Timestamp((Binary) min)));
            }
            return Domain.multipleValues(type, values, hasNullValue);
        }
        if (column.getPrimitiveType().getPrimitiveTypeName().equals(INT64)) {
            LogicalTypeAnnotation logicalTypeAnnotation = column.getPrimitiveType().getLogicalTypeAnnotation();
            if (!(logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation)) {
                // Invalid statistics. Unit and UTC adjustment are not known
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            TimestampLogicalTypeAnnotation timestampTypeAnnotation = (TimestampLogicalTypeAnnotation) logicalTypeAnnotation;
            // Bail out if the precision is not known
            if (timestampTypeAnnotation.getUnit() == null) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            TrinoTimestampEncoder<?> timestampEncoder = createTimestampEncoder((TimestampType) type, DateTimeZone.UTC);
            List<Range> ranges = new ArrayList<>();
            for (int i = 0; i < minimums.size(); i++) {
                long min = (long) minimums.get(i);
                long max = (long) maximums.get(i);
                ranges.add(Range.range(type, timestampEncoder.getTimestamp(decodeInt64Timestamp(min, timestampTypeAnnotation.getUnit())), true, timestampEncoder.getTimestamp(decodeInt64Timestamp(max, timestampTypeAnnotation.getUnit())), true));
            }
            return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
        }
    }
    return Domain.create(ValueSet.all(type), hasNullValue);
}
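
The REAL branch works because Trino represents REAL values as the raw int bit pattern of the float, widened to long (and NaN statistics are rejected above since NaN does not order against other values). A minimal standalone sketch of that encoding and its round trip, with no Trino dependencies:

import static java.lang.Float.floatToRawIntBits;
import static java.lang.Float.intBitsToFloat;

public class RealEncodingDemo {
    public static void main(String[] args) {
        float min = 1.5f;
        float max = 100.25f;
        // Widen the int bit pattern to long, as the Range bounds above do
        long minBound = floatToRawIntBits(min);
        long maxBound = floatToRawIntBits(max);
        // Decoding recovers the original floats exactly
        System.out.println(intBitsToFloat((int) minBound)); // 1.5
        System.out.println(intBitsToFloat((int) maxBound)); // 100.25
    }
}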

Example 23 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

From class QueryPlanner, method planExpand:

public RelationPlan planExpand(Query query) {
    checkArgument(analysis.isExpandableQuery(query), "query is not registered as expandable");
    Union union = (Union) query.getQueryBody();
    ImmutableList.Builder<NodeAndMappings> recursionSteps = ImmutableList.builder();
    // plan anchor relation
    Relation anchorNode = union.getRelations().get(0);
    RelationPlan anchorPlan = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, outerContext, session, recursiveSubqueries).process(anchorNode, null);
    // prune anchor plan outputs to contain only the symbols exposed in the scope
    NodeAndMappings prunedAnchorPlan = pruneInvisibleFields(anchorPlan, idAllocator);
    // if the anchor plan has duplicate output symbols, add projection on top to make the symbols unique
    // This is necessary to successfully unroll recursion: the recursion step relation must follow
    // the same layout while it might not have duplicate outputs where the anchor plan did
    NodeAndMappings disambiguatedAnchorPlan = disambiguateOutputs(prunedAnchorPlan, symbolAllocator, idAllocator);
    anchorPlan = new RelationPlan(disambiguatedAnchorPlan.getNode(), analysis.getScope(query), disambiguatedAnchorPlan.getFields(), outerContext);
    recursionSteps.add(copy(anchorPlan.getRoot(), anchorPlan.getFieldMappings()));
    // plan recursion step
    Relation recursionStepRelation = union.getRelations().get(1);
    RelationPlan recursionStepPlan = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, outerContext, session, ImmutableMap.of(NodeRef.of(analysis.getRecursiveReference(query)), anchorPlan)).process(recursionStepRelation, null);
    // coerce recursion step outputs and prune them to contain only the symbols exposed in the scope
    NodeAndMappings coercedRecursionStep;
    List<Type> types = analysis.getRelationCoercion(recursionStepRelation);
    if (types == null) {
        coercedRecursionStep = pruneInvisibleFields(recursionStepPlan, idAllocator);
    } else {
        coercedRecursionStep = coerce(recursionStepPlan, types, symbolAllocator, idAllocator);
    }
    NodeAndMappings replacementSpot = new NodeAndMappings(anchorPlan.getRoot(), anchorPlan.getFieldMappings());
    PlanNode recursionStep = coercedRecursionStep.getNode();
    List<Symbol> mappings = coercedRecursionStep.getFields();
    // unroll recursion
    int maxRecursionDepth = getMaxRecursionDepth(session);
    for (int i = 0; i < maxRecursionDepth; i++) {
        recursionSteps.add(copy(recursionStep, mappings));
        NodeAndMappings replacement = copy(recursionStep, mappings);
        // if the recursion step plan has duplicate output symbols, add projection on top to make the symbols unique
        // This is necessary to successfully unroll recursion: the relation on the next recursion step must follow
        // the same layout while it might not have duplicate outputs where the plan for this step did
        replacement = disambiguateOutputs(replacement, symbolAllocator, idAllocator);
        recursionStep = replace(recursionStep, replacementSpot, replacement);
        replacementSpot = replacement;
    }
    // after the last recursion step, check whether the recursion converged; the last step is expected to return an empty result
    // 1. append window to count rows
    NodeAndMappings checkConvergenceStep = copy(recursionStep, mappings);
    Symbol countSymbol = symbolAllocator.newSymbol("count", BIGINT);
    ResolvedFunction function = plannerContext.getMetadata().resolveFunction(session, QualifiedName.of("count"), ImmutableList.of());
    WindowNode.Function countFunction = new WindowNode.Function(function, ImmutableList.of(), DEFAULT_FRAME, false);
    WindowNode windowNode = new WindowNode(idAllocator.getNextId(), checkConvergenceStep.getNode(), new WindowNode.Specification(ImmutableList.of(), Optional.empty()), ImmutableMap.of(countSymbol, countFunction), Optional.empty(), ImmutableSet.of(), 0);
    // 2. append filter to fail on non-empty result
    ResolvedFunction fail = plannerContext.getMetadata().resolveFunction(session, QualifiedName.of("fail"), fromTypes(VARCHAR));
    String recursionLimitExceededMessage = format("Recursion depth limit exceeded (%s). Use 'max_recursion_depth' session property to modify the limit.", maxRecursionDepth);
    Expression predicate = new IfExpression(new ComparisonExpression(GREATER_THAN_OR_EQUAL, countSymbol.toSymbolReference(), new GenericLiteral("BIGINT", "0")), new Cast(new FunctionCall(fail.toQualifiedName(), ImmutableList.of(new Cast(new StringLiteral(recursionLimitExceededMessage), toSqlType(VARCHAR)))), toSqlType(BOOLEAN)), TRUE_LITERAL);
    FilterNode filterNode = new FilterNode(idAllocator.getNextId(), windowNode, predicate);
    recursionSteps.add(new NodeAndMappings(filterNode, checkConvergenceStep.getFields()));
    // union all the recursion steps
    List<NodeAndMappings> recursionStepsToUnion = recursionSteps.build();
    List<Symbol> unionOutputSymbols = anchorPlan.getFieldMappings().stream().map(symbol -> symbolAllocator.newSymbol(symbol, "_expanded")).collect(toImmutableList());
    ImmutableListMultimap.Builder<Symbol, Symbol> unionSymbolMapping = ImmutableListMultimap.builder();
    for (NodeAndMappings plan : recursionStepsToUnion) {
        for (int i = 0; i < unionOutputSymbols.size(); i++) {
            unionSymbolMapping.put(unionOutputSymbols.get(i), plan.getFields().get(i));
        }
    }
    List<PlanNode> nodesToUnion = recursionStepsToUnion.stream().map(NodeAndMappings::getNode).collect(toImmutableList());
    PlanNode result = new UnionNode(idAllocator.getNextId(), nodesToUnion, unionSymbolMapping.build(), unionOutputSymbols);
    if (union.isDistinct()) {
        result = new AggregationNode(idAllocator.getNextId(), result, ImmutableMap.of(), singleGroupingSet(result.getOutputSymbols()), ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), Optional.empty());
    }
    return new RelationPlan(result, anchorPlan.getScope(), unionOutputSymbols, outerContext);
}
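
The convergence predicate built midway through the method is dense. Unpacked, it reads as below: since the window count is at least zero for every row that reaches the filter, any row surviving the final unrolled step triggers fail(). This is a restatement of the existing construction, not standalone code; it reuses the symbols in scope at that point of planExpand:

// count >= 0 is trivially true for any row the filter sees, so the mere
// existence of a row after the last unrolled step means no convergence
Expression rowExists = new ComparisonExpression(
        GREATER_THAN_OR_EQUAL,
        countSymbol.toSymbolReference(),
        new GenericLiteral("BIGINT", "0"));
// fail(<message>) cast to BOOLEAN so it can occupy a boolean position
Expression failCall = new Cast(
        new FunctionCall(
                fail.toQualifiedName(),
                ImmutableList.of(new Cast(new StringLiteral(recursionLimitExceededMessage), toSqlType(VARCHAR)))),
        toSqlType(BOOLEAN));
// IF(rowExists, fail(...), TRUE): evaluating the true branch raises the error
Expression predicate = new IfExpression(rowExists, failCall, TRUE_LITERAL);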

Example 24 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

From class PhoenixClient, method beginCreateTable:

@Override
public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) {
    SchemaTableName schemaTableName = tableMetadata.getTable();
    String schema = schemaTableName.getSchemaName();
    String table = schemaTableName.getTableName();
    if (!getSchemaNames(session).contains(schema)) {
        throw new SchemaNotFoundException(schema);
    }
    try (Connection connection = connectionFactory.openConnection(session)) {
        ConnectorIdentity identity = session.getIdentity();
        schema = getIdentifierMapping().toRemoteSchemaName(identity, connection, schema);
        table = getIdentifierMapping().toRemoteTableName(identity, connection, schema, table);
        schema = toPhoenixSchemaName(schema);
        LinkedList<ColumnMetadata> tableColumns = new LinkedList<>(tableMetadata.getColumns());
        Map<String, Object> tableProperties = tableMetadata.getProperties();
        Optional<Boolean> immutableRows = PhoenixTableProperties.getImmutableRows(tableProperties);
        String immutable = immutableRows.isPresent() && immutableRows.get() ? "IMMUTABLE" : "";
        ImmutableList.Builder<String> columnNames = ImmutableList.builder();
        ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
        ImmutableList.Builder<String> columnList = ImmutableList.builder();
        Set<ColumnMetadata> rowkeyColumns = tableColumns.stream().filter(col -> isPrimaryKey(col, tableProperties)).collect(toSet());
        ImmutableList.Builder<String> pkNames = ImmutableList.builder();
        Optional<String> rowkeyColumn = Optional.empty();
        if (rowkeyColumns.isEmpty()) {
            // Add a rowkey when not specified in DDL
            columnList.add(ROWKEY + " bigint not null");
            pkNames.add(ROWKEY);
            execute(session, format("CREATE SEQUENCE %s", getEscapedTableName(schema, table + "_sequence")));
            rowkeyColumn = Optional.of(ROWKEY);
        }
        for (ColumnMetadata column : tableColumns) {
            String columnName = getIdentifierMapping().toRemoteColumnName(connection, column.getName());
            columnNames.add(columnName);
            columnTypes.add(column.getType());
            String typeStatement = toWriteMapping(session, column.getType()).getDataType();
            if (rowkeyColumns.contains(column)) {
                typeStatement += " not null";
                pkNames.add(columnName);
            }
            columnList.add(format("%s %s", getEscapedArgument(columnName), typeStatement));
        }
        ImmutableList.Builder<String> tableOptions = ImmutableList.builder();
        PhoenixTableProperties.getSaltBuckets(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.SALT_BUCKETS + "=" + value));
        PhoenixTableProperties.getSplitOn(tableProperties).ifPresent(value -> tableOptions.add("SPLIT ON (" + value.replace('"', '\'') + ")"));
        PhoenixTableProperties.getDisableWal(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DISABLE_WAL + "=" + value));
        PhoenixTableProperties.getDefaultColumnFamily(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DEFAULT_COLUMN_FAMILY + "=" + value));
        PhoenixTableProperties.getBloomfilter(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.BLOOMFILTER + "='" + value + "'"));
        PhoenixTableProperties.getVersions(tableProperties).ifPresent(value -> tableOptions.add(HConstants.VERSIONS + "=" + value));
        PhoenixTableProperties.getMinVersions(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.MIN_VERSIONS + "=" + value));
        PhoenixTableProperties.getCompression(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.COMPRESSION + "='" + value + "'"));
        PhoenixTableProperties.getTimeToLive(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.TTL + "=" + value));
        PhoenixTableProperties.getDataBlockEncoding(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + value + "'"));
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s", immutable, getEscapedTableName(schema, table), join(", ", columnList.build()), join(", ", pkNames.build()), join(", ", tableOptions.build()));
        execute(session, sql);
        return new PhoenixOutputTableHandle(schema, table, columnNames.build(), columnTypes.build(), Optional.empty(), rowkeyColumn);
    } catch (SQLException e) {
        if (e.getErrorCode() == SQLExceptionCode.TABLE_ALREADY_EXIST.getErrorCode()) {
            throw new TrinoException(ALREADY_EXISTS, "Phoenix table already exists", e);
        }
        throw new TrinoException(PHOENIX_METADATA_ERROR, "Error creating Phoenix table", e);
    }
}
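
For a concrete sense of the DDL this produces, here is the same format template applied to hypothetical inputs; only the template string is taken from the method, while the table name, columns, and options are invented:

import static java.lang.String.format;
import static java.lang.String.join;
import java.util.List;

public class PhoenixDdlDemo {
    public static void main(String[] args) {
        // Hypothetical stand-ins for the builders in beginCreateTable
        String immutable = "IMMUTABLE";
        String escapedTableName = "\"ns\".\"events\"";
        List<String> columnList = List.of("\"id\" bigint not null", "\"payload\" varchar");
        List<String> pkNames = List.of("\"id\"");
        List<String> tableOptions = List.of("SALT_BUCKETS=16", "DISABLE_WAL=true");
        // Same format template as the method above
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s",
                immutable, escapedTableName, join(", ", columnList), join(", ", pkNames), join(", ", tableOptions));
        System.out.println(sql);
        // CREATE IMMUTABLE TABLE "ns"."events" ("id" bigint not null, "payload" varchar ,
        // CONSTRAINT PK PRIMARY KEY ("id")) SALT_BUCKETS=16, DISABLE_WAL=true
    }
}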

Example 25 with BOOLEAN

use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

the class MongoSession method guessFieldType.

private Optional<TypeSignature> guessFieldType(Object value) {
    if (value == null) {
        return Optional.empty();
    }
    TypeSignature typeSignature = null;
    if (value instanceof String) {
        typeSignature = createUnboundedVarcharType().getTypeSignature();
    } else if (value instanceof Binary) {
        typeSignature = VARBINARY.getTypeSignature();
    } else if (value instanceof Integer || value instanceof Long) {
        typeSignature = BIGINT.getTypeSignature();
    } else if (value instanceof Boolean) {
        typeSignature = BOOLEAN.getTypeSignature();
    } else if (value instanceof Float || value instanceof Double) {
        typeSignature = DOUBLE.getTypeSignature();
    } else if (value instanceof Date) {
        typeSignature = TIMESTAMP_MILLIS.getTypeSignature();
    } else if (value instanceof ObjectId) {
        typeSignature = OBJECT_ID.getTypeSignature();
    } else if (value instanceof List) {
        List<Optional<TypeSignature>> subTypes = ((List<?>) value).stream().map(this::guessFieldType).collect(toList());
        if (subTypes.isEmpty() || subTypes.stream().anyMatch(Optional::isEmpty)) {
            return Optional.empty();
        }
        Set<TypeSignature> signatures = subTypes.stream().map(Optional::get).collect(toSet());
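        // A single distinct element signature maps to array(T); mixed element
        // signatures fall back to a positional row with generated field names.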
        if (signatures.size() == 1) {
            typeSignature = new TypeSignature(StandardTypes.ARRAY, signatures.stream().map(TypeSignatureParameter::typeParameter).collect(Collectors.toList()));
        } else {
            // TODO: client doesn't handle empty field name row type yet
            typeSignature = new TypeSignature(StandardTypes.ROW, IntStream.range(0, subTypes.size()).mapToObj(idx -> TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(format("%s%d", implicitPrefix, idx + 1))), subTypes.get(idx).get()))).collect(toList()));
        }
    } else if (value instanceof Document) {
        List<TypeSignatureParameter> parameters = new ArrayList<>();
        for (String key : ((Document) value).keySet()) {
            Optional<TypeSignature> fieldType = guessFieldType(((Document) value).get(key));
            if (fieldType.isPresent()) {
                parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(key)), fieldType.get())));
            }
        }
        if (!parameters.isEmpty()) {
            typeSignature = new TypeSignature(StandardTypes.ROW, parameters);
        }
    } else if (value instanceof DBRef) {
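        // A DBRef becomes a three-field row: database and collection names as
        // varchar, plus the referenced $id, whose type is guessed recursively below.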
        List<TypeSignatureParameter> parameters = new ArrayList<>();
        TypeSignature idFieldType = guessFieldType(((DBRef) value).getId()).orElseThrow(() -> new UnsupportedOperationException("Unable to guess $id field type of DBRef from: " + ((DBRef) value).getId()));
        parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(DATABASE_NAME)), VARCHAR.getTypeSignature())));
        parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(COLLECTION_NAME)), VARCHAR.getTypeSignature())));
        parameters.add(TypeSignatureParameter.namedTypeParameter(new NamedTypeSignature(Optional.of(new RowFieldName(ID)), idFieldType)));
        typeSignature = new TypeSignature(StandardTypes.ROW, parameters);
    }
    return Optional.ofNullable(typeSignature);
}
Also used : Document(org.bson.Document) Arrays(java.util.Arrays) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) Date(java.util.Date) MongoDatabase(com.mongodb.client.MongoDatabase) SchemaNotFoundException(io.trino.spi.connector.SchemaNotFoundException) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) EvictableCacheBuilder(io.trino.collect.cache.EvictableCacheBuilder) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Map(java.util.Map) ENGLISH(java.util.Locale.ENGLISH) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) Collectors.toSet(java.util.stream.Collectors.toSet) TypeSignature(io.trino.spi.type.TypeSignature) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Range(io.trino.spi.predicate.Range) Domain(io.trino.spi.predicate.Domain) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Set(java.util.Set) TrinoException(io.trino.spi.TrinoException) Collectors(java.util.stream.Collectors) SchemaTableName(io.trino.spi.connector.SchemaTableName) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) List(java.util.List) BIGINT(io.trino.spi.type.BigintType.BIGINT) FindIterable(com.mongodb.client.FindIterable) OBJECT_ID(io.trino.plugin.mongodb.ObjectIdType.OBJECT_ID) Optional(java.util.Optional) HostAddress(io.trino.spi.HostAddress) TypeSignatureParameter(io.trino.spi.type.TypeSignatureParameter) IntStream(java.util.stream.IntStream) MongoClient(com.mongodb.client.MongoClient) MongoCollection(com.mongodb.client.MongoCollection) Slice(io.airlift.slice.Slice) Logger(io.airlift.log.Logger) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) Type(io.trino.spi.type.Type) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) Shorts(com.google.common.primitives.Shorts) MINUTES(java.util.concurrent.TimeUnit.MINUTES) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) VarcharType(io.trino.spi.type.VarcharType) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) MongoCursor(com.mongodb.client.MongoCursor) Verify.verify(com.google.common.base.Verify.verify) UncheckedExecutionException(com.google.common.util.concurrent.UncheckedExecutionException) Objects.requireNonNull(java.util.Objects.requireNonNull) ColumnHandle(io.trino.spi.connector.ColumnHandle) RowFieldName(io.trino.spi.type.RowFieldName) HostAddress.fromParts(io.trino.spi.HostAddress.fromParts) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) Math.toIntExact(java.lang.Math.toIntExact) Binary(org.bson.types.Binary) IntegerType(io.trino.spi.type.IntegerType) SignedBytes(com.google.common.primitives.SignedBytes) StandardTypes(io.trino.spi.type.StandardTypes) IndexOptions(com.mongodb.client.model.IndexOptions) TupleDomain(io.trino.spi.predicate.TupleDomain) Throwables.throwIfInstanceOf(com.google.common.base.Throwables.throwIfInstanceOf) Primitives(com.google.common.primitives.Primitives) ExecutionException(java.util.concurrent.ExecutionException) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) Collectors.toList(java.util.stream.Collectors.toList) DBRef(com.mongodb.DBRef) DeleteResult(com.mongodb.client.result.DeleteResult) ObjectId(org.bson.types.ObjectId) VisibleForTesting(com.google.common.annotations.VisibleForTesting) 
TINYINT(io.trino.spi.type.TinyintType.TINYINT) Cache(com.google.common.cache.Cache) TypeManager(io.trino.spi.type.TypeManager) Optional(java.util.Optional) ObjectId(org.bson.types.ObjectId) RowFieldName(io.trino.spi.type.RowFieldName) DBRef(com.mongodb.DBRef) ArrayList(java.util.ArrayList) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) Document(org.bson.Document) Date(java.util.Date) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) TypeSignature(io.trino.spi.type.TypeSignature) TypeSignatureParameter(io.trino.spi.type.TypeSignatureParameter) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) Binary(org.bson.types.Binary)
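As a quick, hedged illustration of the inference above, the sketch below mirrors guessFieldType's dispatch order using plain type-name strings in place of Trino's TypeSignature, so it compiles with no Trino SPI or MongoDB driver on the classpath. The class GuessDemo, the method guess, and the string type names are stand-ins invented for this sketch only; they are not Trino API.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

// Hypothetical demo: a simplified mirror of MongoSession.guessFieldType that
// returns type names as strings instead of io.trino.spi.type.TypeSignature.
public final class GuessDemo {
    static Optional<String> guess(Object value) {
        if (value == null) {
            return Optional.empty();
        }
        if (value instanceof String) {
            return Optional.of("varchar");
        }
        if (value instanceof Integer || value instanceof Long) {
            return Optional.of("bigint");
        }
        if (value instanceof Boolean) {
            return Optional.of("boolean");
        }
        if (value instanceof Float || value instanceof Double) {
            return Optional.of("double");
        }
        if (value instanceof List) {
            List<Optional<String>> subTypes = ((List<?>) value).stream()
                    .map(GuessDemo::guess)
                    .collect(Collectors.toList());
            // Like the original: give up if any element is unguessable.
            if (subTypes.isEmpty() || subTypes.stream().anyMatch(Optional::isEmpty)) {
                return Optional.empty();
            }
            Set<String> elementTypes = subTypes.stream().map(Optional::get).collect(Collectors.toSet());
            // Homogeneous element types become array(T); the real method turns
            // mixed element types into a positional row, which this sketch skips.
            if (elementTypes.size() == 1) {
                return Optional.of("array(" + elementTypes.iterator().next() + ")");
            }
            return Optional.empty();
        }
        if (value instanceof Map) {
            // Stands in for the Document branch: each guessable field becomes
            // a named row field; unguessable fields are dropped.
            String fields = ((Map<?, ?>) value).entrySet().stream()
                    .map(e -> guess(e.getValue()).map(t -> e.getKey() + " " + t))
                    .flatMap(Optional::stream)
                    .collect(Collectors.joining(", "));
            return fields.isEmpty() ? Optional.empty() : Optional.of("row(" + fields + ")");
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Map<String, Object> doc = new LinkedHashMap<>();
        doc.put("name", "alice");
        doc.put("active", true);
        doc.put("scores", List.of(1L, 2L, 3L));
        // Prints: Optional[row(name varchar, active boolean, scores array(bigint))]
        System.out.println(guess(doc));
    }
}

Running main prints the row signature shown in the comment. As in the original method, anything unclassifiable yields Optional.empty(), so schema inference can skip the field rather than fail outright.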

Aggregations

BOOLEAN (io.trino.spi.type.BooleanType.BOOLEAN): 25 usages
BIGINT (io.trino.spi.type.BigintType.BIGINT): 24 usages
List (java.util.List): 22 usages
Optional (java.util.Optional): 22 usages
ImmutableList (com.google.common.collect.ImmutableList): 20 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 18 usages
DOUBLE (io.trino.spi.type.DoubleType.DOUBLE): 18 usages
Type (io.trino.spi.type.Type): 18 usages
Map (java.util.Map): 18 usages
INTEGER (io.trino.spi.type.IntegerType.INTEGER): 17 usages
VarcharType (io.trino.spi.type.VarcharType): 17 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 16 usages
DecimalType (io.trino.spi.type.DecimalType): 16 usages
REAL (io.trino.spi.type.RealType.REAL): 16 usages
SMALLINT (io.trino.spi.type.SmallintType.SMALLINT): 16 usages
TINYINT (io.trino.spi.type.TinyintType.TINYINT): 16 usages
Set (java.util.Set): 16 usages
ArrayType (io.trino.spi.type.ArrayType): 15 usages
DATE (io.trino.spi.type.DateType.DATE): 15 usages
String.format (java.lang.String.format): 15 usages