Example 1 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

Class ColumnJdbcTable, method applyFilter:

@Override
public TupleDomain<ColumnHandle> applyFilter(ConnectorSession connectorSession, Constraint constraint) {
    TupleDomain<ColumnHandle> tupleDomain = constraint.getSummary();
    if (tupleDomain.isNone() || constraint.predicate().isEmpty()) {
        return tupleDomain;
    }
    Predicate<Map<ColumnHandle, NullableValue>> predicate = constraint.predicate().get();
    Set<ColumnHandle> predicateColumns = constraint.getPredicateColumns().orElseThrow(() -> new VerifyException("columns not present for a predicate"));
    boolean hasSchemaPredicate = predicateColumns.contains(TABLE_SCHEMA_COLUMN);
    boolean hasTablePredicate = predicateColumns.contains(TABLE_NAME_COLUMN);
    if (!hasSchemaPredicate && !hasTablePredicate) {
        // No filter on schema name or table name at all.
        return tupleDomain;
    }
    Session session = ((FullConnectorSession) connectorSession).getSession();
    Optional<String> catalogFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_CATALOG_COLUMN);
    Optional<String> schemaFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_SCHEMA_COLUMN);
    Optional<String> tableFilter = tryGetSingleVarcharValue(tupleDomain, TABLE_NAME_COLUMN);
    if (schemaFilter.isPresent() && tableFilter.isPresent()) {
        // No need to narrow down the domain.
        return tupleDomain;
    }
    List<String> catalogs = listCatalogs(session, metadata, accessControl, catalogFilter).keySet().stream()
            .filter(catalogName -> predicate.test(ImmutableMap.of(TABLE_CATALOG_COLUMN, toNullableValue(catalogName))))
            .collect(toImmutableList());
    List<CatalogSchemaName> schemas = catalogs.stream()
            .flatMap(catalogName -> listSchemas(session, metadata, accessControl, catalogName, schemaFilter).stream()
                    .filter(schemaName -> !hasSchemaPredicate || predicate.test(ImmutableMap.of(
                            TABLE_CATALOG_COLUMN, toNullableValue(catalogName),
                            TABLE_SCHEMA_COLUMN, toNullableValue(schemaName))))
                    .map(schemaName -> new CatalogSchemaName(catalogName, schemaName)))
            .collect(toImmutableList());
    if (!hasTablePredicate) {
        return TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
                .put(TABLE_CATALOG_COLUMN, schemas.stream()
                        .map(CatalogSchemaName::getCatalogName)
                        .collect(toVarcharDomain())
                        .simplify(MAX_DOMAIN_SIZE))
                .put(TABLE_SCHEMA_COLUMN, schemas.stream()
                        .map(CatalogSchemaName::getSchemaName)
                        .collect(toVarcharDomain())
                        .simplify(MAX_DOMAIN_SIZE))
                .buildOrThrow());
    }
    List<CatalogSchemaTableName> tables = schemas.stream()
            .flatMap(schema -> {
                QualifiedTablePrefix tablePrefix = tableFilter.isPresent()
                        ? new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName(), tableFilter.get())
                        : new QualifiedTablePrefix(schema.getCatalogName(), schema.getSchemaName());
                return listTables(session, metadata, accessControl, tablePrefix).stream()
                        .filter(schemaTableName -> predicate.test(ImmutableMap.of(
                                TABLE_CATALOG_COLUMN, toNullableValue(schema.getCatalogName()),
                                TABLE_SCHEMA_COLUMN, toNullableValue(schemaTableName.getSchemaName()),
                                TABLE_NAME_COLUMN, toNullableValue(schemaTableName.getTableName()))))
                        .map(schemaTableName -> new CatalogSchemaTableName(schema.getCatalogName(), schemaTableName.getSchemaName(), schemaTableName.getTableName()));
            })
            .collect(toImmutableList());
    return TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
            .put(TABLE_CATALOG_COLUMN, tables.stream()
                    .map(CatalogSchemaTableName::getCatalogName)
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .put(TABLE_SCHEMA_COLUMN, tables.stream()
                    .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getSchemaName())
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .put(TABLE_NAME_COLUMN, tables.stream()
                    .map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getTableName())
                    .collect(toVarcharDomain())
                    .simplify(MAX_DOMAIN_SIZE))
            .buildOrThrow());
}
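
The narrowing above leans on FilterUtil.tryGetSingleVarcharValue to pull a single constant out of the constraint summary. A rough standalone sketch of what such a helper has to do (illustrative only, not necessarily the exact Trino implementation; it assumes io.airlift.slice.Slice, since VARCHAR values travel through the SPI as Slices):

// Illustrative sketch: extract the single bound VARCHAR value for a column
// from a TupleDomain, if the summary pins that column to exactly one value.
static Optional<String> tryGetSingleVarcharValue(TupleDomain<ColumnHandle> summary, ColumnHandle column)
{
    if (summary.isNone()) {
        return Optional.empty();
    }
    Domain domain = summary.getDomains().get().get(column);
    if (domain == null || !domain.isSingleValue()) {
        return Optional.empty();
    }
    // single VARCHAR values are carried as Slices in the Trino SPI
    return Optional.of(((Slice) domain.getSingleValue()).toStringUtf8());
}

With that reading, an equality filter on the schema column collapses schemaFilter to a single value, which is what lets the method skip enumerating every schema.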
Also used: ColumnHandle, Constraint, ConnectorSession, CatalogSchemaName, CatalogSchemaTableName, SchemaTableName (io.trino.spi.connector), Domain, TupleDomain, NullableValue (io.trino.spi.predicate), QualifiedTablePrefix, MetadataListing.listCatalogs/listSchemas/listTables (io.trino.metadata), FilterUtil.tryGetSingleVarcharValue, FullConnectorSession, Session, VerifyException, ImmutableMap, and ImmutableList.toImmutableList.

Example 2 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

Class DecorrelateInnerUnnestWithGlobalAggregation, method apply:

@Override
public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) {
    // find global aggregation in subquery
    List<PlanNode> globalAggregations = PlanNodeSearcher.searchFrom(correlatedJoinNode.getSubquery(), context.getLookup())
            .where(DecorrelateInnerUnnestWithGlobalAggregation::isGlobalAggregation)
            .recurseOnlyWhen(node -> node instanceof ProjectNode || isGlobalAggregation(node))
            .findAll();
    if (globalAggregations.isEmpty()) {
        return Result.empty();
    }
    // if there are multiple global aggregations, the one closest to the source is the "reducing" aggregation, because it reduces multiple input rows to a single output row
    AggregationNode reducingAggregation = (AggregationNode) globalAggregations.get(globalAggregations.size() - 1);
    // find unnest in subquery
    Optional<UnnestNode> subqueryUnnest = PlanNodeSearcher.searchFrom(reducingAggregation.getSource(), context.getLookup())
            .where(node -> isSupportedUnnest(node, correlatedJoinNode.getCorrelation(), context.getLookup()))
            .recurseOnlyWhen(node -> node instanceof ProjectNode || isGroupedAggregation(node))
            .findFirst();
    if (subqueryUnnest.isEmpty()) {
        return Result.empty();
    }
    UnnestNode unnestNode = subqueryUnnest.get();
    // assign unique id to input rows to restore semantics of aggregations after rewrite
    PlanNode input = new AssignUniqueId(context.getIdAllocator().getNextId(), correlatedJoinNode.getInput(), context.getSymbolAllocator().newSymbol("unique", BIGINT));
    // pre-project unnest symbols if they were pre-projected in subquery
    // The correlated UnnestNode either unnests correlation symbols directly, or unnests symbols produced by a projection that uses only correlation symbols.
    // Here, any underlying projection that was a source of the correlated UnnestNode is appended as a source of the rewritten UnnestNode.
    // If the projection is not necessary for UnnestNode (i.e. it does not produce any unnest symbols), it should be pruned afterwards.
    PlanNode unnestSource = context.getLookup().resolve(unnestNode.getSource());
    if (unnestSource instanceof ProjectNode) {
        ProjectNode sourceProjection = (ProjectNode) unnestSource;
        input = new ProjectNode(
                sourceProjection.getId(),
                input,
                Assignments.builder()
                        .putIdentities(input.getOutputSymbols())
                        .putAll(sourceProjection.getAssignments())
                        .build());
    }
    // rewrite correlated join to UnnestNode
    Symbol ordinalitySymbol = unnestNode.getOrdinalitySymbol().orElseGet(() -> context.getSymbolAllocator().newSymbol("ordinality", BIGINT));
    UnnestNode rewrittenUnnest = new UnnestNode(
            context.getIdAllocator().getNextId(),
            input,
            input.getOutputSymbols(),
            unnestNode.getMappings(),
            Optional.of(ordinalitySymbol),
            LEFT,
            Optional.empty());
    // append mask symbol based on ordinality to distinguish between the unnested rows and synthetic null rows
    Symbol mask = context.getSymbolAllocator().newSymbol("mask", BOOLEAN);
    ProjectNode sourceWithMask = new ProjectNode(
            context.getIdAllocator().getNextId(),
            rewrittenUnnest,
            Assignments.builder()
                    .putIdentities(rewrittenUnnest.getOutputSymbols())
                    .put(mask, new IsNotNullPredicate(ordinalitySymbol.toSymbolReference()))
                    .build());
    // restore all projections, grouped aggregations and global aggregations from the subquery
    PlanNode result = rewriteNodeSequence(
            context.getLookup().resolve(correlatedJoinNode.getSubquery()),
            input.getOutputSymbols(),
            mask,
            sourceWithMask,
            reducingAggregation.getId(),
            unnestNode.getId(),
            context.getSymbolAllocator(),
            context.getIdAllocator(),
            context.getLookup());
    // restrict outputs
    return Result.ofPlanNode(restrictOutputs(context.getIdAllocator(), result, ImmutableSet.copyOf(correlatedJoinNode.getOutputSymbols())).orElse(result));
}
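
The searches above rely on an isGlobalAggregation predicate that is not part of the snippet. One plausible shape for it, assuming Trino's AggregationNode API (getGroupingKeys, getStep) and the SINGLE step constant imported by this rule:

// Illustrative sketch: a "global" aggregation has no grouping keys,
// so it reduces its entire input down to a single output row.
private static boolean isGlobalAggregation(PlanNode node)
{
    if (!(node instanceof AggregationNode)) {
        return false;
    }
    AggregationNode aggregation = (AggregationNode) node;
    return aggregation.getGroupingKeys().isEmpty() && aggregation.getStep() == SINGLE;
}

The LEFT unnest plus the mask projection then let rewriteWithMasks re-apply such aggregations per input row: rows synthesized by the left unnest carry a null ordinality, so mask evaluates to false for them and the masked aggregations ignore them.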
Also used: PlanNode, AggregationNode, UnnestNode, ProjectNode, AssignUniqueId, CorrelatedJoinNode, Assignments (io.trino.sql.planner.plan), Symbol, SymbolAllocator (io.trino.sql.planner), PlanNodeSearcher, AggregationDecorrelation.rewriteWithMasks, Util.restrictOutputs, IsNotNullPredicate, the LEFT join type, BIGINT, and BOOLEAN.

Example 3 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

Class ThriftMetastoreUtil, method parsePrivilege:

public static Set<HivePrivilegeInfo> parsePrivilege(PrivilegeGrantInfo userGrant, Optional<HivePrincipal> grantee) {
    boolean grantOption = userGrant.isGrantOption();
    String name = userGrant.getPrivilege().toUpperCase(ENGLISH);
    HivePrincipal grantor = new HivePrincipal(fromMetastoreApiPrincipalType(userGrant.getGrantorType()), userGrant.getGrantor());
    switch (name) {
        case "ALL":
            return Arrays.stream(HivePrivilegeInfo.HivePrivilege.values())
                    .map(hivePrivilege -> new HivePrivilegeInfo(hivePrivilege, grantOption, grantor, grantee.orElse(grantor)))
                    .collect(toImmutableSet());
        case "SELECT":
            return ImmutableSet.of(new HivePrivilegeInfo(SELECT, grantOption, grantor, grantee.orElse(grantor)));
        case "INSERT":
            return ImmutableSet.of(new HivePrivilegeInfo(INSERT, grantOption, grantor, grantee.orElse(grantor)));
        case "UPDATE":
            return ImmutableSet.of(new HivePrivilegeInfo(UPDATE, grantOption, grantor, grantee.orElse(grantor)));
        case "DELETE":
            return ImmutableSet.of(new HivePrivilegeInfo(DELETE, grantOption, grantor, grantee.orElse(grantor)));
        case "OWNERSHIP":
            return ImmutableSet.of(new HivePrivilegeInfo(OWNERSHIP, grantOption, grantor, grantee.orElse(grantor)));
        default:
            throw new IllegalArgumentException("Unsupported privilege name: " + name);
    }
}
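
A hedged usage sketch; the five-argument PrivilegeGrantInfo constructor below is the Thrift-generated one (privilege, createTime, grantor, grantorType, grantOption), and the principal names are made up for illustration:

// Map a Hive "select ... with grant option" grant issued by user "admin"
// to user "alice" into Trino's HivePrivilegeInfo representation.
PrivilegeGrantInfo grant = new PrivilegeGrantInfo(
        "select", // privilege name; parsePrivilege upper-cases it
        0, // createTime
        "admin", // grantor
        org.apache.hadoop.hive.metastore.api.PrincipalType.USER, // grantor type (metastore enum)
        true); // grantOption
Set<HivePrivilegeInfo> privileges = parsePrivilege(
        grant,
        Optional.of(new HivePrincipal(io.trino.spi.security.PrincipalType.USER, "alice")));
// yields a single SELECT privilege with grantOption = true and grantee "alice"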
Also used: PrivilegeGrantInfo (org.apache.hadoop.hive.metastore.api), HivePrivilegeInfo, HivePrincipal (io.trino.plugin.hive.metastore), the HivePrivilege constants SELECT, INSERT, UPDATE, DELETE, and OWNERSHIP, ThriftMetastoreUtil.fromMetastoreApiPrincipalType, Arrays, Locale.ENGLISH, and ImmutableSet.toImmutableSet.

Example 4 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

Class SyncPartitionMetadataProcedure, method doSyncPartitionMetadata:

private void doSyncPartitionMetadata(ConnectorSession session, ConnectorAccessControl accessControl, String schemaName, String tableName, String mode, boolean caseSensitive) {
    SyncMode syncMode = toSyncMode(mode);
    HdfsContext hdfsContext = new HdfsContext(session);
    SemiTransactionalHiveMetastore metastore = hiveMetadataFactory.create(session.getIdentity(), true).getMetastore();
    SchemaTableName schemaTableName = new SchemaTableName(schemaName, tableName);
    Table table = metastore.getTable(schemaName, tableName).orElseThrow(() -> new TableNotFoundException(schemaTableName));
    if (table.getPartitionColumns().isEmpty()) {
        throw new TrinoException(INVALID_PROCEDURE_ARGUMENT, "Table is not partitioned: " + schemaTableName);
    }
    if (syncMode == SyncMode.ADD || syncMode == SyncMode.FULL) {
        accessControl.checkCanInsertIntoTable(null, new SchemaTableName(schemaName, tableName));
    }
    if (syncMode == SyncMode.DROP || syncMode == SyncMode.FULL) {
        accessControl.checkCanDeleteFromTable(null, new SchemaTableName(schemaName, tableName));
    }
    Path tableLocation = new Path(table.getStorage().getLocation());
    Set<String> partitionsToAdd;
    Set<String> partitionsToDrop;
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(hdfsContext, tableLocation);
        List<String> partitionsInMetastore = metastore.getPartitionNames(schemaName, tableName).orElseThrow(() -> new TableNotFoundException(schemaTableName));
        List<String> partitionsInFileSystem = listDirectory(fileSystem, fileSystem.getFileStatus(tableLocation), table.getPartitionColumns(), table.getPartitionColumns().size(), caseSensitive).stream()
                .map(fileStatus -> fileStatus.getPath().toUri())
                .map(uri -> tableLocation.toUri().relativize(uri).getPath())
                .collect(toImmutableList());
        // partitions in file system but not in metastore
        partitionsToAdd = difference(partitionsInFileSystem, partitionsInMetastore);
        // partitions in metastore but not in file system
        partitionsToDrop = difference(partitionsInMetastore, partitionsInFileSystem);
    } catch (IOException e) {
        throw new TrinoException(HIVE_FILESYSTEM_ERROR, e);
    }
    syncPartitions(partitionsToAdd, partitionsToDrop, syncMode, metastore, session, table);
}
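
The difference helper is not shown in the snippet; a plausible one-liner over Guava (an assumption, not necessarily the exact Trino code):

// Illustrative sketch: names present in the first list but not the second,
// e.g. partition directories on disk that the metastore does not know about.
private static Set<String> difference(List<String> a, List<String> b)
{
    return Sets.difference(ImmutableSet.copyOf(a), ImmutableSet.copyOf(b));
}

With that reading, ADD mode registers partitionsToAdd, DROP mode removes partitionsToDrop, and FULL does both.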
Also used: SemiTransactionalHiveMetastore, Table, Partition (io.trino.plugin.hive.metastore), HdfsEnvironment and HdfsContext, SchemaTableName, TableNotFoundException, TrinoException, the error codes INVALID_PROCEDURE_ARGUMENT and HIVE_FILESYSTEM_ERROR, Hadoop's Path, FileSystem, and FileStatus, and Guava's Sets.

Example 5 with BOOLEAN

Use of io.trino.spi.type.BooleanType.BOOLEAN in project trino by trinodb.

Class AbstractTestHiveFileFormats, method checkCursor:

protected void checkCursor(RecordCursor cursor, List<TestColumn> testColumns, int rowCount) {
    List<Type> types = testColumns.stream()
            .map(column -> column.getObjectInspector().getTypeName())
            .map(type -> HiveType.valueOf(type).getType(TESTING_TYPE_MANAGER))
            .collect(toImmutableList());
    Map<Type, MethodHandle> distinctFromOperators = types.stream()
            .distinct()
            .collect(toImmutableMap(identity(), HiveTestUtils::distinctFromOperator));
    for (int row = 0; row < rowCount; row++) {
        assertTrue(cursor.advanceNextPosition());
        for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
            TestColumn testColumn = testColumns.get(i);
            Type type = types.get(i);
            Object fieldFromCursor = getFieldFromCursor(cursor, type, i);
            if (fieldFromCursor == null) {
                assertEquals(null, testColumn.getExpectedValue(), "Expected null for column " + testColumn.getName());
            } else if (type instanceof DecimalType) {
                DecimalType decimalType = (DecimalType) type;
                fieldFromCursor = new BigDecimal((BigInteger) fieldFromCursor, decimalType.getScale());
                assertEquals(fieldFromCursor, testColumn.getExpectedValue(), "Wrong value for column " + testColumn.getName());
            } else if (testColumn.getObjectInspector().getTypeName().equals("float")) {
                assertEquals((float) fieldFromCursor, (float) testColumn.getExpectedValue(), (float) EPSILON);
            } else if (testColumn.getObjectInspector().getTypeName().equals("double")) {
                assertEquals((double) fieldFromCursor, (double) testColumn.getExpectedValue(), EPSILON);
            } else if (testColumn.getObjectInspector().getTypeName().equals("tinyint")) {
                assertEquals(((Number) fieldFromCursor).byteValue(), testColumn.getExpectedValue());
            } else if (testColumn.getObjectInspector().getTypeName().equals("smallint")) {
                assertEquals(((Number) fieldFromCursor).shortValue(), testColumn.getExpectedValue());
            } else if (testColumn.getObjectInspector().getTypeName().equals("int")) {
                assertEquals(((Number) fieldFromCursor).intValue(), testColumn.getExpectedValue());
            } else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) {
                assertEquals(fieldFromCursor, testColumn.getExpectedValue(), "Wrong value for column " + testColumn.getName());
            } else {
                Block expected = (Block) testColumn.getExpectedValue();
                Block actual = (Block) fieldFromCursor;
                boolean distinct = isDistinctFrom(distinctFromOperators.get(type), expected, actual);
                assertFalse(distinct, "Wrong value for column: " + testColumn.getName());
            }
        }
    }
    assertFalse(cursor.advanceNextPosition());
}
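
The first statement round-trips Hive type names into Trino SPI types. A minimal standalone illustration of that mapping, assuming the same TESTING_TYPE_MANAGER the test harness uses:

// Illustrative sketch: resolve Hive type names the way checkCursor does.
Type booleanType = HiveType.valueOf("boolean").getType(TESTING_TYPE_MANAGER);
Type decimalType = HiveType.valueOf("decimal(10,2)").getType(TESTING_TYPE_MANAGER);
// BooleanType.BOOLEAN is a singleton, so the lookup is expected to return it
assert booleanType == BOOLEAN;
assert decimalType instanceof DecimalType && ((DecimalType) decimalType).getScale() == 2;

The resolved Type then drives how getFieldFromCursor reads each value and which comparison branch applies: exact equality, epsilon comparison for float and double, or the IS DISTINCT FROM operator for structural types.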
Also used: RecordCursor, Block, MethodHandle, HiveType, TESTING_TYPE_MANAGER, HiveTestUtils.isDistinctFrom and HiveTestUtils::distinctFromOperator, the Trino type classes (Type, DecimalType, and related), BigDecimal, BigInteger, Function.identity, ImmutableMap.toImmutableMap, ImmutableList.toImmutableList, and the TestNG assertions assertTrue, assertFalse, and assertEquals.

Aggregations

BOOLEAN (io.trino.spi.type.BooleanType.BOOLEAN): 25
BIGINT (io.trino.spi.type.BigintType.BIGINT): 24
List (java.util.List): 22
Optional (java.util.Optional): 22
ImmutableList (com.google.common.collect.ImmutableList): 20
ImmutableMap (com.google.common.collect.ImmutableMap): 18
DOUBLE (io.trino.spi.type.DoubleType.DOUBLE): 18
Type (io.trino.spi.type.Type): 18
Map (java.util.Map): 18
INTEGER (io.trino.spi.type.IntegerType.INTEGER): 17
VarcharType (io.trino.spi.type.VarcharType): 17
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 16
DecimalType (io.trino.spi.type.DecimalType): 16
REAL (io.trino.spi.type.RealType.REAL): 16
SMALLINT (io.trino.spi.type.SmallintType.SMALLINT): 16
TINYINT (io.trino.spi.type.TinyintType.TINYINT): 16
Set (java.util.Set): 16
ArrayType (io.trino.spi.type.ArrayType): 15
DATE (io.trino.spi.type.DateType.DATE): 15
String.format (java.lang.String.format): 15