
Example 1 with DiscretePredicates

Use of io.prestosql.spi.connector.DiscretePredicates in project boostkit-bigdata by kunpengcompute.

The class AbstractTestHive, method setupHive:

protected void setupHive(String databaseName) {
    database = databaseName;
    tablePartitionFormat = new SchemaTableName(database, "presto_test_partition_format");
    tableUnpartitioned = new SchemaTableName(database, "presto_test_unpartitioned");
    tableOffline = new SchemaTableName(database, "presto_test_offline");
    tableOfflinePartition = new SchemaTableName(database, "presto_test_offline_partition");
    tableNotReadable = new SchemaTableName(database, "presto_test_not_readable");
    view = new SchemaTableName(database, "presto_test_view");
    invalidTable = new SchemaTableName(database, INVALID_TABLE);
    tableBucketedStringInt = new SchemaTableName(database, "presto_test_bucketed_by_string_int");
    tableBucketedBigintBoolean = new SchemaTableName(database, "presto_test_bucketed_by_bigint_boolean");
    tableBucketedDoubleFloat = new SchemaTableName(database, "presto_test_bucketed_by_double_float");
    tablePartitionSchemaChange = new SchemaTableName(database, "presto_test_partition_schema_change");
    tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "presto_test_partition_schema_change_non_canonical");
    tableBucketEvolution = new SchemaTableName(database, "presto_test_bucket_evolution");
    invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), Optional.empty());
    dsColumn = new HiveColumnHandle("ds", HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, PARTITION_KEY, Optional.empty());
    fileFormatColumn = new HiveColumnHandle("file_format", HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, PARTITION_KEY, Optional.empty());
    dummyColumn = new HiveColumnHandle("dummy", HIVE_INT, parseTypeSignature(StandardTypes.INTEGER), -1, PARTITION_KEY, Optional.empty());
    intColumn = new HiveColumnHandle("t_int", HIVE_INT, parseTypeSignature(StandardTypes.INTEGER), -1, PARTITION_KEY, Optional.empty());
    invalidColumnHandle = new HiveColumnHandle(INVALID_COLUMN, HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), 0, REGULAR, Optional.empty());
    List<ColumnHandle> partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn);
    tablePartitionFormatPartitions = ImmutableList.<HivePartition>builder()
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=textfile/dummy=1",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("textfile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 1L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=sequencefile/dummy=2",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("sequencefile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 2L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rctext/dummy=3",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rctext")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 3L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rcbinary/dummy=4",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rcbinary")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 4L))
                            .build()))
            .build();
    tableUnpartitionedPartitions = ImmutableList.of(new HivePartition(tableUnpartitioned));
    tablePartitionFormatProperties = new ConnectorTableProperties(
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                    fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile")), Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile")), Range.equal(createUnboundedVarcharType(), utf8Slice("rctext")), Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                    dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L), Range.equal(INTEGER, 2L), Range.equal(INTEGER, 3L), Range.equal(INTEGER, 4L)), false))),
            Optional.empty(),
            Optional.empty(),
            Optional.of(new DiscretePredicates(partitionColumns, ImmutableList.of(
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 2L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rctext"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 3L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 4L)), false)))))),
            ImmutableList.of());
    tableUnpartitionedProperties = new ConnectorTableProperties();
}
Also used : HiveColumnHandle.bucketColumnHandle(io.prestosql.plugin.hive.HiveColumnHandle.bucketColumnHandle) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) NullableValue(io.prestosql.spi.predicate.NullableValue) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties)
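
The fixture above is dense, so here is a short hedged sketch (not part of the test class, assuming the fields and static imports from setupHive, plus Guava's Iterables) of what it encodes: getColumns() returns the partition key columns and getPredicates() yields one TupleDomain per partition of presto_test_partition_format.

DiscretePredicates fixture = tablePartitionFormatProperties.getDiscretePredicates().get();
// the three partition key columns, in declaration order
assertEquals(fixture.getColumns(), ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn));
// one discrete tuple domain per partition: textfile, sequencefile, rctext, rcbinary
assertEquals(Iterables.size(fixture.getPredicates()), 4);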

Example 2 with DiscretePredicates

Use of io.prestosql.spi.connector.DiscretePredicates in project boostkit-bigdata by kunpengcompute.

The class HiveMetadata, method getTableProperties:

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    HiveIdentity identity = new HiveIdentity(session);
    HiveTableHandle hiveTable = (HiveTableHandle) table;
    List<ColumnHandle> partitionColumns = ImmutableList.copyOf(hiveTable.getPartitionColumns());
    List<HivePartition> partitions = partitionManager.getOrLoadPartitions(session, metastore, identity, hiveTable);
    TupleDomain<ColumnHandle> predicate = createPredicate(partitionColumns, partitions);
    if (hiveTable.isSuitableToPush()) {
        Table hmsTable = metastore.getTable(identity, hiveTable.getSchemaName(), hiveTable.getTableName()).orElseThrow(() -> new TableNotFoundException(hiveTable.getSchemaTableName()));
        ImmutableMap.Builder<ColumnHandle, Domain> pushedDown = ImmutableMap.builder();
        pushedDown.putAll(hiveTable.getCompactEffectivePredicate().getDomains().get().entrySet().stream().collect(toMap(e -> (ColumnHandle) e.getKey(), e -> e.getValue())));
        predicate = predicate.intersect(withColumnDomains(pushedDown.build()));
    }
    Optional<DiscretePredicates> discretePredicates = Optional.empty();
    if (!partitionColumns.isEmpty()) {
        // Do not create tuple domains for every partition at the same time!
        // There can be a huge number of partitions so use an iterable so
        // all domains do not need to be in memory at the same time.
        Iterable<TupleDomain<ColumnHandle>> partitionDomains = Iterables.transform(partitions, (hivePartition) -> TupleDomain.fromFixedValues(hivePartition.getKeys()));
        discretePredicates = Optional.of(new DiscretePredicates(partitionColumns, partitionDomains));
    }
    Optional<ConnectorTablePartitioning> tablePartitioning = Optional.empty();
    if (HiveSessionProperties.isBucketExecutionEnabled(session) && hiveTable.getBucketHandle().isPresent()) {
        tablePartitioning = hiveTable.getBucketHandle().map(bucketing -> new ConnectorTablePartitioning(new HivePartitioningHandle(bucketing.getBucketingVersion(), bucketing.getReadBucketCount(), bucketing.getColumns().stream().map(HiveColumnHandle::getHiveType).collect(toImmutableList()), OptionalInt.empty()), bucketing.getColumns().stream().map(ColumnHandle.class::cast).collect(toList())));
    }
    return new ConnectorTableProperties(predicate, tablePartitioning, Optional.empty(), discretePredicates, ImmutableList.of());
}
Also used : TableStatistics(io.prestosql.spi.statistics.TableStatistics) StorageFormat(io.prestosql.plugin.hive.metastore.StorageFormat) PartialAndFinalAggregationType(io.prestosql.spi.PartialAndFinalAggregationType) FIELD_DELIM(org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM) HiveUtil.verifyPartitionTypeSupported(io.prestosql.plugin.hive.HiveUtil.verifyPartitionTypeSupported) FileSystem(org.apache.hadoop.fs.FileSystem) HIVE_FILESYSTEM_ERROR(io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR) HiveUtil.hiveColumnHandles(io.prestosql.plugin.hive.HiveUtil.hiveColumnHandles) MetastoreUtil(io.prestosql.plugin.hive.metastore.MetastoreUtil) TableAlreadyExistsException(io.prestosql.spi.connector.TableAlreadyExistsException) NullableValue(io.prestosql.spi.predicate.NullableValue) RoleGrant(io.prestosql.spi.security.RoleGrant) ConnectorVacuumTableHandle(io.prestosql.spi.connector.ConnectorVacuumTableHandle) FileStatus(org.apache.hadoop.fs.FileStatus) SCHEMA_NOT_EMPTY(io.prestosql.spi.StandardErrorCode.SCHEMA_NOT_EMPTY) HiveUtil.getPartitionKeyColumnHandles(io.prestosql.plugin.hive.HiveUtil.getPartitionKeyColumnHandles) ConnectorDeleteAsInsertTableHandle(io.prestosql.spi.connector.ConnectorDeleteAsInsertTableHandle) Future(java.util.concurrent.Future) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) BucketingVersion(io.prestosql.plugin.hive.HiveBucketing.BucketingVersion) ConnectorUpdateTableHandle(io.prestosql.spi.connector.ConnectorUpdateTableHandle) Map(java.util.Map) HiveTableProperties.getPartitionedBy(io.prestosql.plugin.hive.HiveTableProperties.getPartitionedBy) ENGLISH(java.util.Locale.ENGLISH) ConstraintApplicationResult(io.prestosql.spi.connector.ConstraintApplicationResult) SystemTable(io.prestosql.spi.connector.SystemTable) GrantInfo(io.prestosql.spi.security.GrantInfo) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) TableStatisticsMetadata(io.prestosql.spi.statistics.TableStatisticsMetadata) Set(java.util.Set) LOCATION_PROPERTY(io.prestosql.plugin.hive.HiveTableProperties.LOCATION_PROPERTY) HiveTableProperties.getTransactionalValue(io.prestosql.plugin.hive.HiveTableProperties.getTransactionalValue) MANAGED_TABLE(org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE) Collectors.joining(java.util.stream.Collectors.joining) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Stream(java.util.stream.Stream) Table(io.prestosql.plugin.hive.metastore.Table) Privilege(io.prestosql.spi.security.Privilege) INVALID_TABLE_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_TABLE_PROPERTY) HiveTableProperties.isExternalTable(io.prestosql.plugin.hive.HiveTableProperties.isExternalTable) Domain(io.prestosql.spi.predicate.Domain) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) AccessControlMetadata(io.prestosql.plugin.hive.security.AccessControlMetadata) ColumnStatisticMetadata(io.prestosql.spi.statistics.ColumnStatisticMetadata) SortingColumn(io.prestosql.plugin.hive.metastore.SortingColumn) ConnectorVacuumTableInfo(io.prestosql.spi.connector.ConnectorVacuumTableInfo) ORC(io.prestosql.plugin.hive.HiveStorageFormat.ORC) SchemaTablePrefix(io.prestosql.spi.connector.SchemaTablePrefix) Joiner(com.google.common.base.Joiner) Iterables(com.google.common.collect.Iterables) Database(io.prestosql.plugin.hive.metastore.Database) Slice(io.airlift.slice.Slice) 
Partition(io.prestosql.plugin.hive.metastore.Partition) TRANSACTIONAL(io.prestosql.plugin.hive.HiveTableProperties.TRANSACTIONAL) HiveWriterFactory.getSnapshotSubFileIndex(io.prestosql.plugin.hive.HiveWriterFactory.getSnapshotSubFileIndex) Supplier(java.util.function.Supplier) ComputedStatistics(io.prestosql.spi.statistics.ComputedStatistics) ArrayList(java.util.ArrayList) HiveUtil.decodeViewData(io.prestosql.plugin.hive.HiveUtil.decodeViewData) OptionalLong(java.util.OptionalLong) TupleDomain.withColumnDomains(io.prestosql.spi.predicate.TupleDomain.withColumnDomains) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) ConnectorPartitioningHandle(io.prestosql.spi.connector.ConnectorPartitioningHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) RecordCursor(io.prestosql.spi.connector.RecordCursor) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) SemiTransactionalHiveMetastore(io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore) HiveWriterFactory.removeSnapshotFileName(io.prestosql.plugin.hive.HiveWriterFactory.removeSnapshotFileName) ImmutableSortedMap(com.google.common.collect.ImmutableSortedMap) ConnectorOutputTableHandle(io.prestosql.spi.connector.ConnectorOutputTableHandle) Properties(java.util.Properties) HiveUtil.isPrestoView(io.prestosql.plugin.hive.HiveUtil.isPrestoView) TypeManager(io.prestosql.spi.type.TypeManager) IOException(java.io.IOException) USER(io.prestosql.spi.security.PrincipalType.USER) PrincipalPrivileges(io.prestosql.plugin.hive.metastore.PrincipalPrivileges) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) File(java.io.File) ExecutionException(java.util.concurrent.ExecutionException) Streams.stream(com.google.common.collect.Streams.stream) IS_EXTERNAL_TABLE(io.prestosql.plugin.hive.HiveTableProperties.IS_EXTERNAL_TABLE) HiveColumnStatistics(io.prestosql.plugin.hive.metastore.HiveColumnStatistics) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) ConnectorTablePartitioning(io.prestosql.spi.connector.ConnectorTablePartitioning) TableType(org.apache.hadoop.hive.metastore.TableType) ConfigurationUtils(io.prestosql.plugin.hive.util.ConfigurationUtils) PrestoPrincipal(io.prestosql.spi.security.PrestoPrincipal) HiveWriterFactory.isSnapshotFile(io.prestosql.plugin.hive.HiveWriterFactory.isSnapshotFile) VarcharType(io.prestosql.spi.type.VarcharType) HiveTableProperties.getLocation(io.prestosql.plugin.hive.HiveTableProperties.getLocation) ConnectorMetadata(io.prestosql.spi.connector.ConnectorMetadata) Statistics(io.prestosql.plugin.hive.util.Statistics) URL(java.net.URL) HdfsContext(io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) ViewNotFoundException(io.prestosql.spi.connector.ViewNotFoundException) HiveTableProperties.getHiveStorageFormat(io.prestosql.plugin.hive.HiveTableProperties.getHiveStorageFormat) Duration(io.airlift.units.Duration) TableStatisticType(io.prestosql.spi.statistics.TableStatisticType) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) INVALID_ANALYZE_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) Collectors.toMap(java.util.stream.Collectors.toMap) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties) 
Iterables.concat(com.google.common.collect.Iterables.concat) Path(org.apache.hadoop.fs.Path) Type(io.prestosql.spi.type.Type) Splitter(com.google.common.base.Splitter) BIGINT(io.prestosql.spi.type.BigintType.BIGINT) Collectors.toSet(java.util.stream.Collectors.toSet) Constraint(io.prestosql.spi.connector.Constraint) PrestoException(io.prestosql.spi.PrestoException) ImmutableSet(com.google.common.collect.ImmutableSet) HiveWriteUtils.isS3FileSystem(io.prestosql.plugin.hive.HiveWriteUtils.isS3FileSystem) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) HiveUtil.columnExtraInfo(io.prestosql.plugin.hive.HiveUtil.columnExtraInfo) Collections.emptyList(java.util.Collections.emptyList) Collection(java.util.Collection) HiveUtil.encodeViewData(io.prestosql.plugin.hive.HiveUtil.encodeViewData) HiveWriterFactory.isSnapshotSubFile(io.prestosql.plugin.hive.HiveWriterFactory.isSnapshotSubFile) ROW_COUNT(io.prestosql.spi.statistics.TableStatisticType.ROW_COUNT) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) String.format(java.lang.String.format) List(java.util.List) PRESTO_VIEW_FLAG(io.prestosql.plugin.hive.HiveUtil.PRESTO_VIEW_FLAG) VarcharType.createUnboundedVarcharType(io.prestosql.spi.type.VarcharType.createUnboundedVarcharType) Function.identity(java.util.function.Function.identity) ConnectorTransactionHandle(io.prestosql.spi.connector.ConnectorTransactionHandle) Optional(java.util.Optional) MoreObjects.firstNonNull(com.google.common.base.MoreObjects.firstNonNull) NOT_SUPPORTED(io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED) AcidUtils(org.apache.hadoop.hive.ql.io.AcidUtils) HiveStatisticsProvider(io.prestosql.plugin.hive.statistics.HiveStatisticsProvider) JsonCodec(io.airlift.json.JsonCodec) IntStream(java.util.stream.IntStream) ConnectorOutputMetadata(io.prestosql.spi.connector.ConnectorOutputMetadata) Logger(io.airlift.log.Logger) ConnectorViewDefinition(io.prestosql.spi.connector.ConnectorViewDefinition) ConnectorNewTableLayout(io.prestosql.spi.connector.ConnectorNewTableLayout) HashMap(java.util.HashMap) HiveUtil.toPartitionValues(io.prestosql.plugin.hive.HiveUtil.toPartitionValues) HivePrincipal(io.prestosql.plugin.hive.metastore.HivePrincipal) OptionalInt(java.util.OptionalInt) Function(java.util.function.Function) ColumnStatisticType(io.prestosql.spi.statistics.ColumnStatisticType) InMemoryRecordSet(io.prestosql.spi.connector.InMemoryRecordSet) HashSet(java.util.HashSet) HiveTableProperties.getExternalLocation(io.prestosql.plugin.hive.HiveTableProperties.getExternalLocation) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) OpenCSVSerde(org.apache.hadoop.hive.serde2.OpenCSVSerde) INVALID_SCHEMA_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) Objects.requireNonNull(java.util.Objects.requireNonNull) Suppliers(com.google.common.base.Suppliers) NoSuchElementException(java.util.NoSuchElementException) Block(io.prestosql.spi.block.Block) VerifyException(com.google.common.base.VerifyException) Collections.emptyMap(java.util.Collections.emptyMap) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) MalformedURLException(java.net.MalformedURLException) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) TupleDomain(io.prestosql.spi.predicate.TupleDomain) 
NON_INHERITABLE_PROPERTIES(io.prestosql.plugin.hive.HiveTableProperties.NON_INHERITABLE_PROPERTIES) Maps(com.google.common.collect.Maps) PRIMITIVE(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) Column(io.prestosql.plugin.hive.metastore.Column) GENERIC_USER_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_USER_ERROR) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Comparator(java.util.Comparator) ConnectorInsertTableHandle(io.prestosql.spi.connector.ConnectorInsertTableHandle) HiveBucketing.bucketedOnTimestamp(io.prestosql.plugin.hive.HiveBucketing.bucketedOnTimestamp) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) SystemTable(io.prestosql.spi.connector.SystemTable) Table(io.prestosql.plugin.hive.metastore.Table) HiveTableProperties.isExternalTable(io.prestosql.plugin.hive.HiveTableProperties.isExternalTable) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) ConnectorTablePartitioning(io.prestosql.spi.connector.ConnectorTablePartitioning) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) ImmutableMap(com.google.common.collect.ImmutableMap) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) TupleDomain(io.prestosql.spi.predicate.TupleDomain) Domain(io.prestosql.spi.predicate.Domain) TupleDomain(io.prestosql.spi.predicate.TupleDomain) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties)
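
The comment inside getTableProperties is the key design point: Iterables.transform builds the per-partition TupleDomains lazily, only when a consumer walks getPredicates(). A minimal sketch of the same pattern, reusing the tablePartitionFormatPartitions and partitionColumns fixtures from Example 1 (a hypothetical pairing for illustration, not code from the connector):

// Lazy view: no TupleDomain exists until the iterable is walked.
Iterable<TupleDomain<ColumnHandle>> lazyDomains =
        Iterables.transform(tablePartitionFormatPartitions, partition -> TupleDomain.fromFixedValues(partition.getKeys()));
DiscretePredicates lazyPredicates = new DiscretePredicates(partitionColumns, lazyDomains);
int count = 0;
for (TupleDomain<ColumnHandle> domain : lazyPredicates.getPredicates()) {
    count++; // each domain is created here, one partition at a time, and can be discarded after use
}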

Example 3 with DiscretePredicates

Use of io.prestosql.spi.connector.DiscretePredicates in project hetu-core by openlookeng.

The class HiveMetadata, method getTableProperties:

@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    HiveIdentity identity = new HiveIdentity(session);
    HiveTableHandle hiveTable = (HiveTableHandle) table;
    List<ColumnHandle> partitionColumns = ImmutableList.copyOf(hiveTable.getPartitionColumns());
    List<HivePartition> partitions = partitionManager.getOrLoadPartitions(session, metastore, identity, hiveTable);
    TupleDomain<ColumnHandle> predicate = createPredicate(partitionColumns, partitions);
    if (hiveTable.isSuitableToPush()) {
        Table hmsTable = metastore.getTable(identity, hiveTable.getSchemaName(), hiveTable.getTableName()).orElseThrow(() -> new TableNotFoundException(hiveTable.getSchemaTableName()));
        ImmutableMap.Builder<ColumnHandle, Domain> pushedDown = ImmutableMap.builder();
        pushedDown.putAll(hiveTable.getCompactEffectivePredicate().getDomains().get().entrySet().stream().collect(toMap(e -> (ColumnHandle) e.getKey(), e -> e.getValue())));
        predicate = predicate.intersect(withColumnDomains(pushedDown.build()));
    }
    Optional<DiscretePredicates> discretePredicates = Optional.empty();
    if (!partitionColumns.isEmpty()) {
        // Do not create tuple domains for every partition at the same time!
        // There can be a huge number of partitions so use an iterable so
        // all domains do not need to be in memory at the same time.
        Iterable<TupleDomain<ColumnHandle>> partitionDomains = Iterables.transform(partitions, (hivePartition) -> TupleDomain.fromFixedValues(hivePartition.getKeys()));
        discretePredicates = Optional.of(new DiscretePredicates(partitionColumns, partitionDomains));
    }
    Optional<ConnectorTablePartitioning> tablePartitioning = Optional.empty();
    if (HiveSessionProperties.isBucketExecutionEnabled(session) && hiveTable.getBucketHandle().isPresent()) {
        tablePartitioning = hiveTable.getBucketHandle().map(bucketing -> new ConnectorTablePartitioning(new HivePartitioningHandle(bucketing.getBucketingVersion(), bucketing.getReadBucketCount(), bucketing.getColumns().stream().map(HiveColumnHandle::getHiveType).collect(toImmutableList()), OptionalInt.empty()), bucketing.getColumns().stream().map(ColumnHandle.class::cast).collect(toList())));
    }
    return new ConnectorTableProperties(predicate, tablePartitioning, Optional.empty(), discretePredicates, ImmutableList.of());
}
Also used : TableStatistics(io.prestosql.spi.statistics.TableStatistics) StorageFormat(io.prestosql.plugin.hive.metastore.StorageFormat) PartialAndFinalAggregationType(io.prestosql.spi.PartialAndFinalAggregationType) FIELD_DELIM(org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM) HiveUtil.verifyPartitionTypeSupported(io.prestosql.plugin.hive.HiveUtil.verifyPartitionTypeSupported) FileSystem(org.apache.hadoop.fs.FileSystem) HIVE_FILESYSTEM_ERROR(io.prestosql.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR) HiveUtil.hiveColumnHandles(io.prestosql.plugin.hive.HiveUtil.hiveColumnHandles) MetastoreUtil(io.prestosql.plugin.hive.metastore.MetastoreUtil) TableAlreadyExistsException(io.prestosql.spi.connector.TableAlreadyExistsException) NullableValue(io.prestosql.spi.predicate.NullableValue) RoleGrant(io.prestosql.spi.security.RoleGrant) ConnectorVacuumTableHandle(io.prestosql.spi.connector.ConnectorVacuumTableHandle) FileStatus(org.apache.hadoop.fs.FileStatus) SCHEMA_NOT_EMPTY(io.prestosql.spi.StandardErrorCode.SCHEMA_NOT_EMPTY) HiveUtil.getPartitionKeyColumnHandles(io.prestosql.plugin.hive.HiveUtil.getPartitionKeyColumnHandles) ConnectorDeleteAsInsertTableHandle(io.prestosql.spi.connector.ConnectorDeleteAsInsertTableHandle) Future(java.util.concurrent.Future) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) BucketingVersion(io.prestosql.plugin.hive.HiveBucketing.BucketingVersion) ConnectorUpdateTableHandle(io.prestosql.spi.connector.ConnectorUpdateTableHandle) Map(java.util.Map) HiveTableProperties.getPartitionedBy(io.prestosql.plugin.hive.HiveTableProperties.getPartitionedBy) ENGLISH(java.util.Locale.ENGLISH) ConstraintApplicationResult(io.prestosql.spi.connector.ConstraintApplicationResult) SystemTable(io.prestosql.spi.connector.SystemTable) GrantInfo(io.prestosql.spi.security.GrantInfo) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) TableStatisticsMetadata(io.prestosql.spi.statistics.TableStatisticsMetadata) Set(java.util.Set) LOCATION_PROPERTY(io.prestosql.plugin.hive.HiveTableProperties.LOCATION_PROPERTY) HiveTableProperties.getTransactionalValue(io.prestosql.plugin.hive.HiveTableProperties.getTransactionalValue) MANAGED_TABLE(org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE) Collectors.joining(java.util.stream.Collectors.joining) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Stream(java.util.stream.Stream) Table(io.prestosql.plugin.hive.metastore.Table) Privilege(io.prestosql.spi.security.Privilege) INVALID_TABLE_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_TABLE_PROPERTY) HiveTableProperties.isExternalTable(io.prestosql.plugin.hive.HiveTableProperties.isExternalTable) Domain(io.prestosql.spi.predicate.Domain) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) AccessControlMetadata(io.prestosql.plugin.hive.security.AccessControlMetadata) ColumnStatisticMetadata(io.prestosql.spi.statistics.ColumnStatisticMetadata) SortingColumn(io.prestosql.plugin.hive.metastore.SortingColumn) ConnectorVacuumTableInfo(io.prestosql.spi.connector.ConnectorVacuumTableInfo) ORC(io.prestosql.plugin.hive.HiveStorageFormat.ORC) SchemaTablePrefix(io.prestosql.spi.connector.SchemaTablePrefix) Joiner(com.google.common.base.Joiner) Iterables(com.google.common.collect.Iterables) Database(io.prestosql.plugin.hive.metastore.Database) Slice(io.airlift.slice.Slice) 
Partition(io.prestosql.plugin.hive.metastore.Partition) TRANSACTIONAL(io.prestosql.plugin.hive.HiveTableProperties.TRANSACTIONAL) HiveWriterFactory.getSnapshotSubFileIndex(io.prestosql.plugin.hive.HiveWriterFactory.getSnapshotSubFileIndex) Supplier(java.util.function.Supplier) ComputedStatistics(io.prestosql.spi.statistics.ComputedStatistics) ArrayList(java.util.ArrayList) HiveUtil.decodeViewData(io.prestosql.plugin.hive.HiveUtil.decodeViewData) OptionalLong(java.util.OptionalLong) TupleDomain.withColumnDomains(io.prestosql.spi.predicate.TupleDomain.withColumnDomains) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) ConnectorPartitioningHandle(io.prestosql.spi.connector.ConnectorPartitioningHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) RecordCursor(io.prestosql.spi.connector.RecordCursor) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) SemiTransactionalHiveMetastore(io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore) HiveWriterFactory.removeSnapshotFileName(io.prestosql.plugin.hive.HiveWriterFactory.removeSnapshotFileName) ImmutableSortedMap(com.google.common.collect.ImmutableSortedMap) ConnectorOutputTableHandle(io.prestosql.spi.connector.ConnectorOutputTableHandle) Properties(java.util.Properties) HiveUtil.isPrestoView(io.prestosql.plugin.hive.HiveUtil.isPrestoView) TypeManager(io.prestosql.spi.type.TypeManager) IOException(java.io.IOException) USER(io.prestosql.spi.security.PrincipalType.USER) PrincipalPrivileges(io.prestosql.plugin.hive.metastore.PrincipalPrivileges) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) File(java.io.File) ExecutionException(java.util.concurrent.ExecutionException) Streams.stream(com.google.common.collect.Streams.stream) IS_EXTERNAL_TABLE(io.prestosql.plugin.hive.HiveTableProperties.IS_EXTERNAL_TABLE) HiveColumnStatistics(io.prestosql.plugin.hive.metastore.HiveColumnStatistics) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) ConnectorTablePartitioning(io.prestosql.spi.connector.ConnectorTablePartitioning) TableType(org.apache.hadoop.hive.metastore.TableType) ConfigurationUtils(io.prestosql.plugin.hive.util.ConfigurationUtils) PrestoPrincipal(io.prestosql.spi.security.PrestoPrincipal) HiveWriterFactory.isSnapshotFile(io.prestosql.plugin.hive.HiveWriterFactory.isSnapshotFile) VarcharType(io.prestosql.spi.type.VarcharType) HiveTableProperties.getLocation(io.prestosql.plugin.hive.HiveTableProperties.getLocation) ConnectorMetadata(io.prestosql.spi.connector.ConnectorMetadata) Statistics(io.prestosql.plugin.hive.util.Statistics) URL(java.net.URL) HdfsContext(io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) ViewNotFoundException(io.prestosql.spi.connector.ViewNotFoundException) HiveTableProperties.getHiveStorageFormat(io.prestosql.plugin.hive.HiveTableProperties.getHiveStorageFormat) Duration(io.airlift.units.Duration) TableStatisticType(io.prestosql.spi.statistics.TableStatisticType) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) INVALID_ANALYZE_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) Collectors.toMap(java.util.stream.Collectors.toMap) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties) 
Iterables.concat(com.google.common.collect.Iterables.concat) Path(org.apache.hadoop.fs.Path) Type(io.prestosql.spi.type.Type) Splitter(com.google.common.base.Splitter) BIGINT(io.prestosql.spi.type.BigintType.BIGINT) Collectors.toSet(java.util.stream.Collectors.toSet) Constraint(io.prestosql.spi.connector.Constraint) PrestoException(io.prestosql.spi.PrestoException) ImmutableSet(com.google.common.collect.ImmutableSet) HiveWriteUtils.isS3FileSystem(io.prestosql.plugin.hive.HiveWriteUtils.isS3FileSystem) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) HiveUtil.columnExtraInfo(io.prestosql.plugin.hive.HiveUtil.columnExtraInfo) Collections.emptyList(java.util.Collections.emptyList) Collection(java.util.Collection) HiveUtil.encodeViewData(io.prestosql.plugin.hive.HiveUtil.encodeViewData) HiveWriterFactory.isSnapshotSubFile(io.prestosql.plugin.hive.HiveWriterFactory.isSnapshotSubFile) ROW_COUNT(io.prestosql.spi.statistics.TableStatisticType.ROW_COUNT) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) String.format(java.lang.String.format) List(java.util.List) PRESTO_VIEW_FLAG(io.prestosql.plugin.hive.HiveUtil.PRESTO_VIEW_FLAG) VarcharType.createUnboundedVarcharType(io.prestosql.spi.type.VarcharType.createUnboundedVarcharType) Function.identity(java.util.function.Function.identity) ConnectorTransactionHandle(io.prestosql.spi.connector.ConnectorTransactionHandle) Optional(java.util.Optional) MoreObjects.firstNonNull(com.google.common.base.MoreObjects.firstNonNull) NOT_SUPPORTED(io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED) AcidUtils(org.apache.hadoop.hive.ql.io.AcidUtils) HiveStatisticsProvider(io.prestosql.plugin.hive.statistics.HiveStatisticsProvider) JsonCodec(io.airlift.json.JsonCodec) IntStream(java.util.stream.IntStream) ConnectorOutputMetadata(io.prestosql.spi.connector.ConnectorOutputMetadata) Logger(io.airlift.log.Logger) ConnectorViewDefinition(io.prestosql.spi.connector.ConnectorViewDefinition) ConnectorNewTableLayout(io.prestosql.spi.connector.ConnectorNewTableLayout) HashMap(java.util.HashMap) HiveUtil.toPartitionValues(io.prestosql.plugin.hive.HiveUtil.toPartitionValues) HivePrincipal(io.prestosql.plugin.hive.metastore.HivePrincipal) OptionalInt(java.util.OptionalInt) Function(java.util.function.Function) ColumnStatisticType(io.prestosql.spi.statistics.ColumnStatisticType) InMemoryRecordSet(io.prestosql.spi.connector.InMemoryRecordSet) HashSet(java.util.HashSet) HiveTableProperties.getExternalLocation(io.prestosql.plugin.hive.HiveTableProperties.getExternalLocation) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) OpenCSVSerde(org.apache.hadoop.hive.serde2.OpenCSVSerde) INVALID_SCHEMA_PROPERTY(io.prestosql.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) Objects.requireNonNull(java.util.Objects.requireNonNull) Suppliers(com.google.common.base.Suppliers) NoSuchElementException(java.util.NoSuchElementException) Block(io.prestosql.spi.block.Block) VerifyException(com.google.common.base.VerifyException) Collections.emptyMap(java.util.Collections.emptyMap) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) MalformedURLException(java.net.MalformedURLException) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) TupleDomain(io.prestosql.spi.predicate.TupleDomain) 
NON_INHERITABLE_PROPERTIES(io.prestosql.plugin.hive.HiveTableProperties.NON_INHERITABLE_PROPERTIES) Maps(com.google.common.collect.Maps) PRIMITIVE(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) Column(io.prestosql.plugin.hive.metastore.Column) GENERIC_USER_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_USER_ERROR) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Comparator(java.util.Comparator) ConnectorInsertTableHandle(io.prestosql.spi.connector.ConnectorInsertTableHandle) HiveBucketing.bucketedOnTimestamp(io.prestosql.plugin.hive.HiveBucketing.bucketedOnTimestamp) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) SystemTable(io.prestosql.spi.connector.SystemTable) Table(io.prestosql.plugin.hive.metastore.Table) HiveTableProperties.isExternalTable(io.prestosql.plugin.hive.HiveTableProperties.isExternalTable) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) ConnectorTablePartitioning(io.prestosql.spi.connector.ConnectorTablePartitioning) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) ImmutableMap(com.google.common.collect.ImmutableMap) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) TupleDomain(io.prestosql.spi.predicate.TupleDomain) Domain(io.prestosql.spi.predicate.Domain) TupleDomain(io.prestosql.spi.predicate.TupleDomain) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties)
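
In the isSuitableToPush branch above, the pushed-down compact predicate is intersected with the partition predicate. A small hedged illustration with hypothetical domains (the column handles are the ones from Example 1; the values are made up for the sketch):

// Partition pruning already fixed ds; the pushed-down predicate additionally fixes dummy.
TupleDomain<ColumnHandle> partitionPredicate = TupleDomain.withColumnDomains(ImmutableMap.of(
        dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false)));
TupleDomain<ColumnHandle> pushedDown = TupleDomain.withColumnDomains(ImmutableMap.of(
        dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L)), false)));
// intersect() keeps both constraints; contradictory domains would yield TupleDomain.none().
TupleDomain<ColumnHandle> combined = partitionPredicate.intersect(pushedDown);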

Example 4 with DiscretePredicates

Use of io.prestosql.spi.connector.DiscretePredicates in project hetu-core by openlookeng.

The class AbstractTestHive, method setupHive:

protected void setupHive(String databaseName) {
    database = databaseName;
    tablePartitionFormat = new SchemaTableName(database, "presto_test_partition_format");
    tableUnpartitioned = new SchemaTableName(database, "presto_test_unpartitioned");
    tableOffline = new SchemaTableName(database, "presto_test_offline");
    tableOfflinePartition = new SchemaTableName(database, "presto_test_offline_partition");
    tableNotReadable = new SchemaTableName(database, "presto_test_not_readable");
    view = new SchemaTableName(database, "presto_test_view");
    invalidTable = new SchemaTableName(database, INVALID_TABLE);
    tableBucketedStringInt = new SchemaTableName(database, "presto_test_bucketed_by_string_int");
    tableBucketedBigintBoolean = new SchemaTableName(database, "presto_test_bucketed_by_bigint_boolean");
    tableBucketedDoubleFloat = new SchemaTableName(database, "presto_test_bucketed_by_double_float");
    tablePartitionSchemaChange = new SchemaTableName(database, "presto_test_partition_schema_change");
    tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "presto_test_partition_schema_change_non_canonical");
    tableBucketEvolution = new SchemaTableName(database, "presto_test_bucket_evolution");
    invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), Optional.empty());
    dsColumn = new HiveColumnHandle("ds", HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, PARTITION_KEY, Optional.empty());
    fileFormatColumn = new HiveColumnHandle("file_format", HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, PARTITION_KEY, Optional.empty());
    dummyColumn = new HiveColumnHandle("dummy", HIVE_INT, parseTypeSignature(StandardTypes.INTEGER), -1, PARTITION_KEY, Optional.empty());
    intColumn = new HiveColumnHandle("t_int", HIVE_INT, parseTypeSignature(StandardTypes.INTEGER), -1, PARTITION_KEY, Optional.empty());
    invalidColumnHandle = new HiveColumnHandle(INVALID_COLUMN, HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), 0, REGULAR, Optional.empty());
    List<ColumnHandle> partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn);
    tablePartitionFormatPartitions = ImmutableList.<HivePartition>builder()
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=textfile/dummy=1",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("textfile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 1L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=sequencefile/dummy=2",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("sequencefile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 2L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rctext/dummy=3",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rctext")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 3L))
                            .build()))
            .add(new HivePartition(tablePartitionFormat, "ds=2012-12-29/file_format=rcbinary/dummy=4",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rcbinary")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 4L))
                            .build()))
            .build();
    tableUnpartitionedPartitions = ImmutableList.of(new HivePartition(tableUnpartitioned));
    tablePartitionFormatProperties = new ConnectorTableProperties(
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                    fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile")), Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile")), Range.equal(createUnboundedVarcharType(), utf8Slice("rctext")), Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                    dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L), Range.equal(INTEGER, 2L), Range.equal(INTEGER, 3L), Range.equal(INTEGER, 4L)), false))),
            Optional.empty(),
            Optional.empty(),
            Optional.of(new DiscretePredicates(partitionColumns, ImmutableList.of(
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 2L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rctext"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 3L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 4L)), false)))))),
            ImmutableList.of());
    tableUnpartitionedProperties = new ConnectorTableProperties();
}
Also used : HiveColumnHandle.bucketColumnHandle(io.prestosql.plugin.hive.HiveColumnHandle.bucketColumnHandle) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) NullableValue(io.prestosql.spi.predicate.NullableValue) DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) ConnectorTableProperties(io.prestosql.spi.connector.ConnectorTableProperties)

Example 5 with DiscretePredicates

Use of io.prestosql.spi.connector.DiscretePredicates in project hetu-core by openlookeng.

The class AbstractTestHive, method assertExpectedTableProperties:

protected void assertExpectedTableProperties(ConnectorTableProperties actualProperties, ConnectorTableProperties expectedProperties) {
    assertEquals(actualProperties.getPredicate(), expectedProperties.getPredicate());
    assertEquals(actualProperties.getDiscretePredicates().isPresent(), expectedProperties.getDiscretePredicates().isPresent());
    actualProperties.getDiscretePredicates().ifPresent(actual -> {
        DiscretePredicates expected = expectedProperties.getDiscretePredicates().get();
        assertEquals(actual.getColumns(), expected.getColumns());
        assertEqualsIgnoreOrder(actual.getPredicates(), expected.getPredicates());
    });
    assertEquals(actualProperties.getStreamPartitioningColumns(), expectedProperties.getStreamPartitioningColumns());
    assertEquals(actualProperties.getLocalProperties(), expectedProperties.getLocalProperties());
}
Also used : DiscretePredicates(io.prestosql.spi.connector.DiscretePredicates)
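
A hedged usage sketch tying this helper to the earlier examples: the fixture from setupHive can be compared with what getTableProperties reports. Here metadata, session and tableHandle are hypothetical local names, not fields of the test class.

// Hypothetical call site inside a test: 'metadata' is a ConnectorMetadata for the Hive connector,
// 'tableHandle' points at presto_test_partition_format.
ConnectorTableProperties actual = metadata.getTableProperties(session, tableHandle);
assertExpectedTableProperties(actual, tablePartitionFormatProperties);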

Aggregations

DiscretePredicates (io.prestosql.spi.connector.DiscretePredicates): 6
ColumnHandle (io.prestosql.spi.connector.ColumnHandle): 4
ConnectorTableProperties (io.prestosql.spi.connector.ConnectorTableProperties): 4
SchemaTableName (io.prestosql.spi.connector.SchemaTableName): 4
NullableValue (io.prestosql.spi.predicate.NullableValue): 4
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2
Joiner (com.google.common.base.Joiner): 2
MoreObjects.firstNonNull (com.google.common.base.MoreObjects.firstNonNull): 2
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 2
Splitter (com.google.common.base.Splitter): 2
Suppliers (com.google.common.base.Suppliers): 2
Verify.verify (com.google.common.base.Verify.verify): 2
VerifyException (com.google.common.base.VerifyException): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 2
ImmutableMap (com.google.common.collect.ImmutableMap): 2
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap): 2
ImmutableSet (com.google.common.collect.ImmutableSet): 2
ImmutableSet.toImmutableSet (com.google.common.collect.ImmutableSet.toImmutableSet): 2
ImmutableSortedMap (com.google.common.collect.ImmutableSortedMap): 2