Example 6 with HiveBasicStatistics

Use of io.trino.plugin.hive.HiveBasicStatistics in project trino by trinodb.

From the class TestMetastoreHiveStatisticsProvider, method testGetTableStatistics:

@Test
public void testGetTableStatistics() {
    String partitionName = "p1=string1/p2=1234";
    PartitionStatistics statistics = PartitionStatistics.builder()
            .setBasicStatistics(new HiveBasicStatistics(OptionalLong.empty(), OptionalLong.of(1000), OptionalLong.empty(), OptionalLong.empty()))
            .setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(-100), OptionalLong.of(100), OptionalLong.of(500), OptionalLong.of(300))))
            .build();
    MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(partitionName, statistics));
    HiveColumnHandle columnHandle = createBaseColumn(COLUMN, 2, HIVE_LONG, BIGINT, REGULAR, Optional.empty());
    TableStatistics expected = TableStatistics.builder()
            .setRowCount(Estimate.of(1000))
            .setColumnStatistics(PARTITION_COLUMN_1, ColumnStatistics.builder()
                    .setDataSize(Estimate.of(7000))
                    .setNullsFraction(Estimate.of(0))
                    .setDistinctValuesCount(Estimate.of(1))
                    .build())
            .setColumnStatistics(PARTITION_COLUMN_2, ColumnStatistics.builder()
                    .setRange(new DoubleRange(1234, 1234))
                    .setNullsFraction(Estimate.of(0))
                    .setDistinctValuesCount(Estimate.of(1))
                    .build())
            .setColumnStatistics(columnHandle, ColumnStatistics.builder()
                    .setRange(new DoubleRange(-100, 100))
                    .setNullsFraction(Estimate.of(0.5))
                    .setDistinctValuesCount(Estimate.of(300))
                    .build())
            .build();
    assertEquals(
            statisticsProvider.getTableStatistics(
                    SESSION,
                    TABLE,
                    ImmutableMap.of("p1", PARTITION_COLUMN_1, "p2", PARTITION_COLUMN_2, COLUMN, columnHandle),
                    ImmutableMap.of("p1", VARCHAR, "p2", BIGINT, COLUMN, BIGINT),
                    ImmutableList.of(partition(partitionName))),
            expected);
}
Also used : DoubleRange(io.trino.spi.statistics.DoubleRange) MetastoreHiveStatisticsProvider.validatePartitionStatistics(io.trino.plugin.hive.statistics.MetastoreHiveStatisticsProvider.validatePartitionStatistics) PartitionStatistics(io.trino.plugin.hive.PartitionStatistics) TableStatistics(io.trino.spi.statistics.TableStatistics) HiveBasicStatistics(io.trino.plugin.hive.HiveBasicStatistics) HiveColumnHandle(io.trino.plugin.hive.HiveColumnHandle) Test(org.testng.annotations.Test)
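
For orientation: the four constructor arguments of HiveBasicStatistics correspond, in order, to fileCount, rowCount, inMemoryDataSizeInBytes, and onDiskDataSizeInBytes. This ordering is inferred from the validation messages in Example 8 below rather than stated in this snippet. A minimal sketch of the statistics object the test builds:

HiveBasicStatistics rowCountOnly = new HiveBasicStatistics(
        OptionalLong.empty(),   // fileCount: unknown
        OptionalLong.of(1000),  // rowCount: the only populated value, as in the test above
        OptionalLong.empty(),   // inMemoryDataSizeInBytes: unknown
        OptionalLong.empty());  // onDiskDataSizeInBytes: unknown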

Example 7 with HiveBasicStatistics

Use of io.trino.plugin.hive.HiveBasicStatistics in project trino by trinodb.

From the class TestMetastoreHiveStatisticsProvider, method testGetTableStatisticsValidationFailure:

@Test
public void testGetTableStatisticsValidationFailure() {
    PartitionStatistics corruptedStatistics = PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(-1, 0, 0, 0)).build();
    String partitionName = "p1=string1/p2=1234";
    MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> ImmutableMap.of(partitionName, corruptedStatistics));
    assertThatThrownBy(() -> statisticsProvider.getTableStatistics(
            getHiveSession(new HiveConfig().setIgnoreCorruptedStatistics(false)),
            TABLE,
            ImmutableMap.of(),
            ImmutableMap.of(),
            ImmutableList.of(partition(partitionName))))
            .isInstanceOf(TrinoException.class)
            .hasFieldOrPropertyWithValue("errorCode", HIVE_CORRUPTED_COLUMN_STATISTICS.toErrorCode());
    assertEquals(
            statisticsProvider.getTableStatistics(
                    getHiveSession(new HiveConfig().setIgnoreCorruptedStatistics(true)),
                    TABLE,
                    ImmutableMap.of(),
                    ImmutableMap.of(),
                    ImmutableList.of(partition(partitionName))),
            TableStatistics.empty());
}
Also used : MetastoreHiveStatisticsProvider.validatePartitionStatistics(io.trino.plugin.hive.statistics.MetastoreHiveStatisticsProvider.validatePartitionStatistics) PartitionStatistics(io.trino.plugin.hive.PartitionStatistics) TrinoException(io.trino.spi.TrinoException) HiveBasicStatistics(io.trino.plugin.hive.HiveBasicStatistics) HiveConfig(io.trino.plugin.hive.HiveConfig) Test(org.testng.annotations.Test)
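
The two assertions above differ only in the session configuration. A sketch of the two sessions, assuming getHiveSession (the test utility already used in the calls above) returns a ConnectorSession:

// With ignoreCorruptedStatistics(false) the provider throws HIVE_CORRUPTED_COLUMN_STATISTICS;
// with ignoreCorruptedStatistics(true) it falls back to TableStatistics.empty().
ConnectorSession strictSession = getHiveSession(new HiveConfig().setIgnoreCorruptedStatistics(false));
ConnectorSession lenientSession = getHiveSession(new HiveConfig().setIgnoreCorruptedStatistics(true));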

Example 8 with HiveBasicStatistics

Use of io.trino.plugin.hive.HiveBasicStatistics in project trino by trinodb.

From the class TestMetastoreHiveStatisticsProvider, method testValidatePartitionStatistics:

@Test
public void testValidatePartitionStatistics() {
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(-1, 0, 0, 0)).build(), invalidPartitionStatistics("fileCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, -1, 0, 0)).build(), invalidPartitionStatistics("rowCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, -1, 0)).build(), invalidPartitionStatistics("inMemoryDataSizeInBytes must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, -1)).build(), invalidPartitionStatistics("onDiskDataSizeInBytes must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setMaxValueSizeInBytes(-1).build())).build(), invalidColumnStatistics("maxValueSizeInBytes must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setTotalSizeInBytes(-1).build())).build(), invalidColumnStatistics("totalSizeInBytes must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setNullsCount(-1).build())).build(), invalidColumnStatistics("nullsCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setNullsCount(1).build())).build(), invalidColumnStatistics("nullsCount must be less than or equal to rowCount. nullsCount: 1. rowCount: 0."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setDistinctValuesCount(-1).build())).build(), invalidColumnStatistics("distinctValuesCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setDistinctValuesCount(1).build())).build(), invalidColumnStatistics("distinctValuesCount must be less than or equal to rowCount. distinctValuesCount: 1. rowCount: 0."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 1, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, HiveColumnStatistics.builder().setDistinctValuesCount(1).setNullsCount(1).build())).build(), invalidColumnStatistics("distinctValuesCount must be less than or equal to nonNullsCount. distinctValuesCount: 1. nonNullsCount: 0."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createIntegerColumnStatistics(OptionalLong.of(1), OptionalLong.of(-1), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("integerStatistics.min must be less than or equal to integerStatistics.max. integerStatistics.min: 1. integerStatistics.max: -1."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createDoubleColumnStatistics(OptionalDouble.of(1), OptionalDouble.of(-1), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("doubleStatistics.min must be less than or equal to doubleStatistics.max. doubleStatistics.min: 1.0. doubleStatistics.max: -1.0."));
    validatePartitionStatistics(TABLE, ImmutableMap.of(PARTITION, PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createDoubleColumnStatistics(OptionalDouble.of(NaN), OptionalDouble.of(NaN), OptionalLong.empty(), OptionalLong.empty()))).build()));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createDecimalColumnStatistics(Optional.of(BigDecimal.valueOf(1)), Optional.of(BigDecimal.valueOf(-1)), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("decimalStatistics.min must be less than or equal to decimalStatistics.max. decimalStatistics.min: 1. decimalStatistics.max: -1."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createDateColumnStatistics(Optional.of(LocalDate.ofEpochDay(1)), Optional.of(LocalDate.ofEpochDay(-1)), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("dateStatistics.min must be less than or equal to dateStatistics.max. dateStatistics.min: 1970-01-02. dateStatistics.max: 1969-12-31."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createBooleanColumnStatistics(OptionalLong.of(-1), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("trueCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createBooleanColumnStatistics(OptionalLong.empty(), OptionalLong.of(-1), OptionalLong.empty()))).build(), invalidColumnStatistics("falseCount must be greater than or equal to zero: -1"));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createBooleanColumnStatistics(OptionalLong.of(1), OptionalLong.empty(), OptionalLong.empty()))).build(), invalidColumnStatistics("booleanStatistics.trueCount must be less than or equal to rowCount. booleanStatistics.trueCount: 1. rowCount: 0."));
    assertInvalidStatistics(PartitionStatistics.builder().setBasicStatistics(new HiveBasicStatistics(0, 0, 0, 0)).setColumnStatistics(ImmutableMap.of(COLUMN, createBooleanColumnStatistics(OptionalLong.empty(), OptionalLong.of(1), OptionalLong.empty()))).build(), invalidColumnStatistics("booleanStatistics.falseCount must be less than or equal to rowCount. booleanStatistics.falseCount: 1. rowCount: 0."));
}
Also used : HiveBasicStatistics(io.trino.plugin.hive.HiveBasicStatistics) Test(org.testng.annotations.Test)
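
The helpers assertInvalidStatistics, invalidPartitionStatistics, and invalidColumnStatistics are not shown in this excerpt. A plausible sketch of the assertion helper, assuming it delegates to the validatePartitionStatistics call that appears directly in the NaN case above and only matches part of the error message:

private static void assertInvalidStatistics(PartitionStatistics statistics, String expectedMessage)
{
    // Hypothetical: wrap the statistics in a single-partition map and expect validation to fail with the given message.
    assertThatThrownBy(() -> validatePartitionStatistics(TABLE, ImmutableMap.of(PARTITION, statistics)))
            .isInstanceOf(TrinoException.class)
            .hasMessageContaining(expectedMessage);
}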

Example 9 with HiveBasicStatistics

Use of io.trino.plugin.hive.HiveBasicStatistics in project trino by trinodb.

From the class TestThriftMetastoreUtil, method testBasicStatisticsRoundTrip:

@Test
public void testBasicStatisticsRoundTrip() {
    testBasicStatisticsRoundTrip(new HiveBasicStatistics(OptionalLong.empty(), OptionalLong.empty(), OptionalLong.empty(), OptionalLong.empty()));
    testBasicStatisticsRoundTrip(new HiveBasicStatistics(OptionalLong.of(1), OptionalLong.empty(), OptionalLong.of(2), OptionalLong.empty()));
    testBasicStatisticsRoundTrip(new HiveBasicStatistics(OptionalLong.of(1), OptionalLong.of(2), OptionalLong.of(3), OptionalLong.of(4)));
}
Also used : ThriftMetastoreUtil.getHiveBasicStatistics(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.getHiveBasicStatistics) HiveBasicStatistics(io.trino.plugin.hive.HiveBasicStatistics) Test(org.testng.annotations.Test)
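
The private testBasicStatisticsRoundTrip(HiveBasicStatistics) overload is not included in the excerpt. Given the ThriftMetastoreUtil.getHiveBasicStatistics import above and the updateStatisticsParameters call in Example 10, a plausible sketch (java.util.Map import assumed) is:

private static void testBasicStatisticsRoundTrip(HiveBasicStatistics expected)
{
    // Write the statistics into metastore-style table parameters, read them back, and expect an exact round trip.
    Map<String, String> parameters = updateStatisticsParameters(ImmutableMap.of(), expected);
    assertEquals(getHiveBasicStatistics(parameters), expected);
}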

Example 10 with HiveBasicStatistics

Use of io.trino.plugin.hive.HiveBasicStatistics in project trino by trinodb.

From the class ThriftHiveMetastore, method updateTableStatistics:

@Override
public void updateTableStatistics(HiveIdentity identity, String databaseName, String tableName, AcidTransaction transaction, Function<PartitionStatistics, PartitionStatistics> update) {
    Table originalTable = getTable(identity, databaseName, tableName).orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    PartitionStatistics currentStatistics = getTableStatistics(identity, originalTable);
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);
    Table modifiedTable = originalTable.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedTable.setParameters(updateStatisticsParameters(modifiedTable.getParameters(), basicStatistics));
    if (transaction.isAcidTransactionRunning()) {
        modifiedTable.setWriteId(transaction.getWriteId());
    }
    alterTable(identity, databaseName, tableName, modifiedTable);
    io.trino.plugin.hive.metastore.Table table = fromMetastoreApiTable(modifiedTable);
    OptionalLong rowCount = basicStatistics.getRowCount();
    List<ColumnStatisticsObj> metastoreColumnStatistics = updatedStatistics.getColumnStatistics().entrySet().stream().flatMap(entry -> {
        Optional<Column> column = table.getColumn(entry.getKey());
        if (column.isEmpty() && isAvroTableWithSchemaSet(modifiedTable)) {
            // Avro tables can have a different effective schema than the one declared in the metastore;
            // the metastore does not allow storing statistics for a column it does not know about, so skip it.
            return Stream.of();
        }
        HiveType type = column.orElseThrow(() -> new IllegalStateException("Column not found: " + entry.getKey())).getType();
        return Stream.of(createMetastoreColumnStatistics(entry.getKey(), type, entry.getValue(), rowCount));
    }).collect(toImmutableList());
    if (!metastoreColumnStatistics.isEmpty()) {
        setTableColumnStatistics(identity, databaseName, tableName, metastoreColumnStatistics);
    }
    Set<String> removedColumnStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedColumnStatistics.forEach(column -> deleteTableColumnStatistics(identity, databaseName, tableName, column));
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) ThriftMetastoreUtil.fromMetastoreApiTable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.fromMetastoreApiTable) ThriftMetastoreUtil.updateStatisticsParameters(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.updateStatisticsParameters) ThriftMetastoreUtil.getHiveBasicStatistics(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.getHiveBasicStatistics) HiveBasicStatistics(io.trino.plugin.hive.HiveBasicStatistics) PartitionStatistics(io.trino.plugin.hive.PartitionStatistics) SchemaTableName(io.trino.spi.connector.SchemaTableName) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) Optional(java.util.Optional) OptionalLong(java.util.OptionalLong) HiveType(io.trino.plugin.hive.HiveType)
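
A hypothetical caller-side sketch of the update function this method accepts: overwrite the table-level row count while carrying the existing column statistics forward. The metastore, identity, transaction, schema, and table names below are illustrative placeholders, not values from the snippet:

Function<PartitionStatistics, PartitionStatistics> overwriteRowCount = current ->
        PartitionStatistics.builder()
                .setBasicStatistics(new HiveBasicStatistics(
                        OptionalLong.empty(), OptionalLong.of(42), OptionalLong.empty(), OptionalLong.empty()))
                .setColumnStatistics(current.getColumnStatistics())
                .build();
metastore.updateTableStatistics(identity, "example_schema", "example_table", transaction, overwriteRowCount);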

Aggregations

HiveBasicStatistics (io.trino.plugin.hive.HiveBasicStatistics): 22 usages
PartitionStatistics (io.trino.plugin.hive.PartitionStatistics): 13 usages
ThriftMetastoreUtil.getHiveBasicStatistics (io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.getHiveBasicStatistics): 13 usages
HiveColumnStatistics (io.trino.plugin.hive.metastore.HiveColumnStatistics): 11 usages
TrinoException (io.trino.spi.TrinoException): 11 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 9 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 9 usages
Column (io.trino.plugin.hive.metastore.Column): 8 usages
List (java.util.List): 8 usages
Map (java.util.Map): 8 usages
ImmutableList (com.google.common.collect.ImmutableList): 7 usages
OptionalLong (java.util.OptionalLong): 7 usages
HIVE_METASTORE_ERROR (io.trino.plugin.hive.HiveErrorCode.HIVE_METASTORE_ERROR): 6 usages
ColumnStatisticType (io.trino.spi.statistics.ColumnStatisticType): 6 usages
Type (io.trino.spi.type.Type): 6 usages
ArrayList (java.util.ArrayList): 6 usages
Optional (java.util.Optional): 6 usages
Set (java.util.Set): 6 usages
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 5 usages
Sets.difference (com.google.common.collect.Sets.difference): 5 usages