Search in sources:

Example 1 with TEXTFILE

Use of io.trino.plugin.hive.HiveStorageFormat.TEXTFILE in the trino project by trinodb.

From the class AbstractTestHive, the method testStorePartitionWithStatistics:

protected void testStorePartitionWithStatistics(
        List<ColumnMetadata> columns,
        PartitionStatistics statsForAllColumns1,
        PartitionStatistics statsForAllColumns2,
        PartitionStatistics statsForSubsetOfColumns,
        PartitionStatistics emptyStatistics) throws Exception {
    SchemaTableName tableName = temporaryTable("store_partition_with_statistics");
    try {
        doCreateEmptyTable(tableName, ORC, columns);
        HiveMetastoreClosure metastoreClient = new HiveMetastoreClosure(getMetastoreClient());
        Table table = metastoreClient.getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
        List<String> partitionValues = ImmutableList.of("2016-01-01");
        String partitionName = makePartName(ImmutableList.of("ds"), partitionValues);
        Partition partition = createDummyPartition(table, partitionName);
        // create partition with stats for all columns
        metastoreClient.addPartitions(tableName.getSchemaName(), tableName.getTableName(), ImmutableList.of(new PartitionWithStatistics(partition, partitionName, statsForAllColumns1)));
        assertEquals(metastoreClient.getPartition(tableName.getSchemaName(), tableName.getTableName(), partitionValues).get().getStorage().getStorageFormat(), fromHiveStorageFormat(ORC));
        assertThat(metastoreClient.getPartitionStatistics(tableName.getSchemaName(), tableName.getTableName(), ImmutableSet.of(partitionName))).isEqualTo(ImmutableMap.of(partitionName, statsForAllColumns1));
        // alter the partition into one with other stats
        Partition modifiedPartition = Partition.builder(partition)
                .withStorage(storage -> storage
                        .setStorageFormat(fromHiveStorageFormat(RCBINARY))
                        .setLocation(partitionTargetPath(tableName, partitionName)))
                .build();
        metastoreClient.alterPartition(tableName.getSchemaName(), tableName.getTableName(), new PartitionWithStatistics(modifiedPartition, partitionName, statsForAllColumns2));
        assertEquals(metastoreClient.getPartition(tableName.getSchemaName(), tableName.getTableName(), partitionValues).get().getStorage().getStorageFormat(), fromHiveStorageFormat(RCBINARY));
        assertThat(metastoreClient.getPartitionStatistics(tableName.getSchemaName(), tableName.getTableName(), ImmutableSet.of(partitionName))).isEqualTo(ImmutableMap.of(partitionName, statsForAllColumns2));
        // alter the partition into one with stats for only subset of columns
        modifiedPartition = Partition.builder(partition)
                .withStorage(storage -> storage
                        .setStorageFormat(fromHiveStorageFormat(TEXTFILE))
                        .setLocation(partitionTargetPath(tableName, partitionName)))
                .build();
        metastoreClient.alterPartition(tableName.getSchemaName(), tableName.getTableName(), new PartitionWithStatistics(modifiedPartition, partitionName, statsForSubsetOfColumns));
        assertThat(metastoreClient.getPartitionStatistics(tableName.getSchemaName(), tableName.getTableName(), ImmutableSet.of(partitionName))).isEqualTo(ImmutableMap.of(partitionName, statsForSubsetOfColumns));
        // alter the partition into one without stats
        modifiedPartition = Partition.builder(partition)
                .withStorage(storage -> storage
                        .setStorageFormat(fromHiveStorageFormat(TEXTFILE))
                        .setLocation(partitionTargetPath(tableName, partitionName)))
                .build();
        metastoreClient.alterPartition(tableName.getSchemaName(), tableName.getTableName(), new PartitionWithStatistics(modifiedPartition, partitionName, emptyStatistics));
        assertThat(metastoreClient.getPartitionStatistics(tableName.getSchemaName(), tableName.getTableName(), ImmutableSet.of(partitionName))).isEqualTo(ImmutableMap.of(partitionName, emptyStatistics));
    } finally {
        dropTable(tableName);
    }
}
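The statistics arguments for this helper are supplied by the concrete test subclasses and are not part of this excerpt. Below is a minimal, hedged sketch of how such arguments might be assembled; it assumes the PartitionStatistics builder and the HiveBasicStatistics/HiveColumnStatistics factory methods that appear in the "Also used" list below, and the column name t_bigint and the numbers are purely illustrative.

// Hedged sketch only: builder and factory signatures should be checked against the Trino sources.
PartitionStatistics statsForAllColumns1 = PartitionStatistics.builder()
        .setBasicStatistics(createZeroStatistics())
        .setColumnStatistics(ImmutableMap.of(
                // "t_bigint" is an illustrative column name, not taken from the test
                "t_bigint", createIntegerColumnStatistics(OptionalLong.of(0), OptionalLong.of(100), OptionalLong.of(1), OptionalLong.of(42))))
        .build();
// A run that stores no statistics at all can reuse the empty singleton.
PartitionStatistics emptyStatistics = PartitionStatistics.empty();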
Also used : Assertions.assertInstanceOf(io.airlift.testing.Assertions.assertInstanceOf) FileSystem(org.apache.hadoop.fs.FileSystem) Test(org.testng.annotations.Test) FileStatus(org.apache.hadoop.fs.FileStatus) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) HiveColumnStatistics.createDateColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createDateColumnStatistics) Files.createTempDirectory(java.nio.file.Files.createTempDirectory) Map(java.util.Map) PRESTO_QUERY_ID_NAME(io.trino.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) ViewNotFoundException(io.trino.spi.connector.ViewNotFoundException) MaterializedRow(io.trino.testing.MaterializedRow) ROLLBACK_AFTER_FINISH_INSERT(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_AFTER_FINISH_INSERT) ENGLISH(java.util.Locale.ENGLISH) Assert.assertFalse(org.testng.Assert.assertFalse) HiveIdentity(io.trino.plugin.hive.authentication.HiveIdentity) Domain(io.trino.spi.predicate.Domain) MANAGED_TABLE(org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE) ASCENDING(io.trino.plugin.hive.metastore.SortingColumn.Order.ASCENDING) ValueSet(io.trino.spi.predicate.ValueSet) MoreExecutors.directExecutor(com.google.common.util.concurrent.MoreExecutors.directExecutor) COMMIT(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.COMMIT) NOT_PARTITIONED(io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) DESCENDING(io.trino.plugin.hive.metastore.SortingColumn.Order.DESCENDING) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) TrinoS3ConfigurationInitializer(io.trino.plugin.hive.s3.TrinoS3ConfigurationInitializer) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) MetastoreLocator(io.trino.plugin.hive.metastore.thrift.MetastoreLocator) ROLLBACK_AFTER_SINK_FINISH(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_AFTER_SINK_FINISH) TableScanRedirectApplicationResult(io.trino.spi.connector.TableScanRedirectApplicationResult) TableColumnsMetadata(io.trino.spi.connector.TableColumnsMetadata) REAL(io.trino.spi.type.RealType.REAL) Partition(io.trino.plugin.hive.metastore.Partition) BUCKETED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.BUCKETED_BY_PROPERTY) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) LocalDateTime(java.time.LocalDateTime) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) HiveBasicStatistics.createEmptyStatistics(io.trino.plugin.hive.HiveBasicStatistics.createEmptyStatistics) Variable(io.trino.spi.expression.Variable) StorageFormat.fromHiveStorageFormat(io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat) OptionalLong(java.util.OptionalLong) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) HiveMetastore(io.trino.plugin.hive.metastore.HiveMetastore) OrcPageSource(io.trino.plugin.hive.orc.OrcPageSource) SEQUENCEFILE(io.trino.plugin.hive.HiveStorageFormat.SEQUENCEFILE) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) HIVE_INVALID_PARTITION_VALUE(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) 
Assertions.assertGreaterThanOrEqual(io.airlift.testing.Assertions.assertGreaterThanOrEqual) ImmutableMultimap(com.google.common.collect.ImmutableMultimap) HiveSessionProperties.isTemporaryStagingDirectoryEnabled(io.trino.plugin.hive.HiveSessionProperties.isTemporaryStagingDirectoryEnabled) AfterClass(org.testng.annotations.AfterClass) HiveAzureConfig(io.trino.plugin.hive.azure.HiveAzureConfig) FileUtils.makePartName(org.apache.hadoop.hive.common.FileUtils.makePartName) MapType(io.trino.spi.type.MapType) ConnectorSplit(io.trino.spi.connector.ConnectorSplit) TRANSACTIONAL(io.trino.plugin.hive.HiveTableProperties.TRANSACTIONAL) SPARK_TABLE_PROVIDER_KEY(io.trino.plugin.hive.util.HiveUtil.SPARK_TABLE_PROVIDER_KEY) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) IOException(java.io.IOException) Iterables.getOnlyElement(com.google.common.collect.Iterables.getOnlyElement) STAGE_AND_MOVE_TO_TARGET_DIRECTORY(io.trino.plugin.hive.LocationHandle.WriteMode.STAGE_AND_MOVE_TO_TARGET_DIRECTORY) ROLLBACK_AFTER_BEGIN_INSERT(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_AFTER_BEGIN_INSERT) HostAndPort(com.google.common.net.HostAndPort) CatalogName(io.trino.plugin.base.CatalogName) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) HIVE_INT(io.trino.plugin.hive.HiveType.HIVE_INT) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) HiveTestUtils.mapType(io.trino.plugin.hive.HiveTestUtils.mapType) ParquetPageSource(io.trino.plugin.hive.parquet.ParquetPageSource) ThriftMetastoreConfig(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig) RCTEXT(io.trino.plugin.hive.HiveStorageFormat.RCTEXT) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) WriteInfo(io.trino.plugin.hive.LocationService.WriteInfo) HivePrivilege(io.trino.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege) PARTITION_KEY(io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY) HiveColumnStatistics.createStringColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createStringColumnStatistics) MoreFiles.deleteRecursively(com.google.common.io.MoreFiles.deleteRecursively) MaterializedResult(io.trino.testing.MaterializedResult) HiveUtil.toPartitionValues(io.trino.plugin.hive.util.HiveUtil.toPartitionValues) NO_RETRIES(io.trino.spi.connector.RetryMode.NO_RETRIES) ConnectorMaterializedViewDefinition(io.trino.spi.connector.ConnectorMaterializedViewDefinition) ICEBERG_TABLE_TYPE_VALUE(io.trino.plugin.hive.util.HiveUtil.ICEBERG_TABLE_TYPE_VALUE) Duration(io.airlift.units.Duration) BUCKETING_V1(io.trino.plugin.hive.util.HiveBucketing.BucketingVersion.BUCKETING_V1) SqlTimestamp(io.trino.spi.type.SqlTimestamp) ICEBERG_TABLE_TYPE_NAME(io.trino.plugin.hive.util.HiveUtil.ICEBERG_TABLE_TYPE_NAME) NOOP_METADATA_PROVIDER(io.trino.spi.connector.MetadataProvider.NOOP_METADATA_PROVIDER) HiveMetastoreFactory(io.trino.plugin.hive.metastore.HiveMetastoreFactory) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) HiveTestUtils.arrayType(io.trino.plugin.hive.HiveTestUtils.arrayType) Block(io.trino.spi.block.Block) HIVE_PARTITION_SCHEMA_MISMATCH(io.trino.plugin.hive.HiveErrorCode.HIVE_PARTITION_SCHEMA_MISMATCH) ConnectorViewDefinition(io.trino.spi.connector.ConnectorViewDefinition) INTEGER(io.trino.spi.type.IntegerType.INTEGER) ROLLBACK_RIGHT_AWAY(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_RIGHT_AWAY) 
ImmutableSet(com.google.common.collect.ImmutableSet) BeforeClass(org.testng.annotations.BeforeClass) Collection(java.util.Collection) UUID(java.util.UUID) Assert.assertNotNull(org.testng.Assert.assertNotNull) TrinoAzureConfigurationInitializer(io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer) HiveColumnStatistics.createDoubleColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createDoubleColumnStatistics) BUCKET_COLUMN_NAME(io.trino.plugin.hive.HiveColumnHandle.BUCKET_COLUMN_NAME) BIGINT(io.trino.spi.type.BigintType.BIGINT) SORTED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.SORTED_BY_PROPERTY) LocalDate(java.time.LocalDate) JsonCodec(io.airlift.json.JsonCodec) IntStream(java.util.stream.IntStream) HiveTestUtils.getDefaultHivePageSourceFactories(io.trino.plugin.hive.HiveTestUtils.getDefaultHivePageSourceFactories) Constraint(io.trino.spi.connector.Constraint) Assert.assertNull(org.testng.Assert.assertNull) OptionalDouble(java.util.OptionalDouble) Assert.assertEquals(org.testng.Assert.assertEquals) OptionalInt(java.util.OptionalInt) Function(java.util.function.Function) HashSet(java.util.HashSet) HYPER_LOG_LOG(io.trino.spi.type.HyperLogLogType.HYPER_LOG_LOG) ImmutableList(com.google.common.collect.ImmutableList) BridgingHiveMetastore(io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore) TableStatistics(io.trino.spi.statistics.TableStatistics) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) Math.toIntExact(java.lang.Math.toIntExact) ExecutorService(java.util.concurrent.ExecutorService) ConnectorPageSink(io.trino.spi.connector.ConnectorPageSink) DESC_NULLS_LAST(io.trino.spi.connector.SortOrder.DESC_NULLS_LAST) ORC(io.trino.plugin.hive.HiveStorageFormat.ORC) HiveColumnStatistics.createIntegerColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createIntegerColumnStatistics) UTF_8(java.nio.charset.StandardCharsets.UTF_8) Assert.fail(org.testng.Assert.fail) ConnectorPageSourceProvider(io.trino.spi.connector.ConnectorPageSourceProvider) DateTime(org.joda.time.DateTime) PAGE_SORTER(io.trino.plugin.hive.HiveTestUtils.PAGE_SORTER) Executors.newFixedThreadPool(java.util.concurrent.Executors.newFixedThreadPool) HIVE_STRING(io.trino.plugin.hive.HiveType.HIVE_STRING) Hashing.sha256(com.google.common.hash.Hashing.sha256) HiveTestUtils.getHiveSession(io.trino.plugin.hive.HiveTestUtils.getHiveSession) Collectors.toList(java.util.stream.Collectors.toList) PRESTO_VERSION_NAME(io.trino.plugin.hive.HiveMetadata.PRESTO_VERSION_NAME) Assert.assertTrue(org.testng.Assert.assertTrue) PrincipalPrivileges(io.trino.plugin.hive.metastore.PrincipalPrivileges) ConnectorPageSinkProvider(io.trino.spi.connector.ConnectorPageSinkProvider) ConnectorTransactionHandle(io.trino.spi.connector.ConnectorTransactionHandle) ConnectorSplitManager(io.trino.spi.connector.ConnectorSplitManager) Arrays(java.util.Arrays) NamedTypeSignature(io.trino.spi.type.NamedTypeSignature) USER(io.trino.spi.security.PrincipalType.USER) Maps.uniqueIndex(com.google.common.collect.Maps.uniqueIndex) NO_ACID_TRANSACTION(io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION) HiveColumnStatistics.createDecimalColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createDecimalColumnStatistics) TypeOperators(io.trino.spi.type.TypeOperators) ROLLBACK_AFTER_APPEND_PAGE(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_AFTER_APPEND_PAGE) HiveTestUtils.rowType(io.trino.plugin.hive.HiveTestUtils.rowType) 
TrinoExceptionAssert.assertTrinoExceptionThrownBy(io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy) CharType.createCharType(io.trino.spi.type.CharType.createCharType) BigDecimal(java.math.BigDecimal) TypeId(io.trino.spi.type.TypeId) Sets.difference(com.google.common.collect.Sets.difference) PARQUET(io.trino.plugin.hive.HiveStorageFormat.PARQUET) Column(io.trino.plugin.hive.metastore.Column) Executors.newScheduledThreadPool(java.util.concurrent.Executors.newScheduledThreadPool) ConnectorOutputTableHandle(io.trino.spi.connector.ConnectorOutputTableHandle) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) ProjectionApplicationResult(io.trino.spi.connector.ProjectionApplicationResult) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) PartitionWithStatistics(io.trino.plugin.hive.metastore.PartitionWithStatistics) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) HiveTestUtils.getDefaultHiveRecordCursorProviders(io.trino.plugin.hive.HiveTestUtils.getDefaultHiveRecordCursorProviders) ConnectorNodePartitioningProvider(io.trino.spi.connector.ConnectorNodePartitioningProvider) Table(io.trino.plugin.hive.metastore.Table) TestingNodeManager(io.trino.testing.TestingNodeManager) Range(io.trino.spi.predicate.Range) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) PARTITIONED_BY_PROPERTY(io.trino.plugin.hive.HiveTableProperties.PARTITIONED_BY_PROPERTY) RcFilePageSource(io.trino.plugin.hive.rcfile.RcFilePageSource) Set(java.util.Set) MILLISECONDS(java.util.concurrent.TimeUnit.MILLISECONDS) SchemaTableName(io.trino.spi.connector.SchemaTableName) SortingProperty(io.trino.spi.connector.SortingProperty) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) HiveColumnStatistics.createBooleanColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createBooleanColumnStatistics) SchemaTablePrefix(io.trino.spi.connector.SchemaTablePrefix) Lists.reverse(com.google.common.collect.Lists.reverse) DATE(io.trino.spi.type.DateType.DATE) MoreObjects.toStringHelper(com.google.common.base.MoreObjects.toStringHelper) HiveColumnHandle.bucketColumnHandle(io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle) HivePrincipal(io.trino.plugin.hive.metastore.HivePrincipal) ConnectorTableLayout(io.trino.spi.connector.ConnectorTableLayout) ConnectorInsertTableHandle(io.trino.spi.connector.ConnectorInsertTableHandle) Slice(io.airlift.slice.Slice) NullableValue(io.trino.spi.predicate.NullableValue) Page(io.trino.spi.Page) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) MINUTES(java.util.concurrent.TimeUnit.MINUTES) JoinCompiler(io.trino.sql.gen.JoinCompiler) HiveUtil.columnExtraInfo(io.trino.plugin.hive.util.HiveUtil.columnExtraInfo) GroupByHashPageIndexerFactory(io.trino.operator.GroupByHashPageIndexerFactory) Float.floatToRawIntBits(java.lang.Float.floatToRawIntBits) ALLOW_INSECURE(com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE) UNGROUPED_SCHEDULING(io.trino.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) ColumnHandle(io.trino.spi.connector.ColumnHandle) TEXTFILE(io.trino.plugin.hive.HiveStorageFormat.TEXTFILE) VARBINARY(io.trino.spi.type.VarbinaryType.VARBINARY) HIVE_INVALID_BUCKET_FILES(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES) HiveType.toHiveType(io.trino.plugin.hive.HiveType.toHiveType) 
TestingMetastoreLocator(io.trino.plugin.hive.metastore.thrift.TestingMetastoreLocator) STORAGE_FORMAT_PROPERTY(io.trino.plugin.hive.HiveTableProperties.STORAGE_FORMAT_PROPERTY) GoogleGcsConfigurationInitializer(io.trino.plugin.hive.gcs.GoogleGcsConfigurationInitializer) ConstraintApplicationResult(io.trino.spi.connector.ConstraintApplicationResult) RecordCursor(io.trino.spi.connector.RecordCursor) BlockTypeOperators(io.trino.type.BlockTypeOperators) LongStream(java.util.stream.LongStream) NO_REDIRECTIONS(io.trino.plugin.hive.HiveTableRedirectionsProvider.NO_REDIRECTIONS) DecimalType.createDecimalType(io.trino.spi.type.DecimalType.createDecimalType) HIVE_LONG(io.trino.plugin.hive.HiveType.HIVE_LONG) HiveTestUtils.getTypes(io.trino.plugin.hive.HiveTestUtils.getTypes) TRANSACTION_CONFLICT(io.trino.spi.StandardErrorCode.TRANSACTION_CONFLICT) TESTING_TYPE_MANAGER(io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER) ConnectorSession(io.trino.spi.connector.ConnectorSession) MoreFutures.getFutureValue(io.airlift.concurrent.MoreFutures.getFutureValue) UTC(org.joda.time.DateTimeZone.UTC) SESSION(io.trino.plugin.hive.HiveTestUtils.SESSION) SqlVarbinary(io.trino.spi.type.SqlVarbinary) DiscretePredicates(io.trino.spi.connector.DiscretePredicates) CharType(io.trino.spi.type.CharType) TableType(org.apache.hadoop.hive.metastore.TableType) CachingHiveMetastore.cachingHiveMetastore(io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.cachingHiveMetastore) TINYINT(io.trino.spi.type.TinyintType.TINYINT) DateTimeTestingUtils.sqlTimestampOf(io.trino.testing.DateTimeTestingUtils.sqlTimestampOf) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) Assertions.assertGreaterThan(io.airlift.testing.Assertions.assertGreaterThan) HiveColumnStatistics.createBinaryColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createBinaryColumnStatistics) MoreCollectors.onlyElement(com.google.common.collect.MoreCollectors.onlyElement) NoHdfsAuthentication(io.trino.plugin.hive.authentication.NoHdfsAuthentication) QueryAssertions.assertEqualsIgnoreOrder(io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder) HiveGcsConfig(io.trino.plugin.hive.gcs.HiveGcsConfig) Iterables.concat(com.google.common.collect.Iterables.concat) Path(org.apache.hadoop.fs.Path) KILOBYTE(io.airlift.units.DataSize.Unit.KILOBYTE) TIMESTAMP_WITH_TIME_ZONE(io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE) AVRO(io.trino.plugin.hive.HiveStorageFormat.AVRO) StorageFormat(io.trino.plugin.hive.metastore.StorageFormat) RowType(io.trino.spi.type.RowType) HiveWriteUtils.getTableDefaultLocation(io.trino.plugin.hive.util.HiveWriteUtils.getTableDefaultLocation) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) ThriftHiveMetastore(io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastore) HiveSessionProperties.getTemporaryStagingDirectoryPath(io.trino.plugin.hive.HiveSessionProperties.getTemporaryStagingDirectoryPath) TrinoException(io.trino.spi.TrinoException) ArrayType(io.trino.spi.type.ArrayType) MaterializedResult.materializeSourceDataStream(io.trino.testing.MaterializedResult.materializeSourceDataStream) ConnectorBucketNodeMap(io.trino.spi.connector.ConnectorBucketNodeMap) ASC_NULLS_FIRST(io.trino.spi.connector.SortOrder.ASC_NULLS_FIRST) String.format(java.lang.String.format) SqlDate(io.trino.spi.type.SqlDate) Preconditions.checkState(com.google.common.base.Preconditions.checkState) SqlTimestampWithTimeZone(io.trino.spi.type.SqlTimestampWithTimeZone) 
DataSize(io.airlift.units.DataSize) HdfsContext(io.trino.plugin.hive.HdfsEnvironment.HdfsContext) List(java.util.List) DynamicFilter(io.trino.spi.connector.DynamicFilter) Assignment(io.trino.spi.connector.Assignment) Optional(java.util.Optional) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) HivePrivilegeInfo(io.trino.plugin.hive.metastore.HivePrivilegeInfo) SqlStandardAccessControlMetadata(io.trino.plugin.hive.security.SqlStandardAccessControlMetadata) Logger(io.airlift.log.Logger) MetastoreConfig(io.trino.plugin.hive.metastore.MetastoreConfig) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) CounterStat(io.airlift.stats.CounterStat) HashMap(java.util.HashMap) HiveBasicStatistics.createZeroStatistics(io.trino.plugin.hive.HiveBasicStatistics.createZeroStatistics) CSV(io.trino.plugin.hive.HiveStorageFormat.CSV) AtomicReference(java.util.concurrent.atomic.AtomicReference) VarcharType(io.trino.spi.type.VarcharType) HiveColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics) ROLLBACK_AFTER_DELETE(io.trino.plugin.hive.AbstractTestHive.TransactionDeleteInsertTestTag.ROLLBACK_AFTER_DELETE) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) Verify.verify(com.google.common.base.Verify.verify) Assertions.assertLessThanOrEqual(io.airlift.testing.Assertions.assertLessThanOrEqual) Threads.daemonThreadsNamed(io.airlift.concurrent.Threads.daemonThreadsNamed) SemiTransactionalHiveMetastore(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore) RecordPageSource(io.trino.spi.connector.RecordPageSource) Objects.requireNonNull(java.util.Objects.requireNonNull) RowFieldName(io.trino.spi.type.RowFieldName) JSON(io.trino.plugin.hive.HiveStorageFormat.JSON) RCBINARY(io.trino.plugin.hive.HiveStorageFormat.RCBINARY) NO_PRIVILEGES(io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES) DELTA_LAKE_PROVIDER(io.trino.plugin.hive.util.HiveUtil.DELTA_LAKE_PROVIDER) ColumnStatistics(io.trino.spi.statistics.ColumnStatistics) FieldDereference(io.trino.spi.expression.FieldDereference) HiveTestUtils.getDefaultHiveFileWriterFactories(io.trino.plugin.hive.HiveTestUtils.getDefaultHiveFileWriterFactories) HiveTestUtils.getHiveSessionProperties(io.trino.plugin.hive.HiveTestUtils.getHiveSessionProperties) TupleDomain(io.trino.spi.predicate.TupleDomain) HiveWriteUtils.createDirectory(io.trino.plugin.hive.util.HiveWriteUtils.createDirectory) TestingConnectorSession(io.trino.testing.TestingConnectorSession) HiveS3Config(io.trino.plugin.hive.s3.HiveS3Config) Executors.newCachedThreadPool(java.util.concurrent.Executors.newCachedThreadPool) BUCKET_COUNT_PROPERTY(io.trino.plugin.hive.HiveTableProperties.BUCKET_COUNT_PROPERTY) SortingColumn(io.trino.plugin.hive.metastore.SortingColumn) SECONDS(java.util.concurrent.TimeUnit.SECONDS) REGULAR(io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Partition(io.trino.plugin.hive.metastore.Partition) Table(io.trino.plugin.hive.metastore.Table) PartitionWithStatistics(io.trino.plugin.hive.metastore.PartitionWithStatistics) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) SchemaTableName(io.trino.spi.connector.SchemaTableName)

Example 2 with TEXTFILE

Use of io.trino.plugin.hive.HiveStorageFormat.TEXTFILE in the trino project by trinodb.

From the class ViewReaderUtil, the method coralTableRedirectionResolver:

private static CoralTableRedirectionResolver coralTableRedirectionResolver(
        ConnectorSession session,
        BiFunction<ConnectorSession, SchemaTableName, Optional<CatalogSchemaTableName>> tableRedirectionResolver,
        MetadataProvider metadataProvider) {
    return schemaTableName -> tableRedirectionResolver.apply(session, schemaTableName).map(target -> {
        ConnectorTableSchema tableSchema = metadataProvider.getRelationMetadata(session, target)
                .orElseThrow(() -> new TableNotFoundException(
                        target.getSchemaTableName(),
                        format("%s is redirected to %s, but that relation cannot be found", schemaTableName, target)));
        List<Column> columns = tableSchema.getColumns().stream()
                .filter(columnSchema -> !columnSchema.isHidden())
                .map(columnSchema -> new Column(columnSchema.getName(), toHiveType(columnSchema.getType()), Optional.empty()))
                .collect(toImmutableList());
        Table table = Table.builder()
                .setDatabaseName(schemaTableName.getSchemaName())
                .setTableName(schemaTableName.getTableName())
                .setTableType(EXTERNAL_TABLE.name())
                .setDataColumns(columns)
                .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                .setOwner(Optional.empty())
                .build();
        return toMetastoreApiTable(table);
    });
}
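The tableRedirectionResolver parameter is an ordinary BiFunction that the caller wires up elsewhere in Trino. As a rough illustration only (the catalog, schema, and table names below are made up, not taken from Trino), a resolver that redirects a single Hive table to another catalog could look like this:

// Illustrative only: redirect one specific table, leave everything else alone.
BiFunction<ConnectorSession, SchemaTableName, Optional<CatalogSchemaTableName>> tableRedirectionResolver =
        (connectorSession, schemaTableName) -> {
            if (schemaTableName.equals(new SchemaTableName("analytics", "events"))) {
                return Optional.of(new CatalogSchemaTableName("iceberg", new SchemaTableName("analytics", "events")));
            }
            return Optional.empty();
        };

The resolver returned by coralTableRedirectionResolver then exposes any redirected target to Coral as a dummy metastore table built from the target's visible columns; TEXTFILE is only a placeholder storage format there.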
Also used : BiFunction(java.util.function.BiFunction) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) CoralSemiTransactionalHiveMSCAdapter(io.trino.plugin.hive.metastore.CoralSemiTransactionalHiveMSCAdapter) HiveMetastoreClient(com.linkedin.coral.common.HiveMetastoreClient) StorageFormat.fromHiveStorageFormat(io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat) ObjectMapperProvider(io.airlift.json.ObjectMapperProvider) MetadataProvider(io.trino.spi.connector.MetadataProvider) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Column(io.trino.plugin.hive.metastore.Column) Verify.verify(com.google.common.base.Verify.verify) Locale(java.util.Locale) SemiTransactionalHiveMetastore(io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore) Map(java.util.Map) Objects.requireNonNull(java.util.Objects.requireNonNull) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) TEXTFILE(io.trino.plugin.hive.HiveStorageFormat.TEXTFILE) ConnectorViewDefinition(io.trino.spi.connector.ConnectorViewDefinition) VIRTUAL_VIEW(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW) HiveType.toHiveType(io.trino.plugin.hive.HiveType.toHiveType) TABLE_COMMENT(io.trino.plugin.hive.HiveMetadata.TABLE_COMMENT) HiveSessionProperties.isLegacyHiveViewTranslation(io.trino.plugin.hive.HiveSessionProperties.isLegacyHiveViewTranslation) RelDataType(org.apache.calcite.rel.type.RelDataType) HIVE_VIEW_TRANSLATION_ERROR(io.trino.plugin.hive.HiveErrorCode.HIVE_VIEW_TRANSLATION_ERROR) HiveToRelConverter(com.linkedin.coral.hive.hive2rel.HiveToRelConverter) Table(io.trino.plugin.hive.metastore.Table) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) TrinoException(io.trino.spi.TrinoException) ConnectorSession(io.trino.spi.connector.ConnectorSession) RelNode(org.apache.calcite.rel.RelNode) CatalogName(io.trino.plugin.base.CatalogName) SchemaTableName(io.trino.spi.connector.SchemaTableName) String.format(java.lang.String.format) Collectors.joining(java.util.stream.Collectors.joining) ConnectorTableSchema(io.trino.spi.connector.ConnectorTableSchema) ThriftMetastoreUtil.toMetastoreApiTable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable) HiveUtil.checkCondition(io.trino.plugin.hive.util.HiveUtil.checkCondition) Base64(java.util.Base64) List(java.util.List) RelToTrinoConverter(com.linkedin.coral.trino.rel2trino.RelToTrinoConverter) CatalogSchemaTableName(io.trino.spi.connector.CatalogSchemaTableName) JsonCodecFactory(io.airlift.json.JsonCodecFactory) TableType(org.apache.hadoop.hive.metastore.TableType) HIVE_INVALID_VIEW_DATA(io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_VIEW_DATA) Optional(java.util.Optional) TypeManager(io.trino.spi.type.TypeManager) JsonCodec(io.airlift.json.JsonCodec) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Table(io.trino.plugin.hive.metastore.Table) ThriftMetastoreUtil.toMetastoreApiTable(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable) Column(io.trino.plugin.hive.metastore.Column) ViewColumn(io.trino.spi.connector.ConnectorViewDefinition.ViewColumn) ConnectorTableSchema(io.trino.spi.connector.ConnectorTableSchema)

Example 3 with TEXTFILE

Use of io.trino.plugin.hive.HiveStorageFormat.TEXTFILE in the trino project by trinodb.

From the class TestCachingHiveMetastore, the method testLoadAfterInvalidate:

@Test(timeOut = 60_000, dataProviderClass = DataProviders.class, dataProvider = "trueFalse")
public void testLoadAfterInvalidate(boolean invalidateAll) throws Exception {
    // State
    CopyOnWriteArrayList<Column> tableColumns = new CopyOnWriteArrayList<>();
    ConcurrentMap<String, Partition> tablePartitionsByName = new ConcurrentHashMap<>();
    Map<String, String> tableParameters = new ConcurrentHashMap<>();
    tableParameters.put("frequent-changing-table-parameter", "parameter initial value");
    // Initialize data
    String databaseName = "my_database";
    String tableName = "my_table_name";
    tableColumns.add(new Column("value", toHiveType(VARCHAR), Optional.empty()));
    tableColumns.add(new Column("pk", toHiveType(VARCHAR), Optional.empty()));
    List<String> partitionNames = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        String partitionName = "pk=" + i;
        tablePartitionsByName.put(partitionName, Partition.builder()
                .setDatabaseName(databaseName)
                .setTableName(tableName)
                .setColumns(ImmutableList.copyOf(tableColumns))
                .setValues(List.of(Integer.toString(i)))
                .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                .setParameters(Map.of("frequent-changing-partition-parameter", "parameter initial value"))
                .build());
        partitionNames.add(partitionName);
    }
    // Mock metastore
    CountDownLatch getTableEnteredLatch = new CountDownLatch(1);
    CountDownLatch getTableReturnLatch = new CountDownLatch(1);
    CountDownLatch getTableFinishedLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesEnteredLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesReturnLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesFinishedLatch = new CountDownLatch(1);
    HiveMetastore mockMetastore = new UnimplementedHiveMetastore() {

        @Override
        public Optional<Table> getTable(String databaseName, String tableName) {
            Optional<Table> table = Optional.of(Table.builder()
                    .setDatabaseName(databaseName)
                    .setTableName(tableName)
                    .setTableType(EXTERNAL_TABLE.name())
                    .setDataColumns(tableColumns)
                    .setParameters(ImmutableMap.copyOf(tableParameters))
                    .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                    .setOwner(Optional.empty())
                    .build());
            // 1: signal that this table loader has been entered
            getTableEnteredLatch.countDown();
            // 2: block until the main thread has modified the table parameters and invalidated the cache
            await(getTableReturnLatch, 10, SECONDS);
            return table;
        }

        @Override
        public Map<String, Optional<Partition>> getPartitionsByNames(Table table, List<String> partitionNames) {
            Map<String, Optional<Partition>> result = new HashMap<>();
            for (String partitionName : partitionNames) {
                result.put(partitionName, Optional.ofNullable(tablePartitionsByName.get(partitionName)));
            }
            // loader#1: signal that this partition loader has been entered
            getPartitionsByNamesEnteredLatch.countDown();
            // loader#2: block until the main thread has modified the partition and invalidated the cache
            await(getPartitionsByNamesReturnLatch, 10, SECONDS);
            return result;
        }
    };
    // Caching metastore. The executor passed here is declared outside this excerpt;
    // it is not the single-thread executor created two lines below for the background load.
    metastore = cachingHiveMetastore(mockMetastore, executor, new Duration(5, TimeUnit.MINUTES), Optional.of(new Duration(1, TimeUnit.MINUTES)), 1000);
    // The test: the main thread applies modifications and verifies that a subsequent load sees them,
    // while a background thread loads the (possibly stale) state into the cache.
    ExecutorService executor = Executors.newFixedThreadPool(1);
    try {
        Future<Void> future = executor.submit(() -> {
            try {
                Table table;
                table = metastore.getTable(databaseName, tableName).orElseThrow();
                // 3
                getTableFinishedLatch.countDown();
                metastore.getPartitionsByNames(table, partitionNames);
                // 6
                getPartitionsByNamesFinishedLatch.countDown();
                return (Void) null;
            } catch (Throwable e) {
                log.error(e);
                throw e;
            }
        });
        // 1: wait until the background thread is inside getTable
        await(getTableEnteredLatch, 10, SECONDS);
        tableParameters.put("frequent-changing-table-parameter", "main-thread-put-xyz");
        if (invalidateAll) {
            metastore.flushCache();
        } else {
            metastore.invalidateTable(databaseName, tableName);
        }
        // 2
        getTableReturnLatch.countDown();
        // 3
        await(getTableFinishedLatch, 10, SECONDS);
        Table table = metastore.getTable(databaseName, tableName).orElseThrow();
        assertThat(table.getParameters()).isEqualTo(Map.of("frequent-changing-table-parameter", "main-thread-put-xyz"));
        // 4
        await(getPartitionsByNamesEnteredLatch, 10, SECONDS);
        String partitionName = partitionNames.get(2);
        Map<String, String> newPartitionParameters = Map.of("frequent-changing-partition-parameter", "main-thread-put-alice");
        tablePartitionsByName.put(partitionName, Partition.builder(tablePartitionsByName.get(partitionName)).setParameters(newPartitionParameters).build());
        if (invalidateAll) {
            metastore.flushCache();
        } else {
            metastore.invalidateTable(databaseName, tableName);
        }
        // 5
        getPartitionsByNamesReturnLatch.countDown();
        // 6
        await(getPartitionsByNamesFinishedLatch, 10, SECONDS);
        Map<String, Optional<Partition>> loadedPartitions = metastore.getPartitionsByNames(table, partitionNames);
        assertThat(loadedPartitions.get(partitionName)).isNotNull().isPresent().hasValueSatisfying(partition -> assertThat(partition.getParameters()).isEqualTo(newPartitionParameters));
        // verify no failure in the background thread
        future.get(10, SECONDS);
    } finally {
        getTableEnteredLatch.countDown();
        getTableReturnLatch.countDown();
        getTableFinishedLatch.countDown();
        getPartitionsByNamesEnteredLatch.countDown();
        getPartitionsByNamesReturnLatch.countDown();
        getPartitionsByNamesFinishedLatch.countDown();
        executor.shutdownNow();
        executor.awaitTermination(10, SECONDS);
    }
}
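The await(latch, timeout, unit) helper called throughout this test is defined elsewhere in TestCachingHiveMetastore and is not part of this excerpt. A minimal sketch of what such a helper could look like follows; the actual Trino implementation may differ.

// Hedged sketch of the await(...) helper used above; not the actual Trino implementation.
private static void await(CountDownLatch latch, long timeout, TimeUnit unit) {
    try {
        // CountDownLatch.await returns false if the timeout elapsed before the latch reached zero
        boolean reached = latch.await(timeout, unit);
        checkState(reached, "wait timed out");
    }
    catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException("interrupted while waiting", e);
    }
}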
Also used : PARTITION_KEY(io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY) USER(io.trino.spi.security.PrincipalType.USER) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) EXTERNAL_TABLE(org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE) Test(org.testng.annotations.Test) TEST_PARTITION1(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_PARTITION1) Duration(io.airlift.units.Duration) MockThriftMetastoreClient(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient) TEST_PARTITION2(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_PARTITION2) Future(java.util.concurrent.Future) Column(io.trino.plugin.hive.metastore.Column) Map(java.util.Map) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) CachingHiveMetastore.memoizeMetastore(io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.memoizeMetastore) HiveColumnHandle(io.trino.plugin.hive.HiveColumnHandle) Assert.assertFalse(org.testng.Assert.assertFalse) HiveIdentity(io.trino.plugin.hive.authentication.HiveIdentity) Table(io.trino.plugin.hive.metastore.Table) ImmutableMap(com.google.common.collect.ImmutableMap) Range(io.trino.spi.predicate.Range) DataProviders(io.trino.testing.DataProviders) ThriftHiveMetastore(io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastore) Domain(io.trino.spi.predicate.Domain) BAD_PARTITION(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.BAD_PARTITION) BeforeMethod(org.testng.annotations.BeforeMethod) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) SESSION(io.trino.testing.TestingConnectorSession.SESSION) Assert.assertNotNull(org.testng.Assert.assertNotNull) HDFS_ENVIRONMENT(io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT) Executors(java.util.concurrent.Executors) ValueSet(io.trino.spi.predicate.ValueSet) Preconditions.checkState(com.google.common.base.Preconditions.checkState) MoreExecutors.directExecutor(com.google.common.util.concurrent.MoreExecutors.directExecutor) BAD_DATABASE(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.BAD_DATABASE) TEST_COLUMN(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_COLUMN) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) MetastoreLocator(io.trino.plugin.hive.metastore.thrift.MetastoreLocator) Function.identity(java.util.function.Function.identity) Optional(java.util.Optional) TEST_ROLES(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_ROLES) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) Partition(io.trino.plugin.hive.metastore.Partition) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) PartitionStatistics(io.trino.plugin.hive.PartitionStatistics) HivePrincipal(io.trino.plugin.hive.metastore.HivePrincipal) MoreExecutors.listeningDecorator(com.google.common.util.concurrent.MoreExecutors.listeningDecorator) Iterables(com.google.common.collect.Iterables) Logger(io.airlift.log.Logger) TEST_PARTITION_VALUES1(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_PARTITION_VALUES1) MetastoreConfig(io.trino.plugin.hive.metastore.MetastoreConfig) Assert.assertEquals(org.testng.Assert.assertEquals) HashMap(java.util.HashMap) StorageFormat.fromHiveStorageFormat(io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat) UnimplementedHiveMetastore(io.trino.plugin.hive.metastore.UnimplementedHiveMetastore) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) 
OptionalLong(java.util.OptionalLong) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) HiveMetastore(io.trino.plugin.hive.metastore.HiveMetastore) PARTITION_COLUMN_NAMES(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.PARTITION_COLUMN_NAMES) ImmutableList(com.google.common.collect.ImmutableList) ThriftMetastoreClient(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreClient) Threads.daemonThreadsNamed(io.airlift.concurrent.Threads.daemonThreadsNamed) BridgingHiveMetastore(io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) TEXTFILE(io.trino.plugin.hive.HiveStorageFormat.TEXTFILE) HiveType.toHiveType(io.trino.plugin.hive.HiveType.toHiveType) ExecutorService(java.util.concurrent.ExecutorService) AfterClass(org.testng.annotations.AfterClass) TEST_TABLE(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_TABLE) TupleDomain.withColumnDomains(io.trino.spi.predicate.TupleDomain.withColumnDomains) HiveColumnStatistics.createIntegerColumnStatistics(io.trino.plugin.hive.metastore.HiveColumnStatistics.createIntegerColumnStatistics) TupleDomain(io.trino.spi.predicate.TupleDomain) HIVE_STRING(io.trino.plugin.hive.HiveType.HIVE_STRING) ThriftMetastoreStats(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreStats) TimeUnit(java.util.concurrent.TimeUnit) HiveMetastoreClosure(io.trino.plugin.hive.HiveMetastoreClosure) Executors.newCachedThreadPool(java.util.concurrent.Executors.newCachedThreadPool) ThriftMetastoreConfig(io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig) MetastoreUtil.computePartitionKeyFilter(io.trino.plugin.hive.metastore.MetastoreUtil.computePartitionKeyFilter) CachingHiveMetastore.cachingHiveMetastore(io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.cachingHiveMetastore) Assert.assertTrue(org.testng.Assert.assertTrue) TEST_DATABASE(io.trino.plugin.hive.metastore.thrift.MockThriftMetastoreClient.TEST_DATABASE) HiveConfig(io.trino.plugin.hive.HiveConfig) SECONDS(java.util.concurrent.TimeUnit.SECONDS) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) ArrayList(java.util.ArrayList) Column(io.trino.plugin.hive.metastore.Column) HiveColumnHandle.createBaseColumn(io.trino.plugin.hive.HiveColumnHandle.createBaseColumn) List(java.util.List) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) UnimplementedHiveMetastore(io.trino.plugin.hive.metastore.UnimplementedHiveMetastore) Partition(io.trino.plugin.hive.metastore.Partition) Table(io.trino.plugin.hive.metastore.Table) Optional(java.util.Optional) ThriftHiveMetastore(io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastore) UnimplementedHiveMetastore(io.trino.plugin.hive.metastore.UnimplementedHiveMetastore) HiveMetastore(io.trino.plugin.hive.metastore.HiveMetastore) BridgingHiveMetastore(io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore) CachingHiveMetastore.cachingHiveMetastore(io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.cachingHiveMetastore) Duration(io.airlift.units.Duration) CountDownLatch(java.util.concurrent.CountDownLatch) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) ExecutorService(java.util.concurrent.ExecutorService) 
CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) Test(org.testng.annotations.Test)

Aggregations

TEXTFILE (io.trino.plugin.hive.HiveStorageFormat.TEXTFILE): 3
HiveType.toHiveType (io.trino.plugin.hive.HiveType.toHiveType): 3
Column (io.trino.plugin.hive.metastore.Column): 3
StorageFormat.fromHiveStorageFormat (io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat): 3
Table (io.trino.plugin.hive.metastore.Table): 3
List (java.util.List): 3
Map (java.util.Map): 3
Optional (java.util.Optional): 3
Preconditions.checkState (com.google.common.base.Preconditions.checkState): 2
Verify.verify (com.google.common.base.Verify.verify): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 2
ImmutableMap (com.google.common.collect.ImmutableMap): 2
MoreExecutors.directExecutor (com.google.common.util.concurrent.MoreExecutors.directExecutor): 2
Threads.daemonThreadsNamed (io.airlift.concurrent.Threads.daemonThreadsNamed): 2
JsonCodec (io.airlift.json.JsonCodec): 2
Logger (io.airlift.log.Logger): 2
Slices.utf8Slice (io.airlift.slice.Slices.utf8Slice): 2
Duration (io.airlift.units.Duration): 2
CatalogName (io.trino.plugin.base.CatalogName): 2