Example 1 with NoHdfsAuthentication

Use of io.prestosql.plugin.hive.authentication.NoHdfsAuthentication in project hetu-core by openlookeng.

From class TestHiveWriterFactory, method testSortingPath:

@Test
public void testSortingPath() {
    setUp();
    String targetPath = "/tmp";
    String writePath = "/tmp/table";
    Optional<WriteIdInfo> writeIdInfo = Optional.of(new WriteIdInfo(1, 1, 0));
    StorageFormat storageFormat = StorageFormat.fromHiveStorageFormat(ORC);
    Storage storage = new Storage(storageFormat, "", Optional.empty(), false, ImmutableMap.of());
    Table table = new Table("schema", "table", "user", "MANAGED_TABLE", storage, ImmutableList.of(new Column("col_1", HiveType.HIVE_INT, Optional.empty())), ImmutableList.of(), ImmutableMap.of("transactional", "true"), Optional.of("original"), Optional.of("expanded"));
    HiveConfig hiveConfig = getHiveConfig();
    HivePageSinkMetadata hivePageSinkMetadata = new HivePageSinkMetadata(new SchemaTableName("schema", "table"), Optional.of(table), ImmutableMap.of());
    PageSorter pageSorter = new PagesIndexPageSorter(new PagesIndex.TestingFactory(false));
    Metadata metadata = createTestMetadataManager();
    TypeManager typeManager = new InternalTypeManager(metadata.getFunctionAndTypeManager());
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());
    LocationService locationService = new HiveLocationService(hdfsEnvironment);
    ConnectorSession session = newSession();
    HiveWriterFactory hiveWriterFactory = new HiveWriterFactory(
            getDefaultHiveFileWriterFactories(hiveConfig),
            "schema", "table", false, HiveACIDWriteType.DELETE,
            ImmutableList.of(new HiveColumnHandle("col_1", HiveType.HIVE_INT,
                    new TypeSignature("integer", ImmutableList.of()), 0,
                    HiveColumnHandle.ColumnType.REGULAR, Optional.empty())),
            ORC, ORC, ImmutableMap.of(), OptionalInt.empty(), ImmutableList.of(),
            new LocationHandle(targetPath, writePath, false,
                    LocationHandle.WriteMode.STAGE_AND_MOVE_TO_TARGET_DIRECTORY, writeIdInfo),
            locationService, session.getQueryId(),
            new HivePageSinkMetadataProvider(hivePageSinkMetadata,
                    CachingHiveMetastore.memoizeMetastore(metastore, 1000), new HiveIdentity(session)),
            typeManager, hdfsEnvironment, pageSorter,
            hiveConfig.getWriterSortBufferSize(), hiveConfig.getMaxOpenSortFiles(),
            false, UTC, session, new TestingNodeManager("fake-environment"), new HiveEventClient(),
            new HiveSessionProperties(hiveConfig, new OrcFileWriterConfig(), new ParquetFileWriterConfig()),
            new HiveWriterStats(), getDefaultOrcFileWriterFactory(hiveConfig));
    HiveWriter hiveWriter = hiveWriterFactory.createWriter(ImmutableList.of(), OptionalInt.empty(), Optional.empty());
    assertEquals(((SortingFileWriter) hiveWriter.getFileWriter()).getTempFilePrefix().getName(), ".tmp-sort.bucket_00000");
}
Also used : HivePageSinkMetadataProvider(io.prestosql.plugin.hive.metastore.HivePageSinkMetadataProvider) HivePageSinkMetadata(io.prestosql.plugin.hive.metastore.HivePageSinkMetadata) Metadata(io.prestosql.metadata.Metadata) StorageFormat(io.prestosql.plugin.hive.metastore.StorageFormat) PagesIndex(io.prestosql.operator.PagesIndex) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) PagesIndexPageSorter(io.prestosql.PagesIndexPageSorter) TypeSignature(io.prestosql.spi.type.TypeSignature) Column(io.prestosql.plugin.hive.metastore.Column) TestingNodeManager(io.prestosql.testing.TestingNodeManager) PageSorter(io.prestosql.spi.PageSorter) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) TestingConnectorSession(io.prestosql.testing.TestingConnectorSession) InternalTypeManager(io.prestosql.type.InternalTypeManager) Table(io.prestosql.plugin.hive.metastore.Table) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) Storage(io.prestosql.plugin.hive.metastore.Storage) TypeManager(io.prestosql.spi.type.TypeManager) Test(org.testng.annotations.Test)

Example 2 with NoHdfsAuthentication

Use of io.prestosql.plugin.hive.authentication.NoHdfsAuthentication in project hetu-core by openlookeng.

From class HiveQueryRunner, method createQueryRunner:

public static DistributedQueryRunner createQueryRunner(Iterable<TpchTable<?>> tables, Map<String, String> extraProperties, String security, Map<String, String> extraHiveProperties, Optional<Path> baseDataDir, boolean hasStateStore, String jdbcUrl) throws Exception {
    assertEquals(DateTimeZone.getDefault(), TIME_ZONE, "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
    setupLogging();
    DistributedQueryRunner queryRunner = null;
    if (hasStateStore) {
        queryRunner = DistributedQueryRunner.builder(createSession(Optional.of(new SelectedRole(ROLE, Optional.of("admin")))))
                .setNodeCount(4)
                .setExtraProperties(extraProperties)
                .setBaseDataDir(baseDataDir)
                .buildWithStateStore();
    } else {
        Map<String, String> configProperties = new HashMap<>();
        configProperties.put("auto-vacuum.enabled", "true");
        configProperties.put("optimizer.cte-reuse-enabled", "true");
        configProperties.put("auto-vacuum.scan.interval", "15s");
        configProperties.put("hetu.split-cache-map.enabled", "true");
        queryRunner = DistributedQueryRunner.builder(createSession(Optional.of(new SelectedRole(ROLE, Optional.of("admin")))))
                .setNodeCount(4)
                .setCoordinatorProperties(configProperties)
                .setExtraProperties(extraProperties)
                .setBaseDataDir(baseDataDir)
                .build();
    }
    try {
        if (jdbcUrl != null && !jdbcUrl.isEmpty()) {
            File directory = new File("");
            String courseFile = directory.getCanonicalPath();
            System.setProperty("config", courseFile + "/etc/");
            String configDir = System.getProperty("config");
            String hetumetastoreConfig = configDir + "hetu-metastore.properties";
            File file = new File(configDir);
            if (!file.exists()) {
                file.mkdirs();
            }
            File file2 = new File(configDir, "hetu-metastore.properties");
            if (!file2.exists()) {
                try {
                    file2.createNewFile();
                } catch (IOException e) {
                    log.info(e.getMessage());
                }
            }
            try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(hetumetastoreConfig))) {
                bufferedWriter.write("hetu.metastore.db.url = " + jdbcUrl);
                bufferedWriter.write("\n");
                bufferedWriter.write("hetu.metastore.type = jdbc\n");
                bufferedWriter.write("hetu.metastore.db.user = user\n");
                bufferedWriter.write("hetu.metastore.db.password = testpass\n");
                bufferedWriter.write("hetu.metastore.cache.ttl = 0s");
            }
            queryRunner.installPlugin(new HetuMetastorePlugin());
            queryRunner.getCoordinator().loadMetastore();
            queryRunner.installPlugin(new StarTreePlugin());
        }
        queryRunner.installPlugin(new TpchPlugin());
        queryRunner.createCatalog("tpch", "tpch");
        File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("hive_data").toFile();
        HiveConfig hiveConfig = new HiveConfig();
        HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
        HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());
        FileHiveMetastore metastore = new FileHiveMetastore(hdfsEnvironment, baseDir.toURI().toString(), "test");
        queryRunner.installPlugin(new HivePlugin(HIVE_CATALOG, Optional.of(metastore)));
        Map<String, String> hiveProperties = ImmutableMap.<String, String>builder()
                .putAll(extraHiveProperties)
                .put("hive.rcfile.time-zone", TIME_ZONE.getID())
                .put("hive.parquet.time-zone", TIME_ZONE.getID())
                .put("hive.security", security)
                .put("hive.max-partitions-per-scan", "1000")
                .put("hive.assume-canonical-partition-keys", "true")
                .build();
        Map<String, String> hiveBucketedProperties = ImmutableMap.<String, String>builder()
                .putAll(hiveProperties)
                // small split sizes so that each bucket has multiple splits
                .put("hive.max-initial-split-size", "10kB")
                .put("hive.max-split-size", "10kB")
                // TEXTFILE so that there's no minimum split size for the file
                .put("hive.storage-format", "TEXTFILE")
                // no compression so that the file is splittable
                .put("hive.compression-codec", "NONE")
                .build();
        Map<String, String> hiveAutoVacuumProperties = ImmutableMap.<String, String>builder()
                .putAll(hiveProperties)
                .put("hive.auto-vacuum-enabled", "true")
                .put("hive.vacuum-collector-interval", "15s")
                .build();
        queryRunner.createCatalog(HIVE_AUTO_VACUUM_CATALOG, HIVE_CATALOG, hiveAutoVacuumProperties);
        queryRunner.createCatalog(HIVE_CATALOG, HIVE_CATALOG, hiveProperties);
        queryRunner.createCatalog(HIVE_BUCKETED_CATALOG, HIVE_CATALOG, hiveBucketedProperties);
        HiveIdentity identity = new HiveIdentity(SESSION);
        if (!metastore.getDatabase(TPCH_SCHEMA).isPresent()) {
            metastore.createDatabase(identity, createDatabaseMetastoreObject(TPCH_SCHEMA));
            copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, createSession(Optional.empty()), tables);
        }
        if (!metastore.getDatabase(TPCH_BUCKETED_SCHEMA).isPresent()) {
            metastore.createDatabase(identity, createDatabaseMetastoreObject(TPCH_BUCKETED_SCHEMA));
            copyTpchTablesBucketed(queryRunner, "tpch", TINY_SCHEMA_NAME, createBucketedSession(Optional.empty()), tables);
        }
        return queryRunner;
    } catch (Exception e) {
        queryRunner.close();
        throw e;
    }
}
Also used : DistributedQueryRunner(io.prestosql.tests.DistributedQueryRunner) HashMap(java.util.HashMap) SelectedRole(io.prestosql.spi.security.SelectedRole) TpchPlugin(io.prestosql.plugin.tpch.TpchPlugin) FileWriter(java.io.FileWriter) IOException(java.io.IOException) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) BufferedWriter(java.io.BufferedWriter) FileHiveMetastore(io.prestosql.plugin.hive.metastore.file.FileHiveMetastore) HetuMetastorePlugin(io.hetu.core.metastore.HetuMetastorePlugin) StarTreePlugin(io.hetu.core.cube.startree.StarTreePlugin) File(java.io.File)
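A hypothetical invocation sketch for this factory; only the method signature comes from the example above, and every argument value here is illustrative:

    DistributedQueryRunner queryRunner = HiveQueryRunner.createQueryRunner(
            TpchTable.getTables(),    // all TPC-H tiny tables (io.airlift.tpch)
            ImmutableMap.of(),        // no extra engine properties
            "sql-standard",           // hive.security mode (assumed valid value)
            ImmutableMap.of(),        // no extra hive properties
            Optional.empty(),         // default base data directory
            false,                    // no state store
            null);                    // no JDBC URL, so the hetu-metastore.properties branch is skipped

With jdbcUrl left null, the runner falls through to the FileHiveMetastore wiring shown in the method body, which is where NoHdfsAuthentication comes in.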

Example 3 with NoHdfsAuthentication

Use of io.prestosql.plugin.hive.authentication.NoHdfsAuthentication in project hetu-core by openlookeng.

From class FileHiveMetastore, method createTestingFileHiveMetastore:

public static FileHiveMetastore createTestingFileHiveMetastore(File catalogDirectory) {
    HiveConfig hiveConfig = new HiveConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
    HdfsEnvironment localHdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());
    return new FileHiveMetastore(localHdfsEnvironment, catalogDirectory.toURI().toString(), "test");
}
Also used : HdfsConfigurationInitializer(io.prestosql.plugin.hive.HdfsConfigurationInitializer) HiveHdfsConfiguration(io.prestosql.plugin.hive.HiveHdfsConfiguration) HdfsConfiguration(io.prestosql.plugin.hive.HdfsConfiguration) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) HiveConfig(io.prestosql.plugin.hive.HiveConfig) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment)
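A short usage sketch: the helper only needs a catalog directory, so a throwaway temp directory is enough for tests. The temp-directory handling below is illustrative, not part of the example:

    // Illustrative only: build the testing metastore against a temp directory.
    File catalogDir = java.nio.file.Files.createTempDirectory("file-metastore").toFile();
    catalogDir.deleteOnExit();
    FileHiveMetastore metastore = createTestingFileHiveMetastore(catalogDir);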

Example 4 with NoHdfsAuthentication

Use of io.prestosql.plugin.hive.authentication.NoHdfsAuthentication in project boostkit-bigdata by kunpengcompute.

From class FileHiveMetastore, method createTestingFileHiveMetastore (identical to Example 3):

public static FileHiveMetastore createTestingFileHiveMetastore(File catalogDirectory) {
    HiveConfig hiveConfig = new HiveConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
    HdfsEnvironment localHdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());
    return new FileHiveMetastore(localHdfsEnvironment, catalogDirectory.toURI().toString(), "test");
}
Also used : HdfsConfigurationInitializer(io.prestosql.plugin.hive.HdfsConfigurationInitializer) HiveHdfsConfiguration(io.prestosql.plugin.hive.HiveHdfsConfiguration) HdfsConfiguration(io.prestosql.plugin.hive.HdfsConfiguration) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) HiveConfig(io.prestosql.plugin.hive.HiveConfig) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment)

Example 5 with NoHdfsAuthentication

Use of io.prestosql.plugin.hive.authentication.NoHdfsAuthentication in project boostkit-bigdata by kunpengcompute.

From class AbstractTestHiveFileSystem, method setup:

protected void setup(String host, int port, String databaseName, Function<HiveConfig, HdfsConfiguration> hdfsConfigurationProvider, boolean s3SelectPushdownEnabled) {
    database = databaseName;
    table = new SchemaTableName(database, "presto_test_external_fs");
    String random = UUID.randomUUID().toString().toLowerCase(ENGLISH).replace("-", "");
    temporaryCreateTable = new SchemaTableName(database, "tmp_presto_test_create_" + random);
    config = new HiveConfig().setS3SelectPushdownEnabled(s3SelectPushdownEnabled);
    String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
    if (proxy != null) {
        config.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
    }
    MetastoreLocator metastoreLocator = new TestingMetastoreLocator(config, host, port);
    ExecutorService executors = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
    ExecutorService executorRefresh = newCachedThreadPool(daemonThreadsNamed("hive-refresh-%s"));
    HivePartitionManager hivePartitionManager = new HivePartitionManager(TYPE_MANAGER, config);
    HdfsConfiguration hdfsConfiguration = hdfsConfigurationProvider.apply(config);
    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, config, new NoHdfsAuthentication());
    metastoreClient = new TestingHiveMetastore(new BridgingHiveMetastore(new ThriftHiveMetastore(metastoreLocator, new ThriftHiveMetastoreConfig())), executors, executorRefresh, config, getBasePath(), hdfsEnvironment);
    locationService = new HiveLocationService(hdfsEnvironment);
    JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
    metadataFactory = new HiveMetadataFactory(
            config, metastoreClient, hdfsEnvironment, hivePartitionManager,
            newDirectExecutorService(), vacuumExecutorService, heartbeatService, vacuumExecutorService,
            TYPE_MANAGER, locationService, partitionUpdateCodec, new HiveTypeTranslator(),
            new NodeVersion("test_version"), SqlStandardAccessControlMetadata::new);
    transactionManager = new HiveTransactionManager();
    splitManager = new HiveSplitManager(
            transactionHandle -> ((HiveMetadata) transactionManager.get(transactionHandle)).getMetastore(),
            hivePartitionManager, new NamenodeStats(), hdfsEnvironment,
            new CachingDirectoryLister(new HiveConfig()),
            new BoundedExecutor(executors, config.getMaxSplitIteratorThreads()),
            new HiveCoercionPolicy(TYPE_MANAGER), new CounterStat(),
            config.getMaxOutstandingSplits(), config.getMaxOutstandingSplitsSize(),
            config.getMinPartitionBatchSize(), config.getMaxPartitionBatchSize(),
            config.getMaxInitialSplits(), config.getSplitLoaderConcurrency(),
            config.getMaxSplitsPerSecond(), config.getRecursiveDirWalkerEnabled(), null, config);
    pageSinkProvider = new HivePageSinkProvider(
            getDefaultHiveFileWriterFactories(config), hdfsEnvironment, PAGE_SORTER, metastoreClient,
            new GroupByHashPageIndexerFactory(new JoinCompiler(createTestMetadataManager())),
            TYPE_MANAGER, config, locationService, partitionUpdateCodec,
            new TestingNodeManager("fake-environment"), new HiveEventClient(),
            new HiveSessionProperties(config, new OrcFileWriterConfig(), new ParquetFileWriterConfig()),
            new HiveWriterStats(), getDefaultOrcFileWriterFactory(config));
    pageSourceProvider = new HivePageSourceProvider(
            config, hdfsEnvironment, getDefaultHiveRecordCursorProvider(config),
            getDefaultHiveDataStreamFactories(config), TYPE_MANAGER, getNoOpIndexCache(),
            getDefaultHiveSelectiveFactories(config));
}
Also used : ConnectorMetadata(io.prestosql.spi.connector.ConnectorMetadata) HiveTestUtils.getDefaultHiveSelectiveFactories(io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveSelectiveFactories) FileSystem(org.apache.hadoop.fs.FileSystem) HdfsContext(io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext) ConnectorSplitManager(io.prestosql.spi.connector.ConnectorSplitManager) NoHdfsAuthentication(io.prestosql.plugin.hive.authentication.NoHdfsAuthentication) ConnectorPageSink(io.prestosql.spi.connector.ConnectorPageSink) Test(org.testng.annotations.Test) MaterializedResult.materializeSourceDataStream(io.prestosql.testing.MaterializedResult.materializeSourceDataStream) MaterializedResult(io.prestosql.testing.MaterializedResult) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) BoundedExecutor(io.airlift.concurrent.BoundedExecutor) Executors.newScheduledThreadPool(java.util.concurrent.Executors.newScheduledThreadPool) Map(java.util.Map) Path(org.apache.hadoop.fs.Path) ConnectorPageSinkProvider(io.prestosql.spi.connector.ConnectorPageSinkProvider) BIGINT(io.prestosql.spi.type.BigintType.BIGINT) ENGLISH(java.util.Locale.ENGLISH) Assert.assertFalse(org.testng.Assert.assertFalse) Function(com.google.common.base.Function) CachingHiveMetastore(io.prestosql.plugin.hive.metastore.CachingHiveMetastore) UNGROUPED_SCHEDULING(io.prestosql.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING) ImmutableMap(com.google.common.collect.ImmutableMap) MetadataManager.createTestMetadataManager(io.prestosql.metadata.MetadataManager.createTestMetadataManager) BeforeClass(org.testng.annotations.BeforeClass) Collection(java.util.Collection) SqlStandardAccessControlMetadata(io.prestosql.plugin.hive.security.SqlStandardAccessControlMetadata) ConnectorSplitSource(io.prestosql.spi.connector.ConnectorSplitSource) UUID(java.util.UUID) HiveTestUtils.getDefaultHiveFileWriterFactories(io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveFileWriterFactories) UncheckedIOException(java.io.UncheckedIOException) List(java.util.List) ConnectorPageSource(io.prestosql.spi.connector.ConnectorPageSource) Table(io.prestosql.plugin.hive.metastore.Table) HiveTestUtils.getTypes(io.prestosql.plugin.hive.HiveTestUtils.getTypes) Optional(java.util.Optional) AbstractTestHive.getAllSplits(io.prestosql.plugin.hive.AbstractTestHive.getAllSplits) TestingNodeManager(io.prestosql.testing.TestingNodeManager) AbstractTestHive.filterNonHiddenColumnHandles(io.prestosql.plugin.hive.AbstractTestHive.filterNonHiddenColumnHandles) JsonCodec(io.airlift.json.JsonCodec) Database(io.prestosql.plugin.hive.metastore.Database) Slice(io.airlift.slice.Slice) TYPE_MANAGER(io.prestosql.plugin.hive.HiveTestUtils.TYPE_MANAGER) ConnectorSplit(io.prestosql.spi.connector.ConnectorSplit) MetastoreLocator(io.prestosql.plugin.hive.metastore.thrift.MetastoreLocator) MoreExecutors.newDirectExecutorService(com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService) HiveTransaction(io.prestosql.plugin.hive.AbstractTestHive.HiveTransaction) ThriftHiveMetastoreConfig(io.prestosql.plugin.hive.metastore.thrift.ThriftHiveMetastoreConfig) AbstractTestHive.createTableProperties(io.prestosql.plugin.hive.AbstractTestHive.createTableProperties) Assert.assertEquals(org.testng.Assert.assertEquals) CounterStat(io.airlift.stats.CounterStat) AbstractTestHive.filterNonHiddenColumnMetadata(io.prestosql.plugin.hive.AbstractTestHive.filterNonHiddenColumnMetadata) GroupByHashPageIndexerFactory(io.prestosql.GroupByHashPageIndexerFactory) BridgingHiveMetastore(io.prestosql.plugin.hive.metastore.thrift.BridgingHiveMetastore) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) ImmutableList(com.google.common.collect.ImmutableList) TestingMetastoreLocator(io.prestosql.plugin.hive.metastore.thrift.TestingMetastoreLocator) Threads.daemonThreadsNamed(io.airlift.concurrent.Threads.daemonThreadsNamed) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) ThriftHiveMetastore(io.prestosql.plugin.hive.metastore.thrift.ThriftHiveMetastore) ImmutableMultimap(com.google.common.collect.ImmutableMultimap) HiveMetastore(io.prestosql.plugin.hive.metastore.HiveMetastore) ExecutorService(java.util.concurrent.ExecutorService) ConnectorOutputTableHandle(io.prestosql.spi.connector.ConnectorOutputTableHandle) AfterClass(org.testng.annotations.AfterClass) HiveTestUtils.getNoOpIndexCache(io.prestosql.plugin.hive.HiveTestUtils.getNoOpIndexCache) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) Executor(java.util.concurrent.Executor) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) HiveTestUtils.getDefaultHiveRecordCursorProvider(io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveRecordCursorProvider) ConnectorIdentity(io.prestosql.spi.security.ConnectorIdentity) IOException(java.io.IOException) Iterables.getOnlyElement(com.google.common.collect.Iterables.getOnlyElement) HiveTestUtils.getDefaultHiveDataStreamFactories(io.prestosql.plugin.hive.HiveTestUtils.getDefaultHiveDataStreamFactories) MoreFutures.getFutureValue(io.airlift.concurrent.MoreFutures.getFutureValue) PAGE_SORTER(io.prestosql.plugin.hive.HiveTestUtils.PAGE_SORTER) HostAndPort(com.google.common.net.HostAndPort) MaterializedRow(io.prestosql.testing.MaterializedRow) PrincipalPrivileges(io.prestosql.plugin.hive.metastore.PrincipalPrivileges) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) Assertions.assertEqualsIgnoreOrder(io.airlift.testing.Assertions.assertEqualsIgnoreOrder) Transaction(io.prestosql.plugin.hive.AbstractTestHive.Transaction) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) Executors.newCachedThreadPool(java.util.concurrent.Executors.newCachedThreadPool) JoinCompiler(io.prestosql.sql.gen.JoinCompiler) Assert.assertTrue(org.testng.Assert.assertTrue) TestingConnectorSession(io.prestosql.testing.TestingConnectorSession) HiveTestUtils.getDefaultOrcFileWriterFactory(io.prestosql.plugin.hive.HiveTestUtils.getDefaultOrcFileWriterFactory) ConnectorPageSourceProvider(io.prestosql.spi.connector.ConnectorPageSourceProvider)
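Because setup(...) is protected on an abstract test base, a concrete suite supplies the metastore endpoint and an HdfsConfiguration provider. A hypothetical subclass sketch; host, port, and database are placeholders, and any other abstract members of the base class are omitted:

    public class TestHiveFileSystemExample extends AbstractTestHiveFileSystem {
        @BeforeClass
        public void initialize() {
            setup(
                    "localhost",  // metastore thrift host (placeholder)
                    9083,         // metastore thrift port (placeholder)
                    "default",    // database name (placeholder)
                    hiveConfig -> new HiveHdfsConfiguration(
                            new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of()),
                    false);       // s3SelectPushdownEnabled
        }
    }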

Aggregations

NoHdfsAuthentication (io.prestosql.plugin.hive.authentication.NoHdfsAuthentication): 17 usages
HdfsConfigurationInitializer (io.prestosql.plugin.hive.HdfsConfigurationInitializer): 7 usages
HdfsEnvironment (io.prestosql.plugin.hive.HdfsEnvironment): 7 usages
HiveConfig (io.prestosql.plugin.hive.HiveConfig): 7 usages
HiveHdfsConfiguration (io.prestosql.plugin.hive.HiveHdfsConfiguration): 7 usages
HiveIdentity (io.prestosql.plugin.hive.authentication.HiveIdentity): 7 usages
FileHiveMetastore (io.prestosql.plugin.hive.metastore.file.FileHiveMetastore): 7 usages
File (java.io.File): 7 usages
HdfsConfiguration (io.prestosql.plugin.hive.HdfsConfiguration): 5 usages
Table (io.prestosql.plugin.hive.metastore.Table): 4 usages
ConnectorSession (io.prestosql.spi.connector.ConnectorSession): 4 usages
SchemaTableName (io.prestosql.spi.connector.SchemaTableName): 4 usages
TestingConnectorSession (io.prestosql.testing.TestingConnectorSession): 4 usages
TestingNodeManager (io.prestosql.testing.TestingNodeManager): 4 usages
BeforeClass (org.testng.annotations.BeforeClass): 4 usages
Test (org.testng.annotations.Test): 4 usages
IOException (java.io.IOException): 3 usages
Function (com.google.common.base.Function): 2 usages
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 2 usages
ImmutableList (com.google.common.collect.ImmutableList): 2 usages
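Every example above wires NoHdfsAuthentication the same way: build a HiveConfig, wrap it in a HiveHdfsConfiguration, and pass both to an HdfsEnvironment so that no HDFS authentication is performed. A minimal sketch of that recurring pattern, using only the classes listed in the aggregation:

    // The recurring test wiring: an HdfsEnvironment that skips HDFS
    // authentication, suitable for local, file-based metastores in tests.
    HiveConfig hiveConfig = new HiveConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(
            new HdfsConfigurationInitializer(hiveConfig), ImmutableSet.of());
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(
            hdfsConfiguration, hiveConfig, new NoHdfsAuthentication());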