Example 1 with NoHdfsAuthentication

Use of io.trino.plugin.hive.authentication.NoHdfsAuthentication in project trino by trinodb.

From the class TestHiveAlluxioMetastore, method setup:

@Parameters({ "hive.hadoop2.alluxio.host", "hive.hadoop2.alluxio.port", "hive.hadoop2.hiveVersionMajor", "hive.hadoop2.timeZone" })
@BeforeClass
public void setup(String host, String port, int hiveVersionMajor, String timeZone) {
    checkArgument(hiveVersionMajor > 0, "Invalid hiveVersionMajor: %s", hiveVersionMajor);
    timeZone = hiveVersionMajor >= 3 ? "UTC" : timeZone;
    this.alluxioAddress = host + ":" + port;
    this.hiveVersionMajor = hiveVersionMajor;
    // point the Alluxio client at the test master and set the login user for the connection
    System.setProperty(PropertyKey.Name.SECURITY_LOGIN_USERNAME, "presto");
    System.setProperty(PropertyKey.Name.MASTER_HOSTNAME, host);
    HiveConfig hiveConfig = new HiveConfig().setParquetTimeZone(timeZone).setRcfileTimeZone(timeZone);
    AlluxioHiveMetastoreConfig alluxioConfig = new AlluxioHiveMetastoreConfig();
    alluxioConfig.setMasterAddress(this.alluxioAddress);
    TableMasterClient client = AlluxioMetastoreModule.createCatalogMasterClient(alluxioConfig);
    hdfsEnvironment = new HdfsEnvironment(createTestHdfsConfiguration(), new HdfsConfig(), new NoHdfsAuthentication());
    setup(SCHEMA, hiveConfig, new AlluxioHiveMetastore(client, new MetastoreConfig()), hdfsEnvironment);
}
Also used: MetastoreConfig (io.trino.plugin.hive.metastore.MetastoreConfig), AlluxioHiveMetastoreConfig (io.trino.plugin.hive.metastore.alluxio.AlluxioHiveMetastoreConfig), TableMasterClient (alluxio.client.table.TableMasterClient), AlluxioHiveMetastore (io.trino.plugin.hive.metastore.alluxio.AlluxioHiveMetastore), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), BeforeClass (org.testng.annotations.BeforeClass), Parameters (org.testng.annotations.Parameters)
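A quick way to sanity-check this wiring before running the full suite is to list databases through the freshly constructed metastore. A minimal sketch, assuming the HiveMetastore interface exposes getAllDatabases() in this Trino version; the variable names and the check itself are illustrative and not part of TestHiveAlluxioMetastore:

// Hypothetical sanity check: confirm the Alluxio catalog master is reachable
// and that the test schema is visible through the AlluxioHiveMetastore.
AlluxioHiveMetastore alluxioMetastore = new AlluxioHiveMetastore(client, new MetastoreConfig());
List<String> databases = alluxioMetastore.getAllDatabases(); // assumes HiveMetastore#getAllDatabases()
checkArgument(databases.contains(SCHEMA), "schema %s not visible in Alluxio metastore", SCHEMA);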

Example 2 with NoHdfsAuthentication

Use of io.trino.plugin.hive.authentication.NoHdfsAuthentication in project trino by trinodb.

From the class AbstractTestHive, method setup:

protected final void setup(String host, int port, String databaseName, String timeZone) {
    HiveConfig hiveConfig = getHiveConfig().setParquetTimeZone(timeZone).setRcfileTimeZone(timeZone);
    Optional<HostAndPort> proxy = Optional.ofNullable(System.getProperty("hive.metastore.thrift.client.socks-proxy")).map(HostAndPort::fromString);
    MetastoreLocator metastoreLocator = new TestingMetastoreLocator(proxy, HostAndPort.fromParts(host, port));
    hdfsEnvironment = new HdfsEnvironment(createTestHdfsConfiguration(), new HdfsConfig(), new NoHdfsAuthentication());
    HiveMetastore metastore = cachingHiveMetastore(
            new BridgingHiveMetastore(
                    new ThriftHiveMetastore(metastoreLocator, hiveConfig, new MetastoreConfig(), new ThriftMetastoreConfig(), hdfsEnvironment, false),
                    new HiveIdentity(SESSION.getIdentity())),
            executor,
            new Duration(1, MINUTES),
            Optional.of(new Duration(15, SECONDS)),
            10000);
    setup(databaseName, hiveConfig, metastore, hdfsEnvironment);
}
Also used: ThriftMetastoreConfig (io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig), MetastoreConfig (io.trino.plugin.hive.metastore.MetastoreConfig), TestingMetastoreLocator (io.trino.plugin.hive.metastore.thrift.TestingMetastoreLocator), HiveMetastore (io.trino.plugin.hive.metastore.HiveMetastore), BridgingHiveMetastore (io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore), CachingHiveMetastore.cachingHiveMetastore (io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.cachingHiveMetastore), ThriftHiveMetastore (io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastore), SemiTransactionalHiveMetastore (io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore), MetastoreLocator (io.trino.plugin.hive.metastore.thrift.MetastoreLocator), Duration (io.airlift.units.Duration), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), HiveIdentity (io.trino.plugin.hive.authentication.HiveIdentity), HostAndPort (com.google.common.net.HostAndPort)
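The cachingHiveMetastore call above packs everything into one expression. A hedged restatement with the arguments pulled into named locals, assuming the (delegate, executor, cache TTL, refresh interval, maximum size) parameter order shown above; bridgingMetastore stands for the BridgingHiveMetastore built in the original call, and the local names are illustrative only:

// Hypothetical restatement, not part of AbstractTestHive: same arguments, named for readability.
Duration cacheTtl = new Duration(1, MINUTES);                                 // entries expire after one minute
Optional<Duration> refreshInterval = Optional.of(new Duration(15, SECONDS));  // background refresh cadence (assumed meaning)
long maximumSize = 10_000;                                                    // upper bound on cached entries (assumed meaning)
HiveMetastore metastore = cachingHiveMetastore(bridgingMetastore, executor, cacheTtl, refreshInterval, maximumSize);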

Example 3 with NoHdfsAuthentication

Use of io.trino.plugin.hive.authentication.NoHdfsAuthentication in project trino by trinodb.

From the class AbstractTestHive, method testNewDirectoryPermissions:

@Test
public void testNewDirectoryPermissions() throws Exception {
    SchemaTableName tableName = temporaryTable("empty_file");
    List<Column> columns = ImmutableList.of(new Column("test", HIVE_STRING, Optional.empty()));
    createEmptyTable(tableName, ORC, columns, ImmutableList.of(), Optional.empty());
    try {
        Transaction transaction = newTransaction();
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        metadata.beginQuery(session);
        Table table = transaction.getMetastore().getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow();
        // create new directory and set directory permission after creation
        HdfsContext context = new HdfsContext(session);
        Path location = new Path(table.getStorage().getLocation());
        Path defaultPath = new Path(location + "/defaultperms");
        createDirectory(context, hdfsEnvironment, defaultPath);
        FileStatus defaultFsStatus = hdfsEnvironment.getFileSystem(context, defaultPath).getFileStatus(defaultPath);
        assertEquals(defaultFsStatus.getPermission().toOctal(), 777);
        // use hdfs config that skips setting directory permissions after creation
        HdfsConfig configWithSkip = new HdfsConfig();
        configWithSkip.setNewDirectoryPermissions(HdfsConfig.SKIP_DIR_PERMISSIONS);
        HdfsEnvironment hdfsEnvironmentWithSkip = new HdfsEnvironment(createTestHdfsConfiguration(), configWithSkip, new NoHdfsAuthentication());
        Path skipPath = new Path(location + "/skipperms");
        createDirectory(context, hdfsEnvironmentWithSkip, skipPath);
        FileStatus skipFsStatus = hdfsEnvironmentWithSkip.getFileSystem(context, skipPath).getFileStatus(skipPath);
        assertEquals(skipFsStatus.getPermission().toOctal(), 755);
    } finally {
        dropTable(tableName);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HiveSessionProperties.getTemporaryStagingDirectoryPath (io.trino.plugin.hive.HiveSessionProperties.getTemporaryStagingDirectoryPath), Table (io.trino.plugin.hive.metastore.Table), FileStatus (org.apache.hadoop.fs.FileStatus), CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName), SchemaTableName (io.trino.spi.connector.SchemaTableName), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext), Test (org.testng.annotations.Test)
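If a test wants to remove the two extra directories itself before dropping the table, the same HdfsEnvironment handles can do it through the standard Hadoop FileSystem API. A minimal sketch; the explicit deletes are illustrative and not part of testNewDirectoryPermissions:

// Hypothetical cleanup using org.apache.hadoop.fs.FileSystem#delete(Path, boolean).
hdfsEnvironment.getFileSystem(context, defaultPath).delete(defaultPath, true);
hdfsEnvironmentWithSkip.getFileSystem(context, skipPath).delete(skipPath, true);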

Example 4 with NoHdfsAuthentication

Use of io.trino.plugin.hive.authentication.NoHdfsAuthentication in project trino by trinodb.

From the class BaseTestHiveOnDataLake, method createQueryRunner:

@Override
protected QueryRunner createQueryRunner() throws Exception {
    this.bucketName = "test-hive-insert-overwrite-" + randomTableSuffix();
    this.dockerizedS3DataLake = closeAfterClass(new HiveMinioDataLake(bucketName, ImmutableMap.of(), hiveHadoopImage));
    this.dockerizedS3DataLake.start();
    this.metastoreClient = new BridgingHiveMetastore(
            new ThriftHiveMetastore(
                    new TestingMetastoreLocator(Optional.empty(), this.dockerizedS3DataLake.getHiveHadoop().getHiveMetastoreEndpoint()),
                    new HiveConfig(),
                    new MetastoreConfig(),
                    new ThriftMetastoreConfig(),
                    new HdfsEnvironment(
                            new HiveHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of()), ImmutableSet.of()),
                            new HdfsConfig(),
                            new NoHdfsAuthentication()),
                    false),
            HiveIdentity.none());
    return S3HiveQueryRunner.create(
            dockerizedS3DataLake,
            ImmutableMap.<String, String>builder()
                    .put("hive.insert-existing-partitions-behavior", "OVERWRITE")
                    .put("hive.non-managed-table-writes-enabled", "true")
                    .put("hive.metastore-cache-ttl", "1d")
                    .put("hive.metastore-refresh-interval", "1d")
                    .buildOrThrow());
}
Also used: MetastoreConfig (io.trino.plugin.hive.metastore.MetastoreConfig), ThriftMetastoreConfig (io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig), TestingMetastoreLocator (io.trino.plugin.hive.metastore.thrift.TestingMetastoreLocator), ThriftHiveMetastore (io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastore), HiveMinioDataLake (io.trino.plugin.hive.containers.HiveMinioDataLake), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), BridgingHiveMetastore (io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore)
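With hive.insert-existing-partitions-behavior set to OVERWRITE, an INSERT into an existing partition replaces its contents instead of appending. A hedged usage sketch, assuming the test class extends io.trino.testing.AbstractTestQueryFramework so that getQueryRunner() and QueryRunner.execute(String) are available; the table name and values are illustrative:

// Hypothetical usage, not part of BaseTestHiveOnDataLake: the second INSERT into part=1
// overwrites the partition because of the OVERWRITE behavior configured above.
QueryRunner queryRunner = getQueryRunner();
queryRunner.execute("CREATE TABLE test_overwrite (value varchar, part bigint) WITH (partitioned_by = ARRAY['part'])");
queryRunner.execute("INSERT INTO test_overwrite VALUES ('first', 1)");
queryRunner.execute("INSERT INTO test_overwrite VALUES ('second', 1)");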

Example 5 with NoHdfsAuthentication

Use of io.trino.plugin.hive.authentication.NoHdfsAuthentication in project trino by trinodb.

From the class TestCheckpointEntryIterator, method setUp:

@BeforeClass
public void setUp() {
    HdfsConfig hdfsConfig = new HdfsConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    checkpointSchemaManager = new CheckpointSchemaManager(TESTING_TYPE_MANAGER);
}
Also used: HdfsConfigurationInitializer (io.trino.plugin.hive.HdfsConfigurationInitializer), HiveHdfsConfiguration (io.trino.plugin.hive.HiveHdfsConfiguration), HdfsConfig (io.trino.plugin.hive.HdfsConfig), HdfsConfiguration (io.trino.plugin.hive.HdfsConfiguration), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), HdfsEnvironment (io.trino.plugin.hive.HdfsEnvironment), BeforeClass (org.testng.annotations.BeforeClass)
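This setUp shows the construction pattern every example on this page shares: an HdfsEnvironment built from a plain HdfsConfig and NoHdfsAuthentication, so no Kerberos login or user impersonation is attempted. A self-contained sketch with the imports spelled out, using only constructors that appear in the examples above; the helper class name is illustrative and not part of the Trino sources:

// Hypothetical helper collecting the NoHdfsAuthentication wiring in one place.
import com.google.common.collect.ImmutableSet;
import io.trino.plugin.hive.HdfsConfig;
import io.trino.plugin.hive.HdfsConfiguration;
import io.trino.plugin.hive.HdfsConfigurationInitializer;
import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.authentication.NoHdfsAuthentication;

final class TestingHdfsEnvironments {

    private TestingHdfsEnvironments() {}

    static HdfsEnvironment createTestingHdfsEnvironment() {
        HdfsConfig hdfsConfig = new HdfsConfig();
        // both empty sets: no extra configuration initializers or dynamic providers are needed in tests
        HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(
                new HdfsConfigurationInitializer(hdfsConfig, ImmutableSet.of()),
                ImmutableSet.of());
        // NoHdfsAuthentication performs no Kerberos login and no user impersonation
        return new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    }
}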

Aggregations

NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication): 28
HdfsConfigurationInitializer (io.trino.plugin.hive.HdfsConfigurationInitializer): 23
HdfsEnvironment (io.trino.plugin.hive.HdfsEnvironment): 23
HiveHdfsConfiguration (io.trino.plugin.hive.HiveHdfsConfiguration): 23
HdfsConfig (io.trino.plugin.hive.HdfsConfig): 22
HdfsConfiguration (io.trino.plugin.hive.HdfsConfiguration): 19
MetastoreConfig (io.trino.plugin.hive.metastore.MetastoreConfig): 15
NodeVersion (io.trino.plugin.hive.NodeVersion): 11
FileHiveMetastore (io.trino.plugin.hive.metastore.file.FileHiveMetastore): 10
FileHiveMetastoreConfig (io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig): 10
HiveMetastore (io.trino.plugin.hive.metastore.HiveMetastore): 8
File (java.io.File): 8
CatalogName (io.trino.plugin.base.CatalogName): 5
CheckpointSchemaManager (io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager): 5
Path (org.apache.hadoop.fs.Path): 5
Test (org.testng.annotations.Test): 5
FileFormatDataSourceStats (io.trino.plugin.hive.FileFormatDataSourceStats): 4
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 4
ParquetReaderConfig (io.trino.plugin.hive.parquet.ParquetReaderConfig): 4
DistributedQueryRunner (io.trino.testing.DistributedQueryRunner): 4