Usage example of org.apache.hudi.hive.ddl.HiveQueryDDLExecutor in the Apache Hudi project.
From the class HiveSyncFunctionalTestHarness, method dropDatabases.
/**
 * Drops each of the given Hive databases (if it exists) via a {@link HiveQueryDDLExecutor}.
 *
 * @param databases names of the databases to drop
 * @throws IOException if the filesystem or executor cannot be obtained
 * @throws HiveException on Hive-side failures
 * @throws MetaException on metastore failures
 */
public void dropDatabases(String... databases) throws IOException, HiveException, MetaException {
  HiveSyncConfig hiveSyncConfig = hiveSyncConf();
  for (String database : databases) {
    hiveSyncConfig.databaseName = database;
    // The executor holds a metastore/Hive connection; the original leaked one
    // executor per database. Close it explicitly once the statement has run.
    HiveQueryDDLExecutor ddlExecutor = new HiveQueryDDLExecutor(hiveSyncConfig, fs(), hiveConf());
    try {
      ddlExecutor.runSQL("drop database if exists " + database);
    } finally {
      ddlExecutor.close();
    }
  }
}
Usage example of org.apache.hudi.hive.ddl.HiveQueryDDLExecutor in the Apache Hudi project.
From the class HiveSyncFunctionalTestHarness, method dropTables.
/**
 * Drops each of the given tables (if it exists) in the given database via a
 * {@link HiveQueryDDLExecutor}.
 *
 * @param database database containing the tables
 * @param tables   names of the tables to drop
 * @throws IOException if the filesystem or executor cannot be obtained
 * @throws HiveException on Hive-side failures
 * @throws MetaException on metastore failures
 */
public void dropTables(String database, String... tables) throws IOException, HiveException, MetaException {
  HiveSyncConfig hiveSyncConfig = hiveSyncConf();
  hiveSyncConfig.databaseName = database;
  for (String table : tables) {
    hiveSyncConfig.tableName = table;
    // The executor holds a metastore/Hive connection; the original leaked one
    // executor per table. Close it explicitly once the statement has run.
    HiveQueryDDLExecutor ddlExecutor = new HiveQueryDDLExecutor(hiveSyncConfig, fs(), hiveConf());
    try {
      ddlExecutor.runSQL("drop table if exists " + table);
    } finally {
      ddlExecutor.close();
    }
  }
}
Usage example of org.apache.hudi.hive.ddl.HiveQueryDDLExecutor in the Apache Hudi project.
From the class HiveTestUtil, method setUp.
// Initializes the shared static test fixtures for HiveTestUtil: an embedded
// ZooKeeper service, an embedded Hive server, a local FileSystem, a
// HiveSyncConfig pointed at a fresh temp directory, and a HiveQueryDDLExecutor.
// Services are started lazily (only when their static handle is null), so
// repeated calls reuse the already-running instances.
public static void setUp() throws IOException, InterruptedException, HiveException, MetaException {
configuration = new Configuration();
// Start ZooKeeper before the Hive test service — presumably Hive depends on it;
// NOTE(review): confirm the ordering requirement against HiveTestService.
if (zkServer == null) {
zkService = new ZookeeperTestService(configuration);
zkServer = zkService.start();
}
if (hiveServer == null) {
hiveTestService = new HiveTestService(configuration);
hiveServer = hiveTestService.start();
}
fileSystem = FileSystem.get(configuration);
// Build a sync config targeting the just-started HiveServer2 with empty
// credentials and fixed test database/table names.
hiveSyncConfig = new HiveSyncConfig();
hiveSyncConfig.jdbcUrl = hiveTestService.getJdbcHive2Url();
hiveSyncConfig.hiveUser = "";
hiveSyncConfig.hivePass = "";
hiveSyncConfig.databaseName = "testdb";
hiveSyncConfig.tableName = "test1";
// Unique temp base path per run (timestamp-suffixed) so runs don't collide.
hiveSyncConfig.basePath = Files.createTempDirectory("hivesynctest" + Instant.now().toEpochMilli()).toUri().toString();
hiveSyncConfig.assumeDatePartitioning = true;
hiveSyncConfig.usePreApacheInputFormat = false;
hiveSyncConfig.partitionFields = Collections.singletonList("datestr");
// Formatter for the "yyyy/MM/dd"-style date partition values used by the tests.
dtfOut = DateTimeFormatter.ofPattern("yyyy/MM/dd");
ddlExecutor = new HiveQueryDDLExecutor(hiveSyncConfig, fileSystem, getHiveConf());
// Reset any state left over from a previous test run; defined elsewhere in HiveTestUtil.
clear();
}
Aggregations