Use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache:
class HiveQueryDDLExecutor, method updateHiveSQLs.
/**
 * Runs each SQL statement through the embedded Hive driver, collecting the
 * driver responses in order. Statements are skipped when no driver is available.
 *
 * @param sqls SQL statements to execute sequentially
 * @return one {@link CommandProcessorResponse} per executed statement
 * @throws HoodieHiveSyncException if any statement fails to execute
 */
private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) {
  final List<CommandProcessorResponse> results = new ArrayList<>();
  try {
    for (String statement : sqls) {
      // No driver configured: nothing to run for this statement.
      if (hiveDriver == null) {
        continue;
      }
      HoodieTimer sqlTimer = new HoodieTimer().startTimer();
      results.add(hiveDriver.run(statement));
      LOG.info(String.format("Time taken to execute [%s]: %s ms", statement, sqlTimer.endTimer()));
    }
  } catch (Exception e) {
    throw new HoodieHiveSyncException("Failed in executing SQL", e);
  }
  return results;
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache:
class HiveQueryDDLExecutor, method dropPartitionsToTable.
/**
 * Drops the given partitions from a Hive table via the metastore client,
 * one partition at a time. A no-op when the partition list is empty.
 *
 * @param tableName        Hive table to drop partitions from
 * @param partitionsToDrop relative partition paths to drop
 * @throws HoodieHiveSyncException if any metastore drop call fails
 */
@Override
public void dropPartitionsToTable(String tableName, List<String> partitionsToDrop) {
  if (partitionsToDrop.isEmpty()) {
    LOG.info("No partitions to drop for " + tableName);
    return;
  }
  LOG.info("Drop partitions " + partitionsToDrop.size() + " on " + tableName);
  try {
    for (String partition : partitionsToDrop) {
      // Build the partition-spec clause expected by the metastore drop API.
      final String dropClause =
          HivePartitionUtil.getPartitionClauseForDrop(partition, partitionValueExtractor, config);
      metaStoreClient.dropPartition(config.databaseName, tableName, dropClause, false);
      LOG.info("Drop partition " + partition + " on " + tableName);
    }
  } catch (Exception e) {
    final String errorMsg = config.databaseName + "." + tableName + " drop partition failed";
    LOG.error(errorMsg, e);
    throw new HoodieHiveSyncException(errorMsg, e);
  }
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache:
class JDBCExecutor, method runSQL.
/**
 * Executes a single SQL statement over the JDBC connection.
 *
 * <p>Uses try-with-resources so the {@link Statement} is always closed,
 * replacing the manual null-initialized variable + finally/closeQuietly
 * pattern. Any {@link SQLException} (including one raised by the implicit
 * close) is wrapped and rethrown, matching the original failure contract.
 *
 * @param s the SQL statement to execute
 * @throws HoodieHiveSyncException if execution fails
 */
@Override
public void runSQL(String s) {
  try (Statement stmt = connection.createStatement()) {
    LOG.info("Executing SQL " + s);
    stmt.execute(s);
  } catch (SQLException e) {
    throw new HoodieHiveSyncException("Failed in executing SQL " + s, e);
  }
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache:
class QueryBasedDDLExecutor, method createTable.
/**
 * Generates the CREATE TABLE DDL for the given storage schema and runs it.
 *
 * @param tableName         name of the table to create
 * @param storageSchema     Parquet message type describing the table columns
 * @param inputFormatClass  Hive input format class name
 * @param outputFormatClass Hive output format class name
 * @param serdeClass        SerDe class name
 * @param serdeProperties   extra SerDe properties for the table
 * @param tableProperties   extra table-level properties
 * @throws HoodieHiveSyncException if DDL generation fails
 */
@Override
public void createTable(String tableName, MessageType storageSchema, String inputFormatClass,
    String outputFormatClass, String serdeClass, Map<String, String> serdeProperties,
    Map<String, String> tableProperties) {
  try {
    final String ddl = HiveSchemaUtil.generateCreateDDL(tableName, storageSchema, config,
        inputFormatClass, outputFormatClass, serdeClass, serdeProperties, tableProperties);
    LOG.info("Creating table with " + ddl);
    runSQL(ddl);
  } catch (IOException e) {
    throw new HoodieHiveSyncException("Failed to create table " + tableName, e);
  }
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache:
class HMSDDLExecutor, method addPartitionsToTable.
/**
 * Registers new partitions on a Hive table through the metastore client.
 * Each partition's storage descriptor is cloned from the table's own
 * descriptor, with the location pointed at the partition path under the
 * table base path. A no-op when the partition list is empty.
 *
 * @param tableName       Hive table to add partitions to
 * @param partitionsToAdd relative partition paths to register
 * @throws HoodieHiveSyncException if any metastore call fails
 */
@Override
public void addPartitionsToTable(String tableName, List<String> partitionsToAdd) {
  if (partitionsToAdd.isEmpty()) {
    LOG.info("No partitions to add for " + tableName);
    return;
  }
  LOG.info("Adding partitions " + partitionsToAdd.size() + " to table " + tableName);
  try {
    // The table's storage descriptor is the template for every new partition.
    final StorageDescriptor tableSd = client.getTable(syncConfig.databaseName, tableName).getSd();
    List<Partition> hivePartitions = partitionsToAdd.stream()
        .map(partitionPath -> {
          StorageDescriptor partitionSd = new StorageDescriptor();
          partitionSd.setCols(tableSd.getCols());
          partitionSd.setInputFormat(tableSd.getInputFormat());
          partitionSd.setOutputFormat(tableSd.getOutputFormat());
          partitionSd.setSerdeInfo(tableSd.getSerdeInfo());
          partitionSd.setLocation(
              FSUtils.getPartitionPath(syncConfig.basePath, partitionPath).toString());
          List<String> partitionValues =
              partitionValueExtractor.extractPartitionValuesInPath(partitionPath);
          // createTime/lastAccessTime of 0 and null parameters mirror the prior behavior.
          return new Partition(
              partitionValues, syncConfig.databaseName, tableName, 0, 0, partitionSd, null);
        })
        .collect(Collectors.toList());
    // ifNotExists=true, needResults=false — duplicates are tolerated silently.
    client.add_partitions(hivePartitions, true, false);
  } catch (TException e) {
    String errorMsg = syncConfig.databaseName + "." + tableName + " add partition failed";
    LOG.error(errorMsg, e);
    throw new HoodieHiveSyncException(errorMsg, e);
  }
}
Aggregations