Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.
Example: the getTableSchema method of the JDBCExecutor class.
@Override
public Map<String, String> getTableSchema(String tableName) {
  Map<String, String> schema = new HashMap<>();
  // try-with-resources guarantees the ResultSet is closed on every exit path,
  // replacing the manual closeQuietly(...) call in a finally block.
  try (ResultSet result = connection.getMetaData()
      .getColumns(null, config.databaseName, tableName, null)) {
    while (result.next()) {
      // Use the named column labels from the JDBC DatabaseMetaData contract
      // instead of fragile positional indexes (4 == COLUMN_NAME, 6 == TYPE_NAME).
      String columnName = result.getString("COLUMN_NAME");
      String columnType = result.getString("TYPE_NAME");
      if ("DECIMAL".equals(columnType)) {
        // Hive DECIMAL needs explicit precision/scale to round-trip correctly,
        // e.g. DECIMAL(10,2).
        int columnSize = result.getInt("COLUMN_SIZE");
        int decimalDigits = result.getInt("DECIMAL_DIGITS");
        columnType += String.format("(%d,%d)", columnSize, decimalDigits);
      }
      schema.put(columnName, columnType);
    }
    return schema;
  } catch (SQLException e) {
    throw new HoodieHiveSyncException("Failed to get table schema for " + tableName, e);
  }
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.
Example: the createHiveConnection method of the JDBCExecutor class.
/**
 * Lazily establishes a JDBC connection to Hive if one is not already held.
 *
 * @param jdbcUrl  the Hive JDBC URL to connect to
 * @param hiveUser the user name for the connection
 * @param hivePass the password for the connection
 */
private void createHiveConnection(String jdbcUrl, String hiveUser, String hivePass) {
  // Already connected — nothing to do.
  if (connection != null) {
    return;
  }
  try {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
  } catch (ClassNotFoundException e) {
    // NOTE(review): a missing driver is only logged, leaving `connection` null;
    // callers will fail later on first use — confirm this best-effort behavior is intended.
    LOG.error("Unable to load Hive driver class", e);
    return;
  }
  try {
    connection = DriverManager.getConnection(jdbcUrl, hiveUser, hivePass);
    LOG.info("Successfully established Hive connection to " + jdbcUrl);
  } catch (SQLException e) {
    throw new HoodieHiveSyncException("Cannot create hive connection " + getHiveJdbcUrlWithDefaultDBName(jdbcUrl), e);
  }
}
Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.
Example: the updateTableDefinition method of the QueryBasedDDLExecutor class.
@Override
public void updateTableDefinition(String tableName, MessageType newSchema) {
  try {
    // Render the Parquet schema as a Hive column list.
    String newSchemaStr = HiveSchemaUtil.generateSchemaString(newSchema, config.partitionFields, config.supportTimestamp);
    // Cascade clause should not be present for non-partitioned tables.
    String cascadeClause = config.partitionFields.size() > 0 ? " cascade" : "";
    String escapedDatabase = HIVE_ESCAPE_CHARACTER + config.databaseName + HIVE_ESCAPE_CHARACTER;
    String escapedTable = HIVE_ESCAPE_CHARACTER + tableName + HIVE_ESCAPE_CHARACTER;
    String alterSql = "ALTER TABLE " + escapedDatabase + "." + escapedTable
        + " REPLACE COLUMNS(" + newSchemaStr + " )" + cascadeClause;
    LOG.info("Updating table definition with " + alterSql);
    runSQL(alterSql);
  } catch (IOException e) {
    throw new HoodieHiveSyncException("Failed to update table for " + tableName, e);
  }
}
Aggregations