Search in sources:

Example 16 with HoodieHiveSyncException

Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.

In the class JDBCExecutor, the method getTableSchema:

/**
 * Fetches the column schema of a Hive table via JDBC {@link DatabaseMetaData}.
 *
 * @param tableName name of the table in {@code config.databaseName} to inspect
 * @return map of column name to Hive type string; DECIMAL columns include
 *         precision/scale, e.g. {@code DECIMAL(10,2)}
 * @throws HoodieHiveSyncException if the metadata lookup fails
 */
@Override
public Map<String, String> getTableSchema(String tableName) {
    Map<String, String> schema = new HashMap<>();
    ResultSet result = null;
    try {
        DatabaseMetaData databaseMetaData = connection.getMetaData();
        result = databaseMetaData.getColumns(null, config.databaseName, tableName, null);
        while (result.next()) {
            // Use the JDBC-spec column labels instead of fragile ordinal indices
            // (4 == COLUMN_NAME, 6 == TYPE_NAME per DatabaseMetaData.getColumns).
            String columnName = result.getString("COLUMN_NAME");
            String columnType = result.getString("TYPE_NAME");
            if ("DECIMAL".equals(columnType)) {
                // DECIMAL needs explicit precision/scale to round-trip correctly.
                int columnSize = result.getInt("COLUMN_SIZE");
                int decimalDigits = result.getInt("DECIMAL_DIGITS");
                columnType += String.format("(%s,%s)", columnSize, decimalDigits);
            }
            schema.put(columnName, columnType);
        }
        return schema;
    } catch (SQLException e) {
        throw new HoodieHiveSyncException("Failed to get table schema for " + tableName, e);
    } finally {
        // Always release the ResultSet, even on the exception path.
        closeQuietly(result, null);
    }
}
Also used : HashMap(java.util.HashMap) SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) DatabaseMetaData(java.sql.DatabaseMetaData)

Example 17 with HoodieHiveSyncException

Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.

In the class JDBCExecutor, the method createHiveConnection:

/**
 * Lazily establishes the Hive JDBC connection if one does not already exist.
 *
 * @param jdbcUrl  JDBC URL of the HiveServer2 endpoint
 * @param hiveUser user name for the connection
 * @param hivePass password for the connection
 * @throws HoodieHiveSyncException if the Hive driver cannot be loaded or the
 *                                 connection cannot be established
 */
private void createHiveConnection(String jdbcUrl, String hiveUser, String hivePass) {
    if (connection == null) {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            // Previously this logged and returned, leaving `connection` null and
            // deferring the failure to a later NullPointerException. Fail fast
            // instead, consistent with the SQLException path below.
            LOG.error("Unable to load Hive driver class", e);
            throw new HoodieHiveSyncException("Unable to load Hive driver class", e);
        }
        try {
            this.connection = DriverManager.getConnection(jdbcUrl, hiveUser, hivePass);
            LOG.info("Successfully established Hive connection to " + jdbcUrl);
        } catch (SQLException e) {
            throw new HoodieHiveSyncException("Cannot create hive connection " + getHiveJdbcUrlWithDefaultDBName(jdbcUrl), e);
        }
    }
}
Also used : SQLException(java.sql.SQLException) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException)

Example 18 with HoodieHiveSyncException

Use of org.apache.hudi.hive.HoodieHiveSyncException in the Apache Hudi project.

In the class QueryBasedDDLExecutor, the method updateTableDefinition:

/**
 * Replaces the table's column definitions with the columns derived from the
 * given Parquet schema, issuing an ALTER TABLE ... REPLACE COLUMNS statement.
 * The "cascade" clause is appended only for partitioned tables.
 *
 * @param tableName name of the table to update
 * @param newSchema Parquet schema to derive the new column list from
 * @throws HoodieHiveSyncException if generating the schema string fails
 */
@Override
public void updateTableDefinition(String tableName, MessageType newSchema) {
    try {
        String columns = HiveSchemaUtil.generateSchemaString(newSchema, config.partitionFields, config.supportTimestamp);
        // Cascade clause should not be present for non-partitioned tables
        boolean isPartitioned = config.partitionFields.size() > 0;
        String cascade = isPartitioned ? " cascade" : "";
        String qualifiedTable = HIVE_ESCAPE_CHARACTER + config.databaseName + HIVE_ESCAPE_CHARACTER
                + "." + HIVE_ESCAPE_CHARACTER + tableName + HIVE_ESCAPE_CHARACTER;
        String alterSql = "ALTER TABLE " + qualifiedTable + " REPLACE COLUMNS(" + columns + " )" + cascade;
        LOG.info("Updating table definition with " + alterSql);
        runSQL(alterSql);
    } catch (IOException e) {
        throw new HoodieHiveSyncException("Failed to update table for " + tableName, e);
    }
}
Also used : IOException(java.io.IOException) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException)

Aggregations

HoodieHiveSyncException (org.apache.hudi.hive.HoodieHiveSyncException)18 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)10 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)10 TException (org.apache.thrift.TException)8 IOException (java.io.IOException)7 HashMap (java.util.HashMap)7 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)7 Table (org.apache.hadoop.hive.metastore.api.Table)7 Map (java.util.Map)6 EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext)6 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)6 LinkedHashMap (java.util.LinkedHashMap)5 List (java.util.List)5 Collectors (java.util.stream.Collectors)5 FileSystem (org.apache.hadoop.fs.FileSystem)5 HiveConf (org.apache.hadoop.hive.conf.HiveConf)5 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)5 Database (org.apache.hadoop.hive.metastore.api.Database)5 Hive (org.apache.hadoop.hive.ql.metadata.Hive)5 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)5