Search in sources :

Example 6 with HoodieHiveSyncException

use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache.

In class HiveQueryDDLExecutor, method updateHiveSQLs:

/**
 * Executes each SQL statement in order through the embedded Hive driver,
 * collecting the driver responses and logging the time taken per statement.
 *
 * @param sqls SQL statements to run, in order
 * @return the driver responses, one per executed statement
 * @throws HoodieHiveSyncException if any statement fails to execute
 */
private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) {
    List<CommandProcessorResponse> results = new ArrayList<>();
    try {
        for (String statement : sqls) {
            // Skip execution entirely when no driver is available.
            if (hiveDriver == null) {
                continue;
            }
            HoodieTimer sqlTimer = new HoodieTimer().startTimer();
            CommandProcessorResponse response = hiveDriver.run(statement);
            results.add(response);
            LOG.info(String.format("Time taken to execute [%s]: %s ms", statement, sqlTimer.endTimer()));
        }
    } catch (Exception e) {
        throw new HoodieHiveSyncException("Failed in executing SQL", e);
    }
    return results;
}
Also used : CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) ArrayList(java.util.ArrayList) HoodieTimer(org.apache.hudi.common.util.HoodieTimer) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Example 7 with HoodieHiveSyncException

use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache.

In class HiveQueryDDLExecutor, method dropPartitionsToTable:

/**
 * Drops the given partitions from the named Hive table via the metastore client.
 * A no-op when the partition list is empty.
 *
 * @param tableName        table whose partitions are dropped
 * @param partitionsToDrop relative partition paths to drop
 * @throws HoodieHiveSyncException if dropping any partition fails
 */
@Override
public void dropPartitionsToTable(String tableName, List<String> partitionsToDrop) {
    if (partitionsToDrop.isEmpty()) {
        LOG.info("No partitions to drop for " + tableName);
        return;
    }
    LOG.info("Drop partitions " + partitionsToDrop.size() + " on " + tableName);
    try {
        for (String partition : partitionsToDrop) {
            // Translate the partition path into a Hive DROP PARTITION clause.
            String clause = HivePartitionUtil.getPartitionClauseForDrop(partition, partitionValueExtractor, config);
            metaStoreClient.dropPartition(config.databaseName, tableName, clause, false);
            LOG.info("Drop partition " + partition + " on " + tableName);
        }
    } catch (Exception e) {
        String failureMsg = config.databaseName + "." + tableName + " drop partition failed";
        LOG.error(failureMsg, e);
        throw new HoodieHiveSyncException(failureMsg, e);
    }
}
Also used : HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Example 8 with HoodieHiveSyncException

use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache.

In class JDBCExecutor, method runSQL:

/**
 * Executes a single SQL statement over the JDBC connection.
 *
 * @param s the SQL text to execute
 * @throws HoodieHiveSyncException if statement creation or execution fails
 */
@Override
public void runSQL(String s) {
    Statement statement = null;
    try {
        statement = connection.createStatement();
        LOG.info("Executing SQL " + s);
        statement.execute(s);
    } catch (SQLException e) {
        throw new HoodieHiveSyncException("Failed in executing SQL " + s, e);
    } finally {
        // Best-effort close: statement may still be null if createStatement failed.
        closeQuietly(null, statement);
    }
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException)

Example 9 with HoodieHiveSyncException

use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache.

In class QueryBasedDDLExecutor, method createTable:

/**
 * Generates a CREATE TABLE DDL statement from the storage schema and runs it.
 *
 * @param tableName         name of the table to create
 * @param storageSchema     Parquet schema backing the table
 * @param inputFormatClass  fully-qualified input format class name
 * @param outputFormatClass fully-qualified output format class name
 * @param serdeClass        fully-qualified SerDe class name
 * @param serdeProperties   SerDe properties for the table
 * @param tableProperties   table-level properties
 * @throws HoodieHiveSyncException if DDL generation fails
 */
@Override
public void createTable(String tableName, MessageType storageSchema, String inputFormatClass, String outputFormatClass, String serdeClass, Map<String, String> serdeProperties, Map<String, String> tableProperties) {
    try {
        String ddl = HiveSchemaUtil.generateCreateDDL(tableName, storageSchema, config, inputFormatClass, outputFormatClass, serdeClass, serdeProperties, tableProperties);
        LOG.info("Creating table with " + ddl);
        runSQL(ddl);
    } catch (IOException e) {
        throw new HoodieHiveSyncException("Failed to create table " + tableName, e);
    }
}
Also used : IOException(java.io.IOException) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException)

Example 10 with HoodieHiveSyncException

use of org.apache.hudi.hive.HoodieHiveSyncException in project hudi by apache.

In class HMSDDLExecutor, method addPartitionsToTable:

/**
 * Registers new partitions on the named table through the metastore client,
 * cloning the table's storage descriptor for each partition location.
 * A no-op when the partition list is empty.
 *
 * @param tableName       table to add partitions to
 * @param partitionsToAdd relative partition paths under the table base path
 * @throws HoodieHiveSyncException if the metastore calls fail
 */
@Override
public void addPartitionsToTable(String tableName, List<String> partitionsToAdd) {
    if (partitionsToAdd.isEmpty()) {
        LOG.info("No partitions to add for " + tableName);
        return;
    }
    LOG.info("Adding partitions " + partitionsToAdd.size() + " to table " + tableName);
    try {
        // Template descriptor: each partition reuses the table's columns, formats and SerDe.
        StorageDescriptor tableSd = client.getTable(syncConfig.databaseName, tableName).getSd();
        List<Partition> hivePartitions = partitionsToAdd.stream().map(partitionPath -> {
            String location = FSUtils.getPartitionPath(syncConfig.basePath, partitionPath).toString();
            List<String> values = partitionValueExtractor.extractPartitionValuesInPath(partitionPath);
            StorageDescriptor partitionSd = new StorageDescriptor();
            partitionSd.setCols(tableSd.getCols());
            partitionSd.setInputFormat(tableSd.getInputFormat());
            partitionSd.setOutputFormat(tableSd.getOutputFormat());
            partitionSd.setSerdeInfo(tableSd.getSerdeInfo());
            partitionSd.setLocation(location);
            return new Partition(values, syncConfig.databaseName, tableName, 0, 0, partitionSd, null);
        }).collect(Collectors.toList());
        client.add_partitions(hivePartitions, true, false);
    } catch (TException e) {
        LOG.error(syncConfig.databaseName + "." + tableName + " add partition failed", e);
        throw new HoodieHiveSyncException(syncConfig.databaseName + "." + tableName + " add partition failed", e);
    }
}
Also used : ImmutablePair(org.apache.hudi.common.util.collection.ImmutablePair) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) FileSystem(org.apache.hadoop.fs.FileSystem) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) Partition(org.apache.hadoop.hive.metastore.api.Partition) LinkedHashMap(java.util.LinkedHashMap) HiveSchemaUtil(org.apache.hudi.hive.util.HiveSchemaUtil) Logger(org.apache.log4j.Logger) StatsSetupConst(org.apache.hadoop.hive.common.StatsSetupConst) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Map(java.util.Map) Path(org.apache.hadoop.fs.Path) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) HivePartitionUtil(org.apache.hudi.hive.util.HivePartitionUtil) PartitionValueExtractor(org.apache.hudi.hive.PartitionValueExtractor) Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveSyncConfig(org.apache.hudi.hive.HiveSyncConfig) HiveConf(org.apache.hadoop.hive.conf.HiveConf) EnvironmentContext(org.apache.hadoop.hive.metastore.api.EnvironmentContext) TException(org.apache.thrift.TException) StorageSchemes(org.apache.hudi.common.fs.StorageSchemes) Collectors(java.util.stream.Collectors) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) MessageType(org.apache.parquet.schema.MessageType) List(java.util.List) HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) TableType(org.apache.hadoop.hive.metastore.TableType) LogManager(org.apache.log4j.LogManager) Database(org.apache.hadoop.hive.metastore.api.Database) FSUtils(org.apache.hudi.common.fs.FSUtils) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) List(java.util.List) 
HoodieHiveSyncException(org.apache.hudi.hive.HoodieHiveSyncException)

Aggregations

HoodieHiveSyncException (org.apache.hudi.hive.HoodieHiveSyncException)18 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)10 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)10 TException (org.apache.thrift.TException)8 IOException (java.io.IOException)7 HashMap (java.util.HashMap)7 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)7 Table (org.apache.hadoop.hive.metastore.api.Table)7 Map (java.util.Map)6 EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext)6 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)6 LinkedHashMap (java.util.LinkedHashMap)5 List (java.util.List)5 Collectors (java.util.stream.Collectors)5 FileSystem (org.apache.hadoop.fs.FileSystem)5 HiveConf (org.apache.hadoop.hive.conf.HiveConf)5 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)5 Database (org.apache.hadoop.hive.metastore.api.Database)5 Hive (org.apache.hadoop.hive.ql.metadata.Hive)5 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)5