Usage example of org.apache.hudi.exception.HoodieException in the Apache Hudi project:
the HoodieDeltaStreamer class, method sync().
/**
* Main method to start syncing.
*
* @throws Exception
*/
/**
 * Main entry point that kicks off syncing.
 *
 * <p>Runs the one-time bootstrap when a bootstrap executor is configured;
 * otherwise drives the delta sync service either continuously or for a
 * single round, depending on {@code cfg.continuousMode}.
 *
 * @throws Exception if the bootstrap or sync run fails
 */
public void sync() throws Exception {
  // Bootstrap takes precedence: perform it and return without syncing.
  if (bootstrapExecutor.isPresent()) {
    LOG.info("Performing bootstrap. Source=" + bootstrapExecutor.get().getBootstrapConfig().getBootstrapSourceBasePath());
    bootstrapExecutor.get().execute();
    return;
  }
  if (cfg.continuousMode) {
    // Continuous mode: start the service and block until it shuts down.
    deltaSyncService.ifPresent(service -> {
      service.start(this::onDeltaSyncShutdown);
      try {
        service.waitForShutdown();
      } catch (Exception e) {
        // Lambdas cannot throw checked exceptions; rethrow as unchecked.
        throw new HoodieException(e.getMessage(), e);
      }
    });
    LOG.info("Delta Sync shutting down");
  } else {
    // One-shot mode: run a single sync round, then always close the service.
    LOG.info("Delta Streamer running only single round");
    try {
      deltaSyncService.ifPresent(service -> {
        try {
          service.getDeltaSync().syncOnce();
        } catch (IOException e) {
          throw new HoodieIOException(e.getMessage(), e);
        }
      });
    } catch (Exception ex) {
      LOG.error("Got error running delta sync once. Shutting down", ex);
      throw ex;
    } finally {
      deltaSyncService.ifPresent(DeltaSyncService::close);
      LOG.info("Shut down delta streamer");
    }
  }
}
Usage example of org.apache.hudi.exception.HoodieException in the Apache Hudi project:
the HiveIncrementalPuller class, method ensureTempPathExists().
/**
 * Ensures the per-run temp directory {@code <hoodieTmpDir>/<targetTable>__<sourceTable>/<lastCommitTime>}
 * exists as a fresh, world-writable directory.
 *
 * <p>Creates the base directory if missing, removes any stale directory left
 * over from a previous run for the same commit time, and then (re)creates it.
 *
 * @param fs             filesystem to operate on
 * @param lastCommitTime commit time used as the leaf directory name
 * @return true if the commit-time directory was created successfully
 * @throws IOException     on filesystem errors
 * @throws HoodieException if the base directory cannot be created or the stale
 *                         directory cannot be deleted
 */
private boolean ensureTempPathExists(FileSystem fs, String lastCommitTime) throws IOException {
  Path targetBaseDirPath = new Path(config.hoodieTmpDir, config.targetTable + "__" + config.sourceTable);
  if (!fs.exists(targetBaseDirPath)) {
    LOG.info("Creating " + targetBaseDirPath + " with permission drwxrwxrwx");
    boolean result = FileSystem.mkdirs(fs, targetBaseDirPath, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    if (!result) {
      throw new HoodieException("Could not create " + targetBaseDirPath + " with the required permissions");
    }
  }
  Path targetPath = new Path(targetBaseDirPath, lastCommitTime);
  // Wipe any stale output from a previous run for this commit time.
  if (fs.exists(targetPath)) {
    boolean result = fs.delete(targetPath, true);
    if (!result) {
      throw new HoodieException("Could not delete existing " + targetPath);
    }
  }
  LOG.info("Creating " + targetPath + " with permission drwxrwxrwx");
  // Fix: create the commit-time directory itself; the original mistakenly
  // re-created targetBaseDirPath here, so targetPath was never created.
  return FileSystem.mkdirs(fs, targetPath, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
}
Usage example of org.apache.hudi.exception.HoodieException in the Apache Hudi project:
the HoodieClusteringJob class, method getSchemaFromLatestInstant().
/**
 * Resolves the table's Avro schema from the latest completed instant.
 *
 * @return the table Avro schema rendered as a JSON string
 * @throws Exception       if schema resolution fails
 * @throws HoodieException if the table has no completed commits yet
 */
private String getSchemaFromLatestInstant() throws Exception {
  TableSchemaResolver schemaResolver = new TableSchemaResolver(metaClient);
  // A schema can only be derived once at least one commit has completed.
  boolean hasCompletedCommits =
      metaClient.getActiveTimeline().getCommitsTimeline().filterCompletedInstants().countInstants() > 0;
  if (!hasCompletedCommits) {
    throw new HoodieException("Cannot run clustering without any completed commits");
  }
  return schemaResolver.getTableAvroSchema(false).toString();
}
Usage example of org.apache.hudi.exception.HoodieException in the Apache Hudi project:
the HoodieCompactor class, method getSchemaFromLatestInstant().
/**
 * Resolves the table's Avro schema from the latest completed instant.
 *
 * @return the table Avro schema rendered as a JSON string
 * @throws Exception       if schema resolution fails
 * @throws HoodieException if the table has no completed commits yet
 */
private String getSchemaFromLatestInstant() throws Exception {
  TableSchemaResolver schemaUtil = new TableSchemaResolver(metaClient);
  // A schema can only be derived once at least one commit has completed.
  boolean hasCompletedCommits =
      metaClient.getActiveTimeline().getCommitsTimeline().filterCompletedInstants().countInstants() > 0;
  if (!hasCompletedCommits) {
    throw new HoodieException("Cannot run compaction without any completed commits");
  }
  return schemaUtil.getTableAvroSchema(false).toString();
}
Usage example of org.apache.hudi.exception.HoodieException in the Apache Hudi project:
the HoodieDataTableValidator class, method run().
/**
 * Runs the data table validation, either once or continuously based on
 * {@code cfg.continuous}, wrapping any failure in a {@link HoodieException}
 * and always shutting down the async validate service on the way out.
 *
 * @throws HoodieException if validation fails for any reason
 */
public void run() {
  try {
    LOG.info(cfg);
    if (!cfg.continuous) {
      LOG.info(" ****** do hoodie data table validation once ******");
      doHoodieDataTableValidationOnce();
    } else {
      LOG.info(" ****** do hoodie data table validation in CONTINUOUS mode ******");
      doHoodieDataTableValidationContinuous();
    }
  } catch (Exception e) {
    throw new HoodieException("Unable to do hoodie data table validation in " + cfg.basePath, e);
  } finally {
    // Always stop the async service, even when validation threw.
    asyncDataTableValidateService.ifPresent(service -> service.shutdown(true));
  }
}
Aggregations