Example usage of org.apache.gobblin.compaction.dataset.Dataset in the Apache incubator-gobblin project: the SimpleCompactorCompletionListener class, method onCompactionCompletion.
/**
 * Callback invoked when an {@link MRCompactor} run finishes.
 *
 * <p>Logs the completion (including the compactor's initialize time) and emits an
 * error log line for every dataset that did not end in the COMPACTION_COMPLETE state.
 *
 * @param compactor the compactor whose run just completed
 */
public void onCompactionCompletion(MRCompactor compactor) {
  logger.info(String.format("Compaction (started on : %s) is finished", compactor.getInitializeTime()));
  for (Dataset dataset : compactor.getDatasets()) {
    Dataset.DatasetState finalState = dataset.state();
    // Anything other than COMPACTION_COMPLETE means this dataset did not finish cleanly.
    if (finalState != Dataset.DatasetState.COMPACTION_COMPLETE) {
      logger.error("Dataset " + dataset.getDatasetName() + " " + finalState.name());
    }
  }
}
Example usage of org.apache.gobblin.compaction.dataset.Dataset in the Apache incubator-gobblin project: the MRCompactor class, method throwExceptionsIfAnyDatasetCompactionFailed.
/**
 * Fails the compaction run if any dataset recorded throwables.
 *
 * <p>For each failed dataset, logs every recorded throwable and submits a
 * compaction-failed SLA event (one event per throwable, matching prior behavior),
 * then throws a {@link RuntimeException} summarizing how many datasets failed.
 *
 * @throws RuntimeException if at least one dataset has recorded throwables
 */
private void throwExceptionsIfAnyDatasetCompactionFailed() {
  Set<Dataset> datasetsWithThrowables = getDatasetsWithThrowables();
  for (Dataset dataset : datasetsWithThrowables) {
    for (Throwable t : dataset.throwables()) {
      LOG.error("Error processing dataset " + dataset, t);
      submitFailureSlaEvent(dataset, CompactionSlaEventHelper.COMPACTION_FAILED_EVENT_NAME);
    }
  }
  // The set itself knows how many datasets failed; no need for a manual counter.
  if (!datasetsWithThrowables.isEmpty()) {
    throw new RuntimeException(String.format("Failed to process %d datasets.", datasetsWithThrowables.size()));
  }
}
Example usage of org.apache.gobblin.compaction.dataset.Dataset in the Apache incubator-gobblin project: the MRCompactor class, method cancel.
/**
 * Cancels the compaction run.
 *
 * <p>Kills every Hadoop MR job in {@code MRCompactor.RUNNING_MR_JOBS} that has not yet
 * completed, then — regardless of whether the kill loop succeeds — shuts down the job
 * executor immediately and, finally, closes the verifier if one is present.
 *
 * @throws IOException if interacting with a Hadoop job fails
 */
@Override
public void cancel() throws IOException {
  try {
    for (Map.Entry<Dataset, Job> entry : MRCompactor.RUNNING_MR_JOBS.entrySet()) {
      Job runningJob = entry.getValue();
      if (runningJob.isComplete()) {
        continue; // Already finished; nothing to kill.
      }
      LOG.info(String.format("Killing hadoop job %s for dataset %s", runningJob.getJobID(), entry.getKey()));
      runningJob.killJob();
    }
  } finally {
    try {
      // Zero-timeout shutdown: pending tasks are irrelevant once compaction is cancelled.
      ExecutorsUtils.shutdownExecutorService(this.jobExecutor, Optional.of(LOG), 0, TimeUnit.NANOSECONDS);
    } finally {
      if (this.verifier.isPresent()) {
        this.verifier.get().closeNow();
      }
    }
  }
}
Aggregations