Usage of net.geoprism.registry.etl.upload.ImportProgressListenerIF in the project geoprism-registry by TerraFrame: the method process of the class DataImportJob.
// TODO : It might actually be faster to first convert into a shared temp
// table, assuming you're resolving the parent references into it.
/**
 * Drives the import state machine for one execution of this job.
 *
 * Depending on {@code stage} this either runs the validation pass (and, if it
 * is clean, immediately recurses into the import pass), runs the import pass,
 * or resumes a previously interrupted import. After each pass the history
 * record is transitioned to the next stage and a notification is queued so
 * the UI refreshes. Validation problems / import errors put the job into
 * FEEDBACK status so a user can resolve them.
 *
 * @param executionContext job execution context; receives FEEDBACK status when
 *          user resolution is required
 * @param history the import history record tracking progress/stage for this run
 * @param stage the stage to execute (VALIDATE, IMPORT, or RESUME_IMPORT)
 * @param config the import configuration; its history/job ids are stamped here
 * @throws MalformedURLException propagated from the underlying import
 * @throws InvocationTargetException propagated from the underlying import
 * @throws ProgrammingErrorException if {@code stage} is not one of the three
 *           stages handled here
 */
private void process(ExecutionContext executionContext, ImportHistory history, ImportStage stage, ImportConfiguration config) throws MalformedURLException, InvocationTargetException {
  validate(config);
  // TODO : We should have a single transaction where we do all the history
  // configuration upfront, that way the job is either fully configured (and
  // resumable) or it isn't (no in-between)
  config.setHistoryId(history.getOid());
  config.setJobId(this.getOid());
  if (stage.equals(ImportStage.VALIDATE)) {
    // We intentionally do NOT reset work progress here: doing so would
    // prevent people from resuming the job where it left off.
    ImportProgressListenerIF progressListener = runImport(history, stage, config);
    if (progressListener.hasValidationProblems()) {
      // Hand the job to the user to resolve the validation problems.
      executionContext.setStatus(AllJobStatus.FEEDBACK);
      progressListener.applyValidationProblems();
      this.transitionStage(history, config, ImportStage.VALIDATION_RESOLVE, false);
      NotificationFacade.queue(new GlobalNotificationMessage(MessageType.IMPORT_JOB_CHANGE, null));
    } else {
      // Validation was clean: reset progress counters and immediately run
      // the import pass in the same execution.
      this.transitionStage(history, config, ImportStage.IMPORT, true);
      NotificationFacade.queue(new GlobalNotificationMessage(MessageType.IMPORT_JOB_CHANGE, null));
      this.process(executionContext, history, ImportStage.IMPORT, config);
    }
  } else if (stage.equals(ImportStage.IMPORT) || stage.equals(ImportStage.RESUME_IMPORT)) {
    // TODO : I'm not sure the RESUME_IMPORT code path is ever used.
    if (stage.equals(ImportStage.IMPORT)) {
      // Stale validation problems only need clearing on a fresh import,
      // not on a resume.
      deleteValidationProblems(history);
    }
    // As above, progress is deliberately not reset so the job can resume
    // where it left off.
    runImport(history, stage, config);
    if (history.hasImportErrors()) {
      this.transitionStage(history, config, ImportStage.IMPORT_RESOLVE, false);
      executionContext.setStatus(AllJobStatus.FEEDBACK);
    } else {
      this.transitionStage(history, config, ImportStage.COMPLETE, false);
    }
    NotificationFacade.queue(new GlobalNotificationMessage(MessageType.IMPORT_JOB_CHANGE, null));
  } else {
    String msg = "Invalid import stage [" + stage.getEnumName() + "].";
    logger.error(msg);
    throw new ProgrammingErrorException(msg);
  }
}

/**
 * Moves the history record into {@code nextStage}, persisting the current
 * configuration JSON alongside it. Centralizes the
 * appLock/clearStage/addStage/setConfigJson/apply ritual that was previously
 * duplicated for every stage transition.
 *
 * @param history the history record to transition (will be locked and applied)
 * @param config the configuration whose JSON snapshot is stored on the history
 * @param nextStage the stage to transition into
 * @param resetProgress when {@code true}, zero the work-progress and
 *          imported-record counters (used when validation completes cleanly
 *          and a fresh import pass is about to start)
 */
private void transitionStage(ImportHistory history, ImportConfiguration config, ImportStage nextStage, boolean resetProgress) {
  history.appLock();
  history.clearStage();
  history.addStage(nextStage);
  history.setConfigJson(config.toJSON().toString());
  if (resetProgress) {
    history.setWorkProgress(0L);
    history.setImportedRecords(0L);
  }
  history.apply();
}
Aggregations