Example usage of org.hisp.dhis.scheduling.JobConfiguration in the dhis2-core project (by dhis2):
the DefaultAdxDataService class, method saveDataValueSetInternal.
/**
 * Parses an ADX payload from the given stream, translates it group by group
 * into DXF 2 and imports the result through the data value set service.
 * <p>
 * The translation is streamed through a pipe: a background task consumes the
 * DXF output while this method writes it, so the payload is never fully
 * materialized in memory.
 *
 * @param in the ADX XML input stream.
 * @param importOptions the import options; may be null, in which case the
 *        default import options are used.
 * @param id the job configuration used for notifications; may be null.
 * @return the import summary, including conflicts detected at the ADX level.
 */
private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, JobConfiguration id) {
    notifier.clear(id).notify(id, "ADX parsing process started");
    ImportOptions adxImportOptions = firstNonNull(importOptions, ImportOptions.getDefaultImportOptions()).instance().setNotificationLevel(NotificationLevel.OFF);
    // Use the null-safe copy from here on: the caller-supplied importOptions
    // may be null (the firstNonNull above exists precisely for that case).
    IdScheme dsScheme = adxImportOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme deScheme = adxImportOptions.getIdSchemes().getDataElementIdScheme();
    // Meta-data caches, lazily populated via the callables below.
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();
    IdentifiableObjectCallable<DataSet> dataSetCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dsScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, deScheme, null);
    // Optionally preheat the caches with all data sets / data elements.
    if (adxImportOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dsScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(deScheme));
    }
    XMLReader adxReader = XMLFactory.getXMLReader(in);
    ImportSummary importSummary;
    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // For async runs, give the DXF import a different notification task ID
    // so it doesn't conflict with notifications from this level.
    JobConfiguration dxfJobId = (id == null) ? null : new JobConfiguration("dxfJob", JobType.DATAVALUE_IMPORT_INTERNAL, id.getUserUid(), true);
    int groupCount = 0;
    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfJobId, pipeOut, sessionFactory));
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);
        List<ImportConflict> adxConflicts = new LinkedList<>();
        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");
        notifier.notify(id, "Starting to import ADX data groups.");
        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);
            // Note: this returns conflicts which are detected at the ADX level.
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions, dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));
            groupCount++;
        }
        dxfWriter.writeEndElement(); // end dataValueSet
        dxfWriter.writeEndDocument();
        pipeOut.flush();
        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        // Merge ADX-level conflicts into the summary produced by the DXF import
        // and count them as ignored values.
        for (ImportConflict conflict : adxConflicts) {
            importSummary.addConflict(conflict.getObject(), conflict.getValue());
        }
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.addConflict(ex.getObject(), ex.getMessage());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        if (ex instanceof InterruptedException) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } finally {
        // Always release the importer thread, even on unexpected runtime errors.
        executor.shutdown();
    }
    notifier.update(id, INFO, "ADX data import done", true).addJobSummary(id, importSummary, ImportSummary.class);
    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated() + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());
    return importSummary;
}
Example usage of org.hisp.dhis.scheduling.JobConfiguration in the dhis2-core project (by dhis2):
the TrackerCrudTest class, method shouldUpdateTrackedEntityWithUpdateStrategy.
@Test
void shouldUpdateTrackedEntityWithUpdateStrategy() {
    // Given: the import is configured with the UPDATE strategy and a single entity.
    when(importOptions.getImportStrategy()).thenReturn(ImportStrategy.UPDATE);
    List<TrackedEntityInstance> entities = Collections.singletonList(trackedEntityInstance);

    // When: the entity is merged with the UPDATE strategy in effect.
    ImportSummaries summaries = trackedEntityInstanceService.mergeOrDeleteTrackedEntityInstances(entities, importOptions, jobConfiguration);

    // Then: no summary reports an error, and the service looked up and updated
    // the existing entity exactly once.
    boolean anyError = summaries.getImportSummaries().stream().anyMatch(summary -> summary.isStatus(ImportStatus.ERROR));
    assertFalse(anyError);
    verify(defaultTrackedEntityInstanceService, times(1)).getTrackedEntityInstance(trackedEntityInstanceUid, user);
    verify(defaultTrackedEntityInstanceService, times(1)).updateTrackedEntityInstance(any());
}
Example usage of org.hisp.dhis.scheduling.JobConfiguration in the dhis2-core project (by dhis2):
the EventController class, method startAsyncImport.
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
/**
 * Kicks off an asynchronous event import and reports where its status can
 * be tracked.
 *
 * @param importOptions the ImportOptions.
 * @param events the events to import.
 * @return a WebMessage whose location points at the async task endpoint.
 */
private WebMessage startAsyncImport(ImportOptions importOptions, List<Event> events) {
    // The job is keyed to the current user so notifications reach the right inbox.
    String currentUserUid = currentUserService.getCurrentUser().getUid();
    JobConfiguration jobId = new JobConfiguration("inMemoryEventImport", EVENT_IMPORT, currentUserUid, true);

    taskExecutor.executeTask(new ImportEventsTask(events, eventService, importOptions, jobId));
    return jobConfigurationReport(jobId).setLocation("/system/tasks/" + EVENT_IMPORT);
}
Example usage of org.hisp.dhis.scheduling.JobConfiguration in the dhis2-core project (by dhis2):
the NotificationMapTest class, method testFirstSummaryToBeCreatedIsTheFirstOneToBeRemoved.
@Test
void testFirstSummaryToBeCreatedIsTheFirstOneToBeRemoved() {
    // Fill the pool to capacity with summaries keyed by uid "0".."MAX-1".
    JobConfiguration jobConfiguration = new JobConfiguration(null, DATAVALUE_IMPORT, "userId", false);
    for (int i = 0; i < MAX_POOL_TYPE_SIZE; i++) {
        jobConfiguration.setUid(String.valueOf(i));
        mapToTest.addSummary(jobConfiguration, i);
    }
    // Every addition beyond capacity must evict the oldest remaining entry
    // (FIFO): adding uid MAX evicts "0", adding uid MAX+1 evicts "1".
    for (int overflow = 0; overflow < 2; overflow++) {
        int newestUid = MAX_POOL_TYPE_SIZE + overflow;
        jobConfiguration.setUid(String.valueOf(newestUid));
        mapToTest.addSummary(jobConfiguration, newestUid);
        String evictedUid = String.valueOf(overflow);
        Optional<String> evicted = mapToTest.getJobSummariesForJobType(DATAVALUE_IMPORT).keySet().stream().filter(key -> key.equals(evictedUid)).findAny();
        Assertions.assertFalse(evicted.isPresent());
    }
}
Example usage of org.hisp.dhis.scheduling.JobConfiguration in the dhis2-core project (by dhis2):
the JobConfigurationObjectBundleHookTest class, method validateInternalNonConfigurableIgnoredValidationErrorE7010.
@Test
void validateInternalNonConfigurableIgnoredValidationErrorE7010() {
    // Stub collaborators: the stored config resolves by uid, the job resolves
    // by type, and the job's own validation reports error E7010.
    Mockito.when(jobConfigurationService.getJobConfigurationByUid(Mockito.eq("jsdhJSJHD"))).thenReturn(analyticsTableJobConfig);
    Mockito.when(jobService.getJob(Mockito.eq(JobType.ANALYTICSTABLE_UPDATE))).thenReturn(job);
    Mockito.when(job.validate()).thenReturn(new ErrorReport(Class.class, ErrorCode.E7010));

    // Build an enabled hourly analytics-table job configuration to validate.
    JobConfiguration configuration = new JobConfiguration();
    configuration.setUid("jsdhJSJHD");
    configuration.setJobType(JobType.ANALYTICSTABLE_UPDATE);
    configuration.setCronExpression(CRON_HOURLY);
    configuration.setEnabled(true);

    // The hook must ignore E7010 for this job type: no error reports expected.
    List<ErrorReport> errorReports = hook.validate(configuration, null);
    Assertions.assertTrue(errorReports.isEmpty());
}
Aggregations