Use of org.hisp.dhis.dxf2.common.ImportSummary in project dhis2-core by dhis2.
The class AdxPipedImporter, method call().
@Override
public ImportSummary call() {
    ImportSummary result = null;

    SecurityContextHolder.getContext().setAuthentication(authentication);
    DbmsUtils.bindSessionToThread(sessionFactory);

    try {
        result = dataValueSetService.saveDataValueSet(pipeIn, importOptions, id);
    } catch (Exception ex) {
        result = new ImportSummary();
        result.setStatus(ImportStatus.ERROR);
        result.setDescription("Exception: " + ex.getMessage());
    } finally {
        IOUtils.closeQuietly(pipeIn);
        DbmsUtils.unbindSessionFromThread(sessionFactory);
    }

    return result;
}
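AdxPipedImporter.call() consumes pipeIn, a PipedInputStream connected to the PipedOutputStream that saveDataValueSetInternal (below) writes DXF into from another thread. A minimal, self-contained sketch of that pipe pattern, with placeholder producer and consumer logic standing in for the real ADX-to-DXF conversion and the DataValueSetService call:

import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class PipedImportSketch {
    public static void main(String[] args) throws Exception {
        PipedOutputStream pipeOut = new PipedOutputStream();
        // AdxPipedImporter is assumed to open a stream like this one internally,
        // connected to the pipeOut it receives in its constructor.
        PipedInputStream pipeIn = new PipedInputStream(pipeOut);

        ExecutorService executor = Executors.newSingleThreadExecutor();

        // Consumer side: stands in for call() above, which hands pipeIn to
        // DataValueSetService.saveDataValueSet(...) on a worker thread.
        Future<String> result = executor.submit(() ->
            new String(pipeIn.readAllBytes(), StandardCharsets.UTF_8));

        // Producer side: stands in for the DXF document streamed into the pipe
        // on the calling thread.
        pipeOut.write("<dataValueSet/>".getBytes(StandardCharsets.UTF_8));
        pipeOut.close();

        System.out.println(result.get());
        executor.shutdown();
    }
}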
Use of org.hisp.dhis.dxf2.common.ImportSummary in project dhis2-core by dhis2.
The class DefaultAdxDataService, method saveDataValueSetInternal().
private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, TaskId id) {
    notifier.clear(id).notify(id, "ADX parsing process started");

    ImportOptions adxImportOptions = ObjectUtils
        .firstNonNull(importOptions, ImportOptions.getDefaultImportOptions())
        .instance().setNotificationLevel(NotificationLevel.OFF);

    // Get import options
    IdScheme dataSetIdScheme = importOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme dataElementIdScheme = importOptions.getIdSchemes().getDataElementIdScheme();

    // Create meta-data maps
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();

    // Get meta-data maps
    IdentifiableObjectCallable<DataSet> dataSetCallable =
        new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dataSetIdScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable =
        new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, dataElementIdScheme, null);

    // Heat cache
    if (importOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dataSetIdScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(dataElementIdScheme));
    }

    XMLReader adxReader = XMLFactory.getXMLReader(in);

    ImportSummary importSummary;

    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);

    ExecutorService executor = Executors.newSingleThreadExecutor();

    // Give the DXF import a different notification task ID so it doesn't conflict with notifications from this level.
    TaskId dxfTaskId = new TaskId(TaskCategory.DATAVALUE_IMPORT_INTERNAL, id.getUser());

    int groupCount = 0;

    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(
            new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfTaskId, pipeOut, sessionFactory));

        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);

        List<ImportConflict> adxConflicts = new LinkedList<>();

        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");

        notifier.notify(id, "Starting to import ADX data groups.");

        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);

            // Note this returns conflicts which are detected at ADX level
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions,
                dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));

            groupCount++;
        }

        // end dataValueSet
        dxfWriter.writeEndElement();
        dxfWriter.writeEndDocument();

        pipeOut.flush();

        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        importSummary.getConflicts().addAll(adxConflicts);
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.getConflicts().add(ex.getImportConflict());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    }

    executor.shutdown();

    notifier.update(id, INFO, "ADX data import done", true).addTaskSummary(id, importSummary);

    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated()
        + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());

    return importSummary;
}
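The dxfWriter calls above wrap whatever parseAdxGroupToDxf streams in between a dataValueSet root element in the DXF 2 namespace. A small, self-contained sketch of just that XMLStreamWriter sequence, writing to a StringWriter instead of the pipe and with one illustrative data value in place of the converted groups:

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamWriter;

public class DxfWriterSketch {
    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        XMLStreamWriter dxfWriter = XMLOutputFactory.newInstance().createXMLStreamWriter(out);

        // Same envelope as saveDataValueSetInternal: a dataValueSet root in the DXF 2 namespace.
        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");

        // Placeholder for the content parseAdxGroupToDxf would emit for each ADX group;
        // the attribute values are purely illustrative.
        dxfWriter.writeEmptyElement("dataValue");
        dxfWriter.writeAttribute("dataElement", "abc123");
        dxfWriter.writeAttribute("value", "10");

        dxfWriter.writeEndElement(); // end dataValueSet
        dxfWriter.writeEndDocument();
        dxfWriter.close();

        // Prints something like:
        // <?xml version="1.0" ?><dataValueSet xmlns="http://dhis2.org/schema/dxf/2.0"><dataValue dataElement="abc123" value="10"/></dataValueSet>
        System.out.println(out);
    }
}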
Use of org.hisp.dhis.dxf2.common.ImportSummary in project dhis2-core by dhis2.
The class MetadataSyncPreProcessorTest, method testhandleAggregateDataPushShouldThrowExceptionWhenDataPushIsUnsuccessful().
@Test(expected = MetadataSyncServiceException.class)
public void testhandleAggregateDataPushShouldThrowExceptionWhenDataPushIsUnsuccessful() throws Exception {
    MetadataRetryContext mockRetryContext = mock(MetadataRetryContext.class);

    ImportSummary expectedSummary = new ImportSummary();
    expectedSummary.setStatus(ImportStatus.ERROR);

    AvailabilityStatus availabilityStatus = new AvailabilityStatus(true, "test_message", null);
    when(synchronizationManager.isRemoteServerAvailable()).thenReturn(availabilityStatus);
    when(metadataSyncPreProcessor.handleAggregateDataPush(mockRetryContext)).thenReturn(expectedSummary);

    ImportSummary actualSummary = metadataSyncPreProcessor.handleAggregateDataPush(mockRetryContext);
    assertEquals(expectedSummary.getStatus(), actualSummary.getStatus());
}
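The expected exception comes from the pre-processor treating an ERROR import summary as a failed push. A hedged sketch of that check, assuming (not taken from dhis2-core) that MetadataSyncServiceException is unchecked and accepts a message; the helper name is illustrative:

// Illustrative only; the actual handleAggregateDataPush implementation is not shown here.
private void validateAggregateDataPush(ImportSummary summary) {
    if (summary == null || ImportStatus.ERROR.equals(summary.getStatus())) {
        throw new MetadataSyncServiceException("Aggregate data push returned ERROR status");
    }
}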
Use of org.hisp.dhis.dxf2.common.ImportSummary in project dhis2-core by dhis2.
The class MetadataSyncPreProcessorTest, method testHandleEventDataPushShouldNotThrowExceptionWhenDataPushIsSuccessful().
@Test
public void testHandleEventDataPushShouldNotThrowExceptionWhenDataPushIsSuccessful() throws Exception {
    MetadataRetryContext mockRetryContext = mock(MetadataRetryContext.class);

    ImportSummaries expectedSummary = new ImportSummaries();
    ImportSummary summary = new ImportSummary();
    summary.setStatus(ImportStatus.SUCCESS);
    expectedSummary.addImportSummary(summary);

    AvailabilityStatus availabilityStatus = new AvailabilityStatus(true, "test_message", null);
    when(synchronizationManager.isRemoteServerAvailable()).thenReturn(availabilityStatus);
    when(metadataSyncPreProcessor.handleEventDataPush(mockRetryContext)).thenReturn(expectedSummary);

    ImportSummaries actualSummary = metadataSyncPreProcessor.handleEventDataPush(mockRetryContext);
    assertEquals(expectedSummary.getImportSummaries().get(0).getStatus(),
        actualSummary.getImportSummaries().get(0).getStatus());
}
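Both tests compare statuses only through the accessors on ImportSummary and ImportSummaries. A small helper sketch using just those accessors to collapse an ImportSummaries into a single success flag (the helper name is illustrative, not part of dhis2-core):

// Returns true when no individual summary reports ImportStatus.ERROR.
private boolean isPushSuccessful(ImportSummaries summaries) {
    return summaries.getImportSummaries().stream()
        .noneMatch(summary -> ImportStatus.ERROR.equals(summary.getStatus()));
}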
Use of org.hisp.dhis.dxf2.common.ImportSummary in project dhis2-core by dhis2.
The class EventController, method postJsonEvent().
@RequestMapping(method = RequestMethod.POST, consumes = "application/json")
@PreAuthorize("hasRole('ALL') or hasRole('F_TRACKED_ENTITY_DATAVALUE_ADD')")
public void postJsonEvent(@RequestParam(defaultValue = "CREATE") ImportStrategy strategy,
    HttpServletResponse response, HttpServletRequest request, ImportOptions importOptions) throws Exception {
    importOptions.setImportStrategy(strategy);

    InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());
    importOptions.setIdSchemes(getIdSchemesFromParameters(importOptions.getIdSchemes(), contextService.getParameterValuesMap()));

    if (!importOptions.isAsync()) {
        ImportSummaries importSummaries = eventService.addEventsJson(inputStream, importOptions);
        importSummaries.setImportOptions(importOptions);

        importSummaries.getImportSummaries().stream()
            .filter(importSummary -> !importOptions.isDryRun()
                && !importSummary.getStatus().equals(ImportStatus.ERROR)
                && !importOptions.getImportStrategy().isDelete())
            .forEach(importSummary -> importSummary.setHref(
                ContextUtils.getRootPath(request) + RESOURCE_PATH + "/" + importSummary.getReference()));

        if (importSummaries.getImportSummaries().size() == 1) {
            ImportSummary importSummary = importSummaries.getImportSummaries().get(0);
            importSummary.setImportOptions(importOptions);

            if (!importOptions.isDryRun()) {
                if (!importSummary.getStatus().equals(ImportStatus.ERROR)) {
                    response.setHeader("Location", ContextUtils.getRootPath(request) + RESOURCE_PATH + "/" + importSummary.getReference());
                }
            }
        }

        webMessageService.send(WebMessageUtils.importSummaries(importSummaries), response, request);
    } else {
        TaskId taskId = new TaskId(TaskCategory.EVENT_IMPORT, currentUserService.getCurrentUser());

        List<Event> events = eventService.getEventsJson(inputStream);
        scheduler.executeTask(new ImportEventTask(events, eventService, importOptions, taskId));

        response.setHeader("Location", ContextUtils.getRootPath(request) + "/system/tasks/" + TaskCategory.EVENT_IMPORT);
        response.setStatus(HttpServletResponse.SC_NO_CONTENT);
    }
}
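A sketch of calling this endpoint from a client: the instance URL, credentials, and UIDs are placeholders, and the payload fields follow the DHIS2 event model only illustratively (check the API docs for your server version). A synchronous request returns the import summaries in the response body; with async=true the controller responds 204 No Content and sets a Location header pointing at the task endpoint, as the code above shows.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class PostEventSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder server and credentials.
        URL url = new URL("https://dhis2.example.org/api/events");
        String auth = Base64.getEncoder()
            .encodeToString("admin:district".getBytes(StandardCharsets.UTF_8));

        // Illustrative event payload; replace the UIDs with real identifiers.
        String payload = "{\"program\":\"PROGRAM_UID\",\"orgUnit\":\"ORGUNIT_UID\","
            + "\"eventDate\":\"2024-01-01\",\"status\":\"COMPLETED\","
            + "\"dataValues\":[{\"dataElement\":\"DATAELEMENT_UID\",\"value\":\"10\"}]}";

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setRequestProperty("Authorization", "Basic " + auth);
        conn.setDoOutput(true);

        try (OutputStream out = conn.getOutputStream()) {
            out.write(payload.getBytes(StandardCharsets.UTF_8));
        }

        System.out.println("HTTP " + conn.getResponseCode());
    }
}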