Use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.
In class EventManager, the method updateEvent:
public ImportSummary updateEvent(final Event event, final WorkContext workContext) {
    final List<Event> singleEvent = singletonList(event);
    final ImportSummaries importSummaries = updateEvents(singleEvent, workContext);

    if (isNotEmpty(importSummaries.getImportSummaries())) {
        return importSummaries.getImportSummaries().get(0);
    } else {
        return error("Not updated", event.getEvent());
    }
}
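The single-event path is just a thin wrapper: it puts the event into a singleton list, delegates to the bulk updateEvents method, and returns the first summary, falling back to an error summary when nothing comes back. A minimal, self-contained sketch of that delegation pattern follows; Summary and SingleDelegatesToBulk are simplified stand-ins for illustration, not the real dhis2-core ImportSummary/EventManager classes.

import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

// Simplified stand-in for an import summary, for illustration only.
class Summary {
    final String status;
    final String reference;

    Summary(String status, String reference) {
        this.status = status;
        this.reference = reference;
    }

    static Summary error(String message, String reference) {
        return new Summary("ERROR: " + message, reference);
    }
}

public class SingleDelegatesToBulk {

    // Bulk operation: pretend every event is updated successfully.
    List<Summary> updateAll(List<String> eventUids) {
        return eventUids.stream()
            .map(uid -> new Summary("OK", uid))
            .collect(Collectors.toList());
    }

    // Single-event convenience mirroring EventManager.updateEvent:
    // wrap in a singleton list, delegate to the bulk method, unwrap the first summary.
    Summary updateOne(String eventUid) {
        List<Summary> summaries = updateAll(Collections.singletonList(eventUid));
        return summaries.isEmpty()
            ? Summary.error("Not updated", eventUid)
            : summaries.get(0);
    }

    public static void main(String[] args) {
        System.out.println(new SingleDelegatesToBulk().updateOne("kAbc123uvwx").status);
    }
}

Keeping only one real implementation (the bulk method) avoids the single and bulk paths drifting apart over time.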
Use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.
In class ProgramStageInstanceMapper, the method mapForInsert:
public ProgramStageInstance mapForInsert(Event event) {
    ImportOptions importOptions = workContext.getImportOptions();
    ProgramStageInstance psi = new ProgramStageInstance();

    if (importOptions.getIdSchemes().getProgramStageInstanceIdScheme().equals(CODE)) {
        psi.setCode(event.getEvent());
    } else if (importOptions.getIdSchemes().getProgramStageIdScheme().equals(UID)) {
        psi.setUid(event.getUid());
    }

    // Program Instance
    psi.setProgramInstance(this.workContext.getProgramInstanceMap().get(event.getUid()));

    // Program Stage
    psi.setProgramStage(this.workContext.getProgramStage(
        importOptions.getIdSchemes().getProgramStageIdScheme(), event.getProgramStage()));

    // Org Unit
    psi.setOrganisationUnit(this.workContext.getOrganisationUnitMap().get(event.getUid()));

    // Status
    psi.setStatus(fromInt(event.getStatus().getValue()));

    // Attribute Option Combo
    psi.setAttributeOptionCombo(this.workContext.getCategoryOptionComboMap().get(event.getUid()));

    // Geometry
    psi.setGeometry(event.getGeometry());

    // Notes
    psi.setComments(convertNotes(event, this.workContext));

    // Data Values
    psi.setEventDataValues(workContext.getEventDataValueMap().get(event.getUid()));

    Date dueDate = new Date();
    if (event.getDueDate() != null) {
        dueDate = parseDate(event.getDueDate());
    }
    psi.setDueDate(dueDate);

    setCompletedDate(event, psi);

    // Note that execution date can be null
    setExecutionDate(event, psi);

    if (psi.getProgramStage() != null && psi.getProgramStage().isEnableUserAssignment()) {
        psi.setAssignedUser(this.workContext.getAssignedUserMap().get(event.getUid()));
    }

    // CREATED AT CLIENT + UPDATED AT CLIENT
    psi.setCreatedAtClient(parseDate(event.getCreatedAtClient()));
    psi.setLastUpdatedAtClient(parseDate(event.getLastUpdatedAtClient()));

    psi.setStoredBy(event.getStoredBy());
    psi.setCompletedBy(event.getCompletedBy());
    psi.setCreatedByUserInfo(event.getCreatedByUserInfo());
    psi.setLastUpdatedByUserInfo(event.getLastUpdatedByUserInfo());

    return psi;
}
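Almost every association on the ProgramStageInstance is resolved from a WorkContext map keyed by the event UID, so the mapper performs no lookups of its own, and the due date falls back to the current time when the payload omits it. Below is a minimal sketch of that preloaded-cache mapping idea; EventDto, EventEntity and SimpleEventMapper are hypothetical simplified types standing in for Event, ProgramStageInstance and the real mapper.

import java.util.Date;
import java.util.Map;

// Hypothetical, simplified stand-ins for the import DTO and the persisted entity.
record EventDto(String uid, String orgUnitRef, Date dueDate) {}

class EventEntity {
    String uid;
    String orgUnitName;
    Date dueDate;
}

public class SimpleEventMapper {

    // Pre-resolved lookup keyed by event UID, analogous to the WorkContext maps.
    private final Map<String, String> orgUnitByEventUid;

    SimpleEventMapper(Map<String, String> orgUnitByEventUid) {
        this.orgUnitByEventUid = orgUnitByEventUid;
    }

    EventEntity mapForInsert(EventDto dto) {
        EventEntity entity = new EventEntity();
        entity.uid = dto.uid();
        // Association resolved from the preloaded map, not from the database.
        entity.orgUnitName = orgUnitByEventUid.get(dto.uid());
        // Default the due date to "now" when the payload does not provide one.
        entity.dueDate = dto.dueDate() != null ? dto.dueDate() : new Date();
        return entity;
    }

    public static void main(String[] args) {
        SimpleEventMapper mapper = new SimpleEventMapper(Map.of("evtUid00001", "Example Clinic"));
        EventEntity mapped = mapper.mapForInsert(new EventDto("evtUid00001", "orgUnitRef", null));
        System.out.println(mapped.orgUnitName + ", due " + mapped.dueDate);
    }
}

Resolving all references up front in the context keeps the per-event mapping cheap, which matters when events are imported in large batches.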
Use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.
In class DefaultAdxDataService, the method saveDataValueSetInternal:
private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, JobConfiguration id) {
    notifier.clear(id).notify(id, "ADX parsing process started");

    ImportOptions adxImportOptions = firstNonNull(importOptions, ImportOptions.getDefaultImportOptions())
        .instance().setNotificationLevel(NotificationLevel.OFF);

    // Get import options
    IdScheme dsScheme = importOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme deScheme = importOptions.getIdSchemes().getDataElementIdScheme();

    // Create meta-data maps
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();

    // Get meta-data maps
    IdentifiableObjectCallable<DataSet> dataSetCallable =
        new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dsScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable =
        new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, deScheme, null);

    // Heat cache
    if (importOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dsScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(deScheme));
    }

    XMLReader adxReader = XMLFactory.getXMLReader(in);

    ImportSummary importSummary;

    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);

    ExecutorService executor = Executors.newSingleThreadExecutor();

    // For Async runs, give the DXF import a different notification task ID
    // so it doesn't conflict with notifications from this level.
    JobConfiguration dxfJobId = (id == null) ? null
        : new JobConfiguration("dxfJob", JobType.DATAVALUE_IMPORT_INTERNAL, id.getUserUid(), true);

    int groupCount = 0;

    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(
            new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfJobId, pipeOut, sessionFactory));
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);

        List<ImportConflict> adxConflicts = new LinkedList<>();

        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");

        notifier.notify(id, "Starting to import ADX data groups.");

        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);

            // note this returns conflicts which are detected at ADX level
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions,
                dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));
            groupCount++;
        }

        // end dataValueSet
        dxfWriter.writeEndElement();
        dxfWriter.writeEndDocument();
        pipeOut.flush();

        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        ImportSummary summary = importSummary;
        adxConflicts.forEach(conflict -> summary.addConflict(conflict.getObject(), conflict.getValue()));
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.addConflict(ex.getObject(), ex.getMessage());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    }

    executor.shutdown();

    notifier.update(id, INFO, "ADX data import done", true).addJobSummary(id, importSummary, ImportSummary.class);

    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated()
        + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());

    return importSummary;
}
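The central mechanic here is the streaming hand-off: ADX groups are converted to DXF and written into a PipedOutputStream, while an AdxPipedImporter consumes the matching input stream on a single-thread executor, and the caller waits on the returned Future with a timeout. The following self-contained sketch shows that producer/consumer pipe pattern with plain text in place of DXF and a line counter in place of the real importer; all names are illustrative, not dhis2-core APIs.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class PipedImportSketch {

    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();

        try (PipedOutputStream pipeOut = new PipedOutputStream();
             PipedInputStream pipeIn = new PipedInputStream(pipeOut)) {

            // Consumer: runs on the executor and reads whatever the producer writes,
            // analogous to AdxPipedImporter reading the generated DXF stream.
            Future<Integer> importedCount = executor.submit(() -> {
                int count = 0;
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(pipeIn, StandardCharsets.UTF_8))) {
                    while (reader.readLine() != null) {
                        count++;
                    }
                }
                return count;
            });

            // Producer: the calling thread writes converted records into the pipe.
            for (int group = 0; group < 3; group++) {
                pipeOut.write(("dataValueGroup-" + group + "\n").getBytes(StandardCharsets.UTF_8));
            }
            pipeOut.flush();
            pipeOut.close(); // signals end-of-stream to the consumer

            // Wait for the background "import" with a timeout, as the ADX service does.
            System.out.println("Imported groups: " + importedCount.get(1, TimeUnit.MINUTES));
        } finally {
            executor.shutdown();
        }
    }
}

Closing the write end is what signals end-of-stream to the consumer; without it the Future would stay blocked until the timeout expires.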
Use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.
In class DefaultCompleteDataSetRegistrationExchangeService, the method saveCompleteDataSetRegistrations:
private ImportSummary saveCompleteDataSetRegistrations(ImportOptions importOptions, JobConfiguration id, CompleteDataSetRegistrations completeRegistrations) {
    Clock clock = new Clock(log).startClock()
        .logTime("Starting complete data set registration import, options: " + importOptions);
    notifier.clear(id).notify(id, "Process started");

    // Start here so we can access any outer attributes for the configuration
    completeRegistrations.open();

    ImportSummary importSummary = new ImportSummary();

    // ---------------------------------------------------------------------
    // Set up import configuration
    // ---------------------------------------------------------------------
    importOptions = importOptions != null ? importOptions : ImportOptions.getDefaultImportOptions();
    log.info("Import options: " + importOptions);

    ImportConfig cfg = new ImportConfig(this.systemSettingManager, this.categoryService, completeRegistrations, importOptions);

    // ---------------------------------------------------------------------
    // Set up meta-data
    // ---------------------------------------------------------------------
    MetadataCaches caches = new MetadataCaches();
    MetadataCallables metaDataCallables = new MetadataCallables(cfg, this.idObjManager, this.periodService, this.categoryService);

    if (importOptions.isPreheatCacheDefaultFalse()) {
        caches.preheat(idObjManager, cfg);
    }

    // ---------------------------------------------------------------------
    // Perform import
    // ---------------------------------------------------------------------
    notifier.notify(id, "Importing complete data set registrations");

    int totalCount = batchImport(completeRegistrations, cfg, importSummary, metaDataCallables, caches);

    notifier.notify(id, NotificationLevel.INFO, "Import done", true).addJobSummary(id, importSummary, ImportSummary.class);

    ImportCount count = importSummary.getImportCount();
    clock.logTime(String.format("Complete data set registration import done, total: %d, imported: %d, updated: %d, deleted: %d",
        totalCount, count.getImported(), count.getUpdated(), count.getDeleted()));

    completeRegistrations.close();

    return importSummary;
}
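The MetadataCaches/MetadataCallables pair reflects a common trade-off: either preheat the caches with all objects up front (worthwhile when most of them will be needed) or resolve each identifier lazily on first use. A minimal sketch of that caching pattern is shown below; SimpleCachingMap and the lambda loader are hypothetical stand-ins for CachingMap and the IdentifiableObjectCallable lookups.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Minimal stand-in for a caching map: hits are served from the map,
// misses fall through to a caller-supplied loader.
class SimpleCachingMap<K, V> {

    private final Map<K, V> cache = new HashMap<>();

    // Preheat: bulk-load everything up front, as done when the preheat option is enabled.
    void preheat(Map<K, V> all) {
        cache.putAll(all);
    }

    // Lazy path: compute and remember the value on first access.
    V get(K key, Function<K, V> loader) {
        return cache.computeIfAbsent(key, loader);
    }
}

public class PreheatDemo {
    public static void main(String[] args) {
        SimpleCachingMap<String, String> dataSets = new SimpleCachingMap<>();

        // Option 1: preheat with all known data sets (one bulk query).
        dataSets.preheat(Map.of("DS_A", "Data set A", "DS_B", "Data set B"));

        // Option 2: lazy lookup; the loader only runs on a cache miss.
        String resolved = dataSets.get("DS_C", id -> "loaded " + id + " on demand");
        System.out.println(resolved);
    }
}

Preheating pays off for large payloads that touch most of the metadata; lazy loading is cheaper for small payloads that reference only a handful of objects.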
Use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.
In class EventController, the method putJsonEventForEventDate:
@PutMapping(value = "/{uid}/eventDate", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public WebMessage putJsonEventForEventDate(HttpServletRequest request, @PathVariable("uid") String uid, ImportOptions importOptions) throws IOException {
    if (!programStageInstanceService.programStageInstanceExists(uid)) {
        return notFound("Event not found for ID " + uid);
    }

    InputStream inputStream = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());

    Event updatedEvent = renderService.fromJson(inputStream, Event.class);
    updatedEvent.setEvent(uid);

    eventService.updateEventForEventDate(updatedEvent);

    return ok("Event updated " + uid);
}
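From a client's perspective this endpoint accepts a PUT with a JSON event payload against .../{uid}/eventDate. A hedged sketch of such a call with Java's built-in HttpClient follows; the /api/events base path, the demo URL and credentials, and the eventDate field name are assumptions drawn from typical DHIS2 usage rather than from this snippet.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class UpdateEventDateClient {

    public static void main(String[] args) throws Exception {
        // Assumed base URL, event UID and credentials; adjust for your instance.
        String baseUrl = "https://play.dhis2.org/dev/api";
        String eventUid = "abcdefghijk";
        String auth = Base64.getEncoder().encodeToString("admin:district".getBytes());

        // JSON body carrying the new event date (field name assumed from the Event payload).
        String body = "{\"eventDate\":\"2024-01-15\"}";

        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create(baseUrl + "/events/" + eventUid + "/eventDate"))
            .header("Content-Type", "application/json")
            .header("Authorization", "Basic " + auth)
            .PUT(HttpRequest.BodyPublishers.ofString(body))
            .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());

        System.out.println(response.statusCode() + ": " + response.body());
    }
}

A 404 from the existence check at the top of the controller method indicates the event UID is unknown; a 200 with the "Event updated" message indicates the date change was accepted.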