use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.ID in project dhis2-core by dhis2.
The class DefaultCompleteDataSetRegistrationExchangeService, method saveCompleteDataSetRegistrations.
/**
 * Imports the given complete data set registrations and reports the outcome.
 *
 * <p>The registrations source is opened up front (so outer attributes are
 * available for configuration) and is now guaranteed to be closed again even
 * when the import fails part-way, which the previous version did not do.
 *
 * @param importOptions import options; defaults are used when {@code null}
 * @param id the job configuration used for progress notifications
 * @param completeRegistrations the registrations to import; opened and closed here
 * @return the import summary with counts and any conflicts
 */
private ImportSummary saveCompleteDataSetRegistrations(ImportOptions importOptions, JobConfiguration id, CompleteDataSetRegistrations completeRegistrations) {
    Clock clock = new Clock(log).startClock().logTime("Starting complete data set registration import, options: " + importOptions);
    notifier.clear(id).notify(id, "Process started");

    // Start here so we can access any outer attributes for the configuration
    completeRegistrations.open();

    ImportSummary importSummary = new ImportSummary();

    try {
        // -----------------------------------------------------------------
        // Set up import configuration
        // -----------------------------------------------------------------
        importOptions = importOptions != null ? importOptions : ImportOptions.getDefaultImportOptions();
        log.info("Import options: " + importOptions);
        ImportConfig cfg = new ImportConfig(this.systemSettingManager, this.categoryService, completeRegistrations, importOptions);

        // -----------------------------------------------------------------
        // Set up meta-data
        // -----------------------------------------------------------------
        MetadataCaches caches = new MetadataCaches();
        MetadataCallables metaDataCallables = new MetadataCallables(cfg, this.idObjManager, this.periodService, this.categoryService);

        if (importOptions.isPreheatCacheDefaultFalse()) {
            caches.preheat(idObjManager, cfg);
        }

        // -----------------------------------------------------------------
        // Perform import
        // -----------------------------------------------------------------
        notifier.notify(id, "Importing complete data set registrations");

        int totalCount = batchImport(completeRegistrations, cfg, importSummary, metaDataCallables, caches);

        notifier.notify(id, NotificationLevel.INFO, "Import done", true).addJobSummary(id, importSummary, ImportSummary.class);

        ImportCount count = importSummary.getImportCount();
        clock.logTime(String.format("Complete data set registration import done, total: %d, imported: %d, updated: %d, deleted: %d", totalCount, count.getImported(), count.getUpdated(), count.getDeleted()));

        return importSummary;
    } finally {
        // Release the registrations stream even if batchImport (or any step
        // above) threw — previously a failure leaked the open source.
        completeRegistrations.close();
    }
}
use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.ID in project dhis2-core by dhis2.
The class DefaultDataValueSetService, method importDataValueSet.
/**
 * Imports a data value set read from the given reader, returning the import
 * summary.
 * <p>
 * There are specific id schemes for data elements and organisation units
 * and a generic id scheme for all objects. The specific id schemes will
 * take precedence over the generic id scheme. The generic id scheme also
 * applies to data set and category option combo.
 * <p>
 * The id schemes uses the following order of precedence:
 * <ul>
 * <li>Id scheme from the data value set</li>
 * <li>Id scheme from the import options</li>
 * <li>Default id scheme which is UID</li>
 * </ul>
 * <p>
 * If id scheme is specified in the data value set, any id schemes in the
 * import options will be ignored.
 *
 * @param options import options; defaults applied in the created context
 * @param id job configuration used for progress notifications
 * @param reader source of the data value set header and streamed values
 * @return the import summary with counts, status and any conflicts
 */
private ImportSummary importDataValueSet(ImportOptions options, JobConfiguration id, DataValueSetReader reader) {
DataValueSet dataValueSet = reader.readHeader();
final ImportContext context = createDataValueSetImportContext(options, dataValueSet);
logDataValueSetImportContextInfo(context);
Clock clock = new Clock(log).startClock().logTime("Starting data value import, options: " + context.getImportOptions());
NotificationLevel notificationLevel = context.getImportOptions().getNotificationLevel(INFO);
notifier.clear(id).notify(id, notificationLevel, "Process started");
// ---------------------------------------------------------------------
// Heat caches
// ---------------------------------------------------------------------
preheatCaches(context);
// ---------------------------------------------------------------------
// Get outer meta-data
// ---------------------------------------------------------------------
ImportContext.DataSetContext dataSetContext = createDataSetContext(context, dataValueSet);
// Abort early when the validator rejects the outer meta-data.
if (importValidator.abortDataSetImport(dataValueSet, context, dataSetContext)) {
context.getSummary().setDescription("Import process was aborted");
notifier.notify(id, WARN, "Import process aborted", true).addJobSummary(id, context.getSummary(), ImportSummary.class);
return context.getSummary();
}
// Mark the data set complete only when both a data set and a parseable
// complete date are present.
Date completeDate = parseDate(dataValueSet.getCompleteDate());
if (dataSetContext.getDataSet() != null && completeDate != null) {
notifier.notify(id, notificationLevel, "Completing data set");
handleComplete(dataSetContext.getDataSet(), completeDate, dataSetContext.getOuterPeriod(), dataSetContext.getOuterOrgUnit(), dataSetContext.getFallbackCategoryOptionCombo(), context.getCurrentUserName(), context.getSummary());
} else {
context.getSummary().setDataSetComplete(Boolean.FALSE.toString());
}
final ImportCount importCount = new ImportCount();
// ---------------------------------------------------------------------
// Data values
// ---------------------------------------------------------------------
Date now = new Date();
clock.logTime("Validated outer meta-data");
notifier.notify(id, notificationLevel, "Importing data values");
// First consume any values already materialized in the parsed set, then
// stream the remainder from the reader; a single running index covers both.
List<? extends DataValueEntry> values = dataValueSet.getDataValues();
int index = 0;
if (values != null && !values.isEmpty()) {
for (DataValueEntry dataValue : values) {
importDataValue(context, dataSetContext, importCount, now, index++, dataValue);
}
}
DataValueEntry dataValue = reader.readNext();
while (dataValue != null) {
importDataValue(context, dataSetContext, importCount, now, index++, dataValue);
dataValue = reader.readNext();
}
// Flush batched writes; audit records are skipped when audit is disabled.
context.getDataValueBatchHandler().flush();
if (!context.isSkipAudit()) {
context.getAuditBatchHandler().flush();
}
// NOTE(review): flushes above are not in a finally block — an exception
// during the value loop leaves the batch handlers unflushed; confirm the
// caller/handler lifecycle covers cleanup.
context.getSummary().setImportCount(importCount).setStatus(!context.getSummary().hasConflicts() ? ImportStatus.SUCCESS : ImportStatus.WARNING).setDescription("Import process completed successfully");
clock.logTime("Data value import done, total: " + importCount.getTotalCount() + ", import: " + importCount.getImported() + ", update: " + importCount.getUpdated() + ", delete: " + importCount.getDeleted());
notifier.notify(id, notificationLevel, "Import done", true).addJobSummary(id, notificationLevel, context.getSummary(), ImportSummary.class);
return context.getSummary();
}
use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.ID in project dhis2-core by dhis2.
The class EventController, method putJsonEventForEventDate.
/**
 * Updates the event date of an existing event from a JSON request body.
 *
 * @param request the HTTP request carrying the (possibly compressed) JSON event
 * @param uid the UID of the event to update
 * @param importOptions import options bound from the request
 * @return a not-found message when the event does not exist, otherwise an OK message
 * @throws IOException if the request body cannot be read or parsed
 */
@PutMapping(value = "/{uid}/eventDate", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public WebMessage putJsonEventForEventDate(HttpServletRequest request, @PathVariable("uid") String uid, ImportOptions importOptions) throws IOException {
    boolean exists = programStageInstanceService.programStageInstanceExists(uid);
    if (!exists) {
        return notFound("Event not found for ID " + uid);
    }
    // Transparently handle gzip/zip-compressed request bodies.
    InputStream body = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());
    Event parsed = renderService.fromJson(body, Event.class);
    // The path UID is authoritative; override whatever the payload carries.
    parsed.setEvent(uid);
    eventService.updateEventForEventDate(parsed);
    return ok("Event updated " + uid);
}
use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.ID in project dhis2-core by dhis2.
The class EventController, method postJsonEventForNote.
/**
 * Attaches a note to an existing event, read from a JSON request body.
 *
 * @param uid the UID of the event to annotate
 * @param request the HTTP request carrying the (possibly compressed) JSON event
 * @param importOptions import options bound from the request
 * @return a not-found message when the event does not exist, otherwise an OK message
 * @throws IOException if the request body cannot be read or parsed
 */
@PostMapping(value = "/{uid}/note", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public WebMessage postJsonEventForNote(@PathVariable("uid") String uid, HttpServletRequest request, ImportOptions importOptions) throws IOException {
    boolean exists = programStageInstanceService.programStageInstanceExists(uid);
    if (!exists) {
        return notFound("Event not found for ID " + uid);
    }
    // Transparently handle gzip/zip-compressed request bodies.
    InputStream body = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());
    Event parsed = renderService.fromJson(body, Event.class);
    // The path UID is authoritative; override whatever the payload carries.
    parsed.setEvent(uid);
    eventService.updateEventForNote(parsed);
    return ok("Event updated: " + uid);
}
use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.ID in project dhis2-core by dhis2.
The class EventController, method putJsonEventSingleValue.
/**
 * Updates a single data value of an event from a JSON request body.
 *
 * @param request the HTTP request carrying the (possibly compressed) JSON event
 * @param uid the UID of the event to update
 * @param dataElementUid the UID of the data element whose value is updated
 * @return a not-found message when the data element does not exist, otherwise
 *         the result of the single-value update
 * @throws IOException if the request body cannot be read or parsed
 */
@PutMapping(value = "/{uid}/{dataElementUid}", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public WebMessage putJsonEventSingleValue(HttpServletRequest request, @PathVariable("uid") String uid, @PathVariable("dataElementUid") String dataElementUid) throws IOException {
    // Guard: the referenced data element must exist before touching the event.
    if (dataElementService.getDataElement(dataElementUid) == null) {
        return notFound("DataElement not found for ID " + dataElementUid);
    }
    // Transparently handle gzip/zip-compressed request bodies.
    InputStream body = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());
    Event parsed = renderService.fromJson(body, Event.class);
    // The path UID is authoritative; override whatever the payload carries.
    parsed.setEvent(uid);
    return updateEvent(parsed, true, null);
}
Aggregations