Search in sources :

Example 11 with UPDATED

use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.

In class EventManager, method updateEvent:

/**
 * Updates a single event by delegating to the batch update path.
 *
 * @param event the event to update
 * @param workContext the shared import work context
 * @return the first import summary produced, or an error summary when the
 *         batch call yielded no summaries
 */
public ImportSummary updateEvent(final Event event, final WorkContext workContext) {
    // Reuse the batch machinery with a one-element list.
    final ImportSummaries importSummaries = updateEvents(singletonList(event), workContext);
    final List<ImportSummary> summaries = importSummaries.getImportSummaries();
    if (!isNotEmpty(summaries)) {
        // Batch update produced nothing for this event: report failure.
        return error("Not updated", event.getEvent());
    }
    return summaries.get(0);
}
Also used : Event(org.hisp.dhis.dxf2.events.event.Event) ImmutableEvent(org.hisp.dhis.dxf2.events.importer.shared.ImmutableEvent) ImportSummaries(org.hisp.dhis.dxf2.importsummary.ImportSummaries)

Example 12 with UPDATED

use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.

In class ProgramStageInstanceMapper, method mapForInsert:

/**
 * Maps an import {@link Event} to a new {@link ProgramStageInstance} for
 * insertion, resolving referenced objects (program instance, program stage,
 * org unit, category option combo, data values, assigned user) from the
 * pre-populated maps on the work context. All context maps are keyed by the
 * event UID.
 *
 * @param event the incoming event to map
 * @return a fully populated, not-yet-persisted ProgramStageInstance
 */
public ProgramStageInstance mapForInsert(Event event) {
    ImportOptions importOptions = workContext.getImportOptions();
    ProgramStageInstance psi = new ProgramStageInstance();
    // Identifier: both branches decide how to identify the PSI itself, so
    // both must consult the *program stage instance* id scheme. (The
    // original checked getProgramStageIdScheme() in the else-if, which is
    // the scheme for the program stage, not the instance — fixed here.)
    if (importOptions.getIdSchemes().getProgramStageInstanceIdScheme().equals(CODE)) {
        psi.setCode(event.getEvent());
    } else if (importOptions.getIdSchemes().getProgramStageInstanceIdScheme().equals(UID)) {
        psi.setUid(event.getUid());
    }
    // Program Instance (enrollment), resolved by event UID
    psi.setProgramInstance(this.workContext.getProgramInstanceMap().get(event.getUid()));
    // Program Stage, resolved via the configured program stage id scheme
    psi.setProgramStage(this.workContext.getProgramStage(importOptions.getIdSchemes().getProgramStageIdScheme(), event.getProgramStage()));
    // Org Unit
    psi.setOrganisationUnit(this.workContext.getOrganisationUnitMap().get(event.getUid()));
    // Status (event status carried as an int)
    psi.setStatus(fromInt(event.getStatus().getValue()));
    // Attribute Option Combo
    psi.setAttributeOptionCombo(this.workContext.getCategoryOptionComboMap().get(event.getUid()));
    // Geometry
    psi.setGeometry(event.getGeometry());
    // Notes
    psi.setComments(convertNotes(event, this.workContext));
    // Data Values
    psi.setEventDataValues(workContext.getEventDataValueMap().get(event.getUid()));
    // Due date defaults to "now" when the event does not carry one.
    Date dueDate = new Date();
    if (event.getDueDate() != null) {
        dueDate = parseDate(event.getDueDate());
    }
    psi.setDueDate(dueDate);
    setCompletedDate(event, psi);
    // Note that execution date can be null
    setExecutionDate(event, psi);
    // Assigned user only applies when the stage enables user assignment.
    if (psi.getProgramStage() != null && psi.getProgramStage().isEnableUserAssignment()) {
        psi.setAssignedUser(this.workContext.getAssignedUserMap().get(event.getUid()));
    }
    // CREATED AT CLIENT + UPDATED AT CLIENT
    psi.setCreatedAtClient(parseDate(event.getCreatedAtClient()));
    psi.setLastUpdatedAtClient(parseDate(event.getLastUpdatedAtClient()));
    psi.setStoredBy(event.getStoredBy());
    psi.setCompletedBy(event.getCompletedBy());
    psi.setCreatedByUserInfo(event.getCreatedByUserInfo());
    psi.setLastUpdatedByUserInfo(event.getLastUpdatedByUserInfo());
    return psi;
}
Also used : ImportOptions(org.hisp.dhis.dxf2.common.ImportOptions) ProgramStageInstance(org.hisp.dhis.program.ProgramStageInstance) Date(java.util.Date) DateUtils.parseDate(org.hisp.dhis.util.DateUtils.parseDate)

Example 13 with UPDATED

use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.

In class DefaultAdxDataService, method saveDataValueSetInternal:

/**
 * Imports an ADX data value set by streaming it through an in-memory pipe:
 * ADX groups are translated to DXF on the fly and consumed by a
 * {@link AdxPipedImporter} running on a single-thread executor.
 *
 * @param in the ADX payload (XML)
 * @param importOptions caller-supplied options; may be {@code null}
 * @param id optional job configuration used for progress notifications
 * @return the aggregated import summary, including ADX-level conflicts
 */
private ImportSummary saveDataValueSetInternal(InputStream in, ImportOptions importOptions, JobConfiguration id) {
    notifier.clear(id).notify(id, "ADX parsing process started");
    // Normalize options once. importOptions itself may be null (hence the
    // firstNonNull); every subsequent read MUST go through adxImportOptions,
    // otherwise a null caller argument triggers an NPE below.
    ImportOptions adxImportOptions = firstNonNull(importOptions, ImportOptions.getDefaultImportOptions()).instance().setNotificationLevel(NotificationLevel.OFF);
    // Get import options
    IdScheme dsScheme = adxImportOptions.getIdSchemes().getDataSetIdScheme();
    IdScheme deScheme = adxImportOptions.getIdSchemes().getDataElementIdScheme();
    // Create meta-data maps
    CachingMap<String, DataSet> dataSetMap = new CachingMap<>();
    CachingMap<String, DataElement> dataElementMap = new CachingMap<>();
    // Get meta-data maps
    IdentifiableObjectCallable<DataSet> dataSetCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataSet.class, dsScheme, null);
    IdentifiableObjectCallable<DataElement> dataElementCallable = new IdentifiableObjectCallable<>(identifiableObjectManager, DataElement.class, deScheme, null);
    // Heat cache
    if (adxImportOptions.isPreheatCacheDefaultFalse()) {
        dataSetMap.load(identifiableObjectManager.getAll(DataSet.class), o -> o.getPropertyValue(dsScheme));
        dataElementMap.load(identifiableObjectManager.getAll(DataElement.class), o -> o.getPropertyValue(deScheme));
    }
    XMLReader adxReader = XMLFactory.getXMLReader(in);
    ImportSummary importSummary;
    adxReader.moveToStartElement(AdxDataService.ROOT, AdxDataService.NAMESPACE);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // For Async runs, give the DXF import a different notification task ID
    // so it doesn't conflict with notifications from this level.
    JobConfiguration dxfJobId = (id == null) ? null : new JobConfiguration("dxfJob", JobType.DATAVALUE_IMPORT_INTERNAL, id.getUserUid(), true);
    int groupCount = 0;
    try (PipedOutputStream pipeOut = new PipedOutputStream()) {
        Future<ImportSummary> futureImportSummary = executor.submit(new AdxPipedImporter(dataValueSetService, adxImportOptions, dxfJobId, pipeOut, sessionFactory));
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter dxfWriter = factory.createXMLStreamWriter(pipeOut);
        List<ImportConflict> adxConflicts = new LinkedList<>();
        dxfWriter.writeStartDocument("1.0");
        dxfWriter.writeStartElement("dataValueSet");
        dxfWriter.writeDefaultNamespace("http://dhis2.org/schema/dxf/2.0");
        notifier.notify(id, "Starting to import ADX data groups.");
        while (adxReader.moveToStartElement(AdxDataService.GROUP, AdxDataService.NAMESPACE)) {
            notifier.update(id, "Importing ADX data group: " + groupCount);
            // note this returns conflicts which are detected at ADX level
            adxConflicts.addAll(parseAdxGroupToDxf(adxReader, dxfWriter, adxImportOptions, dataSetMap, dataSetCallable, dataElementMap, dataElementCallable));
            groupCount++;
        }
        // end dataValueSet
        dxfWriter.writeEndElement();
        dxfWriter.writeEndDocument();
        pipeOut.flush();
        importSummary = futureImportSummary.get(TOTAL_MINUTES_TO_WAIT, TimeUnit.MINUTES);
        // Fold ADX-level conflicts into the DXF-level summary; effectively
        // final copy needed for use inside the lambda.
        ImportSummary summary = importSummary;
        adxConflicts.forEach(conflict -> summary.addConflict(conflict.getObject(), conflict.getValue()));
        importSummary.getImportCount().incrementIgnored(adxConflicts.size());
    } catch (AdxException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        importSummary.addConflict(ex.getObject(), ex.getMessage());
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } catch (IOException | XMLStreamException | InterruptedException | ExecutionException | TimeoutException ex) {
        importSummary = new ImportSummary();
        importSummary.setStatus(ImportStatus.ERROR);
        importSummary.setDescription("Data set import failed within group number: " + groupCount);
        notifier.update(id, NotificationLevel.ERROR, "ADX data import done", true);
        log.warn("Import failed: " + DebugUtils.getStackTrace(ex));
    } finally {
        // Always release the importer thread, even on unexpected unchecked
        // exceptions; previously shutdown() could be skipped.
        executor.shutdown();
    }
    notifier.update(id, INFO, "ADX data import done", true).addJobSummary(id, importSummary, ImportSummary.class);
    ImportCount c = importSummary.getImportCount();
    log.info("ADX data import done, imported: " + c.getImported() + ", updated: " + c.getUpdated() + ", deleted: " + c.getDeleted() + ", ignored: " + c.getIgnored());
    return importSummary;
}
Also used : XMLOutputFactory(javax.xml.stream.XMLOutputFactory) DataSet(org.hisp.dhis.dataset.DataSet) ImportSummary(org.hisp.dhis.dxf2.importsummary.ImportSummary) PipedOutputStream(java.io.PipedOutputStream) DataElement(org.hisp.dhis.dataelement.DataElement) CachingMap(org.hisp.dhis.commons.collection.CachingMap) XMLStreamWriter(javax.xml.stream.XMLStreamWriter) ExecutionException(java.util.concurrent.ExecutionException) XMLReader(org.hisp.staxwax.reader.XMLReader) JobConfiguration(org.hisp.dhis.scheduling.JobConfiguration) ImportConflict(org.hisp.dhis.dxf2.importsummary.ImportConflict) TimeoutException(java.util.concurrent.TimeoutException) ImportCount(org.hisp.dhis.dxf2.importsummary.ImportCount) IdScheme(org.hisp.dhis.common.IdScheme) IOException(java.io.IOException) IdentifiableObjectCallable(org.hisp.dhis.system.callable.IdentifiableObjectCallable) LinkedList(java.util.LinkedList) XMLStreamException(javax.xml.stream.XMLStreamException) ExecutorService(java.util.concurrent.ExecutorService) ImportOptions(org.hisp.dhis.dxf2.common.ImportOptions)

Example 14 with UPDATED

use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.

In class DefaultCompleteDataSetRegistrationExchangeService, method saveCompleteDataSetRegistrations:

/**
 * Imports a batch of complete data set registrations.
 *
 * @param importOptions caller-supplied options; may be {@code null}, in
 *        which case defaults are used
 * @param id optional job configuration for progress notifications
 * @param completeRegistrations the registration stream; opened here and
 *        always closed before returning
 * @return the import summary for the batch
 */
private ImportSummary saveCompleteDataSetRegistrations(ImportOptions importOptions, JobConfiguration id, CompleteDataSetRegistrations completeRegistrations) {
    Clock clock = new Clock(log).startClock().logTime("Starting complete data set registration import, options: " + importOptions);
    notifier.clear(id).notify(id, "Process started");
    // Start here so we can access any outer attributes for the
    // configuration
    completeRegistrations.open();
    try {
        ImportSummary importSummary = new ImportSummary();
        // -----------------------------------------------------------------
        // Set up import configuration
        // -----------------------------------------------------------------
        importOptions = importOptions != null ? importOptions : ImportOptions.getDefaultImportOptions();
        log.info("Import options: " + importOptions);
        ImportConfig cfg = new ImportConfig(this.systemSettingManager, this.categoryService, completeRegistrations, importOptions);
        // -----------------------------------------------------------------
        // Set up meta-data
        // -----------------------------------------------------------------
        MetadataCaches caches = new MetadataCaches();
        MetadataCallables metaDataCallables = new MetadataCallables(cfg, this.idObjManager, this.periodService, this.categoryService);
        if (importOptions.isPreheatCacheDefaultFalse()) {
            caches.preheat(idObjManager, cfg);
        }
        // -----------------------------------------------------------------
        // Perform import
        // -----------------------------------------------------------------
        notifier.notify(id, "Importing complete data set registrations");
        int totalCount = batchImport(completeRegistrations, cfg, importSummary, metaDataCallables, caches);
        notifier.notify(id, NotificationLevel.INFO, "Import done", true).addJobSummary(id, importSummary, ImportSummary.class);
        ImportCount count = importSummary.getImportCount();
        clock.logTime(String.format("Complete data set registration import done, total: %d, imported: %d, updated: %d, deleted: %d", totalCount, count.getImported(), count.getUpdated(), count.getDeleted()));
        return importSummary;
    } finally {
        // Ensure the registration stream is released even when batchImport
        // or a notifier call throws; previously close() could be skipped.
        completeRegistrations.close();
    }
}
Also used : ImportSummary(org.hisp.dhis.dxf2.importsummary.ImportSummary) ImportCount(org.hisp.dhis.dxf2.importsummary.ImportCount) Clock(org.hisp.dhis.system.util.Clock)

Example 15 with UPDATED

use of org.hisp.dhis.dxf2.events.trackedentity.store.query.EventQuery.COLUMNS.UPDATED in project dhis2-core by dhis2.

In class EventController, method putJsonEventForEventDate:

/**
 * Updates an existing event's event date from a JSON payload.
 *
 * @param request the HTTP request carrying the (possibly compressed) JSON body
 * @param uid the UID of the event to update
 * @param importOptions import options bound by Spring (kept for binding
 *        compatibility even though unused here)
 * @return a web message describing the outcome
 * @throws IOException if the request body cannot be read
 */
@PutMapping(value = "/{uid}/eventDate", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public WebMessage putJsonEventForEventDate(HttpServletRequest request, @PathVariable("uid") String uid, ImportOptions importOptions) throws IOException {
    // Guard: reject unknown events before touching the body.
    final boolean exists = programStageInstanceService.programStageInstanceExists(uid);
    if (!exists) {
        return notFound("Event not found for ID " + uid);
    }
    // Transparently unwrap gzip/zip request bodies, then parse the event.
    final InputStream body = StreamUtils.wrapAndCheckCompressionFormat(request.getInputStream());
    final Event parsedEvent = renderService.fromJson(body, Event.class);
    parsedEvent.setEvent(uid);
    eventService.updateEventForEventDate(parsedEvent);
    return ok("Event updated " + uid);
}
Also used : InputStream(java.io.InputStream) Event(org.hisp.dhis.dxf2.events.event.Event) PutMapping(org.springframework.web.bind.annotation.PutMapping) ResponseBody(org.springframework.web.bind.annotation.ResponseBody)

Aggregations

ImportReport (org.hisp.dhis.dxf2.metadata.feedback.ImportReport)9 ImportSummary (org.hisp.dhis.dxf2.importsummary.ImportSummary)8 User (org.hisp.dhis.user.User)8 Test (org.junit.jupiter.api.Test)8 Date (java.util.Date)7 List (java.util.List)7 TransactionalIntegrationTest (org.hisp.dhis.TransactionalIntegrationTest)6 IdentifiableObject (org.hisp.dhis.common.IdentifiableObject)6 DataSet (org.hisp.dhis.dataset.DataSet)6 ClassPathResource (org.springframework.core.io.ClassPathResource)6 Event (org.hisp.dhis.dxf2.events.event.Event)5 ImportCount (org.hisp.dhis.dxf2.importsummary.ImportCount)5 IOException (java.io.IOException)4 InputStream (java.io.InputStream)4 WebMessageException (org.hisp.dhis.dxf2.webmessage.WebMessageException)4 DataElement (org.hisp.dhis.dataelement.DataElement)3 ImportOptions (org.hisp.dhis.dxf2.common.ImportOptions)3 Events (org.hisp.dhis.dxf2.events.event.Events)3 TrackedEntityInstance (org.hisp.dhis.dxf2.events.trackedentity.TrackedEntityInstance)3 ImportSummaries (org.hisp.dhis.dxf2.importsummary.ImportSummaries)3