Usage of org.hisp.dhis.dxf2.datavalueset.ImportContext.DataSetContext in the dhis2-core project by dhis2, illustrated by the skipDataValue method of the DataValueSetImportValidator class.
/**
 * Runs every registered {@link DataValueValidation} against the given data
 * value and reports whether the value should be skipped.
 * <p>
 * A validation signals failure by mutating the shared {@link ImportSummary}:
 * either the skipped-value count or the total conflict occurrence count grows.
 * The chain short-circuits on the first validation that does so.
 *
 * @param dataValue the data value under validation
 * @param context the overall import context (holds the summary)
 * @param dataSetContext meta-data context of the enclosing data set
 * @param valueContext resolved meta-data for this particular value
 * @return {@code true} if any validation flagged the value (skip it),
 *         {@code false} if all validations passed
 */
public boolean skipDataValue(DataValueEntry dataValue, ImportContext context, DataSetContext dataSetContext, DataValueContext valueContext) {
    final ImportSummary summary = context.getSummary();
    // Snapshot the counters so growth caused by a validation is detectable.
    final int baselineSkipped = summary.skippedValueCount();
    final int baselineConflicts = summary.getTotalConflictOccurrenceCount();
    for (DataValueValidation check : dataValueValidations) {
        check.validate(dataValue, context, dataSetContext, valueContext);
        boolean skippedGrew = summary.skippedValueCount() > baselineSkipped;
        boolean conflictsGrew = summary.getTotalConflictOccurrenceCount() > baselineConflicts;
        if (skippedGrew || conflictsGrew) {
            return true;
        }
    }
    return false;
}
Usage of org.hisp.dhis.dxf2.datavalueset.ImportContext.DataSetContext in the dhis2-core project by dhis2, illustrated by the importDataValueSet method of the DefaultDataValueSetService class.
/**
 * Imports the data value set provided by the given reader, first the values
 * embedded in the header, then any values streamed from the reader.
 * <p>
 * There are specific id schemes for data elements and organisation units
 * and a generic id scheme for all objects. The specific id schemes will
 * take precedence over the generic id scheme. The generic id scheme also
 * applies to data set and category option combo.
 * <p>
 * The id schemes use the following order of precedence:
 * <ul>
 * <li>Id scheme from the data value set</li>
 * <li>Id scheme from the import options</li>
 * <li>Default id scheme which is UID</li>
 * </ul>
 * <p>
 * If an id scheme is specified in the data value set, any id schemes in the
 * import options will be ignored.
 *
 * @param options import options controlling id schemes and import behaviour
 * @param id job configuration used as the key for progress notifications
 * @param reader source of the data value set header and streamed data values
 * @return the import summary holding counts, conflicts and the final status
 */
private ImportSummary importDataValueSet(ImportOptions options, JobConfiguration id, DataValueSetReader reader) {
DataValueSet dataValueSet = reader.readHeader();
final ImportContext context = createDataValueSetImportContext(options, dataValueSet);
logDataValueSetImportContextInfo(context);
Clock clock = new Clock(log).startClock().logTime("Starting data value import, options: " + context.getImportOptions());
NotificationLevel notificationLevel = context.getImportOptions().getNotificationLevel(INFO);
notifier.clear(id).notify(id, notificationLevel, "Process started");
// ---------------------------------------------------------------------
// Heat caches
// ---------------------------------------------------------------------
preheatCaches(context);
// ---------------------------------------------------------------------
// Get outer meta-data
// ---------------------------------------------------------------------
ImportContext.DataSetContext dataSetContext = createDataSetContext(context, dataValueSet);
// Abort early if data-set level validation fails; the summary carries the details.
if (importValidator.abortDataSetImport(dataValueSet, context, dataSetContext)) {
context.getSummary().setDescription("Import process was aborted");
notifier.notify(id, WARN, "Import process aborted", true).addJobSummary(id, context.getSummary(), ImportSummary.class);
return context.getSummary();
}
Date completeDate = parseDate(dataValueSet.getCompleteDate());
// A complete date together with a resolved data set marks the set as complete.
if (dataSetContext.getDataSet() != null && completeDate != null) {
notifier.notify(id, notificationLevel, "Completing data set");
handleComplete(dataSetContext.getDataSet(), completeDate, dataSetContext.getOuterPeriod(), dataSetContext.getOuterOrgUnit(), dataSetContext.getFallbackCategoryOptionCombo(), context.getCurrentUserName(), context.getSummary());
} else {
context.getSummary().setDataSetComplete(Boolean.FALSE.toString());
}
final ImportCount importCount = new ImportCount();
// ---------------------------------------------------------------------
// Data values
// ---------------------------------------------------------------------
Date now = new Date();
clock.logTime("Validated outer meta-data");
notifier.notify(id, notificationLevel, "Importing data values");
// First pass: values already materialised in the parsed header.
List<? extends DataValueEntry> values = dataValueSet.getDataValues();
int index = 0;
if (values != null && !values.isEmpty()) {
for (DataValueEntry dataValue : values) {
importDataValue(context, dataSetContext, importCount, now, index++, dataValue);
}
}
// Second pass: remaining values streamed one at a time from the reader.
DataValueEntry dataValue = reader.readNext();
while (dataValue != null) {
importDataValue(context, dataSetContext, importCount, now, index++, dataValue);
dataValue = reader.readNext();
}
// Flush batched writes; audit flush is skipped when auditing is disabled.
context.getDataValueBatchHandler().flush();
if (!context.isSkipAudit()) {
context.getAuditBatchHandler().flush();
}
// Conflicts downgrade the overall status from SUCCESS to WARNING.
context.getSummary().setImportCount(importCount).setStatus(!context.getSummary().hasConflicts() ? ImportStatus.SUCCESS : ImportStatus.WARNING).setDescription("Import process completed successfully");
clock.logTime("Data value import done, total: " + importCount.getTotalCount() + ", import: " + importCount.getImported() + ", update: " + importCount.getUpdated() + ", delete: " + importCount.getDeleted());
notifier.notify(id, notificationLevel, "Import done", true).addJobSummary(id, notificationLevel, context.getSummary(), ImportSummary.class);
return context.getSummary();
}
Usage of org.hisp.dhis.dxf2.datavalueset.ImportContext.DataSetContext in the dhis2-core project by dhis2, illustrated by the testValidateDataValueOrgUnitInUserHierarchy method of the DataValueSetImportValidatorTest class.
@Test
void testValidateDataValueOrgUnitInUserHierarchy() {
    // given: a value whose org unit is outside the current user's hierarchy
    // (the context is built with an empty set of current org units)
    DataValue value = createRandomDataValue();
    DataValueContext valueCtx = createDataValueContext(value).build();
    DataSetContext dataSetCtx = createMinimalDataSetContext().build();
    ImportContext importCtx = createMinimalImportContext(valueCtx).currentOrgUnits(emptySet()).build();
    // then: the value is skipped and an E7617 hierarchy conflict is recorded
    assertTrue(validator.skipDataValue(value, importCtx, dataSetCtx, valueCtx));
    String userUid = importCtx.getCurrentUser().getUid();
    String expectedMessage = "Organisation unit: `<object1>` not in hierarchy of current user: `" + userUid + "`";
    assertConflict(ErrorCode.E7617, expectedMessage, importCtx, value.getOrgUnit(), userUid);
}
Usage of org.hisp.dhis.dxf2.datavalueset.ImportContext.DataSetContext in the dhis2-core project by dhis2, illustrated by the testCheckDataValueNotAfterLatestOpenFuturePeriod method of the DataValueSetImportValidatorTest class.
@Test
void testCheckDataValueNotAfterLatestOpenFuturePeriod() {
    // given: forced data input disabled and ISO-8601 calendar in effect
    DataValue value = createRandomDataValue();
    DataValueContext valueCtx = createDataValueContext(value).build();
    DataSetContext dataSetCtx = createMinimalDataSetContext(createEmptyDataValueSet()).build();
    ImportContext importCtx = createMinimalImportContext(valueCtx).forceDataInput(false).isIso8601(true).build();
    // register 2020-01 as the latest open future period for this data element
    importCtx.getDataElementLatestFuturePeriodMap().put(valueCtx.getDataElement().getUid(), PeriodType.getPeriodFromIsoString("2020-01"));
    // then: the value is skipped and an E7641 period conflict is recorded
    assertTrue(validator.skipDataValue(value, importCtx, dataSetCtx, valueCtx));
    assertConflict(ErrorCode.E7641, "Period: `<object1>` is after latest open future period: `202001` for data element: `<object2>`", importCtx, value.getPeriod(), value.getDataElement());
}
Usage of org.hisp.dhis.dxf2.datavalueset.ImportContext.DataSetContext in the dhis2-core project by dhis2, illustrated by the testCheckDataValuePeriodIsOpenNow method of the DataValueSetImportValidatorTest class.
@Test
void testCheckDataValuePeriodIsOpenNow() {
    // given: forced data input disabled
    DataValue value = createRandomDataValue();
    DataValueContext valueCtx = createDataValueContext(value).build();
    DataSetContext dataSetCtx = createMinimalDataSetContext(createEmptyDataValueSet()).build();
    ImportContext importCtx = createMinimalImportContext(valueCtx).forceDataInput(false).build();
    // restrict the data set to a single declared input period (year 2019)
    DataInputPeriod declaredPeriod = new DataInputPeriod();
    declaredPeriod.setPeriod(PeriodType.getPeriodFromIsoString("2019"));
    dataSetCtx.getDataSet().setDataInputPeriods(singleton(declaredPeriod));
    // then: the value is skipped and an E7643 open-period conflict is recorded
    assertTrue(validator.skipDataValue(value, importCtx, dataSetCtx, valueCtx));
    assertConflict(ErrorCode.E7643, "Period: `<object1>` is not open for this data set at this time: `<object2>`", importCtx, value.getPeriod(), dataSetCtx.getDataSet().getUid());
}
Aggregations