Usage of org.hisp.dhis.dxf2.csv.CsvImportService in project dhis2-core by dhis2.
Example taken from class MetaDataImportAction, method execute().
// -------------------------------------------------------------------------
// Action Implementation
// -------------------------------------------------------------------------
/**
 * Schedules an asynchronous metadata import task for the uploaded file.
 * <p>
 * The upload is wrapped to transparently handle compressed content, then
 * dispatched to a format-specific import task (CSV, GML, JSON or XML).
 * The scheduled task takes ownership of the input stream; if no task is
 * scheduled (unknown format, or an unsupported/missing CSV class key),
 * the stream is closed here to avoid leaking a file handle.
 *
 * @return {@code SUCCESS} in all cases; errors are reported through the
 *         notifier by the scheduled task, not by this action.
 * @throws Exception if the upload cannot be opened or read.
 */
@Override
public String execute() throws Exception {
    // Default to create-or-update semantics when no strategy was supplied.
    strategy = strategy != null ? strategy : ImportStrategy.NEW_AND_UPDATES;

    User user = currentUserService.getCurrentUser();
    TaskId taskId = new TaskId(TaskCategory.METADATA_IMPORT, user);
    notifier.clear(taskId);

    // Wrap the upload so gzip/zip content is transparently decompressed.
    InputStream in = StreamUtils.wrapAndCheckCompressionFormat(new FileInputStream(upload));
    MetadataImportParams importParams = createMetadataImportParams(taskId, strategy, atomicMode, dryRun).setFilename(uploadFileName);

    // Tracks whether a task took ownership of the stream; the task is
    // responsible for closing it once the import completes.
    boolean taskScheduled = false;

    if ("csv".equals(importFormat)) {
        if (classKey != null && CSV_SUPPORTED_CLASSES.containsKey(classKey)) {
            scheduler.executeTask(new ImportMetaDataCsvTask(importService, csvImportService, schemaService, importParams, in, CSV_SUPPORTED_CLASSES.get(classKey)));
            taskScheduled = true;
        }
    } else if ("gml".equals(importFormat)) {
        scheduler.executeTask(new ImportMetaDataGmlTask(gmlImportService, importParams, in));
        taskScheduled = true;
    } else if ("json".equals(importFormat) || "xml".equals(importFormat)) {
        scheduler.executeTask(new ImportMetaDataTask(importService, schemaService, importParams, in, importFormat));
        taskScheduled = true;
    }

    if (!taskScheduled) {
        // No importer matched: nothing will ever consume or close the
        // stream, so close it here to prevent a file handle leak.
        in.close();
    }

    return SUCCESS;
}
Usage of org.hisp.dhis.dxf2.csv.CsvImportService in project dhis2-core by dhis2.
Example taken from class DefaultCsvImportService, method fromCsv(InputStream, Class).
// -------------------------------------------------------------------------
// CsvImportService implementation
// -------------------------------------------------------------------------
/**
 * Parses CSV content into a {@link Metadata} container for the given
 * object type. The first record of the input is treated as a header row
 * and skipped. Types without a matching branch yield an empty container.
 *
 * @param input the CSV input stream, read as UTF-8.
 * @param clazz the identifiable object type the rows represent.
 * @return a {@link Metadata} instance populated for the given type.
 * @throws IOException if reading from the input fails.
 */
@Override
public Metadata fromCsv(InputStream input, Class<? extends IdentifiableObject> clazz) throws IOException {
    CsvReader csv = new CsvReader(input, Charset.forName("UTF-8"));

    // The first record is a header row; skip it before parsing.
    csv.readRecord();

    Metadata meta = new Metadata();

    // Dispatch on the requested object type to the matching row parser.
    if (OptionSet.class.equals(clazz)) {
        setOptionSetsFromCsv(csv, meta);
    } else if (ValidationRule.class.equals(clazz)) {
        meta.setValidationRules(validationRulesFromCsv(csv));
    } else if (OrganisationUnitGroup.class.equals(clazz)) {
        meta.setOrganisationUnitGroups(organisationUnitGroupsFromCsv(csv));
    } else if (OrganisationUnit.class.equals(clazz)) {
        meta.setOrganisationUnits(organisationUnitsFromCsv(csv));
    } else if (CategoryOptionGroup.class.equals(clazz)) {
        meta.setCategoryOptionGroups(categoryOptionGroupsFromCsv(csv));
    } else if (DataElementCategoryOption.class.equals(clazz)) {
        meta.setCategoryOptions(categoryOptionsFromCsv(csv));
    } else if (DataElementGroup.class.equals(clazz)) {
        meta.setDataElementGroups(dataElementGroupsFromCsv(csv));
    } else if (DataElement.class.equals(clazz)) {
        meta.setDataElements(dataElementsFromCsv(csv));
    }

    return meta;
}
Usage of org.hisp.dhis.dxf2.csv.CsvImportService in project dhis2-core by dhis2.
Example taken from class DefaultCsvImportService, method fromCsv(InputStream, CsvImportOptions).
// -------------------------------------------------------------------------
// CsvImportService implementation
// -------------------------------------------------------------------------
// TODO Add unit tests
// -------------------------------------------------------------------------
// CsvImportService implementation
// -------------------------------------------------------------------------
// TODO Add unit tests
/**
 * Parses CSV content into a {@link Metadata} container according to the
 * supplied import options. Optionally skips a leading header row. Import
 * classes without a matching case yield an empty container.
 *
 * @param input   the CSV input stream.
 * @param options import options carrying the target class and header flag.
 * @return a {@link Metadata} instance populated for the selected class.
 * @throws IOException if reading from the input fails.
 */
@Override
public Metadata fromCsv(InputStream input, CsvImportOptions options) throws IOException {
    CsvReader csv = CsvUtils.getReader(input);

    // The per-record size safety limit is disabled because geometry
    // columns (org unit coordinates) can legitimately be very large.
    csv.setSafetySwitch(false);

    if (options.isFirstRowIsHeader()) {
        // Skip the header row before parsing data records.
        csv.readRecord();
    }

    Metadata meta = new Metadata();

    // Route to the row parser matching the requested import class.
    switch (options.getImportClass()) {
        case DATA_ELEMENT:
            meta.setDataElements(dataElementsFromCsv(csv));
            break;

        case DATA_ELEMENT_GROUP:
            meta.setDataElementGroups(dataElementGroupsFromCsv(csv));
            break;

        case DATA_ELEMENT_GROUP_MEMBERSHIP:
            meta.setDataElementGroups(dataElementGroupMembersFromCsv(csv));
            break;

        case INDICATOR_GROUP_MEMBERSHIP:
            meta.setIndicatorGroups(indicatorGroupMembersFromCsv(csv));
            break;

        case CATEGORY_OPTION:
            meta.setCategoryOptions(categoryOptionsFromCsv(csv));
            break;

        case CATEGORY:
            meta.setCategories(categoriesFromCsv(csv));
            break;

        case CATEGORY_COMBO:
            meta.setCategoryCombos(categoryCombosFromCsv(csv));
            break;

        case CATEGORY_OPTION_GROUP:
            meta.setCategoryOptionGroups(categoryOptionGroupsFromCsv(csv));
            break;

        case ORGANISATION_UNIT:
            meta.setOrganisationUnits(orgUnitsFromCsv(csv));
            break;

        case ORGANISATION_UNIT_GROUP:
            meta.setOrganisationUnitGroups(orgUnitGroupsFromCsv(csv));
            break;

        case ORGANISATION_UNIT_GROUP_MEMBERSHIP:
            meta.setOrganisationUnitGroups(orgUnitGroupMembersFromCsv(csv));
            break;

        case VALIDATION_RULE:
            meta.setValidationRules(validationRulesFromCsv(csv));
            break;

        case OPTION_SET:
            setOptionSetsFromCsv(csv, meta);
            break;

        case OPTION_GROUP:
            setOptionGroupsFromCsv(csv, meta);
            break;

        case OPTION_GROUP_SET:
            meta.setOptionGroupSets(setOptionGroupSetFromCsv(csv));
            break;

        case OPTION_GROUP_SET_MEMBERSHIP:
            meta.setOptionGroupSets(optionGroupSetMembersFromCsv(csv));
            break;

        default:
            // Unrecognized import class: return an empty container,
            // matching the behavior callers currently rely on.
            break;
    }

    return meta;
}
Aggregations