Use of edu.harvard.iq.dataverse.api.dto.FieldDTO in project dataverse by IQSS: the class ImportGenericServiceBean, method processXMLElement.
/**
 * Recursively walks the XML stream, translating foreign metadata elements into
 * Dataverse dataset fields via the lookup table of the supplied format mapping.
 * <p>
 * For each START_ELEMENT, the element's path ({@code currentPath + localName}) is
 * looked up in the mapping. A mapped compound field has its attribute-based child
 * mappings collected into a single compound value; a mapped primitive field has its
 * element text parsed directly. Unmapped elements are descended into recursively.
 * The method returns when the END_ELEMENT matching {@code openingTag} is reached.
 *
 * @param xmlr                 the stream reader, positioned just inside the opening tag
 * @param currentPath          colon-separated XPath-like prefix for mapping lookups
 * @param openingTag           local name whose closing tag terminates this invocation
 * @param foreignFormatMapping the foreign metadata format whose field mappings are used
 * @param datasetDTO           target DTO; mapped values are added to its metadata blocks
 * @throws XMLStreamException if the underlying stream reader fails
 */
private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException {
    logger.fine("entering processXMLElement; (" + currentPath + ")");
    for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
        if (event == XMLStreamConstants.START_ELEMENT) {
            String currentElement = xmlr.getLocalName();
            ForeignMetadataFieldMapping mappingDefined = datasetfieldService.findFieldMapping(foreignFormatMapping.getName(), currentPath + currentElement);
            if (mappingDefined != null) {
                DatasetFieldType mappingDefinedFieldType = datasetfieldService.findByNameOpt(mappingDefined.getDatasetfieldName());
                String dataverseFieldName = mappingDefined.getDatasetfieldName();
                if (mappingDefinedFieldType.isCompound()) {
                    // Compound target: collect values from XML attributes named by the
                    // attribute child mappings into one compound value (a set of FieldDTOs).
                    List<HashSet<FieldDTO>> compoundField = new ArrayList<>();
                    HashSet<FieldDTO> set = new HashSet<>();
                    for (ForeignMetadataFieldMapping childMapping : mappingDefined.getChildFieldMappings()) {
                        if (childMapping.isAttribute()) {
                            String attributeName = childMapping.getForeignFieldXPath();
                            String attributeValue = xmlr.getAttributeValue(null, attributeName);
                            if (attributeValue != null) {
                                String mappedFieldName = childMapping.getDatasetfieldName();
                                logger.fine("looking up dataset field " + mappedFieldName);
                                DatasetFieldType mappedFieldType = datasetfieldService.findByNameOpt(mappedFieldName);
                                if (mappedFieldType != null) {
                                    try {
                                        addToSet(set, attributeName, attributeValue);
                                        // TODO: replace database output with Json (createDatasetFieldValue)
                                    } catch (Exception ex) {
                                        // Best-effort: a bad attribute value must not abort the whole import.
                                        logger.warning("Caught unknown exception when processing attribute " + currentPath + currentElement + "{" + attributeName + "} (skipping);");
                                    }
                                } else {
                                    throw new EJBException("Bad foreign metadata field mapping: no such DatasetField " + mappedFieldName + "!");
                                }
                            }
                        }
                    }
                    if (!set.isEmpty()) {
                        // At least one attribute mapped: store as a multiple-compound field
                        // in the metadata block the target field type belongs to.
                        compoundField.add(set);
                        MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
                        citationBlock.addField(FieldDTO.createMultipleCompoundFieldDTO(mappingDefined.getDatasetfieldName(), compoundField));
                    } else {
                        // No attributes matched: fall back to the element's text content.
                        FieldDTO value = null;
                        if (mappingDefinedFieldType.isAllowMultiples()) {
                            List<String> values = new ArrayList<>();
                            values.add(parseText(xmlr));
                            value = FieldDTO.createMultiplePrimitiveFieldDTO(dataverseFieldName, values);
                        } else {
                            value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
                        }
                        value = makeDTO(mappingDefinedFieldType, value, dataverseFieldName);
                        MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
                        citationBlock.addField(value);
                    }
                } else if (dataverseFieldName != null && !dataverseFieldName.isEmpty()) {
                    // Primitive (non-compound) target: element text goes straight into the field,
                    // as a vocabulary value when the field type is controlled-vocabulary.
                    DatasetFieldType dataverseFieldType = datasetfieldService.findByNameOpt(dataverseFieldName);
                    FieldDTO value;
                    if (dataverseFieldType != null) {
                        if (dataverseFieldType.isControlledVocabulary()) {
                            value = FieldDTO.createVocabFieldDTO(dataverseFieldName, parseText(xmlr));
                        } else {
                            value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
                        }
                        value = makeDTO(dataverseFieldType, value, dataverseFieldName);
                        MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
                        citationBlock.addField(value);
                        // TODO: replace database output with Json (createDatasetFieldValue)
                    } else {
                        throw new EJBException("Bad foreign metadata field mapping: no such DatasetField " + dataverseFieldName + "!");
                    }
                }
            } else {
                // No mapping for this path: recursively process the xml stream further down.
                processXMLElement(xmlr, currentPath + currentElement + ":", currentElement, foreignFormatMapping, datasetDTO);
            }
        } else if (event == XMLStreamConstants.END_ELEMENT) {
            if (xmlr.getLocalName().equals(openingTag))
                return;
        }
    }
}
Use of edu.harvard.iq.dataverse.api.dto.FieldDTO in project dataverse by IQSS: the class ImportDDIServiceBean, method handleChildField.
/**
 * Wraps a single child value in its parent compound field and adds it to the
 * given custom metadata block.
 * <p>
 * The child is created as a vocabulary value when the field type allows a
 * controlled vocabulary, otherwise as a primitive value; it is then wrapped in a
 * (multiple or single) compound field named after the parent field type.
 *
 * @param customBlock the metadata block DTO the compound field is added to
 * @param dsfType     the child field type; must have a parent compound field type
 * @param fieldValue  the raw value for the child field
 * @throws ImportException if the child type is neither vocabulary nor primitive,
 *                         or has no parent compound field type
 */
private void handleChildField(MetadataBlockDTO customBlock, DatasetFieldType dsfType, String fieldValue) throws ImportException {
    DatasetFieldType parent = dsfType.getParentDatasetFieldType();
    // Guard: a missing parent would otherwise surface as an NPE below instead of
    // a meaningful import error.
    if (parent == null) {
        throw new ImportException("Custom child field has no parent compound field type: " + dsfType);
    }
    // Create child Field
    FieldDTO child = null;
    if (dsfType.isAllowControlledVocabulary()) {
        child = FieldDTO.createVocabFieldDTO(dsfType.getName(), fieldValue);
    } else if (dsfType.isPrimitive()) {
        child = FieldDTO.createPrimitiveFieldDTO(dsfType.getName(), fieldValue);
    } else {
        throw new ImportException("Unsupported custom child field type: " + dsfType);
    }
    // Create compound field with this child as its only element
    FieldDTO compound = null;
    if (parent.isAllowMultiples()) {
        compound = FieldDTO.createMultipleCompoundFieldDTO(parent.getName(), child);
    } else {
        compound = FieldDTO.createCompoundFieldDTO(parent.getName(), child);
    }
    customBlock.addField(compound);
}
Use of edu.harvard.iq.dataverse.api.dto.FieldDTO in project dataverse by IQSS: the class ImportDDIServiceBean, method processTargetSampleSize.
/**
 * Parses a DDI {@code targetSampleSize} element into the social-science
 * metadata block.
 * <p>
 * Collects the optional {@code sampleSize} and {@code sampleSizeFormula}
 * children; if at least one is present when the closing tag is reached, they
 * are added as a single {@code targetSampleSize} compound field.
 *
 * @param xmlr          stream reader positioned inside the targetSampleSize element
 * @param socialScience metadata block DTO that receives the compound field
 * @throws XMLStreamException if the underlying stream reader fails
 */
private void processTargetSampleSize(XMLStreamReader xmlr, MetadataBlockDTO socialScience) throws XMLStreamException {
    FieldDTO actualSize = null;
    FieldDTO formula = null;
    int event;
    while ((event = xmlr.next()) != XMLStreamConstants.END_DOCUMENT) {
        if (event == XMLStreamConstants.START_ELEMENT) {
            String tag = xmlr.getLocalName();
            if ("sampleSize".equals(tag)) {
                actualSize = FieldDTO.createPrimitiveFieldDTO("targetSampleActualSize", parseText(xmlr, "sampleSize"));
            } else if ("sampleSizeFormula".equals(tag)) {
                formula = FieldDTO.createPrimitiveFieldDTO("targetSampleSizeFormula", parseText(xmlr, "sampleSizeFormula"));
            }
        } else if (event == XMLStreamConstants.END_ELEMENT && "targetSampleSize".equals(xmlr.getLocalName())) {
            // Only emit the compound field when at least one child was found.
            if (actualSize != null || formula != null) {
                socialScience.getFields().add(FieldDTO.createCompoundFieldDTO("targetSampleSize", actualSize, formula));
            }
            return;
        }
    }
}
Use of edu.harvard.iq.dataverse.api.dto.FieldDTO in project dataverse by IQSS: the class DatasetVersionDTOTest, method testReadDataSet.
@Test
public void testReadDataSet() throws IOException {
    // Deserialize the fixture dataset version and verify the "author" compound
    // field round-trips to the expected structure.
    // Let IOException propagate so a missing/unreadable fixture FAILS the test
    // instead of being swallowed by printStackTrace().
    File file = new File("src/test/java/edu/harvard/iq/dataverse/util/json/JsonDatasetVersion.txt");
    String text;
    // try-with-resources: the Scanner was previously never closed.
    try (Scanner scanner = new Scanner(file)) {
        text = scanner.useDelimiter("\\Z").next();
    }
    Gson gson = new Gson();
    DatasetVersionDTO dto = gson.fromJson(text, DatasetVersionDTO.class);
    // Expected authors: two compound values, each a set of child fields.
    HashSet<FieldDTO> author1Fields = new HashSet<>();
    author1Fields.add(FieldDTO.createPrimitiveFieldDTO("authorAffiliation", "Top"));
    author1Fields.add(FieldDTO.createPrimitiveFieldDTO("authorIdentifier", "ellenid"));
    author1Fields.add(FieldDTO.createVocabFieldDTO("authorIdentifierScheme", "ORCID"));
    author1Fields.add(FieldDTO.createPrimitiveFieldDTO("authorName", "Privileged, Pete"));
    HashSet<FieldDTO> author2Fields = new HashSet<>();
    author2Fields.add(FieldDTO.createPrimitiveFieldDTO("authorAffiliation", "Bottom"));
    author2Fields.add(FieldDTO.createPrimitiveFieldDTO("authorIdentifier", "audreyId"));
    author2Fields.add(FieldDTO.createVocabFieldDTO("authorIdentifierScheme", "DAISY"));
    author2Fields.add(FieldDTO.createPrimitiveFieldDTO("authorName", "Awesome, Audrey"));
    List<HashSet<FieldDTO>> authorList = new ArrayList<>();
    authorList.add(author1Fields);
    authorList.add(author2Fields);
    FieldDTO expectedDTO = new FieldDTO();
    expectedDTO.setTypeName("author");
    expectedDTO.setMultipleCompound(authorList);
    FieldDTO authorDTO = dto.getMetadataBlocks().get("citation").getFields().get(1);
    // write both dto's to json to compare them with gson parser
    JsonElement expected = gson.toJsonTree(expectedDTO, FieldDTO.class);
    JsonElement result = gson.toJsonTree(authorDTO);
    Assert.assertEquals(expected, result);
}
Use of edu.harvard.iq.dataverse.api.dto.FieldDTO in project dataverse by IQSS: the class JsonParser, method remapGeographicCoverage.
/**
 * Special processing for the GeographicCoverage compound field:
 * handles parsing exceptions caused by invalid controlled-vocabulary values in
 * the "country" field by putting each invalid value into an
 * "otherGeographicCoverage" child in a new compound value.
 *
 * @param ex contains the invalid values to be processed, plus the valid
 *           compound values that should be retained
 * @return a compound DatasetField containing the newly created values, in
 *         addition to the original valid values
 * @throws JsonParseException if the rebuilt field JSON cannot be parsed
 */
private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) throws JsonParseException {
    List<HashSet<FieldDTO>> geoCoverageList = new ArrayList<>();
    // For each exception, create a HashSet holding an otherGeographicCoverage
    // value and add it to the list as its own compound value.
    for (ControlledVocabularyException vocabEx : ex.getExList()) {
        HashSet<FieldDTO> set = new HashSet<>();
        set.add(FieldDTO.createPrimitiveFieldDTO(DatasetFieldConstant.otherGeographicCoverage, vocabEx.getStrValue()));
        geoCoverageList.add(set);
    }
    FieldDTO geoCoverageDTO = FieldDTO.createMultipleCompoundFieldDTO(DatasetFieldConstant.geographicCoverage, geoCoverageList);
    // Convert the DTO to a DatasetField (via JSON) so we can add back the valid values.
    Gson gson = new Gson();
    String jsonString = gson.toJson(geoCoverageDTO);
    JsonObject obj;
    // try-with-resources: the JsonReader was previously never closed.
    try (JsonReader jsonReader = Json.createReader(new StringReader(jsonString))) {
        obj = jsonReader.readObject();
    }
    DatasetField geoCoverageField = parseField(obj);
    // Re-attach the valid compound values from the exception.
    for (DatasetFieldCompoundValue dsfcv : ex.getValidValues()) {
        if (!dsfcv.getChildDatasetFields().isEmpty()) {
            dsfcv.setParentDatasetField(geoCoverageField);
            geoCoverageField.getDatasetFieldCompoundValues().add(dsfcv);
        }
    }
    return geoCoverageField;
}
Aggregations