Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class KafkaConnectFieldReader, method nestComplexCollection.
private void nestComplexCollection(AtlasInternalSession session, List<Object> collection, FieldGroup parent, int depth) throws AtlasException {
    AtlasPath path = new AtlasPath(parent.getPath());
    SegmentContext segment = path.getSegments(true).get(depth);
    if (segment.getCollectionIndex() != null) {
        // The path already points at one element: read that single Struct into the parent group.
        int index = segment.getCollectionIndex();
        Struct struct = (Struct) collection.get(index);
        populateChildFields(session, struct, parent);
        return;
    }
    // No index on this segment: expand into one cloned group per collection element.
    List<Field> processed = new LinkedList<>();
    for (int index = 0; index < collection.size(); index++) {
        FieldGroup itemGroup = AtlasKafkaConnectModelFactory.cloneFieldGroup(parent);
        AtlasPath.setCollectionIndexRecursively(itemGroup, depth, index);
        processed.add(itemGroup);
        Struct struct = (Struct) collection.get(index);
        populateChildFields(session, struct, itemGroup);
    }
    // Replace the parent's children with the per-element groups.
    parent.getField().clear();
    parent.getField().addAll(processed);
}
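For orientation, a minimal sketch of the expansion performed by the unindexed branch, using only the io.atlasmap.v2.FieldGroup API visible above; the "/records" path and the two-element collection are invented for illustration, and the indexed groups are built by hand here rather than via AtlasKafkaConnectModelFactory.cloneFieldGroup:

// Sketch only; assumes io.atlasmap.v2.FieldGroup and java.util.Arrays are imported.
FieldGroup parent = new FieldGroup();
parent.setPath("/records<>");    // unindexed collection path (hypothetical)

FieldGroup item0 = new FieldGroup();
item0.setPath("/records<0>");
FieldGroup item1 = new FieldGroup();
item1.setPath("/records<1>");

// nestComplexCollection replaces the parent's children with one group per element:
parent.getField().clear();
parent.getField().addAll(Arrays.asList(item0, item1));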
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class XmlFieldReader, method populateChildFields.
private void populateChildFields(AtlasInternalSession session, Optional<XmlNamespaces> xmlNamespaces, Element node, FieldGroup fieldGroup, AtlasPath path) throws AtlasException {
    List<Field> newChildren = new ArrayList<>();
    for (Field child : fieldGroup.getField()) {
        XmlPath childPath = new XmlPath(child.getPath());
        String fieldNamespace = childPath.getLastSegment().getNamespace();
        Optional<String> namespace = getNamespace(xmlNamespaces, fieldNamespace);
        List<Element> children = XmlIOHelper.getChildrenWithNameStripAlias(childPath.getLastSegment().getName(), namespace, node);
        if (childPath.getLastSegment().getCollectionType() != CollectionType.NONE) {
            // Collection segment: expand into one item per matching child element.
            FieldGroup childGroup = populateCollectionItems(session, xmlNamespaces, children, child);
            newChildren.add(childGroup);
        } else {
            if (child instanceof FieldGroup) {
                // Complex child: recurse into the first matching element.
                populateChildFields(session, xmlNamespaces, children.get(0), (FieldGroup) child, childPath);
            } else {
                // Leaf child: copy the element value into the XmlField.
                copyValue(session, xmlNamespaces, childPath.getLastSegment(), children.get(0), (XmlField) child);
            }
            newChildren.add(child);
        }
    }
    fieldGroup.getField().clear();
    fieldGroup.getField().addAll(newChildren);
}
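To make the recursion concrete, here is a rough sketch of the kind of field tree this method fills in for an XML document such as <order><contact><name>foo</name></contact></order>; the element names and paths are invented, and instantiating XmlField directly with a no-arg constructor is an assumption about the io.atlasmap.xml.v2 model:

FieldGroup contact = new FieldGroup();
contact.setPath("/order/contact");        // complex, non-collection segment (hypothetical)

XmlField name = new XmlField();           // assumed no-arg constructor on io.atlasmap.xml.v2.XmlField
name.setPath("/order/contact/name");
name.setFieldType(FieldType.STRING);
contact.getField().add(name);

// populateChildFields() recurses into the FieldGroup child ("contact"), copies the
// element text into the leaf XmlField ("name"), and rebuilds the child list as it goes.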
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class KafkaConnectFieldReaderTest, method testReadPrimitiveArray.
@Test
public void testReadPrimitiveArray() throws Exception {
    reader.setDocument(Arrays.asList(new String[] { "foo", "bar", "val" }));
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(mock(Head.class));
    KafkaConnectField field = AtlasKafkaConnectModelFactory.createKafkaConnectField();
    field.setPath("/<>");
    field.setFieldType(FieldType.STRING);
    when(session.head().getSourceField()).thenReturn(field);
    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);
    // Reading the whole collection ("/<>") returns a FieldGroup with one indexed child per element.
    Field answer = reader.read(session);
    assertEquals(0, audits.getAudit().size());
    assertTrue(answer instanceof FieldGroup);
    FieldGroup group = (FieldGroup) answer;
    assertEquals("/<>", group.getPath());
    Field child = group.getField().get(0);
    assertEquals(FieldType.STRING, child.getFieldType());
    assertEquals("foo", child.getValue());
    assertEquals("/<0>", child.getPath());
    child = group.getField().get(1);
    assertEquals(FieldType.STRING, child.getFieldType());
    assertEquals("bar", child.getValue());
    assertEquals("/<1>", child.getPath());
    child = group.getField().get(2);
    assertEquals(FieldType.STRING, child.getFieldType());
    assertEquals("val", child.getValue());
    assertEquals("/<2>", child.getPath());
    // Reading an indexed path ("/<1>") returns the single element value directly.
    field.setPath("/<1>");
    answer = reader.read(session);
    assertEquals("bar", answer.getValue());
}
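As a follow-on, a short sketch of how a caller could walk the FieldGroup returned for the unindexed path in the test above; the loop itself is illustrative and not part of the test:

Field answer = reader.read(session);
if (answer instanceof FieldGroup) {
    for (Field item : ((FieldGroup) answer).getField()) {
        // prints "/<0> -> foo", "/<1> -> bar", "/<2> -> val" for the document set above
        System.out.println(item.getPath() + " -> " + item.getValue());
    }
}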
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class KafkaConnectFieldReaderTest, method assertfc0.
private void assertfc0(FieldGroup fc0, String rootPrefix) {
    assertEquals(rootPrefix + "/fc0", fc0.getPath());
    assertEquals(FieldType.COMPLEX, fc0.getFieldType());
    assertEquals(1, fc0.getField().size());
    Field fc0f0 = fc0.getField().get(0);
    assertEquals(rootPrefix + "/fc0/f0", fc0f0.getPath());
    assertEquals(FieldType.STRING, fc0f0.getFieldType());
    assertEquals("bar", fc0f0.getValue());
}
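For reference, a FieldGroup that would satisfy this assertion can be built by hand roughly as follows; the real tests obtain the group from the reader, so this construction is only a sketch and assumes FieldGroup's no-arg constructor plus the setters shown elsewhere on this page:

FieldGroup fc0 = new FieldGroup();
fc0.setPath("/fc0");
fc0.setFieldType(FieldType.COMPLEX);

KafkaConnectField f0 = AtlasKafkaConnectModelFactory.createKafkaConnectField();
f0.setPath("/fc0/f0");
f0.setFieldType(FieldType.STRING);
f0.setValue("bar");
fc0.getField().add(f0);

assertfc0(fc0, "");    // rootPrefix is empty for a top-level /fc0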
Use of io.atlasmap.v2.FieldGroup in project atlasmap by atlasmap.
The class DefaultAtlasContext, method processTargetFieldMapping.
private void processTargetFieldMapping(DefaultAtlasSession session, Mapping mapping) {
    MappingType mappingType = mapping.getMappingType();
    List<Field> sourceFields = mapping.getInputField();
    List<Field> targetFields = mapping.getOutputField();
    AtlasModule module = null;
    Field targetField = null;
    if (mappingType == null || mappingType == MappingType.LOOKUP || mappingType == MappingType.MAP) {
        Field sourceField = session.head().getSourceField();
        FieldGroup sourceFieldGroup = null;
        if (sourceField instanceof FieldGroup) {
            sourceFieldGroup = unwrapNestedGroup((FieldGroup) sourceField);
        }
        for (Field f : targetFields) {
            targetField = f;
            module = resolveModule(FieldDirection.TARGET, targetField);
            if (!auditTargetFieldType(session, module, targetField)) {
                continue;
            }
            session.head().setTargetField(targetField);
            if (sourceFieldGroup != null) {
                Integer index = targetField.getIndex();
                AtlasPath targetPath = new AtlasPath(targetField.getPath());
                if (targetPath.hasCollection() && !targetPath.isIndexedCollection()) {
                    // Collection-to-collection: the whole source group feeds the target.
                    if (targetFields.size() > 1) {
                        AtlasUtil.addAudit(session, targetField, "It's not yet supported to have a collection field as a part of multiple target fields in a same mapping", AuditStatus.ERROR, null);
                        return;
                    }
                    session.head().setSourceField(sourceFieldGroup);
                } else if (index == null) {
                    // No index on the target: use the last source field in the group.
                    if (sourceFieldGroup.getField().size() > 0) {
                        session.head().setSourceField(sourceFieldGroup.getField().get(sourceFieldGroup.getField().size() - 1));
                    }
                } else {
                    // Indexed target: pick the matching source field if one exists.
                    if (sourceFieldGroup.getField().size() > index) {
                        session.head().setSourceField(sourceFieldGroup.getField().get(index));
                    } else {
                        AtlasUtil.addAudit(session, targetField, String.format("The number of source fields '%s' is fewer than expected via target field index '%s'", sourceFieldGroup.getField().size(), targetField.getIndex()), AuditStatus.WARN, null);
                        continue;
                    }
                }
            }
            try {
                module.populateTargetField(session);
            } catch (Exception e) {
                AtlasUtil.addAudit(session, targetField, "Failed to populate target field: " + e.getMessage(), AuditStatus.ERROR, null);
                if (LOG.isDebugEnabled()) {
                    LOG.error(String.format("populateTargetField() failed for %s:%s", targetField.getDocId(), targetField.getPath()), e);
                }
                return;
            }
            Field processed = applyFieldActions(session, session.head().getTargetField());
            session.head().setTargetField(processed);
            try {
                module.writeTargetValue(session);
            } catch (Exception e) {
                AtlasUtil.addAudit(session, targetField, "Failed to write field value into target document: " + e.getMessage(), AuditStatus.ERROR, null);
                if (LOG.isDebugEnabled()) {
                    LOG.error(String.format("writeTargetValue() failed for %s:%s", targetField.getDocId(), targetField.getPath()), e);
                }
                return;
            }
        }
        return;
    } else if (mappingType == MappingType.COMBINE) {
        targetField = targetFields.get(0);
        module = resolveModule(FieldDirection.TARGET, targetField);
        if (!auditTargetFieldType(session, module, targetField)) {
            return;
        }
        Field sourceField = processCombineField(session, mapping, sourceFields, targetField);
        session.head().setSourceField(sourceField).setTargetField(targetField);
        try {
            module.populateTargetField(session);
        } catch (Exception e) {
            AtlasUtil.addAudit(session, targetField, "Failed to populate target field: " + e.getMessage(), AuditStatus.ERROR, null);
            return;
        }
        applyFieldActions(session, session.head().getTargetField());
        try {
            module.writeTargetValue(session);
        } catch (Exception e) {
            AtlasUtil.addAudit(session, targetField, "Failed to write field value into target document: " + e.getMessage(), AuditStatus.ERROR, null);
            return;
        }
        return;
    } else if (mappingType == MappingType.SEPARATE) {
        List<Field> separatedFields = null;
        try {
            separatedFields = processSeparateField(session, mapping, sourceFields.get(0));
        } catch (Exception e) {
            AtlasUtil.addAudit(session, targetField, "Failed to process separate mode: " + e.getMessage(), AuditStatus.ERROR, null);
            return;
        }
        if (separatedFields == null) {
            return;
        }
        for (Field f : targetFields) {
            targetField = f;
            module = resolveModule(FieldDirection.TARGET, targetField);
            if (!auditTargetFieldType(session, module, targetField)) {
                continue;
            }
            if (targetField.getIndex() == null || targetField.getIndex() < 0) {
                AtlasUtil.addAudit(session, targetField, String.format("Separate requires zero or positive Index value to be set on targetField targetField.path=%s", targetField.getPath()), AuditStatus.WARN, null);
                continue;
            }
            if (separatedFields.size() <= targetField.getIndex()) {
                String errorMessage = String.format("Separate returned fewer segments count=%s when targetField.path=%s requested index=%s", separatedFields.size(), targetField.getPath(), targetField.getIndex());
                AtlasUtil.addAudit(session, targetField, errorMessage, AuditStatus.WARN, null);
                break;
            }
            session.head().setSourceField(separatedFields.get(targetField.getIndex())).setTargetField(targetField);
            try {
                module.populateTargetField(session);
            } catch (Exception e) {
                AtlasUtil.addAudit(session, targetField, "Failed to populate target field: " + e.getMessage(), AuditStatus.ERROR, null);
                return;
            }
            Field processed = applyFieldActions(session, session.head().getTargetField());
            session.head().setTargetField(processed);
            try {
                module.writeTargetValue(session);
            } catch (Exception e) {
                AtlasUtil.addAudit(session, targetField, "Failed to write field value into target document: " + e.getMessage(), AuditStatus.ERROR, null);
                return;
            }
        }
        return;
    }
    AtlasUtil.addAudit(session, (String) null, String.format("Unsupported mappingType=%s detected", mapping.getMappingType()), AuditStatus.ERROR, null);
}
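The index handling in the MAP/LOOKUP branch reduces to picking one child out of the unwrapped source FieldGroup. A standalone sketch of that selection, with invented values and assuming io.atlasmap.v2.SimpleField as the plain Field implementation:

FieldGroup sourceFieldGroup = new FieldGroup();
SimpleField a = new SimpleField();
a.setValue("a");
SimpleField b = new SimpleField();
b.setValue("b");
sourceFieldGroup.getField().add(a);
sourceFieldGroup.getField().add(b);

Integer index = 1;    // would come from targetField.getIndex()
Field selected = (index == null)
        ? sourceFieldGroup.getField().get(sourceFieldGroup.getField().size() - 1)   // no index: last source field
        : sourceFieldGroup.getField().get(index);                                   // indexed: matching source field ("b")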