Use of io.atlasmap.kafkaconnect.v2.KafkaConnectField in the project atlasmap by atlasmap.
From the class KafkaConnectFieldReaderTest, method createFl0Field.
/**
 * Builds a test fixture field named "fl0": a LIST-typed STRING collection
 * appended under the supplied parent path.
 *
 * @param parentPath path of the enclosing field; cloned so the caller's copy is untouched
 * @return a new {@link KafkaConnectField} rooted at {@code parentPath + "/fl0<>"}
 */
private KafkaConnectField createFl0Field(AtlasPath parentPath) {
    KafkaConnectField listField = AtlasKafkaConnectModelFactory.createKafkaConnectField();
    String childPath = parentPath.clone().appendField("fl0<>").toString();
    listField.setPath(childPath);
    listField.setCollectionType(CollectionType.LIST);
    listField.setFieldType(FieldType.STRING);
    return listField;
}
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectField in the project atlasmap by atlasmap.
From the class KafkaConnectFieldReaderTest, method testReadPrimitive.
/**
 * Reading a primitive document through the root path ("/") must yield the
 * document value itself, without emitting any audits.
 */
@Test
public void testReadPrimitive() throws Exception {
    reader.setDocument("foo");
    // Stub the session so head() always returns the same Head mock.
    Head head = mock(Head.class);
    AtlasInternalSession session = mock(AtlasInternalSession.class);
    when(session.head()).thenReturn(head);
    // Source field addressing the document root as a STRING.
    KafkaConnectField sourceField = AtlasKafkaConnectModelFactory.createKafkaConnectField();
    sourceField.setPath("/");
    sourceField.setFieldType(FieldType.STRING);
    when(head.getSourceField()).thenReturn(sourceField);
    Audits audits = new Audits();
    when(session.getAudits()).thenReturn(audits);
    Field answer = reader.read(session);
    assertEquals(0, audits.getAudit().size());
    assertEquals("foo", answer.getValue());
}
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectField in the project atlasmap by atlasmap.
From the class KafkaConnectModule, method populateTargetField.
/**
 * Copies the source field value(s) onto the target field for one mapping step.
 * When the target path contains a non-indexed collection segment, the target is
 * expanded into a {@link FieldGroup} holding one sub-field per source element;
 * otherwise the single target field is populated directly via the superclass.
 *
 * NOTE(review): this method temporarily swaps the session head's source/target
 * fields around each super.populateTargetField() call and restores them after —
 * the exact statement order is load-bearing.
 *
 * @param session the active mapping session; its head supplies source and target fields
 * @throws AtlasException propagated from the superclass population logic
 */
@Override
public void populateTargetField(AtlasInternalSession session) throws AtlasException {
Field sourceField = session.head().getSourceField();
Field targetField = session.head().getTargetField();
AtlasPath path = new AtlasPath(targetField.getPath());
FieldGroup targetFieldGroup = null;
// A collection path without explicit indexes means "all elements":
// replace the scalar target with a group that will collect per-index sub-fields.
if (path.hasCollection() && !path.isIndexedCollection()) {
targetFieldGroup = AtlasModelFactory.createFieldGroupFrom(targetField, true);
session.head().setTargetField(targetFieldGroup);
}
// Attempt to Auto-detect field type based on input value
if (targetField.getFieldType() == null && sourceField.getValue() != null) {
targetField.setFieldType(getConversionService().fieldTypeFromClass(sourceField.getValue().getClass()));
}
if (targetFieldGroup == null) {
// Scalar (or indexed) target: if the source is a group, pick exactly one
// sub-field to write — either the one at targetField.getIndex(), or the last.
if (sourceField instanceof FieldGroup) {
List<Field> subFields = ((FieldGroup) sourceField).getField();
if (subFields != null && subFields.size() > 0) {
Integer index = targetField.getIndex();
if (index != null) {
if (subFields.size() > index) {
sourceField = subFields.get(index);
} else {
// Index out of range: warn and skip this mapping rather than fail.
AtlasUtil.addAudit(session, getDocId(), String.format("The number of source fields (%s) is smaller than target index (%s) - ignoring", subFields.size(), index), AuditStatus.WARN, null);
return;
}
} else {
// The last one wins for compatibility
sourceField = subFields.get(subFields.size() - 1);
}
session.head().setSourceField(sourceField);
}
}
super.populateTargetField(session);
} else if (sourceField instanceof FieldGroup) {
// Collection target fed by a source group: fan out one target sub-field
// per source sub-field, delegating each pair to the superclass.
Field previousTargetSubField = null;
for (int i = 0; i < ((FieldGroup) sourceField).getField().size(); i++) {
Field sourceSubField = ((FieldGroup) sourceField).getField().get(i);
KafkaConnectField targetSubField = AtlasKafkaConnectModelFactory.createKafkaConnectField();
AtlasKafkaConnectModelFactory.copyField(targetField, targetSubField, false);
// Derive this sub-field's collection index from the source element
// (previousTargetSubField lets the helper continue a running index).
getCollectionHelper().copyCollectionIndexes(sourceField, sourceSubField, targetSubField, previousTargetSubField);
previousTargetSubField = targetSubField;
targetFieldGroup.getField().add(targetSubField);
session.head().setSourceField(sourceSubField);
session.head().setTargetField(targetSubField);
super.populateTargetField(session);
}
// Restore the original source and the group target on the head.
session.head().setSourceField(sourceField);
session.head().setTargetField(targetFieldGroup);
} else {
// Collection target fed by a single source value: write it into the
// first vacant index of the collection.
KafkaConnectField targetSubField = new KafkaConnectField();
AtlasKafkaConnectModelFactory.copyField(targetField, targetSubField, false);
path.setVacantCollectionIndex(0);
targetSubField.setPath(path.toString());
targetFieldGroup.getField().add(targetSubField);
session.head().setTargetField(targetSubField);
super.populateTargetField(session);
session.head().setTargetField(targetFieldGroup);
}
if (LOG.isDebugEnabled()) {
LOG.debug("{}: processTargetFieldMapping completed: SourceField:[docId={}, path={}, type={}, value={}], TargetField:[docId={}, path={}, type={}, value={}]", getDocId(), sourceField.getDocId(), sourceField.getPath(), sourceField.getFieldType(), sourceField.getValue(), targetField.getDocId(), targetField.getPath(), targetField.getFieldType(), targetField.getValue());
}
}
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectField in the project atlasmap by atlasmap.
From the class KafkaConnectInspectionServiceTest, method testAvroComplex.
/**
 * Inspects the complex Avro schema in {@code avro-complex.json} and verifies
 * the inspected document structure: primitives, a nested record, an enum,
 * arrays of primitives and records, a map, an (unsupported) union, and a
 * fixed type.
 */
@Test
public void testAvroComplex() throws Exception {
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("avro-complex.json");
    // NOTE(review): relies on the platform default charset; the resource is
    // presumably UTF-8 — consider new String(bytes, StandardCharsets.UTF_8).
    KafkaConnectDocument doc = service.inspectAvro(new String(is.readAllBytes()), options);
    assertNotNull(doc);
    assertEquals("root", doc.getName());
    assertEquals("/", doc.getPath());
    assertEquals(FieldType.COMPLEX, doc.getFieldType());
    List<Field> fields = doc.getFields().getField();
    assertEquals(9, fields.size());
    // Primitive fields f1 / f2.
    Field f1 = fields.get(0);
    assertEquals("f1", f1.getName());
    assertEquals("/f1", f1.getPath());
    assertEquals(FieldType.BOOLEAN, f1.getFieldType());
    Field f2 = fields.get(1);
    assertEquals("f2", f2.getName());
    assertEquals("/f2", f2.getPath());
    assertEquals(FieldType.STRING, f2.getFieldType());
    // Nested record with two primitive children.
    Field record = fields.get(2);
    assertEquals("record", record.getName());
    assertEquals("/record", record.getPath());
    assertEquals(FieldType.COMPLEX, record.getFieldType());
    assertTrue(record instanceof KafkaConnectComplexType);
    KafkaConnectComplexType recordComplex = (KafkaConnectComplexType) record;
    assertEquals(2, recordComplex.getKafkaConnectFields().getKafkaConnectField().size());
    Field recordf1 = recordComplex.getKafkaConnectFields().getKafkaConnectField().get(0);
    assertEquals("recordf1", recordf1.getName());
    assertEquals("/record/recordf1", recordf1.getPath());
    assertEquals(FieldType.LONG, recordf1.getFieldType());
    Field recordf2 = recordComplex.getKafkaConnectFields().getKafkaConnectField().get(1);
    assertEquals("recordf2", recordf2.getName());
    assertEquals("/record/recordf2", recordf2.getPath());
    assertEquals(FieldType.DOUBLE, recordf2.getFieldType());
    // Enum field with three symbols.
    Field enumf = fields.get(3);
    assertEquals("enum", enumf.getName());
    assertEquals("/enum", enumf.getPath());
    assertEquals(FieldType.COMPLEX, enumf.getFieldType());
    // BUG FIX: previously asserted 'record instanceof ...' (copy-paste from the
    // record checks above), leaving the cast of 'enumf' below unguarded.
    assertTrue(enumf instanceof KafkaConnectComplexType);
    KafkaConnectComplexType enumfComplex = (KafkaConnectComplexType) enumf;
    List<KafkaConnectEnumField> entries = enumfComplex.getKafkaConnectEnumFields().getKafkaConnectEnumField();
    assertEquals(3, entries.size());
    assertEquals("ONE", entries.get(0).getName());
    assertEquals("TWO", entries.get(1).getName());
    assertEquals("THREE", entries.get(2).getName());
    // Array of strings.
    Field sarray = fields.get(4);
    assertEquals("sarray", sarray.getName());
    assertEquals("/sarray<>", sarray.getPath());
    assertEquals(CollectionType.LIST, sarray.getCollectionType());
    assertEquals(FieldType.STRING, sarray.getFieldType());
    // Array of records with two primitive children.
    Field rarray = fields.get(5);
    assertEquals("rarray", rarray.getName());
    assertEquals("/rarray<>", rarray.getPath());
    assertEquals(CollectionType.LIST, rarray.getCollectionType());
    assertEquals(FieldType.COMPLEX, rarray.getFieldType());
    assertTrue(rarray instanceof KafkaConnectComplexType);
    KafkaConnectComplexType rarrayComplex = (KafkaConnectComplexType) rarray;
    List<KafkaConnectField> rarrayEntries = rarrayComplex.getKafkaConnectFields().getKafkaConnectField();
    assertEquals(2, rarrayEntries.size());
    Field rarrayf3 = rarrayEntries.get(0);
    assertEquals("recordf3", rarrayf3.getName());
    assertEquals("/rarray<>/recordf3", rarrayf3.getPath());
    assertEquals(FieldType.INTEGER, rarrayf3.getFieldType());
    Field rarrayf4 = rarrayEntries.get(1);
    assertEquals("recordf4", rarrayf4.getName());
    assertEquals("/rarray<>/recordf4", rarrayf4.getPath());
    assertEquals(FieldType.FLOAT, rarrayf4.getFieldType());
    // Map of string values.
    Field map = fields.get(6);
    assertEquals("map", map.getName());
    assertEquals("/map{}", map.getPath());
    assertEquals(CollectionType.MAP, map.getCollectionType());
    assertEquals(FieldType.STRING, map.getFieldType());
    // Union types are reported as UNSUPPORTED.
    Field union = fields.get(7);
    assertEquals("union", union.getName());
    assertEquals("/union", union.getPath());
    assertEquals(FieldType.COMPLEX, union.getFieldType());
    assertTrue(union instanceof KafkaConnectComplexType);
    KafkaConnectComplexType unionComplex = (KafkaConnectComplexType) union;
    assertEquals(FieldStatus.UNSUPPORTED, unionComplex.getStatus());
    // Avro "fixed" maps to a byte array.
    Field fixed = fields.get(8);
    assertEquals("fixed", fixed.getName());
    assertEquals("/fixed", fixed.getPath());
    assertEquals(FieldType.BYTE_ARRAY, fixed.getFieldType());
}
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectField in the project atlasmap by atlasmap.
From the class KafkaConnectFieldReader, method read.
/**
 * Reads the value(s) addressed by the session head's source field from the
 * KafkaConnect document held in {@code root}.
 * Root-level COMPLEX fields are populated in place; other paths are resolved
 * into one or more leaf fields, which are wrapped in a {@link FieldGroup}
 * when the path denotes a non-indexed collection.
 *
 * @param session the active mapping session; its head supplies the source field
 *                and receives the resolved field(s)
 * @return the resolved field, field group, or the original field when
 *         resolution produced no single value
 * @throws AtlasException propagated from path resolution helpers
 */
@Override
public Field read(AtlasInternalSession session) throws AtlasException {
Field field = session.head().getSourceField();
// No document to read from: record an ERROR audit and return the field untouched.
if (root == null) {
AtlasUtil.addAudit(session, field, String.format("Cannot read a field '%s' of KafkaConnect document '%s', document is null", field.getPath(), field.getDocId()), AuditStatus.ERROR, null);
return field;
}
AtlasPath path = new AtlasPath(field.getPath());
List<Field> fields;
// Single-segment path: the field addresses the document root itself.
if (path.getSegments(true).size() == 1) {
if (field.getFieldType() == FieldType.COMPLEX) {
FieldGroup group = (FieldGroup) field;
if (path.isCollectionRoot()) {
// Root is a collection of complex elements.
// NOTE(review): unchecked cast — assumes root is a List here; confirm upstream guarantees.
nestComplexCollection(session, (List<Object>) root, group, 0);
} else {
// Root is a single complex value (a Kafka Connect Struct).
populateChildFields(session, (Struct) root, group);
}
return group;
} else {
fields = createValueFields(root, path.getRootSegment(), 0, (KafkaConnectField) field);
}
} else {
// Multi-segment path: walk the document to collect matching leaf fields.
fields = getFieldsForPath(session, root, field, path, 0);
}
// Non-indexed collection path: return all matches wrapped in a group.
if (path.hasCollection() && !path.isIndexedCollection()) {
FieldGroup fieldGroup = AtlasModelFactory.createFieldGroupFrom(field, true);
fieldGroup.getField().addAll(fields);
session.head().setSourceField(fieldGroup);
return fieldGroup;
} else if (fields.size() == 1) {
// Exactly one match: promote it to the head's source field.
Field f = fields.get(0);
session.head().setSourceField(f);
return f;
} else {
// Zero or multiple matches on a non-collection path: leave the original field as-is.
return field;
}
}
Aggregations