Usage of io.atlasmap.v2.Collection in the atlasmap project (atlasmap/atlasmap):
class JsonFieldReader, method getJsonFieldsForPath.
/**
 * Recursively walks {@code node} following the segments of {@code path} and returns the
 * field(s) found at the terminal segment.
 *
 * <p>Traversal rules visible in this method:
 * <ul>
 *   <li>When {@code depth} equals the number of segments, the walk is complete: a COMPLEX
 *       field backed by a non-value node is populated as a {@link FieldGroup}; otherwise a
 *       single {@link JsonField} is produced from the value node.</li>
 *   <li>An unindexed collection segment fans out into one cloned field per array element,
 *       with the concrete index written into each clone's path.</li>
 * </ul>
 *
 * @param session current mapping session, passed through to value handling
 * @param node    JSON subtree positioned at the current traversal depth
 * @param field   the field (or field group) being read
 * @param path    parsed field path whose segments drive the traversal
 * @param depth   index of the next segment to consume; must not exceed the segment count
 * @return the collected fields; empty if the path does not resolve in {@code node}
 * @throws AtlasException if {@code depth} exceeds the number of path segments
 */
private List<Field> getJsonFieldsForPath(AtlasInternalSession session, JsonNode node, Field field, AtlasPath path, int depth) throws AtlasException {
List<Field> fields = new ArrayList<>();
List<SegmentContext> segments = path.getSegments(true);
if (segments.size() < depth) {
throw new AtlasException(String.format("depth '%s' exceeds segment size '%s'", depth, segments.size()));
}
if (segments.size() == depth) {
// if traversed the entire path and found value
if (field.getFieldType() == FieldType.COMPLEX && !node.isValueNode()) {
FieldGroup group = (FieldGroup) field;
populateChildFields(session, node, group, path);
fields.add(group);
} else {
JsonField jsonField = new JsonField();
AtlasModelFactory.copyField(field, jsonField, true);
if (field instanceof JsonEnumField && field.getFieldType() == FieldType.COMPLEX) {
// enum has COMPLEX by default
jsonField.setFieldType(FieldType.STRING);
}
Object value = handleValueNode(session, node, jsonField);
jsonField.setValue(value);
// reset index for subfields
jsonField.setIndex(null);
fields.add(jsonField);
}
return fields;
}
// segments.size() > depth
SegmentContext segmentContext;
JsonNode child;
if (depth == 0 && path.hasCollectionRoot()) {
// if root is a collection
child = node;
segmentContext = segments.get(depth);
} else {
if (depth == 0) {
// NOTE(review): rootNode is a field declared outside this view; presumably the
// document root captured when reading began — confirm against the class.
if (node.size() == 1 && !path.getSegments(false).get(0).getExpression().startsWith(rootNode.fieldNames().next())) {
// peel off a rooted object, i.e. mapping /orderId works for document { source: { orderId: 123 } }
node = node.elements().next();
}
if (segments.size() > 1) {
// skip the root, if not a collection
depth = 1;
}
}
segmentContext = segments.get(depth);
String fieldName = segmentContext.getName();
child = node.get(fieldName);
}
if (child == null) {
// Path does not resolve in this document: no fields to report.
return fields;
}
if (segmentContext.getCollectionType() == CollectionType.NONE) {
// Plain object segment: descend one level.
List<Field> childFields = getJsonFieldsForPath(session, child, field, path, depth + 1);
fields.addAll(childFields);
return fields;
}
// collection
if (segmentContext.getCollectionIndex() != null) {
if (child.size() <= segmentContext.getCollectionIndex()) {
// index out of range
return fields;
}
List<Field> arrayFields = getJsonFieldsForPath(session, child.get(segmentContext.getCollectionIndex()), field, path, depth + 1);
fields.addAll(arrayFields);
} else {
// if index not included, iterate over all
for (int i = 0; i < child.size(); i++) {
Field itemField;
if (field instanceof FieldGroup) {
itemField = AtlasJsonModelFactory.cloneFieldGroup((FieldGroup) field);
// Stamp index i into every descendant path at this depth.
AtlasPath.setCollectionIndexRecursively((FieldGroup) itemField, depth, i);
} else {
itemField = AtlasJsonModelFactory.cloneField((JsonField) field, false);
AtlasPath itemPath = new AtlasPath(field.getPath());
itemPath.setCollectionIndex(depth, i);
itemField.setPath(itemPath.toString());
}
// Recurse with the clone's indexed path so deeper segments resolve per element.
List<Field> arrayFields = getJsonFieldsForPath(session, child.get(i), itemField, new AtlasPath(itemField.getPath()), depth + 1);
fields.addAll(arrayFields);
}
}
return fields;
}
Usage of io.atlasmap.v2.Collection in the atlasmap project (atlasmap/atlasmap):
class JsonMarshallerTest, method testComplexRequests.
/**
 * Round-trips each complex mapping variant: serializes it to a JSON file under the
 * per-test output directory, then reads it back and validates the deserialized model.
 *
 * @throws Exception on serialization, I/O, or validation failure
 */
@Test
public void testComplexRequests() throws Exception {
    // Serialize each mapping variant to target/junit/<testMethodName>/<file>.
    runJSONSerializationTest(generatePropertyReferenceMapping(), "atlasmapping-property-request.json");
    runJSONSerializationTest(generateConstantMapping(), "atlasmapping-constant-request.json");
    runJSONSerializationTest(generateMultiSourceMapping(), "atlasmapping-multisource-request.json");
    runJSONSerializationTest(generateCollectionMapping(), "atlasmapping-collection-request.json");
    runJSONSerializationTest(generateCombineMapping(), "atlasmapping-combine-request.json");
    runJSONSerializationTest(generateActionMapping(), "atlasmapping-field-action-request.json");

    // Deserialize each file and validate the resulting model.
    AtlasMapping action = readMapping("atlasmapping-field-action-request.json");
    assertNotNull(action);
    validateAtlasMapping(action);

    AtlasMapping collection = readMapping("atlasmapping-collection-request.json");
    assertNotNull(collection);
    validateCollectionAtlasMapping(collection);

    AtlasMapping multisource = readMapping("atlasmapping-multisource-request.json");
    assertNotNull(multisource);
    validateMultisourceAtlasMapping(multisource);

    AtlasMapping propertyMapping = readMapping("atlasmapping-property-request.json");
    assertNotNull(propertyMapping);
    validatePropertyAtlasMapping(propertyMapping);

    AtlasMapping constantMapping = readMapping("atlasmapping-constant-request.json");
    assertNotNull(constantMapping);
    validateConstantAtlasMapping(constantMapping);

    AtlasMapping combineAtlasMapping = readMapping("atlasmapping-combine-request.json");
    assertNotNull(combineAtlasMapping);
    validateCombineAtlasMapping(combineAtlasMapping);
}

/**
 * Deserializes a previously serialized mapping JSON file from the per-test output
 * directory (target/junit/&lt;testMethodName&gt;/).
 *
 * @param fileName base name of the mapping file to read
 * @return the deserialized {@link AtlasMapping}
 * @throws Exception if the file cannot be read or parsed
 */
private AtlasMapping readMapping(String fileName) throws Exception {
    String dir = "target" + File.separator + "junit" + File.separator + testMethodName + File.separator;
    return mapper.readValue(new File(dir + fileName), AtlasMapping.class);
}
Usage of io.atlasmap.v2.Collection in the atlasmap project (atlasmap/atlasmap):
class BaseMarshallerTest, method validateCollectionMapping.
/**
 * Asserts the expected shape of a deserialized {@link Collection}: size 2, type LIST,
 * and two MAP mappings that each carry exactly one validated input and output JavaField.
 *
 * @param collection the collection element to validate
 */
private void validateCollectionMapping(Collection collection) {
    assertEquals(new BigInteger("2"), collection.getCollectionSize());
    assertEquals(CollectionType.LIST, collection.getCollectionType());
    // Both contained mappings are expected to have the same shape, so validate them uniformly.
    for (int i = 0; i < 2; i++) {
        Mapping mapping = (Mapping) collection.getMappings().getMapping().get(i);
        assertEquals(1, mapping.getInputField().size());
        validateJavaField((JavaField) mapping.getInputField().get(0));
        assertEquals(1, mapping.getOutputField().size());
        validateJavaField((JavaField) mapping.getOutputField().get(0));
        validateMapping(mapping, MappingType.MAP, generateMappingParams());
    }
}
Usage of io.atlasmap.v2.Collection in the atlasmap project (atlasmap/atlasmap):
class KafkaConnectFieldReader, method createValueFields.
/**
 * Materializes value field(s) for the terminal path segment.
 *
 * <p>For a non-collection segment the parent object itself is the value; for an indexed
 * collection segment only the requested element is read; for an unindexed collection
 * segment one field is produced per element, with the concrete index written into each
 * field's path.
 *
 * @param parent       value object (or List of values for collection segments)
 * @param segment      the terminal path segment being resolved
 * @param segmentIndex position of {@code segment} in the path, used when stamping indices
 * @param parentField  template field cloned for each produced value field
 * @return the produced fields
 * @throws AtlasException if type conversion fails
 */
private List<Field> createValueFields(Object parent, SegmentContext segment, int segmentIndex, KafkaConnectField parentField) throws AtlasException {
    List<Field> fields = new LinkedList<>();
    if (segment.getCollectionType() == CollectionType.NONE) {
        // Scalar segment: the parent object is the value itself.
        fields.add(createValueField(parent, parentField));
    } else if (segment.getCollectionIndex() != null) {
        // Indexed collection segment: read the single requested element.
        List<Object> collection = (List<Object>) parent;
        fields.add(createValueField(collection.get(segment.getCollectionIndex()), parentField));
    } else {
        // Unindexed collection segment: fan out to one field per element and rewrite
        // each field's path so it carries the concrete element index.
        List<Object> collection = (List<Object>) parent;
        for (int i = 0; i < collection.size(); i++) {
            KafkaConnectField kcField = createValueField(collection.get(i), parentField);
            AtlasPath path = new AtlasPath(parentField.getPath());
            path.setCollectionIndex(segmentIndex, i);
            kcField.setPath(path.toString());
            fields.add(kcField);
        }
    }
    return fields;
}

/**
 * Clones {@code parentField}, converts {@code value} to the field's declared type and
 * stores it on the clone; the clone's index is reset because subfields carry no index.
 *
 * @param value       raw value to convert and assign
 * @param parentField template field to clone
 * @return the populated clone
 * @throws AtlasException if type conversion fails
 */
private KafkaConnectField createValueField(Object value, KafkaConnectField parentField) throws AtlasException {
    KafkaConnectField kcField = AtlasKafkaConnectModelFactory.cloneField(parentField, true);
    Object converted = conversionService.convertType(value, parentField.getFormat(), parentField.getFieldType(), null);
    kcField.setValue(converted);
    // reset index for subfields
    kcField.setIndex(null);
    return kcField;
}
Usage of io.atlasmap.v2.Collection in the atlasmap project (atlasmap/atlasmap):
class KafkaConnectFieldReader, method getFieldsForPath.
/**
 * Recursively walks a Kafka Connect {@code Struct} tree following the segments of
 * {@code path} and returns the field(s) found at the terminal segment.
 *
 * <p>Traversal rules visible in this method:
 * <ul>
 *   <li>When {@code depth} equals the number of segments, the walk is complete: a
 *       {@link FieldGroup} backed by a {@code Struct} has its children populated;
 *       otherwise the parent object is assigned as the field's value.</li>
 *   <li>At depth 0 the root segment is skipped unless the path has a collection root.</li>
 *   <li>An unindexed collection segment fans out into one cloned field per element,
 *       with the concrete index written into each clone's path.</li>
 * </ul>
 *
 * @param session current mapping session (passed through to child population)
 * @param parent  object at the current traversal depth ({@code Struct} or {@code List})
 * @param field   the field (or field group) being read
 * @param path    parsed field path whose segments drive the traversal
 * @param depth   index of the next segment to consume; must not exceed the segment count
 * @return the collected fields; empty if {@code parent} is null or an index is out of range
 * @throws AtlasException if {@code depth} exceeds the number of path segments
 */
private List<Field> getFieldsForPath(AtlasInternalSession session, Object parent, Field field, AtlasPath path, int depth) throws AtlasException {
List<Field> fields = new ArrayList<>();
List<SegmentContext> segments = path.getSegments(true);
if (parent == null) {
// Nothing to read at this level; the path simply does not resolve.
return fields;
}
if (segments.size() < depth) {
throw new AtlasException(String.format("depth '%s' exceeds segment size '%s'", depth, segments.size()));
}
if (segments.size() == depth) {
// if traversed the entire path and found value
if (field instanceof FieldGroup && field.getFieldType() == FieldType.COMPLEX && (parent instanceof Struct)) {
FieldGroup group = (FieldGroup) field;
populateChildFields(session, (Struct) parent, group);
fields.add(group);
} else {
field.setValue(parent);
fields.add(field);
}
return fields;
}
// segments.size() > depth
SegmentContext segmentContext = null;
Object child = null;
List<Object> collectionChild = null;
if (depth == 0 && path.hasCollectionRoot()) {
// if root is a collection
collectionChild = (List<Object>) parent;
segmentContext = segments.get(depth);
} else {
if (depth == 0) {
// skip the root, if not a collection
depth = 1;
}
segmentContext = segments.get(depth);
String fieldName = segmentContext.getName();
// NOTE(review): assumes parent is a Struct on this branch — a List here would
// throw ClassCastException; confirm callers guarantee this.
child = ((Struct) parent).get(fieldName);
if (segmentContext.getCollectionType() != CollectionType.NONE) {
collectionChild = (List<Object>) child;
}
}
if (segmentContext.getCollectionType() == CollectionType.NONE) {
// Plain segment: descend one level.
List<Field> childFields = getFieldsForPath(session, child, field, path, depth + 1);
fields.addAll(childFields);
return fields;
}
// collection
if (segmentContext.getCollectionIndex() != null) {
if (collectionChild.size() <= segmentContext.getCollectionIndex()) {
// index out of range
return fields;
}
List<Field> arrayFields = getFieldsForPath(session, collectionChild.get(segmentContext.getCollectionIndex()), field, path, depth + 1);
fields.addAll(arrayFields);
} else {
// if index not included, iterate over all
for (int i = 0; i < collectionChild.size(); i++) {
Field itemField;
if (field instanceof FieldGroup) {
itemField = AtlasKafkaConnectModelFactory.cloneFieldGroup((FieldGroup) field);
// Stamp index i into every descendant path at this depth.
AtlasPath.setCollectionIndexRecursively((FieldGroup) itemField, depth, i);
} else {
itemField = AtlasKafkaConnectModelFactory.cloneField((KafkaConnectField) field, false);
AtlasPath itemPath = new AtlasPath(field.getPath());
itemPath.setCollectionIndex(depth, i);
itemField.setPath(itemPath.toString());
}
// Recurse with the clone's indexed path so deeper segments resolve per element.
List<Field> arrayFields = getFieldsForPath(session, collectionChild.get(i), itemField, new AtlasPath(itemField.getPath()), depth + 1);
fields.addAll(arrayFields);
}
}
return fields;
}
Aggregations