Use of io.atlasmap.kafkaconnect.v2.KafkaConnectDocument in project atlasmap by atlasmap.
Class KafkaConnectService, method inspect:
/**
 * Inspects a Kafka Connect schema and returns a Document object.
 * @param request request
 * @return {@link KafkaConnectInspectionResponse}
 */
public Response inspect(KafkaConnectInspectionRequest request) {
    long startTime = System.currentTimeMillis();
    KafkaConnectInspectionResponse response = new KafkaConnectInspectionResponse();
    KafkaConnectDocument d = null;
    try {
        // Prefer the AtlasService library loader when running inside the REST container;
        // fall back to this class' own loader otherwise.
        ClassLoader loader = resourceContext != null
                ? resourceContext.getResource(AtlasService.class).getLibraryLoader()
                : KafkaConnectService.class.getClassLoader();
        KafkaConnectInspectionService s = new KafkaConnectInspectionService(loader);
        String schemaTypeStr = request.getOptions().get(KafkaConnectConstants.OPTIONS_SCHEMA_TYPE);
        KafkaConnectSchemaType schemaType = KafkaConnectSchemaType.valueOf(schemaTypeStr);
        HashMap<String, Object> options = KafkaConnectUtil.repackParserOptions(request.getOptions());
        switch (schemaType) {
            case JSON:
                d = s.inspectJson(request.getSchemaData(), options);
                break;
            case AVRO:
                d = s.inspectAvro(request.getSchemaData(), options);
                break;
            default:
                response.setErrorMessage("Unsupported inspection type: " + schemaType);
                break;
        }
    } catch (Exception e) {
        LOG.error("Error inspecting Kafka Connect schema: " + e.getMessage(), e);
        response.setErrorMessage(e.getMessage());
    } finally {
        response.setExecutionTime(System.currentTimeMillis() - startTime);
    }
    response.setKafkaConnectDocument(d);
    return Response.ok().entity(toJson(response)).build();
}
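The tests below exercise only the AVRO branch of this method. As a minimal sketch of the JSON branch (not taken from the AtlasMap sources; it assumes repackParserOptions accepts the same string-keyed option map that request.getOptions() supplies, and that inspectJson may throw checked exceptions), the inspection service can also be driven directly:

// Sketch only: drives the JSON branch of KafkaConnectInspectionService directly.
KafkaConnectDocument inspectJsonSchema(String jsonSchemaString) throws Exception {
    KafkaConnectInspectionService svc =
            new KafkaConnectInspectionService(KafkaConnectService.class.getClassLoader());
    // Assumption: repackParserOptions takes a Map<String, String> as in inspect() above.
    java.util.Map<String, String> rawOptions = new HashMap<>();
    rawOptions.put(KafkaConnectConstants.OPTIONS_SCHEMA_TYPE, "JSON");
    HashMap<String, Object> parserOptions = KafkaConnectUtil.repackParserOptions(rawOptions);
    return svc.inspectJson(jsonSchemaString, parserOptions);
}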
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectDocument in project atlasmap by atlasmap.
Class KafkaConnectInspectionServiceTest, method testAvroComplex:
@Test
public void testAvroComplex() throws Exception {
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("avro-complex.json");
    KafkaConnectDocument doc = service.inspectAvro(new String(is.readAllBytes()), options);
    assertNotNull(doc);
    assertEquals("root", doc.getName());
    assertEquals("/", doc.getPath());
    assertEquals(FieldType.COMPLEX, doc.getFieldType());
    List<Field> fields = doc.getFields().getField();
    assertEquals(9, fields.size());
    // primitive fields
    Field f1 = fields.get(0);
    assertEquals("f1", f1.getName());
    assertEquals("/f1", f1.getPath());
    assertEquals(FieldType.BOOLEAN, f1.getFieldType());
    Field f2 = fields.get(1);
    assertEquals("f2", f2.getName());
    assertEquals("/f2", f2.getPath());
    assertEquals(FieldType.STRING, f2.getFieldType());
    // nested record
    Field record = fields.get(2);
    assertEquals("record", record.getName());
    assertEquals("/record", record.getPath());
    assertEquals(FieldType.COMPLEX, record.getFieldType());
    assertTrue(record instanceof KafkaConnectComplexType);
    KafkaConnectComplexType recordComplex = (KafkaConnectComplexType) record;
    assertEquals(2, recordComplex.getKafkaConnectFields().getKafkaConnectField().size());
    Field recordf1 = recordComplex.getKafkaConnectFields().getKafkaConnectField().get(0);
    assertEquals("recordf1", recordf1.getName());
    assertEquals("/record/recordf1", recordf1.getPath());
    assertEquals(FieldType.LONG, recordf1.getFieldType());
    Field recordf2 = recordComplex.getKafkaConnectFields().getKafkaConnectField().get(1);
    assertEquals("recordf2", recordf2.getName());
    assertEquals("/record/recordf2", recordf2.getPath());
    assertEquals(FieldType.DOUBLE, recordf2.getFieldType());
    // enum symbols
    Field enumf = fields.get(3);
    assertEquals("enum", enumf.getName());
    assertEquals("/enum", enumf.getPath());
    assertEquals(FieldType.COMPLEX, enumf.getFieldType());
    assertTrue(enumf instanceof KafkaConnectComplexType);
    KafkaConnectComplexType enumfComplex = (KafkaConnectComplexType) enumf;
    List<KafkaConnectEnumField> entries = enumfComplex.getKafkaConnectEnumFields().getKafkaConnectEnumField();
    assertEquals(3, entries.size());
    assertEquals("ONE", entries.get(0).getName());
    assertEquals("TWO", entries.get(1).getName());
    assertEquals("THREE", entries.get(2).getName());
    // array of strings
    Field sarray = fields.get(4);
    assertEquals("sarray", sarray.getName());
    assertEquals("/sarray<>", sarray.getPath());
    assertEquals(CollectionType.LIST, sarray.getCollectionType());
    assertEquals(FieldType.STRING, sarray.getFieldType());
    // array of records
    Field rarray = fields.get(5);
    assertEquals("rarray", rarray.getName());
    assertEquals("/rarray<>", rarray.getPath());
    assertEquals(CollectionType.LIST, rarray.getCollectionType());
    assertEquals(FieldType.COMPLEX, rarray.getFieldType());
    assertTrue(rarray instanceof KafkaConnectComplexType);
    KafkaConnectComplexType rarrayComplex = (KafkaConnectComplexType) rarray;
    List<KafkaConnectField> rarrayEntries = rarrayComplex.getKafkaConnectFields().getKafkaConnectField();
    assertEquals(2, rarrayEntries.size());
    Field rarrayf3 = rarrayEntries.get(0);
    assertEquals("recordf3", rarrayf3.getName());
    assertEquals("/rarray<>/recordf3", rarrayf3.getPath());
    assertEquals(FieldType.INTEGER, rarrayf3.getFieldType());
    Field rarrayf4 = rarrayEntries.get(1);
    assertEquals("recordf4", rarrayf4.getName());
    assertEquals("/rarray<>/recordf4", rarrayf4.getPath());
    assertEquals(FieldType.FLOAT, rarrayf4.getFieldType());
    // map of strings
    Field map = fields.get(6);
    assertEquals("map", map.getName());
    assertEquals("/map{}", map.getPath());
    assertEquals(CollectionType.MAP, map.getCollectionType());
    assertEquals(FieldType.STRING, map.getFieldType());
    // union types are not supported and are flagged as such
    Field union = fields.get(7);
    assertEquals("union", union.getName());
    assertEquals("/union", union.getPath());
    assertEquals(FieldType.COMPLEX, union.getFieldType());
    assertTrue(union instanceof KafkaConnectComplexType);
    KafkaConnectComplexType unionComplex = (KafkaConnectComplexType) union;
    assertEquals(FieldStatus.UNSUPPORTED, unionComplex.getStatus());
    // fixed maps to a byte array
    Field fixed = fields.get(8);
    assertEquals("fixed", fixed.getName());
    assertEquals("/fixed", fixed.getPath());
    assertEquals(FieldType.BYTE_ARRAY, fixed.getFieldType());
}
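As a side note, the document asserted above can also be traversed generically with the same accessors instead of indexing each field. A small sketch (printTree is a hypothetical helper name, not part of AtlasMap):

// Hypothetical helper: prints path and type for every field, descending into
// KafkaConnectComplexType children via the accessors used in the test above.
static void printTree(List<? extends Field> fields) {
    for (Field field : fields) {
        System.out.println(field.getPath() + " : " + field.getFieldType());
        if (field instanceof KafkaConnectComplexType) {
            KafkaConnectComplexType complex = (KafkaConnectComplexType) field;
            if (complex.getKafkaConnectFields() != null) {
                printTree(complex.getKafkaConnectFields().getKafkaConnectField());
            }
        }
    }
}
// Usage: printTree(doc.getFields().getField());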
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectDocument in project atlasmap by atlasmap.
Class KafkaConnectInspectionServiceTest, method testAvroTopmostArray:
@Test
public void testAvroTopmostArray() throws Exception {
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("avro-topmost-array.json");
    KafkaConnectDocument doc = service.inspectAvro(new String(is.readAllBytes()), options);
    // A top-level Avro array becomes an unnamed LIST document rooted at "/<>".
    assertNotNull(doc);
    assertNull(doc.getName());
    assertEquals("/<>", doc.getPath());
    assertEquals(CollectionType.LIST, doc.getCollectionType());
    assertEquals(FieldType.STRING, doc.getFieldType());
}
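For orientation, a plausible shape for the avro-topmost-array.json fixture and the equivalent inline call. This is an assumption; the actual fixture content is not shown in this section:

// Assumed fixture content: an Avro schema for a top-level array of strings.
String topmostArraySchema = "{\"type\":\"array\",\"items\":\"string\"}";
KafkaConnectDocument arrayDoc = service.inspectAvro(topmostArraySchema, options);
// Expected per the assertions above: no name, path "/<>", CollectionType.LIST, FieldType.STRING.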
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectDocument in project atlasmap by atlasmap.
Class KafkaConnectServiceTest, method testAvroSchema:
@Test
public void testAvroSchema() throws Exception {
    KafkaConnectInspectionRequest request = new KafkaConnectInspectionRequest();
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream("avro-complex.json");
    request.setSchemaData(new String(is.readAllBytes()));
    request.getOptions().put(KafkaConnectConstants.OPTIONS_SCHEMA_TYPE, "AVRO");
    request.getOptions().put(KafkaConnectConstants.OPTIONS_IS_KEY, "true");
    Response res = kafkaConnectService.inspect(request);
    // The service serializes the response to JSON bytes; deserialize it back for assertions.
    Object entity = res.getEntity();
    assertEquals(byte[].class, entity.getClass());
    KafkaConnectInspectionResponse inspectionResponse = Json.mapper().readValue((byte[]) entity, KafkaConnectInspectionResponse.class);
    assertNull(inspectionResponse.getErrorMessage());
    KafkaConnectDocument doc = inspectionResponse.getKafkaConnectDocument();
    assertNotNull(doc);
    assertEquals(org.apache.kafka.connect.data.Schema.Type.STRUCT, doc.getRootSchemaType());
}
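The error path of inspect() can be verified in the same style: per the implementation shown earlier, an unrecognized OPTIONS_SCHEMA_TYPE makes KafkaConnectSchemaType.valueOf throw, and the message surfaces as errorMessage instead of a document. A hedged sketch, where the bogus type string is ours:

// Sketch: a bogus schema type makes KafkaConnectSchemaType.valueOf(...) throw inside
// inspect(), so the response carries an error message and no document.
KafkaConnectInspectionRequest badRequest = new KafkaConnectInspectionRequest();
badRequest.setSchemaData("{}");
badRequest.getOptions().put(KafkaConnectConstants.OPTIONS_SCHEMA_TYPE, "NOT_A_SCHEMA_TYPE");
Response badRes = kafkaConnectService.inspect(badRequest);
KafkaConnectInspectionResponse badResponse =
        Json.mapper().readValue((byte[]) badRes.getEntity(), KafkaConnectInspectionResponse.class);
assertNotNull(badResponse.getErrorMessage());
assertNull(badResponse.getKafkaConnectDocument());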
Use of io.atlasmap.kafkaconnect.v2.KafkaConnectDocument in project atlasmap by atlasmap.
Class KafkaConnectInspector, method createDocument:
private KafkaConnectDocument createDocument(org.apache.kafka.connect.data.Schema schema) {
    KafkaConnectDocument doc = AtlasKafkaConnectModelFactory.createKafkaConnectDocument();
    doc.setRootSchemaType(schema.type());
    doc.setName(schema.name());
    Schema connectSchema = schema;
    AtlasPath path;
    // Top-level arrays and maps unwrap to their value schema and mark the document
    // with the corresponding collection type and root path segment.
    if (Type.ARRAY == connectSchema.type()) {
        path = new AtlasPath(AtlasPath.PATH_SEPARATOR + AtlasPath.PATH_LIST_SUFFIX);
        doc.setCollectionType(CollectionType.LIST);
        connectSchema = connectSchema.valueSchema();
    } else if (Type.MAP == connectSchema.type()) {
        path = new AtlasPath(AtlasPath.PATH_SEPARATOR + AtlasPath.PATH_MAP_SUFFIX);
        doc.setCollectionType(CollectionType.MAP);
        connectSchema = connectSchema.valueSchema();
    } else {
        path = new AtlasPath("");
    }
    doc.setPath(path.toString());
    if (connectSchema.parameters() != null) {
        // A schema with parameters is treated as an enumeration; each parameter value
        // becomes an enum symbol, and the "io.confluent" metadata entry is skipped.
        doc.setEnumeration(true);
        List<KafkaConnectEnumField> symbols = doc.getEnumFields().getKafkaConnectEnumField();
        for (Entry<String, String> entry : connectSchema.parameters().entrySet()) {
            if ("io.confluent".equals(entry.getKey())) {
                continue;
            }
            KafkaConnectEnumField f = new KafkaConnectEnumField();
            f.setName(entry.getValue());
            symbols.add(f);
        }
        doc.setFieldType(KafkaConnectUtil.getFieldType(connectSchema.type()));
    } else if (!connectSchema.type().isPrimitive()) {
        // Non-primitive schemas become COMPLEX documents with nested fields.
        doc.setFieldType(FieldType.COMPLEX);
        List<KafkaConnectField> children = populateFields(connectSchema.fields(), path);
        doc.getFields().getField().addAll(children);
    } else {
        doc.setFieldType(KafkaConnectUtil.getFieldType(connectSchema.type()));
    }
    return doc;
}
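To make the branching above concrete, a hedged illustration with Kafka Connect's SchemaBuilder showing which branch each schema shape would take. createDocument is private, so these are not calls into it, only example inputs:

// Illustrative schemas only; the comments indicate the branch of createDocument that applies.
org.apache.kafka.connect.data.Schema arrayOfStrings =
        org.apache.kafka.connect.data.SchemaBuilder.array(
                org.apache.kafka.connect.data.Schema.STRING_SCHEMA).build();
// ARRAY branch: CollectionType.LIST, root path "/<>", then unwraps to the STRING value schema.

org.apache.kafka.connect.data.Schema mapOfStrings =
        org.apache.kafka.connect.data.SchemaBuilder.map(
                org.apache.kafka.connect.data.Schema.STRING_SCHEMA,
                org.apache.kafka.connect.data.Schema.STRING_SCHEMA).build();
// MAP branch: CollectionType.MAP, map root path, then unwraps to the value schema.

org.apache.kafka.connect.data.Schema struct =
        org.apache.kafka.connect.data.SchemaBuilder.struct()
                .field("id", org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
                .build();
// Non-primitive branch: FieldType.COMPLEX with children populated via populateFields().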