Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the cdapio/cdap project, from the class LineageOperationProcessorTest, method testAnotherSimplePipeline.
@Test
public void testAnotherSimplePipeline() {
  // Linear pipeline: n1 --> n2 --> n3 --> n4
  //   n1 (read):   file1 -> (offset, body)
  //   n2 (parse):  (body) -> (first_name, last_name)
  //   n3 (concat): (first_name, last_name) -> (name)
  //   n4 (write):  (offset, name) -> another_file
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n2"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("read", "some read", EndPoint.of("ns", "file1"), "offset", "body")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"),
                                "first_name", "last_name")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldTransformOperation("concat", "concatinating the fields",
                                Arrays.asList("first_name", "last_name"), "name")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("write_op", "writing data to file",
                            EndPoint.of("myns", "another_file"),
                            Arrays.asList("offset", "name"))));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  // Expected operations carry the "<stage>.<operation>" naming produced by the processor,
  // with each input field resolved to the upstream operation that produced it.
  ReadOperation expectedRead =
    new ReadOperation("n1.read", "some read", EndPoint.of("ns", "file1"), "offset", "body");
  TransformOperation expectedParse =
    new TransformOperation("n2.parse", "parsing body",
                           Collections.singletonList(InputField.of("n1.read", "body")),
                           "first_name", "last_name");
  TransformOperation expectedConcat =
    new TransformOperation("n3.concat", "concatinating the fields",
                           Arrays.asList(InputField.of("n2.parse", "first_name"),
                                         InputField.of("n2.parse", "last_name")),
                           "name");
  WriteOperation expectedWrite =
    new WriteOperation("n4.write_op", "writing data to file",
                       EndPoint.of("myns", "another_file"),
                       Arrays.asList(InputField.of("n1.read", "offset"),
                                     InputField.of("n3.concat", "name")));

  List<Operation> expectedOperations =
    Arrays.asList(expectedParse, expectedConcat, expectedRead, expectedWrite);
  // Compare via FieldLineageInfo so equality is order-insensitive across the two collections.
  Assert.assertEquals(new FieldLineageInfo(expectedOperations),
                      new FieldLineageInfo(processedOperations));
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the cdapio/cdap project, from the class LineageOperationProcessorTest, method testComplexMerge.
@Test
public void testComplexMerge() {
  // Topology: two parallel read+parse branches merging into n5, which fans out to n6 and n7->n8.
  //
  //   n1 --> n2 --\
  //                +--> n5 --> n6
  //   n3 --> n4 --/      \
  //                       +--> n7 --> n8
  //
  // n1: read file1 -> (offset, body)
  // n2: parse body -> (name, address, zip)
  // n3: read file2 -> (offset, body)
  // n4: parse body -> (name, address, zip)
  // n5: normalize address -> address, then rename address -> state_address
  // n6: write (offset, name, address) -> file3
  // n7: rename offset -> file_offset
  // n8: write (file_offset, name, address, zip) -> file4
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n2"),
    new Connection("n2", "n5"),
    new Connection("n3", "n4"),
    new Connection("n4", "n5"),
    new Connection("n5", "n6"),
    new Connection("n5", "n7"),
    new Connection("n7", "n8")));

  EndPoint srcFile1 = EndPoint.of("ns", "file1");
  EndPoint srcFile2 = EndPoint.of("ns", "file2");
  EndPoint destFile3 = EndPoint.of("ns", "file3");
  EndPoint destFile4 = EndPoint.of("ns", "file4");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("read", "reading file 1", srcFile1, "offset", "body")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing file 1",
                                Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldReadOperation("read", "reading file 2", srcFile2, "offset", "body")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing file 2",
                                Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n5", Arrays.asList(
    new FieldTransformOperation("normalize", "normalizing address",
                                Collections.singletonList("address"), "address"),
    new FieldTransformOperation("rename", "renaming address to state_address",
                                Collections.singletonList("address"), "state_address")));
  stageOperations.put("n6", Collections.singletonList(
    new FieldWriteOperation("write", "writing file 3", destFile3, "offset", "name", "address")));
  stageOperations.put("n7", Collections.singletonList(
    new FieldTransformOperation("rename", "renaming offset to file_offset",
                                Collections.singletonList("offset"), "file_offset")));
  stageOperations.put("n8", Collections.singletonList(
    new FieldWriteOperation("write", "writing file 4", destFile4,
                            "file_offset", "name", "address", "zip")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(
    new ReadOperation("n1.read", "reading file 1", srcFile1, "offset", "body"));
  expectedOperations.add(
    new TransformOperation("n2.parse", "parsing file 1",
                           Collections.singletonList(InputField.of("n1.read", "body")),
                           "name", "address", "zip"));
  expectedOperations.add(
    new ReadOperation("n3.read", "reading file 2", srcFile2, "offset", "body"));
  expectedOperations.add(
    new TransformOperation("n4.parse", "parsing file 2",
                           Collections.singletonList(InputField.of("n3.read", "body")),
                           "name", "address", "zip"));
  // The processor synthesizes one implicit merge operation per field that flows into n5
  // from the two parallel branches ending at n2 and n4.
  expectedOperations.add(
    new TransformOperation("n2,n4.merge.offset", "Merged stages: n2,n4",
                           ImmutableList.of(InputField.of("n1.read", "offset"),
                                            InputField.of("n3.read", "offset")),
                           "offset"));
  expectedOperations.add(
    new TransformOperation("n2,n4.merge.body", "Merged stages: n2,n4",
                           ImmutableList.of(InputField.of("n1.read", "body"),
                                            InputField.of("n3.read", "body")),
                           "body"));
  expectedOperations.add(
    new TransformOperation("n2,n4.merge.address", "Merged stages: n2,n4",
                           ImmutableList.of(InputField.of("n2.parse", "address"),
                                            InputField.of("n4.parse", "address")),
                           "address"));
  expectedOperations.add(
    new TransformOperation("n2,n4.merge.name", "Merged stages: n2,n4",
                           ImmutableList.of(InputField.of("n2.parse", "name"),
                                            InputField.of("n4.parse", "name")),
                           "name"));
  expectedOperations.add(
    new TransformOperation("n2,n4.merge.zip", "Merged stages: n2,n4",
                           ImmutableList.of(InputField.of("n2.parse", "zip"),
                                            InputField.of("n4.parse", "zip")),
                           "zip"));
  expectedOperations.add(
    new TransformOperation("n5.normalize", "normalizing address",
                           Collections.singletonList(InputField.of("n2,n4.merge.address", "address")),
                           "address"));
  expectedOperations.add(
    new TransformOperation("n5.rename", "renaming address to state_address",
                           Collections.singletonList(InputField.of("n5.normalize", "address")),
                           "state_address"));
  expectedOperations.add(
    new WriteOperation("n6.write", "writing file 3", destFile3,
                       InputField.of("n2,n4.merge.offset", "offset"),
                       InputField.of("n2,n4.merge.name", "name"),
                       InputField.of("n5.normalize", "address")));
  expectedOperations.add(
    new TransformOperation("n7.rename", "renaming offset to file_offset",
                           Collections.singletonList(InputField.of("n2,n4.merge.offset", "offset")),
                           "file_offset"));
  expectedOperations.add(
    new WriteOperation("n8.write", "writing file 4", destFile4,
                       InputField.of("n7.rename", "file_offset"),
                       InputField.of("n2,n4.merge.name", "name"),
                       InputField.of("n5.normalize", "address"),
                       InputField.of("n2,n4.merge.zip", "zip")));

  Assert.assertEquals(expectedOperations, processedOperations);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the cdapio/cdap project, from the class LineageOperationProcessorTest, method testSimpleJoinOperation.
@Test
public void testSimpleJoinOperation() {
  // Two sources joined at n3, result written at n4:
  //   customer ->(id)-------------\
  //                                JOIN --> (id, customer_id)
  //   purchase ->(customer_id)----/
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n3"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint outputEndPoint = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerEndPoint, "id")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseEndPoint, "customer_id")));
  // Join inputs are stage-qualified ("n1.id", "n2.customer_id") because n3 is a multi-input stage.
  stageOperations.put("n3", Collections.singletonList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id"),
                                Arrays.asList("id", "customer_id"))));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", outputEndPoint, "id", "customer_id")));

  // n3 is declared as a joiner stage via the third constructor argument.
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(
    new ReadOperation("n1.ReadCustomer", "read description", customerEndPoint, "id"));
  expectedOperations.add(
    new ReadOperation("n2.ReadPurchase", "read description", purchaseEndPoint, "customer_id"));
  expectedOperations.add(
    new TransformOperation("n3.Join", "Join Operation",
                           Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                                         InputField.of("n2.ReadPurchase", "customer_id")),
                           "id", "customer_id"));
  expectedOperations.add(
    new WriteOperation("n4.Write", "write description", outputEndPoint,
                       Arrays.asList(InputField.of("n3.Join", "id"),
                                     InputField.of("n3.Join", "customer_id"))));

  Assert.assertEquals(expectedOperations, processor.process());
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the cdapio/cdap project, from the class LineageOperationProcessorTest, method testJoinWith3Inputs.
@Test
public void testJoinWith3Inputs() {
  // Three sources joined at n4, result written at n5:
  //   customer ->(id, name)----------\
  //   purchase ->(customer_id, item)--+--> JOIN --> (id_from_customer, customer_id, address_id,
  //   address  ->(address_id, address)/             name_from_customer, address)
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n4"),
    new Connection("n2", "n4"),
    new Connection("n3", "n4"),
    new Connection("n4", "n5")));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint addressEndPoint = EndPoint.of("default", "address");
  EndPoint outputEndPoint = EndPoint.of("default", "customer_purchase_address");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseEndPoint,
                           "customer_id", "item")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldReadOperation("ReadAddress", "read description", addressEndPoint,
                           "address_id", "address")));
  // The join stage emits the join itself plus follow-up renames; inputs referencing
  // source fields are stage-qualified, inputs referencing join outputs are not.
  stageOperations.put("n4", Arrays.asList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id", "n3.address_id"),
                                Arrays.asList("id", "customer_id", "address_id")),
    new FieldTransformOperation("Rename id", "Rename Operation",
                                Collections.singletonList("id"),
                                Collections.singletonList("id_from_customer")),
    new FieldTransformOperation("Rename customer.name", "Rename Operation",
                                Collections.singletonList("n1.name"),
                                Collections.singletonList("name_from_customer")),
    new FieldTransformOperation("Identity address.address", "Identity Operation",
                                Collections.singletonList("n3.address"),
                                Collections.singletonList("address"))));
  stageOperations.put("n5", Collections.singletonList(
    new FieldWriteOperation("Write", "Write Operation", outputEndPoint,
                            "id_from_customer", "customer_id", "address_id",
                            "name_from_customer", "address")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n4"));
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(
    new ReadOperation("n1.ReadCustomer", "read description", customerEndPoint, "id", "name"));
  expectedOperations.add(
    new ReadOperation("n2.ReadPurchase", "read description", purchaseEndPoint,
                      "customer_id", "item"));
  expectedOperations.add(
    new ReadOperation("n3.ReadAddress", "read description", addressEndPoint,
                      "address_id", "address"));
  expectedOperations.add(
    new TransformOperation("n4.Join", "Join Operation",
                           Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                                         InputField.of("n2.ReadPurchase", "customer_id"),
                                         InputField.of("n3.ReadAddress", "address_id")),
                           "id", "customer_id", "address_id"));
  expectedOperations.add(
    new TransformOperation("n4.Rename id", "Rename Operation",
                           Collections.singletonList(InputField.of("n4.Join", "id")),
                           "id_from_customer"));
  expectedOperations.add(
    new TransformOperation("n4.Rename customer.name", "Rename Operation",
                           Collections.singletonList(InputField.of("n1.ReadCustomer", "name")),
                           "name_from_customer"));
  expectedOperations.add(
    new TransformOperation("n4.Identity address.address", "Identity Operation",
                           Collections.singletonList(InputField.of("n3.ReadAddress", "address")),
                           "address"));
  expectedOperations.add(
    new WriteOperation("n5.Write", "Write Operation", outputEndPoint,
                       Arrays.asList(InputField.of("n4.Rename id", "id_from_customer"),
                                     InputField.of("n4.Join", "customer_id"),
                                     InputField.of("n4.Join", "address_id"),
                                     InputField.of("n4.Rename customer.name", "name_from_customer"),
                                     InputField.of("n4.Identity address.address", "address"))));

  Assert.assertEquals(expectedOperations, processedOperations);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the cdapio/cdap project, from the class LineageOperationProcessorTest, method testSimpleJoinWithRenameOnAdditionalFields.
@Test
public void testSimpleJoinWithRenameOnAdditionalFields() {
  // Two sources joined at n3; all four output fields are renamed before the write at n4:
  //   customer ->(id, name)---------\
  //                                  JOIN --> (id_from_customer, customer_id,
  //   purchase ->(customer_id, item)/          name_from_customer, item_from_purchase)
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n3"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint outputEndPoint = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseEndPoint,
                           "customer_id", "item")));
  // Renames of pass-through fields ("n1.name", "n2.item") are stage-qualified;
  // the rename of "id" refers to the join output and is not.
  stageOperations.put("n3", Arrays.asList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id"),
                                Arrays.asList("id", "customer_id")),
    new FieldTransformOperation("Rename id", "Rename id",
                                Collections.singletonList("id"), "id_from_customer"),
    new FieldTransformOperation("Rename name", "Rename name",
                                Collections.singletonList("n1.name"), "name_from_customer"),
    new FieldTransformOperation("Rename item", "Rename item",
                                Collections.singletonList("n2.item"), "item_from_purchase")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", outputEndPoint,
                            "id_from_customer", "customer_id",
                            "name_from_customer", "item_from_purchase")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(
    new ReadOperation("n1.ReadCustomer", "read description", customerEndPoint, "id", "name"));
  expectedOperations.add(
    new ReadOperation("n2.ReadPurchase", "read description", purchaseEndPoint,
                      "customer_id", "item"));
  expectedOperations.add(
    new TransformOperation("n3.Join", "Join Operation",
                           Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                                         InputField.of("n2.ReadPurchase", "customer_id")),
                           "id", "customer_id"));
  expectedOperations.add(
    new TransformOperation("n3.Rename id", "Rename id",
                           Collections.singletonList(InputField.of("n3.Join", "id")),
                           "id_from_customer"));
  expectedOperations.add(
    new TransformOperation("n3.Rename name", "Rename name",
                           Collections.singletonList(InputField.of("n1.ReadCustomer", "name")),
                           "name_from_customer"));
  expectedOperations.add(
    new TransformOperation("n3.Rename item", "Rename item",
                           Collections.singletonList(InputField.of("n2.ReadPurchase", "item")),
                           "item_from_purchase"));
  expectedOperations.add(
    new WriteOperation("n4.Write", "write description", outputEndPoint,
                       Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
                                     InputField.of("n3.Join", "customer_id"),
                                     InputField.of("n3.Rename name", "name_from_customer"),
                                     InputField.of("n3.Rename item", "item_from_purchase"))));

  Assert.assertEquals(expectedOperations, processedOperations);
}
Aggregations