Example use of io.cdap.cdap.api.lineage.field.TransformOperation in project cdap by caskdata: class LineageOperationProcessorTest, method testMergeOperationsNonRepeat.
@Test
public void testMergeOperationsNonRepeat() {
// Pipeline topology: the outputs of n3 and n4 converge on n5.
// n1 -> n3 ----
//              |---- n5
// n2 -> n4 ----
// Stage operations:
//   n1 read:     -> (id, name)
//   n2 read:     -> (body, offset)
//   n3 identity: (id) -> id, (name) -> name
//   n4 generate: (body) -> (id, name)
//   n5 write:    (id, name)
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n3"));
connections.add(new Connection("n3", "n5"));
connections.add(new Connection("n2", "n4"));
connections.add(new Connection("n4", "n5"));
EndPoint src1 = EndPoint.of("default", "n1");
EndPoint src2 = EndPoint.of("default", "n2");
EndPoint dest = EndPoint.of("default", "n5");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("read1", "read description", src1, "id", "name")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("read2", "read description", src2, "body", "offset")));
List<FieldOperation> n3Operations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
n3Operations.add(new FieldTransformOperation("identity1", "identity", Collections.singletonList("id"), "id"));
n3Operations.add(new FieldTransformOperation("identity2", "identity", Collections.singletonList("name"), "name"));
stageOperations.put("n4", Collections.singletonList(new FieldTransformOperation("generate", "generate", Collections.singletonList("body"), "id", "name")));
stageOperations.put("n5", Collections.singletonList(new FieldWriteOperation("write", "write", dest, "id", "name")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
// Expected: each stage operation is prefixed with its stage name, and because n5
// receives fields from both the n3 and n4 branches, the processor is expected to
// emit synthetic "n3,n4.merge.<field>" operations joining the two branches.
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.read1", "read description", src1, "id", "name"));
expectedOperations.add(new ReadOperation("n2.read2", "read description", src2, "body", "offset"));
expectedOperations.add(new TransformOperation("n3.identity1", "identity", Collections.singletonList(InputField.of("n1.read1", "id")), "id"));
expectedOperations.add(new TransformOperation("n3.identity2", "identity", Collections.singletonList(InputField.of("n1.read1", "name")), "name"));
expectedOperations.add(new TransformOperation("n4.generate", "generate", Collections.singletonList(InputField.of("n2.read2", "body")), "id", "name"));
expectedOperations.add(new TransformOperation("n3,n4.merge.id", "Merged stages: n3,n4", Arrays.asList(InputField.of("n3.identity1", "id"), InputField.of("n4.generate", "id")), "id"));
expectedOperations.add(new TransformOperation("n3,n4.merge.name", "Merged stages: n3,n4", Arrays.asList(InputField.of("n3.identity2", "name"), InputField.of("n4.generate", "name")), "name"));
expectedOperations.add(new TransformOperation("n3,n4.merge.body", "Merged stages: n3,n4", Collections.singletonList(InputField.of("n2.read2", "body")), "body"));
expectedOperations.add(new TransformOperation("n3,n4.merge.offset", "Merged stages: n3,n4", Collections.singletonList(InputField.of("n2.read2", "offset")), "offset"));
expectedOperations.add(new WriteOperation("n5.write", "write", dest, Arrays.asList(InputField.of("n3,n4.merge.id", "id"), InputField.of("n3,n4.merge.name", "name"))));
Set<Operation> process = processor.process();
Assert.assertEquals(expectedOperations, process);
}
Example use of io.cdap.cdap.api.lineage.field.TransformOperation in project cdap by caskdata: class LineageOperationProcessorTest, method testSimpleJoinWithRenameOnAdditionalFields.
@Test
public void testSimpleJoinWithRenameOnAdditionalFields() {
  // Topology:
  //   customer -> (id, name) ---------
  //                                   |
  //                                 JOIN ---> (id_from_customer, customer_id, name_from_customer, item_from_purchase)
  //                                   |
  //   purchase -> (customer_id, item)-
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n3"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  EndPoint customerSource = EndPoint.of("default", "customer");
  EndPoint purchaseSource = EndPoint.of("default", "purchase");
  EndPoint joinedSink = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerSource, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseSource, "customer_id", "item")));
  // The join stage (n3) records the join itself plus renames of the non-key fields.
  stageOperations.put("n3", Arrays.asList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id"),
                                Arrays.asList("id", "customer_id")),
    new FieldTransformOperation("Rename id", "Rename id",
                                Collections.singletonList("id"), "id_from_customer"),
    new FieldTransformOperation("Rename name", "Rename name",
                                Collections.singletonList("n1.name"), "name_from_customer"),
    new FieldTransformOperation("Rename item", "Rename item",
                                Collections.singletonList("n2.item"), "item_from_purchase")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", joinedSink,
                            "id_from_customer", "customer_id", "name_from_customer", "item_from_purchase")));

  // n3 is declared as a multi-input (join) stage.
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
  Set<Operation> actualOperations = processor.process();

  // Expected operations carry stage-qualified names and stage-qualified input origins.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", customerSource, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", purchaseSource, "customer_id", "item"));
  expectedOperations.add(new TransformOperation("n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expectedOperations.add(new TransformOperation("n3.Rename id", "Rename id",
    Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
  expectedOperations.add(new TransformOperation("n3.Rename name", "Rename name",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
  expectedOperations.add(new TransformOperation("n3.Rename item", "Rename item",
    Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item_from_purchase"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", joinedSink,
    Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
                  InputField.of("n3.Join", "customer_id"),
                  InputField.of("n3.Rename name", "name_from_customer"),
                  InputField.of("n3.Rename item", "item_from_purchase"))));

  Assert.assertEquals(expectedOperations, actualOperations);
}
Example use of io.cdap.cdap.api.lineage.field.TransformOperation in project cdap by caskdata: class LineageOperationProcessorTest, method testComplexMerge.
@Test
public void testComplexMerge() {
  // Topology:
  //   n1----n2---
  //             |-------n6
  //        |----n5---|
  //   n3----n4---    |---n7----n8
  //
  // Stage operations:
  //   n1: read file1 -> (offset, body)
  //   n2: parse body -> (name, address, zip)
  //   n3: read file2 -> (offset, body)
  //   n4: parse body -> (name, address, zip)
  //   n5: normalize address -> address; rename address -> state_address
  //   n6: write (offset, name, address) -> file3
  //   n7: rename offset -> file_offset
  //   n8: write (file_offset, name, address, zip) -> file4
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n2"),
    new Connection("n2", "n5"),
    new Connection("n3", "n4"),
    new Connection("n4", "n5"),
    new Connection("n5", "n6"),
    new Connection("n5", "n7"),
    new Connection("n7", "n8")));

  EndPoint file1 = EndPoint.of("ns", "file1");
  EndPoint file2 = EndPoint.of("ns", "file2");
  EndPoint file3 = EndPoint.of("ns", "file3");
  EndPoint file4 = EndPoint.of("ns", "file4");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("read", "reading file 1", file1, "offset", "body")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing file 1",
                                Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldReadOperation("read", "reading file 2", file2, "offset", "body")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing file 2",
                                Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n5", Arrays.asList(
    new FieldTransformOperation("normalize", "normalizing address",
                                Collections.singletonList("address"), "address"),
    new FieldTransformOperation("rename", "renaming address to state_address",
                                Collections.singletonList("address"), "state_address")));
  stageOperations.put("n6", Collections.singletonList(
    new FieldWriteOperation("write", "writing file 3", file3, "offset", "name", "address")));
  stageOperations.put("n7", Collections.singletonList(
    new FieldTransformOperation("rename", "renaming offset to file_offset",
                                Collections.singletonList("offset"), "file_offset")));
  stageOperations.put("n8", Collections.singletonList(
    new FieldWriteOperation("write", "writing file 4", file4, "file_offset", "name", "address", "zip")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> actualOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.read", "reading file 1", file1, "offset", "body"));
  expectedOperations.add(new TransformOperation("n2.parse", "parsing file 1",
    Collections.singletonList(InputField.of("n1.read", "body")), "name", "address", "zip"));
  expectedOperations.add(new ReadOperation("n3.read", "reading file 2", file2, "offset", "body"));
  expectedOperations.add(new TransformOperation("n4.parse", "parsing file 2",
    Collections.singletonList(InputField.of("n3.read", "body")), "name", "address", "zip"));
  // n5 is fed by both the n2 and n4 branches, so a synthetic merge operation is
  // expected for every field flowing into it.
  expectedOperations.add(new TransformOperation("n2,n4.merge.offset", "Merged stages: n2,n4",
    ImmutableList.of(InputField.of("n1.read", "offset"), InputField.of("n3.read", "offset")), "offset"));
  expectedOperations.add(new TransformOperation("n2,n4.merge.body", "Merged stages: n2,n4",
    ImmutableList.of(InputField.of("n1.read", "body"), InputField.of("n3.read", "body")), "body"));
  expectedOperations.add(new TransformOperation("n2,n4.merge.address", "Merged stages: n2,n4",
    ImmutableList.of(InputField.of("n2.parse", "address"), InputField.of("n4.parse", "address")), "address"));
  expectedOperations.add(new TransformOperation("n2,n4.merge.name", "Merged stages: n2,n4",
    ImmutableList.of(InputField.of("n2.parse", "name"), InputField.of("n4.parse", "name")), "name"));
  expectedOperations.add(new TransformOperation("n2,n4.merge.zip", "Merged stages: n2,n4",
    ImmutableList.of(InputField.of("n2.parse", "zip"), InputField.of("n4.parse", "zip")), "zip"));
  expectedOperations.add(new TransformOperation("n5.normalize", "normalizing address",
    Collections.singletonList(InputField.of("n2,n4.merge.address", "address")), "address"));
  expectedOperations.add(new TransformOperation("n5.rename", "renaming address to state_address",
    Collections.singletonList(InputField.of("n5.normalize", "address")), "state_address"));
  expectedOperations.add(new WriteOperation("n6.write", "writing file 3", file3,
    InputField.of("n2,n4.merge.offset", "offset"),
    InputField.of("n2,n4.merge.name", "name"),
    InputField.of("n5.normalize", "address")));
  expectedOperations.add(new TransformOperation("n7.rename", "renaming offset to file_offset",
    Collections.singletonList(InputField.of("n2,n4.merge.offset", "offset")), "file_offset"));
  expectedOperations.add(new WriteOperation("n8.write", "writing file 4", file4,
    InputField.of("n7.rename", "file_offset"),
    InputField.of("n2,n4.merge.name", "name"),
    InputField.of("n5.normalize", "address"),
    InputField.of("n2,n4.merge.zip", "zip")));

  Assert.assertEquals(expectedOperations, actualOperations);
}
Example use of io.cdap.cdap.api.lineage.field.TransformOperation in project cdap by caskdata: class LineageOperationProcessorTest, method testAnotherSimplePipeline.
@Test
public void testAnotherSimplePipeline() {
  // Linear pipeline: n1 --> n2 --> n3 --> n4
  //   n1: read file -> (offset, body)
  //   n2: parse (body) -> (first_name, last_name)
  //   n3: concat (first_name, last_name) -> (name)
  //   n4: write (offset, name) -> another_file
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n2"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("read", "some read", EndPoint.of("ns", "file1"), "offset", "body")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldTransformOperation("parse", "parsing body",
                                Collections.singletonList("body"), "first_name", "last_name")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldTransformOperation("concat", "concatinating the fields",
                                Arrays.asList("first_name", "last_name"), "name")));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("write_op", "writing data to file",
                            EndPoint.of("myns", "another_file"), Arrays.asList("offset", "name"))));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> actualOperations = processor.process();

  List<Operation> expectedOperations = new ArrayList<>();
  expectedOperations.add(new TransformOperation("n2.parse", "parsing body",
    Collections.singletonList(InputField.of("n1.read", "body")), "first_name", "last_name"));
  expectedOperations.add(new TransformOperation("n3.concat", "concatinating the fields",
    Arrays.asList(InputField.of("n2.parse", "first_name"), InputField.of("n2.parse", "last_name")), "name"));
  expectedOperations.add(new ReadOperation("n1.read", "some read", EndPoint.of("ns", "file1"), "offset", "body"));
  expectedOperations.add(new WriteOperation("n4.write_op", "writing data to file",
    EndPoint.of("myns", "another_file"),
    Arrays.asList(InputField.of("n1.read", "offset"), InputField.of("n3.concat", "name"))));

  // Compare via FieldLineageInfo so ordering of the operations is irrelevant.
  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(actualOperations));
}
Example use of io.cdap.cdap.api.lineage.field.TransformOperation in project cdap by caskdata: class LineageOperationProcessorTest, method testSimpleJoinWithRenameJoinKeys.
@Test
public void testSimpleJoinWithRenameJoinKeys() {
  // Topology:
  //   customer -> (id, name) -----------
  //                                     |
  //                                   JOIN ---> (id_from_customer, id_from_purchase, name, item)
  //                                     |
  //   purchase -> (customer_id, item) --
  Set<Connection> connections = new HashSet<>(Arrays.asList(
    new Connection("n1", "n3"),
    new Connection("n2", "n3"),
    new Connection("n3", "n4")));

  EndPoint customerSource = EndPoint.of("default", "customer");
  EndPoint purchaseSource = EndPoint.of("default", "purchase");
  EndPoint joinedSink = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerSource, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseSource, "customer_id", "item")));
  // The join stage (n3): join on the keys, rename both join keys, and pass the
  // remaining fields through identity operations.
  stageOperations.put("n3", Arrays.asList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id"),
                                Arrays.asList("id", "customer_id")),
    new FieldTransformOperation("Rename id", "Rename id",
                                Collections.singletonList("id"), "id_from_customer"),
    new FieldTransformOperation("Rename customer_id", "Rename customer_id",
                                Collections.singletonList("customer_id"), "id_from_purchase"),
    new FieldTransformOperation("Identity name", "Identity Operation",
                                Collections.singletonList("n1.name"), Collections.singletonList("name")),
    new FieldTransformOperation("Identity item", "Identity Operation",
                                Collections.singletonList("n2.item"), Collections.singletonList("item"))));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", joinedSink,
                            "id_from_customer", "id_from_purchase", "name", "item")));

  // n3 is declared as a multi-input (join) stage.
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
  Set<Operation> actualOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", customerSource, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", purchaseSource, "customer_id", "item"));
  expectedOperations.add(new TransformOperation("n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expectedOperations.add(new TransformOperation("n3.Rename id", "Rename id",
    Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
  expectedOperations.add(new TransformOperation("n3.Rename customer_id", "Rename customer_id",
    Collections.singletonList(InputField.of("n3.Join", "customer_id")), "id_from_purchase"));
  expectedOperations.add(new TransformOperation("n3.Identity name", "Identity Operation",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name"));
  expectedOperations.add(new TransformOperation("n3.Identity item", "Identity Operation",
    Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", joinedSink,
    Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
                  InputField.of("n3.Rename customer_id", "id_from_purchase"),
                  InputField.of("n3.Identity name", "name"),
                  InputField.of("n3.Identity item", "item"))));

  Assert.assertEquals(expectedOperations, actualOperations);
}
Aggregations