Example usage of io.cdap.cdap.etl.api.lineage.field.FieldOperation from the cdapio/cdap project, taken from class LineageOperationProcessorTest, method testComplexMerge.
@Test
public void testComplexMerge() {
  // Pipeline topology: two read+parse branches (n1->n2 and n3->n4) merge at n5,
  // which fans out to sink n6 and to rename stage n7 followed by sink n8.
  //
  // n1: read: file1 -> offset,body
  // n2: parse: body -> name, address, zip
  // n3: read: file2 -> offset,body
  // n4: parse: body -> name, address, zip
  // n5: normalize: address -> address
  // n5: rename: address -> state_address
  // n6: write: offset, name, address -> file3
  // n7: rename: offset -> file_offset
  // n8: write: file_offset, name, address, zip -> file4
  Set<Connection> connections = new HashSet<>(Arrays.asList(
      new Connection("n1", "n2"),
      new Connection("n2", "n5"),
      new Connection("n3", "n4"),
      new Connection("n4", "n5"),
      new Connection("n5", "n6"),
      new Connection("n5", "n7"),
      new Connection("n7", "n8")));

  EndPoint file1 = EndPoint.of("ns", "file1");
  EndPoint file2 = EndPoint.of("ns", "file2");
  EndPoint file3 = EndPoint.of("ns", "file3");
  EndPoint file4 = EndPoint.of("ns", "file4");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
      new FieldReadOperation("read", "reading file 1", file1, "offset", "body")));
  stageOperations.put("n2", Collections.singletonList(
      new FieldTransformOperation("parse", "parsing file 1",
          Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n3", Collections.singletonList(
      new FieldReadOperation("read", "reading file 2", file2, "offset", "body")));
  stageOperations.put("n4", Collections.singletonList(
      new FieldTransformOperation("parse", "parsing file 2",
          Collections.singletonList("body"), "name", "address", "zip")));
  stageOperations.put("n5", Arrays.asList(
      new FieldTransformOperation("normalize", "normalizing address",
          Collections.singletonList("address"), "address"),
      new FieldTransformOperation("rename", "renaming address to state_address",
          Collections.singletonList("address"), "state_address")));
  stageOperations.put("n6", Collections.singletonList(
      new FieldWriteOperation("write", "writing file 3", file3, "offset", "name", "address")));
  stageOperations.put("n7", Collections.singletonList(
      new FieldTransformOperation("rename", "renaming offset to file_offset",
          Collections.singletonList("offset"), "file_offset")));
  stageOperations.put("n8", Collections.singletonList(
      new FieldWriteOperation("write", "writing file 4", file4,
          "file_offset", "name", "address", "zip")));

  Set<Operation> actual =
      new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet())
          .process();

  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.read", "reading file 1", file1, "offset", "body"));
  expected.add(new TransformOperation("n2.parse", "parsing file 1",
      Collections.singletonList(InputField.of("n1.read", "body")), "name", "address", "zip"));
  expected.add(new ReadOperation("n3.read", "reading file 2", file2, "offset", "body"));
  expected.add(new TransformOperation("n4.parse", "parsing file 2",
      Collections.singletonList(InputField.of("n3.read", "body")), "name", "address", "zip"));
  // Implicit merge operations the processor generates for each field arriving at n5
  // from both branches.
  expected.add(new TransformOperation("n2,n4.merge.offset", "Merged stages: n2,n4",
      Arrays.asList(InputField.of("n1.read", "offset"), InputField.of("n3.read", "offset")),
      "offset"));
  expected.add(new TransformOperation("n2,n4.merge.body", "Merged stages: n2,n4",
      Arrays.asList(InputField.of("n1.read", "body"), InputField.of("n3.read", "body")),
      "body"));
  expected.add(new TransformOperation("n2,n4.merge.address", "Merged stages: n2,n4",
      Arrays.asList(InputField.of("n2.parse", "address"), InputField.of("n4.parse", "address")),
      "address"));
  expected.add(new TransformOperation("n2,n4.merge.name", "Merged stages: n2,n4",
      Arrays.asList(InputField.of("n2.parse", "name"), InputField.of("n4.parse", "name")),
      "name"));
  expected.add(new TransformOperation("n2,n4.merge.zip", "Merged stages: n2,n4",
      Arrays.asList(InputField.of("n2.parse", "zip"), InputField.of("n4.parse", "zip")),
      "zip"));
  expected.add(new TransformOperation("n5.normalize", "normalizing address",
      Collections.singletonList(InputField.of("n2,n4.merge.address", "address")), "address"));
  expected.add(new TransformOperation("n5.rename", "renaming address to state_address",
      Collections.singletonList(InputField.of("n5.normalize", "address")), "state_address"));
  expected.add(new WriteOperation("n6.write", "writing file 3", file3,
      InputField.of("n2,n4.merge.offset", "offset"),
      InputField.of("n2,n4.merge.name", "name"),
      InputField.of("n5.normalize", "address")));
  expected.add(new TransformOperation("n7.rename", "renaming offset to file_offset",
      Collections.singletonList(InputField.of("n2,n4.merge.offset", "offset")), "file_offset"));
  expected.add(new WriteOperation("n8.write", "writing file 4", file4,
      InputField.of("n7.rename", "file_offset"),
      InputField.of("n2,n4.merge.name", "name"),
      InputField.of("n5.normalize", "address"),
      InputField.of("n2,n4.merge.zip", "zip")));

  Assert.assertEquals(expected, actual);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldOperation from the cdapio/cdap project, taken from class LineageOperationProcessorTest, method testSimpleJoinWithRenameOnAdditionalFields.
@Test
public void testSimpleJoinWithRenameOnAdditionalFields() {
  // customer -> (id, name) and purchase -> (customer_id, item) are joined by
  // stage n3, which emits (id_from_customer, customer_id, name_from_customer,
  // item_from_purchase); stage n4 writes the joined record.
  Set<Connection> connections = new HashSet<>(Arrays.asList(
      new Connection("n1", "n3"),
      new Connection("n2", "n3"),
      new Connection("n3", "n4")));

  EndPoint customer = EndPoint.of("default", "customer");
  EndPoint purchase = EndPoint.of("default", "purchase");
  EndPoint customerPurchase = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
      new FieldReadOperation("ReadCustomer", "read description", customer, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
      new FieldReadOperation("ReadPurchase", "read description", purchase, "customer_id", "item")));
  List<FieldOperation> joinOperations = new ArrayList<>();
  joinOperations.add(new FieldTransformOperation("Join", "Join Operation",
      Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
  joinOperations.add(new FieldTransformOperation("Rename id", "Rename id",
      Collections.singletonList("id"), "id_from_customer"));
  joinOperations.add(new FieldTransformOperation("Rename name", "Rename name",
      Collections.singletonList("n1.name"), "name_from_customer"));
  joinOperations.add(new FieldTransformOperation("Rename item", "Rename item",
      Collections.singletonList("n2.item"), "item_from_purchase"));
  stageOperations.put("n3", joinOperations);
  stageOperations.put("n4", Collections.singletonList(
      new FieldWriteOperation("Write", "write description", customerPurchase,
          "id_from_customer", "customer_id", "name_from_customer", "item_from_purchase")));

  // n3 is registered as a joiner stage, so its operation inputs may be
  // qualified with the originating stage name (e.g. "n1.name").
  Set<Operation> actual = new LineageOperationsProcessor(
      connections, stageOperations, Collections.singleton("n3")).process();

  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.ReadCustomer", "read description", customer, "id", "name"));
  expected.add(new ReadOperation("n2.ReadPurchase", "read description", purchase,
      "customer_id", "item"));
  expected.add(new TransformOperation("n3.Join", "Join Operation",
      Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
          InputField.of("n2.ReadPurchase", "customer_id")),
      "id", "customer_id"));
  expected.add(new TransformOperation("n3.Rename id", "Rename id",
      Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
  expected.add(new TransformOperation("n3.Rename name", "Rename name",
      Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
  expected.add(new TransformOperation("n3.Rename item", "Rename item",
      Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item_from_purchase"));
  expected.add(new WriteOperation("n4.Write", "write description", customerPurchase,
      Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
          InputField.of("n3.Join", "customer_id"),
          InputField.of("n3.Rename name", "name_from_customer"),
          InputField.of("n3.Rename item", "item_from_purchase"))));

  Assert.assertEquals(expected, actual);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldOperation from the cdapio/cdap project, taken from class LineageOperationProcessorTest, method testJoinWith3Inputs.
@Test
public void testJoinWith3Inputs() {
  // Three sources feed joiner stage n4: customer -> (id, name),
  // purchase -> (customer_id, item), and address -> (address_id, address).
  // n4 emits (id_from_customer, customer_id, address_id, name_from_customer,
  // address), which stage n5 writes out.
  Set<Connection> connections = new HashSet<>(Arrays.asList(
      new Connection("n1", "n4"),
      new Connection("n2", "n4"),
      new Connection("n3", "n4"),
      new Connection("n4", "n5")));

  EndPoint customer = EndPoint.of("default", "customer");
  EndPoint purchase = EndPoint.of("default", "purchase");
  EndPoint address = EndPoint.of("default", "address");
  EndPoint sink = EndPoint.of("default", "customer_purchase_address");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
      new FieldReadOperation("ReadCustomer", "read description", customer, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
      new FieldReadOperation("ReadPurchase", "read description", purchase, "customer_id", "item")));
  stageOperations.put("n3", Collections.singletonList(
      new FieldReadOperation("ReadAddress", "read description", address, "address_id", "address")));
  List<FieldOperation> joinOperations = new ArrayList<>();
  joinOperations.add(new FieldTransformOperation("Join", "Join Operation",
      Arrays.asList("n1.id", "n2.customer_id", "n3.address_id"),
      Arrays.asList("id", "customer_id", "address_id")));
  joinOperations.add(new FieldTransformOperation("Rename id", "Rename Operation",
      Collections.singletonList("id"), Collections.singletonList("id_from_customer")));
  joinOperations.add(new FieldTransformOperation("Rename customer.name", "Rename Operation",
      Collections.singletonList("n1.name"), Collections.singletonList("name_from_customer")));
  joinOperations.add(new FieldTransformOperation("Identity address.address", "Identity Operation",
      Collections.singletonList("n3.address"), Collections.singletonList("address")));
  stageOperations.put("n4", joinOperations);
  stageOperations.put("n5", Collections.singletonList(
      new FieldWriteOperation("Write", "Write Operation", sink,
          "id_from_customer", "customer_id", "address_id", "name_from_customer", "address")));

  // n4 is registered as a joiner stage, so its operation inputs may be
  // qualified with the originating stage name.
  Set<Operation> actual = new LineageOperationsProcessor(
      connections, stageOperations, Collections.singleton("n4")).process();

  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.ReadCustomer", "read description", customer, "id", "name"));
  expected.add(new ReadOperation("n2.ReadPurchase", "read description", purchase,
      "customer_id", "item"));
  expected.add(new ReadOperation("n3.ReadAddress", "read description", address,
      "address_id", "address"));
  expected.add(new TransformOperation("n4.Join", "Join Operation",
      Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
          InputField.of("n2.ReadPurchase", "customer_id"),
          InputField.of("n3.ReadAddress", "address_id")),
      "id", "customer_id", "address_id"));
  expected.add(new TransformOperation("n4.Rename id", "Rename Operation",
      Collections.singletonList(InputField.of("n4.Join", "id")), "id_from_customer"));
  expected.add(new TransformOperation("n4.Rename customer.name", "Rename Operation",
      Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
  expected.add(new TransformOperation("n4.Identity address.address", "Identity Operation",
      Collections.singletonList(InputField.of("n3.ReadAddress", "address")), "address"));
  expected.add(new WriteOperation("n5.Write", "Write Operation", sink,
      Arrays.asList(InputField.of("n4.Rename id", "id_from_customer"),
          InputField.of("n4.Join", "customer_id"),
          InputField.of("n4.Join", "address_id"),
          InputField.of("n4.Rename customer.name", "name_from_customer"),
          InputField.of("n4.Identity address.address", "address"))));

  Assert.assertEquals(expected, actual);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldOperation from the cdapio/cdap project, taken from class StageOperationsValidatorTest, method testInvalidInputs.
@Test
public void testInvalidInputs() {
  // Case 1: a read whose outputs are all declared as stage outputs is valid.
  FieldReadOperation read = new FieldReadOperation("read", "reading data",
      EndPoint.of("default", "file"), "offset", "body");
  StageOperationsValidator.Builder builder =
      new StageOperationsValidator.Builder(Collections.singletonList(read));
  builder.addStageOutputs(Arrays.asList("offset", "body"));
  StageOperationsValidator validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());

  // Case 2: a transform whose input field comes from the stage inputs is valid.
  FieldTransformOperation transform = new FieldTransformOperation("parse", "parsing data",
      Collections.singletonList("body"), Arrays.asList("name", "address", "zip"));
  builder = new StageOperationsValidator.Builder(Collections.singletonList(transform));
  builder.addStageInputs(Arrays.asList("offset", "body"));
  builder.addStageOutputs(Arrays.asList("name", "address", "zip"));
  validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());

  // Case 3: "name" is neither a stage input nor produced by an earlier
  // operation, so the write operation is flagged with an invalid input.
  FieldWriteOperation write = new FieldWriteOperation("write", "writing data",
      EndPoint.of("default", "file2"), "name", "address", "zip");
  builder = new StageOperationsValidator.Builder(Collections.singletonList(write));
  builder.addStageInputs(Arrays.asList("address", "zip"));
  validator = builder.build();
  validator.validate();
  Assert.assertNotNull(validator.getStageInvalids());
  InvalidFieldOperations invalids = validator.getStageInvalids();
  Assert.assertEquals(1, invalids.getInvalidInputs().size());
  Map<String, List<String>> invalidInputs = new HashMap<>();
  invalidInputs.put("name", Collections.singletonList("write"));
  Assert.assertEquals(invalidInputs, invalids.getInvalidInputs());
  Assert.assertEquals(0, invalids.getInvalidOutputs().size());

  // Case 4: name is provided by output of the operation previous to write,
  // so the same write becomes valid.
  List<FieldOperation> pipelineOperations = new ArrayList<>();
  pipelineOperations.add(new FieldTransformOperation("name_lookup", "generating name",
      Collections.singletonList("address"), "name"));
  pipelineOperations.add(new FieldWriteOperation("write", "writing data",
      EndPoint.of("default", "file2"), "name", "address", "zip"));
  builder = new StageOperationsValidator.Builder(pipelineOperations);
  builder.addStageInputs(Arrays.asList("address", "zip"));
  validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldOperation from the cdapio/cdap project, taken from class StageOperationsValidatorTest, method testRedundantOutputs.
@Test
public void testRedundantOutputs() {
  // "name" emitted by redundant_parse is produced again by the later parse
  // operation, so the validator reports the earlier output as redundant.
  List<FieldOperation> pipelineOperations = new ArrayList<>();
  pipelineOperations.add(new FieldTransformOperation("redundant_parse", "parsing data",
      Collections.singletonList("body"), "name"));
  pipelineOperations.add(new FieldTransformOperation("parse", "parsing data",
      Collections.singletonList("body"), Arrays.asList("name", "address", "zip")));
  StageOperationsValidator.Builder builder =
      new StageOperationsValidator.Builder(pipelineOperations);
  builder.addStageInputs(Arrays.asList("offset", "body"));
  builder.addStageOutputs(Arrays.asList("name", "address", "zip"));
  StageOperationsValidator validator = builder.build();
  validator.validate();
  Map<String, List<String>> expected =
      ImmutableMap.of("name", Collections.singletonList("redundant_parse"));
  Assert.assertEquals(expected, validator.getRedundantOutputs());
}
Aggregations