Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the project cdap by caskdata.
Taken from the class StageOperationsValidatorTest, method testRedundantOutputs.
@Test
public void testRedundantOutputs() {
  // Both operations emit "name"; the later "parse" supersedes it, so the
  // earlier "redundant_parse" should be flagged as producing a redundant output.
  List<FieldOperation> operations = new ArrayList<>();
  operations.add(new FieldTransformOperation("redundant_parse", "parsing data",
                                             Collections.singletonList("body"), "name"));
  operations.add(new FieldTransformOperation("parse", "parsing data",
                                             Collections.singletonList("body"),
                                             Arrays.asList("name", "address", "zip")));

  StageOperationsValidator.Builder validatorBuilder =
    new StageOperationsValidator.Builder(operations);
  validatorBuilder.addStageInputs(Arrays.asList("offset", "body"));
  validatorBuilder.addStageOutputs(Arrays.asList("name", "address", "zip"));

  StageOperationsValidator validator = validatorBuilder.build();
  validator.validate();

  // Only "redundant_parse" is expected to show up as a redundant producer of "name".
  Map<String, List<String>> expected =
    ImmutableMap.of("name", Collections.singletonList("redundant_parse"));
  Assert.assertEquals(expected, validator.getRedundantOutputs());
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the project cdap by caskdata.
Taken from the class StageOperationsValidatorTest, method testInvalidInputs.
@Test
public void testInvalidInputs() {
  // Case 1: a read operation needs no stage inputs, so it validates cleanly.
  FieldReadOperation read = new FieldReadOperation("read", "reading data",
                                                   EndPoint.of("default", "file"),
                                                   "offset", "body");
  StageOperationsValidator.Builder builder =
    new StageOperationsValidator.Builder(Collections.singletonList(read));
  builder.addStageOutputs(Arrays.asList("offset", "body"));
  StageOperationsValidator validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());

  // Case 2: a transform whose sole input ("body") is a stage input is valid.
  FieldTransformOperation parse =
    new FieldTransformOperation("parse", "parsing data",
                                Collections.singletonList("body"),
                                Arrays.asList("name", "address", "zip"));
  builder = new StageOperationsValidator.Builder(Collections.singletonList(parse));
  builder.addStageInputs(Arrays.asList("offset", "body"));
  builder.addStageOutputs(Arrays.asList("name", "address", "zip"));
  validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());

  // Case 3: a write reads "name", but only "address" and "zip" are stage
  // inputs, so "name" must be reported as an invalid input of "write".
  FieldWriteOperation write =
    new FieldWriteOperation("write", "writing data",
                            EndPoint.of("default", "file2"), "name", "address", "zip");
  builder = new StageOperationsValidator.Builder(Collections.singletonList(write));
  builder.addStageInputs(Arrays.asList("address", "zip"));
  validator = builder.build();
  validator.validate();
  InvalidFieldOperations invalids = validator.getStageInvalids();
  Assert.assertNotNull(invalids);
  Assert.assertEquals(1, invalids.getInvalidInputs().size());
  Map<String, List<String>> expectedInvalidInputs = new HashMap<>();
  expectedInvalidInputs.put("name", Collections.singletonList("write"));
  Assert.assertEquals(expectedInvalidInputs, invalids.getInvalidInputs());
  Assert.assertEquals(0, invalids.getInvalidOutputs().size());

  // name is provided by output of the operation previous to write
  List<FieldOperation> operations = new ArrayList<>();
  operations.add(new FieldTransformOperation("name_lookup", "generating name",
                                             Collections.singletonList("address"), "name"));
  operations.add(new FieldWriteOperation("write", "writing data",
                                         EndPoint.of("default", "file2"),
                                         "name", "address", "zip"));
  builder = new StageOperationsValidator.Builder(operations);
  builder.addStageInputs(Arrays.asList("address", "zip"));
  validator = builder.build();
  validator.validate();
  Assert.assertNull(validator.getStageInvalids());
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the project cdap by caskdata.
Taken from the class LineageOperationProcessorTest, method testSimpleJoinWithAdditionalFields.
@Test
public void testSimpleJoinWithAdditionalFields() {
  // customer -> (id)------------
  // |
  // JOIN ------->(id, customer_id)
  // |
  // purchase -> (customer_id)---
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint outputEndPoint = EndPoint.of("default", "customer_purchase");

  // Per-stage field operations: two reads, a join stage with identity
  // transforms for the pass-through fields, and a final write.
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description",
                           customerEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description",
                           purchaseEndPoint, "customer_id", "item")));
  List<FieldOperation> joinOperations = new ArrayList<>();
  joinOperations.add(new FieldTransformOperation("Join", "Join Operation",
    Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
  joinOperations.add(new FieldTransformOperation("Identity name", "Identity Operation",
    Collections.singletonList("n1.name"), Collections.singletonList("name")));
  joinOperations.add(new FieldTransformOperation("Identity item", "Identity Operation",
    Collections.singletonList("n2.item"), Collections.singletonList("item")));
  stageOperations.put("n3", joinOperations);
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description",
                            outputEndPoint, "id", "name", "customer_id", "item")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
  Set<Operation> processedOperations = processor.process();

  // Processed operations are prefixed with their stage name and their inputs
  // are resolved to the operation that produced each field.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description",
                                           customerEndPoint, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description",
                                           purchaseEndPoint, "customer_id", "item"));
  expectedOperations.add(new TransformOperation("n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                  InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expectedOperations.add(new TransformOperation("n3.Identity name", "Identity Operation",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name"));
  expectedOperations.add(new TransformOperation("n3.Identity item", "Identity Operation",
    Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", outputEndPoint,
    Arrays.asList(InputField.of("n3.Join", "id"),
                  InputField.of("n3.Identity name", "name"),
                  InputField.of("n3.Join", "customer_id"),
                  InputField.of("n3.Identity item", "item"))));

  Assert.assertEquals(expectedOperations, processedOperations);
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the project cdap by caskdata.
Taken from the class LineageOperationProcessorTest, method testMergeOperationsNonRepeat.
@Test
public void testMergeOperationsNonRepeat() {
  // n1 -> n3 ----
  // |---- n5
  // n2 -> n4 ----
  // operations (n1) -> (id, name)
  // (n3) -> (body, offset)
  // (n2.id) -> id
  // (n2.name) -> name
  // (n4.body) -> (id, name)
  // (n5) -> (id, name)
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n3", "n5"));
  connections.add(new Connection("n2", "n4"));
  connections.add(new Connection("n4", "n5"));

  EndPoint src1 = EndPoint.of("default", "n1");
  EndPoint src2 = EndPoint.of("default", "n2");
  EndPoint dest = EndPoint.of("default", "n5");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("read1", "read description", src1, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("read2", "read description", src2, "body", "offset")));
  List<FieldOperation> n3Operations = new ArrayList<>();
  n3Operations.add(new FieldTransformOperation("identity1", "identity",
                                               Collections.singletonList("id"), "id"));
  n3Operations.add(new FieldTransformOperation("identity2", "identity",
                                               Collections.singletonList("name"), "name"));
  stageOperations.put("n3", n3Operations);
  stageOperations.put("n4", Collections.singletonList(
    new FieldTransformOperation("generate", "generate",
                                Collections.singletonList("body"), "id", "name")));
  stageOperations.put("n5", Collections.singletonList(
    new FieldWriteOperation("write", "write", dest, "id", "name")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());

  // n5 has two incoming branches (n3 and n4), so the processor is expected to
  // emit a synthetic "merge" transform per field combining both branches.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.read1", "read description", src1, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.read2", "read description", src2, "body", "offset"));
  expectedOperations.add(new TransformOperation("n3.identity1", "identity",
    Collections.singletonList(InputField.of("n1.read1", "id")), "id"));
  expectedOperations.add(new TransformOperation("n3.identity2", "identity",
    Collections.singletonList(InputField.of("n1.read1", "name")), "name"));
  expectedOperations.add(new TransformOperation("n4.generate", "generate",
    Collections.singletonList(InputField.of("n2.read2", "body")), "id", "name"));
  expectedOperations.add(new TransformOperation("n3,n4.merge.id", "Merged stages: n3,n4",
    Arrays.asList(InputField.of("n3.identity1", "id"), InputField.of("n4.generate", "id")), "id"));
  expectedOperations.add(new TransformOperation("n3,n4.merge.name", "Merged stages: n3,n4",
    Arrays.asList(InputField.of("n3.identity2", "name"), InputField.of("n4.generate", "name")), "name"));
  expectedOperations.add(new TransformOperation("n3,n4.merge.body", "Merged stages: n3,n4",
    Collections.singletonList(InputField.of("n2.read2", "body")), "body"));
  expectedOperations.add(new TransformOperation("n3,n4.merge.offset", "Merged stages: n3,n4",
    Collections.singletonList(InputField.of("n2.read2", "offset")), "offset"));
  expectedOperations.add(new WriteOperation("n5.write", "write", dest,
    Arrays.asList(InputField.of("n3,n4.merge.id", "id"),
                  InputField.of("n3,n4.merge.name", "name"))));

  Assert.assertEquals(expectedOperations, processor.process());
}
Example usage of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in the project cdap by caskdata.
Taken from the class LineageOperationProcessorTest, method testSimpleJoinWithRenameOnAdditionalFields.
@Test
public void testSimpleJoinWithRenameOnAdditionalFields() {
  // customer -> (id, name)----------
  // |
  // JOIN --->(id_from_customer, customer_id, name_from_customer, item_from_purchase)
  // |
  // purchase ->(customer_id, item)---
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint outputEndPoint = EndPoint.of("default", "customer_purchase");

  // Join stage: join on id/customer_id, then rename every pass-through field.
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description",
                           customerEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description",
                           purchaseEndPoint, "customer_id", "item")));
  List<FieldOperation> joinOperations = new ArrayList<>();
  joinOperations.add(new FieldTransformOperation("Join", "Join Operation",
    Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
  joinOperations.add(new FieldTransformOperation("Rename id", "Rename id",
    Collections.singletonList("id"), "id_from_customer"));
  joinOperations.add(new FieldTransformOperation("Rename name", "Rename name",
    Collections.singletonList("n1.name"), "name_from_customer"));
  joinOperations.add(new FieldTransformOperation("Rename item", "Rename item",
    Collections.singletonList("n2.item"), "item_from_purchase"));
  stageOperations.put("n3", joinOperations);
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", outputEndPoint,
                            "id_from_customer", "customer_id",
                            "name_from_customer", "item_from_purchase")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
  Set<Operation> processedOperations = processor.process();

  // Renames of fields produced by the join resolve to "n3.Join"; renames of
  // source-qualified fields resolve directly to the corresponding read.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description",
                                           customerEndPoint, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description",
                                           purchaseEndPoint, "customer_id", "item"));
  expectedOperations.add(new TransformOperation("n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                  InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expectedOperations.add(new TransformOperation("n3.Rename id", "Rename id",
    Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
  expectedOperations.add(new TransformOperation("n3.Rename name", "Rename name",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
  expectedOperations.add(new TransformOperation("n3.Rename item", "Rename item",
    Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item_from_purchase"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", outputEndPoint,
    Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
                  InputField.of("n3.Join", "customer_id"),
                  InputField.of("n3.Rename name", "name_from_customer"),
                  InputField.of("n3.Rename item", "item_from_purchase"))));

  Assert.assertEquals(expectedOperations, processedOperations);
}
Aggregations