Example usage of io.cdap.cdap.api.lineage.field.ReadOperation in the cdap project (by caskdata):
the method testSimpleJoinWithRenameJoinKeys from the class LineageOperationsProcessorTest.
@Test
public void testSimpleJoinWithRenameJoinKeys() {
  // Pipeline wiring: two sources (n1 = customer, n2 = purchase) feed a joiner (n3),
  // whose output is written by a sink stage (n4).
  Set<Connection> pipelineConnections = new HashSet<>();
  pipelineConnections.add(new Connection("n1", "n3"));
  pipelineConnections.add(new Connection("n2", "n3"));
  pipelineConnections.add(new Connection("n3", "n4"));

  EndPoint customerEndPoint = EndPoint.of("default", "customer");
  EndPoint purchaseEndPoint = EndPoint.of("default", "purchase");
  EndPoint customerPurchaseEndPoint = EndPoint.of("default", "customer_purchase");

  // customer -> (id, name)------------
  // |
  // JOIN ------->(id_from_customer, id_from_purchase, name, item)
  // |
  // purchase -> (customer_id, item)---

  // Per-stage field operations as a pipeline plugin would report them.
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", customerEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", purchaseEndPoint, "customer_id", "item")));

  // Joiner emits the join itself, renames of both join keys, and identity ops for the
  // pass-through fields. Joiner inputs are qualified with the previous stage name.
  List<FieldOperation> joinStageOperations = new ArrayList<>();
  joinStageOperations.add(new FieldTransformOperation(
    "Join", "Join Operation",
    Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
  joinStageOperations.add(new FieldTransformOperation(
    "Rename id", "Rename id", Collections.singletonList("id"), "id_from_customer"));
  joinStageOperations.add(new FieldTransformOperation(
    "Rename customer_id", "Rename customer_id",
    Collections.singletonList("customer_id"), "id_from_purchase"));
  joinStageOperations.add(new FieldTransformOperation(
    "Identity name", "Identity Operation",
    Collections.singletonList("n1.name"), Collections.singletonList("name")));
  joinStageOperations.add(new FieldTransformOperation(
    "Identity item", "Identity Operation",
    Collections.singletonList("n2.item"), Collections.singletonList("item")));
  stageOperations.put("n3", joinStageOperations);

  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", customerPurchaseEndPoint,
                            "id_from_customer", "id_from_purchase", "name", "item")));

  // n3 is a joiner, so no implicit merge operations should be generated for it.
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(pipelineConnections, stageOperations, Collections.singleton("n3"));
  Set<Operation> actual = processor.process();

  // Expected platform operations: names are prefixed with their stage, and every input
  // field is resolved to the operation that produced it.
  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.ReadCustomer", "read description", customerEndPoint, "id", "name"));
  expected.add(new ReadOperation("n2.ReadPurchase", "read description", purchaseEndPoint, "customer_id", "item"));
  expected.add(new TransformOperation(
    "n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expected.add(new TransformOperation(
    "n3.Rename id", "Rename id",
    Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
  expected.add(new TransformOperation(
    "n3.Rename customer_id", "Rename customer_id",
    Collections.singletonList(InputField.of("n3.Join", "customer_id")), "id_from_purchase"));
  expected.add(new TransformOperation(
    "n3.Identity name", "Identity Operation",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name"));
  expected.add(new TransformOperation(
    "n3.Identity item", "Identity Operation",
    Collections.singletonList(InputField.of("n2.item" == null ? null : "n2.ReadPurchase", "item")), "item"));
  expected.add(new WriteOperation(
    "n4.Write", "write description", customerPurchaseEndPoint,
    Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"),
                  InputField.of("n3.Rename customer_id", "id_from_purchase"),
                  InputField.of("n3.Identity name", "name"),
                  InputField.of("n3.Identity item", "item"))));

  Assert.assertEquals(expected, actual);
}
Example usage of io.cdap.cdap.api.lineage.field.ReadOperation in the cdap project (by caskdata):
the method computeProcessedOperations from the class LineageOperationsProcessor.
/**
 * Converts all the stage field operations into platform operations. Stages are visited in
 * topological order, so by the time a stage is processed, the origins of every field it
 * consumes have already been recorded by earlier stages.
 * If a stage other than a joiner has multiple inputs, implicit merge operations are generated
 * first so that later stages can look up field origins unambiguously.
 * For joiners, the input field names are expected to already carry the previous stage name.
 *
 * @return a {@link Map} from operation name to the corresponding platform {@link Operation}
 */
private Map<String, Operation> computeProcessedOperations() {
  Map<String, Operation> platformOperations = new HashMap<>();
  for (String stage : topologicalOrder) {
    Set<String> stageInputs = stageDag.getNodeInputs(stage);
    // Multiple inputs on a non-joiner stage require implicit merge operations.
    if (stageInputs.size() > 1 && !noMergeRequiredStages.contains(stage)) {
      addMergeOperation(stageInputs, platformOperations);
    }
    // NOTE(review): assumes every stage in topologicalOrder has an entry in
    // stageOperations; a missing entry would NPE here — confirm against the callers.
    for (FieldOperation fieldOperation : stageOperations.get(stage)) {
      String platformName = prefixedName(stage, fieldOperation.getName());
      Set<String> producedFields = new LinkedHashSet<>();
      Operation platformOperation = null;
      switch (fieldOperation.getType()) {
        case READ: {
          FieldReadOperation readOp = (FieldReadOperation) fieldOperation;
          platformOperation = new ReadOperation(platformName, readOp.getDescription(),
                                                readOp.getSource(), readOp.getOutputFields());
          producedFields.addAll(readOp.getOutputFields());
          break;
        }
        case TRANSFORM: {
          FieldTransformOperation transformOp = (FieldTransformOperation) fieldOperation;
          platformOperation = new TransformOperation(
            platformName, transformOp.getDescription(),
            createInputFields(transformOp.getInputFields(), stage, platformOperations),
            transformOp.getOutputFields());
          producedFields.addAll(transformOp.getOutputFields());
          break;
        }
        case WRITE: {
          FieldWriteOperation writeOp = (FieldWriteOperation) fieldOperation;
          platformOperation = new WriteOperation(
            platformName, writeOp.getDescription(), writeOp.getSink(),
            createInputFields(writeOp.getInputFields(), stage, platformOperations));
          break;
        }
      }
      // Record this operation as the origin of every field it outputs. If a later
      // operation in the same stage outputs the same field, that later operation
      // overwrites the origin.
      for (String producedField : producedFields) {
        stageOutputsWithOrigins.get(stage).put(producedField, platformOperation.getName());
      }
      platformOperations.put(platformOperation.getName(), platformOperation);
    }
  }
  return platformOperations;
}
Aggregations