Use of io.cdap.cdap.proto.metadata.lineage.FieldOperationInfo in project cdap by caskdata.
From the class FieldLineageAdmin, method convertToFieldOperationInfo.
private FieldOperationInfo convertToFieldOperationInfo(Operation operation) {
  FieldOperationInput inputs = null;
  FieldOperationOutput outputs = null;
  switch (operation.getType()) {
    case READ:
      ReadOperation read = (ReadOperation) operation;
      inputs = FieldOperationInput.of(read.getSource());
      outputs = FieldOperationOutput.of(read.getOutputs());
      break;
    case TRANSFORM:
      TransformOperation transform = (TransformOperation) operation;
      inputs = FieldOperationInput.of(transform.getInputs());
      outputs = FieldOperationOutput.of(transform.getOutputs());
      break;
    case WRITE:
      WriteOperation write = (WriteOperation) operation;
      inputs = FieldOperationInput.of(write.getInputs());
      outputs = FieldOperationOutput.of(write.getDestination());
      break;
  }
  return new FieldOperationInfo(operation.getName(), operation.getDescription(), inputs, outputs);
}
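For context, the three branches above correspond to the three Operation subtypes in CDAP's field lineage API. The following is a minimal sketch, assuming the public constructors of ReadOperation, TransformOperation, and WriteOperation from io.cdap.cdap.api.lineage.field; the operation names, fields, and descriptions here are illustrative, not taken from the snippet.
// READ: the source EndPoint maps to FieldOperationInput, the produced fields map to FieldOperationOutput.
ReadOperation read = new ReadOperation("read", "reads the file", EndPoint.of("ns", "file"), "offset", "body");
// TRANSFORM: the origin fields map to FieldOperationInput, the produced fields map to FieldOperationOutput.
TransformOperation parse = new TransformOperation("parse", "parses the body",
  Collections.singletonList(InputField.of("read", "body")), "name", "address", "zip");
// WRITE: the origin fields map to FieldOperationInput, the destination EndPoint maps to FieldOperationOutput.
WriteOperation write = new WriteOperation("write", "writes the output", EndPoint.of("ns", "output"),
  Arrays.asList(InputField.of("parse", "name"), InputField.of("parse", "address"), InputField.of("parse", "zip")));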
Use of io.cdap.cdap.proto.metadata.lineage.FieldOperationInfo in project cdap by caskdata.
From the class FieldLineageAdmin, method processOperations.
private List<ProgramFieldOperationInfo> processOperations(List<ProgramRunOperations> programRunOperations) {
  List<ProgramFieldOperationInfo> result = new ArrayList<>();
  for (ProgramRunOperations entry : programRunOperations) {
    List<ProgramInfo> programInfo = computeProgramInfo(entry.getProgramRunIds());
    List<FieldOperationInfo> fieldOperationInfo = computeFieldOperationInfo(entry.getOperations());
    result.add(new ProgramFieldOperationInfo(programInfo, fieldOperationInfo));
  }
  return result;
}
Use of io.cdap.cdap.proto.metadata.lineage.FieldOperationInfo in project cdap by caskdata.
From the class FieldLineageAdmin, method computeFieldOperationInfo.
/**
 * Computes a list of {@link FieldOperationInfo} from the given operations.
 * The returned list contains the operations sorted in topological order, i.e. each operation
 * in the list is guaranteed to occur before any other operation that reads its outputs.
 *
 * @param operations set of operations to convert to FieldOperationInfo instances
 * @return list of FieldOperationInfo sorted topologically
 */
private List<FieldOperationInfo> computeFieldOperationInfo(Set<Operation> operations) {
  List<Operation> orderedOperations = FieldLineageInfo.getTopologicallySortedOperations(operations);
  List<FieldOperationInfo> fieldOperationInfos = new ArrayList<>();
  for (Operation operation : orderedOperations) {
    fieldOperationInfos.add(convertToFieldOperationInfo(operation));
  }
  return fieldOperationInfos;
}
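To illustrate the ordering guarantee documented above, here is a small hypothetical pipeline. Only FieldLineageInfo.getTopologicallySortedOperations comes from the snippet; the operations themselves are made up and assume the constructors from io.cdap.cdap.api.lineage.field.
Set<Operation> operations = new HashSet<>();
operations.add(new WriteOperation("write", "writes output", EndPoint.of("ns", "output"),
  Collections.singletonList(InputField.of("parse", "name"))));
operations.add(new ReadOperation("read", "reads input", EndPoint.of("ns", "input"), "body"));
operations.add(new TransformOperation("parse", "parses body",
  Collections.singletonList(InputField.of("read", "body")), "name"));
// Each consumer references its producer through InputField origins, so the sorted list
// is guaranteed to place "read" before "parse" and "parse" before "write".
List<Operation> sorted = FieldLineageInfo.getTopologicallySortedOperations(operations);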
Use of io.cdap.cdap.proto.metadata.lineage.FieldOperationInfo in project cdap by caskdata.
From the class FieldLineageAdminTest, method testOperations.
@Test
public void testOperations() {
  FieldLineageAdmin fieldLineageAdmin = new FieldLineageAdmin(
    new FakeFieldLineageReader(Collections.emptySet(), Collections.emptySet(), operations()), metadataAdmin);
  EndPoint endPoint = EndPoint.of("ns", "file");
  // Input arguments to getOperationDetails below do not matter since the returned data is mocked.
  FieldLineageDetails operationDetails = fieldLineageAdmin.getOperationDetails(
    Constants.FieldLineage.Direction.INCOMING, new EndPointField(endPoint, "somefield"), 0, Long.MAX_VALUE);
  ProgramId program1 = new ProgramId("ns", "app", ProgramType.SPARK, "sparkprogram");
  ProgramId program2 = new ProgramId("ns", "app", ProgramType.MAPREDUCE, "mrprogram");
  ProgramRunId program1Run1 = program1.run(RunIds.generate(1000));
  ProgramRunId program1Run2 = program1.run(RunIds.generate(2000));
  ProgramRunId program1Run3 = program1.run(RunIds.generate(3000));
  ProgramRunId program1Run4 = program1.run(RunIds.generate(5000));
  ProgramRunId program2Run1 = program2.run(RunIds.generate(4000));
  ProgramRunId program2Run2 = program2.run(RunIds.generate(6000));
  List<ProgramFieldOperationInfo> incomings = operationDetails.getIncoming();
  Set<ProgramFieldOperationInfo> expectedInfos = new HashSet<>();
  List<ProgramInfo> programInfos = new ArrayList<>();
  // program1Run1 and program1Run2 both generated the same set of operations; however, only the latest
  // run is included in the returned list. No run of program2 generated this set of operations.
  programInfos.add(new ProgramInfo(program1, RunIds.getTime(program1Run2.getRun(), TimeUnit.SECONDS)));
  EndPoint endPoint1 = EndPoint.of("ns", "file");
  EndPoint endPoint2 = EndPoint.of("ns", "anotherfile");
  List<FieldOperationInfo> fieldOperationInfos = new ArrayList<>();
  // The returned list should contain topologically sorted operations.
  fieldOperationInfos.add(new FieldOperationInfo("read", "reading file", FieldOperationInput.of(endPoint1),
                                                 FieldOperationOutput.of(Arrays.asList("offset", "body"))));
  List<InputField> inputFields = new ArrayList<>();
  inputFields.add(InputField.of("read", "offset"));
  inputFields.add(InputField.of("parse", "name"));
  inputFields.add(InputField.of("parse", "address"));
  inputFields.add(InputField.of("parse", "zip"));
  fieldOperationInfos.add(new FieldOperationInfo("write", "writing file", FieldOperationInput.of(inputFields),
                                                 FieldOperationOutput.of(endPoint2)));
  expectedInfos.add(new ProgramFieldOperationInfo(programInfos, fieldOperationInfos));
  programInfos = new ArrayList<>();
  // program1 and program2 both generated the next set of operations; the returned list contains only the
  // latest run of each program, sorted by last execution time.
  programInfos.add(new ProgramInfo(program2, RunIds.getTime(program2Run2.getRun(), TimeUnit.SECONDS)));
  programInfos.add(new ProgramInfo(program1, RunIds.getTime(program1Run4.getRun(), TimeUnit.SECONDS)));
  fieldOperationInfos = new ArrayList<>();
  fieldOperationInfos.add(new FieldOperationInfo("read", "reading file", FieldOperationInput.of(endPoint1),
                                                 FieldOperationOutput.of(Arrays.asList("offset", "body"))));
  FieldOperationInput input = FieldOperationInput.of(Collections.singletonList(InputField.of("read", "offset")));
  FieldOperationOutput output = FieldOperationOutput.of(Collections.singletonList("offset"));
  fieldOperationInfos.add(new FieldOperationInfo("normalize", "normalizing offset", input, output));
  inputFields = new ArrayList<>();
  inputFields.add(InputField.of("normalize", "offset"));
  inputFields.add(InputField.of("parse", "name"));
  inputFields.add(InputField.of("parse", "address"));
  inputFields.add(InputField.of("parse", "zip"));
  input = FieldOperationInput.of(inputFields);
  output = FieldOperationOutput.of(endPoint2);
  fieldOperationInfos.add(new FieldOperationInfo("write", "writing file", input, output));
  expectedInfos.add(new ProgramFieldOperationInfo(programInfos, fieldOperationInfos));
  Assert.assertNotNull(incomings);
  // Convert to a set because the ordering of the different sets of operations is not guaranteed.
  Assert.assertEquals(expectedInfos, new HashSet<>(incomings));
}
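The operations() helper that seeds FakeFieldLineageReader is not shown here. Inferred purely from the assertions above, the raw field-lineage operations behind the second expected FieldOperationInfo list could look roughly like the sketch below; the actual fixture in FieldLineageAdminTest may differ, and the "parse" origins are referenced by the write operation without appearing in the expected list.
Set<Operation> rawOperations = new HashSet<>();
rawOperations.add(new ReadOperation("read", "reading file", EndPoint.of("ns", "file"), "offset", "body"));
rawOperations.add(new TransformOperation("normalize", "normalizing offset",
  Collections.singletonList(InputField.of("read", "offset")), "offset"));
rawOperations.add(new WriteOperation("write", "writing file", EndPoint.of("ns", "anotherfile"),
  Arrays.asList(InputField.of("normalize", "offset"), InputField.of("parse", "name"),
                InputField.of("parse", "address"), InputField.of("parse", "zip"))));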