Usage of io.cdap.cdap.proto.metadata.lineage.FieldLineageDetails in project cdap by caskdata: the LineageHTTPHandler class, method datasetFieldLineageDetails.
/**
 * Returns the field-level operation details for a single field of a dataset.
 *
 * @param field the field name whose operation details should be computed
 * @param directionStr direction of the field-level lineage to compute; one of
 *                     INCOMING, OUTGOING or BOTH (defaults to "both")
 * @param startStr start of the time range, either an absolute timestamp in
 *                 milliseconds or a relative expression based on "now"
 * @param endStr end of the time range, either an absolute timestamp in
 *               milliseconds or a relative expression based on "now"
 */
@GET
@Path("/namespaces/{namespace-id}/datasets/{dataset-id}/lineage/fields/{field-name}/operations")
public void datasetFieldLineageDetails(HttpRequest request, HttpResponder responder,
                                       @PathParam("namespace-id") String namespaceId,
                                       @PathParam("dataset-id") String datasetId,
                                       @PathParam("field-name") String field,
                                       @QueryParam("direction") @DefaultValue("both") String directionStr,
                                       @QueryParam("start") String startStr,
                                       @QueryParam("end") String endStr) throws Exception {
  // The caller must hold GET permission on the dataset before any lineage is disclosed.
  accessEnforcer.enforce(new DatasetId(namespaceId, datasetId),
                         authenticationContext.getPrincipal(), StandardPermission.GET);
  // Parse the time window first, then the direction, mirroring the query-parameter order.
  TimeRange window = parseRange(startStr, endStr);
  Constants.FieldLineage.Direction lineageDirection = parseDirection(directionStr);
  EndPointField target = new EndPointField(EndPoint.of(namespaceId, datasetId), field);
  FieldLineageDetails result =
      fieldLineageAdmin.getOperationDetails(lineageDirection, target,
                                            window.getStart(), window.getEnd());
  responder.sendJson(HttpResponseStatus.OK, GSON.toJson(result));
}
Usage of io.cdap.cdap.proto.metadata.lineage.FieldLineageDetails in project cdap by caskdata: the FieldLineageAdminTest class, method testOperations.
@Test
public void testOperations() {
  FieldLineageAdmin admin = new FieldLineageAdmin(
      new FakeFieldLineageReader(Collections.emptySet(), Collections.emptySet(), operations()),
      metadataAdmin);
  EndPoint endPoint = EndPoint.of("ns", "file");
  // The reader is faked, so the direction/field/time-range arguments passed here are irrelevant;
  // the admin simply shapes whatever operations() returned into the response object.
  FieldLineageDetails details = admin.getOperationDetails(
      Constants.FieldLineage.Direction.INCOMING,
      new EndPointField(endPoint, "somefield"), 0, Long.MAX_VALUE);
  List<ProgramFieldOperationInfo> incoming = details.getIncoming();
  Assert.assertNotNull(incoming);

  ProgramId spark = new ProgramId("ns", "app", ProgramType.SPARK, "sparkprogram");
  ProgramId mapReduce = new ProgramId("ns", "app", ProgramType.MAPREDUCE, "mrprogram");
  ProgramRunId sparkRun1 = spark.run(RunIds.generate(1000));
  ProgramRunId sparkRun2 = spark.run(RunIds.generate(2000));
  ProgramRunId sparkRun3 = spark.run(RunIds.generate(3000));
  ProgramRunId sparkRun4 = spark.run(RunIds.generate(5000));
  ProgramRunId mrRun1 = mapReduce.run(RunIds.generate(4000));
  ProgramRunId mrRun2 = mapReduce.run(RunIds.generate(6000));

  EndPoint source = EndPoint.of("ns", "file");
  EndPoint destination = EndPoint.of("ns", "anotherfile");

  Set<ProgramFieldOperationInfo> expected = new HashSet<>();

  // First expected entry: sparkRun1 and sparkRun2 emitted the same operations, so only the
  // latest of the two runs is reported; no run of the MapReduce program emitted this set.
  List<ProgramInfo> programs = new ArrayList<>();
  programs.add(new ProgramInfo(spark, RunIds.getTime(sparkRun2.getRun(), TimeUnit.SECONDS)));
  // The returned operations must be topologically sorted: read before write.
  List<FieldOperationInfo> operations = new ArrayList<>();
  operations.add(new FieldOperationInfo("read", "reading file",
      FieldOperationInput.of(source),
      FieldOperationOutput.of(Arrays.asList("offset", "body"))));
  operations.add(new FieldOperationInfo("write", "writing file",
      FieldOperationInput.of(Arrays.asList(
          InputField.of("read", "offset"),
          InputField.of("parse", "name"),
          InputField.of("parse", "address"),
          InputField.of("parse", "zip"))),
      FieldOperationOutput.of(destination)));
  expected.add(new ProgramFieldOperationInfo(programs, operations));

  // Second expected entry: both programs emitted this operation set, so one latest run of each
  // program appears, ordered by most recent execution time (the MapReduce run came last).
  programs = new ArrayList<>();
  programs.add(new ProgramInfo(mapReduce, RunIds.getTime(mrRun2.getRun(), TimeUnit.SECONDS)));
  programs.add(new ProgramInfo(spark, RunIds.getTime(sparkRun4.getRun(), TimeUnit.SECONDS)));
  operations = new ArrayList<>();
  operations.add(new FieldOperationInfo("read", "reading file",
      FieldOperationInput.of(source),
      FieldOperationOutput.of(Arrays.asList("offset", "body"))));
  operations.add(new FieldOperationInfo("normalize", "normalizing offset",
      FieldOperationInput.of(Collections.singletonList(InputField.of("read", "offset"))),
      FieldOperationOutput.of(Collections.singletonList("offset"))));
  operations.add(new FieldOperationInfo("write", "writing file",
      FieldOperationInput.of(Arrays.asList(
          InputField.of("normalize", "offset"),
          InputField.of("parse", "name"),
          InputField.of("parse", "address"),
          InputField.of("parse", "zip"))),
      FieldOperationOutput.of(destination)));
  expected.add(new ProgramFieldOperationInfo(programs, operations));

  // Compare as sets because ordering across different operation versions is not guaranteed.
  Assert.assertEquals(expected, new HashSet<>(incoming));
}
Aggregations