use of io.cdap.cdap.api.lineage.field.WriteOperation in project cdap by caskdata.
the class LineageOperationProcessorTest method testSameKeyAndRenameJoin.
@Test
public void testSameKeyAndRenameJoin() {
// n1(id(key), swap1, n1same) ---------
//                                     |
//                                     JOIN ------->(id, new_id, swap1, swap2, n1same, n2same)
//                                     |
// n2(id(key), swap2, n2same)----------
// operations (n1.id, n2.id) -> id
// (n2.id) -> new_id
// (n1.swap1) -> swap2
// (n2.swap2) -> swap1
// (n1.n1same) -> n1same
// (n2.n2same) -> n2same
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n3"));
connections.add(new Connection("n2", "n3"));
connections.add(new Connection("n3", "n4"));
EndPoint src1 = EndPoint.of("default", "n1");
EndPoint src2 = EndPoint.of("default", "n2");
EndPoint dest = EndPoint.of("default", "n4");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("readSrc1", "read description", src1, "id", "swap1", "n1same")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("readSrc2", "read description", src2, "id", "swap2", "n2same")));
List<FieldOperation> joinOperations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
joinOperations.add(new FieldTransformOperation("JoinKey", "Join Key", Arrays.asList("n1.id", "n2.id"), "id"));
joinOperations.add(new FieldTransformOperation("RenameN2", "rename", Collections.singletonList("n2.id"), "new_id"));
joinOperations.add(new FieldTransformOperation("swap1", "swap", Collections.singletonList("n1.swap1"), "swap2"));
joinOperations.add(new FieldTransformOperation("swap2", "swap", Collections.singletonList("n2.swap2"), "swap1"));
joinOperations.add(new FieldTransformOperation("unchange1", "unchange", Collections.singletonList("n1.n1same"), "n1same"));
joinOperations.add(new FieldTransformOperation("unchange2", "unchange", Collections.singletonList("n2.n2same"), "n2same"));
stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", dest, "id", "new_id", "swap1", "swap2", "n1same", "n2same")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.readSrc1", "read description", src1, "id", "swap1", "n1same"));
expectedOperations.add(new ReadOperation("n2.readSrc2", "read description", src2, "id", "swap2", "n2same"));
expectedOperations.add(new TransformOperation("n3.JoinKey", "Join Key", Arrays.asList(InputField.of("n1.readSrc1", "id"), InputField.of("n2.readSrc2", "id")), "id"));
expectedOperations.add(new TransformOperation("n3.RenameN2", "rename", Collections.singletonList(InputField.of("n2.readSrc2", "id")), "new_id"));
expectedOperations.add(new TransformOperation("n3.swap1", "swap", Collections.singletonList(InputField.of("n1.readSrc1", "swap1")), "swap2"));
expectedOperations.add(new TransformOperation("n3.swap2", "swap", Collections.singletonList(InputField.of("n2.readSrc2", "swap2")), "swap1"));
expectedOperations.add(new TransformOperation("n3.unchange1", "unchange", Collections.singletonList(InputField.of("n1.readSrc1", "n1same")), "n1same"));
expectedOperations.add(new TransformOperation("n3.unchange2", "unchange", Collections.singletonList(InputField.of("n2.readSrc2", "n2same")), "n2same"));
expectedOperations.add(new WriteOperation("n4.Write", "write description", dest,
  Arrays.asList(InputField.of("n3.JoinKey", "id"),
                InputField.of("n3.RenameN2", "new_id"),
                InputField.of("n3.swap2", "swap1"),
                InputField.of("n3.swap1", "swap2"),
                InputField.of("n3.unchange1", "n1same"),
                InputField.of("n3.unchange2", "n2same"))));
Assert.assertEquals(expectedOperations, processor.process());
}
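The assertion above depends on the processor qualifying every operation name with its stage ("n3.swap1", "n4.Write") and pointing each InputField at that qualified origin. A minimal sketch of reading those origins back out of the processed set, assuming the usual accessors Operation.getName(), WriteOperation.getInputs(), InputField.getName() and InputField.getOrigin():
// Hedged sketch, not part of the test: list which upstream operation produced each written field.
private static void printWriteOrigins(Set<Operation> processed) {
  for (Operation operation : processed) {
    if (operation instanceof WriteOperation) {
      WriteOperation write = (WriteOperation) operation;
      for (InputField field : write.getInputs()) {
        // for this test, e.g. "n4.Write <- swap1 from n3.swap2"
        System.out.println(write.getName() + " <- " + field.getName() + " from " + field.getOrigin());
      }
    }
  }
}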
use of io.cdap.cdap.api.lineage.field.WriteOperation in project cdap by caskdata.
the class LineageOperationProcessorTest method testSourceWithMultipleDestinations.
@Test
public void testSourceWithMultipleDestinations() {
//              |----->n3
// n1--->n2-----|
//              |----->n4
// n1 => read: file -> (offset, body)
// n2 => parse: body -> (id, name, address, zip)
// n3 => write1: (parse.id, parse.name) -> info
// n4 => write2: (parse.address, parse.zip) -> location
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n2"));
connections.add(new Connection("n2", "n3"));
connections.add(new Connection("n3", "n4"));
EndPoint source = EndPoint.of("ns", "file");
EndPoint info = EndPoint.of("ns", "info");
EndPoint location = EndPoint.of("ns", "location");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
List<FieldOperation> fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldReadOperation("read", "reading from file", source, "offset", "body"));
stageOperations.put("n1", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"), "id", "name", "address", "zip"));
stageOperations.put("n2", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldWriteOperation("infoWrite", "writing info", info, "id", "name"));
stageOperations.put("n3", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldWriteOperation("locationWrite", "writing location", location, "address", "zip"));
stageOperations.put("n4", fieldOperations);
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
Set<Operation> processedOperations = processor.process();
Set<Operation> expectedOperations = new HashSet<>();
ReadOperation read = new ReadOperation("n1.read", "reading from file", source, "offset", "body");
expectedOperations.add(read);
TransformOperation parse = new TransformOperation("n2.parse", "parsing body", Collections.singletonList(InputField.of("n1.read", "body")), "id", "name", "address", "zip");
expectedOperations.add(parse);
WriteOperation infoWrite = new WriteOperation("n3.infoWrite", "writing info", info, InputField.of("n2.parse", "id"), InputField.of("n2.parse", "name"));
expectedOperations.add(infoWrite);
WriteOperation locationWrite = new WriteOperation("n4.locationWrite", "writing location", location, InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip"));
expectedOperations.add(locationWrite);
Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
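Both writes above cite the single origin "n2.parse", which is what makes this a fan-out from one transform to two destinations. A hedged sketch of a small helper (hypothetical, not part of the test) that builds a WriteOperation whose inputs all share one origin, using the List<InputField> constructor exercised elsewhere in these examples:
// Hypothetical convenience helper; writeFrom("n3.infoWrite", "writing info", info, "n2.parse", "id", "name")
// would mirror the infoWrite expectation above.
private static WriteOperation writeFrom(String name, String description, EndPoint destination,
                                        String origin, String... fields) {
  List<InputField> inputs = new ArrayList<>();
  for (String field : fields) {
    inputs.add(InputField.of(origin, field));
  }
  return new WriteOperation(name, description, destination, inputs);
}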
use of io.cdap.cdap.api.lineage.field.WriteOperation in project cdap by caskdata.
the class LineageOperationProcessorTest method testSimplePipeline.
@Test
public void testSimplePipeline() {
// n1-->n2-->n3
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n2"));
connections.add(new Connection("n2", "n3"));
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
List<FieldOperation> fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
stageOperations.put("n1", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"), Arrays.asList("name", "address", "zip")));
stageOperations.put("n2", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"), "name", "address", "zip"));
stageOperations.put("n3", fieldOperations);
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
Set<Operation> processedOperations = processor.process();
Set<Operation> expected = new HashSet<>();
expected.add(new ReadOperation("n1.read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
expected.add(new TransformOperation("n2.parse", "parsing data", Collections.singletonList(InputField.of("n1.read", "body")), "name", "address", "zip"));
expected.add(new WriteOperation("n3.write", "writing data", EndPoint.of("default", "file2"),
  InputField.of("n2.parse", "name"), InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip")));
Assert.assertEquals(new FieldLineageInfo(expected), new FieldLineageInfo(processedOperations));
}
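Note that these examples use both output forms of FieldTransformOperation: varargs outputs (the "parse" operation in the previous test) and a List of outputs (the "parse" operation here). A small side-by-side sketch of the two constructor forms as they appear in these snippets:
// Both forms declare the same input field and the same output fields.
FieldTransformOperation varargsForm =
  new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"),
                              "name", "address", "zip");
FieldTransformOperation listForm =
  new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"),
                              Arrays.asList("name", "address", "zip"));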
use of io.cdap.cdap.api.lineage.field.WriteOperation in project cdap by caskdata.
the class DataPipelineTest method testActionFieldLineage.
private void testActionFieldLineage(Engine engine) throws Exception {
String readDataset = "ActionReadDataset" + engine;
String writeDataset = "ActionWriteDataset" + engine;
List<String> srcFields = ImmutableList.of("srcField1", "srcField2", "srcField3");
Set<String> destFields = ImmutableSet.of("destField1", "destField2", "destField3");
List<Operation> operations = new ArrayList<>();
/*
 *                      |---------> srcField1 -> destField1 ----|
 *                      |                                       |
 * ActionReadDataset ---|---------> srcField2 -> destField2 ----|-> ActionWriteDataset
 *                      |                                       |
 *                      |---------> srcField3 -> destField3 ----|
 */
operations.add(new ReadOperation("Read", "1st operation", EndPoint.of("default", readDataset), srcFields));
operations.add(new TransformOperation("Transform1", "2nd operation", Collections.singletonList(InputField.of("Read", "srcField1")), "destField1"));
operations.add(new TransformOperation("Transform2", "3rd operation", Collections.singletonList(InputField.of("Read", "srcField2")), "destField2"));
operations.add(new TransformOperation("Transform3", "4th operation", Collections.singletonList(InputField.of("Read", "srcField3")), "destField3"));
operations.add(new WriteOperation("Write", "5th operation", EndPoint.of("default", writeDataset),
  ImmutableList.of(InputField.of("Transform1", "destField1"),
                   InputField.of("Transform2", "destField2"),
                   InputField.of("Transform3", "destField3"))));
ETLStage action = new ETLStage("action", FieldLineageAction.getPlugin(readDataset, writeDataset, operations));
ETLBatchConfig etlConfig = ETLBatchConfig.builder().addStage(action).setEngine(engine).build();
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
ApplicationId appId = NamespaceId.DEFAULT.app("ActionFieldLineage-" + engine);
ApplicationManager appManager = deployApplication(appId, appRequest);
WorkflowManager workflowManager = appManager.getWorkflowManager(SmartWorkflow.NAME);
workflowManager.startAndWaitForGoodRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
FieldLineageAdmin fieldAdmin = getFieldLineageAdmin();
// get field lineage for dest dataset
DatasetFieldLineageSummary summary = fieldAdmin.getDatasetFieldLineage(
  Constants.FieldLineage.Direction.BOTH, EndPoint.of("default", writeDataset), 0, System.currentTimeMillis());
Assert.assertEquals(NamespaceId.DEFAULT.dataset(writeDataset), summary.getDatasetId());
Assert.assertEquals(destFields, summary.getFields());
Assert.assertTrue(summary.getOutgoing().isEmpty());
Assert.assertEquals(1, summary.getIncoming().size());
Set<FieldRelation> fieldRelations = ImmutableSet.of(new FieldRelation("srcField1", "destField1"),
                                                    new FieldRelation("srcField2", "destField2"),
                                                    new FieldRelation("srcField3", "destField3"));
DatasetFieldLineageSummary.FieldLineageRelations expectedRelations =
  new DatasetFieldLineageSummary.FieldLineageRelations(NamespaceId.DEFAULT.dataset(readDataset), 3, fieldRelations);
Assert.assertEquals(expectedRelations, summary.getIncoming().iterator().next());
// get field lineage for src dataset
summary = fieldAdmin.getDatasetFieldLineage(Constants.FieldLineage.Direction.BOTH, EndPoint.of("default", readDataset), 0, System.currentTimeMillis());
Assert.assertEquals(NamespaceId.DEFAULT.dataset(readDataset), summary.getDatasetId());
Assert.assertEquals(new HashSet<>(srcFields), summary.getFields());
Assert.assertTrue(summary.getIncoming().isEmpty());
Assert.assertEquals(1, summary.getOutgoing().size());
expectedRelations = new DatasetFieldLineageSummary.FieldLineageRelations(NamespaceId.DEFAULT.dataset(writeDataset), 3, fieldRelations);
Assert.assertEquals(expectedRelations, summary.getOutgoing().iterator().next());
LineageAdmin lineageAdmin = getLineageAdmin();
ProgramId programId = appId.workflow(SmartWorkflow.NAME);
RunId runId = RunIds.fromString(workflowManager.getHistory().iterator().next().getPid());
// get dataset lineage for src dataset
Tasks.waitFor(2, () -> {
Lineage lineage = lineageAdmin.computeLineage(NamespaceId.DEFAULT.dataset(readDataset), 0, System.currentTimeMillis(), 1, "workflow");
return lineage.getRelations().size();
}, 10, TimeUnit.SECONDS);
Lineage lineage = lineageAdmin.computeLineage(NamespaceId.DEFAULT.dataset(readDataset), 0, System.currentTimeMillis(), 1, "workflow");
Set<Relation> expectedLineage = ImmutableSet.of(
  new Relation(NamespaceId.DEFAULT.dataset(readDataset), programId, AccessType.READ, runId),
  new Relation(NamespaceId.DEFAULT.dataset(writeDataset), programId, AccessType.WRITE, runId));
Assert.assertEquals(expectedLineage, lineage.getRelations());
// get dataset lineage for dest dataset, in this test they should be same
lineage = lineageAdmin.computeLineage(NamespaceId.DEFAULT.dataset(writeDataset), 0, System.currentTimeMillis(), 1, "workflow");
Assert.assertEquals(2, lineage.getRelations().size());
Assert.assertEquals(expectedLineage, lineage.getRelations());
}
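The five operations registered above follow a strict per-field pattern, as the diagram shows: one Read, one Transform per source field, and a single Write collecting the transformed fields. A hedged sketch of building the same graph in a loop (a hypothetical restructuring with illustrative transform descriptions; the test spells each operation out explicitly):
List<Operation> ops = new ArrayList<>();
ops.add(new ReadOperation("Read", "1st operation", EndPoint.of("default", readDataset), srcFields));
List<InputField> writeInputs = new ArrayList<>();
for (int i = 1; i <= srcFields.size(); i++) {
  // each source field is transformed independently into its destination field
  ops.add(new TransformOperation("Transform" + i, "transform srcField" + i,
                                 Collections.singletonList(InputField.of("Read", "srcField" + i)),
                                 "destField" + i));
  writeInputs.add(InputField.of("Transform" + i, "destField" + i));
}
ops.add(new WriteOperation("Write", "5th operation", EndPoint.of("default", writeDataset), writeInputs));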
use of io.cdap.cdap.api.lineage.field.WriteOperation in project cdap by caskdata.
the class FieldLineageAdminTest method operations.
private Set<ProgramRunOperations> operations() {
ProgramId program1 = new ProgramId("ns", "app", ProgramType.SPARK, "sparkprogram");
ProgramId program2 = new ProgramId("ns", "app", ProgramType.MAPREDUCE, "mrprogram");
EndPoint endPoint1 = EndPoint.of("ns", "file");
EndPoint endPoint2 = EndPoint.of("ns", "anotherfile");
ReadOperation read = new ReadOperation("read", "reading file", endPoint1, "offset", "body");
WriteOperation write = new WriteOperation("write", "writing file", endPoint2,
  InputField.of("read", "offset"), InputField.of("parse", "name"),
  InputField.of("parse", "address"), InputField.of("parse", "zip"));
ProgramRunId program1Run1 = program1.run(RunIds.generate(1000));
ProgramRunId program1Run2 = program1.run(RunIds.generate(2000));
Set<ProgramRunOperations> programRunOperations = new HashSet<>();
programRunOperations.add(new ProgramRunOperations(new HashSet<>(Arrays.asList(program1Run1, program1Run2)),
                                                  new HashSet<>(Arrays.asList(read, write))));
TransformOperation normalize = new TransformOperation("normalize", "normalizing offset", Collections.singletonList(InputField.of("read", "offset")), "offset");
write = new WriteOperation("write", "writing file", endPoint2,
  InputField.of("normalize", "offset"), InputField.of("parse", "name"),
  InputField.of("parse", "address"), InputField.of("parse", "zip"));
ProgramRunId program1Run3 = program1.run(RunIds.generate(3000));
ProgramRunId program1Run4 = program1.run(RunIds.generate(5000));
ProgramRunId program2Run1 = program2.run(RunIds.generate(4000));
ProgramRunId program2Run2 = program2.run(RunIds.generate(6000));
Set<ProgramRunId> programRunIds = new HashSet<>(Arrays.asList(program1Run3, program1Run4, program2Run1, program2Run2));
Set<Operation> operations = new HashSet<>(Arrays.asList(read, normalize, write));
programRunOperations.add(new ProgramRunOperations(programRunIds, operations));
return programRunOperations;
}
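Each ProgramRunOperations entry groups the runs that recorded the same operation set: two runs share the direct read-to-write graph, and four runs share the graph that routes the offset through "normalize". A minimal sketch of consuming the fixture, assuming the usual getters getProgramRunIds() and getOperations() on ProgramRunOperations:
for (ProgramRunOperations group : operations()) {
  System.out.println(group.getProgramRunIds().size() + " run(s) recorded "
                       + group.getOperations().size() + " operation(s)");
}
// Expected for this fixture (group order not guaranteed):
//   2 run(s) recorded 2 operation(s)
//   4 run(s) recorded 3 operation(s)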