use of io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo in project cdap by caskdata.
the class SparkRuntimeService method destroy.
/**
* Calls the destroy or onFinish method of {@link ProgramLifecycle}.
*/
private void destroy(final ProgramState state) {
  context.setState(state);
  TransactionControl defaultTxControl = runtimeContext.getDefaultTxControl();
  TransactionControl txControl = spark instanceof ProgramLifecycle
    ? Transactions.getTransactionControl(defaultTxControl, Spark.class, spark, "destroy")
    : defaultTxControl;
  runtimeContext.destroyProgram(programLifecycle, txControl, false);
  if (emitFieldLineage()) {
    try {
      // here we cannot call context.flushRecord() since the WorkflowNodeState will need to record and store
      // the lineage information
      FieldLineageInfo info = new FieldLineageInfo(runtimeContext.getFieldLineageOperations());
      fieldLineageWriter.write(runtimeContext.getProgramRunId(), info);
    } catch (Throwable t) {
      LOG.warn("Failed to emit the field lineage operations for Spark {}", runtimeContext.getProgramRunId(), t);
    }
  }
}
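Both runtimes guard the lineage write so that a metadata failure never fails the program run. A minimal sketch of that emit-on-destroy pattern, keeping the FieldLineageInfo constructor and FieldLineageWriter.write call exactly as used above; the helper name and its parameters (lineageWriter, programRunId, operations) are hypothetical:

// Sketch only: best-effort lineage emission on shutdown, extracted from the destroy()
// method above. The helper and its parameters are hypothetical.
private void emitLineageQuietly(FieldLineageWriter lineageWriter, ProgramRunId programRunId,
                                Collection<Operation> operations) {
  if (operations.isEmpty()) {
    // Nothing was recorded during the run, so there is nothing to emit.
    return;
  }
  try {
    lineageWriter.write(programRunId, new FieldLineageInfo(operations));
  } catch (Throwable t) {
    // Lineage is best-effort metadata: log and continue so destroy() still completes.
    LOG.warn("Failed to emit the field lineage operations for {}", programRunId, t);
  }
}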
use of io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo in project cdap by cdapio.
the class MapReduceRuntimeService method destroy.
/**
* Calls the destroy method of {@link ProgramLifecycle}.
*/
private void destroy() {
  TransactionControl defaultTxControl = context.getDefaultTxControl();
  TransactionControl txControl = mapReduce instanceof ProgramLifecycle
    ? Transactions.getTransactionControl(defaultTxControl, MapReduce.class, mapReduce, "destroy")
    : defaultTxControl;
  context.destroyProgram(programLifecycle, txControl, false);
  if (emitFieldLineage()) {
    try {
      // here we cannot call context.flushRecord() since the WorkflowNodeState will need to record and store
      // the lineage information
      FieldLineageInfo info = new FieldLineageInfo(context.getFieldLineageOperations());
      fieldLineageWriter.write(mapReduceRunId, info);
    } catch (Throwable t) {
      LOG.warn("Failed to emit the field lineage operations for MapReduce {}", mapReduceRunId, t);
    }
  }
}
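The transaction-control selection is the same ternary in both destroy() methods: use the transaction control declared on the program's destroy() method when the program implements ProgramLifecycle, otherwise fall back to the runtime default. A sketch of factoring that choice into a shared helper, keeping the Transactions.getTransactionControl call exactly as shown above; the helper name is hypothetical:

// Sketch only: shared "pick transaction control for destroy()" logic extracted from the
// Spark and MapReduce destroy() methods above.
private <T> TransactionControl destroyTxControl(TransactionControl defaultTxControl,
                                                Class<T> programInterface, T program) {
  return program instanceof ProgramLifecycle
    ? Transactions.getTransactionControl(defaultTxControl, programInterface, program, "destroy")
    : defaultTxControl;
}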
use of io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo in project cdap by cdapio.
the class LineageOperationProcessorTest method testAnotherSimplePipeline.
@Test
public void testAnotherSimplePipeline() {
  // n1-->n2-->n3-->n4
  // n1 => read: file -> (offset, body)
  // n2 => parse: (body) -> (first_name, last_name)
  // n3 => concat: (first_name, last_name) -> (name)
  // n4 => write: (offset, name) -> another_file
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "some read", EndPoint.of("ns", "file1"), "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"),
      "first_name", "last_name"));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("concat", "concatinating the fields",
      Arrays.asList("first_name", "last_name"), "name"));
  stageOperations.put("n3", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write_op", "writing data to file", EndPoint.of("myns", "another_file"),
      Arrays.asList("offset", "name")));
  stageOperations.put("n4", fieldOperations);
  LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();
  ReadOperation read = new ReadOperation("n1.read", "some read", EndPoint.of("ns", "file1"), "offset", "body");
  TransformOperation parse = new TransformOperation("n2.parse", "parsing body",
      Collections.singletonList(InputField.of("n1.read", "body")), "first_name", "last_name");
  TransformOperation concat = new TransformOperation("n3.concat", "concatinating the fields",
      Arrays.asList(InputField.of("n2.parse", "first_name"), InputField.of("n2.parse", "last_name")), "name");
  WriteOperation write = new WriteOperation("n4.write_op", "writing data to file", EndPoint.of("myns", "another_file"),
      Arrays.asList(InputField.of("n1.read", "offset"), InputField.of("n3.concat", "name")));
  List<Operation> expectedOperations = new ArrayList<>();
  expectedOperations.add(parse);
  expectedOperations.add(concat);
  expectedOperations.add(read);
  expectedOperations.add(write);
  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
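Note that expectedOperations is assembled in a different order (parse, concat, read, write) than the pipeline runs, so the assertion relies on FieldLineageInfo comparing its operations as an unordered collection rather than positionally. A minimal check of that assumption, reusing the read, parse, concat, and write operations defined in the test above:

// Sketch only: assumes FieldLineageInfo equality ignores the order in which the
// operations are supplied; read, parse, concat and write are the variables built in the test above.
FieldLineageInfo inOrder = new FieldLineageInfo(Arrays.asList(read, parse, concat, write));
FieldLineageInfo shuffled = new FieldLineageInfo(Arrays.asList(write, concat, parse, read));
Assert.assertEquals(inOrder, shuffled);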
use of io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo in project cdap by cdapio.
the class LineageOperationProcessorTest method testDirectMerge.
@Test
public void testDirectMerge() {
  // n1 ----> n3
  //    \   /
  //     \ /
  //      X
  //     / \
  //    /   \
  // n2 ----> n4
  //
  // n1 => pRead: personFile -> (offset, body)
  // n2 => hRead: hrFile -> (offset, body)
  // n1.n2.merge => (pRead.offset, pRead.body, hRead.offset, hRead.body) -> (offset, body)
  // n3 => write1: (n1.n2.merge.offset, n1.n2.merge.body) -> testStore
  // n4 => write2: (n1.n2.merge.offset, n1.n2.merge.body) -> prodStore
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n1", "n4"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n2", "n4"));
  EndPoint pEndPoint = EndPoint.of("ns", "personFile");
  EndPoint hEndPoint = EndPoint.of("ns", "hrFile");
  EndPoint testEndPoint = EndPoint.of("ns", "testStore");
  EndPoint prodEndPoint = EndPoint.of("ns", "prodStore");
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("pRead", "Reading from person file", pEndPoint, "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("hRead", "Reading from hr file", hEndPoint, "offset", "body"));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write1", "Writing to test store", testEndPoint, "offset", "body"));
  stageOperations.put("n3", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write2", "Writing to prod store", prodEndPoint, "offset", "body"));
  stageOperations.put("n4", fieldOperations);
  LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();
  Set<Operation> expectedOperations = new HashSet<>();
  ReadOperation pRead = new ReadOperation("n1.pRead", "Reading from person file", pEndPoint, "offset", "body");
  expectedOperations.add(pRead);
  ReadOperation hRead = new ReadOperation("n2.hRead", "Reading from hr file", hEndPoint, "offset", "body");
  expectedOperations.add(hRead);
  // implicit merge operations should be added by the app
  TransformOperation merge1 = new TransformOperation("n1,n2.merge.offset", "Merged stages: n1,n2",
      Arrays.asList(InputField.of("n1.pRead", "offset"), InputField.of("n2.hRead", "offset")), "offset");
  TransformOperation merge2 = new TransformOperation("n1,n2.merge.body", "Merged stages: n1,n2",
      Arrays.asList(InputField.of("n1.pRead", "body"), InputField.of("n2.hRead", "body")), "body");
  expectedOperations.add(merge1);
  expectedOperations.add(merge2);
  WriteOperation write1 = new WriteOperation("n3.write1", "Writing to test store", testEndPoint,
      Arrays.asList(InputField.of("n1,n2.merge.offset", "offset"), InputField.of("n1,n2.merge.body", "body")));
  expectedOperations.add(write1);
  WriteOperation write2 = new WriteOperation("n4.write2", "Writing to prod store", prodEndPoint,
      Arrays.asList(InputField.of("n1,n2.merge.offset", "offset"), InputField.of("n1,n2.merge.body", "body")));
  expectedOperations.add(write2);
  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
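The two merge operations are synthesized rather than declared by any stage: one per field shared by the merged branches, named "<comma-separated parent stages>.merge.<field>". Extrapolating that naming convention from this test (not verified against the implementation), an additional shared field would be expected to yield:

// Sketch only: hypothetical merge operation for an additional shared "timestamp" field,
// following the "n1,n2.merge.<field>" pattern of merge1 and merge2 above.
TransformOperation mergeTimestamp = new TransformOperation(
    "n1,n2.merge.timestamp", "Merged stages: n1,n2",
    Arrays.asList(InputField.of("n1.pRead", "timestamp"), InputField.of("n2.hRead", "timestamp")),
    "timestamp");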