
Example 31 with FieldReadOperation

use of io.cdap.cdap.etl.api.lineage.field.FieldReadOperation in project cdap by cdapio.

the class StageOperationsValidator method validate.

/**
 * Validate the inputs and outputs for a stage.
 */
void validate() {
    // Fields input to the stage are valid
    Set<String> validInputsSoFar = new HashSet<>(stageInputs);
    // Map of field name to the list of operations that generated that field.
    // Map will contain fields that have not yet been consumed as an input by a later operation.
    Map<String, List<FieldOperation>> unusedOutputs = new HashMap<>();
    // Map of field name to the list of operations that generated that field.
    // Map will contain fields that are redundant.
    // For example, if the following operations are recorded by the stage:
    // 
    // OP1: [a, b] -> [d]
    // OP2: [b] -> [d]
    // OP3: [d] -> [e]
    // 
    // then output d of OP1 is redundant, since OP3 will always read the d generated by OP2,
    // so the following map will contain d -> [OP1]
    Map<String, List<FieldOperation>> redundantOutputs = new HashMap<>();
    for (FieldOperation pipelineOperation : operations) {
        switch(pipelineOperation.getType()) {
            case READ:
                FieldReadOperation read = (FieldReadOperation) pipelineOperation;
                updateInvalidOutputs(Collections.emptyList(), unusedOutputs, redundantOutputs);
                validInputsSoFar.addAll(read.getOutputFields());
                for (String field : read.getOutputFields()) {
                    List<FieldOperation> origins = unusedOutputs.computeIfAbsent(field, k -> new ArrayList<>());
                    origins.add(pipelineOperation);
                }
                break;
            case TRANSFORM:
                FieldTransformOperation transform = (FieldTransformOperation) pipelineOperation;
                // transforms with no inputs or no outputs have no effect; skip them
                if (transform.getInputFields().isEmpty() || transform.getOutputFields().isEmpty()) {
                    continue;
                }
                validateInputs(pipelineOperation.getName(), transform.getInputFields(), validInputsSoFar);
                updateInvalidOutputs(transform.getInputFields(), unusedOutputs, redundantOutputs);
                validInputsSoFar.addAll(transform.getOutputFields());
                for (String field : transform.getOutputFields()) {
                    List<FieldOperation> origins = unusedOutputs.computeIfAbsent(field, k -> new ArrayList<>());
                    origins.add(pipelineOperation);
                }
                break;
            case WRITE:
                FieldWriteOperation write = (FieldWriteOperation) pipelineOperation;
                validateInputs(pipelineOperation.getName(), write.getInputFields(), validInputsSoFar);
                updateInvalidOutputs(write.getInputFields(), unusedOutputs, redundantOutputs);
                break;
        }
    }
    // At this point the unusedOutputs map should only contain, as keys, fields that are not used
    // as an input by any operation in the stage. However, those fields can still be part of the output schema.
    // We want to remove such keys that are part of the output schema as well.
    // We cannot simply do "unusedOutputs.removeAll(stageInputOutput.getOutputs())".
    // Consider the following case, assuming d is part of the output schema:
    // OP1: [a, b] -> [d]
    // OP2: [b] -> [d]
    // Here output d from OP1 is redundant, since the d in the output schema will always come from OP2.
    // However, d will not be in the redundantOutputs map, as we only record redundant fields if they
    // appear as the input of some operation. Such redundancy should cause validation checks to fail.
    Iterator<Map.Entry<String, List<FieldOperation>>> iterator = unusedOutputs.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<String, List<FieldOperation>> next = iterator.next();
        String field = next.getKey();
        List<FieldOperation> origins = next.getValue();
        if (origins.size() > 1) {
            List<FieldOperation> operations = redundantOutputs.computeIfAbsent(field, k -> new ArrayList<>());
            // except the last origin, all others are redundant
            operations.addAll(origins.subList(0, origins.size() - 1));
        }
        // Whether or not this field is in the output schema of the stage, it is valid.
        // For example, a Joiner joins two datasets D1 and D2 on the join keys D1.K1 and D2.K2, and
        // decides to drop the join keys from the output schema. The operation
        // [D1.K1, D2.K2] -> [K1, K2] is valid even though K1, K2 are not in the output schema.
        iterator.remove();
    }
    this.invalidOutputs.putAll(unusedOutputs.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().stream().map(FieldOperation::getName).collect(Collectors.toList()))));
    this.redundantOutputs.putAll(redundantOutputs.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().stream().map(FieldOperation::getName).collect(Collectors.toList()))));
}
Also used : HashMap(java.util.HashMap) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) ArrayList(java.util.ArrayList) List(java.util.List) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) Map(java.util.Map) HashSet(java.util.HashSet)
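To make the OP1/OP2/OP3 redundancy example from the comments above concrete, here is a minimal sketch that builds those three operations with the same FieldTransformOperation constructor used throughout these examples; the operation names and the fields a, b, d, e are illustrative only, not taken from any real stage.

import io.cdap.cdap.etl.api.lineage.field.FieldOperation;
import io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustrative only: the operations described in the redundancy comment of validate().
List<FieldOperation> ops = new ArrayList<>();
// OP1: [a, b] -> [d]
ops.add(new FieldTransformOperation("OP1", "derive d from a and b", Arrays.asList("a", "b"), "d"));
// OP2: [b] -> [d], so the d produced by OP1 is never read again
ops.add(new FieldTransformOperation("OP2", "derive d from b", Collections.singletonList("b"), "d"));
// OP3: [d] -> [e], always consuming the d produced by OP2, the most recent producer
ops.add(new FieldTransformOperation("OP3", "derive e from d", Collections.singletonList("d"), "e"));
// Over a stage whose operations look like this, validate() would record redundantOutputs as d -> [OP1].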

Example 32 with FieldReadOperation

use of io.cdap.cdap.etl.api.lineage.field.FieldReadOperation in project cdap by cdapio.

the class LineageOperationProcessorTest method testMergeOperationsNonRepeat.

@Test
public void testMergeOperationsNonRepeat() {
    // n1 -> n3 ----
    //              |---- n5
    // n2 -> n4 ----
    // operations (n1) -> (id, name)
    //            (n2) -> (body, offset)
    //            (n3.id) -> id
    //            (n3.name) -> name
    //            (n4.body) -> (id, name)
    //            (n5) -> (id, name)
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n3"));
    connections.add(new Connection("n3", "n5"));
    connections.add(new Connection("n2", "n4"));
    connections.add(new Connection("n4", "n5"));
    EndPoint src1 = EndPoint.of("default", "n1");
    EndPoint src2 = EndPoint.of("default", "n2");
    EndPoint dest = EndPoint.of("default", "n5");
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("read1", "read description", src1, "id", "name")));
    stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("read2", "read description", src2, "body", "offset")));
    List<FieldOperation> n3Operations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
    n3Operations.add(new FieldTransformOperation("identity1", "identity", Collections.singletonList("id"), "id"));
    n3Operations.add(new FieldTransformOperation("identity2", "identity", Collections.singletonList("name"), "name"));
    stageOperations.put("n4", Collections.singletonList(new FieldTransformOperation("generate", "generate", Collections.singletonList("body"), "id", "name")));
    stageOperations.put("n5", Collections.singletonList(new FieldWriteOperation("write", "write", dest, "id", "name")));
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
    Set<Operation> expectedOperations = new HashSet<>();
    expectedOperations.add(new ReadOperation("n1.read1", "read description", src1, "id", "name"));
    expectedOperations.add(new ReadOperation("n2.read2", "read description", src2, "body", "offset"));
    expectedOperations.add(new TransformOperation("n3.identity1", "identity", Collections.singletonList(InputField.of("n1.read1", "id")), "id"));
    expectedOperations.add(new TransformOperation("n3.identity2", "identity", Collections.singletonList(InputField.of("n1.read1", "name")), "name"));
    expectedOperations.add(new TransformOperation("n4.generate", "generate", Collections.singletonList(InputField.of("n2.read2", "body")), "id", "name"));
    expectedOperations.add(new TransformOperation("n3,n4.merge.id", "Merged stages: n3,n4", Arrays.asList(InputField.of("n3.identity1", "id"), InputField.of("n4.generate", "id")), "id"));
    expectedOperations.add(new TransformOperation("n3,n4.merge.name", "Merged stages: n3,n4", Arrays.asList(InputField.of("n3.identity2", "name"), InputField.of("n4.generate", "name")), "name"));
    expectedOperations.add(new TransformOperation("n3,n4.merge.body", "Merged stages: n3,n4", Collections.singletonList(InputField.of("n2.read2", "body")), "body"));
    expectedOperations.add(new TransformOperation("n3,n4.merge.offset", "Merged stages: n3,n4", Collections.singletonList(InputField.of("n2.read2", "offset")), "offset"));
    expectedOperations.add(new WriteOperation("n5.write", "write", dest, Arrays.asList(InputField.of("n3,n4.merge.id", "id"), InputField.of("n3,n4.merge.name", "name"))));
    Set<Operation> process = processor.process();
    Assert.assertEquals(expectedOperations, process);
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) HashSet(java.util.HashSet) Test(org.junit.Test)
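If it helps to see which of the processed operations were synthesized by the merge step (the "n3,n4.merge.*" transforms asserted above), here is a small sketch using the getName() and getType() accessors from the io.cdap.cdap.api.lineage.field classes already imported in this test; the ".merge." substring filter mirrors the expected names in this particular test rather than a documented naming contract.

import io.cdap.cdap.api.lineage.field.Operation;
import io.cdap.cdap.api.lineage.field.OperationType;
import java.util.Set;
import java.util.stream.Collectors;

// Returns the names of the TRANSFORM operations that the processor synthesized while
// merging the parallel branches into n5 (the "n3,n4.merge.<field>" entries above).
static Set<String> mergeTransformNames(Set<Operation> processed) {
  return processed.stream()
      .filter(op -> op.getType() == OperationType.TRANSFORM)
      .map(Operation::getName)
      .filter(name -> name.contains(".merge."))
      .collect(Collectors.toSet());
}
// For the pipeline above, mergeTransformNames(process) would contain:
// "n3,n4.merge.id", "n3,n4.merge.name", "n3,n4.merge.body", "n3,n4.merge.offset"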

Example 33 with FieldReadOperation

use of io.cdap.cdap.etl.api.lineage.field.FieldReadOperation in project cdap by cdapio.

the class LineageOperationProcessorTest method testSameKeyAndRenameJoin.

@Test
public void testSameKeyAndRenameJoin() {
    // n1(id(key), swap1, n1same) ----
    //                               |
    //                             JOIN ---> (id, new_id, swap1, swap2, n1same, n2same)
    //                               |
    // n2(id(key), swap2, n2same) ----
    // operations (n1.id, n2.id) -> id
    //            (n2.id) -> new_id
    //            (n1.swap1) -> swap2
    //            (n2.swap2) -> swap1
    //            (n1.n1same) -> n1same
    //            (n2.n2same) -> n2same
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n3"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n3", "n4"));
    EndPoint src1 = EndPoint.of("default", "n1");
    EndPoint src2 = EndPoint.of("default", "n2");
    EndPoint dest = EndPoint.of("default", "n4");
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("readSrc1", "read description", src1, "id", "swap1", "n1same")));
    stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("readSrc2", "read description", src2, "id", "swap2", "n2same")));
    List<FieldOperation> joinOperations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
    joinOperations.add(new FieldTransformOperation("JoinKey", "Join Key", Arrays.asList("n1.id", "n2.id"), "id"));
    joinOperations.add(new FieldTransformOperation("RenameN2", "rename", Collections.singletonList("n2.id"), "new_id"));
    joinOperations.add(new FieldTransformOperation("swap1", "swap", Collections.singletonList("n1.swap1"), "swap2"));
    joinOperations.add(new FieldTransformOperation("swap2", "swap", Collections.singletonList("n2.swap2"), "swap1"));
    joinOperations.add(new FieldTransformOperation("unchange1", "unchange", Collections.singletonList("n1.n1same"), "n1same"));
    joinOperations.add(new FieldTransformOperation("unchange2", "unchange", Collections.singletonList("n2.n2same"), "n2same"));
    stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", dest, "id", "new_id", "swap1", "swap2", "n1same", "n2same")));
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
    Set<Operation> expectedOperations = new HashSet<>();
    expectedOperations.add(new ReadOperation("n1.readSrc1", "read description", src1, "id", "swap1", "n1same"));
    expectedOperations.add(new ReadOperation("n2.readSrc2", "read description", src2, "id", "swap2", "n2same"));
    expectedOperations.add(new TransformOperation("n3.JoinKey", "Join Key", Arrays.asList(InputField.of("n1.readSrc1", "id"), InputField.of("n2.readSrc2", "id")), "id"));
    expectedOperations.add(new TransformOperation("n3.RenameN2", "rename", Collections.singletonList(InputField.of("n2.readSrc2", "id")), "new_id"));
    expectedOperations.add(new TransformOperation("n3.swap1", "swap", Collections.singletonList(InputField.of("n1.readSrc1", "swap1")), "swap2"));
    expectedOperations.add(new TransformOperation("n3.swap2", "swap", Collections.singletonList(InputField.of("n2.readSrc2", "swap2")), "swap1"));
    expectedOperations.add(new TransformOperation("n3.unchange1", "unchange", Collections.singletonList(InputField.of("n1.readSrc1", "n1same")), "n1same"));
    expectedOperations.add(new TransformOperation("n3.unchange2", "unchange", Collections.singletonList(InputField.of("n2.readSrc2", "n2same")), "n2same"));
    expectedOperations.add(new WriteOperation("n4.Write", "write description", dest, Arrays.asList(InputField.of("n3.JoinKey", "id"), InputField.of("n3.RenameN2", "new_id"), InputField.of("n3.swap2", "swap1"), InputField.of("n3.swap1", "swap2"), InputField.of("n3.unchange1", "n1same"), InputField.of("n3.unchange2", "n2same"))));
    Assert.assertEquals(expectedOperations, processor.process());
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 34 with FieldReadOperation

use of io.cdap.cdap.etl.api.lineage.field.FieldReadOperation in project cdap by cdapio.

the class LineageOperationProcessorTest method testSourceWithMultipleDestinations.

@Test
public void testSourceWithMultipleDestinations() {
    //                  |-----> n3
    // n1 ---> n2 ------|
    //                  |-----> n4
    // n1 => read: file -> (offset, body)
    // n2 => parse: body -> (id, name, address, zip)
    // n3 => write1: (parse.id, parse.name) -> info
    // n4 => write2: (parse.address, parse.zip) -> location
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n2"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n2", "n4"));
    EndPoint source = EndPoint.of("ns", "file");
    EndPoint info = EndPoint.of("ns", "info");
    EndPoint location = EndPoint.of("ns", "location");
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    List<FieldOperation> fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldReadOperation("read", "reading from file", source, "offset", "body"));
    stageOperations.put("n1", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"), "id", "name", "address", "zip"));
    stageOperations.put("n2", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldWriteOperation("infoWrite", "writing info", info, "id", "name"));
    stageOperations.put("n3", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldWriteOperation("locationWrite", "writing location", location, "address", "zip"));
    stageOperations.put("n4", fieldOperations);
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
    Set<Operation> processedOperations = processor.process();
    Set<Operation> expectedOperations = new HashSet<>();
    ReadOperation read = new ReadOperation("n1.read", "reading from file", source, "offset", "body");
    expectedOperations.add(read);
    TransformOperation parse = new TransformOperation("n2.parse", "parsing body", Collections.singletonList(InputField.of("n1.read", "body")), "id", "name", "address", "zip");
    expectedOperations.add(parse);
    WriteOperation infoWrite = new WriteOperation("n3.infoWrite", "writing info", info, InputField.of("n2.parse", "id"), InputField.of("n2.parse", "name"));
    expectedOperations.add(infoWrite);
    WriteOperation locationWrite = new WriteOperation("n4.locationWrite", "writing location", location, InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip"));
    expectedOperations.add(locationWrite);
    Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) ArrayList(java.util.ArrayList) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FieldLineageInfo(io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 35 with FieldReadOperation

use of io.cdap.cdap.etl.api.lineage.field.FieldReadOperation in project cdap by cdapio.

the class LineageOperationProcessorTest method testSimplePipeline.

@Test
public void testSimplePipeline() {
    // n1-->n2-->n3
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n2"));
    connections.add(new Connection("n2", "n3"));
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    List<FieldOperation> fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
    stageOperations.put("n1", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"), Arrays.asList("name", "address", "zip")));
    stageOperations.put("n2", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"), "name", "address", "zip"));
    stageOperations.put("n3", fieldOperations);
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
    Set<Operation> processedOperations = processor.process();
    Set<Operation> expected = new HashSet<>();
    expected.add(new ReadOperation("n1.read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
    expected.add(new TransformOperation("n2.parse", "parsing data", Collections.singletonList(InputField.of("n1.read", "body")), "name", "address", "zip"));
    expected.add(new WriteOperation("n3.write", "writing data", EndPoint.of("default", "file2"), InputField.of("n2.parse", "name"), InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip")));
    Assert.assertEquals(new FieldLineageInfo(expected), new FieldLineageInfo(processedOperations));
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) ArrayList(java.util.ArrayList) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FieldLineageInfo(io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo) HashSet(java.util.HashSet) Test(org.junit.Test)
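For context beyond these processor tests, the per-stage FieldOperation lists are normally recorded by the pipeline plugins themselves at prepare time. The sketch below assumes a prepare-time context implementing the ETL LineageRecorder interface with a record(List<FieldOperation>) method; treat that interface name and signature as an assumption rather than a verified call.

import io.cdap.cdap.api.lineage.field.EndPoint;
import io.cdap.cdap.etl.api.lineage.field.FieldOperation;
import io.cdap.cdap.etl.api.lineage.field.FieldReadOperation;
import io.cdap.cdap.etl.api.lineage.field.LineageRecorder;
import java.util.Collections;
import java.util.List;

// Hypothetical hook inside a source plugin's prepare step: record the same read operation
// that the "n1" stage of the test above reports. The LineageRecorder type and its
// record(List<FieldOperation>) signature are assumptions, not verified here.
void recordReadLineage(LineageRecorder context) {
  List<FieldOperation> ops = Collections.singletonList(
      new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
  context.record(ops);
}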

Aggregations

FieldReadOperation (io.cdap.cdap.etl.api.lineage.field.FieldReadOperation): 39
FieldOperation (io.cdap.cdap.etl.api.lineage.field.FieldOperation): 37
FieldTransformOperation (io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation): 32
FieldWriteOperation (io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation): 32
HashMap (java.util.HashMap): 31
List (java.util.List): 30
Test (org.junit.Test): 30
ImmutableList (com.google.common.collect.ImmutableList): 28
Operation (io.cdap.cdap.api.lineage.field.Operation): 28
ReadOperation (io.cdap.cdap.api.lineage.field.ReadOperation): 28
TransformOperation (io.cdap.cdap.api.lineage.field.TransformOperation): 28
WriteOperation (io.cdap.cdap.api.lineage.field.WriteOperation): 28
ArrayList (java.util.ArrayList): 28
Connection (io.cdap.cdap.etl.proto.Connection): 26
HashSet (java.util.HashSet): 26
EndPoint (io.cdap.cdap.api.lineage.field.EndPoint): 20
FieldLineageInfo (io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo): 8
Schema (io.cdap.cdap.api.data.schema.Schema): 7
InputFormatProvider (io.cdap.cdap.api.data.batch.InputFormatProvider): 2
DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties): 2