Example 31 with Connection

use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.

the class SmartWorkflow method configure.

@Override
protected void configure() {
    setName(NAME);
    setDescription("Data Pipeline Workflow");
    // If plugins were registered only at the application level, CDAP would not be able to fail the run early.
    try {
        spec = new BatchPipelineSpecGenerator(applicationConfigurer.getDeployedNamespace(), getConfigurer(), applicationConfigurer.getRuntimeConfigurer(), ImmutableSet.of(BatchSource.PLUGIN_TYPE), ImmutableSet.of(BatchSink.PLUGIN_TYPE, SparkSink.PLUGIN_TYPE, AlertPublisher.PLUGIN_TYPE), config.getEngine(), getConfigurer()).generateSpec(config);
    } catch (ValidationException e) {
        throw new IllegalArgumentException(String.format("Failed to configure pipeline: %s", e.getFailures().isEmpty() ? e.getMessage() : e.getFailures().iterator().next().getFullMessage()), e);
    }
    // prefix the connection name with "_" so it will not conflict with the system tag we add
    Set<String> connectionsUsed = spec.getConnectionsUsed().stream().map(s -> "_" + s).collect(Collectors.toSet());
    applicationConfigurer.emitMetadata(new Metadata(Collections.emptyMap(), connectionsUsed), MetadataScope.SYSTEM);
    stageSpecs = new HashMap<>();
    useSpark = config.getEngine() == Engine.SPARK;
    for (StageSpec stageSpec : spec.getStages()) {
        stageSpecs.put(stageSpec.getName(), stageSpec);
        String pluginType = stageSpec.getPlugin().getType();
        if (SparkCompute.PLUGIN_TYPE.equals(pluginType) || SparkSink.PLUGIN_TYPE.equals(pluginType)) {
            useSpark = true;
        }
    }
    plan = createPlan();
    WorkflowProgramAdder programAdder = new TrunkProgramAdder(getConfigurer());
    // single phase, just add the program directly
    if (plan.getPhases().size() == 1) {
        addProgram(plan.getPhases().keySet().iterator().next(), programAdder);
        setWorkflowProperties();
        return;
    }
    // Dag classes don't allow a 'dag' without connections
    if (plan.getPhaseConnections().isEmpty()) {
        WorkflowProgramAdder fork = programAdder.fork();
        for (String phaseName : plan.getPhases().keySet()) {
            addProgram(phaseName, fork);
        }
        fork.join();
        setWorkflowProperties();
        return;
    }
    /*
       ControlDag is used to flatten the dag that represents connections between phases.
       Connections between phases represent a happens-before relationship, not the flow of data.
       As such, phases can be shifted around as long as every happens-before relationship is maintained.
       The exception is condition phases. Connection from a condition to another phase must be maintained as is.

       Flattening a ControlDag will transform a dag into a special fork-join dag by moving phases around.
       We therefore cannot blindly flatten the phase connections.
       However, we validated earlier that condition outputs have a special property, where every stage following a
       condition can only have a single input. This means we will never need to flatten anything after the first
       set of conditions. We will only have to flatten what comes before the first set of conditions.
     */
    dag = new ControlDag(plan.getPhaseConnections());
    boolean dummyNodeAdded = false;
    Map<String, ConditionBranches> conditionBranches = plan.getConditionPhaseBranches();
    if (conditionBranches.isEmpty()) {
        // after flattening, there is guaranteed to be just one source
        dag.flatten();
    } else if (!conditionBranches.keySet().containsAll(dag.getSources())) {
        // Continue only if the condition node is not the source of the dag, otherwise dag is already in the
        // required form
        Set<String> conditions = conditionBranches.keySet();
        // flatten only the part of the dag starting from sources and ending in conditions/sinks.
        Set<String> dagNodes = dag.accessibleFrom(dag.getSources(), Sets.union(dag.getSinks(), conditions));
        Set<String> dagNodesWithoutCondition = Sets.difference(dagNodes, conditions);
        Set<Connection> connections = new HashSet<>();
        Deque<String> bfs = new LinkedList<>();
        Set<String> sinks = new HashSet<>();
        // If it's a single phase without a condition, there is no need to flatten
        if (dagNodesWithoutCondition.size() < 2) {
            sinks.addAll(dagNodesWithoutCondition);
        } else {
            /*
           Create a subdag from dagNodesWithoutCondition.
           There are a couple situations where this is not immediately possible. For example:

             source1 --|
                       |--> condition -- ...
             source2 --|

           Here, dagNodesWithoutCondition = [source1, source2], which is an invalid dag. Similarly:

             source --> condition -- ...

           Here, dagNodesWithoutCondition = [source], which is also invalid. In order to ensure that we have a
           valid dag, we just insert a dummy node as the first node in the subdag, adding a connection from the
           dummy node to all the sources.
         */
            Dag subDag;
            try {
                subDag = dag.createSubDag(dagNodesWithoutCondition);
            } catch (IllegalArgumentException | DisjointConnectionsException e) {
                // DisjointConnectionsException thrown when islands are created from the dagNodesWithoutCondition
                // IllegalArgumentException thrown when connections are empty
                // In both cases we need to add dummy node and create connected Dag
                String dummyNode = "dummy";
                dummyNodeAdded = true;
                Set<Connection> subDagConnections = new HashSet<>();
                for (String source : dag.getSources()) {
                    subDagConnections.add(new Connection(dummyNode, source));
                }
                Deque<String> subDagBFS = new LinkedList<>();
                subDagBFS.addAll(dag.getSources());
                while (subDagBFS.peek() != null) {
                    String node = subDagBFS.poll();
                    for (String output : dag.getNodeOutputs(node)) {
                        if (dagNodesWithoutCondition.contains(output)) {
                            subDagConnections.add(new Connection(node, output));
                            subDagBFS.add(output);
                        }
                    }
                }
                subDag = new Dag(subDagConnections);
            }
            ControlDag cdag = new ControlDag(subDag);
            cdag.flatten();
            // Add all connections from cdag
            bfs.addAll(cdag.getSources());
            while (bfs.peek() != null) {
                String node = bfs.poll();
                for (String output : cdag.getNodeOutputs(node)) {
                    connections.add(new Connection(node, output));
                    bfs.add(output);
                }
            }
            sinks.addAll(cdag.getSinks());
        }
        // Add back the existing condition nodes and corresponding conditions
        Set<String> conditionsFromDag = Sets.intersection(dagNodes, conditions);
        for (String condition : conditionsFromDag) {
            connections.add(new Connection(sinks.iterator().next(), condition));
        }
        bfs.addAll(Sets.intersection(dagNodes, conditions));
        while (bfs.peek() != null) {
            String node = bfs.poll();
            ConditionBranches branches = conditionBranches.get(node);
            if (branches == null) {
                // not a condition node. add outputs
                for (String output : dag.getNodeOutputs(node)) {
                    connections.add(new Connection(node, output));
                    bfs.add(output);
                }
            } else {
                // condition node
                for (Boolean condition : Arrays.asList(true, false)) {
                    String phase = condition ? branches.getTrueOutput() : branches.getFalseOutput();
                    if (phase == null) {
                        continue;
                    }
                    connections.add(new Connection(node, phase, condition));
                    bfs.add(phase);
                }
            }
        }
        dag = new ControlDag(connections);
    }
    if (dummyNodeAdded) {
        WorkflowProgramAdder fork = programAdder.fork();
        String dummyNode = dag.getSources().iterator().next();
        // need to make sure we don't call also() if this is the final branch
        Iterator<String> outputIter = dag.getNodeOutputs(dummyNode).iterator();
        addBranchPrograms(outputIter.next(), fork, false);
        while (outputIter.hasNext()) {
            fork = fork.also();
            addBranchPrograms(outputIter.next(), fork, !outputIter.hasNext());
        }
    } else {
        String start = dag.getSources().iterator().next();
        addPrograms(start, programAdder);
    }
    setWorkflowProperties();
}
Also used : AlertPublisher(io.cdap.cdap.etl.api.AlertPublisher) BatchSource(io.cdap.cdap.etl.api.batch.BatchSource) Engine(io.cdap.cdap.etl.api.Engine) ConnectorSource(io.cdap.cdap.etl.batch.connector.ConnectorSource) Arrays(java.util.Arrays) TypeToken(com.google.gson.reflect.TypeToken) MultiConnectorSource(io.cdap.cdap.etl.batch.connector.MultiConnectorSource) LoggerFactory(org.slf4j.LoggerFactory) AbstractWorkflow(io.cdap.cdap.api.workflow.AbstractWorkflow) SparkSink(io.cdap.cdap.etl.api.batch.SparkSink) GsonBuilder(com.google.gson.GsonBuilder) PipelineAction(io.cdap.cdap.etl.batch.customaction.PipelineAction) Constants(io.cdap.cdap.etl.common.Constants) FieldOperationTypeAdapter(io.cdap.cdap.etl.common.FieldOperationTypeAdapter) WorkflowContext(io.cdap.cdap.api.workflow.WorkflowContext) Gson(com.google.gson.Gson) StageMetrics(io.cdap.cdap.etl.api.StageMetrics) Map(java.util.Map) Connection(io.cdap.cdap.etl.proto.Connection) ProgramStatus(io.cdap.cdap.api.ProgramStatus) Condition(io.cdap.cdap.etl.api.condition.Condition) TriggeringPropertyMapping(io.cdap.cdap.etl.proto.v2.TriggeringPropertyMapping) Action(io.cdap.cdap.etl.api.action.Action) BatchJoiner(io.cdap.cdap.etl.api.batch.BatchJoiner) ActionSpec(io.cdap.cdap.etl.batch.ActionSpec) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) ArgumentMapping(io.cdap.cdap.etl.proto.v2.ArgumentMapping) Metrics(io.cdap.cdap.api.metrics.Metrics) AlertPublisherContext(io.cdap.cdap.etl.api.AlertPublisherContext) PluginPropertyMapping(io.cdap.cdap.etl.proto.v2.PluginPropertyMapping) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) List(java.util.List) DefaultStageMetrics(io.cdap.cdap.etl.common.DefaultStageMetrics) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) Type(java.lang.reflect.Type) MetadataScope(io.cdap.cdap.api.metadata.MetadataScope) ETLSpark(io.cdap.cdap.etl.spark.batch.ETLSpark) FileSet(io.cdap.cdap.api.dataset.lib.FileSet) ApplicationConfigurer(io.cdap.cdap.api.app.ApplicationConfigurer) TrackedIterator(io.cdap.cdap.etl.common.TrackedIterator) ControlDag(io.cdap.cdap.etl.planner.ControlDag) WorkflowBackedActionContext(io.cdap.cdap.etl.batch.WorkflowBackedActionContext) TriggerInfo(io.cdap.cdap.api.schedule.TriggerInfo) Alert(io.cdap.cdap.etl.api.Alert) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) SplitterTransform(io.cdap.cdap.etl.api.SplitterTransform) SchemaTypeAdapter(io.cdap.cdap.internal.io.SchemaTypeAdapter) BatchPipelineSpecGenerator(io.cdap.cdap.etl.batch.BatchPipelineSpecGenerator) BatchPhaseSpec(io.cdap.cdap.etl.batch.BatchPhaseSpec) PipelinePluginContext(io.cdap.cdap.etl.common.plugin.PipelinePluginContext) HashMap(java.util.HashMap) AlertPublisherSink(io.cdap.cdap.etl.batch.connector.AlertPublisherSink) Deque(java.util.Deque) AlertReader(io.cdap.cdap.etl.batch.connector.AlertReader) ArrayList(java.util.ArrayList) DisjointConnectionsException(io.cdap.cdap.etl.planner.DisjointConnectionsException) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) WorkflowToken(io.cdap.cdap.api.workflow.WorkflowToken) LocationAwareMDCWrapperLogger(io.cdap.cdap.etl.common.LocationAwareMDCWrapperLogger) PluginContext(io.cdap.cdap.api.plugin.PluginContext) PipelinePlan(io.cdap.cdap.etl.planner.PipelinePlan) BatchActionContext(io.cdap.cdap.etl.api.batch.BatchActionContext) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) 
PipelinePlanner(io.cdap.cdap.etl.planner.PipelinePlanner) FieldLineageProcessor(io.cdap.cdap.etl.lineage.FieldLineageProcessor) TriggeringScheduleInfo(io.cdap.cdap.api.schedule.TriggeringScheduleInfo) PipelineCondition(io.cdap.cdap.etl.batch.condition.PipelineCondition) PipelineRuntime(io.cdap.cdap.etl.common.PipelineRuntime) LinkedList(java.util.LinkedList) Operation(io.cdap.cdap.api.lineage.field.Operation) ETLMapReduce(io.cdap.cdap.etl.batch.mapreduce.ETLMapReduce) Metadata(io.cdap.cdap.api.metadata.Metadata) Logger(org.slf4j.Logger) ValidationException(io.cdap.cdap.etl.api.validation.ValidationException) ConditionBranches(io.cdap.cdap.etl.planner.ConditionBranches) Iterator(java.util.Iterator) BatchAggregator(io.cdap.cdap.etl.api.batch.BatchAggregator) SparkCompute(io.cdap.cdap.etl.api.batch.SparkCompute) Dag(io.cdap.cdap.etl.planner.Dag) ETLBatchConfig(io.cdap.cdap.etl.proto.v2.ETLBatchConfig) ProgramStatusTriggerInfo(io.cdap.cdap.api.schedule.ProgramStatusTriggerInfo) Schema(io.cdap.cdap.api.data.schema.Schema) CloseableIterator(io.cdap.cdap.api.dataset.lib.CloseableIterator) PipelinePhase(io.cdap.cdap.etl.common.PipelinePhase) PostAction(io.cdap.cdap.etl.api.batch.PostAction) NodeValue(io.cdap.cdap.api.workflow.NodeValue) BatchSink(io.cdap.cdap.etl.api.batch.BatchSink) DefaultAlertPublisherContext(io.cdap.cdap.etl.common.DefaultAlertPublisherContext) BatchPipelineSpec(io.cdap.cdap.etl.batch.BatchPipelineSpec) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) Collections(java.util.Collections) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) ControlDag(io.cdap.cdap.etl.planner.ControlDag) ValidationException(io.cdap.cdap.etl.api.validation.ValidationException) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) FileSet(io.cdap.cdap.api.dataset.lib.FileSet) HashSet(java.util.HashSet) Metadata(io.cdap.cdap.api.metadata.Metadata) ConditionBranches(io.cdap.cdap.etl.planner.ConditionBranches) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) HashSet(java.util.HashSet) BatchPipelineSpecGenerator(io.cdap.cdap.etl.batch.BatchPipelineSpecGenerator) Connection(io.cdap.cdap.etl.proto.Connection) ControlDag(io.cdap.cdap.etl.planner.ControlDag) Dag(io.cdap.cdap.etl.planner.Dag) Deque(java.util.Deque) LinkedList(java.util.LinkedList)
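
The long comment blocks inside configure() above describe the planner machinery in prose: ControlDag.flatten() rewrites the happens-before connections between phases into a fork-join shape, the flattened dag is walked breadth-first to rebuild a Connection set, and a "dummy" source node is inserted whenever the phases without conditions would otherwise form an invalid or disjoint dag. Below is a minimal, stand-alone sketch of that flatten-then-walk step, assuming the cdap-etl planner classes shown in the imports above are on the classpath; the phase names are made up for illustration.

import io.cdap.cdap.etl.planner.ControlDag;
import io.cdap.cdap.etl.proto.Connection;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.Set;

public class ControlDagFlattenSketch {

    public static void main(String[] args) {
        // Hypothetical happens-before connections between phases: two independent
        // phases that must both complete before a final phase runs.
        Set<Connection> phaseConnections = new HashSet<>();
        phaseConnections.add(new Connection("phase1", "phase3"));
        phaseConnections.add(new Connection("phase2", "phase3"));

        ControlDag dag = new ControlDag(phaseConnections);
        // flatten() rearranges phases into a fork-join dag while preserving every
        // happens-before relationship; afterwards the dag has a single source.
        dag.flatten();

        // Walk the flattened dag the same way configure() does: breadth-first from
        // the sources, emitting one edge per connection.
        Set<String> visited = new HashSet<>();
        Deque<String> bfs = new ArrayDeque<>(dag.getSources());
        while (bfs.peek() != null) {
            String node = bfs.poll();
            if (!visited.add(node)) {
                // a join node can be reached from several branches; print its edges once
                continue;
            }
            for (String output : dag.getNodeOutputs(node)) {
                System.out.println(node + " -> " + output);
                bfs.add(output);
            }
        }
    }
}

In configure() the same walk collects the edges into a Set<Connection> (adding condition branches back with the three-argument Connection constructor) before building the final ControlDag that drives program addition.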

Example 32 with Connection

use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.

the class LineageOperationProcessorTest method testAnotherSimplePipeline.

@Test
public void testAnotherSimplePipeline() {
    // n1-->n2-->n3-->n4
    // n1 => read: file -> (offset, body)
    // n2 => parse: (body) -> (first_name, last_name)
    // n3 => concat: (first_name, last_name) -> (name)
    // n4 => write: (offset, name) -> another_file
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n2"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n3", "n4"));
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    List<FieldOperation> fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldReadOperation("read", "some read", EndPoint.of("ns", "file1"), "offset", "body"));
    stageOperations.put("n1", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"), "first_name", "last_name"));
    stageOperations.put("n2", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldTransformOperation("concat", "concatinating the fields", Arrays.asList("first_name", "last_name"), "name"));
    stageOperations.put("n3", fieldOperations);
    fieldOperations = new ArrayList<>();
    fieldOperations.add(new FieldWriteOperation("write_op", "writing data to file", EndPoint.of("myns", "another_file"), Arrays.asList("offset", "name")));
    stageOperations.put("n4", fieldOperations);
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
    Set<Operation> processedOperations = processor.process();
    ReadOperation read = new ReadOperation("n1.read", "some read", EndPoint.of("ns", "file1"), "offset", "body");
    TransformOperation parse = new TransformOperation("n2.parse", "parsing body", Collections.singletonList(InputField.of("n1.read", "body")), "first_name", "last_name");
    TransformOperation concat = new TransformOperation("n3.concat", "concatinating the fields", Arrays.asList(InputField.of("n2.parse", "first_name"), InputField.of("n2.parse", "last_name")), "name");
    WriteOperation write = new WriteOperation("n4.write_op", "writing data to file", EndPoint.of("myns", "another_file"), Arrays.asList(InputField.of("n1.read", "offset"), InputField.of("n3.concat", "name")));
    List<Operation> expectedOperations = new ArrayList<>();
    expectedOperations.add(parse);
    expectedOperations.add(concat);
    expectedOperations.add(read);
    expectedOperations.add(write);
    Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) ArrayList(java.util.ArrayList) ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldLineageInfo(io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo) HashSet(java.util.HashSet) Test(org.junit.Test)
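
The assertions above illustrate what LineageOperationsProcessor does with stage-level operations: each FieldOperation is qualified with its stage name ("read" in n1 becomes "n1.read"), and the inputs of downstream operations are rewritten as InputField references to the upstream operation that last produced the field. A stripped-down sketch of the same call follows; it uses made-up stage and dataset names, and it assumes LineageOperationsProcessor lives in the io.cdap.cdap.etl.lineage package (alongside FieldLineageProcessor from the earlier import list), which is not shown in this snippet.

import io.cdap.cdap.api.lineage.field.EndPoint;
import io.cdap.cdap.api.lineage.field.Operation;
import io.cdap.cdap.etl.api.lineage.field.FieldOperation;
import io.cdap.cdap.etl.api.lineage.field.FieldReadOperation;
import io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation;
// Assumed package, by analogy with FieldLineageProcessor in the import list above.
import io.cdap.cdap.etl.lineage.LineageOperationsProcessor;
import io.cdap.cdap.etl.proto.Connection;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class LineagePrefixSketch {

    public static void main(String[] args) {
        // Hypothetical two-stage pipeline: src --> sink
        Set<Connection> connections = new HashSet<>();
        connections.add(new Connection("src", "sink"));

        Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
        stageOperations.put("src", Collections.singletonList(
            new FieldReadOperation("read", "read the input", EndPoint.of("ns", "input_file"), "offset", "body")));
        stageOperations.put("sink", Collections.singletonList(
            new FieldWriteOperation("write", "write the output", EndPoint.of("ns", "output_file"), "offset", "body")));

        // The processor qualifies operation names with their stage ("src.read", "sink.write")
        // and rewires each input field to the upstream operation that produced it.
        LineageOperationsProcessor processor =
            new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
        Set<Operation> processed = processor.process();
        for (Operation op : processed) {
            System.out.println(op.getName());
        }
    }
}

Printing should show the stage-qualified names ("src.read", "sink.write"), mirroring the "n1.read" and "n4.write_op" names asserted in the test above.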

Example 33 with Connection

use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.

the class LineageOperationProcessorTest method testSimpleJoinOperation.

@Test
public void testSimpleJoinOperation() {
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n3"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n3", "n4"));
    EndPoint cEndPoint = EndPoint.of("default", "customer");
    EndPoint pEndPoint = EndPoint.of("default", "purchase");
    EndPoint cpEndPoint = EndPoint.of("default", "customer_purchase");
    // customer -> (id)------------
    // |
    // JOIN  ------->(id, customer_id)
    // |
    // purchase -> (customer_id)---
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id")));
    stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id")));
    stageOperations.put("n3", Collections.singletonList(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id"))));
    stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", cpEndPoint, "id", "customer_id")));
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
    Set<Operation> expectedOperations = new HashSet<>();
    expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id"));
    expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id"));
    expectedOperations.add(new TransformOperation("n3.Join", "Join Operation", Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")), "id", "customer_id"));
    expectedOperations.add(new WriteOperation("n4.Write", "write description", cpEndPoint, Arrays.asList(InputField.of("n3.Join", "id"), InputField.of("n3.Join", "customer_id"))));
    Assert.assertEquals(expectedOperations, processor.process());
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 34 with Connection

use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.

the class LineageOperationProcessorTest method testSimpleJoinWithAdditionalFields.

@Test
public void testSimpleJoinWithAdditionalFields() {
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n3"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n3", "n4"));
    EndPoint cEndPoint = EndPoint.of("default", "customer");
    EndPoint pEndPoint = EndPoint.of("default", "purchase");
    EndPoint cpEndPoint = EndPoint.of("default", "customer_purchase");
    // customer -> (id)------------
    // |
    // JOIN  ------->(id, customer_id)
    // |
    // purchase -> (customer_id)---
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id", "name")));
    stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id", "item")));
    List<FieldOperation> operationsFromJoin = new ArrayList<>();
    operationsFromJoin.add(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
    operationsFromJoin.add(new FieldTransformOperation("Identity name", "Identity Operation", Collections.singletonList("n1.name"), Collections.singletonList("name")));
    operationsFromJoin.add(new FieldTransformOperation("Identity item", "Identity Operation", Collections.singletonList("n2.item"), Collections.singletonList("item")));
    stageOperations.put("n3", operationsFromJoin);
    stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", cpEndPoint, "id", "name", "customer_id", "item")));
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
    Set<Operation> expectedOperations = new HashSet<>();
    expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id", "name"));
    expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id", "item"));
    expectedOperations.add(new TransformOperation("n3.Join", "Join Operation", Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")), "id", "customer_id"));
    expectedOperations.add(new TransformOperation("n3.Identity name", "Identity Operation", Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name"));
    expectedOperations.add(new TransformOperation("n3.Identity item", "Identity Operation", Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item"));
    expectedOperations.add(new WriteOperation("n4.Write", "write description", cpEndPoint, Arrays.asList(InputField.of("n3.Join", "id"), InputField.of("n3.Identity name", "name"), InputField.of("n3.Join", "customer_id"), InputField.of("n3.Identity item", "item"))));
    Set<Operation> processedOperations = processor.process();
    Assert.assertEquals(expectedOperations, processedOperations);
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) ArrayList(java.util.ArrayList) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 35 with Connection

use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.

the class LineageOperationProcessorTest method testSimpleJoinWithRenameJoinKeys.

@Test
public void testSimpleJoinWithRenameJoinKeys() {
    Set<Connection> connections = new HashSet<>();
    connections.add(new Connection("n1", "n3"));
    connections.add(new Connection("n2", "n3"));
    connections.add(new Connection("n3", "n4"));
    EndPoint cEndPoint = EndPoint.of("default", "customer");
    EndPoint pEndPoint = EndPoint.of("default", "purchase");
    EndPoint cpEndPoint = EndPoint.of("default", "customer_purchase");
    // customer -> (id, name)------------
    // |
    // JOIN  ------->(id_from_customer, id_from_purchase, name, item)
    // |
    // purchase -> (customer_id, item)---
    Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
    stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id", "name")));
    stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id", "item")));
    List<FieldOperation> operationsFromJoin = new ArrayList<>();
    operationsFromJoin.add(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id")));
    operationsFromJoin.add(new FieldTransformOperation("Rename id", "Rename id", Collections.singletonList("id"), "id_from_customer"));
    operationsFromJoin.add(new FieldTransformOperation("Rename customer_id", "Rename customer_id", Collections.singletonList("customer_id"), "id_from_purchase"));
    operationsFromJoin.add(new FieldTransformOperation("Identity name", "Identity Operation", Collections.singletonList("n1.name"), Collections.singletonList("name")));
    operationsFromJoin.add(new FieldTransformOperation("Identity item", "Identity Operation", Collections.singletonList("n2.item"), Collections.singletonList("item")));
    stageOperations.put("n3", operationsFromJoin);
    stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", cpEndPoint, "id_from_customer", "id_from_purchase", "name", "item")));
    LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
    Set<Operation> processedOperations = processor.process();
    Set<Operation> expectedOperations = new HashSet<>();
    expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id", "name"));
    expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id", "item"));
    expectedOperations.add(new TransformOperation("n3.Join", "Join Operation", Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")), "id", "customer_id"));
    expectedOperations.add(new TransformOperation("n3.Rename id", "Rename id", Collections.singletonList(InputField.of("n3.Join", "id")), "id_from_customer"));
    expectedOperations.add(new TransformOperation("n3.Rename customer_id", "Rename customer_id", Collections.singletonList(InputField.of("n3.Join", "customer_id")), "id_from_purchase"));
    expectedOperations.add(new TransformOperation("n3.Identity name", "Identity Operation", Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name"));
    expectedOperations.add(new TransformOperation("n3.Identity item", "Identity Operation", Collections.singletonList(InputField.of("n2.ReadPurchase", "item")), "item"));
    expectedOperations.add(new WriteOperation("n4.Write", "write description", cpEndPoint, Arrays.asList(InputField.of("n3.Rename id", "id_from_customer"), InputField.of("n3.Rename customer_id", "id_from_purchase"), InputField.of("n3.Identity name", "name"), InputField.of("n3.Identity item", "item"))));
    Assert.assertEquals(expectedOperations, processedOperations);
}
Also used : ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) HashMap(java.util.HashMap) Connection(io.cdap.cdap.etl.proto.Connection) ArrayList(java.util.ArrayList) EndPoint(io.cdap.cdap.api.lineage.field.EndPoint) ReadOperation(io.cdap.cdap.api.lineage.field.ReadOperation) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) Operation(io.cdap.cdap.api.lineage.field.Operation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) TransformOperation(io.cdap.cdap.api.lineage.field.TransformOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldReadOperation(io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) WriteOperation(io.cdap.cdap.api.lineage.field.WriteOperation) FieldWriteOperation(io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) FieldTransformOperation(io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) FieldOperation(io.cdap.cdap.etl.api.lineage.field.FieldOperation) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

Connection (io.cdap.cdap.etl.proto.Connection) 96
Test (org.junit.Test) 78
HashSet (java.util.HashSet) 70
HashMap (java.util.HashMap) 44
ArrayList (java.util.ArrayList) 32
Operation (io.cdap.cdap.api.lineage.field.Operation) 28
FieldOperation (io.cdap.cdap.etl.api.lineage.field.FieldOperation) 28
List (java.util.List) 28
ImmutableList (com.google.common.collect.ImmutableList) 26
ReadOperation (io.cdap.cdap.api.lineage.field.ReadOperation) 26
TransformOperation (io.cdap.cdap.api.lineage.field.TransformOperation) 26
WriteOperation (io.cdap.cdap.api.lineage.field.WriteOperation) 26
FieldReadOperation (io.cdap.cdap.etl.api.lineage.field.FieldReadOperation) 26
FieldWriteOperation (io.cdap.cdap.etl.api.lineage.field.FieldWriteOperation) 26
FieldTransformOperation (io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation) 24
EndPoint (io.cdap.cdap.api.lineage.field.EndPoint) 20
StageSpec (io.cdap.cdap.etl.proto.v2.spec.StageSpec) 18
PipelinePhase (io.cdap.cdap.etl.common.PipelinePhase) 16
PipelineSpec (io.cdap.cdap.etl.proto.v2.spec.PipelineSpec) 14
FieldLineageInfo (io.cdap.cdap.data2.metadata.lineage.field.FieldLineageInfo) 8