Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
Class LineageOperationProcessorTest, method testSourceWithMultipleDestinations.
@Test
public void testSourceWithMultipleDestinations() {
  //                |----->n3
  //  n1--->n2------|
  //                |----->n4
  // n1 => read: file -> (offset, body)
  // n2 => parse: body -> (id, name, address, zip)
  // n3 => write1: (parse.id, parse.name) -> info
  // n4 => write2: (parse.address, parse.zip) -> location
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));
  // n2 fans out to both n3 and n4, as shown in the diagram above.
  connections.add(new Connection("n2", "n4"));
  EndPoint source = EndPoint.of("ns", "file");
  EndPoint info = EndPoint.of("ns", "info");
  EndPoint location = EndPoint.of("ns", "location");
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading from file", source, "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing body", Collections.singletonList("body"),
                                                  "id", "name", "address", "zip"));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("infoWrite", "writing info", info, "id", "name"));
  stageOperations.put("n3", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("locationWrite", "writing location", location, "address", "zip"));
  stageOperations.put("n4", fieldOperations);
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();
  Set<Operation> expectedOperations = new HashSet<>();
  ReadOperation read = new ReadOperation("n1.read", "reading from file", source, "offset", "body");
  expectedOperations.add(read);
  TransformOperation parse = new TransformOperation("n2.parse", "parsing body",
                                                    Collections.singletonList(InputField.of("n1.read", "body")),
                                                    "id", "name", "address", "zip");
  expectedOperations.add(parse);
  WriteOperation infoWrite = new WriteOperation("n3.infoWrite", "writing info", info,
                                                InputField.of("n2.parse", "id"), InputField.of("n2.parse", "name"));
  expectedOperations.add(infoWrite);
  WriteOperation locationWrite = new WriteOperation("n4.locationWrite", "writing location", location,
                                                    InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip"));
  expectedOperations.add(locationWrite);
  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
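The expected operations above illustrate the naming convention the processor applies: each operation name is prefixed with its stage name ("read" in stage "n1" becomes "n1.read"), and downstream InputField origins reference that prefixed name. The sketch below only reproduces that string convention; prefixedName is a hypothetical helper for illustration, not CDAP API.

// Hypothetical helper (illustration only): rebuilds the "<stage>.<operation>" origin names
// seen in the expected operations, e.g. prefixedName("n2", "parse") returns "n2.parse".
static String prefixedName(String stageName, String operationName) {
  return stageName + "." + operationName;
}

With it, InputField.of(prefixedName("n2", "parse"), "id") names the same origin as the hand-written InputField.of("n2.parse", "id") above.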
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
Class LineageOperationProcessorTest, method testSimplePipeline.
@Test
public void testSimplePipeline() {
  // n1-->n2-->n3
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"),
                                                  Arrays.asList("name", "address", "zip")));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"),
                                              "name", "address", "zip"));
  stageOperations.put("n3", fieldOperations);
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();
  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.read", "reading data", EndPoint.of("default", "file"), "offset", "body"));
  expected.add(new TransformOperation("n2.parse", "parsing data",
                                      Collections.singletonList(InputField.of("n1.read", "body")),
                                      "name", "address", "zip"));
  expected.add(new WriteOperation("n3.write", "writing data", EndPoint.of("default", "file2"),
                                  InputField.of("n2.parse", "name"), InputField.of("n2.parse", "address"),
                                  InputField.of("n2.parse", "zip")));
  Assert.assertEquals(new FieldLineageInfo(expected), new FieldLineageInfo(processedOperations));
}
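Both lineage tests repeat the same build step: allocate a fresh ArrayList, add a single FieldOperation, and put the list into stageOperations. A helper like the hypothetical StageOps.single below would shorten that; this is only a sketch, assuming the io.cdap.cdap.etl.api.lineage.field package these tests import and assuming LineageOperationsProcessor only reads the lists it receives.

// Hypothetical helper, not part of the CDAP test class: wraps one FieldOperation in an
// immutable single-element list so each stage registration becomes a single statement.
import java.util.Collections;
import java.util.List;
import io.cdap.cdap.etl.api.lineage.field.FieldOperation; // package assumed from the tests above

final class StageOps {
  private StageOps() {
  }

  static List<FieldOperation> single(FieldOperation op) {
    return Collections.singletonList(op);
  }
}

With it, the "n1" registration in testSimplePipeline collapses to stageOperations.put("n1", StageOps.single(new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body"))).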
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
Class ConnectorDagTest, method testConditionDag.
@Test
public void testConditionDag() {
  /*
       file - csv - c1 - t1---agg1--agg2---sink1
                     |
                     |----c2 - sink2
                           |
                           |------c3 - sink3
   */
  Set<Connection> connections = ImmutableSet.of(
    new Connection("file", "csv"),
    new Connection("csv", "c1"),
    new Connection("c1", "t1"),
    new Connection("t1", "agg1"),
    new Connection("agg1", "agg2"),
    new Connection("agg2", "sink1"),
    new Connection("c1", "c2"),
    new Connection("c2", "sink2"),
    new Connection("c2", "c3"),
    new Connection("c3", "sink3"));
  Set<String> conditions = new HashSet<>(Arrays.asList("c1", "c2", "c3"));
  Set<String> reduceNodes = new HashSet<>(Arrays.asList("agg1", "agg2"));
  Set<String> isolationNodes = new HashSet<>();
  Set<String> multiPortNodes = new HashSet<>();
  Set<Dag> actual = PipelinePlanner.split(connections, conditions, reduceNodes, isolationNodes,
                                          EMPTY_ACTIONS, multiPortNodes, EMPTY_CONNECTORS);
  Dag dag1 = new Dag(ImmutableSet.of(new Connection("file", "csv"), new Connection("csv", "c1")));
  Dag dag2 = new Dag(ImmutableSet.of(new Connection("c1", "t1"), new Connection("t1", "agg1"),
                                     new Connection("agg1", "agg2.connector")));
  Dag dag3 = new Dag(ImmutableSet.of(new Connection("agg2.connector", "agg2"), new Connection("agg2", "sink1")));
  Dag dag4 = new Dag(ImmutableSet.of(new Connection("c1", "c2")));
  Dag dag5 = new Dag(ImmutableSet.of(new Connection("c2", "sink2")));
  Dag dag6 = new Dag(ImmutableSet.of(new Connection("c2", "c3")));
  Dag dag7 = new Dag(ImmutableSet.of(new Connection("c3", "sink3")));
  Set<Dag> expected = ImmutableSet.of(dag1, dag2, dag3, dag4, dag5, dag6, dag7);
  Assert.assertEquals(expected, actual);
}
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
Class ConnectorDagTest, method testSimpleConditionWithReducers.
@Test
public void testSimpleConditionWithReducers() {
  /*
         |--- n2
    n1 --|
         |--- n3(r) --- n4---condition----n5
                                 |
                                 |---------n6
   */
  Set<Connection> connections = ImmutableSet.of(
    new Connection("n1", "n2"),
    new Connection("n1", "n3"),
    new Connection("n3", "n4"),
    new Connection("n4", "condition"),
    new Connection("condition", "n5"),
    new Connection("condition", "n6"));
  Set<String> conditions = Collections.singleton("condition");
  Set<String> reduceNodes = Collections.singleton("n3");
  Set<String> isolationNodes = new HashSet<>();
  Set<String> multiPortNodes = new HashSet<>();
  Set<Dag> actual = PipelinePlanner.split(connections, conditions, reduceNodes, isolationNodes,
                                          EMPTY_ACTIONS, multiPortNodes, EMPTY_CONNECTORS);
  Dag dag1 = new Dag(ImmutableSet.of(new Connection("n1", "n2"), new Connection("n1", "n3.connector")));
  Dag dag2 = new Dag(ImmutableSet.of(new Connection("n3.connector", "n3"), new Connection("n3", "n4"),
                                     new Connection("n4", "condition")));
  Dag dag3 = new Dag(ImmutableSet.of(new Connection("condition", "n5")));
  Dag dag4 = new Dag(ImmutableSet.of(new Connection("condition", "n6")));
  Set<Dag> expected = ImmutableSet.of(dag1, dag2, dag3, dag4);
  Assert.assertEquals(expected, actual);
}
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
Class ConnectorDagTest, method testSimpleConditionWithMultipleSources.
@Test
public void testSimpleConditionWithMultipleSources() {
  /*
           |--- n2
      n1 --|
           |--- n3(r) --- n4---condition----n5
           |                        |
   n11-----|                        |---------n6
   */
  Set<Connection> connections = ImmutableSet.of(
    new Connection("n1", "n2"),
    new Connection("n1", "n3"),
    new Connection("n3", "n4"),
    new Connection("n4", "condition"),
    new Connection("condition", "n5"),
    new Connection("condition", "n6"),
    new Connection("n11", "n3"));
  Set<String> conditions = Collections.singleton("condition");
  Set<String> reduceNodes = Collections.singleton("n3");
  Set<String> isolationNodes = new HashSet<>();
  Set<String> multiPortNodes = new HashSet<>();
  Set<Dag> actual = PipelinePlanner.split(connections, conditions, reduceNodes, isolationNodes,
                                          EMPTY_ACTIONS, multiPortNodes, EMPTY_CONNECTORS);
  Dag dag1 = new Dag(ImmutableSet.of(new Connection("n1", "n2"), new Connection("n1", "n3.connector"),
                                     new Connection("n11", "n3.connector")));
  Dag dag2 = new Dag(ImmutableSet.of(new Connection("n3.connector", "n3"), new Connection("n3", "n4"),
                                     new Connection("n4", "condition")));
  Dag dag3 = new Dag(ImmutableSet.of(new Connection("condition", "n5")));
  Dag dag4 = new Dag(ImmutableSet.of(new Connection("condition", "n6")));
  Set<Dag> expected = ImmutableSet.of(dag1, dag2, dag3, dag4);
  Assert.assertEquals(expected, actual);
}
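One convention is visible across the expected dags in these ConnectorDagTest examples: when a reduce node such as n3 or agg2 has to start a new phase, the upstream phase ends at a synthetic stage named "<node>.connector" and the next phase begins with the edge "<node>.connector" -> "<node>". The sketch below only reproduces that edge-naming convention for illustration; it is not the PipelinePlanner.split algorithm, and the ConnectorEdges class and its method names are made up.

import io.cdap.cdap.etl.proto.Connection;

// Illustration only (not PipelinePlanner): helpers that rebuild the "<node>.connector"
// edges appearing in the expected dags above.
final class ConnectorEdges {
  private ConnectorEdges() {
  }

  // Edge into the synthetic connector stage, e.g. intoConnector("agg1", "agg2") gives agg1 -> agg2.connector.
  static Connection intoConnector(String from, String node) {
    return new Connection(from, node + ".connector");
  }

  // Edge from the connector into the real node, e.g. outOfConnector("agg2") gives agg2.connector -> agg2.
  static Connection outOfConnector(String node) {
    return new Connection(node + ".connector", node);
  }
}

For example, the n1 -> n3.connector and n3.connector -> n3 edges in testSimpleConditionWithReducers correspond to ConnectorEdges.intoConnector("n1", "n3") and ConnectorEdges.outOfConnector("n3").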