Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
From the class FieldLineageProcessorTest, method testGeneratedOperations:
@Test
public void testGeneratedOperations() throws Exception {
// src -> transform1 -> transform2 -> sink
Schema srcSchema = Schema.recordOf("srcSchema",
    Schema.Field.of("body", Schema.of(Schema.Type.STRING)),
    Schema.Field.of("offset", Schema.of(Schema.Type.INT)));
Schema transform1Schema = Schema.recordOf("trans1Schema",
    Schema.Field.of("body", Schema.of(Schema.Type.STRING)));
Schema transform2Schema = Schema.recordOf("trans2Schema",
    Schema.Field.of("id", Schema.of(Schema.Type.INT)),
    Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
Set<StageSpec> stageSpecs = ImmutableSet.of(
    StageSpec.builder("src", DUMMY_PLUGIN)
      .addOutput(srcSchema, "transform1")
      .build(),
    StageSpec.builder("transform1", DUMMY_PLUGIN)
      .addInputSchema("src", srcSchema)
      .addOutput(transform1Schema, "transform2")
      .build(),
    StageSpec.builder("transform2", DUMMY_PLUGIN)
      .addInputSchema("transform1", transform1Schema)
      .addOutput(transform2Schema, "sink")
      .build(),
    StageSpec.builder("sink", DUMMY_PLUGIN)
      .addInputSchema("transform2", transform2Schema)
      .build());
Set<Connection> connections = ImmutableSet.of(
    new Connection("src", "transform1"),
    new Connection("transform1", "transform2"),
    new Connection("transform2", "sink"));
PipelineSpec pipelineSpec = PipelineSpec.builder().addStages(stageSpecs).addConnections(connections).build();
FieldLineageProcessor processor = new FieldLineageProcessor(pipelineSpec);
Map<String, List<FieldOperation>> fieldOperations = ImmutableMap.of(
    "src", Collections.singletonList(
        new FieldReadOperation("Read", "1st operation", EndPoint.of("file"),
                               ImmutableList.of("body", "offset"))),
    "transform1", Collections.emptyList(),
    "transform2", Collections.emptyList(),
    "sink", Collections.singletonList(
        new FieldWriteOperation("Write", "4th operation", EndPoint.of("sink"),
                                ImmutableList.of("id", "name"))));
Set<Operation> operations = processor.validateAndConvert(fieldOperations);
Set<Operation> expected = ImmutableSet.of(
    new ReadOperation("src.Read", "1st operation", EndPoint.of("file"),
                      ImmutableList.of("body", "offset")),
    new TransformOperation("transform1.Transform", "",
                           ImmutableList.of(InputField.of("src.Read", "body"),
                                            InputField.of("src.Read", "offset")),
                           "body"),
    new TransformOperation("transform2.Transform", "",
                           ImmutableList.of(InputField.of("transform1.Transform", "body")),
                           ImmutableList.of("id", "name")),
    new WriteOperation("sink.Write", "4th operation", EndPoint.of("sink"),
                       ImmutableList.of(InputField.of("transform2.Transform", "id"),
                                        InputField.of("transform2.Transform", "name"))));
Assert.assertEquals(expected, operations);
}
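The stage specs above reference a DUMMY_PLUGIN constant that is defined elsewhere in FieldLineageProcessorTest and not reproduced on this page. A minimal sketch of such a fixture, assuming PluginSpec lives in the same io.cdap.cdap.etl.proto.v2.spec package as StageSpec and takes (type, name, properties, artifact), with ArtifactId, ArtifactVersion, and ArtifactScope from io.cdap.cdap.api.artifact; the type, name, and version strings are placeholders:

import com.google.common.collect.ImmutableMap;
import io.cdap.cdap.api.artifact.ArtifactId;
import io.cdap.cdap.api.artifact.ArtifactScope;
import io.cdap.cdap.api.artifact.ArtifactVersion;
import io.cdap.cdap.etl.proto.v2.spec.PluginSpec;

// Hypothetical stand-in for the DUMMY_PLUGIN constant used by the stage specs above;
// only the presence of a valid PluginSpec matters to the lineage assertions.
private static final PluginSpec DUMMY_PLUGIN =
    new PluginSpec("dummy", "dummy", ImmutableMap.of(),
                   new ArtifactId("dummy-artifact", new ArtifactVersion("1.0.0"), ArtifactScope.USER));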
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testJoinWith3Inputs:
@Test
public void testJoinWith3Inputs() {
// customer -> (id, name) ----------------|
//                                        |
// purchase -> (customer_id, item) -----> JOIN ---> (id_from_customer, customer_id, address_id,
//                                        |          name_from_customer, address)
//                                        |
// address -> (address_id, address) ------|
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n4"));
connections.add(new Connection("n2", "n4"));
connections.add(new Connection("n3", "n4"));
connections.add(new Connection("n4", "n5"));
EndPoint cEndPoint = EndPoint.of("default", "customer");
EndPoint pEndPoint = EndPoint.of("default", "purchase");
EndPoint aEndPoint = EndPoint.of("default", "address");
EndPoint acpEndPoint = EndPoint.of("default", "customer_purchase_address");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id", "name")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id", "item")));
stageOperations.put("n3", Collections.singletonList(new FieldReadOperation("ReadAddress", "read description", aEndPoint, "address_id", "address")));
List<FieldOperation> operationsFromJoin = new ArrayList<>();
operationsFromJoin.add(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id", "n3.address_id"), Arrays.asList("id", "customer_id", "address_id")));
operationsFromJoin.add(new FieldTransformOperation("Rename id", "Rename Operation", Collections.singletonList("id"), Collections.singletonList("id_from_customer")));
operationsFromJoin.add(new FieldTransformOperation("Rename customer.name", "Rename Operation", Collections.singletonList("n1.name"), Collections.singletonList("name_from_customer")));
operationsFromJoin.add(new FieldTransformOperation("Identity address.address", "Identity Operation", Collections.singletonList("n3.address"), Collections.singletonList("address")));
stageOperations.put("n4", operationsFromJoin);
stageOperations.put("n5", Collections.singletonList(new FieldWriteOperation("Write", "Write Operation", acpEndPoint, "id_from_customer", "customer_id", "address_id", "name_from_customer", "address")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n4"));
Set<Operation> processedOperations = processor.process();
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id", "name"));
expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id", "item"));
expectedOperations.add(new ReadOperation("n3.ReadAddress", "read description", aEndPoint, "address_id", "address"));
expectedOperations.add(new TransformOperation("n4.Join", "Join Operation", Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id"), InputField.of("n3.ReadAddress", "address_id")), "id", "customer_id", "address_id"));
expectedOperations.add(new TransformOperation("n4.Rename id", "Rename Operation", Collections.singletonList(InputField.of("n4.Join", "id")), "id_from_customer"));
expectedOperations.add(new TransformOperation("n4.Rename customer.name", "Rename Operation", Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
expectedOperations.add(new TransformOperation("n4.Identity address.address", "Identity Operation", Collections.singletonList(InputField.of("n3.ReadAddress", "address")), "address"));
expectedOperations.add(new WriteOperation("n5.Write", "Write Operation", acpEndPoint, Arrays.asList(InputField.of("n4.Rename id", "id_from_customer"), InputField.of("n4.Join", "customer_id"), InputField.of("n4.Join", "address_id"), InputField.of("n4.Rename customer.name", "name_from_customer"), InputField.of("n4.Identity address.address", "address"))));
Assert.assertEquals(expectedOperations, processedOperations);
}
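Note the string format of the join stage's operation inputs above: a name such as "n1.id" is a stage-qualified field reference, where the prefix before the first dot names the upstream stage, which is how the processor traces each input back to the operation that produced it. An unqualified name such as "id" (in the "Rename id" operation) instead refers to a field produced earlier within the same stage, which is why the expected output maps it to n4.Join. A hypothetical helper (not a CDAP API) illustrating the convention:

// Hypothetical helper illustrating the "<stageName>.<fieldName>" references
// used in the join stage's FieldTransformOperation inputs above.
static String[] splitStageQualifiedField(String qualified) {
  int dot = qualified.indexOf('.');
  return new String[] {
    qualified.substring(0, dot),    // e.g. "n1", the upstream stage name
    qualified.substring(dot + 1)    // e.g. "id", the field that stage produced
  };
}

// splitStageQualifiedField("n1.id") -> { "n1", "id" }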
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testDirectMerge:
@Test
public void testDirectMerge() {
// n1 ------> n3
//       \  /
//        \/
//        /\
//       /  \
// n2 ------> n4
//
// n1 => pRead: personFile -> (offset, body)
// n2 => hRead: hrFile -> (offset, body)
// implicit merge => n1,n2.merge: (pRead.offset, pRead.body, hRead.offset, hRead.body) -> (offset, body)
// n3 => write1: (n1,n2.merge.offset, n1,n2.merge.body) -> testStore
// n4 => write2: (n1,n2.merge.offset, n1,n2.merge.body) -> prodStore
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n3"));
connections.add(new Connection("n1", "n4"));
connections.add(new Connection("n2", "n3"));
connections.add(new Connection("n2", "n4"));
EndPoint pEndPoint = EndPoint.of("ns", "personFile");
EndPoint hEndPoint = EndPoint.of("ns", "hrFile");
EndPoint testEndPoint = EndPoint.of("ns", "testStore");
EndPoint prodEndPoint = EndPoint.of("ns", "prodStore");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
List<FieldOperation> fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldReadOperation("pRead", "Reading from person file", pEndPoint, "offset", "body"));
stageOperations.put("n1", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldReadOperation("hRead", "Reading from hr file", hEndPoint, "offset", "body"));
stageOperations.put("n2", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldWriteOperation("write1", "Writing to test store", testEndPoint, "offset", "body"));
stageOperations.put("n3", fieldOperations);
fieldOperations = new ArrayList<>();
fieldOperations.add(new FieldWriteOperation("write2", "Writing to prod store", prodEndPoint, "offset", "body"));
stageOperations.put("n4", fieldOperations);
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
Set<Operation> processedOperations = processor.process();
Set<Operation> expectedOperations = new HashSet<>();
ReadOperation pRead = new ReadOperation("n1.pRead", "Reading from person file", pEndPoint, "offset", "body");
expectedOperations.add(pRead);
ReadOperation hRead = new ReadOperation("n2.hRead", "Reading from hr file", hEndPoint, "offset", "body");
expectedOperations.add(hRead);
// implicit merge should be added by app
TransformOperation merge1 = new TransformOperation("n1,n2.merge.offset", "Merged stages: n1,n2",
    Arrays.asList(InputField.of("n1.pRead", "offset"), InputField.of("n2.hRead", "offset")), "offset");
TransformOperation merge2 = new TransformOperation("n1,n2.merge.body", "Merged stages: n1,n2",
    Arrays.asList(InputField.of("n1.pRead", "body"), InputField.of("n2.hRead", "body")), "body");
expectedOperations.add(merge1);
expectedOperations.add(merge2);
WriteOperation write1 = new WriteOperation("n3.write1", "Writing to test store", testEndPoint,
    Arrays.asList(InputField.of("n1,n2.merge.offset", "offset"), InputField.of("n1,n2.merge.body", "body")));
expectedOperations.add(write1);
WriteOperation write2 = new WriteOperation("n4.write2", "Writing to prod store", prodEndPoint,
    Arrays.asList(InputField.of("n1,n2.merge.offset", "offset"), InputField.of("n1,n2.merge.body", "body")));
expectedOperations.add(write2);
Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
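The expected operations make the implicit-merge naming visible: when several stages fan in to the same downstream stage without an explicit multi-input operation, the processor emits one synthetic transform per merged field, named with the comma-joined upstream stage names. A sketch of that naming as inferred from this test's assertions, not taken from the processor's implementation:

import java.util.Arrays;
import java.util.List;

// Hypothetical reconstruction of the merge-operation names asserted above
// ("n1,n2.merge.offset" and "n1,n2.merge.body").
static String mergeOperationName(List<String> mergedStageNames, String field) {
  return String.join(",", mergedStageNames) + ".merge." + field;
}

// mergeOperationName(Arrays.asList("n1", "n2"), "offset") -> "n1,n2.merge.offset"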
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testSimpleJoinOperation:
@Test
public void testSimpleJoinOperation() {
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n3"));
connections.add(new Connection("n2", "n3"));
connections.add(new Connection("n3", "n4"));
EndPoint cEndPoint = EndPoint.of("default", "customer");
EndPoint pEndPoint = EndPoint.of("default", "purchase");
EndPoint cpEndPoint = EndPoint.of("default", "customer_purchase");
// customer -> (id) -----------|
//                             |
//                             JOIN ------> (id, customer_id)
//                             |
// purchase -> (customer_id) --|
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id")));
stageOperations.put("n3", Collections.singletonList(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id"), Arrays.asList("id", "customer_id"))));
stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", cpEndPoint, "id", "customer_id")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id"));
expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id"));
expectedOperations.add(new TransformOperation("n3.Join", "Join Operation", Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")), "id", "customer_id"));
expectedOperations.add(new WriteOperation("n4.Write", "write description", cpEndPoint, Arrays.asList(InputField.of("n3.Join", "id"), InputField.of("n3.Join", "customer_id"))));
Assert.assertEquals(expectedOperations, processor.process());
}
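Contrast the processor's third constructor argument here with testDirectMerge above: that test passed Collections.emptySet() and received synthetic merge operations, while both join tests pass the join stage's name. Judging from these tests alone (the processor's source is not shown on this page), that argument appears to list the stages whose own operations already describe their multi-input lineage, so no implicit "<stages>.merge.<field>" transforms are generated for them. A hedged restatement of that reading as a usage sketch:

// Assumed semantics of the third argument, inferred from these tests: stages named
// here (e.g. the joiner "n3") handle their own fan-in, so the processor skips
// implicit merge generation for them; an empty set re-enables implicit merges.
LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));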
Use of io.cdap.cdap.etl.proto.Connection in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testSameKeyAndRenameJoin:
@Test
public void testSameKeyAndRenameJoin() {
// n1 (id(key), swap1, n1same) --------|
//                                     |
//                                     JOIN ------> (id, new_id, swap1, swap2, n1same, n2same)
//                                     |
// n2 (id(key), swap2, n2same) --------|
//
// operations: (n1.id, n2.id) -> id
//             (n2.id)        -> new_id
//             (n1.swap1)     -> swap2
//             (n2.swap2)     -> swap1
//             (n1.n1same)    -> n1same
//             (n2.n2same)    -> n2same
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n3"));
connections.add(new Connection("n2", "n3"));
connections.add(new Connection("n3", "n4"));
EndPoint src1 = EndPoint.of("default", "n1");
EndPoint src2 = EndPoint.of("default", "n2");
EndPoint dest = EndPoint.of("default", "n4");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("readSrc1", "read description", src1, "id", "swap1", "n1same")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("readSrc2", "read description", src2, "id", "swap2", "n2same")));
List<FieldOperation> joinOperations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
joinOperations.add(new FieldTransformOperation("JoinKey", "Join Key", Arrays.asList("n1.id", "n2.id"), "id"));
joinOperations.add(new FieldTransformOperation("RenameN2", "rename", Collections.singletonList("n2.id"), "new_id"));
joinOperations.add(new FieldTransformOperation("swap1", "swap", Collections.singletonList("n1.swap1"), "swap2"));
joinOperations.add(new FieldTransformOperation("swap2", "swap", Collections.singletonList("n2.swap2"), "swap1"));
joinOperations.add(new FieldTransformOperation("unchange1", "unchange", Collections.singletonList("n1.n1same"), "n1same"));
joinOperations.add(new FieldTransformOperation("unchange2", "unchange", Collections.singletonList("n2.n2same"), "n2same"));
stageOperations.put("n4", Collections.singletonList(new FieldWriteOperation("Write", "write description", dest, "id", "new_id", "swap1", "swap2", "n1same", "n2same")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.readSrc1", "read description", src1, "id", "swap1", "n1same"));
expectedOperations.add(new ReadOperation("n2.readSrc2", "read description", src2, "id", "swap2", "n2same"));
expectedOperations.add(new TransformOperation("n3.JoinKey", "Join Key", Arrays.asList(InputField.of("n1.readSrc1", "id"), InputField.of("n2.readSrc2", "id")), "id"));
expectedOperations.add(new TransformOperation("n3.RenameN2", "rename", Collections.singletonList(InputField.of("n2.readSrc2", "id")), "new_id"));
expectedOperations.add(new TransformOperation("n3.swap1", "swap", Collections.singletonList(InputField.of("n1.readSrc1", "swap1")), "swap2"));
expectedOperations.add(new TransformOperation("n3.swap2", "swap", Collections.singletonList(InputField.of("n2.readSrc2", "swap2")), "swap1"));
expectedOperations.add(new TransformOperation("n3.unchange1", "unchange", Collections.singletonList(InputField.of("n1.readSrc1", "n1same")), "n1same"));
expectedOperations.add(new TransformOperation("n3.unchange2", "unchange", Collections.singletonList(InputField.of("n2.readSrc2", "n2same")), "n2same"));
expectedOperations.add(new WriteOperation("n4.Write", "write description", dest, Arrays.asList(InputField.of("n3.JoinKey", "id"), InputField.of("n3.RenameN2", "new_id"), InputField.of("n3.swap2", "swap1"), InputField.of("n3.swap1", "swap2"), InputField.of("n3.unchange1", "n1same"), InputField.of("n3.unchange2", "n2same"))));
Assert.assertEquals(expectedOperations, processor.process());
}