use of io.cdap.cdap.etl.api.lineage.field.FieldOperation in project cdap by caskdata.
the class FieldLineageProcessorTest method testGeneratedOperations.
@Test
public void testGeneratedOperations() throws Exception {
// src -> transform1 -> transform2 -> sink
Schema srcSchema = Schema.recordOf("srcSchema",
                                   Schema.Field.of("body", Schema.of(Schema.Type.STRING)),
                                   Schema.Field.of("offset", Schema.of(Schema.Type.INT)));
Schema transform1Schema = Schema.recordOf("trans1Schema",
                                          Schema.Field.of("body", Schema.of(Schema.Type.STRING)));
Schema transform2Schema = Schema.recordOf("trans2Schema",
                                          Schema.Field.of("id", Schema.of(Schema.Type.INT)),
                                          Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
Set<StageSpec> stageSpecs = ImmutableSet.of(
  StageSpec.builder("src", DUMMY_PLUGIN)
    .addOutput(srcSchema, "transform1")
    .build(),
  StageSpec.builder("transform1", DUMMY_PLUGIN)
    .addInputSchema("src", srcSchema)
    .addOutput(transform1Schema, "transform2")
    .build(),
  StageSpec.builder("transform2", DUMMY_PLUGIN)
    .addInputSchema("transform1", transform1Schema)
    .addOutput(transform2Schema, "sink")
    .build(),
  StageSpec.builder("sink", DUMMY_PLUGIN)
    .addInputSchema("transform2", transform2Schema)
    .build());
Set<Connection> connections = ImmutableSet.of(new Connection("src", "transform1"),
                                              new Connection("transform1", "transform2"),
                                              new Connection("transform2", "sink"));
PipelineSpec pipelineSpec = PipelineSpec.builder().addStages(stageSpecs).addConnections(connections).build();
FieldLineageProcessor processor = new FieldLineageProcessor(pipelineSpec);
Map<String, List<FieldOperation>> fieldOperations = ImmutableMap.of(
  "src", Collections.singletonList(
    new FieldReadOperation("Read", "1st operation", EndPoint.of("file"), ImmutableList.of("body", "offset"))),
  "transform1", Collections.emptyList(),
  "transform2", Collections.emptyList(),
  "sink", Collections.singletonList(
    new FieldWriteOperation("Write", "4th operation", EndPoint.of("sink"), ImmutableList.of("id", "name"))));
Set<Operation> operations = processor.validateAndConvert(fieldOperations);
Set<Operation> expected = ImmutableSet.of(
  new ReadOperation("src.Read", "1st operation", EndPoint.of("file"), ImmutableList.of("body", "offset")),
  new TransformOperation("transform1.Transform", "",
                         ImmutableList.of(InputField.of("src.Read", "body"), InputField.of("src.Read", "offset")),
                         "body"),
  new TransformOperation("transform2.Transform", "",
                         ImmutableList.of(InputField.of("transform1.Transform", "body")),
                         ImmutableList.of("id", "name")),
  new WriteOperation("sink.Write", "4th operation", EndPoint.of("sink"),
                     ImmutableList.of(InputField.of("transform2.Transform", "id"),
                                      InputField.of("transform2.Transform", "name"))));
Assert.assertEquals(expected, operations);
}
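In the test above, transform1 and transform2 report empty operation lists, and validateAndConvert fills in a default <stage>.Transform operation derived from the stage schemas, which is what the expected set asserts. A stage can instead report its own field-level lineage. Below is a minimal sketch, reusing only the classes and the processor from the test, of what an explicit parse operation for transform2 could look like; the operation name, description and the resulting transform2.Parse name are assumptions based on the <stage>.<operation> naming visible above, not confirmed behaviour.
// Hypothetical explicit lineage for transform2: body -> (id, name).
List<FieldOperation> transform2Operations = Collections.singletonList(
  new FieldTransformOperation("Parse", "Parse body into id and name",
                              Collections.singletonList("body"),
                              Arrays.asList("id", "name")));
Map<String, List<FieldOperation>> explicitFieldOperations = ImmutableMap.of(
  "src", Collections.singletonList(
    new FieldReadOperation("Read", "1st operation", EndPoint.of("file"), ImmutableList.of("body", "offset"))),
  "transform1", Collections.emptyList(),
  "transform2", transform2Operations,
  "sink", Collections.singletonList(
    new FieldWriteOperation("Write", "4th operation", EndPoint.of("sink"), ImmutableList.of("id", "name"))));
// Presumably converted to an operation named transform2.Parse instead of the generated transform2.Transform.
Set<Operation> convertedOperations = processor.validateAndConvert(explicitFieldOperations);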
use of io.cdap.cdap.etl.api.lineage.field.FieldOperation in project cdap by caskdata.
the class StageOperationsValidatorTest method testUnusedInvalidInputs.
@Test
public void testUnusedInvalidInputs() {
// for a write operation, all the input fields are written to the endpoint; 'zip' is not in the stage inputs, so it is invalid
FieldOperation write = new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"), "name", "address", "zip");
StageOperationsValidator.Builder builder = new StageOperationsValidator.Builder(Collections.singletonList(write));
builder.addStageInputs(Arrays.asList("name", "address"));
StageOperationsValidator stageOperationsValidator = builder.build();
stageOperationsValidator.validate();
InvalidFieldOperations expected = new InvalidFieldOperations(ImmutableMap.of("zip", Collections.singletonList("write")), Collections.emptyMap());
Assert.assertEquals(expected, stageOperationsValidator.getStageInvalids());
// the transform's input field refers to a non-existent field, so it is invalid
FieldTransformOperation transform = new FieldTransformOperation("transform", "description", ImmutableList.of("nonexist"), ImmutableList.of("val2"));
builder = new StageOperationsValidator.Builder(Collections.singletonList(transform));
builder.addStageInputs(Collections.singletonList("exist"));
builder.addStageOutputs(Collections.singletonList("val2"));
StageOperationsValidator validator = builder.build();
validator.validate();
expected = new InvalidFieldOperations(Collections.singletonMap("nonexist", Collections.singletonList("transform")), Collections.emptyMap());
InvalidFieldOperations stageInvalids = validator.getStageInvalids();
Assert.assertEquals(expected, stageInvalids);
}
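For contrast with the two invalid cases above, a stage whose operations only read fields that are actually present in its input schema should produce no invalids. A minimal hedged sketch using the same builder API (field names are illustrative):
// Hypothetical valid case: the write operation only uses fields present in the stage inputs.
FieldOperation validWrite = new FieldWriteOperation("write", "writing data",
                                                    EndPoint.of("default", "file2"), "name", "address");
StageOperationsValidator.Builder validBuilder =
  new StageOperationsValidator.Builder(Collections.singletonList(validWrite));
validBuilder.addStageInputs(Arrays.asList("name", "address"));
StageOperationsValidator validValidator = validBuilder.build();
validValidator.validate();
// As the valid cases in testUnusedValidOutputs below show, a fully valid stage reports no invalids.
Assert.assertNull(validValidator.getStageInvalids());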
use of io.cdap.cdap.etl.api.lineage.field.FieldOperation in project cdap by caskdata.
the class StageOperationsValidatorTest method testUnusedValidOutputs.
@Test
public void testUnusedValidOutputs() {
// the read operation has [offset, body] as output fields while the output schema only has [body]; it
// should still be valid
FieldOperation read = new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body");
StageOperationsValidator.Builder builder = new StageOperationsValidator.Builder(Collections.singletonList(read));
builder.addStageOutputs(Collections.singleton("body"));
StageOperationsValidator stageOperationsValidator = builder.build();
stageOperationsValidator.validate();
Assert.assertNull(stageOperationsValidator.getStageInvalids());
// the transform has [body] -> [name, address, zip] while the output schema only has [name, address]; it is still valid
// since the extra field is generated from fields in the input schema
FieldOperation transform = new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"), Arrays.asList("name", "address", "zip"));
builder = new StageOperationsValidator.Builder(Collections.singletonList(transform));
builder.addStageInputs(Collections.singleton("body"));
builder.addStageOutputs(Arrays.asList("name", "address"));
stageOperationsValidator = builder.build();
stageOperationsValidator.validate();
Assert.assertNull(stageOperationsValidator.getStageInvalids());
// simulate a joiner: the operations are [d1.k1, d2.k2] -> [k1, k2], [d1.val1] -> [val1], [d2.val2] -> [val2],
// the input schema is [d1.k1, d2.k2, d1.val1, d2.val2], and the output schema is [val1, val2]. This is still valid
// since both d1.k1 and d2.k2 are in the input schema even though k1 and k2 are not in the output schema.
FieldTransformOperation transform1 = new FieldTransformOperation("transform1", "description1", ImmutableList.of("d1.k1", "d2.k2"), ImmutableList.of("k1", "k2"));
FieldTransformOperation transform2 = new FieldTransformOperation("transform2", "description2", ImmutableList.of("d1.val1"), ImmutableList.of("val1"));
FieldTransformOperation transform3 = new FieldTransformOperation("transform3", "description3", ImmutableList.of("d2.val2"), ImmutableList.of("val2"));
builder = new StageOperationsValidator.Builder(ImmutableList.of(transform1, transform2, transform3));
builder.addStageInputs(ImmutableList.of("d1.k1", "d2.k2", "d1.val1", "d2.val2"));
builder.addStageOutputs(ImmutableList.of("val1", "val2"));
StageOperationsValidator validator = builder.build();
validator.validate();
Assert.assertNull(validator.getStageInvalids());
}
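The inverse case for the joiner would be a field such as d2.val2 that an operation reads but that is missing from the stage inputs; by the convention shown in testUnusedInvalidInputs above, it should be reported against the operation that reads it. The sketch below follows that pattern; the exact expectation is an assumption, not taken from the test:
// Hypothetical invalid joiner: identity_val2 reads d2.val2, which is missing from the stage inputs.
FieldTransformOperation joinKeys = new FieldTransformOperation("join_keys", "join on keys",
  ImmutableList.of("d1.k1", "d2.k2"), ImmutableList.of("k1", "k2"));
FieldTransformOperation identityVal2 = new FieldTransformOperation("identity_val2", "pass through val2",
  ImmutableList.of("d2.val2"), ImmutableList.of("val2"));
StageOperationsValidator.Builder joinBuilder =
  new StageOperationsValidator.Builder(ImmutableList.of(joinKeys, identityVal2));
joinBuilder.addStageInputs(ImmutableList.of("d1.k1", "d2.k2", "d1.val1"));
joinBuilder.addStageOutputs(ImmutableList.of("val2"));
StageOperationsValidator joinValidator = joinBuilder.build();
joinValidator.validate();
// Assumed expectation, mirroring testUnusedInvalidInputs: the missing field is keyed by name
// and mapped to the operations that read it.
InvalidFieldOperations expectedInvalids = new InvalidFieldOperations(
  ImmutableMap.of("d2.val2", Collections.singletonList("identity_val2")), Collections.emptyMap());
Assert.assertEquals(expectedInvalids, joinValidator.getStageInvalids());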
use of io.cdap.cdap.etl.api.lineage.field.FieldOperation in project cdap by caskdata.
the class StageOperationsValidatorTest method testRedundantOutputUsedAsInput.
@Test
public void testRedundantOutputUsedAsInput() {
List<FieldOperation> pipelineOperations = new ArrayList<>();
pipelineOperations.add(new FieldTransformOperation("redundant_parse1", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("redundant_parse2", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("non_redundant_parse", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("parse", "parsing data", Arrays.asList("body", "name"), Arrays.asList("name", "address", "zip")));
StageOperationsValidator.Builder builder = new StageOperationsValidator.Builder(pipelineOperations);
builder.addStageInputs(Arrays.asList("offset", "body"));
builder.addStageOutputs(Arrays.asList("name", "address", "zip"));
StageOperationsValidator stageOperationsValidator = builder.build();
stageOperationsValidator.validate();
Map<String, List<String>> expected = ImmutableMap.of("name", Arrays.asList("redundant_parse1", "redundant_parse2"));
Assert.assertEquals(expected, stageOperationsValidator.getRedundantOutputs());
}
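Based on the expectation above, getRedundantOutputs appears to map a field name to the operations whose output of that field is overwritten by a later operation before anything consumes it: redundant_parse1 and redundant_parse2 both produce name from body, but only the later non_redundant_parse output reaches the consuming parse operation. A reduced hedged sketch of the same pattern with a single redundant producer (all names are illustrative):
// Hypothetical stage with one redundant producer of "name": first_parse's output is
// overwritten by second_parse before parse_all consumes it.
List<FieldOperation> ops = new ArrayList<>();
ops.add(new FieldTransformOperation("first_parse", "parsing data",
                                    Collections.singletonList("body"), "name"));
ops.add(new FieldTransformOperation("second_parse", "parsing data",
                                    Collections.singletonList("body"), "name"));
ops.add(new FieldTransformOperation("parse_all", "parsing data",
                                    Arrays.asList("body", "name"), Arrays.asList("name", "address")));
StageOperationsValidator.Builder redundancyBuilder = new StageOperationsValidator.Builder(ops);
redundancyBuilder.addStageInputs(Arrays.asList("offset", "body"));
redundancyBuilder.addStageOutputs(Arrays.asList("name", "address"));
StageOperationsValidator redundancyValidator = redundancyBuilder.build();
redundancyValidator.validate();
// Expected by analogy with the test above: only first_parse is reported as redundant.
Assert.assertEquals(ImmutableMap.of("name", Collections.singletonList("first_parse")),
                    redundancyValidator.getRedundantOutputs());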
use of io.cdap.cdap.etl.api.lineage.field.FieldOperation in project cdap by caskdata.
the class LineageOperationProcessorTest method testJoinWith3Inputs.
@Test
public void testJoinWith3Inputs() {
// customer -> (id, name) -----------------\
//                                           \
// purchase -> (customer_id, item) -----------> JOIN ---> (id_from_customer, customer_id, address_id,
//                                           /             name_from_customer, address)
// address -> (address_id, address) --------/
Set<Connection> connections = new HashSet<>();
connections.add(new Connection("n1", "n4"));
connections.add(new Connection("n2", "n4"));
connections.add(new Connection("n3", "n4"));
connections.add(new Connection("n4", "n5"));
EndPoint cEndPoint = EndPoint.of("default", "customer");
EndPoint pEndPoint = EndPoint.of("default", "purchase");
EndPoint aEndPoint = EndPoint.of("default", "address");
EndPoint acpEndPoint = EndPoint.of("default", "customer_purchase_address");
Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
stageOperations.put("n1", Collections.singletonList(new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id", "name")));
stageOperations.put("n2", Collections.singletonList(new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id", "item")));
stageOperations.put("n3", Collections.singletonList(new FieldReadOperation("ReadAddress", "read description", aEndPoint, "address_id", "address")));
List<FieldOperation> operationsFromJoin = new ArrayList<>();
operationsFromJoin.add(new FieldTransformOperation("Join", "Join Operation", Arrays.asList("n1.id", "n2.customer_id", "n3.address_id"), Arrays.asList("id", "customer_id", "address_id")));
operationsFromJoin.add(new FieldTransformOperation("Rename id", "Rename Operation", Collections.singletonList("id"), Collections.singletonList("id_from_customer")));
operationsFromJoin.add(new FieldTransformOperation("Rename customer.name", "Rename Operation", Collections.singletonList("n1.name"), Collections.singletonList("name_from_customer")));
operationsFromJoin.add(new FieldTransformOperation("Identity address.address", "Identity Operation", Collections.singletonList("n3.address"), Collections.singletonList("address")));
stageOperations.put("n4", operationsFromJoin);
stageOperations.put("n5", Collections.singletonList(new FieldWriteOperation("Write", "Write Operation", acpEndPoint, "id_from_customer", "customer_id", "address_id", "name_from_customer", "address")));
LineageOperationsProcessor processor = new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n4"));
Set<Operation> processedOperations = processor.process();
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id", "name"));
expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id", "item"));
expectedOperations.add(new ReadOperation("n3.ReadAddress", "read description", aEndPoint, "address_id", "address"));
expectedOperations.add(new TransformOperation("n4.Join", "Join Operation",
                                              Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                                                            InputField.of("n2.ReadPurchase", "customer_id"),
                                                            InputField.of("n3.ReadAddress", "address_id")),
                                              "id", "customer_id", "address_id"));
expectedOperations.add(new TransformOperation("n4.Rename id", "Rename Operation",
                                              Collections.singletonList(InputField.of("n4.Join", "id")),
                                              "id_from_customer"));
expectedOperations.add(new TransformOperation("n4.Rename customer.name", "Rename Operation",
                                              Collections.singletonList(InputField.of("n1.ReadCustomer", "name")),
                                              "name_from_customer"));
expectedOperations.add(new TransformOperation("n4.Identity address.address", "Identity Operation",
                                              Collections.singletonList(InputField.of("n3.ReadAddress", "address")),
                                              "address"));
expectedOperations.add(new WriteOperation("n5.Write", "Write Operation", acpEndPoint,
                                          Arrays.asList(InputField.of("n4.Rename id", "id_from_customer"),
                                                        InputField.of("n4.Join", "customer_id"),
                                                        InputField.of("n4.Join", "address_id"),
                                                        InputField.of("n4.Rename customer.name", "name_from_customer"),
                                                        InputField.of("n4.Identity address.address", "address"))));
Assert.assertEquals(expectedOperations, processedOperations);
}
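In the joiner stage n4, input fields are qualified with the originating stage name (n1.id, n2.customer_id, n3.address), which is how the processor resolves them back to n1.ReadCustomer, n2.ReadPurchase and n3.ReadAddress in the expected operations. A reduced two-input sketch of the same convention, with made-up stage names, fields and endpoints:
// Hypothetical two-input join: stage-qualified fields (c.id, p.customer_id) in the join operation.
Set<Connection> joinConnections = ImmutableSet.of(new Connection("c", "j"),
                                                  new Connection("p", "j"),
                                                  new Connection("j", "s"));
EndPoint customers = EndPoint.of("default", "customers");
EndPoint purchases = EndPoint.of("default", "purchases");
EndPoint joined = EndPoint.of("default", "joined");
Map<String, List<FieldOperation>> joinStageOperations = new HashMap<>();
joinStageOperations.put("c", Collections.singletonList(
  new FieldReadOperation("Read", "read customers", customers, "id", "name")));
joinStageOperations.put("p", Collections.singletonList(
  new FieldReadOperation("Read", "read purchases", purchases, "customer_id", "item")));
joinStageOperations.put("j", Collections.singletonList(
  new FieldTransformOperation("Join", "join on id",
                              Arrays.asList("c.id", "p.customer_id"),
                              Arrays.asList("id", "customer_id"))));
joinStageOperations.put("s", Collections.singletonList(
  new FieldWriteOperation("Write", "write joined", joined, "id", "customer_id")));
LineageOperationsProcessor joinProcessor =
  new LineageOperationsProcessor(joinConnections, joinStageOperations, Collections.singleton("j"));
Set<Operation> joinResult = joinProcessor.process();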