Example use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From class StageOperationsValidatorTest, method testUnusedInvalidInputs.
@Test
public void testUnusedInvalidInputs() {
  // A write operation sends every one of its fields to the endpoint. "zip" is listed by the
  // operation but is missing from the stage input schema, so it must be reported as invalid.
  FieldOperation write =
    new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"), "name", "address", "zip");
  StageOperationsValidator.Builder writeBuilder =
    new StageOperationsValidator.Builder(Collections.singletonList(write));
  writeBuilder.addStageInputs(Arrays.asList("name", "address"));
  StageOperationsValidator writeValidator = writeBuilder.build();
  writeValidator.validate();
  Assert.assertEquals(
    new InvalidFieldOperations(ImmutableMap.of("zip", Collections.singletonList("write")),
                               Collections.emptyMap()),
    writeValidator.getStageInvalids());

  // The transform reads "nonexist", which is not part of the stage input schema, so the
  // operation is invalid.
  FieldTransformOperation transform =
    new FieldTransformOperation("transform", "description", ImmutableList.of("nonexist"), ImmutableList.of("val2"));
  StageOperationsValidator.Builder transformBuilder =
    new StageOperationsValidator.Builder(Collections.singletonList(transform));
  transformBuilder.addStageInputs(Collections.singletonList("exist"));
  transformBuilder.addStageOutputs(Collections.singletonList("val2"));
  StageOperationsValidator transformValidator = transformBuilder.build();
  transformValidator.validate();
  Assert.assertEquals(
    new InvalidFieldOperations(Collections.singletonMap("nonexist", Collections.singletonList("transform")),
                               Collections.emptyMap()),
    transformValidator.getStageInvalids());
}
Example use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From class StageOperationsValidatorTest, method testUnusedValidOutputs.
@Test
public void testUnusedValidOutputs() {
  // The read operation produces [offset, body] while the output schema only contains [body];
  // an output field that is simply unused must not be flagged as invalid.
  FieldOperation read =
    new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"), "offset", "body");
  StageOperationsValidator.Builder readBuilder =
    new StageOperationsValidator.Builder(Collections.singletonList(read));
  readBuilder.addStageOutputs(Collections.singleton("body"));
  StageOperationsValidator readValidator = readBuilder.build();
  readValidator.validate();
  Assert.assertNull(readValidator.getStageInvalids());

  // The transform maps [body] -> [name, address, zip] while the output schema only has
  // [name, address]; this stays valid because every generated field comes from an input field.
  FieldOperation parse =
    new FieldTransformOperation("parse", "parsing data", Collections.singletonList("body"),
                                Arrays.asList("name", "address", "zip"));
  StageOperationsValidator.Builder parseBuilder =
    new StageOperationsValidator.Builder(Collections.singletonList(parse));
  parseBuilder.addStageInputs(Collections.singleton("body"));
  parseBuilder.addStageOutputs(Arrays.asList("name", "address"));
  StageOperationsValidator parseValidator = parseBuilder.build();
  parseValidator.validate();
  Assert.assertNull(parseValidator.getStageInvalids());

  // Simulate a joiner with operations [d1.k1, d2.k2] -> [k1, k2], [d1.val1] -> [val1] and
  // [d2.val2] -> [val2]. Inputs are [d1.k1, d2.k2, d1.val1, d2.val2], outputs [val1, val2].
  // Still valid: d1.k1 and d2.k2 are in the input schema even though k1 and k2 never reach
  // the output schema.
  FieldTransformOperation joinKeys =
    new FieldTransformOperation("transform1", "description1", ImmutableList.of("d1.k1", "d2.k2"),
                                ImmutableList.of("k1", "k2"));
  FieldTransformOperation identityVal1 =
    new FieldTransformOperation("transform2", "description2", ImmutableList.of("d1.val1"), ImmutableList.of("val1"));
  FieldTransformOperation identityVal2 =
    new FieldTransformOperation("transform3", "description3", ImmutableList.of("d2.val2"), ImmutableList.of("val2"));
  StageOperationsValidator.Builder joinBuilder =
    new StageOperationsValidator.Builder(ImmutableList.of(joinKeys, identityVal1, identityVal2));
  joinBuilder.addStageInputs(ImmutableList.of("d1.k1", "d2.k2", "d1.val1", "d2.val2"));
  joinBuilder.addStageOutputs(ImmutableList.of("val1", "val2"));
  StageOperationsValidator joinValidator = joinBuilder.build();
  joinValidator.validate();
  Assert.assertNull(joinValidator.getStageInvalids());
}
Example use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From class StageOperationsValidatorTest, method testRedundantOutputUsedAsInput.
@Test
public void testRedundantOutputUsedAsInput() {
List<FieldOperation> pipelineOperations = new ArrayList<>();
pipelineOperations.add(new FieldTransformOperation("redundant_parse1", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("redundant_parse2", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("non_redundant_parse", "parsing data", Collections.singletonList("body"), "name"));
pipelineOperations.add(new FieldTransformOperation("parse", "parsing data", Arrays.asList("body", "name"), Arrays.asList("name", "address", "zip")));
StageOperationsValidator.Builder builder = new StageOperationsValidator.Builder(pipelineOperations);
builder.addStageInputs(Arrays.asList("offset", "body"));
builder.addStageOutputs(Arrays.asList("name", "address", "zip"));
StageOperationsValidator stageOperationsValidator = builder.build();
stageOperationsValidator.validate();
Map<String, List<String>> expected = ImmutableMap.of("name", Arrays.asList("redundant_parse1", "redundant_parse2"));
Assert.assertEquals(expected, stageOperationsValidator.getRedundantOutputs());
}
Example use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From class LineageOperationProcessorTest, method testJoinWith3Inputs.
@Test
public void testJoinWith3Inputs() {
  // Pipeline: three sources joined into one sink.
  //
  //   customer (id, name) ----------------|
  //   purchase (customer_id, item) -------> JOIN ---> (id_from_customer, customer_id,
  //   address (address_id, address) ------|            address_id, name_from_customer, address)
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n4"));
  connections.add(new Connection("n2", "n4"));
  connections.add(new Connection("n3", "n4"));
  connections.add(new Connection("n4", "n5"));

  EndPoint cEndPoint = EndPoint.of("default", "customer");
  EndPoint pEndPoint = EndPoint.of("default", "purchase");
  EndPoint aEndPoint = EndPoint.of("default", "address");
  EndPoint acpEndPoint = EndPoint.of("default", "customer_purchase_address");

  // Per-stage field operations: three reads, the join stage, and a write.
  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id", "name")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id", "item")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldReadOperation("ReadAddress", "read description", aEndPoint, "address_id", "address")));

  List<FieldOperation> joinOperations = new ArrayList<>();
  joinOperations.add(new FieldTransformOperation("Join", "Join Operation",
    Arrays.asList("n1.id", "n2.customer_id", "n3.address_id"),
    Arrays.asList("id", "customer_id", "address_id")));
  joinOperations.add(new FieldTransformOperation("Rename id", "Rename Operation",
    Collections.singletonList("id"), Collections.singletonList("id_from_customer")));
  joinOperations.add(new FieldTransformOperation("Rename customer.name", "Rename Operation",
    Collections.singletonList("n1.name"), Collections.singletonList("name_from_customer")));
  joinOperations.add(new FieldTransformOperation("Identity address.address", "Identity Operation",
    Collections.singletonList("n3.address"), Collections.singletonList("address")));
  stageOperations.put("n4", joinOperations);

  stageOperations.put("n5", Collections.singletonList(
    new FieldWriteOperation("Write", "Write Operation", acpEndPoint,
                            "id_from_customer", "customer_id", "address_id", "name_from_customer", "address")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n4"));
  Set<Operation> processedOperations = processor.process();

  // The processor should prefix operation names with their stage and wire each input field
  // back to the operation that produced it.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id", "name"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id", "item"));
  expectedOperations.add(new ReadOperation("n3.ReadAddress", "read description", aEndPoint, "address_id", "address"));
  expectedOperations.add(new TransformOperation("n4.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"),
                  InputField.of("n2.ReadPurchase", "customer_id"),
                  InputField.of("n3.ReadAddress", "address_id")),
    "id", "customer_id", "address_id"));
  expectedOperations.add(new TransformOperation("n4.Rename id", "Rename Operation",
    Collections.singletonList(InputField.of("n4.Join", "id")), "id_from_customer"));
  expectedOperations.add(new TransformOperation("n4.Rename customer.name", "Rename Operation",
    Collections.singletonList(InputField.of("n1.ReadCustomer", "name")), "name_from_customer"));
  expectedOperations.add(new TransformOperation("n4.Identity address.address", "Identity Operation",
    Collections.singletonList(InputField.of("n3.ReadAddress", "address")), "address"));
  expectedOperations.add(new WriteOperation("n5.Write", "Write Operation", acpEndPoint,
    Arrays.asList(InputField.of("n4.Rename id", "id_from_customer"),
                  InputField.of("n4.Join", "customer_id"),
                  InputField.of("n4.Join", "address_id"),
                  InputField.of("n4.Rename customer.name", "name_from_customer"),
                  InputField.of("n4.Identity address.address", "address"))));
  Assert.assertEquals(expectedOperations, processedOperations);
}
Example use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From class LineageOperationProcessorTest, method testSimpleJoinOperation.
@Test
public void testSimpleJoinOperation() {
  // Pipeline: two sources joined into one sink.
  //
  //   customer (id) ----------|
  //                           JOIN ---> (id, customer_id)
  //   purchase (customer_id) -|
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));

  EndPoint cEndPoint = EndPoint.of("default", "customer");
  EndPoint pEndPoint = EndPoint.of("default", "purchase");
  EndPoint cpEndPoint = EndPoint.of("default", "customer_purchase");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("ReadCustomer", "read description", cEndPoint, "id")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("ReadPurchase", "read description", pEndPoint, "customer_id")));
  stageOperations.put("n3", Collections.singletonList(
    new FieldTransformOperation("Join", "Join Operation",
                                Arrays.asList("n1.id", "n2.customer_id"),
                                Arrays.asList("id", "customer_id"))));
  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", cpEndPoint, "id", "customer_id")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));

  // Expect stage-prefixed operation names with the join inputs resolved to the two reads.
  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.ReadCustomer", "read description", cEndPoint, "id"));
  expectedOperations.add(new ReadOperation("n2.ReadPurchase", "read description", pEndPoint, "customer_id"));
  expectedOperations.add(new TransformOperation("n3.Join", "Join Operation",
    Arrays.asList(InputField.of("n1.ReadCustomer", "id"), InputField.of("n2.ReadPurchase", "customer_id")),
    "id", "customer_id"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", cpEndPoint,
    Arrays.asList(InputField.of("n3.Join", "id"), InputField.of("n3.Join", "customer_id"))));
  Assert.assertEquals(expectedOperations, processor.process());
}
Aggregations