Use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testSameKeyAndRenameJoin.
@Test
public void testSameKeyAndRenameJoin() {
  // n1(id(key), swap1, n1same) --------|
  //                                    |
  //                                  JOIN ------> (id, new_id, swap1, swap2, n1same, n2same)
  //                                    |
  // n2(id(key), swap2, n2same) --------|
  //
  // operations: (n1.id, n2.id) -> id
  //             (n2.id)        -> new_id
  //             (n1.swap1)     -> swap2
  //             (n2.swap2)     -> swap1
  //             (n1.n1same)    -> n1same
  //             (n2.n2same)    -> n2same
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));

  EndPoint src1 = EndPoint.of("default", "n1");
  EndPoint src2 = EndPoint.of("default", "n2");
  EndPoint dest = EndPoint.of("default", "n4");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("readSrc1", "read description", src1, "id", "swap1", "n1same")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("readSrc2", "read description", src2, "id", "swap2", "n2same")));

  List<FieldOperation> joinOperations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
  joinOperations.add(new FieldTransformOperation("JoinKey", "Join Key",
                                                 Arrays.asList("n1.id", "n2.id"), "id"));
  joinOperations.add(new FieldTransformOperation("RenameN2", "rename",
                                                 Collections.singletonList("n2.id"), "new_id"));
  joinOperations.add(new FieldTransformOperation("swap1", "swap",
                                                 Collections.singletonList("n1.swap1"), "swap2"));
  joinOperations.add(new FieldTransformOperation("swap2", "swap",
                                                 Collections.singletonList("n2.swap2"), "swap1"));
  joinOperations.add(new FieldTransformOperation("unchange1", "unchange",
                                                 Collections.singletonList("n1.n1same"), "n1same"));
  joinOperations.add(new FieldTransformOperation("unchange2", "unchange",
                                                 Collections.singletonList("n2.n2same"), "n2same"));

  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", dest,
                            "id", "new_id", "swap1", "swap2", "n1same", "n2same")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.readSrc1", "read description", src1,
                                           "id", "swap1", "n1same"));
  expectedOperations.add(new ReadOperation("n2.readSrc2", "read description", src2,
                                           "id", "swap2", "n2same"));
  expectedOperations.add(new TransformOperation("n3.JoinKey", "Join Key",
    Arrays.asList(InputField.of("n1.readSrc1", "id"), InputField.of("n2.readSrc2", "id")), "id"));
  expectedOperations.add(new TransformOperation("n3.RenameN2", "rename",
    Collections.singletonList(InputField.of("n2.readSrc2", "id")), "new_id"));
  expectedOperations.add(new TransformOperation("n3.swap1", "swap",
    Collections.singletonList(InputField.of("n1.readSrc1", "swap1")), "swap2"));
  expectedOperations.add(new TransformOperation("n3.swap2", "swap",
    Collections.singletonList(InputField.of("n2.readSrc2", "swap2")), "swap1"));
  expectedOperations.add(new TransformOperation("n3.unchange1", "unchange",
    Collections.singletonList(InputField.of("n1.readSrc1", "n1same")), "n1same"));
  expectedOperations.add(new TransformOperation("n3.unchange2", "unchange",
    Collections.singletonList(InputField.of("n2.readSrc2", "n2same")), "n2same"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", dest,
    Arrays.asList(InputField.of("n3.JoinKey", "id"),
                  InputField.of("n3.RenameN2", "new_id"),
                  InputField.of("n3.swap2", "swap1"),
                  InputField.of("n3.swap1", "swap2"),
                  InputField.of("n3.unchange1", "n1same"),
                  InputField.of("n3.unchange2", "n2same"))));

  Assert.assertEquals(expectedOperations, processor.process());
}
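Note the naming convention the test asserts: each pipeline-level FieldOperation comes back as a platform-level Operation whose name is prefixed with its stage ("JoinKey" becomes "n3.JoinKey"), and because "n3" is in the no-merge set, its inputs are written as <inputStage>.<field> and resolved to the operation that last produced that field. A minimal sketch of the convention, restating what the expected operations above confirm:

// Inputs of a join stage are qualified as "<inputStage>.<field>".
FieldTransformOperation joinKey =
  new FieldTransformOperation("JoinKey", "Join Key", Arrays.asList("n1.id", "n2.id"), "id");
// Within stage "n3" this surfaces as the platform operation "n3.JoinKey",
// and "n1.id" resolves to InputField.of("n1.readSrc1", "id") because
// readSrc1 is the operation in stage n1 that produced "id".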
Use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testSourceWithMultipleDestinations.
@Test
public void testSourceWithMultipleDestinations() {
  //             |--> n3
  // n1 --> n2 --|
  //             |--> n4
  //
  // n1 => read:   file -> (offset, body)
  // n2 => parse:  body -> (id, name, address, zip)
  // n3 => write1: (parse.id, parse.name) -> info
  // n4 => write2: (parse.address, parse.zip) -> location
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n2", "n4"));

  EndPoint source = EndPoint.of("ns", "file");
  EndPoint info = EndPoint.of("ns", "info");
  EndPoint location = EndPoint.of("ns", "location");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading from file", source, "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing body",
                                                  Collections.singletonList("body"),
                                                  "id", "name", "address", "zip"));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("infoWrite", "writing info", info, "id", "name"));
  stageOperations.put("n3", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("locationWrite", "writing location", location,
                                              "address", "zip"));
  stageOperations.put("n4", fieldOperations);

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  ReadOperation read = new ReadOperation("n1.read", "reading from file", source, "offset", "body");
  expectedOperations.add(read);
  TransformOperation parse =
    new TransformOperation("n2.parse", "parsing body",
                           Collections.singletonList(InputField.of("n1.read", "body")),
                           "id", "name", "address", "zip");
  expectedOperations.add(parse);
  WriteOperation infoWrite = new WriteOperation("n3.infoWrite", "writing info", info,
                                                InputField.of("n2.parse", "id"),
                                                InputField.of("n2.parse", "name"));
  expectedOperations.add(infoWrite);
  WriteOperation locationWrite = new WriteOperation("n4.locationWrite", "writing location", location,
                                                    InputField.of("n2.parse", "address"),
                                                    InputField.of("n2.parse", "zip"));
  expectedOperations.add(locationWrite);

  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
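The third constructor argument is what distinguishes this test from the join test: stages with multiple input stages normally have an implicit merge operation inserted, unless they are named in that set (joiners combine their inputs themselves, as the comment in validateAndConvert below also notes). A side-by-side sketch of the two calls used in these tests:

// Join test: "n3" combines its inputs itself, so no implicit merge is added for it.
new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));
// Pipelines without a joiner: pass an empty set.
new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());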
Use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From the class LineageOperationProcessorTest, method testSimplePipeline.
@Test
public void testSimplePipeline() {
  // n1 --> n2 --> n3
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading data",
                                             EndPoint.of("default", "file"), "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing data",
                                                  Collections.singletonList("body"),
                                                  Arrays.asList("name", "address", "zip")));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write", "writing data",
                                              EndPoint.of("default", "file2"),
                                              "name", "address", "zip"));
  stageOperations.put("n3", fieldOperations);

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.read", "reading data",
                                 EndPoint.of("default", "file"), "offset", "body"));
  expected.add(new TransformOperation("n2.parse", "parsing data",
                                      Collections.singletonList(InputField.of("n1.read", "body")),
                                      "name", "address", "zip"));
  expected.add(new WriteOperation("n3.write", "writing data", EndPoint.of("default", "file2"),
                                  InputField.of("n2.parse", "name"),
                                  InputField.of("n2.parse", "address"),
                                  InputField.of("n2.parse", "zip")));

  Assert.assertEquals(new FieldLineageInfo(expected), new FieldLineageInfo(processedOperations));
}
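As the two tests above show, FieldTransformOperation accepts its output fields either as varargs or as a List; both constructor forms describe the same operation. A quick sketch of the two equivalent spellings:

// Varargs outputs (as in testSourceWithMultipleDestinations):
new FieldTransformOperation("parse", "parsing body",
                            Collections.singletonList("body"),
                            "id", "name", "address", "zip");
// List outputs (as in testSimplePipeline):
new FieldTransformOperation("parse", "parsing data",
                            Collections.singletonList("body"),
                            Arrays.asList("name", "address", "zip"));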
Use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From the class FieldLineageProcessor, method validateAndConvert.
public Set<Operation> validateAndConvert(Map<String, List<FieldOperation>> allStageOperations) {
  Map<String, List<FieldOperation>> allOperations = new HashMap<>(allStageOperations);
  // Stages for which no implicit merge operation is required even if the
  // stage has multiple inputs, e.g. join stages.
  Set<String> noMergeRequiredStages = new HashSet<>();
  for (StageSpec stageSpec : pipelineSpec.getStages()) {
    if (BatchJoiner.PLUGIN_TYPE.equals(stageSpec.getPlugin().getType())) {
      noMergeRequiredStages.add(stageSpec.getName());
    }
  }

  // Validate the stage operations.
  Map<String, InvalidFieldOperations> stageInvalids = new HashMap<>();
  Map<String, Map<String, List<String>>> stageRedundants = new HashMap<>();
  for (StageSpec stageSpec : pipelineSpec.getStages()) {
    Map<String, Schema> inputSchemas = stageSpec.getInputSchemas();
    // TODO: CDAP-16428 populate the schema if macro is enabled to avoid this
    if (inputSchemas == null) {
      LOG.warn("Field lineage will not be recorded since the input schema is not set.");
      return Collections.emptySet();
    }
    // If the current stage is a join, qualify each input field as
    // <inputStageName>.<fieldName>.
    List<String> stageInputs = new ArrayList<>();
    List<String> stageOutputs = new ArrayList<>();
    if (BatchJoiner.PLUGIN_TYPE.equals(stageSpec.getPlugin().getType())) {
      for (Map.Entry<String, Schema> entry : inputSchemas.entrySet()) {
        Schema schema = entry.getValue();
        if (schema != null && schema.getFields() != null) {
          stageInputs.addAll(schema.getFields().stream()
                               .map(field -> entry.getKey() + "." + field.getName())
                               .collect(Collectors.toList()));
        }
      }
    } else {
      for (Map.Entry<String, Schema> entry : inputSchemas.entrySet()) {
        Schema schema = entry.getValue();
        if (schema != null && schema.getFields() != null) {
          stageInputs.addAll(schema.getFields().stream()
                               .map(Schema.Field::getName)
                               .collect(Collectors.toList()));
        }
      }
    }
    Schema outputSchema = stageSpec.getOutputSchema();
    if (outputSchema != null && outputSchema.getFields() != null) {
      stageOutputs.addAll(outputSchema.getFields().stream()
                            .map(Schema.Field::getName)
                            .collect(Collectors.toList()));
    }
    String stageName = stageSpec.getName();
    // Only auto-generate operations for stages that have both an input and
    // an output schema.
    if (!stageInputs.isEmpty() && !stageOutputs.isEmpty()) {
      allOperations.compute(stageName, (stage, fieldOperations) -> {
        // If the stage recorded nothing, fall back to a single catch-all
        // transform from all inputs to all outputs.
        if (fieldOperations == null || fieldOperations.isEmpty()) {
          return Collections.singletonList(
            new FieldTransformOperation("Transform", "", stageInputs, stageOutputs));
        }
        return fieldOperations;
      });
    }
    List<FieldOperation> fieldOperations =
      allOperations.computeIfAbsent(stageName, stage -> Collections.emptyList());
    StageOperationsValidator.Builder builder = new StageOperationsValidator.Builder(fieldOperations);
    builder.addStageInputs(stageInputs);
    builder.addStageOutputs(stageOutputs);
    StageOperationsValidator stageOperationsValidator = builder.build();
    stageOperationsValidator.validate();
    LOG.trace("Stage name: {}", stageName);
    LOG.trace("Stage operations: {}", GSON.toJson(fieldOperations));
    LOG.trace("Stage inputs: {}", stageInputs);
    LOG.trace("Stage outputs: {}", stageOutputs);
    InvalidFieldOperations invalidFieldOperations = stageOperationsValidator.getStageInvalids();
    if (invalidFieldOperations != null) {
      stageInvalids.put(stageName, invalidFieldOperations);
    }
    if (!stageOperationsValidator.getRedundantOutputs().isEmpty()) {
      stageRedundants.put(stageName, stageOperationsValidator.getRedundantOutputs());
    }
  }
  if (!stageRedundants.isEmpty()) {
    LOG.debug("The pipeline has redundant operations {}; they will be ignored", stageRedundants);
  }
  if (!stageInvalids.isEmpty()) {
    // Do not throw; just log the message for the validation failure. Once
    // most plugins are updated to write lineage, the exception can be thrown.
    LOG.debug(new InvalidLineageException(stageInvalids).getMessage());
  }
  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(pipelineSpec.getConnections(), allOperations, noMergeRequiredStages);
  return processor.process();
}
Use of io.cdap.cdap.etl.api.lineage.field.FieldTransformOperation in project cdap by caskdata.
From the class IdentityTransform, method prepareRun.
@Override
public void prepareRun(StageSubmitterContext context) throws Exception {
  super.prepareRun(context);
  Schema schema = context.getInputSchema();
  if (schema != null && schema.getFields() != null) {
    // Record one identity operation per input field: each field maps to itself.
    schema.getFields().stream()
      .map(Schema.Field::getName)
      .collect(Collectors.toList())
      .forEach(field -> context.record(Collections.singletonList(
        new FieldTransformOperation("Identity transform " + field, "Identity transform",
                                    Collections.singletonList(field), field))));
  }
}
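The same pattern extends to non-identity plugins: record one FieldTransformOperation per logical transformation from prepareRun. A hypothetical sketch of a parse transform, using only the record() call and constructor seen above; the operation and field names are illustrative:

@Override
public void prepareRun(StageSubmitterContext context) throws Exception {
  super.prepareRun(context);
  // Hypothetical: declare that "body" is parsed into "name" and "address".
  context.record(Collections.singletonList(
    new FieldTransformOperation("Parse body", "parse body into fields",
                                Collections.singletonList("body"), "name", "address")));
}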