Use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.
The class PipelineSpecGenerator, method validateConfig.
/**
* Validate that this is a valid pipeline. A valid pipeline has the following properties:
*
* All stages in the pipeline have a unique name.
* Source stages have at least one output and no inputs.
* Sink stages have at least one input and no outputs.
* There are no cycles in the pipeline.
* All inputs into a stage have the same schema.
* ErrorTransforms only have BatchSource, Transform, or BatchAggregator as input stages.
* AlertPublishers have at least one input and no outputs and don't have SparkSink or BatchSink as input.
* Action stages can only be at the start or end of the pipeline.
* Condition stages have at most 2 outputs. Each stage on a condition's output branch has at most a single input.
*
* Returns the stages in the order they should be configured, so that every stage is configured only
* after all of the stages that provide its input.
*
* @param config the user provided configuration
* @return the order to configure the stages in
* @throws IllegalArgumentException if the pipeline is invalid
*/
protected ValidatedPipeline validateConfig(ETLConfig config) {
  config.validate();
  if (config.getStages().isEmpty()) {
    throw new IllegalArgumentException("A pipeline must contain at least one stage.");
  }

  Set<String> actionStages = new HashSet<>();
  Set<String> conditionStages = new HashSet<>();
  Map<String, String> stageTypes = new HashMap<>();

  // check stage name uniqueness
  Set<String> stageNames = new HashSet<>();
  for (ETLStage stage : config.getStages()) {
    if (!stageNames.add(stage.getName())) {
      throw new IllegalArgumentException(String.format(
        "Invalid pipeline. Multiple stages are named %s. Please ensure all stage names are unique.",
        stage.getName()));
    }
    // if the stage is an action, add it to the action stage set
    if (isAction(stage.getPlugin().getType())) {
      actionStages.add(stage.getName());
    }
    // if the stage is a condition, add it to the condition stage set
    if (stage.getPlugin().getType().equals(Condition.PLUGIN_TYPE)) {
      conditionStages.add(stage.getName());
    }
    stageTypes.put(stage.getName(), stage.getPlugin().getType());
  }

  // check that 'from' and 'to' refer to actual stages.
  // also check that each condition has at most two outgoing connections, one labeled
  // 'true' and one labeled 'false', with neither label used more than once.
  Map<String, Boolean> conditionBranch = new HashMap<>();
  for (Connection connection : config.getConnections()) {
    if (!stageNames.contains(connection.getFrom())) {
      throw new IllegalArgumentException(
        String.format("Invalid connection %s. %s is not a stage.", connection, connection.getFrom()));
    }
    if (!stageNames.contains(connection.getTo())) {
      throw new IllegalArgumentException(
        String.format("Invalid connection %s. %s is not a stage.", connection, connection.getTo()));
    }
    if (conditionStages.contains(connection.getFrom())) {
      if (connection.getCondition() == null) {
        String msg = String.format(
          "For condition stage %s, the connection %s is not marked with either 'true' or 'false'.",
          connection.getFrom(), connection);
        throw new IllegalArgumentException(msg);
      }
      // check if a connection from the condition node is marked as true or false multiple times
      if (conditionBranch.containsKey(connection.getFrom())
        && connection.getCondition().equals(conditionBranch.get(connection.getFrom()))) {
        String msg = String.format(
          "For condition stage '%s', more than one outgoing connection is marked as %s.",
          connection.getFrom(), connection.getCondition());
        throw new IllegalArgumentException(msg);
      }
      conditionBranch.put(connection.getFrom(), connection.getCondition());
    }
  }

  List<ETLStage> traversalOrder = new ArrayList<>(stageNames.size());
  // can only have empty connections if the pipeline consists of a single action.
  if (config.getConnections().isEmpty()) {
    if (actionStages.size() == 1 && stageNames.size() == 1) {
      traversalOrder.add(config.getStages().iterator().next());
      return new ValidatedPipeline(traversalOrder, config);
    } else {
      throw new IllegalArgumentException(
        "Invalid pipeline. There are no connections between stages. "
          + "This is only allowed if the pipeline consists of a single action plugin.");
    }
  }

  Dag dag = new Dag(config.getConnections());
  Set<String> controlStages = Sets.union(actionStages, conditionStages);
  Map<String, ETLStage> stages = new HashMap<>();
  for (ETLStage stage : config.getStages()) {
    String stageName = stage.getName();
    Set<String> stageInputs = dag.getNodeInputs(stageName);
    Set<String> stageOutputs = dag.getNodeOutputs(stageName);
    String stageType = stage.getPlugin().getType();
    boolean isSource = isSource(stageType);
    boolean isSink = isSink(stageType);
    // check that source plugins are sources in the dag
    if (isSource) {
      if (!stageInputs.isEmpty() && !controlStages.containsAll(stageInputs)) {
        throw new IllegalArgumentException(String.format(
          "%s %s has incoming connections from %s. %s stages cannot have any incoming connections.",
          stageType, stageName, Joiner.on(',').join(stageInputs), stageType));
      }
      // check that source plugins are not present after any non-condition/action stage
      Set<String> parents = dag.parentsOf(stageName);
      Set<String> nonControlParents = Sets.difference(parents, controlStages);
      if (nonControlParents.size() > 1) {
        // the stage's nonControlParents should only contain itself
        throw new IllegalArgumentException(String.format(
          "%s %s is invalid. %s stages can only be placed at the start of the pipeline.",
          stageType, stageName, stageType));
      }
    } else if (isSink) {
      if (!stageOutputs.isEmpty() && !controlStages.containsAll(stageOutputs)) {
        throw new IllegalArgumentException(String.format(
          "%s %s has outgoing connections to %s. %s stages cannot have any outgoing connections.",
          stageType, stageName, Joiner.on(',').join(stageOutputs), stageType));
      }
    } else if (ErrorTransform.PLUGIN_TYPE.equals(stageType)) {
      for (String inputStage : stageInputs) {
        String inputType = stageTypes.get(inputStage);
        if (!VALID_ERROR_INPUTS.contains(inputType)) {
          throw new IllegalArgumentException(String.format(
            "ErrorTransform %s cannot have stage %s of type %s as input. Only %s stages can emit errors.",
            stageName, inputStage, inputType, Joiner.on(',').join(VALID_ERROR_INPUTS)));
        }
      }
    }

    boolean isAction = isAction(stageType);
    if (!isAction && !stageType.equals(Condition.PLUGIN_TYPE) && !isSource && stageInputs.isEmpty()) {
      throw new IllegalArgumentException(
        String.format("Stage %s is unreachable, it has no incoming connections.", stageName));
    }
    if (!isAction && !isSink && stageOutputs.isEmpty()) {
      throw new IllegalArgumentException(
        String.format("Stage %s is a dead end, it has no outgoing connections.", stageName));
    }
    stages.put(stageName, stage);
  }

  // make sure actions are not in the middle of the pipeline -- only at the start and/or end
  for (String actionStage : actionStages) {
    Set<String> actionParents = dag.parentsOf(actionStage);
    Set<String> actionChildren = dag.accessibleFrom(actionStage);
    Set<String> nonControlParents = Sets.difference(actionParents, controlStages);
    Set<String> nonControlChildren = Sets.difference(actionChildren, controlStages);
    if (!nonControlChildren.isEmpty() && !nonControlParents.isEmpty()) {
      throw new IllegalArgumentException(String.format(
        "Action stage '%s' is invalid. Actions can only be placed at the start or end of the pipeline.",
        actionStage));
    }
  }

  validateConditionBranches(conditionStages, dag);

  for (String stageName : dag.getTopologicalOrder()) {
    traversalOrder.add(stages.get(stageName));
  }
  return new ValidatedPipeline(traversalOrder, config);
}
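The conditionBranch bookkeeping above is compact and easy to misread: the map stores the label most recently seen for each condition stage, and the error fires only when the same label appears twice, so one 'true' branch plus one 'false' branch passes. Below is a standalone sketch of the same check. The simplified Conn record and the checkConditionBranches helper are hypothetical stand-ins for illustration, not the CDAP classes.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ConditionBranchCheck {

  // hypothetical, simplified stand-in for io.cdap.cdap.etl.proto.Connection
  record Conn(String from, String to, Boolean condition) { }

  static void checkConditionBranches(List<Conn> connections, Set<String> conditionStages) {
    Map<String, Boolean> seenBranch = new HashMap<>();
    for (Conn c : connections) {
      if (!conditionStages.contains(c.from())) {
        continue; // only outgoing connections of condition stages carry labels
      }
      if (c.condition() == null) {
        throw new IllegalArgumentException(String.format(
          "Connection %s -> %s from condition stage %s must be labeled 'true' or 'false'.",
          c.from(), c.to(), c.from()));
      }
      // Map.put returns the previous value, so a repeated label is detected in one call
      Boolean previous = seenBranch.put(c.from(), c.condition());
      if (c.condition().equals(previous)) {
        throw new IllegalArgumentException(String.format(
          "Condition stage %s has more than one outgoing connection labeled %s.",
          c.from(), c.condition()));
      }
    }
  }

  public static void main(String[] args) {
    // two 'true' branches out of the same condition stage -> rejected
    checkConditionBranches(
      List.of(new Conn("cond", "sink1", true), new Conn("cond", "sink2", true)),
      Set.of("cond"));
  }
}

Because only two labels exist, the same check also caps a condition at two labeled outgoing connections: a third connection would necessarily repeat 'true' or 'false'.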
Use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.
The class FieldLineageProcessorTest, method testGeneratedOperations.
@Test
public void testGeneratedOperations() throws Exception {
  // src -> transform1 -> transform2 -> sink
  Schema srcSchema = Schema.recordOf("srcSchema",
                                     Schema.Field.of("body", Schema.of(Schema.Type.STRING)),
                                     Schema.Field.of("offset", Schema.of(Schema.Type.INT)));
  Schema transform1Schema = Schema.recordOf("trans1Schema",
                                            Schema.Field.of("body", Schema.of(Schema.Type.STRING)));
  Schema transform2Schema = Schema.recordOf("trans2Schema",
                                            Schema.Field.of("id", Schema.of(Schema.Type.INT)),
                                            Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  Set<StageSpec> stageSpecs = ImmutableSet.of(
    StageSpec.builder("src", DUMMY_PLUGIN)
      .addOutput(srcSchema, "transform1")
      .build(),
    StageSpec.builder("transform1", DUMMY_PLUGIN)
      .addInputSchema("src", srcSchema)
      .addOutput(transform1Schema, "transform2")
      .build(),
    StageSpec.builder("transform2", DUMMY_PLUGIN)
      .addInputSchema("transform1", transform1Schema)
      .addOutput(transform2Schema, "sink")
      .build(),
    StageSpec.builder("sink", DUMMY_PLUGIN)
      .addInputSchema("transform2", transform2Schema)
      .build());
  Set<Connection> connections = ImmutableSet.of(
    new Connection("src", "transform1"),
    new Connection("transform1", "transform2"),
    new Connection("transform2", "sink"));
  PipelineSpec pipelineSpec = PipelineSpec.builder()
    .addStages(stageSpecs)
    .addConnections(connections)
    .build();
  FieldLineageProcessor processor = new FieldLineageProcessor(pipelineSpec);
  // only the source and sink report operations; the two transforms report empty lists
  Map<String, List<FieldOperation>> fieldOperations = ImmutableMap.of(
    "src", Collections.singletonList(
      new FieldReadOperation("Read", "1st operation", EndPoint.of("file"),
                             ImmutableList.of("body", "offset"))),
    "transform1", Collections.emptyList(),
    "transform2", Collections.emptyList(),
    "sink", Collections.singletonList(
      new FieldWriteOperation("Write", "4th operation", EndPoint.of("sink"),
                              ImmutableList.of("id", "name"))));
  Set<Operation> operations = processor.validateAndConvert(fieldOperations);
  Set<Operation> expected = ImmutableSet.of(
    new ReadOperation("src.Read", "1st operation", EndPoint.of("file"),
                      ImmutableList.of("body", "offset")),
    new TransformOperation("transform1.Transform", "",
                           ImmutableList.of(InputField.of("src.Read", "body"),
                                            InputField.of("src.Read", "offset")),
                           "body"),
    new TransformOperation("transform2.Transform", "",
                           ImmutableList.of(InputField.of("transform1.Transform", "body")),
                           ImmutableList.of("id", "name")),
    new WriteOperation("sink.Write", "4th operation", EndPoint.of("sink"),
                       ImmutableList.of(InputField.of("transform2.Transform", "id"),
                                        InputField.of("transform2.Transform", "name"))));
  Assert.assertEquals(expected, operations);
}
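Note what the assertions imply: transform1 and transform2 report empty operation lists, yet the expected set contains transform1.Transform and transform2.Transform. The processor evidently fills such gaps with generated identity-style transforms, qualifying every operation name with its stage (src.Read) and pointing inputs at the upstream origin. Below is a rough sketch of that synthesis, using hypothetical record types rather than the real FieldLineageProcessor internals.

import java.util.List;

public class IdentityTransformSketch {

  // hypothetical shapes, standing in for the CDAP lineage classes
  record InputField(String origin, String field) { }
  record TransformOp(String name, String description, List<InputField> inputs, List<String> outputs) { }

  // For a stage that reported no field operations, synthesize a transform named
  // "<stage>.Transform" whose inputs are the fields produced by the upstream
  // operation and whose outputs are the stage's output schema fields. This mirrors
  // what the test above expects for transform1 and transform2.
  static TransformOp identityTransform(String stageName, String upstreamOperation,
                                       List<String> inputFields, List<String> outputFields) {
    List<InputField> inputs = inputFields.stream()
        .map(f -> new InputField(upstreamOperation, f))
        .toList();
    return new TransformOp(stageName + ".Transform", "", inputs, outputFields);
  }

  public static void main(String[] args) {
    // transform1 reported nothing: its generated operation reads body+offset from
    // src.Read and outputs the single field in its output schema
    System.out.println(identityTransform("transform1", "src.Read",
                                         List.of("body", "offset"), List.of("body")));
  }
}

The sketch only mirrors what the expected operations show; the real processor presumably derives the input and output field lists from the input and output schemas in the stage specs.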
Use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.
The class LineageOperationProcessorTest, method testSimplePipeline.
@Test
public void testSimplePipeline() {
  // n1 --> n2 --> n3
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading data", EndPoint.of("default", "file"),
                                             "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing data",
                                                  Collections.singletonList("body"),
                                                  Arrays.asList("name", "address", "zip")));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("write", "writing data", EndPoint.of("default", "file2"),
                                              "name", "address", "zip"));
  stageOperations.put("n3", fieldOperations);

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expected = new HashSet<>();
  expected.add(new ReadOperation("n1.read", "reading data", EndPoint.of("default", "file"),
                                 "offset", "body"));
  expected.add(new TransformOperation("n2.parse", "parsing data",
                                      Collections.singletonList(InputField.of("n1.read", "body")),
                                      "name", "address", "zip"));
  expected.add(new WriteOperation("n3.write", "writing data", EndPoint.of("default", "file2"),
                                  InputField.of("n2.parse", "name"),
                                  InputField.of("n2.parse", "address"),
                                  InputField.of("n2.parse", "zip")));
  Assert.assertEquals(new FieldLineageInfo(expected), new FieldLineageInfo(processedOperations));
}
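The core job visible in this test is origin resolution: each plain field name consumed by a downstream operation is rewritten as an InputField naming the qualified operation that last produced it, so 'body' entering n2.parse becomes InputField.of("n1.read", "body"). For a linear pipeline that bookkeeping reduces to a single pass in topological order; the sketch below uses hypothetical local types, not the LineageOperationsProcessor internals.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OriginTrackingSketch {

  // hypothetical, simplified operation: qualified name plus plain field names
  record Op(String qualifiedName, List<String> inputs, List<String> outputs) { }

  public static void main(String[] args) {
    // the same linear pipeline as the test: n1.read -> n2.parse -> n3.write
    List<Op> topologicalOrder = List.of(
        new Op("n1.read", List.of(), List.of("offset", "body")),
        new Op("n2.parse", List.of("body"), List.of("name", "address", "zip")),
        new Op("n3.write", List.of("name", "address", "zip"), List.of()));

    // walk in topological order, remembering which operation last produced each field
    Map<String, String> lastProducer = new HashMap<>();
    for (Op op : topologicalOrder) {
      for (String in : op.inputs()) {
        // this pair is what becomes InputField.of(origin, field)
        System.out.printf("%s consumes '%s' produced by %s%n",
                          op.qualifiedName(), in, lastProducer.get(in));
      }
      for (String out : op.outputs()) {
        lastProducer.put(out, op.qualifiedName());
      }
    }
  }
}

Branching pipelines complicate this (the join test below passes stage-prefixed field names for exactly that reason), but the last-producer idea stays the same.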
Use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.
The class LineageOperationProcessorTest, method testSameKeyAndRenameJoin.
@Test
public void testSameKeyAndRenameJoin() {
  // n1(id(key), swap1, n1same) ---------
  //                                     |
  //                                    JOIN -------> (id, new_id, swap1, swap2, n1same, n2same)
  //                                     |
  // n2(id(key), swap2, n2same) ---------
  //
  // operations: (n1.id, n2.id) -> id
  //             (n2.id)        -> new_id
  //             (n1.swap1)     -> swap2
  //             (n2.swap2)     -> swap1
  //             (n1.n1same)    -> n1same
  //             (n2.n2same)    -> n2same
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n3"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n3", "n4"));

  EndPoint src1 = EndPoint.of("default", "n1");
  EndPoint src2 = EndPoint.of("default", "n2");
  EndPoint dest = EndPoint.of("default", "n4");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  stageOperations.put("n1", Collections.singletonList(
    new FieldReadOperation("readSrc1", "read description", src1, "id", "swap1", "n1same")));
  stageOperations.put("n2", Collections.singletonList(
    new FieldReadOperation("readSrc2", "read description", src2, "id", "swap2", "n2same")));

  List<FieldOperation> joinOperations = stageOperations.computeIfAbsent("n3", k -> new ArrayList<>());
  joinOperations.add(new FieldTransformOperation("JoinKey", "Join Key", Arrays.asList("n1.id", "n2.id"), "id"));
  joinOperations.add(new FieldTransformOperation("RenameN2", "rename", Collections.singletonList("n2.id"), "new_id"));
  joinOperations.add(new FieldTransformOperation("swap1", "swap", Collections.singletonList("n1.swap1"), "swap2"));
  joinOperations.add(new FieldTransformOperation("swap2", "swap", Collections.singletonList("n2.swap2"), "swap1"));
  joinOperations.add(new FieldTransformOperation("unchange1", "unchange", Collections.singletonList("n1.n1same"), "n1same"));
  joinOperations.add(new FieldTransformOperation("unchange2", "unchange", Collections.singletonList("n2.n2same"), "n2same"));

  stageOperations.put("n4", Collections.singletonList(
    new FieldWriteOperation("Write", "write description", dest,
                            "id", "new_id", "swap1", "swap2", "n1same", "n2same")));

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.singleton("n3"));

  Set<Operation> expectedOperations = new HashSet<>();
  expectedOperations.add(new ReadOperation("n1.readSrc1", "read description", src1, "id", "swap1", "n1same"));
  expectedOperations.add(new ReadOperation("n2.readSrc2", "read description", src2, "id", "swap2", "n2same"));
  expectedOperations.add(new TransformOperation("n3.JoinKey", "Join Key",
    Arrays.asList(InputField.of("n1.readSrc1", "id"), InputField.of("n2.readSrc2", "id")), "id"));
  expectedOperations.add(new TransformOperation("n3.RenameN2", "rename",
    Collections.singletonList(InputField.of("n2.readSrc2", "id")), "new_id"));
  expectedOperations.add(new TransformOperation("n3.swap1", "swap",
    Collections.singletonList(InputField.of("n1.readSrc1", "swap1")), "swap2"));
  expectedOperations.add(new TransformOperation("n3.swap2", "swap",
    Collections.singletonList(InputField.of("n2.readSrc2", "swap2")), "swap1"));
  expectedOperations.add(new TransformOperation("n3.unchange1", "unchange",
    Collections.singletonList(InputField.of("n1.readSrc1", "n1same")), "n1same"));
  expectedOperations.add(new TransformOperation("n3.unchange2", "unchange",
    Collections.singletonList(InputField.of("n2.readSrc2", "n2same")), "n2same"));
  expectedOperations.add(new WriteOperation("n4.Write", "write description", dest,
    Arrays.asList(InputField.of("n3.JoinKey", "id"),
                  InputField.of("n3.RenameN2", "new_id"),
                  InputField.of("n3.swap2", "swap1"),
                  InputField.of("n3.swap1", "swap2"),
                  InputField.of("n3.unchange1", "n1same"),
                  InputField.of("n3.unchange2", "n2same"))));
  Assert.assertEquals(expectedOperations, processor.process());
}
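Two details reward a close read here. First, the join stage n3 is the single entry in the set passed as the processor's third argument, and its operations refer to inputs by stage-prefixed field names (n1.id, n2.id), which keeps references to the identically named id fields on the two branches unambiguous. Second, the operation names and field names cross over deliberately: the operation named swap1 outputs the field swap2 and vice versa, which is why the expected write traces field swap1 to the operation n3.swap2 and field swap2 to n3.swap1.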
Use of io.cdap.cdap.etl.proto.Connection in project cdap by cdapio.
The class LineageOperationProcessorTest, method testSourceWithMultipleDestinations.
@Test
public void testSourceWithMultipleDestinations() {
  //              |----> n3
  // n1---->n2----|
  //              |----> n4
  //
  // n1 => read: file -> (offset, body)
  // n2 => parse: body -> (id, name, address, zip)
  // n3 => write1: (parse.id, parse.name) -> info
  // n4 => write2: (parse.address, parse.zip) -> location
  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection("n1", "n2"));
  connections.add(new Connection("n2", "n3"));
  connections.add(new Connection("n2", "n4"));

  EndPoint source = EndPoint.of("ns", "file");
  EndPoint info = EndPoint.of("ns", "info");
  EndPoint location = EndPoint.of("ns", "location");

  Map<String, List<FieldOperation>> stageOperations = new HashMap<>();
  List<FieldOperation> fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldReadOperation("read", "reading from file", source, "offset", "body"));
  stageOperations.put("n1", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldTransformOperation("parse", "parsing body",
                                                  Collections.singletonList("body"),
                                                  "id", "name", "address", "zip"));
  stageOperations.put("n2", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("infoWrite", "writing info", info, "id", "name"));
  stageOperations.put("n3", fieldOperations);
  fieldOperations = new ArrayList<>();
  fieldOperations.add(new FieldWriteOperation("locationWrite", "writing location", location, "address", "zip"));
  stageOperations.put("n4", fieldOperations);

  LineageOperationsProcessor processor =
    new LineageOperationsProcessor(connections, stageOperations, Collections.emptySet());
  Set<Operation> processedOperations = processor.process();

  Set<Operation> expectedOperations = new HashSet<>();
  ReadOperation read = new ReadOperation("n1.read", "reading from file", source, "offset", "body");
  expectedOperations.add(read);
  TransformOperation parse = new TransformOperation("n2.parse", "parsing body",
    Collections.singletonList(InputField.of("n1.read", "body")), "id", "name", "address", "zip");
  expectedOperations.add(parse);
  WriteOperation infoWrite = new WriteOperation("n3.infoWrite", "writing info", info,
    InputField.of("n2.parse", "id"), InputField.of("n2.parse", "name"));
  expectedOperations.add(infoWrite);
  WriteOperation locationWrite = new WriteOperation("n4.locationWrite", "writing location", location,
    InputField.of("n2.parse", "address"), InputField.of("n2.parse", "zip"));
  expectedOperations.add(locationWrite);
  Assert.assertEquals(new FieldLineageInfo(expectedOperations), new FieldLineageInfo(processedOperations));
}
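Fan-out requires nothing special in the expected lineage: the single n2.parse operation appears once, and both n3.infoWrite and n4.locationWrite reference it as the origin of their input fields. As in testSimplePipeline, the assertion compares FieldLineageInfo wrappers rather than the raw operation sets.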