Use of org.apache.flink.api.dag.Transformation in project flink by apache.
In class StreamGraphGeneratorExecutionModeDetectionTest, method generateStreamGraph.
private StreamGraph generateStreamGraph(
        final RuntimeExecutionMode initMode, final Transformation<?>... transformations) {
    final List<Transformation<?>> registeredTransformations = new ArrayList<>();
    Collections.addAll(registeredTransformations, transformations);

    // Set the requested runtime mode (STREAMING, BATCH, or AUTOMATIC) before generating.
    final Configuration configuration = new Configuration();
    configuration.set(ExecutionOptions.RUNTIME_MODE, initMode);

    return new StreamGraphGenerator(
                    registeredTransformations, new ExecutionConfig(), new CheckpointConfig(), configuration)
            .generate();
}
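As a rough illustration of how such a helper might be exercised (not taken from the test class itself), the following sketch forces BATCH mode and checks the resulting job type. The pipeline is invented for illustration, and the assertion assumes StreamGraph exposes the detected mode via getJobType():

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.runtime.jobgraph.JobType;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;
import org.apache.flink.streaming.api.graph.StreamGraph;

@Test
public void testExplicitBatchMode() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Any pipeline works here; boundedness-based detection only matters in AUTOMATIC mode.
    final Transformation<?> sink =
            env.fromElements(1, 2, 3).addSink(new DiscardingSink<>()).getTransformation();

    final StreamGraph graph = generateStreamGraph(RuntimeExecutionMode.BATCH, sink);

    // Assumption: the explicitly configured BATCH mode is reflected in the job type.
    assertEquals(JobType.BATCH, graph.getJobType());
}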
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
In class DefaultExecutorTest, method testDefaultBatchProperties.
@Test
public void testDefaultBatchProperties() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final Executor executor = new DefaultExecutor(env);
    final List<Transformation<?>> dummyTransformations =
            Collections.singletonList(
                    env.fromElements(1, 2, 3)
                            .addSink(new DiscardingSink<>())
                            .getTransformation());

    final Configuration configuration = new Configuration();
    configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

    final StreamGraph streamGraph =
            (StreamGraph) executor.createPipeline(dummyTransformations, configuration, "Default Name");

    // Batch defaults: object reuse on, no latency tracking, chaining on, no shared
    // slot sharing group, checkpointing off, all exchanges blocking.
    assertTrue(streamGraph.getExecutionConfig().isObjectReuseEnabled());
    assertEquals(0, streamGraph.getExecutionConfig().getLatencyTrackingInterval());
    assertTrue(streamGraph.isChainingEnabled());
    assertFalse(streamGraph.isAllVerticesInSameSlotSharingGroupByDefault());
    assertFalse(streamGraph.getCheckpointConfig().isCheckpointingEnabled());
    assertEquals(
            GlobalStreamExchangeMode.ALL_EDGES_BLOCKING,
            streamGraph.getGlobalStreamExchangeMode());
}
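For contrast, here is a hypothetical streaming counterpart to the test above. It is a sketch, not a test copied from the project; the expected exchange mode is an assumption based on Flink's usual streaming default of pipelined exchanges:

@Test
public void testDefaultStreamingProperties() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final Executor executor = new DefaultExecutor(env);
    final List<Transformation<?>> dummyTransformations =
            Collections.singletonList(
                    env.fromElements(1, 2, 3)
                            .addSink(new DiscardingSink<>())
                            .getTransformation());

    final Configuration configuration = new Configuration();
    configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.STREAMING);

    final StreamGraph streamGraph =
            (StreamGraph) executor.createPipeline(dummyTransformations, configuration, "Default Name");

    // Assumption: in streaming mode all exchanges are expected to stay pipelined.
    assertEquals(
            GlobalStreamExchangeMode.ALL_EDGES_PIPELINED,
            streamGraph.getGlobalStreamExchangeMode());
}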
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
In class CommonExecPythonCalc, method translateToPlanInternal.
@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(
        PlannerBase planner, ExecNodeConfig config) {
    final ExecEdge inputEdge = getInputEdges().get(0);
    final Transformation<RowData> inputTransform =
            (Transformation<RowData>) inputEdge.translateToPlan(planner);
    final Configuration pythonConfig =
            CommonPythonUtil.getMergedConfig(planner.getExecEnv(), config.getTableConfig());
    final OneInputTransformation<RowData, RowData> ret =
            createPythonOneInputTransformation(inputTransform, config, pythonConfig);
    // If the Python worker shares Flink's managed memory, declare the use case
    // so that memory is reserved for the slot.
    if (CommonPythonUtil.isPythonWorkerUsingManagedMemory(pythonConfig)) {
        ret.declareManagedMemoryUseCaseAtSlotScope(ManagedMemoryUseCase.PYTHON);
    }
    return ret;
}
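The managed-memory declaration at the end is a general pattern for transformations whose operators need managed memory. The following is a minimal sketch of the same pattern outside the Python calc context; the method name, operator name, and parameters are illustrative, not part of the snippet above:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.core.memory.ManagedMemoryUseCase;
import org.apache.flink.streaming.api.operators.StreamOperatorFactory;
import org.apache.flink.streaming.api.transformations.OneInputTransformation;
import org.apache.flink.table.data.RowData;

/**
 * Sketch: wrap an input in a one-input transformation and declare slot-scoped
 * managed memory for the Python worker. Names here are hypothetical.
 */
static OneInputTransformation<RowData, RowData> withPythonManagedMemory(
        Transformation<RowData> input,
        StreamOperatorFactory<RowData> operatorFactory,
        TypeInformation<RowData> outputType) {
    final OneInputTransformation<RowData, RowData> transform =
            new OneInputTransformation<>(
                    input, "PythonCalc", operatorFactory, outputType, input.getParallelism());
    // The Python worker's memory is accounted at slot scope, i.e. shared by
    // all operators running in the same slot.
    transform.declareManagedMemoryUseCaseAtSlotScope(ManagedMemoryUseCase.PYTHON);
    return transform;
}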
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
In class CommonExecSink, method deriveSinkParallelism.
/**
 * Returns the parallelism of the sink operator. If the sink runtime provider
 * implements {@link ParallelismProvider} and supplies a value, that value is
 * used; otherwise the parallelism of the input transformation is used.
 */
private int deriveSinkParallelism(
        Transformation<RowData> inputTransform, SinkRuntimeProvider runtimeProvider) {
    final int inputParallelism = inputTransform.getParallelism();
    if (!(runtimeProvider instanceof ParallelismProvider)) {
        return inputParallelism;
    }
    final ParallelismProvider parallelismProvider = (ParallelismProvider) runtimeProvider;
    return parallelismProvider
            .getParallelism()
            .map(
                    sinkParallelism -> {
                        if (sinkParallelism <= 0) {
                            throw new TableException(
                                    String.format(
                                            "Invalid configured parallelism %s for table '%s'.",
                                            sinkParallelism,
                                            tableSinkSpec
                                                    .getContextResolvedTable()
                                                    .getIdentifier()
                                                    .asSummaryString()));
                        }
                        return sinkParallelism;
                    })
            .orElse(inputParallelism);
}
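To see the Optional-based fallback from both sides, here is a hedged sketch using SinkFunctionProvider, one of the runtime providers that implements ParallelismProvider (assuming its two-argument factory method, which takes an explicit sink parallelism):

import org.apache.flink.streaming.api.functions.sink.DiscardingSink;
import org.apache.flink.table.connector.sink.DynamicTableSink.SinkRuntimeProvider;
import org.apache.flink.table.connector.sink.SinkFunctionProvider;
import org.apache.flink.table.data.RowData;

// A provider with an explicit parallelism: deriveSinkParallelism returns 4.
SinkRuntimeProvider fixed = SinkFunctionProvider.of(new DiscardingSink<RowData>(), 4);

// A provider without one: getParallelism() is Optional.empty(), so the
// method falls back to inputTransform.getParallelism().
SinkRuntimeProvider inherited = SinkFunctionProvider.of(new DiscardingSink<RowData>());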
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
In class CommonExecSink, method applyConstraintValidations.
/**
 * Applies an operator that validates the sink's constraints (NOT NULL columns
 * and CHAR/VARCHAR/BINARY/VARBINARY lengths), either filtering out violating
 * rows or raising an error, depending on the configured enforcers.
 */
private Transformation<RowData> applyConstraintValidations(
        Transformation<RowData> inputTransform, ReadableConfig config, RowType physicalRowType) {
    final ConstraintEnforcer.Builder validatorBuilder = ConstraintEnforcer.newBuilder();
    final String[] fieldNames = physicalRowType.getFieldNames().toArray(new String[0]);

    // Build NOT NULL enforcer
    final int[] notNullFieldIndices = getNotNullFieldIndices(physicalRowType);
    if (notNullFieldIndices.length > 0) {
        final ExecutionConfigOptions.NotNullEnforcer notNullEnforcer =
                config.get(ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER);
        final List<String> notNullFieldNames =
                Arrays.stream(notNullFieldIndices)
                        .mapToObj(idx -> fieldNames[idx])
                        .collect(Collectors.toList());
        validatorBuilder.addNotNullConstraint(
                notNullEnforcer, notNullFieldIndices, notNullFieldNames, fieldNames);
    }

    final ExecutionConfigOptions.TypeLengthEnforcer typeLengthEnforcer =
            config.get(ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER);

    // Build CHAR/VARCHAR length enforcer
    final List<ConstraintEnforcer.FieldInfo> charFieldInfo =
            getFieldInfoForLengthEnforcer(physicalRowType, LengthEnforcerType.CHAR);
    if (!charFieldInfo.isEmpty()) {
        final List<String> charFieldNames =
                charFieldInfo.stream()
                        .map(cfi -> fieldNames[cfi.fieldIdx()])
                        .collect(Collectors.toList());
        validatorBuilder.addCharLengthConstraint(
                typeLengthEnforcer, charFieldInfo, charFieldNames, fieldNames);
    }

    // Build BINARY/VARBINARY length enforcer
    final List<ConstraintEnforcer.FieldInfo> binaryFieldInfo =
            getFieldInfoForLengthEnforcer(physicalRowType, LengthEnforcerType.BINARY);
    if (!binaryFieldInfo.isEmpty()) {
        final List<String> binaryFieldNames =
                binaryFieldInfo.stream()
                        .map(cfi -> fieldNames[cfi.fieldIdx()])
                        .collect(Collectors.toList());
        validatorBuilder.addBinaryLengthConstraint(
                typeLengthEnforcer, binaryFieldInfo, binaryFieldNames, fieldNames);
    }

    final ConstraintEnforcer constraintEnforcer = validatorBuilder.build();
    if (constraintEnforcer != null) {
        return ExecNodeUtil.createOneInputTransformation(
                inputTransform,
                createTransformationMeta(
                        CONSTRAINT_VALIDATOR_TRANSFORMATION,
                        constraintEnforcer.getOperatorName(),
                        "ConstraintEnforcer",
                        config),
                constraintEnforcer,
                getInputTypeInfo(),
                inputTransform.getParallelism());
    } else {
        // No constraints to enforce, so skip adding the enforcer operator.
        return inputTransform;
    }
}
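For context, the enforcers consulted above are driven by table configuration. The following sketch shows how they might be toggled before planning; it assumes the DROP and TRIM_PAD enum values of ExecutionConfigOptions and a TableConfig that accepts typed options via set():

import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.config.ExecutionConfigOptions;

TableConfig tableConfig = TableConfig.getDefault();
// With DROP, rows violating NOT NULL constraints are silently filtered
// instead of failing the job.
tableConfig.set(
        ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER,
        ExecutionConfigOptions.NotNullEnforcer.DROP);
// With TRIM_PAD, oversized CHAR/VARCHAR and BINARY/VARBINARY values are
// trimmed (and padded where the type requires it) rather than passed through.
tableConfig.set(
        ExecutionConfigOptions.TABLE_EXEC_SINK_TYPE_LENGTH_ENFORCER,
        ExecutionConfigOptions.TypeLengthEnforcer.TRIM_PAD);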