Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
Class CollectionExecutor, method executeDataSource.
private <OUT> List<OUT> executeDataSource(GenericDataSourceBase<?, ?> source, int superStep) throws Exception {
    @SuppressWarnings("unchecked")
    GenericDataSourceBase<OUT, ?> typedSource = (GenericDataSourceBase<OUT, ?>) source;
    // build the runtime context and compute broadcast variables, if necessary
    TaskInfo taskInfo = new TaskInfo(typedSource.getName(), 1, 0, 1, 0);
    RuntimeUDFContext ctx;
    MetricGroup metrics = new UnregisteredMetricsGroup();
    if (RichInputFormat.class.isAssignableFrom(typedSource.getUserCodeWrapper().getUserCodeClass())) {
        ctx = superStep == 0
                ? new RuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics)
                : new IterationRuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics);
    } else {
        ctx = null;
    }
    return typedSource.executeOnCollections(ctx, executionConfig);
}
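The TaskInfo above pins the collection run to a single, non-parallel subtask. A minimal standalone sketch of the same wiring, assuming the constructor order used in these snippets is (taskName, maxNumberOfParallelSubtasks, indexOfThisSubtask, numberOfParallelSubtasks, attemptNumber); the "collection-source" name and the empty maps are illustrative only:

// Sketch only: mirrors the TaskInfo / RuntimeUDFContext construction shown above.
TaskInfo taskInfo = new TaskInfo(
        "collection-source",  // task name (hypothetical)
        1,                    // max number of parallel subtasks
        0,                    // index of this subtask
        1,                    // number of parallel subtasks
        0);                   // attempt number
RuntimeUDFContext ctx = new RuntimeUDFContext(
        taskInfo,
        Thread.currentThread().getContextClassLoader(),
        new ExecutionConfig(),
        new HashMap<String, Future<Path>>(),       // distributed cache entries (empty here)
        new HashMap<String, Accumulator<?, ?>>(),  // accumulators (empty here)
        new UnregisteredMetricsGroup());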
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
Class CollectionExecutor, method executeDataSink.
// --------------------------------------------------------------------------------------------
//  Operator class specific execution methods
// --------------------------------------------------------------------------------------------
private <IN> void executeDataSink(GenericDataSinkBase<?> sink, int superStep) throws Exception {
    Operator<?> inputOp = sink.getInput();
    if (inputOp == null) {
        throw new InvalidProgramException("The data sink " + sink.getName() + " has no input.");
    }
    @SuppressWarnings("unchecked")
    List<IN> input = (List<IN>) execute(inputOp);
    @SuppressWarnings("unchecked")
    GenericDataSinkBase<IN> typedSink = (GenericDataSinkBase<IN>) sink;
    // build the runtime context and compute broadcast variables, if necessary
    TaskInfo taskInfo = new TaskInfo(typedSink.getName(), 1, 0, 1, 0);
    RuntimeUDFContext ctx;
    MetricGroup metrics = new UnregisteredMetricsGroup();
    if (RichOutputFormat.class.isAssignableFrom(typedSink.getUserCodeWrapper().getUserCodeClass())) {
        ctx = superStep == 0
                ? new RuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics)
                : new IterationRuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics);
    } else {
        ctx = null;
    }
    typedSink.executeOnCollections(input, ctx, executionConfig);
}
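The context is only built when the sink's user code is a RichOutputFormat, because only rich formats expose a runtime context. As an illustration (not part of the Flink sources; the class name PrintingOutputFormat is made up), such a format could read the TaskInfo-backed values like this:

import org.apache.flink.api.common.io.RichOutputFormat;
import org.apache.flink.configuration.Configuration;

// Hypothetical rich sink format that reads values backed by the TaskInfo above.
public class PrintingOutputFormat<T> extends RichOutputFormat<T> {

    @Override
    public void configure(Configuration parameters) {
        // no configuration needed for this sketch
    }

    @Override
    public void open(int taskNumber, int numTasks) {
        // these values come from the TaskInfo that CollectionExecutor wrapped
        // into the RuntimeUDFContext before calling executeOnCollections
        System.out.println("opening subtask "
                + getRuntimeContext().getIndexOfThisSubtask()
                + " of " + getRuntimeContext().getNumberOfParallelSubtasks());
    }

    @Override
    public void writeRecord(T record) {
        System.out.println(record);
    }

    @Override
    public void close() {
        // nothing to release
    }
}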
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
Class RichInputFormatTest, method testCheckRuntimeContextAccess.
@Test
public void testCheckRuntimeContextAccess() {
    final SerializedInputFormat<Value> inputFormat = new SerializedInputFormat<Value>();
    final TaskInfo taskInfo = new TaskInfo("test name", 3, 1, 3, 0);
    inputFormat.setRuntimeContext(new RuntimeUDFContext(
            taskInfo,
            getClass().getClassLoader(),
            new ExecutionConfig(),
            new HashMap<String, Future<Path>>(),
            new HashMap<String, Accumulator<?, ?>>(),
            new UnregisteredMetricsGroup()));
    assertEquals(inputFormat.getRuntimeContext().getIndexOfThisSubtask(), 1);
    assertEquals(inputFormat.getRuntimeContext().getNumberOfParallelSubtasks(), 3);
}
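The test checks only two of the TaskInfo fields. Assuming the runtime context in this Flink version also exposes getTaskName() and getAttemptNumber() (an assumption, not shown in the snippet), the remaining constructor arguments could be verified the same way:

// Hedged extension of the test above: getTaskName()/getAttemptNumber() are assumed to exist
// and to mirror the TaskInfo("test name", 3, 1, 3, 0) arguments.
assertEquals(inputFormat.getRuntimeContext().getTaskName(), "test name");
assertEquals(inputFormat.getRuntimeContext().getAttemptNumber(), 0);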
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
Class RichOutputFormatTest, method testCheckRuntimeContextAccess.
@Test
public void testCheckRuntimeContextAccess() {
    final SerializedOutputFormat<Value> outputFormat = new SerializedOutputFormat<Value>();
    final TaskInfo taskInfo = new TaskInfo("test name", 3, 1, 3, 0);
    outputFormat.setRuntimeContext(new RuntimeUDFContext(
            taskInfo,
            getClass().getClassLoader(),
            new ExecutionConfig(),
            new HashMap<String, Future<Path>>(),
            new HashMap<String, Accumulator<?, ?>>(),
            new UnregisteredMetricsGroup()));
    assertEquals(outputFormat.getRuntimeContext().getIndexOfThisSubtask(), 1);
    assertEquals(outputFormat.getRuntimeContext().getNumberOfParallelSubtasks(), 3);
}
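Both tests build the same RuntimeUDFContext by hand. A small helper like the following (the name createTestContext is hypothetical, not part of the Flink test utilities) would remove the duplication; it only repackages the construction already shown above:

// Hypothetical test helper; mirrors the RuntimeUDFContext construction used in both tests above.
// Usage: outputFormat.setRuntimeContext(createTestContext(new TaskInfo("test name", 3, 1, 3, 0)));
private RuntimeUDFContext createTestContext(TaskInfo taskInfo) {
    return new RuntimeUDFContext(
            taskInfo,
            getClass().getClassLoader(),
            new ExecutionConfig(),
            new HashMap<String, Future<Path>>(),       // empty distributed cache
            new HashMap<String, Accumulator<?, ?>>(),  // empty accumulator map
            new UnregisteredMetricsGroup());
}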
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
Class FlatMapOperatorCollectionTest, method testExecuteOnCollection.
private void testExecuteOnCollection(FlatMapFunction<String, String> udf, List<String> input, boolean mutableSafe) throws Exception {
    ExecutionConfig executionConfig = new ExecutionConfig();
    if (mutableSafe) {
        executionConfig.disableObjectReuse();
    } else {
        executionConfig.enableObjectReuse();
    }
    final TaskInfo taskInfo = new TaskInfo("Test UDF", 4, 0, 4, 0);
    // run on collections
    final List<String> result = getTestFlatMapOperator(udf)
            .executeOnCollections(
                    input,
                    new RuntimeUDFContext(taskInfo, null, executionConfig,
                            new HashMap<String, Future<Path>>(),
                            new HashMap<String, Accumulator<?, ?>>(),
                            new UnregisteredMetricsGroup()),
                    executionConfig);
    Assert.assertEquals(input.size(), result.size());
    Assert.assertEquals(input, result);
}
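The helper accepts any FlatMapFunction<String, String>. A trivial identity UDF (illustrative only; the actual test class supplies its own function through getTestFlatMapOperator) is enough to exercise both the object-reuse and the mutable-safe paths:

// Illustrative identity UDF for the helper above; the test then compares the input to itself.
FlatMapFunction<String, String> identity = new FlatMapFunction<String, String>() {
    @Override
    public void flatMap(String value, Collector<String> out) throws Exception {
        out.collect(value);
    }
};
testExecuteOnCollection(identity, Arrays.asList("a", "b", "c"), true);   // object reuse disabled
testExecuteOnCollection(identity, Arrays.asList("a", "b", "c"), false);  // object reuse enabled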