Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
In class GenericDataSinkBaseTest, method testDataSourceWithRuntimeContext:
@Test
public void testDataSourceWithRuntimeContext() {
    try {
        TestRichOutputFormat out = new TestRichOutputFormat();
        GenericDataSinkBase<String> sink =
                new GenericDataSinkBase<String>(
                        out,
                        new UnaryOperatorInformation<String, Nothing>(
                                BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.getInfoFor(Nothing.class)),
                        "test_sink");
        // 'source' and 'in' are fixtures defined on the test class (a data source
        // operator and its input format); they are not shown in this snippet.
        sink.setInput(source);

        ExecutionConfig executionConfig = new ExecutionConfig();
        final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        final TaskInfo taskInfo = new TaskInfo("test_sink", 1, 0, 1, 0);

        // first run with object reuse disabled
        executionConfig.disableObjectReuse();
        in.reset();
        sink.executeOnCollections(
                asList(TestIOData.NAMES),
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, new UnregisteredMetricsGroup()),
                executionConfig);
        assertEquals(out.output, asList(TestIOData.RICH_NAMES));

        // second run with object reuse enabled
        executionConfig.enableObjectReuse();
        out.clear();
        in.reset();
        sink.executeOnCollections(
                asList(TestIOData.NAMES),
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, new UnregisteredMetricsGroup()),
                executionConfig);
        assertEquals(out.output, asList(TestIOData.RICH_NAMES));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
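The setup above is the common pattern in these examples: build a single-subtask TaskInfo and wrap it, together with the ExecutionConfig, the accumulator map, and the cached-file map, in a RuntimeUDFContext so that rich functions get a realistic runtime context during collection-based execution. A minimal standalone sketch of that setup, assuming the five TaskInfo constructor arguments are (taskName, maxNumberOfParallelSubtasks, indexOfThisSubtask, numberOfParallelSubtasks, attemptNumber); the class and method names below are illustrative, not part of Flink:

import java.util.HashMap;
import java.util.concurrent.Future;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.TaskInfo;
import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.functions.util.RuntimeUDFContext;
import org.apache.flink.core.fs.Path;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;

public class RuntimeUDFContextSketch {

    // Builds the same kind of context the tests above pass to executeOnCollections().
    public static RuntimeUDFContext singleSubtaskContext(String taskName, ExecutionConfig config) {
        // one parallel subtask, index 0, attempt 0
        TaskInfo taskInfo = new TaskInfo(taskName, 1, 0, 1, 0);
        return new RuntimeUDFContext(
                taskInfo,
                null, // user code class loader; the tests above also pass null
                config,
                new HashMap<String, Future<Path>>(),       // distributed cache entries
                new HashMap<String, Accumulator<?, ?>>(),  // accumulators
                new UnregisteredMetricsGroup());           // no-op metrics group
    }
}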
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
In class GenericDataSourceBaseTest, method testDataSourceWithRuntimeContext:
@Test
public void testDataSourceWithRuntimeContext() {
    try {
        TestRichInputFormat in = new TestRichInputFormat();
        GenericDataSourceBase<String, TestRichInputFormat> source =
                new GenericDataSourceBase<String, TestRichInputFormat>(
                        in, new OperatorInformation<String>(BasicTypeInfo.STRING_TYPE_INFO), "testSource");

        final HashMap<String, Accumulator<?, ?>> accumulatorMap = new HashMap<String, Accumulator<?, ?>>();
        final HashMap<String, Future<Path>> cpTasks = new HashMap<>();
        final TaskInfo taskInfo = new TaskInfo("test_source", 1, 0, 1, 0);
        ExecutionConfig executionConfig = new ExecutionConfig();

        executionConfig.disableObjectReuse();
        assertEquals(false, in.hasBeenClosed());
        assertEquals(false, in.hasBeenOpened());
        List<String> resultMutableSafe = source.executeOnCollections(
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, new UnregisteredMetricsGroup()),
                executionConfig);
        assertEquals(true, in.hasBeenClosed());
        assertEquals(true, in.hasBeenOpened());

        in.reset();
        executionConfig.enableObjectReuse();
        assertEquals(false, in.hasBeenClosed());
        assertEquals(false, in.hasBeenOpened());
        List<String> resultRegular = source.executeOnCollections(
                new RuntimeUDFContext(taskInfo, null, executionConfig, cpTasks, accumulatorMap, new UnregisteredMetricsGroup()),
                executionConfig);
        assertEquals(true, in.hasBeenClosed());
        assertEquals(true, in.hasBeenOpened());

        assertEquals(asList(TestIOData.RICH_NAMES), resultMutableSafe);
        assertEquals(asList(TestIOData.RICH_NAMES), resultRegular);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
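Both tests deliberately run executeOnCollections twice, once with object reuse disabled and once enabled, so the mutation-safe path and the object-reusing path are checked against the same expected output. A tiny sketch of the toggle itself (class and method names are illustrative):

import org.apache.flink.api.common.ExecutionConfig;

public class ObjectReuseToggleSketch {
    public static void main(String[] args) {
        ExecutionConfig config = new ExecutionConfig();

        config.disableObjectReuse();                         // mutation-safe path
        System.out.println(config.isObjectReuseEnabled());   // false

        config.enableObjectReuse();                          // object-reuse path
        System.out.println(config.isObjectReuseEnabled());   // true
    }
}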
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
In class BoltWrapperTest, method createMockStreamTask:
public static StreamTask<?, ?> createMockStreamTask(ExecutionConfig execConfig) {
    Environment env = mock(Environment.class);
    when(env.getTaskInfo()).thenReturn(new TaskInfo("Mock Task", 1, 0, 1, 0));
    when(env.getUserClassLoader()).thenReturn(BoltWrapperTest.class.getClassLoader());
    when(env.getMetricGroup()).thenReturn(new UnregisteredTaskMetricsGroup());
    when(env.getTaskManagerInfo()).thenReturn(new TestingTaskManagerRuntimeInfo());

    StreamTask<?, ?> mockTask = mock(StreamTask.class);
    when(mockTask.getCheckpointLock()).thenReturn(new Object());
    when(mockTask.getConfiguration()).thenReturn(new StreamConfig(new Configuration()));
    when(mockTask.getEnvironment()).thenReturn(env);
    when(mockTask.getExecutionConfig()).thenReturn(execConfig);

    return mockTask;
}
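A hedged usage sketch of this helper: the mocked Environment answers getTaskInfo() with a fixed single-subtask TaskInfo, so code under test can read subtask metadata without deploying a real task. The test method name and assertions below are illustrative, not taken from BoltWrapperTest:

@Test
public void sketchMockStreamTaskUsage() throws Exception {
    StreamTask<?, ?> task = createMockStreamTask(new ExecutionConfig());

    TaskInfo info = task.getEnvironment().getTaskInfo();
    assertEquals("Mock Task", info.getTaskName());
    assertEquals(0, info.getIndexOfThisSubtask());
    assertEquals(1, info.getNumberOfParallelSubtasks());
    assertEquals(0, info.getAttemptNumber());
}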
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
In class RocksDBStateBackendConfigTest, method getMockEnvironment:
static Environment getMockEnvironment(File[] tempDirs) {
    final String[] tempDirStrings = new String[tempDirs.length];
    for (int i = 0; i < tempDirs.length; i++) {
        tempDirStrings[i] = tempDirs[i].getAbsolutePath();
    }

    IOManager ioMan = mock(IOManager.class);
    when(ioMan.getSpillingDirectories()).thenReturn(tempDirs);

    Environment env = mock(Environment.class);
    when(env.getJobID()).thenReturn(new JobID());
    when(env.getUserClassLoader()).thenReturn(RocksDBStateBackendConfigTest.class.getClassLoader());
    when(env.getIOManager()).thenReturn(ioMan);
    when(env.getTaskKvStateRegistry()).thenReturn(new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()));

    TaskInfo taskInfo = mock(TaskInfo.class);
    when(env.getTaskInfo()).thenReturn(taskInfo);
    when(taskInfo.getIndexOfThisSubtask()).thenReturn(0);

    TaskManagerRuntimeInfo tmInfo = new TestingTaskManagerRuntimeInfo(new Configuration(), tempDirStrings);
    when(env.getTaskManagerInfo()).thenReturn(tmInfo);

    return env;
}
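A hedged usage sketch of this factory: the mock Environment reports subtask index 0 and exposes the supplied directories through its IOManager, which is all the RocksDB backend configuration tests need. The temp-dir choice and assertions below are illustrative:

@Test
public void sketchMockEnvironmentUsage() {
    File tempDir = new File(System.getProperty("java.io.tmpdir"));
    Environment env = getMockEnvironment(new File[] { tempDir });

    assertEquals(0, env.getTaskInfo().getIndexOfThisSubtask());
    assertEquals(1, env.getIOManager().getSpillingDirectories().length);
}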
Use of org.apache.flink.api.common.TaskInfo in project flink by apache.
In class CollectionExecutor, method executeBinaryOperator:
private <IN1, IN2, OUT> List<OUT> executeBinaryOperator(DualInputOperator<?, ?, ?, ?> operator, int superStep) throws Exception {
    Operator<?> inputOp1 = operator.getFirstInput();
    Operator<?> inputOp2 = operator.getSecondInput();
    if (inputOp1 == null) {
        throw new InvalidProgramException("The binary operation " + operator.getName() + " has no first input.");
    }
    if (inputOp2 == null) {
        throw new InvalidProgramException("The binary operation " + operator.getName() + " has no second input.");
    }

    // compute inputs
    @SuppressWarnings("unchecked")
    List<IN1> inputData1 = (List<IN1>) execute(inputOp1, superStep);
    @SuppressWarnings("unchecked")
    List<IN2> inputData2 = (List<IN2>) execute(inputOp2, superStep);
    @SuppressWarnings("unchecked")
    DualInputOperator<IN1, IN2, OUT, ?> typedOp = (DualInputOperator<IN1, IN2, OUT, ?>) operator;

    // build the runtime context and compute broadcast variables, if necessary
    TaskInfo taskInfo = new TaskInfo(typedOp.getName(), 1, 0, 1, 0);
    RuntimeUDFContext ctx;
    MetricGroup metrics = new UnregisteredMetricsGroup();
    if (RichFunction.class.isAssignableFrom(typedOp.getUserCodeWrapper().getUserCodeClass())) {
        ctx = superStep == 0
                ? new RuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics)
                : new IterationRuntimeUDFContext(taskInfo, classLoader, executionConfig, cachedFiles, accumulators, metrics);
        for (Map.Entry<String, Operator<?>> bcInputs : operator.getBroadcastInputs().entrySet()) {
            List<?> bcData = execute(bcInputs.getValue());
            ctx.setBroadcastVariable(bcInputs.getKey(), bcData);
        }
    } else {
        ctx = null;
    }

    return typedOp.executeOnCollections(inputData1, inputData2, ctx, executionConfig);
}
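Note that the collection executor always constructs the TaskInfo with parallelism 1, subtask index 0, and attempt number 0, so rich user functions see exactly those values through their RuntimeContext when a program runs in collection mode. A minimal sketch of a function reading them (the class below is illustrative, not part of Flink):

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;

public class SubtaskLoggingMapper extends RichMapFunction<String, String> {

    @Override
    public void open(Configuration parameters) {
        // Under collection execution this prints "subtask 0 of 1, attempt 0".
        System.out.printf("subtask %d of %d, attempt %d%n",
                getRuntimeContext().getIndexOfThisSubtask(),
                getRuntimeContext().getNumberOfParallelSubtasks(),
                getRuntimeContext().getAttemptNumber());
    }

    @Override
    public String map(String value) {
        return value;
    }
}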