Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class BoltWrapperTest, method testOpen.
@SuppressWarnings("unchecked")
@Test
public void testOpen() throws Exception {
    // utility mocks
    final StormConfig stormConfig = new StormConfig();
    final Configuration flinkConfig = new Configuration();

    final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
    when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig).thenReturn(flinkConfig);

    final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
    when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
    when(taskContext.getTaskName()).thenReturn("name");
    when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());

    final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
    declarer.declare(new Fields("dummy"));
    PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);

    // (1) open with no configuration
    {
        ExecutionConfig execConfig = mock(ExecutionConfig.class);
        when(execConfig.getGlobalJobParameters()).thenReturn(null);

        final IRichBolt bolt = mock(IRichBolt.class);
        BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
        wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
        wrapper.open();

        verify(bolt).prepare(any(Map.class), any(TopologyContext.class), any(OutputCollector.class));
    }

    // (2) open with a storm specific configuration
    {
        ExecutionConfig execConfig = mock(ExecutionConfig.class);
        when(execConfig.getGlobalJobParameters()).thenReturn(stormConfig);

        final IRichBolt bolt = mock(IRichBolt.class);
        BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
        wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
        wrapper.open();

        verify(bolt).prepare(same(stormConfig), any(TopologyContext.class), any(OutputCollector.class));
    }

    // (3) open with a flink config
    {
        final Configuration cfg = new Configuration();
        cfg.setString("foo", "bar");
        cfg.setInteger("the end (the int)", Integer.MAX_VALUE);

        ExecutionConfig execConfig = mock(ExecutionConfig.class);
        when(execConfig.getGlobalJobParameters()).thenReturn(new UnmodifiableConfiguration(cfg));

        TestDummyBolt testBolt = new TestDummyBolt();
        BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(testBolt);
        wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
        wrapper.open();

        for (Entry<String, String> entry : cfg.toMap().entrySet()) {
            Assert.assertEquals(entry.getValue(), testBolt.config.get(entry.getKey()));
        }
    }
}
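The three numbered cases above exercise how BoltWrapper#open() resolves the configuration it forwards to IRichBolt#prepare(...): nothing, a StormConfig, or a plain Flink Configuration, all obtained from ExecutionConfig#getGlobalJobParameters(). As a rough sketch of how such parameters would be registered on the job side (the class name below is made up for illustration, and using a Configuration directly as global job parameters is an assumption for this Flink version):

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class GlobalJobParametersSketch {

    public static void main(String[] args) {
        // A job-wide Configuration registered as global job parameters; at runtime,
        // BoltWrapper#open() reads it back via ExecutionConfig#getGlobalJobParameters()
        // and hands it to IRichBolt#prepare(...) as the Storm configuration map,
        // which is what case (3) above asserts entry by entry.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Configuration jobWideConfig = new Configuration();
        jobWideConfig.setString("foo", "bar");

        env.getConfig().setGlobalJobParameters(jobWideConfig);
    }
}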
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class BoltWrapperTest, method testClose.
@SuppressWarnings("unchecked")
@Test
public void testClose() throws Exception {
    final IRichBolt bolt = mock(IRichBolt.class);

    final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
    declarer.declare(new Fields("dummy"));
    PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);

    final BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
    wrapper.setup(createMockStreamTask(), new StreamConfig(new Configuration()), mock(Output.class));

    wrapper.close();
    wrapper.dispose();

    verify(bolt).cleanup();
}
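The test drives the shutdown lifecycle, close() followed by dispose(), before verifying that the Storm bolt's cleanup() hook ran. A simplified sketch of such forwarding inside a wrapper (assumed from the behavior verified here, not the verbatim BoltWrapper source):

// simplified, assumed sketch: forwarding Flink's operator disposal to Storm's cleanup() hook
@Override
public void dispose() throws Exception {
    super.dispose();
    this.bolt.cleanup();
}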
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class TwoInputStreamTask, method init.
@Override
public void init() throws Exception {
    StreamConfig configuration = getConfiguration();
    ClassLoader userClassLoader = getUserCodeClassLoader();

    TypeSerializer<IN1> inputDeserializer1 = configuration.getTypeSerializerIn1(userClassLoader);
    TypeSerializer<IN2> inputDeserializer2 = configuration.getTypeSerializerIn2(userClassLoader);

    int numberOfInputs = configuration.getNumberOfInputs();

    ArrayList<InputGate> inputList1 = new ArrayList<InputGate>();
    ArrayList<InputGate> inputList2 = new ArrayList<InputGate>();

    List<StreamEdge> inEdges = configuration.getInPhysicalEdges(userClassLoader);

    for (int i = 0; i < numberOfInputs; i++) {
        int inputType = inEdges.get(i).getTypeNumber();
        InputGate reader = getEnvironment().getInputGate(i);
        switch (inputType) {
            case 1:
                inputList1.add(reader);
                break;
            case 2:
                inputList2.add(reader);
                break;
            default:
                throw new RuntimeException("Invalid input type number: " + inputType);
        }
    }

    this.inputProcessor = new StreamTwoInputProcessor<>(
            inputList1, inputList2,
            inputDeserializer1, inputDeserializer2,
            this,
            configuration.getCheckpointMode(),
            getCheckpointLock(),
            getEnvironment().getIOManager(),
            getEnvironment().getTaskManagerInfo().getConfiguration(),
            getStreamStatusMaintainer(),
            this.headOperator);

    // make sure that stream tasks report their I/O statistics
    inputProcessor.setMetricGroup(getEnvironment().getMetricGroup().getIOMetricGroup());
}
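init() pulls everything it needs from the StreamConfig populated during job-graph translation: the two input serializers, the number of inputs, and the physical in-edges whose type number (1 or 2) routes each input gate to the first or second input list. A minimal sketch of the counterpart setters, assuming the setter names mirror the getters used above and using IntSerializer/StringSerializer purely as illustrative input types:

import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.graph.StreamConfig;

public class TwoInputStreamConfigSketch {

    public static void main(String[] args) {
        // StreamConfig is a thin typed view over a Configuration; the task reads back
        // exactly what the job-graph translation wrote into it.
        StreamConfig config = new StreamConfig(new Configuration());
        config.setNumberOfInputs(2);
        config.setTypeSerializerIn1(IntSerializer.INSTANCE);     // deserializer for input 1
        config.setTypeSerializerIn2(StringSerializer.INSTANCE);  // deserializer for input 2
        // the physical in-edges (carrying the type numbers) are written the same way via
        // setInPhysicalEdges(...); omitted here because building StreamEdge instances
        // needs the surrounding StreamGraph
    }
}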
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class AsyncWaitOperatorTest, method testTimeoutCleanup.
/**
 * FLINK-5652
 * Tests that registered timers are properly canceled upon completion of a
 * {@link StreamRecordQueueEntry}, in order to avoid resource leaks, because TriggerTasks
 * hold a reference to the StreamRecordQueueEntry.
 */
@Test
public void testTimeoutCleanup() throws Exception {
    final Object lock = new Object();

    final long timeout = 100000L;
    final long timestamp = 1L;

    Environment environment = mock(Environment.class);
    when(environment.getMetricGroup()).thenReturn(new UnregisteredTaskMetricsGroup());
    when(environment.getTaskManagerInfo()).thenReturn(new TestingTaskManagerRuntimeInfo());
    when(environment.getUserClassLoader()).thenReturn(getClass().getClassLoader());
    when(environment.getTaskInfo()).thenReturn(new TaskInfo("testTask", 1, 0, 1, 0));

    ScheduledFuture<?> scheduledFuture = mock(ScheduledFuture.class);

    ProcessingTimeService processingTimeService = mock(ProcessingTimeService.class);
    when(processingTimeService.getCurrentProcessingTime()).thenReturn(timestamp);
    doReturn(scheduledFuture).when(processingTimeService).registerTimer(anyLong(), any(ProcessingTimeCallback.class));

    StreamTask<?, ?> containingTask = mock(StreamTask.class);
    when(containingTask.getEnvironment()).thenReturn(environment);
    when(containingTask.getCheckpointLock()).thenReturn(lock);
    when(containingTask.getProcessingTimeService()).thenReturn(processingTimeService);

    StreamConfig streamConfig = mock(StreamConfig.class);
    doReturn(IntSerializer.INSTANCE).when(streamConfig).getTypeSerializerIn1(any(ClassLoader.class));

    Output<StreamRecord<Integer>> output = mock(Output.class);

    AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(
            new AsyncFunction<Integer, Integer>() {
                private static final long serialVersionUID = -3718276118074877073L;

                @Override
                public void asyncInvoke(Integer input, AsyncCollector<Integer> collector) throws Exception {
                    collector.collect(Collections.singletonList(input));
                }
            },
            timeout,
            1,
            AsyncDataStream.OutputMode.UNORDERED);

    operator.setup(containingTask, streamConfig, output);
    operator.open();

    final StreamRecord<Integer> streamRecord = new StreamRecord<>(42, timestamp);

    synchronized (lock) {
        // processing an element will register a timeout
        operator.processElement(streamRecord);
    }

    synchronized (lock) {
        // closing the operator waits until all inputs have been processed
        operator.close();
    }

    // check that we actually outputted the result of the single input
    verify(output).collect(eq(streamRecord));
    verify(processingTimeService).registerTimer(eq(processingTimeService.getCurrentProcessingTime() + timeout), any(ProcessingTimeCallback.class));

    // check that we have cancelled our registered timeout
    verify(scheduledFuture).cancel(eq(true));
}
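The two verify calls at the end pin down the FLINK-5652 contract: registerTimer(...) hands back a ScheduledFuture, and the operator must cancel it once the element's result arrives, so the scheduled trigger task releases its reference to the queue entry. A hedged sketch of that register/cancel pattern (the class below and the package names of ProcessingTimeService/ProcessingTimeCallback are assumptions for the Flink version this test targets):

import java.util.concurrent.ScheduledFuture;

import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;

public class TimeoutTimerSketch {

    // register a per-element timeout against the task's ProcessingTimeService
    static ScheduledFuture<?> registerTimeout(ProcessingTimeService timeService, long timeout) {
        return timeService.registerTimer(
                timeService.getCurrentProcessingTime() + timeout,
                new ProcessingTimeCallback() {
                    @Override
                    public void onProcessingTime(long timestamp) throws Exception {
                        // here the operator would complete the pending queue entry
                        // exceptionally with a timeout
                    }
                });
    }

    // once the async result arrives, cancel the timer so the scheduled task
    // no longer holds a reference to the queue entry
    static void onResultArrived(ScheduledFuture<?> timeoutTimer) {
        timeoutTimer.cancel(true);
    }
}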
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class StreamSourceOperatorTest, method setupSourceOperator.
@SuppressWarnings("unchecked")
private static <T> void setupSourceOperator(
        StreamSource<T, ?> operator,
        TimeCharacteristic timeChar,
        long watermarkInterval,
        long latencyMarkInterval,
        final ProcessingTimeService timeProvider) {

    ExecutionConfig executionConfig = new ExecutionConfig();
    executionConfig.setAutoWatermarkInterval(watermarkInterval);
    executionConfig.setLatencyTrackingInterval(latencyMarkInterval);

    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateBackend(new MemoryStateBackend());
    cfg.setTimeCharacteristic(timeChar);

    Environment env = new DummyEnvironment("MockTwoInputTask", 1, 0);

    StreamStatusMaintainer streamStatusMaintainer = mock(StreamStatusMaintainer.class);
    when(streamStatusMaintainer.getStreamStatus()).thenReturn(StreamStatus.ACTIVE);

    StreamTask<?, ?> mockTask = mock(StreamTask.class);
    when(mockTask.getName()).thenReturn("Mock Task");
    when(mockTask.getCheckpointLock()).thenReturn(new Object());
    when(mockTask.getConfiguration()).thenReturn(cfg);
    when(mockTask.getEnvironment()).thenReturn(env);
    when(mockTask.getExecutionConfig()).thenReturn(executionConfig);
    when(mockTask.getAccumulatorMap()).thenReturn(Collections.<String, Accumulator<?, ?>>emptyMap());
    when(mockTask.getStreamStatusMaintainer()).thenReturn(streamStatusMaintainer);

    doAnswer(new Answer<ProcessingTimeService>() {
        @Override
        public ProcessingTimeService answer(InvocationOnMock invocation) throws Throwable {
            if (timeProvider == null) {
                throw new RuntimeException("The time provider is null.");
            }
            return timeProvider;
        }
    }).when(mockTask).getProcessingTimeService();

    operator.setup(mockTask, cfg, (Output<StreamRecord<T>>) mock(Output.class));
}
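The helper wires a fresh StreamConfig (state backend plus time characteristic) and a mocked StreamTask around the operator so it can be opened outside a real task. A hypothetical call site, assuming a simple processing-time source and a test time service; none of the names below are taken from StreamSourceOperatorTest itself:

// Hypothetical test method using the helper above; the source function, the intervals,
// and TestProcessingTimeService are illustrative assumptions, not part of the original test.
@Test
public void exampleSetupUsage() throws Exception {
    final TestProcessingTimeService timeProvider = new TestProcessingTimeService();

    StreamSource<Long, SourceFunction<Long>> sourceOperator = new StreamSource<>(new SourceFunction<Long>() {
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<Long> ctx) throws Exception {
            while (running) {
                synchronized (ctx.getCheckpointLock()) {
                    ctx.collect(System.currentTimeMillis());
                }
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    });

    setupSourceOperator(sourceOperator, TimeCharacteristic.ProcessingTime, 200L, 0L, timeProvider);
    // the operator is now wired to the mocked task and could be opened and run from here
}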