use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
the class StreamTaskCancellationTest method testCanceleablesCanceledOnCancelTaskError.
@Test
public void testCanceleablesCanceledOnCancelTaskError() throws Exception {
    CancelFailingTask.syncLatch = new OneShotLatch();
    StreamConfig cfg = new StreamConfig(new Configuration());
    try (NettyShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {
        Task task = createTask(CancelFailingTask.class, shuffleEnvironment, cfg, new Configuration());
        // start the task and wait until it runs; execution state RUNNING is not enough,
        // we need to wait until the stream task's run() method is entered
        task.startTaskThread();
        CancelFailingTask.syncLatch.await();
        // cancel the execution - this should lead to smooth shutdown
        task.cancelExecution();
        task.getExecutingThread().join();
        assertEquals(ExecutionState.CANCELED, task.getExecutionState());
    }
}
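A hedged aside (not taken from the test above): the common pattern these snippets rely on is to back a StreamConfig with a fresh Configuration and then set an operator plus an operator ID before the configuration is handed to a task or harness. The sketch below recombines only calls that appear on this page plus Flink's StreamMap operator and MapFunction interface; the helper method itself is illustrative.
// Illustrative sketch only: populate a StreamConfig with a simple map operator.
// StreamConfig, StreamMap, MapFunction and OperatorID are Flink classes; the helper method is not from the test above.
private static StreamConfig buildSimpleStreamConfig() {
    StreamConfig cfg = new StreamConfig(new Configuration());
    // setStreamOperator serializes the operator into the backing Configuration
    cfg.setStreamOperator(new StreamMap<>((MapFunction<String, String>) String::toUpperCase));
    cfg.setOperatorID(new OperatorID());
    return cfg;
}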
use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
the class StreamTaskTestHarness method setupOperatorChain.
public StreamConfigChainer<StreamTaskTestHarness<OUT>> setupOperatorChain(OperatorID headOperatorId, StreamOperatorFactory<?> headOperatorFactory) {
    Preconditions.checkState(!setupCalled, "This harness was already setup.");
    setupCalled = true;
    StreamConfig streamConfig = getStreamConfig();
    streamConfig.setStreamOperatorFactory(headOperatorFactory);
    return new StreamConfigChainer(headOperatorId, streamConfig, this, 1);
}
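A hypothetical usage sketch for this helper follows. It assumes the returned StreamConfigChainer exposes chain(...) and finish() as it does elsewhere in Flink's test code, and that SimpleOperatorFactory, StreamMap, OneInputStreamTask and StringSerializer are available; exact overloads may differ between Flink versions.
// Hypothetical usage of setupOperatorChain: build a two-operator chain on a test harness.
// The chain(...)/finish() calls follow the StreamConfigChainer API used in Flink's tests; verify against your version.
StreamTaskTestHarness<String> harness =
    new StreamTaskTestHarness<>(OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO);
harness
    .setupOperatorChain(new OperatorID(), SimpleOperatorFactory.of(new StreamMap<>((MapFunction<String, String>) String::trim)))
    .chain(new OperatorID(), new StreamMap<>((MapFunction<String, String>) String::toUpperCase), StringSerializer.INSTANCE)
    .finish();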
use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
the class TwoInputStreamTaskTest method testCheckpointBarrierMetrics.
/**
 * Tests that the checkpoint-related metrics are correctly registered into {@link TaskIOMetricGroup}
 * when the {@link TwoInputStreamTask} is created.
 */
@Test
public void testCheckpointBarrierMetrics() throws Exception {
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness = new TwoInputStreamTaskTestHarness<>(TwoInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<>(new IdentityMap());
    testHarness.setupOutputForSingletonOperatorChain();
    streamConfig.setStreamOperator(coMapOperator);
    final Map<String, Metric> metrics = new ConcurrentHashMap<>();
    final TaskMetricGroup taskMetricGroup = StreamTaskTestHarness.createTaskMetricGroup(metrics);
    final StreamMockEnvironment environment = testHarness.createEnvironment();
    environment.setTaskMetricGroup(taskMetricGroup);
    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();
    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME));
    assertThat(metrics, IsMapContaining.hasKey(MetricNames.CHECKPOINT_START_DELAY_TIME));
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
}
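Beyond the hasKey assertions, one might also read the registered gauges. The following is a hedged sketch under the assumption that the two checkpoint metrics are registered as Gauge<Long> (how TaskIOMetricGroup exposes them); the helper method name is made up for illustration.
// Hedged sketch: read the registered checkpoint gauges instead of only checking key presence.
// Assumes the metrics are Gauge<Long>; confirm against your Flink version before relying on it.
@SuppressWarnings("unchecked")
private static void assertCheckpointGaugesPresentAndNonNegative(Map<String, Metric> metrics) {
    Gauge<Long> alignmentTime = (Gauge<Long>) metrics.get(MetricNames.CHECKPOINT_ALIGNMENT_TIME);
    Gauge<Long> startDelay = (Gauge<Long>) metrics.get(MetricNames.CHECKPOINT_START_DELAY_TIME);
    assertNotNull(alignmentTime);
    assertNotNull(startDelay);
    // no barrier has been processed at this point, so both values should still be non-negative defaults
    assertTrue(alignmentTime.getValue() >= 0L);
    assertTrue(startDelay.getValue() >= 0L);
}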
use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
the class TwoInputStreamTaskTest method testWatermarkAndWatermarkStatusForwarding.
/**
 * This test verifies that watermarks and watermark statuses are correctly forwarded. It also
 * checks that a watermark is forwarded only once watermarks have been received from all
 * inputs. The forwarded watermark must be the minimum of the watermarks of all active inputs
 * (a standalone sketch of this min rule follows the test code below).
 */
@Test
public void testWatermarkAndWatermarkStatusForwarding() throws Exception {
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness = new TwoInputStreamTaskTestHarness<>(TwoInputStreamTask::new, 2, 2, new int[] { 1, 2 }, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<>(new IdentityMap());
    streamConfig.setStreamOperator(coMapOperator);
    streamConfig.setOperatorID(new OperatorID());
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    long initialTime = 0L;
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    testHarness.processElement(new Watermark(initialTime), 0, 0);
    testHarness.processElement(new Watermark(initialTime), 0, 1);
    testHarness.processElement(new Watermark(initialTime), 1, 0);
    // now the output should still be empty
    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.processElement(new Watermark(initialTime), 1, 1);
    // now the watermark should have propagated; the map simply forwards watermarks
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // contrary to checkpoint barriers, these elements are not blocked by watermarks
    testHarness.processElement(new StreamRecord<>("Hello", initialTime), 0, 0);
    testHarness.processElement(new StreamRecord<>(42, initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<>("Hello", initialTime));
    expectedOutput.add(new StreamRecord<>("42", initialTime));
    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.processElement(new Watermark(initialTime + 4), 0, 0);
    testHarness.processElement(new Watermark(initialTime + 3), 0, 1);
    testHarness.processElement(new Watermark(initialTime + 3), 1, 0);
    testHarness.processElement(new Watermark(initialTime + 2), 1, 1);
    // check whether we get the minimum of all the watermarks; this must also only occur in
    // the output after the two StreamRecords
    expectedOutput.add(new Watermark(initialTime + 2));
    testHarness.waitForInputProcessing();
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // advance the watermark on one of the inputs; now we should get a new one because the minimum increases
    testHarness.processElement(new Watermark(initialTime + 4), 1, 1);
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime + 3));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // advance the other two inputs; now we should get a new one because the minimum increases again
    testHarness.processElement(new Watermark(initialTime + 4), 0, 1);
    testHarness.processElement(new Watermark(initialTime + 4), 1, 0);
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime + 4));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // test whether idle input channels are acknowledged correctly when forwarding watermarks
    testHarness.processElement(WatermarkStatus.IDLE, 0, 1);
    testHarness.processElement(WatermarkStatus.IDLE, 1, 0);
    testHarness.processElement(new Watermark(initialTime + 6), 0, 0);
    testHarness.processElement(new Watermark(initialTime + 5), 1, 1); // this watermark should be advanced first
    testHarness.processElement(WatermarkStatus.IDLE, 1, 1); // once this is acknowledged
    testHarness.waitForInputProcessing();
    expectedOutput.add(new Watermark(initialTime + 5));
    expectedOutput.add(new Watermark(initialTime + 6));
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // make all input channels idle and check that the operator's idle status is forwarded
    testHarness.processElement(WatermarkStatus.IDLE, 0, 0);
    testHarness.waitForInputProcessing();
    expectedOutput.add(WatermarkStatus.IDLE);
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    // make some input channels active again and check that the operator's active status is forwarded only once
    testHarness.processElement(WatermarkStatus.ACTIVE, 1, 0);
    testHarness.processElement(WatermarkStatus.ACTIVE, 0, 1);
    testHarness.waitForInputProcessing();
    expectedOutput.add(WatermarkStatus.ACTIVE);
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    List<String> resultElements = TestHarnessUtil.getRawElementsFromOutput(testHarness.getOutput());
    Assert.assertEquals(2, resultElements.size());
}
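The rule exercised above can be stated compactly: the combined watermark is the minimum over all currently active inputs, and it is forwarded only when that minimum advances. The following standalone sketch is not Flink code; it is plain Java that mirrors the rule so the expected outputs in the test are easier to follow.
// Standalone illustration of the min-watermark rule the test above verifies; not Flink code.
final class MinWatermarkSketch {
    static long combinedWatermark(long[] perInputWatermarks, boolean[] inputIsActive) {
        long min = Long.MAX_VALUE;
        boolean anyActive = false;
        for (int i = 0; i < perInputWatermarks.length; i++) {
            if (inputIsActive[i]) {
                anyActive = true;
                min = Math.min(min, perInputWatermarks[i]);
            }
        }
        // with no active inputs there is nothing to combine; the operator would forward WatermarkStatus.IDLE instead
        return anyActive ? min : Long.MIN_VALUE;
    }

    public static void main(String[] args) {
        // mirrors the step where the four input channels hold watermarks 4, 3, 3 and 2: the forwarded watermark is 2
        System.out.println(combinedWatermark(new long[] {4, 3, 3, 2}, new boolean[] {true, true, true, true}));
    }
}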
use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
the class TwoInputStreamTaskTest method testOpenCloseAndTimestamps.
/**
 * This test verifies that open() and close() are correctly called. It also verifies that the
 * timestamps of emitted elements are correct: {@link CoStreamMap} assigns the input timestamp
 * to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness = new TwoInputStreamTaskTestHarness<>(TwoInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<>(new TestOpenCloseMapFunction());
    streamConfig.setStreamOperator(coMapOperator);
    streamConfig.setOperatorID(new OperatorID());
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    testHarness.processElement(new StreamRecord<>("Hello", initialTime + 1), 0, 0);
    expectedOutput.add(new StreamRecord<>("Hello", initialTime + 1));
    // wait until the input is processed to ensure ordering of the output
    testHarness.waitForInputProcessing();
    testHarness.processElement(new StreamRecord<>(1337, initialTime + 2), 1, 0);
    expectedOutput.add(new StreamRecord<>("1337", initialTime + 2));
    testHarness.waitForInputProcessing();
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    Assert.assertTrue("RichFunction methods were not called.", TestOpenCloseMapFunction.closeCalled);
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
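The TestOpenCloseMapFunction helper referenced above is not reproduced on this page. A plausible shape for such a helper, consistent with the assertion on TestOpenCloseMapFunction.closeCalled, is a RichCoMapFunction that records open() and close() in static flags; treat the class below as a hedged reconstruction, not the class from the Flink repository.
// Hedged reconstruction of a helper like TestOpenCloseMapFunction; the real class may differ.
private static class OpenCloseRecordingCoMap extends RichCoMapFunction<String, Integer, String> {
    static volatile boolean openCalled = false;
    static volatile boolean closeCalled = false;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        openCalled = true;
    }

    @Override
    public void close() throws Exception {
        super.close();
        closeCalled = true;
    }

    @Override
    public String map1(String value) {
        if (!openCalled) {
            throw new IllegalStateException("open() was not called before map1()");
        }
        return value;
    }

    @Override
    public String map2(Integer value) {
        if (!openCalled) {
            throw new IllegalStateException("open() was not called before map2()");
        }
        return value.toString();
    }
}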