Use of org.apache.flink.streaming.api.operators.StreamMap in project flink by apache.
From the class OneInputStreamTaskTest, method testCheckpointBarriers.
/**
* This test verifies that checkpoint barriers are correctly forwarded.
*/
@Test
public void testCheckpointBarriers() throws Exception {
final OneInputStreamTask<String, String> mapTask = new OneInputStreamTask<String, String>();
// harness with two input gates of two channels each, matching the (gate, channel) pairs used below
final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<String, String>(mapTask, 2, 2, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setupOutputForSingletonOperatorChain();
StreamConfig streamConfig = testHarness.getStreamConfig();
StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
streamConfig.setStreamOperator(mapOperator);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
long initialTime = 0L;
testHarness.invoke();
testHarness.waitForTaskRunning();
// send a barrier for checkpoint 0 on input gate 0, channel 0; this channel is blocked until alignment completes
testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 0, 0);
// These elements should be buffered until we receive barriers from
// all inputs
testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);
// These elements should be forwarded, since we have not yet received a checkpoint barrier
// on that input. We only add elements to the same input channel, because otherwise the
// output order would be unknown: the task may read its inputs in any order
testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));
testHarness.waitForInputProcessing();
// we should not yet see the barrier, only the two elements from the non-blocked input
TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
// deliver the barrier on the remaining three channels to complete the alignment
testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 0, 1);
testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 1, 0);
testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 1, 1);
testHarness.waitForInputProcessing();
// now we should see the barrier and after that the buffered elements
expectedOutput.add(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()));
expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
testHarness.endInput();
testHarness.waitForTaskCompletion();
TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
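For context, the IdentityMap function wrapped by the StreamMap above is a private helper of OneInputStreamTaskTest that is not shown in this snippet. A minimal sketch, assuming it is a plain pass-through MapFunction (org.apache.flink.api.common.functions.MapFunction):

// Plausible reconstruction of the IdentityMap helper: a pass-through map, so
// the test exercises the task's barrier handling rather than operator logic.
private static class IdentityMap implements MapFunction<String, String> {
    private static final long serialVersionUID = 1L;

    @Override
    public String map(String value) throws Exception {
        return value;
    }
}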
Use of org.apache.flink.streaming.api.operators.StreamMap in project flink by apache.
From the class StreamOperatorChainingTest, method testMultiChainingWithSplit.
/**
* Verify that multi-chaining works in combination with stream splits.
*/
private void testMultiChainingWithSplit(StreamExecutionEnvironment env) throws Exception {
// the actual elements will not be used
DataStream<Integer> input = env.fromElements(1, 2, 3);
sink1Results = new ArrayList<>();
sink2Results = new ArrayList<>();
sink3Results = new ArrayList<>();
input = input.map(new MapFunction<Integer, Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Integer map(Integer value) throws Exception {
return value;
}
});
SplitStream<Integer> split = input.split(new OutputSelector<Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Iterable<String> select(Integer value) {
if (value.equals(1)) {
return Collections.singletonList("one");
} else {
return Collections.singletonList("other");
}
}
});
split.select("one").map(new MapFunction<Integer, String>() {
private static final long serialVersionUID = 1L;
@Override
public String map(Integer value) throws Exception {
return "First 1: " + value;
}
}).addSink(new SinkFunction<String>() {
private static final long serialVersionUID = 1L;
@Override
public void invoke(String value) throws Exception {
sink1Results.add(value);
}
});
split.select("one").map(new MapFunction<Integer, String>() {
private static final long serialVersionUID = 1L;
@Override
public String map(Integer value) throws Exception {
return "First 2: " + value;
}
}).addSink(new SinkFunction<String>() {
private static final long serialVersionUID = 1L;
@Override
public void invoke(String value) throws Exception {
sink2Results.add(value);
}
});
split.select("other").map(new MapFunction<Integer, String>() {
private static final long serialVersionUID = 1L;
@Override
public String map(Integer value) throws Exception {
return "Second: " + value;
}
}).addSink(new SinkFunction<String>() {
private static final long serialVersionUID = 1L;
@Override
public void invoke(String value) throws Exception {
sink3Results.add(value);
}
});
// now we build our own StreamTask and OperatorChain
JobGraph jobGraph = env.getStreamGraph().getJobGraph();
Assert.assertEquals(2, jobGraph.getVerticesSortedTopologicallyFromSources().size());
JobVertex chainedVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1);
Configuration configuration = chainedVertex.getConfiguration();
StreamConfig streamConfig = new StreamConfig(configuration);
StreamMap<Integer, Integer> headOperator = streamConfig.getStreamOperator(Thread.currentThread().getContextClassLoader());
StreamTask<Integer, StreamMap<Integer, Integer>> mockTask = createMockTask(streamConfig, chainedVertex.getName());
OperatorChain<Integer, StreamMap<Integer, Integer>> operatorChain = new OperatorChain<>(mockTask);
headOperator.setup(mockTask, streamConfig, operatorChain.getChainEntryPoint());
for (StreamOperator<?> operator : operatorChain.getAllOperators()) {
if (operator != null) {
operator.open();
}
}
headOperator.processElement(new StreamRecord<>(1));
headOperator.processElement(new StreamRecord<>(2));
headOperator.processElement(new StreamRecord<>(3));
assertThat(sink1Results, contains("First 1: 1"));
assertThat(sink2Results, contains("First 2: 1"));
assertThat(sink3Results, contains("Second: 2", "Second: 3"));
}
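The sink1Results, sink2Results, and sink3Results lists are fields of StreamOperatorChainingTest rather than locals of this method. A minimal sketch of how they might be declared, assuming they are static so that the serialized anonymous SinkFunction instances and the assertions observe the same lists:

// Assumed declarations (import java.util.List): static fields, because the
// anonymous SinkFunctions are serialized with the job graph and would not
// otherwise share instance state with the test.
private static List<String> sink1Results;
private static List<String> sink2Results;
private static List<String> sink3Results;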
Use of org.apache.flink.streaming.api.operators.StreamMap in project flink by apache.
From the class TestProcessingTimeServiceTest, method testCustomTimeServiceProvider.
@Test
public void testCustomTimeServiceProvider() throws Throwable {
TestProcessingTimeService tp = new TestProcessingTimeService();
final OneInputStreamTask<String, String> mapTask = new OneInputStreamTask<>();
mapTask.setProcessingTimeService(tp);
final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(mapTask, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setupOutputForSingletonOperatorChain();
StreamConfig streamConfig = testHarness.getStreamConfig();
StreamMap<String, String> mapOperator = new StreamMap<>(new StreamTaskTimerTest.DummyMapFunction<String>());
streamConfig.setStreamOperator(mapOperator);
testHarness.invoke();
assertEquals(Long.MIN_VALUE, testHarness.getProcessingTimeService().getCurrentProcessingTime());
tp.setCurrentTime(11);
assertEquals(11, testHarness.getProcessingTimeService().getCurrentProcessingTime());
tp.setCurrentTime(15);
tp.setCurrentTime(16);
assertEquals(16, testHarness.getProcessingTimeService().getCurrentProcessingTime());
// register two timers
mapTask.getProcessingTimeService().registerTimer(30, new ProcessingTimeCallback() {
@Override
public void onProcessingTime(long timestamp) {
}
});
mapTask.getProcessingTimeService().registerTimer(40, new ProcessingTimeCallback() {
@Override
public void onProcessingTime(long timestamp) {
}
});
assertEquals(2, tp.getNumActiveTimers());
tp.setCurrentTime(35);
assertEquals(1, tp.getNumActiveTimers());
tp.setCurrentTime(40);
assertEquals(0, tp.getNumActiveTimers());
tp.shutdownService();
}
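The DummyMapFunction used as the operator's user function here comes from StreamTaskTimerTest and is not included in these snippets. A minimal identity-style sketch consistent with its usage:

// Plausible sketch of StreamTaskTimerTest.DummyMapFunction: a no-op identity
// map whose only purpose is to give the task an operator to run.
public static class DummyMapFunction<T> implements MapFunction<T, T> {
    private static final long serialVersionUID = 1L;

    @Override
    public T map(T value) {
        return value;
    }
}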
Use of org.apache.flink.streaming.api.operators.StreamMap in project flink by apache.
From the class StreamOperatorChainingTest, method testMultiChaining.
/**
* Verify that multi-chaining works.
*/
private void testMultiChaining(StreamExecutionEnvironment env) throws Exception {
// the actual elements will not be used
DataStream<Integer> input = env.fromElements(1, 2, 3);
sink1Results = new ArrayList<>();
sink2Results = new ArrayList<>();
input = input.map(new MapFunction<Integer, Integer>() {
private static final long serialVersionUID = 1L;
@Override
public Integer map(Integer value) throws Exception {
return value;
}
});
input.map(new MapFunction<Integer, String>() {
private static final long serialVersionUID = 1L;
@Override
public String map(Integer value) throws Exception {
return "First: " + value;
}
}).addSink(new SinkFunction<String>() {
private static final long serialVersionUID = 1L;
@Override
public void invoke(String value) throws Exception {
sink1Results.add(value);
}
});
input.map(new MapFunction<Integer, String>() {
private static final long serialVersionUID = 1L;
@Override
public String map(Integer value) throws Exception {
return "Second: " + value;
}
}).addSink(new SinkFunction<String>() {
private static final long serialVersionUID = 1L;
@Override
public void invoke(String value) throws Exception {
sink2Results.add(value);
}
});
// now we build our own StreamTask and OperatorChain
JobGraph jobGraph = env.getStreamGraph().getJobGraph();
Assert.assertEquals(2, jobGraph.getVerticesSortedTopologicallyFromSources().size());
JobVertex chainedVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1);
Configuration configuration = chainedVertex.getConfiguration();
StreamConfig streamConfig = new StreamConfig(configuration);
StreamMap<Integer, Integer> headOperator = streamConfig.getStreamOperator(Thread.currentThread().getContextClassLoader());
StreamTask<Integer, StreamMap<Integer, Integer>> mockTask = createMockTask(streamConfig, chainedVertex.getName());
OperatorChain<Integer, StreamMap<Integer, Integer>> operatorChain = new OperatorChain<>(mockTask);
headOperator.setup(mockTask, streamConfig, operatorChain.getChainEntryPoint());
for (StreamOperator<?> operator : operatorChain.getAllOperators()) {
if (operator != null) {
operator.open();
}
}
headOperator.processElement(new StreamRecord<>(1));
headOperator.processElement(new StreamRecord<>(2));
headOperator.processElement(new StreamRecord<>(3));
assertThat(sink1Results, contains("First: 1", "First: 2", "First: 3"));
assertThat(sink2Results, contains("Second: 1", "Second: 2", "Second: 3"));
}
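Both chaining tests call a createMockTask helper that is not part of these snippets. A plausible Mockito-based sketch, where the specific stubbed accessors (getName, getCheckpointLock, getConfiguration, getEnvironment) are assumptions about what operator setup and the OperatorChain constructor need:

// Hypothetical reconstruction of createMockTask; assumes static imports of
// org.mockito.Mockito.mock and org.mockito.Mockito.when, plus
// org.apache.flink.runtime.execution.Environment.
@SuppressWarnings("unchecked")
private <IN, OT extends StreamOperator<IN>> StreamTask<IN, OT> createMockTask(
        StreamConfig streamConfig, String taskName) {

    final Object checkpointLock = new Object();

    StreamTask<IN, OT> task = mock(StreamTask.class);
    when(task.getName()).thenReturn(taskName);
    when(task.getCheckpointLock()).thenReturn(checkpointLock);
    when(task.getConfiguration()).thenReturn(streamConfig);
    when(task.getEnvironment()).thenReturn(mock(Environment.class));

    return task;
}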
Use of org.apache.flink.streaming.api.operators.StreamMap in project flink by apache.
From the class StreamTaskTimerTest, method checkScheduledTimestampe.
@Test
public void checkScheduledTimestampe() {
try {
final OneInputStreamTask<String, String> mapTask = new OneInputStreamTask<>();
final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(mapTask, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setupOutputForSingletonOperatorChain();
StreamConfig streamConfig = testHarness.getStreamConfig();
StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
streamConfig.setStreamOperator(mapOperator);
testHarness.invoke();
testHarness.waitForTaskRunning();
final AtomicReference<Throwable> errorRef = new AtomicReference<>();
// timers at 'now', in the past, and in the future; timers in the past must still fire
final long t1 = System.currentTimeMillis();
final long t2 = System.currentTimeMillis() - 200;
final long t3 = System.currentTimeMillis() + 100;
final long t4 = System.currentTimeMillis() + 200;
ProcessingTimeService timeService = mapTask.getProcessingTimeService();
timeService.registerTimer(t1, new ValidatingProcessingTimeCallback(errorRef, t1, 0));
timeService.registerTimer(t2, new ValidatingProcessingTimeCallback(errorRef, t2, 1));
timeService.registerTimer(t3, new ValidatingProcessingTimeCallback(errorRef, t3, 2));
timeService.registerTimer(t4, new ValidatingProcessingTimeCallback(errorRef, t4, 3));
long deadline = System.currentTimeMillis() + 20000;
while (errorRef.get() == null && ValidatingProcessingTimeCallback.numInSequence < 4 && System.currentTimeMillis() < deadline) {
Thread.sleep(100);
}
// handle errors
if (errorRef.get() != null) {
errorRef.get().printStackTrace();
fail(errorRef.get().getMessage());
}
assertEquals(4, ValidatingProcessingTimeCallback.numInSequence);
testHarness.endInput();
testHarness.waitForTaskCompletion();
// wait until the trigger thread is shut down; otherwise, the other tests may become unstable
deadline = System.currentTimeMillis() + 4000;
while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
Thread.sleep(10);
}
assertEquals("Trigger timer thread did not properly shut down", 0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
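The ValidatingProcessingTimeCallback helper is also not shown. A sketch consistent with how the test constructs it (an error holder, an expected timestamp, and an expected position in the firing sequence) and with the shared numInSequence counter asserted on above; the exact comparison logic and error message are assumptions:

// Hypothetical reconstruction: records a failure into errorRef when fired with
// an unexpected timestamp or out of order, and advances the shared counter.
private static class ValidatingProcessingTimeCallback implements ProcessingTimeCallback {

    static int numInSequence;

    private final AtomicReference<Throwable> errorRef;
    private final long expectedTimestamp;
    private final int expectedInSequence;

    ValidatingProcessingTimeCallback(
            AtomicReference<Throwable> errorRef, long expectedTimestamp, int expectedInSequence) {
        this.errorRef = errorRef;
        this.expectedTimestamp = expectedTimestamp;
        this.expectedInSequence = expectedInSequence;
    }

    @Override
    public void onProcessingTime(long timestamp) {
        if (timestamp != expectedTimestamp || numInSequence != expectedInSequence) {
            errorRef.compareAndSet(null, new Exception("Timer fired with timestamp "
                    + timestamp + " at position " + numInSequence + ", expected timestamp "
                    + expectedTimestamp + " at position " + expectedInSequence));
        }
        numInSequence++;
    }
}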