Use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
The class SourceOperatorTestContext, method createOperatorStateStore:
private OperatorStateStore createOperatorStateStore() throws Exception {
    MockEnvironment env = new MockEnvironmentBuilder().build();
    final AbstractStateBackend abstractStateBackend = new MemoryStateBackend();
    CloseableRegistry cancelStreamRegistry = new CloseableRegistry();
    return abstractStateBackend.createOperatorStateBackend(
            env, "test-operator", Collections.emptyList(), cancelStreamRegistry);
}
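For orientation, a test that calls this helper might exercise the returned store along the following lines. This is a minimal sketch, not code from the Flink repository; the state descriptor name and the Long value type are illustrative assumptions.

// Hypothetical usage of the helper above; descriptor name and value type are illustrative.
OperatorStateStore store = createOperatorStateStore();
ListState<Long> listState =
        store.getListState(new ListStateDescriptor<>("buffered-elements", LongSerializer.INSTANCE));
listState.add(42L);
for (Long value : listState.get()) {
    // inspect the values that were added or restored
}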
Use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
The class AsyncWaitOperatorTest, method testAsyncTimeout:
private void testAsyncTimeout(
        LazyAsyncFunction lazyAsyncFunction,
        Optional<Class<? extends Throwable>> expectedException,
        StreamRecord<Integer>... expectedRecords) throws Exception {
    final long timeout = 10L;
    final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
            createTestHarness(lazyAsyncFunction, timeout, 2, AsyncDataStream.OutputMode.ORDERED);
    final MockEnvironment mockEnvironment = testHarness.getEnvironment();
    mockEnvironment.setExpectedExternalFailureCause(Throwable.class);
    final long initialTime = 0L;
    final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.setProcessingTime(initialTime);
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.processElement(new StreamRecord<>(1, initialTime));
        testHarness.setProcessingTime(initialTime + 5L);
        testHarness.processElement(new StreamRecord<>(2, initialTime + 5L));
    }
    // trigger the timeout of the first stream record
    testHarness.setProcessingTime(initialTime + timeout + 1L);
    // allow the second async stream record to be processed
    lazyAsyncFunction.countDown();
    // wait until all async collectors in the buffer have been emitted
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.endInput();
        testHarness.close();
    }
    expectedOutput.addAll(Arrays.asList(expectedRecords));
    TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
    if (expectedException.isPresent()) {
        assertTrue(mockEnvironment.getActualExternalFailureCause().isPresent());
        assertTrue(ExceptionUtils.findThrowable(
                mockEnvironment.getActualExternalFailureCause().get(), expectedException.get()).isPresent());
    }
}
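The MockEnvironment-specific part of this test is the expected-failure handshake: the environment is told which failure class to expect before the scenario runs, and afterwards the test asserts on the cause it actually captured. A minimal sketch of just that pattern follows; the harness construction is assumed, and TimeoutException stands in for whichever exception the scenario is expected to raise.

// Sketch of the expected-failure pattern; harness creation is assumed, TimeoutException is illustrative.
MockEnvironment mockEnvironment = testHarness.getEnvironment();
mockEnvironment.setExpectedExternalFailureCause(Throwable.class);
// ... run the scenario that should fail the task externally ...
assertTrue(mockEnvironment.getActualExternalFailureCause().isPresent());
assertTrue(ExceptionUtils.findThrowable(
        mockEnvironment.getActualExternalFailureCause().get(), TimeoutException.class).isPresent());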
Use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
The class BufferDataOverWindowOperatorTest, method test:
private void test(OverWindowFrame[] frames, GenericRowData[] expect) throws Exception {
    MockEnvironment env = new MockEnvironmentBuilder()
            .setIOManager(ioManager)
            .setMemoryManager(memoryManager)
            .build();
    StreamTask<Object, StreamOperator<Object>> task = new StreamTask<Object, StreamOperator<Object>>(env) {
        @Override
        protected void init() {
        }
    };
    operator = new BufferDataOverWindowOperator(frames, comparator, true) {
        {
            output = new NonBufferOverWindowOperatorTest.ConsumerOutput(new Consumer<RowData>() {
                @Override
                public void accept(RowData r) {
                    collect.add(GenericRowData.of(
                            r.getInt(0), r.getLong(1), r.getLong(2), r.getLong(3), r.getLong(4)));
                }
            });
        }

        @Override
        public ClassLoader getUserCodeClassloader() {
            return Thread.currentThread().getContextClassLoader();
        }

        @Override
        public StreamConfig getOperatorConfig() {
            StreamConfig conf = mock(StreamConfig.class);
            when(conf.<RowData>getTypeSerializerIn1(getUserCodeClassloader())).thenReturn(inputSer);
            when(conf.getManagedMemoryFractionOperatorUseCaseOfSlot(
                    eq(ManagedMemoryUseCase.OPERATOR), any(Configuration.class), any(ClassLoader.class)))
                    .thenReturn(0.99);
            return conf;
        }

        @Override
        public StreamTask<?, ?> getContainingTask() {
            return task;
        }

        @Override
        public StreamingRuntimeContext getRuntimeContext() {
            return mock(StreamingRuntimeContext.class);
        }
    };
    operator.setProcessingTimeService(new TestProcessingTimeService());
    operator.open();
    addRow(0, 1L, 4L); /* 1 */
    addRow(0, 1L, 1L); /* 2 */
    addRow(0, 1L, 1L); /* 3 */
    addRow(0, 1L, 1L); /* 4 */
    addRow(1, 5L, 2L); /* 5 */
    addRow(2, 5L, 4L); /* 6 */
    addRow(2, 6L, 2L); /* 7 */
    addRow(2, 6L, 2L); /* 8 */
    addRow(2, 6L, 2L); /* 9 */
    operator.endInput();
    GenericRowData[] outputs = this.collect.toArray(new GenericRowData[0]);
    Assert.assertArrayEquals(expect, outputs);
    operator.close();
}
Use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
The class OperatorChainTest, method setupOperatorChain:
// ------------------------------------------------------------------------
// Operator Chain Setup Utils
// ------------------------------------------------------------------------
@SafeVarargs
public static <T, OP extends StreamOperator<T>> OperatorChain<T, OP> setupOperatorChain(
        OneInputStreamOperator<T, T>... operators) throws Exception {
    checkNotNull(operators);
    checkArgument(operators.length > 0);
    try (MockEnvironment env = MockEnvironment.builder().build()) {
        final StreamTask<?, ?> containingTask = new MockStreamTaskBuilder(env).build();
        final StreamConfig cfg = new StreamConfig(new Configuration());
        cfg.setOperatorID(new OperatorID());
        cfg.setStateKeySerializer(new StringSerializer());
        final List<StreamOperatorWrapper<?, ?>> operatorWrappers = new ArrayList<>();

        // initial output goes to nowhere
        @SuppressWarnings({"unchecked", "rawtypes"})
        WatermarkGaugeExposingOutput<StreamRecord<T>> lastWriter =
                new BroadcastingOutputCollector<>(new Output[0]);

        // build the reverse operators array
        for (int i = 0; i < operators.length; i++) {
            int operatorIndex = operators.length - i - 1;
            OneInputStreamOperator<T, T> op = operators[operatorIndex];
            if (op instanceof SetupableStreamOperator) {
                ((SetupableStreamOperator) op).setup(containingTask, cfg, lastWriter);
            }
            lastWriter = new ChainingOutput<>(op, null);
            ProcessingTimeService processingTimeService = null;
            if (op instanceof AbstractStreamOperator) {
                processingTimeService = ((AbstractStreamOperator) op).getProcessingTimeService();
            }
            operatorWrappers.add(new StreamOperatorWrapper<>(
                    op,
                    Optional.ofNullable(processingTimeService),
                    containingTask.getMailboxExecutorFactory().createExecutor(i),
                    operatorIndex == 0));
        }

        @SuppressWarnings("unchecked")
        final StreamOperatorWrapper<T, OP> headOperatorWrapper =
                (StreamOperatorWrapper<T, OP>) operatorWrappers.get(operatorWrappers.size() - 1);
        return new RegularOperatorChain<>(
                operatorWrappers, new RecordWriterOutput<?>[0], lastWriter, headOperatorWrapper);
    }
}
Use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
The class PojoSerializerUpgradeTest, method runOperator:
private OperatorSubtaskState runOperator(
        Configuration taskConfiguration,
        ExecutionConfig executionConfig,
        OneInputStreamOperator<Long, Long> operator,
        KeySelector<Long, Long> keySelector,
        boolean isKeyedState,
        StateBackend stateBackend,
        ClassLoader classLoader,
        OperatorSubtaskState operatorSubtaskState,
        Iterable<Long> input) throws Exception {
    try (final MockEnvironment environment = new MockEnvironmentBuilder()
            .setTaskName("test task")
            .setManagedMemorySize(32 * 1024)
            .setInputSplitProvider(new MockInputSplitProvider())
            .setBufferSize(256)
            .setTaskConfiguration(taskConfiguration)
            .setExecutionConfig(executionConfig)
            .setMaxParallelism(16)
            .setUserCodeClassLoader(classLoader)
            .build()) {
        OneInputStreamOperatorTestHarness<Long, Long> harness = null;
        try {
            if (isKeyedState) {
                harness = new KeyedOneInputStreamOperatorTestHarness<>(
                        operator, keySelector, BasicTypeInfo.LONG_TYPE_INFO, environment);
            } else {
                harness = new OneInputStreamOperatorTestHarness<>(operator, LongSerializer.INSTANCE, environment);
            }
            harness.setStateBackend(stateBackend);
            harness.setup();
            harness.initializeState(operatorSubtaskState);
            harness.open();
            long timestamp = 0L;
            for (Long value : input) {
                harness.processElement(value, timestamp++);
            }
            long checkpointId = 1L;
            long checkpointTimestamp = timestamp + 1L;
            return harness.snapshot(checkpointId, checkpointTimestamp);
        } finally {
            IOUtils.closeQuietly(harness);
        }
    }
}
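Across the examples above, the recurring pattern is the same: build a MockEnvironment through its builder, hand it to a test harness or StreamTask, and close it when the test finishes (several snippets do this with try-with-resources). A minimal skeleton of that pattern, using only builder options that appear in the snippets above:

// Minimal skeleton assembled from the builder calls shown above; the body is a placeholder.
try (MockEnvironment env = new MockEnvironmentBuilder()
        .setTaskName("test task")
        .setBufferSize(256)
        .setMaxParallelism(16)
        .build()) {
    // pass env to an operator test harness or StreamTask, run the scenario, assert on the output
}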