Example usage of org.apache.flink.runtime.checkpoint.OperatorState in the Apache Flink project.
From the class KeyedStateInputFormatTest, method testReadMultipleOutputPerKey.
@Test
public void testReadMultipleOutputPerKey() throws Exception {
// Snapshot a stateful flat-map operator and register it as subtask 0 of an
// operator state with parallelism 1 and max parallelism 128.
OperatorSubtaskState subtaskState =
        createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
OperatorState operatorState =
        new OperatorState(OperatorIDGenerator.fromUid("uid"), 1, 128);
operatorState.putState(0, subtaskState);

// Build an input format over the snapshot and take its single key-group split.
KeyedStateInputFormat<?, ?, ?> inputFormat =
        new KeyedStateInputFormat<>(
                operatorState,
                new MemoryStateBackend(),
                new Configuration(),
                new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));
KeyGroupRangeInputSplit firstSplit = inputFormat.createInputSplits(1)[0];

// Read the split with a function that (judging by the expected output) emits
// each key twice; every key must therefore appear two times in the result.
KeyedStateReaderFunction<Integer, Integer> doubleReader = new DoubleReaderFunction();
List<Integer> readValues = readInputSplit(firstSplit, doubleReader);

Assert.assertEquals(
        "Incorrect data read from input split", Arrays.asList(1, 1, 2, 2, 3, 3), readValues);
}
Example usage of org.apache.flink.runtime.checkpoint.OperatorState in the Apache Flink project.
From the class StreamOperatorContextBuilderTest, method testStateBackendLoading.
@Test(expected = CustomStateBackendFactory.ExpectedException.class)
public void testStateBackendLoading() throws Exception {
// Point the state-backend option at a custom factory; building the context
// should attempt to load that backend and surface the factory's exception.
Configuration config = new Configuration();
config.set(
        StateBackendOptions.STATE_BACKEND,
        CustomStateBackendFactory.class.getCanonicalName());

// An input split whose subtask state is empty / not restored, numbered 0.
PrioritizedOperatorSubtaskStateInputSplit emptySplit =
        new PrioritizedOperatorSubtaskStateInputSplit() {
            @Override
            public PrioritizedOperatorSubtaskState getPrioritizedOperatorSubtaskState() {
                return PrioritizedOperatorSubtaskState.emptyNotRestored();
            }

            @Override
            public int getSplitNumber() {
                return 0;
            }
        };

StreamOperatorContextBuilder contextBuilder =
        new StreamOperatorContextBuilder(
                new MockStreamingRuntimeContext(true, 1, 0),
                config,
                new OperatorState(new OperatorID(), 1, 128),
                emptySplit,
                new CloseableRegistry(),
                null);

// Expected to throw CustomStateBackendFactory.ExpectedException while loading.
contextBuilder.build(LOG);
}
Example usage of org.apache.flink.runtime.checkpoint.OperatorState in the Apache Flink project.
From the class SavepointTest, method testExistingSavepointEnforceUniqueUIDsWithOldSavepoint.
@Test(expected = IllegalArgumentException.class)
public void testExistingSavepointEnforceUniqueUIDsWithOldSavepoint() throws IOException {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(10);

DataSource<Integer> source = env.fromElements(0);
BootstrapTransformation<Integer> bootstrap =
        OperatorTransformation.bootstrapWith(source)
                .transform(new ExampleStateBootstrapFunction());

// The pre-existing savepoint metadata already holds an operator under UID.
Collection<OperatorState> existingStates =
        Collections.singletonList(
                new OperatorState(OperatorIDGenerator.fromUid(UID), 1, 4));
SavepointMetadata metadata =
        new SavepointMetadata(4, Collections.emptyList(), existingStates);

// Registering a new operator with the same UID on top of the old savepoint
// must be rejected with an IllegalArgumentException.
new ExistingSavepoint(env, metadata, new MemoryStateBackend())
        .withOperator(UID, bootstrap)
        .write("");
}
Example usage of org.apache.flink.runtime.checkpoint.OperatorState in the Apache Flink project.
From the class SavepointTest, method testExistingSavepointEnforceUniqueUIDs.
@Test(expected = IllegalArgumentException.class)
public void testExistingSavepointEnforceUniqueUIDs() throws IOException {
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(10);

DataSource<Integer> source = env.fromElements(0);
BootstrapTransformation<Integer> bootstrap =
        OperatorTransformation.bootstrapWith(source)
                .transform(new ExampleStateBootstrapFunction());

// Savepoint metadata seeded with one operator already registered under UID.
Collection<OperatorState> existingStates =
        Collections.singletonList(
                new OperatorState(OperatorIDGenerator.fromUid(UID), 1, 4));
SavepointMetadata metadata =
        new SavepointMetadata(4, Collections.emptyList(), existingStates);

// Chaining two withOperator calls for the same UID must be rejected with an
// IllegalArgumentException — UIDs are enforced to be unique.
new ExistingSavepoint(env, metadata, new MemoryStateBackend())
        .withOperator(UID, bootstrap)
        .withOperator(UID, bootstrap);
}
Example usage of org.apache.flink.runtime.checkpoint.OperatorState in the Apache Flink project.
From the class WindowReaderTest, method testAggregateWindow.
@Test
public void testAggregateWindow() throws Exception {
// Window operator that aggregates elements with AggregateSum over
// tumbling event-time windows of 1 ms.
WindowOperator<Integer, Integer, ?, Void, ?> windowOperator =
        getWindowOperator(
                stream ->
                        stream.window(TumblingEventTimeWindows.of(Time.milliseconds(1)))
                                .aggregate(new AggregateSum()));
OperatorState snapshot = getOperatorState(windowOperator);

// Read the aggregate window state back through the state-processor input format.
KeyedStateInputFormat<Integer, TimeWindow, Integer> inputFormat =
        new KeyedStateInputFormat<>(
                snapshot,
                new MemoryStateBackend(),
                new Configuration(),
                WindowReaderOperator.aggregate(
                        new AggregateSum(),
                        new PassThroughReader<>(),
                        Types.INT,
                        new TimeWindow.Serializer(),
                        Types.INT));

List<Integer> values = readState(inputFormat);
Assert.assertEquals(Arrays.asList(1, 1), values);
}
Aggregations