Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.
From the class KeyedStateInputFormatTest, method testReadTime.
@Test
public void testReadTime() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state =
            createOperatorSubtaskState(new KeyedProcessOperator<>(new StatefulFunctionWithTime()));

    // Wrap the snapshotted subtask state in an OperatorState (parallelism 1, max parallelism 128).
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format =
            new KeyedStateInputFormat<>(
                    operatorState,
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(new TimerReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    KeyedStateReaderFunction<Integer, Integer> userFunction = new TimerReaderFunction();
    List<Integer> data = readInputSplit(split, userFunction);

    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 1, 2, 2, 3, 3), data);
}
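The readInputSplit helper is a fixture defined elsewhere in KeyedStateInputFormatTest. A minimal sketch, assuming it opens a fresh KeyedStateInputFormat over an empty OperatorState for the same uid (the split itself carries the snapshotted state), drives the split to completion, and returns the records sorted; MockStreamingRuntimeContext is a Flink test utility, and the exact wiring in the real test may differ:

private List<Integer> readInputSplit(
        KeyGroupRangeInputSplit split, KeyedStateReaderFunction<Integer, Integer> userFunction) throws IOException {
    KeyedStateInputFormat<Integer, VoidNamespace, Integer> format =
            new KeyedStateInputFormat<>(
                    new OperatorState(OperatorIDGenerator.fromUid("uid"), 1, 128),
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(userFunction, Types.INT));

    List<Integer> data = new ArrayList<>();

    // KeyedStateInputFormat is a RichInputFormat, so it needs a runtime context before open().
    format.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0));

    format.openInputFormat();
    format.open(split);

    while (!format.reachedEnd()) {
        data.add(format.nextRecord(0));
    }

    format.close();
    format.closeInputFormat();

    data.sort(Comparator.naturalOrder());
    return data;
}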
Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.
From the class KeyedStateInputFormatTest, method testReadState.
@Test
public void testReadState() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));

    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format =
            new KeyedStateInputFormat<>(
                    operatorState,
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    // Each key written by StatefulFunction should be read back exactly once.
    KeyedStateReaderFunction<Integer, Integer> userFunction = new ReaderFunction();
    List<Integer> data = readInputSplit(split, userFunction);

    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 2, 3), data);
}
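StatefulFunction and ReaderFunction are test fixtures rather than Flink library classes. A plausible sketch, assuming the flat map stores each incoming element in a ValueState named "state" and the reader emits that value back for every key:

static class StatefulFunction extends RichFlatMapFunction<Integer, Void> {
    private transient ValueState<Integer> state;

    @Override
    public void open(Configuration parameters) {
        state = getRuntimeContext().getState(new ValueStateDescriptor<>("state", Types.INT));
    }

    @Override
    public void flatMap(Integer value, Collector<Void> out) throws Exception {
        state.update(value);
    }
}

static class ReaderFunction extends KeyedStateReaderFunction<Integer, Integer> {
    private transient ValueState<Integer> state;

    @Override
    public void open(Configuration parameters) {
        state = getRuntimeContext().getState(new ValueStateDescriptor<>("state", Types.INT));
    }

    @Override
    public void readKey(Integer key, Context ctx, Collector<Integer> out) throws Exception {
        out.collect(state.value());
    }
}

The timer-based fixtures used by testReadTime (StatefulFunctionWithTime and TimerReaderFunction) follow the same pattern, but additionally register timers and read them back, which is why each key appears twice in that test's expected output.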
Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.
From the class KeyedStateInputFormatTest, method testInvalidProcessReaderFunctionFails.
@Test(expected = IOException.class)
public void testInvalidProcessReaderFunctionFails() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));

    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format =
            new KeyedStateInputFormat<>(
                    operatorState,
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    // Reading with a function that misuses the RuntimeContext must surface an IOException.
    KeyedStateReaderFunction<Integer, Integer> userFunction = new InvalidReaderFunction();
    readInputSplit(split, userFunction);

    Assert.fail("KeyedStateReaderFunction did not fail on invalid RuntimeContext use");
}
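InvalidReaderFunction is likewise a fixture. The point of the test is that keyed state may only be registered in open(); a sketch of a reader that (incorrectly) registers its state per key, which the input format is expected to reject and surface as the IOException the test anticipates:

static class InvalidReaderFunction extends KeyedStateReaderFunction<Integer, Integer> {
    @Override
    public void open(Configuration parameters) {
        // Intentionally empty: the state descriptor is registered in readKey instead,
        // which is not permitted and should cause the read to fail.
    }

    @Override
    public void readKey(Integer key, Context ctx, Collector<Integer> out) throws Exception {
        ValueState<Integer> state =
                getRuntimeContext().getState(new ValueStateDescriptor<>("state", Types.INT));
        out.collect(state.value());
    }
}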
Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.
From the class SavepointMetadata, method addOperator.
public void addOperator(String uid, BootstrapTransformation<?> transformation) {
    OperatorID id = OperatorIDGenerator.fromUid(uid);

    if (operatorStateIndex.containsKey(id)) {
        throw new IllegalArgumentException(
                "The savepoint already contains uid " + uid + ". All uid's must be unique");
    }

    // Register the transformation under the OperatorID derived from the uid.
    operatorStateIndex.put(
            id,
            OperatorStateSpec.newWithTransformation(new BootstrapTransformationWithID<>(id, transformation)));
}
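addOperator is not normally called directly; Savepoint.create(...).withOperator(uid, transformation) reaches it when a new savepoint is assembled with the legacy DataSet-based state processor API. A hedged usage sketch (the bootstrap function body and the output path are placeholders, and builder details vary across Flink versions):

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSet<Integer> data = env.fromElements(1, 2, 3);

BootstrapTransformation<Integer> transformation =
        OperatorTransformation.bootstrapWith(data)
                .keyBy(new KeySelector<Integer, Integer>() {
                    @Override
                    public Integer getKey(Integer value) {
                        return value;
                    }
                })
                .transform(new KeyedStateBootstrapFunction<Integer, Integer>() {
                    @Override
                    public void processElement(Integer value, Context ctx) throws Exception {
                        // Write keyed state for 'value' here.
                    }
                });

// withOperator delegates to SavepointMetadata#addOperator; registering the same uid twice
// would trigger the IllegalArgumentException shown above.
Savepoint.create(new MemoryStateBackend(), 128)
        .withOperator("uid", transformation)
        .write("file:///tmp/new-savepoint");

env.execute();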
Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.
From the class SavepointMetadataV2, method addOperator.
public void addOperator(String uid, StateBootstrapTransformation<?> transformation) {
    OperatorID id = OperatorIDGenerator.fromUid(uid);

    if (operatorStateIndex.containsKey(id)) {
        throw new IllegalArgumentException(
                "The savepoint already contains uid " + uid + ". All uid's must be unique");
    }

    operatorStateIndex.put(
            id,
            OperatorStateSpecV2.newWithTransformation(new StateBootstrapTransformationWithID<>(id, transformation)));
}
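SavepointMetadataV2 backs the DataStream-based SavepointWriter, which reaches this method through its own withOperator call. A hedged sketch along the same lines (again the bootstrap function is a placeholder; in recent Flink releases the String-uid overload is superseded by OperatorIdentifier, so details depend on the version in use):

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Integer> data = env.fromElements(1, 2, 3);

StateBootstrapTransformation<Integer> transformation =
        OperatorTransformation.bootstrapWith(data)
                .keyBy(new KeySelector<Integer, Integer>() {
                    @Override
                    public Integer getKey(Integer value) {
                        return value;
                    }
                })
                .transform(new KeyedStateBootstrapFunction<Integer, Integer>() {
                    @Override
                    public void processElement(Integer value, Context ctx) throws Exception {
                        // Write keyed state for 'value' here.
                    }
                });

// withOperator delegates to SavepointMetadataV2#addOperator.
SavepointWriter.newSavepoint(new HashMapStateBackend(), 128)
        .withOperator("uid", transformation)
        .write("file:///tmp/new-savepoint");

env.execute();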