Use of org.apache.flink.api.common.state.MapStateDescriptor in project flink by apache.
The class ExistingSavepoint, method readBroadcastState.
/**
* Read operator {@code BroadcastState} from a {@code Savepoint}.
*
* @param uid The uid of the operator.
* @param name The (unique) name for the state.
* @param keyTypeInfo The type information for the keys in the state.
* @param valueTypeInfo The type information for the values in the state.
* @param <K> The type of keys in state.
* @param <V> The type of values in state.
* @return A {@code DataSet} of key-value pairs from state.
* @throws IOException If the savepoint does not contain the specified uid.
*/
public <K, V> DataSource<Tuple2<K, V>> readBroadcastState(
        String uid, String name, TypeInformation<K> keyTypeInfo, TypeInformation<V> valueTypeInfo)
        throws IOException {
    OperatorState operatorState = metadata.getOperatorState(uid);
    MapStateDescriptor<K, V> descriptor = new MapStateDescriptor<>(name, keyTypeInfo, valueTypeInfo);
    BroadcastStateInputFormat<K, V> inputFormat = new BroadcastStateInputFormat<>(
            operatorState, env.getConfiguration(), stateBackend, descriptor);
    return env.createInput(inputFormat, new TupleTypeInfo<>(keyTypeInfo, valueTypeInfo));
}
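For orientation, here is a minimal sketch of how this method might be called through the legacy DataSet-based State Processor API. The savepoint path, state backend, operator uid, state name, and key/value types below are placeholder assumptions, not taken from the method above.

// Hypothetical usage sketch of ExistingSavepoint#readBroadcastState (placeholders throughout).
// Assumes a Flink version that still ships the DataSet-based State Processor API
// (org.apache.flink.state.api.Savepoint / ExistingSavepoint) alongside HashMapStateBackend.
ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
ExistingSavepoint savepoint =
        Savepoint.load(batchEnv, "file:///tmp/savepoints/savepoint-abc", new HashMapStateBackend());
DataSource<Tuple2<String, Long>> broadcastEntries = savepoint.readBroadcastState(
        "my-operator-uid",  // uid assigned to the operator that owns the broadcast state
        "rules",            // must match the name used in the job's MapStateDescriptor
        Types.STRING,
        Types.LONG);
broadcastEntries.print();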
Use of org.apache.flink.api.common.state.MapStateDescriptor in project flink by apache.
The class SavepointReader, method readBroadcastState.
/**
* Read operator {@code BroadcastState} from a {@code Savepoint}.
*
* @param uid The uid of the operator.
* @param name The (unique) name for the state.
* @param keyTypeInfo The type information for the keys in the state.
* @param valueTypeInfo The type information for the values in the state.
* @param <K> The type of keys in state.
* @param <V> The type of values in state.
* @return A {@code DataStream} of key-value pairs from state.
* @throws IOException If the savepoint does not contain the specified uid.
*/
public <K, V> DataStream<Tuple2<K, V>> readBroadcastState(
        String uid, String name, TypeInformation<K> keyTypeInfo, TypeInformation<V> valueTypeInfo)
        throws IOException {
    OperatorState operatorState = metadata.getOperatorState(uid);
    MapStateDescriptor<K, V> descriptor = new MapStateDescriptor<>(name, keyTypeInfo, valueTypeInfo);
    BroadcastStateInputFormat<K, V> inputFormat = new BroadcastStateInputFormat<>(
            operatorState, MutableConfig.of(env.getConfiguration()), stateBackend, descriptor);
    return SourceBuilder.fromFormat(env, inputFormat, new TupleTypeInfo<>(keyTypeInfo, valueTypeInfo));
}
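A similar sketch for the DataStream-based SavepointReader variant; the savepoint path, state backend, uid, state name, and types are again illustrative assumptions.

// Hypothetical usage sketch of SavepointReader#readBroadcastState (placeholders throughout).
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
SavepointReader savepoint =
        SavepointReader.read(env, "file:///tmp/savepoints/savepoint-abc", new HashMapStateBackend());
DataStream<Tuple2<String, Long>> broadcastEntries =
        savepoint.readBroadcastState("my-operator-uid", "rules", Types.STRING, Types.LONG);
broadcastEntries.print();
env.execute("read-broadcast-state");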
Use of org.apache.flink.api.common.state.MapStateDescriptor in project flink by apache.
The class SimpleStateRequestHandler, method getMapState.
private MapState<ByteArrayWrapper, byte[]> getMapState(BeamFnApi.StateRequest request) throws Exception {
    BeamFnApi.StateKey.MultimapSideInput mapUserState = request.getStateKey().getMultimapSideInput();
    byte[] data = Base64.getDecoder().decode(mapUserState.getSideInputId());
    FlinkFnApi.StateDescriptor stateDescriptor = FlinkFnApi.StateDescriptor.parseFrom(data);
    String stateName = PYTHON_STATE_PREFIX + stateDescriptor.getStateName();
    StateDescriptor cachedStateDescriptor = stateDescriptorCache.get(stateName);
    MapStateDescriptor<ByteArrayWrapper, byte[]> mapStateDescriptor;
    if (cachedStateDescriptor instanceof MapStateDescriptor) {
        mapStateDescriptor = (MapStateDescriptor<ByteArrayWrapper, byte[]>) cachedStateDescriptor;
    } else if (cachedStateDescriptor == null) {
        mapStateDescriptor = new MapStateDescriptor<>(stateName, ByteArrayWrapperSerializer.INSTANCE, valueSerializer);
        if (stateDescriptor.hasStateTtlConfig()) {
            FlinkFnApi.StateDescriptor.StateTTLConfig stateTtlConfigProto = stateDescriptor.getStateTtlConfig();
            StateTtlConfig stateTtlConfig = ProtoUtils.parseStateTtlConfigFromProto(stateTtlConfigProto);
            mapStateDescriptor.enableTimeToLive(stateTtlConfig);
        }
        stateDescriptorCache.put(stateName, mapStateDescriptor);
    } else {
        throw new RuntimeException(String.format(
                "State name corrupt detected: '%s' is used both as MAP state and '%s' state at the same time.",
                stateName, cachedStateDescriptor.getType()));
    }
    byte[] windowBytes = mapUserState.getWindow().toByteArray();
    if (windowBytes.length != 0) {
        bais.setBuffer(windowBytes, 0, windowBytes.length);
        Object namespace = namespaceSerializer.deserialize(baisWrapper);
        return (MapState<ByteArrayWrapper, byte[]>) keyedStateBackend.getPartitionedState(
                namespace, namespaceSerializer, mapStateDescriptor);
    } else {
        return (MapState<ByteArrayWrapper, byte[]>) keyedStateBackend.getPartitionedState(
                VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, mapStateDescriptor);
    }
}
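The snippet above is tied to the PyFlink/Beam plumbing (FlinkFnApi protos, ByteArrayWrapperSerializer, the descriptor cache). As a standalone sketch of the same descriptor-with-TTL pattern, using only public Flink APIs and a placeholder state name, types, and TTL duration:

// Minimal sketch: build a MapStateDescriptor and enable state TTL on it.
// The name, key/value types, and one-hour TTL are illustrative assumptions.
StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(1))
        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
        .build();
MapStateDescriptor<String, byte[]> descriptor = new MapStateDescriptor<>(
        "python_state_example",
        BasicTypeInfo.STRING_TYPE_INFO,
        PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO);
descriptor.enableTimeToLive(ttlConfig);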
Use of org.apache.flink.api.common.state.MapStateDescriptor in project flink by apache.
The class QsStateClient, method main.
public static void main(final String[] args) throws Exception {
    ParameterTool parameters = ParameterTool.fromArgs(args);

    // setup values
    String jobId = parameters.getRequired("job-id");
    String host = parameters.get("host", "localhost");
    int port = parameters.getInt("port", 9069);
    int numIterations = parameters.getInt("iterations", 1500);

    QueryableStateClient client = new QueryableStateClient(host, port);
    client.setExecutionConfig(new ExecutionConfig());

    MapStateDescriptor<EmailId, EmailInformation> stateDescriptor = new MapStateDescriptor<>(
            QsConstants.STATE_NAME,
            TypeInformation.of(new TypeHint<EmailId>() {}),
            TypeInformation.of(new TypeHint<EmailInformation>() {}));

    System.out.println("Wait until the state can be queried.");

    // wait for state to exist
    for (int i = 0; i < BOOTSTRAP_RETRIES; i++) { // ~120s
        try {
            getMapState(jobId, client, stateDescriptor);
            break;
        } catch (ExecutionException e) {
            if (e.getCause() instanceof UnknownKeyOrNamespaceException) {
                System.err.println("State does not exist yet; sleeping 500ms");
                Thread.sleep(500L);
            } else {
                throw e;
            }
        }

        if (i == (BOOTSTRAP_RETRIES - 1)) {
            throw new RuntimeException("Timeout: state doesn't exist after 120s");
        }
    }

    System.out.println(String.format("State exists. Start querying it %d times.", numIterations));

    // query state
    for (int iterations = 0; iterations < numIterations; iterations++) {
        MapState<EmailId, EmailInformation> mapState = getMapState(jobId, client, stateDescriptor);

        int counter = 0;
        for (Map.Entry<EmailId, EmailInformation> entry : mapState.entries()) {
            // this is to force deserialization
            entry.getKey();
            entry.getValue();
            counter++;
        }
        System.out.println("MapState has " + counter + " entries"); // we look for it in the test

        Thread.sleep(100L);
    }
}
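The getMapState helper is not part of this excerpt. A plausible sketch of such a lookup against the Queryable State client is shown below; the constants QsConstants.QUERY_NAME and QsConstants.KEY and the String key type are assumptions about code defined elsewhere in the example, not confirmed by the snippet above.

// Hedged sketch of the helper referenced above (constants and key type are assumed).
private static MapState<EmailId, EmailInformation> getMapState(
        String jobId,
        QueryableStateClient client,
        MapStateDescriptor<EmailId, EmailInformation> stateDescriptor) throws Exception {
    CompletableFuture<MapState<EmailId, EmailInformation>> resultFuture = client.getKvState(
            JobID.fromHexString(jobId),
            QsConstants.QUERY_NAME,   // assumed: name under which the state was made queryable
            QsConstants.KEY,          // assumed: the key of the keyed state to look up
            BasicTypeInfo.STRING_TYPE_INFO,
            stateDescriptor);
    return resultFuture.get();
}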
Use of org.apache.flink.api.common.state.MapStateDescriptor in project flink by apache.
The class CEPITCase, method testRichPatternFlatSelectFunction.
@Test
public void testRichPatternFlatSelectFunction() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(envConfiguration);

    DataStream<Event> input = env.fromElements(
            new Event(1, "barfoo", 1.0),
            new Event(2, "start", 2.0),
            new Event(3, "foobar", 3.0),
            new SubEvent(4, "foo", 4.0, 1.0),
            new Event(5, "middle", 5.0),
            new SubEvent(6, "middle", 6.0, 2.0),
            new SubEvent(7, "bar", 3.0, 3.0),
            new Event(42, "42", 42.0),
            new Event(8, "end", 1.0));

    Pattern<Event, ?> pattern = Pattern.<Event>begin("start")
            .where(new RichIterativeCondition<Event>() {
                @Override
                public boolean filter(Event value, Context<Event> ctx) throws Exception {
                    return value.getName().equals("start");
                }
            })
            .followedByAny("middle")
            .subtype(SubEvent.class)
            .where(new SimpleCondition<SubEvent>() {
                @Override
                public boolean filter(SubEvent value) throws Exception {
                    return value.getName().equals("middle");
                }
            })
            .followedByAny("end")
            .where(new SimpleCondition<Event>() {
                @Override
                public boolean filter(Event value) throws Exception {
                    return value.getName().equals("end");
                }
            });

    DataStream<String> result = CEP.pattern(input, pattern)
            .inProcessingTime()
            .flatSelect(
                    new RichPatternFlatSelectFunction<Event, String>() {
                        @Override
                        public void open(Configuration config) {
                            try {
                                getRuntimeContext().getMapState(new MapStateDescriptor<>(
                                        "test", LongSerializer.INSTANCE, LongSerializer.INSTANCE));
                                throw new RuntimeException(
                                        "Expected getMapState to fail with unsupported operation exception.");
                            } catch (UnsupportedOperationException e) {
                                // ignore, expected
                            }
                            getRuntimeContext().getUserCodeClassLoader();
                        }

                        @Override
                        public void flatSelect(Map<String, List<Event>> p, Collector<String> o) throws Exception {
                            StringBuilder builder = new StringBuilder();
                            builder.append(p.get("start").get(0).getId())
                                    .append(",")
                                    .append(p.get("middle").get(0).getId())
                                    .append(",")
                                    .append(p.get("end").get(0).getId());
                            o.collect(builder.toString());
                        }
                    },
                    Types.STRING);

    List<String> resultList = new ArrayList<>();
    DataStreamUtils.collect(result).forEachRemaining(resultList::add);

    assertEquals(Arrays.asList("2,6,8"), resultList);
}
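The test above only verifies that getMapState is rejected inside a RichPatternFlatSelectFunction. For contrast, here is a minimal sketch of a context where a MapStateDescriptor-backed getMapState call is supported, namely a rich function applied on a keyed stream; the class name, state name, and types are illustrative, and reusing the Event type from the test is an assumption.

// Illustrative sketch (not part of the test): keyed state is available here
// because the function is applied after keyBy(...).
public static class CountNamesPerKey extends RichFlatMapFunction<Event, Tuple2<String, Long>> {

    private transient MapState<String, Long> counts;

    @Override
    public void open(Configuration parameters) {
        counts = getRuntimeContext().getMapState(
                new MapStateDescriptor<>("counts", Types.STRING, Types.LONG));
    }

    @Override
    public void flatMap(Event value, Collector<Tuple2<String, Long>> out) throws Exception {
        Long current = counts.get(value.getName());
        long updated = (current == null ? 0L : current) + 1L;
        counts.put(value.getName(), updated);
        out.collect(Tuple2.of(value.getName(), updated));
    }
}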