Use of org.apache.flink.runtime.state.VoidNamespace in project flink by apache.
From the class RocksDBReducingStateTest, the method testAddAndGet:
// ------------------------------------------------------------------------

@Test
public void testAddAndGet() throws Exception {
    final ReducingStateDescriptor<Long> stateDescr =
            new ReducingStateDescriptor<>("my-state", new AddingFunction(), Long.class);
    stateDescr.initializeSerializerUnlessSet(new ExecutionConfig());

    final RocksDBStateBackend backend = new RocksDBStateBackend(tmp.newFolder().toURI());
    backend.setDbStoragePath(tmp.newFolder().getAbsolutePath());

    final RocksDBKeyedStateBackend<String> keyedBackend = createKeyedBackend(backend);

    try {
        InternalReducingState<VoidNamespace, Long> state =
                keyedBackend.createReducingState(VoidNamespaceSerializer.INSTANCE, stateDescr);
        state.setCurrentNamespace(VoidNamespace.INSTANCE);

        // state is scoped per key, so a key that was never written yields null
        keyedBackend.setCurrentKey("abc");
        assertNull(state.get());

        keyedBackend.setCurrentKey("def");
        assertNull(state.get());
        state.add(17L);
        state.add(11L);
        assertEquals(28L, state.get().longValue());

        // a different key still sees no value
        keyedBackend.setCurrentKey("abc");
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        assertNull(state.get());
        state.add(1L);
        state.add(2L);

        // clearing "def" does not affect the other keys
        keyedBackend.setCurrentKey("def");
        assertEquals(28L, state.get().longValue());
        state.clear();
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        state.add(3L);
        state.add(2L);
        state.add(1L);

        keyedBackend.setCurrentKey("def");
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        assertEquals(9L, state.get().longValue());
    } finally {
        keyedBackend.close();
        keyedBackend.dispose();
    }
}
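The AddingFunction used by the ReducingStateDescriptor above is a helper of the test class and is not shown in this excerpt. A minimal sketch, assuming it is simply a summing ReduceFunction<Long> (org.apache.flink.api.common.functions.ReduceFunction), which is consistent with the asserted totals (17 + 11 = 28 and 1 + 2 + 3 + 2 + 1 = 9):

    // Assumed helper: a ReduceFunction that sums its two inputs. The real AddingFunction
    // lives in the test class and is not part of this listing.
    @SuppressWarnings("serial")
    public static class AddingFunction implements ReduceFunction<Long> {

        @Override
        public Long reduce(Long value1, Long value2) {
            return value1 + value2;
        }
    }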
Use of org.apache.flink.runtime.state.VoidNamespace in project flink by apache.
From the class KvStateRequestSerializerTest, the method testMapSerialization:
/**
* Tests map serialization utils.
*/
@Test
public void testMapSerialization() throws Exception {
    final long key = 0L;

    // objects for heap state map serialization
    final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend =
            new HeapKeyedStateBackend<>(
                    mock(TaskKvStateRegistry.class),
                    LongSerializer.INSTANCE,
                    ClassLoader.getSystemClassLoader(),
                    1,
                    new KeyGroupRange(0, 0),
                    async,
                    new ExecutionConfig());
    longHeapKeyedStateBackend.setCurrentKey(key);

    final InternalMapState<VoidNamespace, Long, String> mapState =
            (InternalMapState<VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState(
                    VoidNamespace.INSTANCE,
                    VoidNamespaceSerializer.INSTANCE,
                    new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE));

    testMapSerialization(key, mapState);
}
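The static helper testMapSerialization(key, mapState) is defined elsewhere in KvStateRequestSerializerTest and is not reproduced here. As a rough illustration of the state it operates on (the entries below are assumptions, not the helper's actual contents), the map state can be populated and read back once the current key and namespace are set:

    // Illustrative only: write a few entries into the user map state and iterate them.
    // The real helper additionally checks that serializing and then deserializing the
    // state produces the same entries.
    mapState.put(1L, "one");
    mapState.put(2L, "two");

    for (Map.Entry<Long, String> entry : mapState.entries()) {
        System.out.println(entry.getKey() + " -> " + entry.getValue());
    }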
Use of org.apache.flink.runtime.state.VoidNamespace in project flink by apache.
From the class HeapReducingStateTest, the method testAddAndGet:
@Test
public void testAddAndGet() throws Exception {
    final ReducingStateDescriptor<Long> stateDescr =
            new ReducingStateDescriptor<>("my-state", new AddingFunction(), Long.class);
    stateDescr.initializeSerializerUnlessSet(new ExecutionConfig());

    final HeapKeyedStateBackend<String> keyedBackend = createKeyedBackend();

    try {
        InternalReducingState<VoidNamespace, Long> state =
                keyedBackend.createReducingState(VoidNamespaceSerializer.INSTANCE, stateDescr);
        state.setCurrentNamespace(VoidNamespace.INSTANCE);

        keyedBackend.setCurrentKey("abc");
        assertNull(state.get());

        keyedBackend.setCurrentKey("def");
        assertNull(state.get());
        state.add(17L);
        state.add(11L);
        assertEquals(28L, state.get().longValue());

        keyedBackend.setCurrentKey("abc");
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        assertNull(state.get());
        state.add(1L);
        state.add(2L);

        keyedBackend.setCurrentKey("def");
        assertEquals(28L, state.get().longValue());
        state.clear();
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        state.add(3L);
        state.add(2L);
        state.add(1L);

        keyedBackend.setCurrentKey("def");
        assertNull(state.get());

        keyedBackend.setCurrentKey("g");
        assertEquals(9L, state.get().longValue());

        state.clear();

        // make sure all lists / maps are cleared
        StateTable<String, VoidNamespace, Long> stateTable =
                ((HeapReducingState<String, VoidNamespace, Long>) state).stateTable;
        assertTrue(stateTable.isEmpty());
    } finally {
        keyedBackend.close();
        keyedBackend.dispose();
    }
}
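The createKeyedBackend() helper is part of the test class and not shown in this excerpt. A plausible sketch, assuming it mirrors the HeapKeyedStateBackend constructor used in the map serialization test above (the asynchronous-snapshot flag and the single key group are assumptions):

    // Hypothetical helper: builds a heap keyed backend keyed by String, analogous to the
    // Long-keyed backend constructed directly in testMapSerialization above.
    private static HeapKeyedStateBackend<String> createKeyedBackend() {
        return new HeapKeyedStateBackend<>(
                mock(TaskKvStateRegistry.class),
                StringSerializer.INSTANCE,
                ClassLoader.getSystemClassLoader(),
                1,                          // number of key groups (assumed)
                new KeyGroupRange(0, 0),
                true,                       // asynchronous snapshots (assumed)
                new ExecutionConfig());
    }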
Use of org.apache.flink.runtime.state.VoidNamespace in project flink by apache.
From the class KvStateRequestSerializerRocksDBTest, the method testMapSerialization:
/**
 * Tests that map serialization and deserialization match.
 *
 * @see KvStateRequestSerializerTest#testMapSerialization() the corresponding
 *      test using the heap state back-end
 */
@Test
public void testMapSerialization() throws Exception {
    final long key = 0L;

    // objects for RocksDB state map serialization
    DBOptions dbOptions = PredefinedOptions.DEFAULT.createDBOptions();
    dbOptions.setCreateIfMissing(true);
    ColumnFamilyOptions columnFamilyOptions = PredefinedOptions.DEFAULT.createColumnOptions();

    final RocksDBKeyedStateBackend<Long> longHeapKeyedStateBackend =
            new RocksDBKeyedStateBackend<>(
                    new JobID(),
                    "no-op",
                    ClassLoader.getSystemClassLoader(),
                    temporaryFolder.getRoot(),
                    dbOptions,
                    columnFamilyOptions,
                    mock(TaskKvStateRegistry.class),
                    LongSerializer.INSTANCE,
                    1,
                    new KeyGroupRange(0, 0),
                    new ExecutionConfig());
    longHeapKeyedStateBackend.setCurrentKey(key);

    final InternalMapState<VoidNamespace, Long, String> mapState =
            (InternalMapState<VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState(
                    VoidNamespace.INSTANCE,
                    VoidNamespaceSerializer.INSTANCE,
                    new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE));

    KvStateRequestSerializerTest.testMapSerialization(key, mapState);
}
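Unlike the try/finally blocks in the reducing-state tests, this snippet shows no cleanup; a hedged sketch of the tear-down one would expect at the end of such a test (the exact place and ordering are assumptions):

    // Hypothetical tear-down: the RocksDB backend wraps native resources and should be
    // disposed once the test finishes; the DBOptions and ColumnFamilyOptions created
    // above should likewise be released when no longer needed.
    longHeapKeyedStateBackend.dispose();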
Use of org.apache.flink.runtime.state.VoidNamespace in project flink by apache.
From the class AbstractStreamArrowPythonOverWindowAggregateFunctionOperator, the method open:
@Override
public void open() throws Exception {
    super.open();
    InternalTimerService<VoidNamespace> internalTimerService =
            getInternalTimerService("python-over-window-timers", VoidNamespaceSerializer.INSTANCE, this);
    timerService = new SimpleTimerService(internalTimerService);

    InternalTypeInfo<RowData> inputTypeInfo = InternalTypeInfo.of(inputType);
    ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<>(inputTypeInfo);
    MapStateDescriptor<Long, List<RowData>> inputStateDesc =
            new MapStateDescriptor<>("inputState", Types.LONG, rowListTypeInfo);

    ValueStateDescriptor<Long> lastTriggeringTsDescriptor =
            new ValueStateDescriptor<>("lastTriggeringTsState", Types.LONG);
    lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);

    ValueStateDescriptor<Long> cleanupTsStateDescriptor =
            new ValueStateDescriptor<>("cleanupTsState", Types.LONG);
    cleanupTsState = getRuntimeContext().getState(cleanupTsStateDescriptor);

    inputState = getRuntimeContext().getMapState(inputStateDesc);
}
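For context, a hedged sketch of how an operator like this typically uses the pieces registered in open(): each input row is buffered under its event timestamp in inputState, and an event-time timer is registered so the aggregation fires once the watermark passes that timestamp. The method name below and the use of java.util.ArrayList are illustrative assumptions, not the operator's actual code:

    // Illustrative sketch only (assumed helper, not part of the operator):
    void bufferRowAndRegisterTimer(RowData input, long timestamp) throws Exception {
        List<RowData> rows = inputState.get(timestamp);
        if (rows == null) {
            rows = new ArrayList<>();
        }
        rows.add(input);
        inputState.put(timestamp, rows);

        // fires onEventTime(...) once the watermark reaches this timestamp
        timerService.registerEventTimeTimer(timestamp);
    }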