
Example 51 with DataOutputViewStreamWrapper

use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.

the class StateInitializationContextImplTest method setUp.

@Before
public void setUp() throws Exception {
    this.writtenKeyGroups = 0;
    this.writtenOperatorStates = new HashSet<>();
    this.closableRegistry = new CloseableRegistry();
    ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos(64);
    List<KeyedStateHandle> keyedStateHandles = new ArrayList<>(NUM_HANDLES);
    int prev = 0;
    for (int i = 0; i < NUM_HANDLES; ++i) {
        out.reset();
        int size = i % 4;
        int end = prev + size;
        DataOutputView dov = new DataOutputViewStreamWrapper(out);
        KeyGroupRangeOffsets offsets = new KeyGroupRangeOffsets(i == 9 ? KeyGroupRange.EMPTY_KEY_GROUP_RANGE : new KeyGroupRange(prev, end));
        prev = end + 1;
        for (int kg : offsets.getKeyGroupRange()) {
            offsets.setKeyGroupOffset(kg, out.getPosition());
            dov.writeInt(kg);
            ++writtenKeyGroups;
        }
        KeyedStateHandle handle = new KeyGroupsStateHandle(offsets, new ByteStateHandleCloseChecking("kg-" + i, out.toByteArray()));
        keyedStateHandles.add(handle);
    }
    List<OperatorStateHandle> operatorStateHandles = new ArrayList<>(NUM_HANDLES);
    for (int i = 0; i < NUM_HANDLES; ++i) {
        int size = i % 4;
        out.reset();
        DataOutputView dov = new DataOutputViewStreamWrapper(out);
        LongArrayList offsets = new LongArrayList(size);
        for (int s = 0; s < size; ++s) {
            offsets.add(out.getPosition());
            int val = i * NUM_HANDLES + s;
            dov.writeInt(val);
            writtenOperatorStates.add(val);
        }
        Map<String, OperatorStateHandle.StateMetaInfo> offsetsMap = new HashMap<>();
        offsetsMap.put(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME, new OperatorStateHandle.StateMetaInfo(offsets.toArray(), OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
        OperatorStateHandle operatorStateHandle = new OperatorStreamStateHandle(offsetsMap, new ByteStateHandleCloseChecking("os-" + i, out.toByteArray()));
        operatorStateHandles.add(operatorStateHandle);
    }
    OperatorSubtaskState operatorSubtaskState =
            OperatorSubtaskState.builder()
                    .setRawOperatorState(new StateObjectCollection<>(operatorStateHandles))
                    .setRawKeyedState(new StateObjectCollection<>(keyedStateHandles))
                    .build();
    OperatorID operatorID = new OperatorID();
    TaskStateSnapshot taskStateSnapshot = new TaskStateSnapshot();
    taskStateSnapshot.putSubtaskStateByOperatorID(operatorID, operatorSubtaskState);
    JobManagerTaskRestore jobManagerTaskRestore = new JobManagerTaskRestore(0L, taskStateSnapshot);
    TaskStateManager manager =
            new TaskStateManagerImpl(
                    new JobID(),
                    new ExecutionAttemptID(),
                    new TestTaskLocalStateStore(),
                    new InMemoryStateChangelogStorage(),
                    jobManagerTaskRestore,
                    mock(CheckpointResponder.class));
    DummyEnvironment environment = new DummyEnvironment("test", 1, 0, prev);
    environment.setTaskStateManager(manager);
    StateBackend stateBackend = new MemoryStateBackend(1024);
    StreamTaskStateInitializer streamTaskStateManager = new StreamTaskStateInitializerImpl(environment, stateBackend, TtlTimeProvider.DEFAULT, new InternalTimeServiceManager.Provider() {

        @Override
        public <K> InternalTimeServiceManager<K> create(CheckpointableKeyedStateBackend<K> keyedStatedBackend, ClassLoader userClassloader, KeyContext keyContext, ProcessingTimeService processingTimeService, Iterable<KeyGroupStatePartitionStreamProvider> rawKeyedStates) throws Exception {
            // return null so that no timer service manager is created here and the
            // raw keyed state stream stays unconsumed for this test
            return null;
        }
    });
    AbstractStreamOperator<?> mockOperator = mock(AbstractStreamOperator.class);
    when(mockOperator.getOperatorID()).thenReturn(operatorID);
    StreamOperatorStateContext stateContext =
            streamTaskStateManager.streamOperatorStateContext(
                    operatorID,
                    "TestOperatorClass",
                    mock(ProcessingTimeService.class),
                    mockOperator,
                    // consumed by the timer service
                    IntSerializer.INSTANCE,
                    closableRegistry,
                    new UnregisteredMetricsGroup(),
                    1.0,
                    false);
    OptionalLong restoredCheckpointId = stateContext.getRestoredCheckpointId();
    this.initializationContext =
            new StateInitializationContextImpl(
                    restoredCheckpointId.isPresent() ? restoredCheckpointId.getAsLong() : null,
                    stateContext.operatorStateBackend(),
                    mock(KeyedStateStore.class),
                    stateContext.rawKeyedStateInputs(),
                    stateContext.rawOperatorStateInputs());
}
Also used : HashMap(java.util.HashMap) KeyGroupRangeOffsets(org.apache.flink.runtime.state.KeyGroupRangeOffsets) LongArrayList(org.apache.flink.runtime.util.LongArrayList) ArrayList(java.util.ArrayList) KeyGroupRange(org.apache.flink.runtime.state.KeyGroupRange) CloseableRegistry(org.apache.flink.core.fs.CloseableRegistry) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) StateBackend(org.apache.flink.runtime.state.StateBackend) DefaultOperatorStateBackend(org.apache.flink.runtime.state.DefaultOperatorStateBackend) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) CheckpointableKeyedStateBackend(org.apache.flink.runtime.state.CheckpointableKeyedStateBackend) KeyGroupStatePartitionStreamProvider(org.apache.flink.runtime.state.KeyGroupStatePartitionStreamProvider) InMemoryStateChangelogStorage(org.apache.flink.runtime.state.changelog.inmemory.InMemoryStateChangelogStorage) ByteArrayOutputStreamWithPos(org.apache.flink.core.memory.ByteArrayOutputStreamWithPos) TestTaskLocalStateStore(org.apache.flink.runtime.state.TestTaskLocalStateStore) DataOutputView(org.apache.flink.core.memory.DataOutputView) StateObjectCollection(org.apache.flink.runtime.checkpoint.StateObjectCollection) OperatorStreamStateHandle(org.apache.flink.runtime.state.OperatorStreamStateHandle) StateInitializationContextImpl(org.apache.flink.runtime.state.StateInitializationContextImpl) OperatorStateHandle(org.apache.flink.runtime.state.OperatorStateHandle) JobID(org.apache.flink.api.common.JobID) TaskStateManagerImpl(org.apache.flink.runtime.state.TaskStateManagerImpl) UnregisteredMetricsGroup(org.apache.flink.metrics.groups.UnregisteredMetricsGroup) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) JobManagerTaskRestore(org.apache.flink.runtime.checkpoint.JobManagerTaskRestore) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) KeyedStateHandle(org.apache.flink.runtime.state.KeyedStateHandle) KeyGroupsStateHandle(org.apache.flink.runtime.state.KeyGroupsStateHandle) TaskStateSnapshot(org.apache.flink.runtime.checkpoint.TaskStateSnapshot) ProcessingTimeService(org.apache.flink.streaming.runtime.tasks.ProcessingTimeService) ExecutionAttemptID(org.apache.flink.runtime.executiongraph.ExecutionAttemptID) LongArrayList(org.apache.flink.runtime.util.LongArrayList) CheckpointResponder(org.apache.flink.runtime.taskmanager.CheckpointResponder) DummyEnvironment(org.apache.flink.runtime.operators.testutils.DummyEnvironment) TaskStateManager(org.apache.flink.runtime.state.TaskStateManager) IOException(java.io.IOException) DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper) OptionalLong(java.util.OptionalLong) Before(org.junit.Before)
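
Stripped of the test harness, the core pattern in this setup is to write values through a DataOutputViewStreamWrapper while recording the stream position before each write, so the recorded offsets can later be used to locate each value in the serialized bytes. A minimal, self-contained sketch of that round trip (class name and values are illustrative, not taken from the Flink sources):

import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

import java.io.ByteArrayInputStream;

public class OffsetRoundTripSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos(64);
        DataOutputViewStreamWrapper dov = new DataOutputViewStreamWrapper(out);
        // record the stream position before each write, as setUp() does for key groups
        long[] offsets = new long[3];
        for (int i = 0; i < 3; i++) {
            offsets[i] = out.getPosition();
            dov.writeInt(i * 10);
        }
        // read everything back through the matching input-side wrapper
        DataInputViewStreamWrapper div =
                new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray()));
        for (int i = 0; i < 3; i++) {
            System.out.println("offset " + offsets[i] + " -> " + div.readInt());
        }
    }
}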

Example 52 with DataOutputViewStreamWrapper

use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.

the class InternalTimerServiceImplTest method testSnapshotAndRebalancingRestore.

private void testSnapshotAndRebalancingRestore(int snapshotVersion) throws Exception {
    @SuppressWarnings("unchecked") Triggerable<Integer, String> mockTriggerable = mock(Triggerable.class);
    TestKeyContext keyContext = new TestKeyContext();
    TestProcessingTimeService processingTimeService = new TestProcessingTimeService();
    final PriorityQueueSetFactory queueFactory = createQueueFactory();
    InternalTimerServiceImpl<Integer, String> timerService = createAndStartInternalTimerService(mockTriggerable, keyContext, processingTimeService, testKeyGroupRange, queueFactory);
    int midpoint = testKeyGroupRange.getStartKeyGroup() + (testKeyGroupRange.getEndKeyGroup() - testKeyGroupRange.getStartKeyGroup()) / 2;
    // get two sub key-ranges so that we can restore two ranges separately
    KeyGroupRange subKeyGroupRange1 = new KeyGroupRange(testKeyGroupRange.getStartKeyGroup(), midpoint);
    KeyGroupRange subKeyGroupRange2 = new KeyGroupRange(midpoint + 1, testKeyGroupRange.getEndKeyGroup());
    // get two different keys, one per sub range
    int key1 = getKeyInKeyGroupRange(subKeyGroupRange1, maxParallelism);
    int key2 = getKeyInKeyGroupRange(subKeyGroupRange2, maxParallelism);
    keyContext.setCurrentKey(key1);
    timerService.registerProcessingTimeTimer("ciao", 10);
    timerService.registerEventTimeTimer("hello", 10);
    keyContext.setCurrentKey(key2);
    timerService.registerEventTimeTimer("ciao", 10);
    timerService.registerProcessingTimeTimer("hello", 10);
    assertEquals(2, timerService.numProcessingTimeTimers());
    assertEquals(1, timerService.numProcessingTimeTimers("hello"));
    assertEquals(1, timerService.numProcessingTimeTimers("ciao"));
    assertEquals(2, timerService.numEventTimeTimers());
    assertEquals(1, timerService.numEventTimeTimers("hello"));
    assertEquals(1, timerService.numEventTimeTimers("ciao"));
    // one map per sub key-group range
    Map<Integer, byte[]> snapshot1 = new HashMap<>();
    Map<Integer, byte[]> snapshot2 = new HashMap<>();
    for (Integer keyGroupIndex : testKeyGroupRange) {
        try (ByteArrayOutputStream outStream = new ByteArrayOutputStream()) {
            InternalTimersSnapshot<Integer, String> timersSnapshot = timerService.snapshotTimersForKeyGroup(keyGroupIndex);
            InternalTimersSnapshotReaderWriters
                    .getWriterForVersion(
                            snapshotVersion,
                            timersSnapshot,
                            timerService.getKeySerializer(),
                            timerService.getNamespaceSerializer())
                    .writeTimersSnapshot(new DataOutputViewStreamWrapper(outStream));
            if (subKeyGroupRange1.contains(keyGroupIndex)) {
                snapshot1.put(keyGroupIndex, outStream.toByteArray());
            } else if (subKeyGroupRange2.contains(keyGroupIndex)) {
                snapshot2.put(keyGroupIndex, outStream.toByteArray());
            } else {
                throw new IllegalStateException("Key-Group index doesn't belong to any sub range.");
            }
        }
    }
    // from now on we need everything twice, once per sub key-group range
    @SuppressWarnings("unchecked") Triggerable<Integer, String> mockTriggerable1 = mock(Triggerable.class);
    @SuppressWarnings("unchecked") Triggerable<Integer, String> mockTriggerable2 = mock(Triggerable.class);
    TestKeyContext keyContext1 = new TestKeyContext();
    TestKeyContext keyContext2 = new TestKeyContext();
    TestProcessingTimeService processingTimeService1 = new TestProcessingTimeService();
    TestProcessingTimeService processingTimeService2 = new TestProcessingTimeService();
    InternalTimerServiceImpl<Integer, String> timerService1 = restoreTimerService(snapshot1, snapshotVersion, mockTriggerable1, keyContext1, processingTimeService1, subKeyGroupRange1, queueFactory);
    InternalTimerServiceImpl<Integer, String> timerService2 = restoreTimerService(snapshot2, snapshotVersion, mockTriggerable2, keyContext2, processingTimeService2, subKeyGroupRange2, queueFactory);
    processingTimeService1.setCurrentTime(10);
    timerService1.advanceWatermark(10);
    verify(mockTriggerable1, times(1)).onProcessingTime(anyInternalTimer());
    verify(mockTriggerable1, times(1)).onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key1, "ciao")));
    verify(mockTriggerable1, never()).onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello")));
    verify(mockTriggerable1, times(1)).onEventTime(anyInternalTimer());
    verify(mockTriggerable1, times(1)).onEventTime(eq(new TimerHeapInternalTimer<>(10, key1, "hello")));
    verify(mockTriggerable1, never()).onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "ciao")));
    assertEquals(0, timerService1.numEventTimeTimers());
    processingTimeService2.setCurrentTime(10);
    timerService2.advanceWatermark(10);
    verify(mockTriggerable2, times(1)).onProcessingTime(anyInternalTimer());
    verify(mockTriggerable2, never()).onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key1, "ciao")));
    verify(mockTriggerable2, times(1)).onProcessingTime(eq(new TimerHeapInternalTimer<>(10, key2, "hello")));
    verify(mockTriggerable2, times(1)).onEventTime(anyInternalTimer());
    verify(mockTriggerable2, never()).onEventTime(eq(new TimerHeapInternalTimer<>(10, key1, "hello")));
    verify(mockTriggerable2, times(1)).onEventTime(eq(new TimerHeapInternalTimer<>(10, key2, "ciao")));
    assertEquals(0, timerService2.numEventTimeTimers());
}
Also used : PriorityQueueSetFactory(org.apache.flink.runtime.state.PriorityQueueSetFactory) HeapPriorityQueueSetFactory(org.apache.flink.runtime.state.heap.HeapPriorityQueueSetFactory) HashMap(java.util.HashMap) KeyGroupRange(org.apache.flink.runtime.state.KeyGroupRange) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper) TestProcessingTimeService(org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService)
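
The snapshot half of this test boils down to serializing each key group into its own byte[] and bucketing it by the sub-range it falls into. A simplified sketch of that bucketing, with a single writeInt standing in for writeTimersSnapshot (the ranges and the stand-in payload are illustrative, not the actual timer snapshot format):

import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.runtime.state.KeyGroupRange;

import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;

public class KeyGroupSnapshotSplitSketch {
    public static void main(String[] args) throws Exception {
        KeyGroupRange fullRange = new KeyGroupRange(0, 9);
        KeyGroupRange subRange1 = new KeyGroupRange(0, 4);
        KeyGroupRange subRange2 = new KeyGroupRange(5, 9);
        Map<Integer, byte[]> snapshot1 = new HashMap<>();
        Map<Integer, byte[]> snapshot2 = new HashMap<>();
        for (int keyGroup : fullRange) {
            try (ByteArrayOutputStream outStream = new ByteArrayOutputStream()) {
                // stand-in payload; the real test writes an InternalTimersSnapshot here
                new DataOutputViewStreamWrapper(outStream).writeInt(keyGroup);
                byte[] bytes = outStream.toByteArray();
                if (subRange1.contains(keyGroup)) {
                    snapshot1.put(keyGroup, bytes);
                } else if (subRange2.contains(keyGroup)) {
                    snapshot2.put(keyGroup, bytes);
                }
            }
        }
        System.out.println(snapshot1.size() + " + " + snapshot2.size() + " key groups");
    }
}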

Example 53 with DataOutputViewStreamWrapper

use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.

the class InternalTimeServiceManagerImpl method snapshotToRawKeyedState.

// ////////////////				Fault Tolerance Methods				///////////////////
@Override
public void snapshotToRawKeyedState(KeyedStateCheckpointOutputStream out, String operatorName) throws Exception {
    try {
        KeyGroupsList allKeyGroups = out.getKeyGroupList();
        for (int keyGroupIdx : allKeyGroups) {
            out.startNewKeyGroup(keyGroupIdx);
            snapshotStateForKeyGroup(new DataOutputViewStreamWrapper(out), keyGroupIdx);
        }
    } catch (Exception exception) {
        throw new Exception("Could not write timer service of " + operatorName + " to checkpoint state stream.", exception);
    } finally {
        try {
            out.close();
        } catch (Exception closeException) {
            LOG.warn("Could not close raw keyed operator state stream for {}. This " + "might have prevented deleting some state data.", operatorName, closeException);
        }
    }
}
Also used : DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper) KeyGroupsList(org.apache.flink.runtime.state.KeyGroupsList) IOException(java.io.IOException)

Example 54 with DataOutputViewStreamWrapper

use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.

the class StateBackendTestBase method testValueStateNullUpdate.

/**
 * This test verifies that passing {@code null} to {@link ValueState#update(Object)} acts the
 * same as {@link ValueState#clear()}.
 *
 * @throws Exception
 */
@Test
@SuppressWarnings("unchecked")
public void testValueStateNullUpdate() throws Exception {
    // precondition: LongSerializer must fail on null values, so that this test would fail
    // later if null values were actually stored in the state instead of acting as clear()
    try {
        LongSerializer.INSTANCE.serialize(null, new DataOutputViewStreamWrapper(new ByteArrayOutputStream()));
        fail("Should fail with NullPointerException");
    } catch (NullPointerException e) {
    // alrighty
    }
    CheckpointStreamFactory streamFactory = createStreamFactory();
    SharedStateRegistry sharedStateRegistry = new SharedStateRegistryImpl();
    CheckpointableKeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);
    try {
        ValueStateDescriptor<Long> kvId = new ValueStateDescriptor<>("id", LongSerializer.INSTANCE, 42L);
        ValueState<Long> state = backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
        // some modifications to the state
        backend.setCurrentKey(1);
        // verify default value
        assertEquals(42L, (long) state.value());
        state.update(1L);
        assertEquals(1L, (long) state.value());
        backend.setCurrentKey(2);
        assertEquals(42L, (long) state.value());
        backend.setCurrentKey(1);
        state.clear();
        assertEquals(42L, (long) state.value());
        state.update(17L);
        assertEquals(17L, (long) state.value());
        state.update(null);
        assertEquals(42L, (long) state.value());
        // draw a snapshot
        KeyedStateHandle snapshot1 = runSnapshot(backend.snapshot(682375462378L, 2, streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation()), sharedStateRegistry);
        backend.dispose();
        backend = restoreKeyedBackend(IntSerializer.INSTANCE, snapshot1);
        snapshot1.discardState();
        backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);
    } finally {
        IOUtils.closeQuietly(backend);
        backend.dispose();
    }
}
Also used : BlockerCheckpointStreamFactory(org.apache.flink.runtime.util.BlockerCheckpointStreamFactory) ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream) ValueStateDescriptor(org.apache.flink.api.common.state.ValueStateDescriptor) DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper) Test(org.junit.Test)
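
The precondition this test relies on is that LongSerializer refuses null, so a null update can only ever mean clear(). A small sketch of that precondition plus a normal round trip through the stream wrappers (class name and the value 17L are illustrative):

import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

public class LongSerializerNullSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // the precondition: serializing null must fail, so nothing is written
        try {
            LongSerializer.INSTANCE.serialize(null, new DataOutputViewStreamWrapper(out));
        } catch (NullPointerException expected) {
            System.out.println("null is rejected, as the test expects");
        }
        // a real value round-trips through the wrappers
        LongSerializer.INSTANCE.serialize(17L, new DataOutputViewStreamWrapper(out));
        Long restored = LongSerializer.INSTANCE.deserialize(
                new DataInputViewStreamWrapper(new ByteArrayInputStream(out.toByteArray())));
        System.out.println(restored);
    }
}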

Example 55 with DataOutputViewStreamWrapper

use of org.apache.flink.core.memory.DataOutputViewStreamWrapper in project flink by apache.

the class SerializationProxiesTest method testKeyedStateMetaInfoSerialization.

@Test
public void testKeyedStateMetaInfoSerialization() throws Exception {
    String name = "test";
    TypeSerializer<?> namespaceSerializer = LongSerializer.INSTANCE;
    TypeSerializer<?> stateSerializer = DoubleSerializer.INSTANCE;
    StateMetaInfoSnapshot metaInfo = new RegisteredKeyValueStateBackendMetaInfo<>(StateDescriptor.Type.VALUE, name, namespaceSerializer, stateSerializer).snapshot();
    byte[] serialized;
    try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
        StateMetaInfoSnapshotReadersWriters.getWriter().writeStateMetaInfoSnapshot(metaInfo, new DataOutputViewStreamWrapper(out));
        serialized = out.toByteArray();
    }
    try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
        final StateMetaInfoReader reader = StateMetaInfoSnapshotReadersWriters.getReader(CURRENT_STATE_META_INFO_SNAPSHOT_VERSION, StateMetaInfoSnapshotReadersWriters.StateTypeHint.KEYED_STATE);
        metaInfo = reader.readStateMetaInfoSnapshot(new DataInputViewStreamWrapper(in), Thread.currentThread().getContextClassLoader());
    }
    Assert.assertEquals(name, metaInfo.getName());
}
Also used : DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper) ByteArrayInputStreamWithPos(org.apache.flink.core.memory.ByteArrayInputStreamWithPos) StateMetaInfoSnapshot(org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot) StateMetaInfoReader(org.apache.flink.runtime.state.metainfo.StateMetaInfoReader) ByteArrayOutputStreamWithPos(org.apache.flink.core.memory.ByteArrayOutputStreamWithPos) DataInputViewStreamWrapper(org.apache.flink.core.memory.DataInputViewStreamWrapper) Test(org.junit.Test)
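
The same write-then-read pattern, reduced to its essentials: write through DataOutputViewStreamWrapper into a ByteArrayOutputStreamWithPos, then read back through DataInputViewStreamWrapper over a ByteArrayInputStreamWithPos. A minimal sketch with a plain String standing in for the state meta info snapshot (the class name is illustrative):

import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class MetaInfoNameRoundTripSketch {
    public static void main(String[] args) throws Exception {
        byte[] serialized;
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            // write the name, mirroring the write side of the test above
            new DataOutputViewStreamWrapper(out).writeUTF("test");
            serialized = out.toByteArray();
        }
        try (ByteArrayInputStreamWithPos in = new ByteArrayInputStreamWithPos(serialized)) {
            // read it back through the input-side wrapper
            String name = new DataInputViewStreamWrapper(in).readUTF();
            System.out.println(name);
        }
    }
}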

Aggregations

DataOutputViewStreamWrapper (org.apache.flink.core.memory.DataOutputViewStreamWrapper) 123
DataInputViewStreamWrapper (org.apache.flink.core.memory.DataInputViewStreamWrapper) 55
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 49
Test (org.junit.Test) 43
ByteArrayOutputStreamWithPos (org.apache.flink.core.memory.ByteArrayOutputStreamWithPos) 35
IOException (java.io.IOException) 28
ByteArrayInputStream (java.io.ByteArrayInputStream) 26
ByteArrayInputStreamWithPos (org.apache.flink.core.memory.ByteArrayInputStreamWithPos) 23
DataOutputView (org.apache.flink.core.memory.DataOutputView) 18
HashMap (java.util.HashMap) 13
ArrayList (java.util.ArrayList) 12
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 11
Map (java.util.Map) 10
TypeSerializerSnapshot (org.apache.flink.api.common.typeutils.TypeSerializerSnapshot) 7
StateMetaInfoSnapshot (org.apache.flink.runtime.state.metainfo.StateMetaInfoSnapshot) 7
Before (org.junit.Before) 6
Socket (java.net.Socket) 5
PipedInputStream (java.io.PipedInputStream) 4
PipedOutputStream (java.io.PipedOutputStream) 4
List (java.util.List) 4