
Example 26 with OperatorID

Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.

In the class StateAssignmentOperationTest, the method assigningStateHandlesCanNotBeNull.

@Test
public void assigningStateHandlesCanNotBeNull() {
    OperatorState state = new OperatorState(new OperatorID(), 1, MAX_P);
    List<KeyedStateHandle> managedKeyedStateHandles = StateAssignmentOperation.getManagedKeyedStateHandles(state, KeyGroupRange.of(0, 1));
    List<KeyedStateHandle> rawKeyedStateHandles = StateAssignmentOperation.getRawKeyedStateHandles(state, KeyGroupRange.of(0, 1));
    assertThat(managedKeyedStateHandles, is(empty()));
    assertThat(rawKeyedStateHandles, is(empty()));
}
Also used : OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) KeyedStateHandle(org.apache.flink.runtime.state.KeyedStateHandle) StateHandleDummyUtil.createNewKeyedStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewKeyedStateHandle) Test(org.junit.Test)
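
For contrast with the null/empty case above, a minimal sketch of the positive direction. It assumes StateHandleDummyUtil.createNewKeyedStateHandle takes a KeyGroupRange and that OperatorSubtaskState.Builder exposes setManagedKeyedState (both only inferred from the imports listed above); the method name is hypothetical and MAX_P is the test class's max-parallelism constant.

@Test
public void assignedStateHandlesAreReturnedWhenPresent() {
    OperatorState state = new OperatorState(new OperatorID(), 1, MAX_P);
    KeyGroupRange range = KeyGroupRange.of(0, 1);
    // assumed helper: builds a dummy keyed state handle covering key groups 0-1
    KeyedStateHandle handle = createNewKeyedStateHandle(range);
    state.putState(0, OperatorSubtaskState.builder().setManagedKeyedState(handle).build());
    // with a handle registered, the lookup yields its intersection with the requested range
    List<KeyedStateHandle> managed = StateAssignmentOperation.getManagedKeyedStateHandles(state, range);
    // expected: managed is no longer empty
}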

Example 27 with OperatorID

Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.

In the class StateAssignmentOperationTest, the method testRepartitionSplitDistributeStates.

@Test
public void testRepartitionSplitDistributeStates() {
    OperatorID operatorID = new OperatorID();
    OperatorState operatorState = new OperatorState(operatorID, 2, 4);
    Map<String, OperatorStateHandle.StateMetaInfo> metaInfoMap1 = new HashMap<>(1);
    metaInfoMap1.put("t-1", new OperatorStateHandle.StateMetaInfo(new long[] { 0, 10 }, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
    OperatorStateHandle osh1 = new OperatorStreamStateHandle(metaInfoMap1, new ByteStreamStateHandle("test1", new byte[30]));
    operatorState.putState(0, OperatorSubtaskState.builder().setManagedOperatorState(osh1).build());
    Map<String, OperatorStateHandle.StateMetaInfo> metaInfoMap2 = new HashMap<>(1);
    metaInfoMap2.put("t-2", new OperatorStateHandle.StateMetaInfo(new long[] { 0, 15 }, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
    OperatorStateHandle osh2 = new OperatorStreamStateHandle(metaInfoMap2, new ByteStreamStateHandle("test2", new byte[40]));
    operatorState.putState(1, OperatorSubtaskState.builder().setManagedOperatorState(osh2).build());
    verifyOneKindPartitionableStateRescale(operatorState, operatorID);
}
Also used : HashMap(java.util.HashMap) OperatorStreamStateHandle(org.apache.flink.runtime.state.OperatorStreamStateHandle) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) StateHandleDummyUtil.createNewOperatorStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewOperatorStateHandle) OperatorStateHandle(org.apache.flink.runtime.state.OperatorStateHandle) ByteStreamStateHandle(org.apache.flink.runtime.state.memory.ByteStreamStateHandle) Test(org.junit.Test)
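
As a point of comparison, a hedged sketch of the same handle construction with OperatorStateHandle.Mode.UNION instead of SPLIT_DISTRIBUTE: union-list entries are replicated to every subtask on rescale rather than being split along the recorded offsets. The state name and byte sizes below are illustrative.

    Map<String, OperatorStateHandle.StateMetaInfo> unionMetaInfoMap = new HashMap<>(1);
    // same offset layout, but UNION mode: on restore every subtask receives all entries of "t-union"
    unionMetaInfoMap.put("t-union", new OperatorStateHandle.StateMetaInfo(new long[] { 0, 10 }, OperatorStateHandle.Mode.UNION));
    OperatorStateHandle unionHandle = new OperatorStreamStateHandle(unionMetaInfoMap, new ByteStreamStateHandle("test-union", new byte[30]));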

Example 28 with OperatorID

Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.

In the class StateAssignmentOperationTest, the method toExecutionVertices.

private Map<OperatorID, ExecutionJobVertex> toExecutionVertices(JobVertex... jobVertices) throws JobException, JobExecutionException {
    JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(jobVertices);
    ExecutionGraph eg = TestingDefaultExecutionGraphBuilder.newBuilder().setJobGraph(jobGraph).build();
    return Arrays.stream(jobVertices).collect(Collectors.toMap(jobVertex -> jobVertex.getOperatorIDs().get(0).getGeneratedOperatorID(), jobVertex -> {
        try {
            return eg.getJobVertex(jobVertex.getID());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }));
}
Also used : InflightDataRescalingDescriptorUtil.set(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptorUtil.set) StateHandleDummyUtil.createNewOperatorStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewOperatorStateHandle) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) TestingDefaultExecutionGraphBuilder(org.apache.flink.runtime.executiongraph.TestingDefaultExecutionGraphBuilder) RANGE(org.apache.flink.runtime.io.network.api.writer.SubtaskStateMapper.RANGE) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) SubtaskStateMapper(org.apache.flink.runtime.io.network.api.writer.SubtaskStateMapper) Random(java.util.Random) RESCALING(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptor.InflightDataGateOrPartitionRescalingDescriptor.MappingType.RESCALING) Collections.singletonList(java.util.Collections.singletonList) ARBITRARY(org.apache.flink.runtime.io.network.api.writer.SubtaskStateMapper.ARBITRARY) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) InflightDataGateOrPartitionRescalingDescriptor(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptor.InflightDataGateOrPartitionRescalingDescriptor) StateHandleDummyUtil.createNewResultSubpartitionStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewResultSubpartitionStateHandle) KeyGroupRange(org.apache.flink.runtime.state.KeyGroupRange) EnumMap(java.util.EnumMap) KeyedStateHandle(org.apache.flink.runtime.state.KeyedStateHandle) Set(java.util.Set) Collectors(java.util.stream.Collectors) List(java.util.List) StateHandleDummyUtil.createNewInputChannelStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewInputChannelStateHandle) Stream(java.util.stream.Stream) OperatorInstanceID(org.apache.flink.runtime.jobgraph.OperatorInstanceID) Matchers.containsInAnyOrder(org.hamcrest.Matchers.containsInAnyOrder) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) InflightDataRescalingDescriptorUtil.rescalingDescriptor(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptorUtil.rescalingDescriptor) IntStream(java.util.stream.IntStream) JobEdge(org.apache.flink.runtime.jobgraph.JobEdge) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) ExecutionGraph(org.apache.flink.runtime.executiongraph.ExecutionGraph) ResultPartitionType(org.apache.flink.runtime.io.network.partition.ResultPartitionType) HashMap(java.util.HashMap) StateHandleDummyUtil.createNewKeyedStateHandle(org.apache.flink.runtime.checkpoint.StateHandleDummyUtil.createNewKeyedStateHandle) OperatorStreamStateHandle(org.apache.flink.runtime.state.OperatorStreamStateHandle) Function(java.util.function.Function) InflightDataRescalingDescriptorUtil.array(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptorUtil.array) JobVertexID(org.apache.flink.runtime.jobgraph.JobVertexID) HashSet(java.util.HashSet) JobException(org.apache.flink.runtime.JobException) InflightDataRescalingDescriptorUtil.to(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptorUtil.to) JobGraphTestUtils(org.apache.flink.runtime.jobgraph.JobGraphTestUtils) OperatorStateHandle(org.apache.flink.runtime.state.OperatorStateHandle) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) JobExecutionException(org.apache.flink.runtime.client.JobExecutionException) DistributionPattern(org.apache.flink.runtime.jobgraph.DistributionPattern) 
ExecutionJobVertex(org.apache.flink.runtime.executiongraph.ExecutionJobVertex) Matchers.empty(org.hamcrest.Matchers.empty) Collections.emptySet(java.util.Collections.emptySet) ExecutionGraphTestUtils(org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils) ROUND_ROBIN(org.apache.flink.runtime.io.network.api.writer.SubtaskStateMapper.ROUND_ROBIN) Test(org.junit.Test) OperatorIDPair(org.apache.flink.runtime.OperatorIDPair) InflightDataRescalingDescriptorUtil.mappings(org.apache.flink.runtime.checkpoint.InflightDataRescalingDescriptorUtil.mappings) ByteStreamStateHandle(org.apache.flink.runtime.state.memory.ByteStreamStateHandle) Assert(org.junit.Assert) ExecutionVertex(org.apache.flink.runtime.executiongraph.ExecutionVertex) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) NoOpInvokable(org.apache.flink.runtime.testtasks.NoOpInvokable) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) ExecutionGraph(org.apache.flink.runtime.executiongraph.ExecutionGraph) JobException(org.apache.flink.runtime.JobException) JobExecutionException(org.apache.flink.runtime.client.JobExecutionException)
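
A minimal usage sketch of this helper, assuming it is invoked from a test method in the same class; the vertex names, invokable, and parallelism are illustrative (NoOpInvokable appears in the imports above).

    // (the enclosing test method would declare throws Exception for the helper's checked exceptions)
    JobVertex source = new JobVertex("source");
    source.setInvokableClass(NoOpInvokable.class);
    source.setParallelism(4);
    JobVertex map = new JobVertex("map");
    map.setInvokableClass(NoOpInvokable.class);
    map.setParallelism(4);
    // one entry per JobVertex, keyed by the vertex's generated OperatorID
    Map<OperatorID, ExecutionJobVertex> vertices = toExecutionVertices(source, map);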

Example 29 with OperatorID

Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.

In the class CheckpointTestUtils, the method createOperatorStates.

/**
 * Creates a random collection of OperatorState objects containing various types of state
 * handles.
 *
 * @param random The random source used to generate the state handles.
 * @param basePath The base path for a savepoint; null when creating checkpoint state.
 * @param numAllRunningTaskStates Number of task states whose vertices are fully running.
 * @param numPartlyFinishedTaskStates Number of task states whose vertices are partly finished.
 * @param numFullyFinishedTaskStates Number of task states whose vertices are fully finished.
 * @param numSubtasksPerTask Number of subtasks for each task.
 */
public static Collection<OperatorState> createOperatorStates(Random random, @Nullable String basePath, int numAllRunningTaskStates, int numPartlyFinishedTaskStates, int numFullyFinishedTaskStates, int numSubtasksPerTask) {
    List<OperatorState> taskStates = new ArrayList<>(numAllRunningTaskStates + numPartlyFinishedTaskStates + numFullyFinishedTaskStates);
    for (int stateIdx = 0; stateIdx < numAllRunningTaskStates; ++stateIdx) {
        OperatorState taskState = new OperatorState(new OperatorID(), numSubtasksPerTask, 128);
        randomlySetCoordinatorState(taskState, random);
        randomlySetSubtaskState(taskState, IntStream.range(0, numSubtasksPerTask).toArray(), random, basePath);
        taskStates.add(taskState);
    }
    for (int stateIdx = 0; stateIdx < numPartlyFinishedTaskStates; ++stateIdx) {
        OperatorState taskState = new OperatorState(new OperatorID(), numSubtasksPerTask, 128);
        randomlySetCoordinatorState(taskState, random);
        randomlySetSubtaskState(taskState, IntStream.range(0, numSubtasksPerTask / 2).toArray(), random, basePath);
        IntStream.range(numSubtasksPerTask / 2, numSubtasksPerTask).forEach(index -> taskState.putState(index, FinishedOperatorSubtaskState.INSTANCE));
        taskStates.add(taskState);
    }
    for (int stateIdx = 0; stateIdx < numFullyFinishedTaskStates; ++stateIdx) {
        taskStates.add(new FullyFinishedOperatorState(new OperatorID(), numSubtasksPerTask, 128));
    }
    return taskStates;
}
Also used : ArrayList(java.util.ArrayList) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) OperatorState(org.apache.flink.runtime.checkpoint.OperatorState) FullyFinishedOperatorState(org.apache.flink.runtime.checkpoint.FullyFinishedOperatorState)
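
A brief usage sketch in the terms of the javadoc above; the counts and seed are illustrative, and basePath is null to mimic checkpoint (rather than savepoint) state.

    Random random = new Random(42);
    // 2 all-running operators, 1 partly finished, 1 fully finished, 3 subtasks each
    Collection<OperatorState> states = CheckpointTestUtils.createOperatorStates(random, null, 2, 1, 1, 3);
    // one OperatorState per operator is expected: 2 + 1 + 1 = 4 entries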

Example 30 with OperatorID

Use of org.apache.flink.runtime.jobgraph.OperatorID in project flink by apache.

In the class CheckpointStateRestoreTest, the method testNonRestoredState.

/**
 * Tests that the allow-non-restored-state flag is correctly handled.
 *
 * <p>The flag only applies to state that is part of the checkpoint.
 */
@Test
public void testNonRestoredState() throws Exception {
    // --- (1) Create tasks to restore checkpoint with ---
    JobVertexID jobVertexId1 = new JobVertexID();
    JobVertexID jobVertexId2 = new JobVertexID();
    OperatorID operatorId1 = OperatorID.fromJobVertexID(jobVertexId1);
    // 1st JobVertex
    ExecutionVertex vertex11 = mockExecutionVertex(mockExecution(), jobVertexId1, 0, 3);
    ExecutionVertex vertex12 = mockExecutionVertex(mockExecution(), jobVertexId1, 1, 3);
    ExecutionVertex vertex13 = mockExecutionVertex(mockExecution(), jobVertexId1, 2, 3);
    // 2nd JobVertex
    ExecutionVertex vertex21 = mockExecutionVertex(mockExecution(), jobVertexId2, 0, 2);
    ExecutionVertex vertex22 = mockExecutionVertex(mockExecution(), jobVertexId2, 1, 2);
    ExecutionJobVertex jobVertex1 = mockExecutionJobVertex(jobVertexId1, new ExecutionVertex[] { vertex11, vertex12, vertex13 });
    ExecutionJobVertex jobVertex2 = mockExecutionJobVertex(jobVertexId2, new ExecutionVertex[] { vertex21, vertex22 });
    Set<ExecutionJobVertex> tasks = new HashSet<>();
    tasks.add(jobVertex1);
    tasks.add(jobVertex2);
    CheckpointCoordinator coord = new CheckpointCoordinatorBuilder().build();
    // --- (2) Checkpoint misses state for a jobVertex (should work) ---
    Map<OperatorID, OperatorState> checkpointTaskStates = new HashMap<>();
    {
        OperatorState taskState = new OperatorState(operatorId1, 3, 3);
        taskState.putState(0, OperatorSubtaskState.builder().build());
        taskState.putState(1, OperatorSubtaskState.builder().build());
        taskState.putState(2, OperatorSubtaskState.builder().build());
        checkpointTaskStates.put(operatorId1, taskState);
    }
    CompletedCheckpoint checkpoint = new CompletedCheckpoint(new JobID(), 0, 1, 2, new HashMap<>(checkpointTaskStates), Collections.<MasterState>emptyList(), CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new TestCompletedCheckpointStorageLocation());
    coord.getCheckpointStore().addCheckpointAndSubsumeOldestOne(checkpoint, new CheckpointsCleaner(), () -> {
    });
    assertTrue(coord.restoreLatestCheckpointedStateToAll(tasks, false));
    assertTrue(coord.restoreLatestCheckpointedStateToAll(tasks, true));
    // --- (3) JobVertex missing for task state that is part of the checkpoint ---
    JobVertexID newJobVertexID = new JobVertexID();
    OperatorID newOperatorID = OperatorID.fromJobVertexID(newJobVertexID);
    // There is no task for this
    {
        OperatorState taskState = new OperatorState(newOperatorID, 1, 1);
        taskState.putState(0, OperatorSubtaskState.builder().build());
        checkpointTaskStates.put(newOperatorID, taskState);
    }
    checkpoint = new CompletedCheckpoint(new JobID(), 1, 2, 3, new HashMap<>(checkpointTaskStates), Collections.<MasterState>emptyList(), CheckpointProperties.forCheckpoint(CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new TestCompletedCheckpointStorageLocation());
    coord.getCheckpointStore().addCheckpointAndSubsumeOldestOne(checkpoint, new CheckpointsCleaner(), () -> {
    });
    // (i) Allow non restored state (should succeed)
    final boolean restored = coord.restoreLatestCheckpointedStateToAll(tasks, true);
    assertTrue(restored);
    // (ii) Don't allow non restored state (should fail)
    try {
        coord.restoreLatestCheckpointedStateToAll(tasks, false);
        fail("Did not throw the expected Exception.");
    } catch (IllegalStateException ignored) {
    }
}
Also used : HashMap(java.util.HashMap) JobVertexID(org.apache.flink.runtime.jobgraph.JobVertexID) TestCompletedCheckpointStorageLocation(org.apache.flink.runtime.state.testutils.TestCompletedCheckpointStorageLocation) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) ExecutionVertex(org.apache.flink.runtime.executiongraph.ExecutionVertex) CheckpointCoordinatorBuilder(org.apache.flink.runtime.checkpoint.CheckpointCoordinatorTestingUtils.CheckpointCoordinatorBuilder) ExecutionJobVertex(org.apache.flink.runtime.executiongraph.ExecutionJobVertex) JobID(org.apache.flink.api.common.JobID) HashSet(java.util.HashSet) Test(org.junit.Test)
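
Outside this coordinator-level test, the same flag is what a job submission typically carries when resuming from a savepoint. A hedged sketch, assuming SavepointRestoreSettings.forPath(path, allowNonRestoredState) and JobGraph#setSavepointRestoreSettings behave as in current Flink releases; the savepoint path is hypothetical.

    // allowNonRestoredState = true: snapshotted state that maps to no operator in the new job graph is skipped
    SavepointRestoreSettings settings = SavepointRestoreSettings.forPath("file:///tmp/savepoints/savepoint-abc", true);
    jobGraph.setSavepointRestoreSettings(settings);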

Aggregations

OperatorID (org.apache.flink.runtime.jobgraph.OperatorID) 211
Test (org.junit.Test) 132
HashMap (java.util.HashMap) 46
JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID) 44
StreamConfig (org.apache.flink.streaming.api.graph.StreamConfig) 41
JobID (org.apache.flink.api.common.JobID) 38
Configuration (org.apache.flink.configuration.Configuration) 30
ExecutionAttemptID (org.apache.flink.runtime.executiongraph.ExecutionAttemptID) 28
ExecutionJobVertex (org.apache.flink.runtime.executiongraph.ExecutionJobVertex) 28
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 24
ExecutionGraph (org.apache.flink.runtime.executiongraph.ExecutionGraph) 23
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState) 21
OperatorStateHandle (org.apache.flink.runtime.state.OperatorStateHandle) 21
ArrayList (java.util.ArrayList) 20
HashSet (java.util.HashSet) 20
TaskStateSnapshot (org.apache.flink.runtime.checkpoint.TaskStateSnapshot) 19
OperatorStreamStateHandle (org.apache.flink.runtime.state.OperatorStreamStateHandle) 19
MemoryStateBackend (org.apache.flink.runtime.state.memory.MemoryStateBackend) 19
IOException (java.io.IOException) 18
ExecutionVertex (org.apache.flink.runtime.executiongraph.ExecutionVertex) 18