Example usage of org.apache.flink.runtime.jobgraph.OperatorID in the Apache Flink project, taken from the class CompletedCheckpointTest, method testCleanUpOnSubsume.
/**
 * Verifies checkpoint clean-up on subsumption: shared states are registered after restore,
 * and subsuming the checkpoint discards the operator state, the storage location, and the
 * metadata handle.
 */
@Test
public void testCleanUpOnSubsume() throws Exception {
    // One mocked operator state keyed by a fresh operator id.
    OperatorState operatorState = mock(OperatorState.class);
    Map<OperatorID, OperatorState> statesById = new HashMap<>();
    statesById.put(new OperatorID(), operatorState);

    EmptyStreamStateHandle metadataHandle = new EmptyStreamStateHandle();
    TestCompletedCheckpointStorageLocation storageLocation =
            new TestCompletedCheckpointStorageLocation(metadataHandle, "ptr");

    CheckpointProperties properties =
            new CheckpointProperties(
                    false, CheckpointType.CHECKPOINT, true, false, false, false, false, false);

    CompletedCheckpoint completedCheckpoint =
            new CompletedCheckpoint(
                    new JobID(),
                    0,
                    0,
                    1,
                    statesById,
                    Collections.emptyList(),
                    properties,
                    storageLocation);

    // Restoring must register the shared state exactly once under checkpoint id 0.
    SharedStateRegistry registry = new SharedStateRegistryImpl();
    completedCheckpoint.registerSharedStatesAfterRestored(registry);
    verify(operatorState, times(1)).registerSharedStates(registry, 0L);

    // Subsume: state, location, and metadata must all be disposed.
    completedCheckpoint.discardOnSubsume();
    verify(operatorState, times(1)).discardState();
    assertTrue(storageLocation.isDisposed());
    assertTrue(metadataHandle.isDisposed());
}
Example usage of org.apache.flink.runtime.jobgraph.OperatorID in the Apache Flink project, taken from the class DefaultCheckpointPlanTest, method testFulfillFinishedStates.
/**
 * Checks that {@code fulfillFinishedTaskStatus} fills in operator states correctly for three
 * cases: a vertex whose tasks all finished at runtime, a vertex reported finished on restore,
 * and a vertex with only some tasks finished.
 */
@Test
public void testFulfillFinishedStates() throws Exception {
    JobVertexID fullyFinishedVertexId = new JobVertexID();
    JobVertexID finishedOnRestoreVertexId = new JobVertexID();
    JobVertexID partiallyFinishedVertexId = new JobVertexID();
    OperatorID fullyFinishedOperatorId = new OperatorID();
    OperatorID finishedOnRestoreOperatorId = new OperatorID();
    OperatorID partiallyFinishedOperatorId = new OperatorID();

    // Three job vertices, each with parallelism 2 and a single generated operator id.
    ExecutionGraph executionGraph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(
                            fullyFinishedVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(fullyFinishedOperatorId)),
                            true)
                    .addJobVertex(
                            finishedOnRestoreVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(finishedOnRestoreOperatorId)),
                            true)
                    .addJobVertex(
                            partiallyFinishedVertexId,
                            2,
                            2,
                            Collections.singletonList(
                                    OperatorIDPair.generatedIDOnly(partiallyFinishedOperatorId)),
                            true)
                    .build();

    ExecutionVertex[] fullyFinishedTasks =
            executionGraph.getJobVertex(fullyFinishedVertexId).getTaskVertices();
    ExecutionVertex[] finishedOnRestoreTasks =
            executionGraph.getJobVertex(finishedOnRestoreVertexId).getTaskVertices();
    ExecutionVertex[] partiallyFinishedTasks =
            executionGraph.getJobVertex(partiallyFinishedVertexId).getTaskVertices();

    // All tasks of the first vertex finish; only the first task of the third vertex finishes.
    for (ExecutionVertex task : fullyFinishedTasks) {
        task.getCurrentExecutionAttempt().markFinished();
    }
    partiallyFinishedTasks[0].getCurrentExecutionAttempt().markFinished();

    CheckpointPlan checkpointPlan = createCheckpointPlan(executionGraph);

    // The second vertex is reported as finished on restore rather than at runtime.
    for (ExecutionVertex task : finishedOnRestoreTasks) {
        checkpointPlan.reportTaskFinishedOnRestore(task);
    }

    Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
    checkpointPlan.fulfillFinishedTaskStatus(operatorStates);

    assertEquals(3, operatorStates.size());
    assertTrue(operatorStates.get(fullyFinishedOperatorId).isFullyFinished());
    assertTrue(operatorStates.get(finishedOnRestoreOperatorId).isFullyFinished());

    // Partially finished: the vertex is not fully finished, but subtask 0 is.
    OperatorState partialState = operatorStates.get(partiallyFinishedOperatorId);
    assertFalse(partialState.isFullyFinished());
    assertTrue(partialState.getState(0).isFinished());
}
Example usage of org.apache.flink.runtime.jobgraph.OperatorID in the Apache Flink project, taken from the class DefaultCheckpointPlanTest, method testAbortionIfPartiallyFinishedVertexUsedUnionListState.
/**
 * Verifies that fulfilling finished task status fails with a {@link FlinkRuntimeException}
 * when a partially finished vertex has used UnionListState.
 */
@Test
public void testAbortionIfPartiallyFinishedVertexUsedUnionListState() throws Exception {
    JobVertexID vertexId = new JobVertexID();
    OperatorID operatorId = new OperatorID();

    // One vertex with parallelism 2; only the first task finishes, leaving it partially finished.
    ExecutionGraph executionGraph =
            new CheckpointCoordinatorTestingUtils.CheckpointExecutionGraphBuilder()
                    .addJobVertex(
                            vertexId,
                            2,
                            2,
                            Collections.singletonList(OperatorIDPair.generatedIDOnly(operatorId)),
                            true)
                    .build();
    ExecutionVertex[] tasks = executionGraph.getJobVertex(vertexId).getTaskVertices();
    tasks[0].getCurrentExecutionAttempt().markFinished();

    CheckpointPlan checkpointPlan = createCheckpointPlan(executionGraph);

    // Subtask 0 carries UnionListState, which is incompatible with partial finishing.
    Map<OperatorID, OperatorState> operatorStates = new HashMap<>();
    OperatorState operatorState = new OperatorState(operatorId, 2, 2);
    operatorState.putState(0, createSubtaskStateWithUnionListState(TEMPORARY_FOLDER.newFile()));
    operatorStates.put(operatorId, operatorState);

    String expectedMessage =
            String.format(
                    "The vertex %s (id = %s) has "
                            + "used UnionListState, but part of its tasks are FINISHED",
                    executionGraph.getJobVertex(vertexId).getName(),
                    vertexId);
    expectedException.expect(FlinkRuntimeException.class);
    expectedException.expectMessage(expectedMessage);

    checkpointPlan.fulfillFinishedTaskStatus(operatorStates);
}
Example usage of org.apache.flink.runtime.jobgraph.OperatorID in the Apache Flink project, taken from the class TaskStateSnapshotTest, method hasState.
/**
 * Verifies that {@code TaskStateSnapshot#hasState} reports false while only empty subtask
 * states are registered and flips to true once a subtask state with actual state is added.
 */
@Test
public void hasState() {
    Random random = new Random(0x42);
    TaskStateSnapshot snapshot = new TaskStateSnapshot();

    // Fresh snapshot: no state at all.
    Assert.assertFalse(snapshot.hasState());

    // Adding an empty subtask state must not make the snapshot report state.
    OperatorSubtaskState emptySubtaskState = OperatorSubtaskState.builder().build();
    Assert.assertFalse(emptySubtaskState.hasState());
    snapshot.putSubtaskStateByOperatorID(new OperatorID(), emptySubtaskState);
    Assert.assertFalse(snapshot.hasState());

    // A subtask state carrying managed operator state makes the snapshot non-empty.
    OperatorStateHandle handle = StateHandleDummyUtil.createNewOperatorStateHandle(2, random);
    OperatorSubtaskState nonEmptySubtaskState =
            OperatorSubtaskState.builder().setManagedOperatorState(handle).build();
    Assert.assertTrue(nonEmptySubtaskState.hasState());
    snapshot.putSubtaskStateByOperatorID(new OperatorID(), nonEmptySubtaskState);
    Assert.assertTrue(snapshot.hasState());
}
Example usage of org.apache.flink.runtime.jobgraph.OperatorID in the Apache Flink project, taken from the class TaskStateSnapshotTest, method getStateSize.
/**
 * Verifies that {@code TaskStateSnapshot#getStateSize} is zero for empty subtask states and
 * equals the sum of the registered handles' sizes once real state (managed and raw) is added.
 */
@Test
public void getStateSize() {
    Random random = new Random(0x42);
    TaskStateSnapshot snapshot = new TaskStateSnapshot();

    // Fresh snapshot reports size zero.
    Assert.assertEquals(0, snapshot.getStateSize());

    // An empty subtask state contributes nothing to the size.
    OperatorSubtaskState emptySubtaskState = OperatorSubtaskState.builder().build();
    Assert.assertFalse(emptySubtaskState.hasState());
    snapshot.putSubtaskStateByOperatorID(new OperatorID(), emptySubtaskState);
    Assert.assertEquals(0, snapshot.getStateSize());

    // One managed and one raw operator state handle under distinct operator ids.
    OperatorStateHandle managedHandle =
            StateHandleDummyUtil.createNewOperatorStateHandle(2, random);
    OperatorSubtaskState managedSubtaskState =
            OperatorSubtaskState.builder().setManagedOperatorState(managedHandle).build();
    OperatorStateHandle rawHandle =
            StateHandleDummyUtil.createNewOperatorStateHandle(2, random);
    OperatorSubtaskState rawSubtaskState =
            OperatorSubtaskState.builder().setRawOperatorState(rawHandle).build();
    snapshot.putSubtaskStateByOperatorID(new OperatorID(), managedSubtaskState);
    snapshot.putSubtaskStateByOperatorID(new OperatorID(), rawSubtaskState);

    // Total size is exactly the sum of both handles' reported sizes.
    long expectedSize = managedHandle.getStateSize() + rawHandle.getStateSize();
    Assert.assertEquals(expectedSize, snapshot.getStateSize());
}
Aggregations