use of org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings in project flink by apache.
In the class CheckpointConfigHandlerTest, the method createGraphAndSettings:
/**
 * Builds a mocked {@code AccessExecutionGraph} backed by fixed snapshotting
 * settings, for use by the handler tests.
 *
 * @param externalized whether checkpoints should be externalized (with
 *                     delete-on-cancellation enabled)
 * @param exactlyOnce  whether the checkpointing mode is exactly-once
 * @return the mocked graph together with the settings it reports
 */
private static GraphAndSettings createGraphAndSettings(boolean externalized, boolean exactlyOnce) {
    // Arbitrary but distinctive values so the tests can recognize them in the JSON.
    final long checkpointInterval = 18231823L;
    final long checkpointTimeout = 996979L;
    final long pauseBetweenCheckpoints = 119191919L;
    final int concurrentCheckpoints = 12929329;

    ExternalizedCheckpointSettings externalizedSetting;
    if (externalized) {
        externalizedSetting = ExternalizedCheckpointSettings.externalizeCheckpoints(true);
    } else {
        externalizedSetting = ExternalizedCheckpointSettings.none();
    }

    JobSnapshottingSettings settings = new JobSnapshottingSettings(
        Collections.<JobVertexID>emptyList(),
        Collections.<JobVertexID>emptyList(),
        Collections.<JobVertexID>emptyList(),
        checkpointInterval,
        checkpointTimeout,
        pauseBetweenCheckpoints,
        concurrentCheckpoints,
        externalizedSetting,
        null,
        exactlyOnce);

    // The mock only needs to answer getJobSnapshottingSettings().
    AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
    when(graph.getJobSnapshottingSettings()).thenReturn(settings);

    return new GraphAndSettings(graph, settings, externalizedSetting);
}
use of org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings in project flink by apache.
In the class CheckpointCoordinatorExternalizedCheckpointsTest, the method testTriggerAndConfirmSimpleExternalizedCheckpoint:
/**
 * Triggers multiple externalized checkpoints and verifies that the metadata
 * files have been created.
 */
@Test
public void testTriggerAndConfirmSimpleExternalizedCheckpoint() throws Exception {
    final JobID jid = new JobID();

    // Externalized checkpoints that are retained on cancellation (delete flag = false).
    final ExternalizedCheckpointSettings externalizedCheckpointSettings =
        ExternalizedCheckpointSettings.externalizeCheckpoints(false);

    final File checkpointDir = tmp.newFolder();

    // create some mock Execution vertices that receive the checkpoint trigger messages
    final ExecutionAttemptID attemptID1 = new ExecutionAttemptID();
    final ExecutionAttemptID attemptID2 = new ExecutionAttemptID();
    ExecutionVertex vertex1 = CheckpointCoordinatorTest.mockExecutionVertex(attemptID1);
    ExecutionVertex vertex2 = CheckpointCoordinatorTest.mockExecutionVertex(attemptID2);

    Map<JobVertexID, ExecutionJobVertex> jobVertices = new HashMap<>();
    jobVertices.put(vertex1.getJobvertexId(), vertex1.getJobVertex());
    jobVertices.put(vertex2.getJobvertexId(), vertex2.getJobVertex());

    // set up the coordinator and validate the initial state
    CheckpointCoordinator coord = new CheckpointCoordinator(
        jid,
        600000,
        600000,
        0,
        Integer.MAX_VALUE,
        externalizedCheckpointSettings,
        new ExecutionVertex[] { vertex1, vertex2 },
        new ExecutionVertex[] { vertex1, vertex2 },
        new ExecutionVertex[] { vertex1, vertex2 },
        new StandaloneCheckpointIDCounter(),
        new StandaloneCompletedCheckpointStore(1),
        checkpointDir.getAbsolutePath(),
        Executors.directExecutor());

    assertEquals(0, coord.getNumberOfPendingCheckpoints());
    assertEquals(0, coord.getNumberOfRetainedSuccessfulCheckpoints());

    // Trigger three checkpoints at strictly increasing timestamps; each one is
    // acknowledged by both tasks and verified (previously this sequence was
    // copy-pasted three times).
    triggerAckAndVerifyCheckpoint(coord, jid, attemptID1, attemptID2, jobVertices, vertex1, vertex2, System.currentTimeMillis());
    triggerAckAndVerifyCheckpoint(coord, jid, attemptID1, attemptID2, jobVertices, vertex1, vertex2, System.currentTimeMillis() + 7);
    triggerAckAndVerifyCheckpoint(coord, jid, attemptID1, attemptID2, jobVertices, vertex1, vertex2, System.currentTimeMillis() + 146);

    coord.shutdown(JobStatus.FINISHED);
}

/**
 * Triggers one checkpoint at the given timestamp, acknowledges it for both
 * execution attempts, and verifies the resulting externalized checkpoint and
 * its restorability.
 */
private void triggerAckAndVerifyCheckpoint(
        CheckpointCoordinator coord,
        JobID jid,
        ExecutionAttemptID attemptID1,
        ExecutionAttemptID attemptID2,
        Map<JobVertexID, ExecutionJobVertex> jobVertices,
        ExecutionVertex vertex1,
        ExecutionVertex vertex2,
        long timestamp) throws Exception {
    coord.triggerCheckpoint(timestamp, false);

    // Exactly one pending checkpoint exists right after the trigger.
    long checkpointId = coord.getPendingCheckpoints().entrySet().iterator().next().getKey();

    coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID1, checkpointId));
    coord.receiveAcknowledgeMessage(new AcknowledgeCheckpoint(jid, attemptID2, checkpointId));

    CompletedCheckpoint latest = coord.getCheckpointStore().getLatestCheckpoint();
    verifyExternalizedCheckpoint(latest, jid, checkpointId, timestamp);
    verifyExternalizedCheckpointRestore(latest, jobVertices, vertex1, vertex2);
}
use of org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings in project flink by apache.
In the class CheckpointConfigHandler, the method createCheckpointConfigJson:
/**
 * Renders the graph's checkpointing configuration as a JSON string.
 *
 * <p>Returns {@code "{}"} when the graph has no snapshotting settings
 * (i.e. checkpointing is disabled).
 *
 * @param graph the execution graph whose settings are rendered
 * @return JSON with mode, interval, timeout, min_pause, max_concurrent and
 *         an "externalization" sub-object
 * @throws IOException if writing the JSON fails
 */
private static String createCheckpointConfigJson(AccessExecutionGraph graph) throws IOException {
    JobSnapshottingSettings settings = graph.getJobSnapshottingSettings();

    // Check before allocating the generator: the original created the
    // JsonGenerator first and leaked it (never closed) on this early return.
    if (settings == null) {
        return "{}";
    }

    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);

    gen.writeStartObject();
    {
        gen.writeStringField("mode", settings.isExactlyOnce() ? "exactly_once" : "at_least_once");
        gen.writeNumberField("interval", settings.getCheckpointInterval());
        gen.writeNumberField("timeout", settings.getCheckpointTimeout());
        gen.writeNumberField("min_pause", settings.getMinPauseBetweenCheckpoints());
        gen.writeNumberField("max_concurrent", settings.getMaxConcurrentCheckpoints());

        ExternalizedCheckpointSettings externalization = settings.getExternalizedCheckpointSettings();
        gen.writeObjectFieldStart("externalization");
        {
            if (externalization.externalizeCheckpoints()) {
                gen.writeBooleanField("enabled", true);
                // Only meaningful when externalization is enabled.
                gen.writeBooleanField("delete_on_cancellation", externalization.deleteOnCancellation());
            } else {
                gen.writeBooleanField("enabled", false);
            }
        }
        gen.writeEndObject();
    }
    gen.writeEndObject();

    gen.close();
    return writer.toString();
}
use of org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings in project flink by apache.
In the class CheckpointConfigHandlerTest, the method testEnabledExternalizedCheckpointSettings:
/**
 * Tests that the externalized checkpoint settings are forwarded.
 */
@Test
public void testEnabledExternalizedCheckpointSettings() throws Exception {
    GraphAndSettings graphAndSettings = createGraphAndSettings(true, false);
    AccessExecutionGraph graph = graphAndSettings.graph;
    ExternalizedCheckpointSettings expected = graphAndSettings.externalizedSettings;

    CheckpointConfigHandler handler = new CheckpointConfigHandler(mock(ExecutionGraphHolder.class));
    String json = handler.handleRequest(graph, Collections.<String, String>emptyMap());

    // The "externalization" sub-object must mirror the configured settings.
    JsonNode externalization = new ObjectMapper().readTree(json).get("externalization");
    assertNotNull(externalization);

    assertEquals(expected.externalizeCheckpoints(), externalization.get("enabled").asBoolean());
    assertEquals(expected.deleteOnCancellation(), externalization.get("delete_on_cancellation").asBoolean());
}
use of org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings in project flink by apache.
In the class CheckpointConfigHandlerTest, the method testArchiver:
@Test
public void testArchiver() throws IOException {
    // Build a graph with externalized, exactly-once checkpointing enabled.
    JsonArchivist archivist = new CheckpointConfigHandler.CheckpointConfigJsonArchivist();
    GraphAndSettings graphAndSettings = createGraphAndSettings(true, true);

    AccessExecutionGraph graph = graphAndSettings.graph;
    when(graph.getJobID()).thenReturn(new JobID());
    JobSnapshottingSettings expectedSettings = graphAndSettings.snapshottingSettings;
    ExternalizedCheckpointSettings expectedExternalization = graphAndSettings.externalizedSettings;

    // Exactly one archive entry is produced, at the checkpoint-config path.
    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
    Assert.assertEquals(1, archives.size());
    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + graph.getJobID() + "/checkpoints/config", archive.getPath());

    JsonNode rootNode = new ObjectMapper().readTree(archive.getJson());

    // Top-level fields must reflect the configured snapshotting settings.
    Assert.assertEquals("exactly_once", rootNode.get("mode").asText());
    Assert.assertEquals(expectedSettings.getCheckpointInterval(), rootNode.get("interval").asLong());
    Assert.assertEquals(expectedSettings.getCheckpointTimeout(), rootNode.get("timeout").asLong());
    Assert.assertEquals(expectedSettings.getMinPauseBetweenCheckpoints(), rootNode.get("min_pause").asLong());
    Assert.assertEquals(expectedSettings.getMaxConcurrentCheckpoints(), rootNode.get("max_concurrent").asInt());

    // The "externalization" sub-object must mirror the externalized settings.
    JsonNode externalization = rootNode.get("externalization");
    Assert.assertNotNull(externalization);
    Assert.assertEquals(expectedExternalization.externalizeCheckpoints(), externalization.get("enabled").asBoolean());
    Assert.assertEquals(expectedExternalization.deleteOnCancellation(), externalization.get("delete_on_cancellation").asBoolean());
}
Aggregations