Use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
The class SubtasksTimesHandlerTest, method testArchiver:
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new SubtasksTimesHandler.SubtasksTimesJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask();
    AccessExecution originalAttempt = ArchivedJobGenerationUtils.getTestAttempt();

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/subtasktimes", archive.getPath());
    compareSubtaskTimes(originalTask, originalAttempt, archive.getJson());
}
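Every test in this collection exercises the same pattern: a handler-specific JsonArchivist turns an AccessExecutionGraph into one or more ArchivedJson entries, each pairing the REST path of a handler with the JSON it would serve. A minimal sketch of such an archivist follows; the JobNameJsonArchivist class, its path and its payload are purely illustrative, and it assumes ArchivedJson exposes a (path, json) constructor matching the getPath()/getJson() accessors used in these tests.

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;

import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;

public class JobNameJsonArchivist implements JsonArchivist {

    @Override
    public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
        // Serialize the view of the graph this (hypothetical) handler exposes;
        // here a tiny hand-built JSON document containing only the job name.
        String json = "{\"name\":\"" + graph.getJobName() + "\"}";
        // The path mirrors the REST URL under which the handler would serve this JSON.
        String path = "/jobs/" + graph.getJobID() + "/name";
        return Collections.singletonList(new ArchivedJson(path, json));
    }
}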
Use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
The class CheckpointConfigHandlerTest, method testArchiver:
@Test
public void testArchiver() throws IOException {
    JsonArchivist archivist = new CheckpointConfigHandler.CheckpointConfigJsonArchivist();
    GraphAndSettings graphAndSettings = createGraphAndSettings(true, true);

    AccessExecutionGraph graph = graphAndSettings.graph;
    when(graph.getJobID()).thenReturn(new JobID());
    JobSnapshottingSettings settings = graphAndSettings.snapshottingSettings;
    ExternalizedCheckpointSettings externalizedSettings = graphAndSettings.externalizedSettings;

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + graph.getJobID() + "/checkpoints/config", archive.getPath());

    ObjectMapper mapper = new ObjectMapper();
    JsonNode rootNode = mapper.readTree(archive.getJson());
    Assert.assertEquals("exactly_once", rootNode.get("mode").asText());
    Assert.assertEquals(settings.getCheckpointInterval(), rootNode.get("interval").asLong());
    Assert.assertEquals(settings.getCheckpointTimeout(), rootNode.get("timeout").asLong());
    Assert.assertEquals(settings.getMinPauseBetweenCheckpoints(), rootNode.get("min_pause").asLong());
    Assert.assertEquals(settings.getMaxConcurrentCheckpoints(), rootNode.get("max_concurrent").asInt());

    JsonNode externalizedNode = rootNode.get("externalization");
    Assert.assertNotNull(externalizedNode);
    Assert.assertEquals(externalizedSettings.externalizeCheckpoints(), externalizedNode.get("enabled").asBoolean());
    Assert.assertEquals(externalizedSettings.deleteOnCancellation(), externalizedNode.get("delete_on_cancellation").asBoolean());
}
Use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
The class CheckpointStatsSubtaskDetailsHandlerTest, method testArchiver:
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new CheckpointStatsDetailsSubtasksHandler.CheckpointStatsDetailsSubtasksJsonArchivist();
    ObjectMapper mapper = new ObjectMapper();

    PendingCheckpointStats checkpoint = mock(PendingCheckpointStats.class);
    when(checkpoint.getCheckpointId()).thenReturn(1992139L);
    when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.IN_PROGRESS);
    // trigger timestamp of 0 so that the ack timestamp equals the duration
    when(checkpoint.getTriggerTimestamp()).thenReturn(0L);

    TaskStateStats task = createTaskStateStats(1237);
    when(checkpoint.getAllTaskStateStats()).thenReturn(Collections.singletonList(task));

    CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
    when(history.getCheckpoints()).thenReturn(Collections.<AbstractCheckpointStats>singletonList(checkpoint));
    CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
    when(snapshot.getHistory()).thenReturn(history);

    AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
    when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);
    when(graph.getJobID()).thenReturn(new JobID());

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + graph.getJobID() + "/checkpoints/details/" + checkpoint.getCheckpointId() + "/subtasks/" + task.getJobVertexId(), archive.getPath());

    JsonNode rootNode = mapper.readTree(archive.getJson());
    assertEquals(checkpoint.getCheckpointId(), rootNode.get("id").asLong());
    assertEquals(checkpoint.getStatus().toString(), rootNode.get("status").asText());
    verifyTaskNode(rootNode, task, checkpoint.getTriggerTimestamp());
}
Use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
The class JobAccumulatorsHandlerTest, method testArchiver:
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new JobAccumulatorsHandler.JobAccumulatorsJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/accumulators", archive.getPath());
    compareAccumulators(originalJob, archive.getJson());
}
Use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
The class JobConfigHandlerTest, method testArchiver:
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new JobConfigHandler.JobConfigJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/config", archive.getPath());
    compareJobConfig(originalJob, archive.getJson());
}
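Since each archivist above contributes exactly one ArchivedJson per job, several of them can be run against the same graph and their results merged into a single archive. Below is a minimal sketch using only the archiveJsonWithPath call and the ArchivedJson accessors exercised in these tests; the ArchiveAll helper class is illustrative, not part of Flink.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;

public class ArchiveAll {

    // Runs each archivist against the same graph and collects every (path, json) pair.
    public static List<ArchivedJson> archiveAll(AccessExecutionGraph graph, Collection<JsonArchivist> archivists) throws IOException {
        List<ArchivedJson> allArchives = new ArrayList<>();
        for (JsonArchivist archivist : archivists) {
            allArchives.addAll(archivist.archiveJsonWithPath(graph));
        }
        return allArchives;
    }
}

Called with, for example, Arrays.asList(new JobConfigHandler.JobConfigJsonArchivist(), new JobAccumulatorsHandler.JobAccumulatorsJsonArchivist()) and the graph from ArchivedJobGenerationUtils.getTestJob(), this would yield the /config and /accumulators entries asserted above.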