Use of org.apache.flink.runtime.executiongraph.AccessExecutionGraph in project flink by apache.
In the class JobDetailsHandlerTest, the method testJsonGeneration:
@Test
public void testJsonGeneration() throws Exception {
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    String json = JobDetailsHandler.createJobDetailsJson(originalJob, null);
    compareJobDetails(originalJob, json);
}
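The compareJobDetails helper used above is not shown on this page. As a rough sketch only, such a check could parse the JSON with Jackson and compare a couple of top-level fields against the graph; the field names "jid" and "name" and the helper body below are assumptions for illustration, not the actual Flink test code.
// Illustrative sketch of a compareJobDetails-style check (assumed field names, not the real helper).
private static void compareJobDetails(AccessExecutionGraph originalJob, String json) throws IOException {
    JsonNode result = new ObjectMapper().readTree(json);
    Assert.assertEquals(originalJob.getJobID().toString(), result.get("jid").asText());
    Assert.assertEquals(originalJob.getJobName(), result.get("name").asText());
}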
Use of org.apache.flink.runtime.executiongraph.AccessExecutionGraph in project flink by apache.
In the class JobExceptionsHandlerTest, the method testArchiver:
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new JobExceptionsHandler.JobExceptionsJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());

    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/exceptions", archive.getPath());
    compareExceptions(originalJob, archive.getJson());
}
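The ArchivedJson pairs returned by archiveJsonWithPath are what eventually gets persisted for the history server. Purely as an illustration (not Flink's actual archiving code), the path/JSON pairs could be written to disk like this, using only the getPath() and getJson() accessors seen above:
// Illustrative only: mirror each archived REST path as a file under a base directory.
private static void writeArchives(Collection<ArchivedJson> archives, java.nio.file.Path baseDir) throws IOException {
    for (ArchivedJson entry : archives) {
        java.nio.file.Path target = baseDir.resolve(entry.getPath().substring(1)); // strip the leading '/'
        java.nio.file.Files.createDirectories(target.getParent());
        java.nio.file.Files.write(target, entry.getJson().getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
}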
Use of org.apache.flink.runtime.executiongraph.AccessExecutionGraph in project flink by apache.
In the class JobExceptionsHandlerTest, the method testJsonGeneration:
@Test
public void testJsonGeneration() throws Exception {
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    String json = JobExceptionsHandler.createJobExceptionsJson(originalJob);
    compareExceptions(originalJob, json);
}
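compareExceptions is likewise omitted on this page. A minimal sketch of such a check follows; the "root-exception" field name is an assumption about the generated JSON, and the real helper compares the content against the failure information recorded in the graph rather than only checking presence.
// Illustrative sketch of a compareExceptions-style check (assumed field name, reduced to a presence check).
private static void compareExceptions(AccessExecutionGraph originalJob, String json) throws IOException {
    JsonNode result = new ObjectMapper().readTree(json);
    // The real helper asserts the value matches the failure cause recorded in originalJob.
    Assert.assertTrue(result.has("root-exception"));
}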
Use of org.apache.flink.runtime.executiongraph.AccessExecutionGraph in project flink by apache.
In the class CheckpointStatsDetailsHandlerTest, the method testArchiver:
@Test
public void testArchiver() throws IOException {
    JsonArchivist archivist = new CheckpointStatsDetailsHandler.CheckpointStatsDetailsJsonArchivist();

    CompletedCheckpointStats completedCheckpoint = createCompletedCheckpoint();
    FailedCheckpointStats failedCheckpoint = createFailedCheckpoint();
    List<AbstractCheckpointStats> checkpoints = new ArrayList<>();
    checkpoints.add(failedCheckpoint);
    checkpoints.add(completedCheckpoint);

    CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
    when(history.getCheckpoints()).thenReturn(checkpoints);
    CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
    when(snapshot.getHistory()).thenReturn(history);
    AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
    when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);
    when(graph.getJobID()).thenReturn(new JobID());

    ObjectMapper mapper = new ObjectMapper();

    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
    Assert.assertEquals(2, archives.size());
    Iterator<ArchivedJson> iterator = archives.iterator();

    ArchivedJson archive1 = iterator.next();
    Assert.assertEquals("/jobs/" + graph.getJobID() + "/checkpoints/details/" + failedCheckpoint.getCheckpointId(), archive1.getPath());
    compareFailedCheckpoint(failedCheckpoint, mapper.readTree(archive1.getJson()));

    ArchivedJson archive2 = iterator.next();
    Assert.assertEquals("/jobs/" + graph.getJobID() + "/checkpoints/details/" + completedCheckpoint.getCheckpointId(), archive2.getPath());
    compareCompletedCheckpoint(completedCheckpoint, mapper.readTree(archive2.getJson()));
}
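The factory helpers createCompletedCheckpoint() and createFailedCheckpoint() are not shown on this page. The sketch below only illustrates the Mockito pattern they could follow, in the same style as the graph and snapshot mocks above; the real helpers stub every accessor the JSON generation reads, so the chosen ids, the CheckpointStatsStatus constants and the reduced set of stubs are assumptions for illustration.
// Illustrative sketches of the omitted checkpoint factories (assumed values and stubs).
private static CompletedCheckpointStats createCompletedCheckpoint() {
    CompletedCheckpointStats checkpoint = mock(CompletedCheckpointStats.class);
    when(checkpoint.getCheckpointId()).thenReturn(1818213L);
    when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.COMPLETED);
    when(checkpoint.getProperties()).thenReturn(mock(CheckpointProperties.class));
    return checkpoint;
}

private static FailedCheckpointStats createFailedCheckpoint() {
    FailedCheckpointStats checkpoint = mock(FailedCheckpointStats.class);
    when(checkpoint.getCheckpointId()).thenReturn(1818214L);
    when(checkpoint.getStatus()).thenReturn(CheckpointStatsStatus.FAILED);
    when(checkpoint.getProperties()).thenReturn(mock(CheckpointProperties.class));
    return checkpoint;
}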
Use of org.apache.flink.runtime.executiongraph.AccessExecutionGraph in project flink by apache.
In the class CheckpointStatsDetailsHandlerTest, the method triggerRequest:
private static JsonNode triggerRequest(AbstractCheckpointStats checkpoint) throws Exception {
    CheckpointStatsHistory history = mock(CheckpointStatsHistory.class);
    when(history.getCheckpointById(anyLong())).thenReturn(checkpoint);
    CheckpointStatsSnapshot snapshot = mock(CheckpointStatsSnapshot.class);
    when(snapshot.getHistory()).thenReturn(history);
    AccessExecutionGraph graph = mock(AccessExecutionGraph.class);
    when(graph.getCheckpointStatsSnapshot()).thenReturn(snapshot);

    CheckpointStatsDetailsHandler handler = new CheckpointStatsDetailsHandler(mock(ExecutionGraphHolder.class), new CheckpointStatsCache(0));
    Map<String, String> params = new HashMap<>();
    params.put("checkpointid", "123");
    String json = handler.handleRequest(graph, params);

    ObjectMapper mapper = new ObjectMapper();
    return mapper.readTree(json);
}
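The tests that call this helper are not shown here. As a hypothetical usage sketch, a caller could run a completed checkpoint through it and sanity-check the parsed response; the "id" field name in the response JSON is an assumption, and this relies on the omitted createCompletedCheckpoint() helper stubbing every accessor the handler reads.
// Hypothetical caller of triggerRequest (assumed "id" field in the response JSON).
AbstractCheckpointStats checkpoint = createCompletedCheckpoint();
JsonNode rootNode = triggerRequest(checkpoint);
Assert.assertEquals(checkpoint.getCheckpointId(), rootNode.get("id").asLong());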