use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
the class JobVertexTaskManagersHandlerTest method testArchiver.
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new JobVertexTaskManagersHandler.JobVertexTaskManagersJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask();
    AccessExecutionVertex originalSubtask = ArchivedJobGenerationUtils.getTestSubtask();
    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());
    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/taskmanagers", archive.getPath());
    compareVertexTaskManagers(originalTask, originalSubtask, archive.getJson());
}
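For context, each ArchivedJson produced by a JsonArchivist is an immutable pair of a REST-style path and the JSON the corresponding handler would have served, exposed through getPath() and getJson() as used in the assertions above. A minimal sketch of consuming such a collection follows; the ArchiveInspection class and its printArchives helper are illustrative and not part of Flink, and the package locations are assumed to match the snippets in this section.
import java.io.IOException;
import java.util.Collection;

import org.apache.flink.runtime.executiongraph.AccessExecutionGraph;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;

// Illustrative helper, not part of Flink: dumps the path/JSON pairs produced
// by any JsonArchivist, the same pairs the assertions above inspect.
public class ArchiveInspection {

    static void printArchives(JsonArchivist archivist, AccessExecutionGraph graph) throws IOException {
        Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(graph);
        for (ArchivedJson archive : archives) {
            // each entry pairs a REST-style URL path with the JSON the handler would serve for it
            System.out.println(archive.getPath() + " -> " + archive.getJson());
        }
    }
}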
use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
the class SubtaskExecutionAttemptDetailsHandlerTest method testArchiver.
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new SubtaskExecutionAttemptDetailsHandler.SubtaskExecutionAttemptDetailsJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask();
    AccessExecution originalAttempt = ArchivedJobGenerationUtils.getTestAttempt();
    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(2, archives.size());
    Iterator<ArchivedJson> iterator = archives.iterator();
    ArchivedJson archive1 = iterator.next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/subtasks/" + originalAttempt.getParallelSubtaskIndex(), archive1.getPath());
    compareAttemptDetails(originalAttempt, archive1.getJson());
    ArchivedJson archive2 = iterator.next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/subtasks/" + originalAttempt.getParallelSubtaskIndex() + "/attempts/" + originalAttempt.getAttemptNumber(), archive2.getPath());
    compareAttemptDetails(originalAttempt, archive2.getJson());
}
use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
the class SubtasksAllAccumulatorsHandlerTest method testArchiver.
@Test
public void testArchiver() throws Exception {
    JsonArchivist archivist = new SubtasksAllAccumulatorsHandler.SubtasksAllAccumulatorsJsonArchivist();
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask();
    Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob);
    Assert.assertEquals(1, archives.size());
    ArchivedJson archive = archives.iterator().next();
    Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/subtasks/accumulators", archive.getPath());
    compareSubtaskAccumulators(originalTask, archive.getJson());
}
use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
the class FsJobArchivist method archiveJob.
/**
 * Writes the given {@link ArchivedJson}s for a job to the {@link FileSystem} pointed to by
 * {@link JobManagerOptions#ARCHIVE_DIR}.
 *
 * @param rootPath directory to which the archive should be written
 * @param jobId job id
 * @param jsonToArchive collection of path/JSON pairs that should be archived
 * @return path to where the archive was written, or null if no archive was created
 * @throws IOException if the archive could not be written
 */
public static Path archiveJob(Path rootPath, JobID jobId, Collection<ArchivedJson> jsonToArchive) throws IOException {
    try {
        FileSystem fs = rootPath.getFileSystem();
        Path path = new Path(rootPath, jobId.toString());
        OutputStream out = fs.create(path, FileSystem.WriteMode.NO_OVERWRITE);
        try (JsonGenerator gen = jacksonFactory.createGenerator(out, JsonEncoding.UTF8)) {
            gen.writeStartObject();
            gen.writeArrayFieldStart(ARCHIVE);
            for (ArchivedJson archive : jsonToArchive) {
                gen.writeStartObject();
                gen.writeStringField(PATH, archive.getPath());
                gen.writeStringField(JSON, archive.getJson());
                gen.writeEndObject();
            }
            gen.writeEndArray();
            gen.writeEndObject();
        } catch (Exception e) {
            fs.delete(path, false);
            throw e;
        }
        LOG.info("Job {} has been archived at {}.", jobId, path);
        return path;
    } catch (IOException e) {
        LOG.error("Failed to archive job.", e);
        throw e;
    }
}
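A hedged usage sketch of archiveJob follows. It writes a single hand-built path/JSON pair for a fresh JobID below a local directory; the archive directory file:///tmp/flink-history and the sample JSON are illustrative, and the import package of FsJobArchivist is assumed to match Flink's runtime module.
import java.util.Collections;

import org.apache.flink.api.common.JobID;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.history.FsJobArchivist;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;

// Illustrative sketch only: directory and sample path/JSON values are made up;
// archiveJob itself is the method shown above.
public class ArchiveJobExample {

    public static void main(String[] args) throws Exception {
        JobID jobId = new JobID();
        ArchivedJson entry = new ArchivedJson("/jobs/" + jobId, "{\"jid\":\"" + jobId + "\"}");

        // writes one JSON file named after the job id below the root path
        Path written = FsJobArchivist.archiveJob(
            new Path("file:///tmp/flink-history"), jobId, Collections.singletonList(entry));
        System.out.println("archive written to " + written);
    }
}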
use of org.apache.flink.runtime.webmonitor.history.ArchivedJson in project flink by apache.
the class CheckpointConfigHandler method archiveJsonWithPath.
@Override
public Collection<ArchivedJson> archiveJsonWithPath(AccessExecutionGraph graph) throws IOException {
    ResponseBody response;
    try {
        response = createCheckpointConfigInfo(graph);
    } catch (RestHandlerException rhe) {
        response = new ErrorResponseBody(rhe.getMessage());
    }
    String path = CheckpointConfigHeaders.getInstance()
            .getTargetRestEndpointURL()
            .replace(':' + JobIDPathParameter.KEY, graph.getJobID().toString());
    return Collections.singletonList(new ArchivedJson(path, response));
}
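The archive path here is derived from the handler's REST endpoint URL template by replacing the job-id path parameter with the concrete job id. A minimal sketch of that substitution is shown below; the template literal is an assumption standing in for CheckpointConfigHeaders.getInstance().getTargetRestEndpointURL(), and the job id is an example value.
// Minimal sketch of the path substitution performed above: the REST endpoint
// URL template contains a ":jobid" placeholder that is replaced with the
// concrete job id.
public class PathSubstitutionSketch {
    public static void main(String[] args) {
        String template = "/jobs/:jobid/checkpoints/config"; // assumed template
        String jobId = "d8a5bbd9b09e40a6a459a33fc48a6cf1"; // illustrative job id
        String path = template.replace(":jobid", jobId);
        System.out.println(path); // /jobs/d8a5bbd9b09e40a6a459a33fc48a6cf1/checkpoints/config
    }
}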