Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JarDeleteHandler, method handleJsonRequest.
@Override
public String handleJsonRequest(Map<String, String> pathParams, Map<String, String> queryParams, ActorGateway jobManager) throws Exception {
    final String file = pathParams.get("jarid");
    try {
        File[] list = jarDir.listFiles(new FilenameFilter() {

            @Override
            public boolean accept(File dir, String name) {
                return name.equals(file);
            }
        });
        boolean success = false;
        for (File f : list) {
            // although next to impossible for multiple files, we still delete all of them;
            // delete() is evaluated first so it is not short-circuited away once success is true.
            success = f.delete() || success;
        }
        StringWriter writer = new StringWriter();
        JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
        gen.writeStartObject();
        if (!success) {
            // this seems to always fail on Windows.
            gen.writeStringField("error", "The requested jar couldn't be deleted. Please try again.");
        }
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    } catch (Exception e) {
        throw new RuntimeException("Failed to delete jar id " + pathParams.get("jarid") + ": " + e.getMessage(), e);
    }
}
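All of these handlers build their JSON response the same way: a JsonGenerator writes into a StringWriter, a single object is opened and closed, and the buffered string is returned. Below is a minimal standalone sketch of that pattern; it uses a plain Jackson JsonFactory rather than Flink's shared jacksonFactory, and the class and method names are illustrative, not part of Flink.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public class JsonResponseSketch {

    // Builds a small JSON object the same way the handlers above do:
    // open an object, write fields, close the generator, return the buffer.
    public static String errorResponse(String message) throws IOException {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeStringField("error", message);
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    }
}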
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JarRunHandler, method handleJsonRequest.
@Override
public String handleJsonRequest(Map<String, String> pathParams, Map<String, String> queryParams, ActorGateway jobManager) throws Exception {
    try {
        JarActionHandlerConfig config = JarActionHandlerConfig.fromParams(pathParams, queryParams);
        Tuple2<JobGraph, ClassLoader> graph = getJobGraphAndClassLoader(config);
        try {
            graph.f0.uploadUserJars(jobManager, timeout, clientConfig);
        } catch (IOException e) {
            throw new ProgramInvocationException("Failed to upload jar files to the job manager", e);
        }
        try {
            JobClient.submitJobDetached(jobManager, clientConfig, graph.f0, timeout, graph.f1);
        } catch (JobExecutionException e) {
            throw new ProgramInvocationException("Failed to submit the job to the job manager", e);
        }
        StringWriter writer = new StringWriter();
        JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
        gen.writeStartObject();
        gen.writeStringField("jobid", graph.f0.getJobID().toString());
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    } catch (Exception e) {
        return sendError(e);
    }
}
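The response produced above is a single object with a jobid field. A hedged sketch of how a caller might read it back with Jackson's ObjectMapper; the class and method names are illustrative and not part of Flink.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class JobIdParserSketch {

    // Extracts the "jobid" field written by JarRunHandler; returns null if the field is absent.
    public static String parseJobId(String json) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode root = mapper.readTree(json);
        JsonNode jobId = root.get("jobid");
        return jobId == null ? null : jobId.asText();
    }
}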
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobConfigHandler, method createJobConfigJson.
public static String createJobConfigJson(AccessExecutionGraph graph) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    gen.writeStartObject();
    gen.writeStringField("jid", graph.getJobID().toString());
    gen.writeStringField("name", graph.getJobName());
    final ArchivedExecutionConfig summary = graph.getArchivedExecutionConfig();
    if (summary != null) {
        gen.writeObjectFieldStart("execution-config");
        gen.writeStringField("execution-mode", summary.getExecutionMode());
        gen.writeStringField("restart-strategy", summary.getRestartStrategyDescription());
        gen.writeNumberField("job-parallelism", summary.getParallelism());
        gen.writeBooleanField("object-reuse-mode", summary.getObjectReuseEnabled());
        Map<String, String> ucVals = summary.getGlobalJobParameters();
        if (ucVals != null) {
            gen.writeObjectFieldStart("user-config");
            for (Map.Entry<String, String> ucVal : ucVals.entrySet()) {
                gen.writeStringField(ucVal.getKey(), ucVal.getValue());
            }
            gen.writeEndObject();
        }
        gen.writeEndObject();
    }
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
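writeObjectFieldStart writes the field name and opens a nested object in one call, so every use must be balanced by its own writeEndObject, as with the execution-config and user-config objects above. A small self-contained sketch of that nesting pattern; the class name and field names are illustrative.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;

public class NestedObjectSketch {

    // Produces {"outer-field": ..., "nested": {...}}; each writeObjectFieldStart
    // is closed by its own writeEndObject, mirroring createJobConfigJson above.
    public static String nested(String outerValue, Map<String, String> nestedValues) throws IOException {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeStringField("outer-field", outerValue);
        gen.writeObjectFieldStart("nested");
        for (Map.Entry<String, String> entry : nestedValues.entrySet()) {
            gen.writeStringField(entry.getKey(), entry.getValue());
        }
        gen.writeEndObject(); // closes "nested"
        gen.writeEndObject(); // closes the root object
        gen.close();
        return writer.toString();
    }
}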
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobDetailsHandler, method createJobDetailsJson.
public static String createJobDetailsJson(AccessExecutionGraph graph, @Nullable MetricFetcher fetcher) throws IOException {
    final StringWriter writer = new StringWriter();
    final JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    final long now = System.currentTimeMillis();
    gen.writeStartObject();
    // basic info
    gen.writeStringField("jid", graph.getJobID().toString());
    gen.writeStringField("name", graph.getJobName());
    gen.writeBooleanField("isStoppable", graph.isStoppable());
    gen.writeStringField("state", graph.getState().name());
    // times and duration
    final long jobStartTime = graph.getStatusTimestamp(JobStatus.CREATED);
    final long jobEndTime = graph.getState().isGloballyTerminalState() ? graph.getStatusTimestamp(graph.getState()) : -1L;
    gen.writeNumberField("start-time", jobStartTime);
    gen.writeNumberField("end-time", jobEndTime);
    gen.writeNumberField("duration", (jobEndTime > 0 ? jobEndTime : now) - jobStartTime);
    gen.writeNumberField("now", now);
    // timestamps
    gen.writeObjectFieldStart("timestamps");
    for (JobStatus status : JobStatus.values()) {
        gen.writeNumberField(status.name(), graph.getStatusTimestamp(status));
    }
    gen.writeEndObject();
    // job vertices
    int[] jobVerticesPerState = new int[ExecutionState.values().length];
    gen.writeArrayFieldStart("vertices");
    for (AccessExecutionJobVertex ejv : graph.getVerticesTopologically()) {
        int[] tasksPerState = new int[ExecutionState.values().length];
        long startTime = Long.MAX_VALUE;
        long endTime = 0;
        boolean allFinished = true;
        for (AccessExecutionVertex vertex : ejv.getTaskVertices()) {
            final ExecutionState state = vertex.getExecutionState();
            tasksPerState[state.ordinal()]++;
            // take the earliest start time
            long started = vertex.getStateTimestamp(ExecutionState.DEPLOYING);
            if (started > 0) {
                startTime = Math.min(startTime, started);
            }
            allFinished &= state.isTerminal();
            endTime = Math.max(endTime, vertex.getStateTimestamp(state));
        }
        long duration;
        if (startTime < Long.MAX_VALUE) {
            if (allFinished) {
                duration = endTime - startTime;
            } else {
                endTime = -1L;
                duration = now - startTime;
            }
        } else {
            startTime = -1L;
            endTime = -1L;
            duration = -1L;
        }
        ExecutionState jobVertexState = ExecutionJobVertex.getAggregateJobVertexState(tasksPerState, ejv.getParallelism());
        jobVerticesPerState[jobVertexState.ordinal()]++;
        gen.writeStartObject();
        gen.writeStringField("id", ejv.getJobVertexId().toString());
        gen.writeStringField("name", ejv.getName());
        gen.writeNumberField("parallelism", ejv.getParallelism());
        gen.writeStringField("status", jobVertexState.name());
        gen.writeNumberField("start-time", startTime);
        gen.writeNumberField("end-time", endTime);
        gen.writeNumberField("duration", duration);
        gen.writeObjectFieldStart("tasks");
        for (ExecutionState state : ExecutionState.values()) {
            gen.writeNumberField(state.name(), tasksPerState[state.ordinal()]);
        }
        gen.writeEndObject();
        MutableIOMetrics counts = new MutableIOMetrics();
        for (AccessExecutionVertex vertex : ejv.getTaskVertices()) {
            counts.addIOMetrics(vertex.getCurrentExecutionAttempt(), fetcher, graph.getJobID().toString(), ejv.getJobVertexId().toString());
        }
        counts.writeIOMetricsAsJson(gen);
        gen.writeEndObject();
    }
    gen.writeEndArray();
    gen.writeObjectFieldStart("status-counts");
    for (ExecutionState state : ExecutionState.values()) {
        gen.writeNumberField(state.name(), jobVerticesPerState[state.ordinal()]);
    }
    gen.writeEndObject();
    gen.writeFieldName("plan");
    gen.writeRawValue(graph.getJsonPlan());
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
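The plan field is written with writeFieldName followed by writeRawValue, which splices the already-serialized JSON plan into the output without quoting or escaping it. A minimal sketch of that technique; the class name and the sample plan string are illustrative.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public class RawValueSketch {

    // Embeds a pre-serialized JSON fragment as the value of "plan".
    // writeRawValue performs no validation or escaping, so the caller
    // must ensure the fragment is itself valid JSON.
    public static String wrapPlan(String planJson) throws IOException {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeFieldName("plan");
        gen.writeRawValue(planJson);
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    }

    public static void main(String[] args) throws IOException {
        // Prints {"plan":{"nodes":[]}}
        System.out.println(wrapPlan("{\"nodes\":[]}"));
    }
}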
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobExceptionsHandler, method createJobExceptionsJson.
public static String createJobExceptionsJson(AccessExecutionGraph graph) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    gen.writeStartObject();
    // most important is the root failure cause
    String rootException = graph.getFailureCauseAsString();
    if (rootException != null && !rootException.equals(ExceptionUtils.STRINGIFIED_NULL_EXCEPTION)) {
        gen.writeStringField("root-exception", rootException);
    }
    // we additionally collect all exceptions (up to a limit) that occurred in the individual tasks
    gen.writeArrayFieldStart("all-exceptions");
    int numExceptionsSoFar = 0;
    boolean truncated = false;
    for (AccessExecutionVertex task : graph.getAllExecutionVertices()) {
        String t = task.getFailureCauseAsString();
        if (t != null && !t.equals(ExceptionUtils.STRINGIFIED_NULL_EXCEPTION)) {
            if (numExceptionsSoFar >= MAX_NUMBER_EXCEPTION_TO_REPORT) {
                truncated = true;
                break;
            }
            TaskManagerLocation location = task.getCurrentAssignedResourceLocation();
            String locationString = location != null ? location.getFQDNHostname() + ':' + location.dataPort() : "(unassigned)";
            gen.writeStartObject();
            gen.writeStringField("exception", t);
            gen.writeStringField("task", task.getTaskNameWithSubtaskIndex());
            gen.writeStringField("location", locationString);
            gen.writeEndObject();
            numExceptionsSoFar++;
        }
    }
    gen.writeEndArray();
    gen.writeBooleanField("truncated", truncated);
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
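The all-exceptions array above is capped at MAX_NUMBER_EXCEPTION_TO_REPORT, and the truncated flag tells the client whether entries were dropped. A hedged sketch of the same cap-and-flag pattern; the class name, field names, and parameters are illustrative.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;
import java.util.List;

public class TruncatedArraySketch {

    // Writes at most 'limit' entries into an array field and records whether
    // anything was cut off, mirroring the all-exceptions/truncated pair above.
    public static String writeLimited(List<String> messages, int limit) throws IOException {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeArrayFieldStart("entries");
        int written = 0;
        boolean truncated = false;
        for (String message : messages) {
            if (written >= limit) {
                truncated = true;
                break;
            }
            gen.writeStartObject();
            gen.writeStringField("message", message);
            gen.writeEndObject();
            written++;
        }
        gen.writeEndArray();
        gen.writeBooleanField("truncated", truncated);
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    }
}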