Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class ClusterOverviewHandler, method handleJsonRequest.
@Override
public String handleJsonRequest(Map<String, String> pathParams, Map<String, String> queryParams, ActorGateway jobManager) throws Exception {
    // we need no parameters, get all requests
    try {
        if (jobManager != null) {
            Future<Object> future = jobManager.ask(RequestStatusOverview.getInstance(), timeout);
            StatusOverview overview = (StatusOverview) Await.result(future, timeout);
            StringWriter writer = new StringWriter();
            JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
            gen.writeStartObject();
            gen.writeNumberField("taskmanagers", overview.getNumTaskManagersConnected());
            gen.writeNumberField("slots-total", overview.getNumSlotsTotal());
            gen.writeNumberField("slots-available", overview.getNumSlotsAvailable());
            gen.writeNumberField("jobs-running", overview.getNumJobsRunningOrPending());
            gen.writeNumberField("jobs-finished", overview.getNumJobsFinished());
            gen.writeNumberField("jobs-cancelled", overview.getNumJobsCancelled());
            gen.writeNumberField("jobs-failed", overview.getNumJobsFailed());
            gen.writeStringField("flink-version", version);
            if (!commitID.equals(EnvironmentInformation.UNKNOWN)) {
                gen.writeStringField("flink-commit", commitID);
            }
            gen.writeEndObject();
            gen.close();
            return writer.toString();
        } else {
            throw new Exception("No connection to the leading JobManager.");
        }
    } catch (Exception e) {
        throw new RuntimeException("Failed to fetch list of all running jobs: " + e.getMessage(), e);
    }
}
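The generator pattern used here is plain Jackson streaming; as far as this snippet shows, the shaded JsonFactory.jacksonFactory is just a shared factory instance. Below is a minimal, self-contained sketch of the same pattern against the unshaded com.fasterxml.jackson.core API, with made-up values standing in for the StatusOverview fields.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;

public class OverviewJsonSketch {
    public static void main(String[] args) throws Exception {
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeNumberField("taskmanagers", 3);        // hypothetical values, not real cluster data
        gen.writeNumberField("slots-total", 12);
        gen.writeNumberField("slots-available", 4);
        gen.writeNumberField("jobs-running", 2);
        gen.writeStringField("flink-version", "1.3.2"); // hypothetical version string
        gen.writeEndObject();
        gen.close();
        // Prints: {"taskmanagers":3,"slots-total":12,"slots-available":4,"jobs-running":2,"flink-version":"1.3.2"}
        System.out.println(writer.toString());
    }
}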
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class CurrentJobsOverviewHandler, method handleJsonRequest.
@Override
public String handleJsonRequest(Map<String, String> pathParams, Map<String, String> queryParams, ActorGateway jobManager) throws Exception {
    try {
        if (jobManager != null) {
            Future<Object> future = jobManager.ask(new RequestJobDetails(includeRunningJobs, includeFinishedJobs), timeout);
            MultipleJobsDetails result = (MultipleJobsDetails) Await.result(future, timeout);
            final long now = System.currentTimeMillis();
            StringWriter writer = new StringWriter();
            JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
            gen.writeStartObject();
            if (includeRunningJobs && includeFinishedJobs) {
                gen.writeArrayFieldStart("running");
                for (JobDetails detail : result.getRunningJobs()) {
                    writeJobDetailOverviewAsJson(detail, gen, now);
                }
                gen.writeEndArray();
                gen.writeArrayFieldStart("finished");
                for (JobDetails detail : result.getFinishedJobs()) {
                    writeJobDetailOverviewAsJson(detail, gen, now);
                }
                gen.writeEndArray();
            } else {
                gen.writeArrayFieldStart("jobs");
                for (JobDetails detail : includeRunningJobs ? result.getRunningJobs() : result.getFinishedJobs()) {
                    writeJobDetailOverviewAsJson(detail, gen, now);
                }
                gen.writeEndArray();
            }
            gen.writeEndObject();
            gen.close();
            return writer.toString();
        } else {
            throw new Exception("No connection to the leading JobManager.");
        }
    } catch (Exception e) {
        throw new Exception("Failed to fetch the status overview: " + e.getMessage(), e);
    }
}
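Whichever branch is taken, the array fields are built the same way: writeArrayFieldStart opens a named array and each element is written as a complete object. A short sketch of that pattern with invented job names follows (the real writeJobDetailOverviewAsJson writes far more fields per job).

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;

public class JobsArraySketch {
    public static void main(String[] args) throws Exception {
        String[] runningJobs = {"wordcount", "sessionization"}; // hypothetical job names
        StringWriter writer = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(writer);
        gen.writeStartObject();
        gen.writeArrayFieldStart("running"); // one nested object per job
        for (String name : runningJobs) {
            gen.writeStartObject();
            gen.writeStringField("name", name);
            gen.writeEndObject();
        }
        gen.writeEndArray();
        gen.writeEndObject();
        gen.close();
        // Prints: {"running":[{"name":"wordcount"},{"name":"sessionization"}]}
        System.out.println(writer.toString());
    }
}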
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobVertexAccumulatorsHandler, method createVertexAccumulatorsJson.
public static String createVertexAccumulatorsJson(AccessExecutionJobVertex jobVertex) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    StringifiedAccumulatorResult[] accs = jobVertex.getAggregatedUserAccumulatorsStringified();
    gen.writeStartObject();
    gen.writeStringField("id", jobVertex.getJobVertexId().toString());
    gen.writeArrayFieldStart("user-accumulators");
    for (StringifiedAccumulatorResult acc : accs) {
        gen.writeStartObject();
        gen.writeStringField("name", acc.getName());
        gen.writeStringField("type", acc.getType());
        gen.writeStringField("value", acc.getValue());
        gen.writeEndObject();
    }
    gen.writeEndArray();
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
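To see the shape this method produces, the returned string can be parsed back with jackson-databind. The sketch below uses a hand-written stand-in for the handler output; the accumulator name, type, and value are invented.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class AccumulatorsJsonCheck {
    public static void main(String[] args) throws Exception {
        // Hand-written stand-in for createVertexAccumulatorsJson(...) output.
        String json = "{\"id\":\"vertex-1\",\"user-accumulators\":"
                + "[{\"name\":\"records\",\"type\":\"LongCounter\",\"value\":\"42\"}]}";
        JsonNode root = new ObjectMapper().readTree(json);
        System.out.println(root.get("id").asText()); // vertex-1
        for (JsonNode acc : root.get("user-accumulators")) {
            System.out.println(acc.get("name").asText() + " = " + acc.get("value").asText());
        }
    }
}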
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobVertexBackPressureHandler, method handleRequest.
@Override
public String handleRequest(AccessExecutionJobVertex accessJobVertex, Map<String, String> params) throws Exception {
    if (accessJobVertex instanceof ArchivedExecutionJobVertex) {
        return "";
    }
    ExecutionJobVertex jobVertex = (ExecutionJobVertex) accessJobVertex;
    try (StringWriter writer = new StringWriter();
            JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer)) {
        gen.writeStartObject();
        Option<OperatorBackPressureStats> statsOption = backPressureStatsTracker.getOperatorBackPressureStats(jobVertex);
        if (statsOption.isDefined()) {
            OperatorBackPressureStats stats = statsOption.get();
            // Check whether we need to refresh
            if (refreshInterval <= System.currentTimeMillis() - stats.getEndTimestamp()) {
                backPressureStatsTracker.triggerStackTraceSample(jobVertex);
                gen.writeStringField("status", "deprecated");
            } else {
                gen.writeStringField("status", "ok");
            }
            gen.writeStringField("backpressure-level", getBackPressureLevel(stats.getMaxBackPressureRatio()));
            gen.writeNumberField("end-timestamp", stats.getEndTimestamp());
            // Sub tasks
            gen.writeArrayFieldStart("subtasks");
            int numSubTasks = stats.getNumberOfSubTasks();
            for (int i = 0; i < numSubTasks; i++) {
                double ratio = stats.getBackPressureRatio(i);
                gen.writeStartObject();
                gen.writeNumberField("subtask", i);
                gen.writeStringField("backpressure-level", getBackPressureLevel(ratio));
                gen.writeNumberField("ratio", ratio);
                gen.writeEndObject();
            }
            gen.writeEndArray();
        } else {
            backPressureStatsTracker.triggerStackTraceSample(jobVertex);
            gen.writeStringField("status", "deprecated");
        }
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    }
}
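The <= comparison that decides between "ok" and "deprecated" is easy to misread: the stats are considered stale once they are at least refreshInterval milliseconds old. A tiny sketch of just that decision, with invented timestamps:

public class BackPressureStatusSketch {

    // Mirrors the handler's freshness check: "ok" while the sample is newer than
    // refreshInterval, "deprecated" once it is that old or older (all values in milliseconds).
    static String statusFor(long refreshInterval, long statsEndTimestamp, long now) {
        return refreshInterval <= now - statsEndTimestamp ? "deprecated" : "ok";
    }

    public static void main(String[] args) {
        long now = 100_000L;
        System.out.println(statusFor(60_000L, 70_000L, now)); // ok: sample is 30 s old
        System.out.println(statusFor(60_000L, 30_000L, now)); // deprecated: sample is 70 s old
    }
}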
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class SubtaskExecutionAttemptDetailsHandler, method createAttemptDetailsJson.
public static String createAttemptDetailsJson(AccessExecution execAttempt, String jobID, String vertexID, @Nullable MetricFetcher fetcher) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    final ExecutionState status = execAttempt.getState();
    final long now = System.currentTimeMillis();
    TaskManagerLocation location = execAttempt.getAssignedResourceLocation();
    String locationString = location == null ? "(unassigned)" : location.getHostname();
    long startTime = execAttempt.getStateTimestamp(ExecutionState.DEPLOYING);
    if (startTime == 0) {
        startTime = -1;
    }
    long endTime = status.isTerminal() ? execAttempt.getStateTimestamp(status) : -1;
    long duration = startTime > 0 ? ((endTime > 0 ? endTime : now) - startTime) : -1;
    gen.writeStartObject();
    gen.writeNumberField("subtask", execAttempt.getParallelSubtaskIndex());
    gen.writeStringField("status", status.name());
    gen.writeNumberField("attempt", execAttempt.getAttemptNumber());
    gen.writeStringField("host", locationString);
    gen.writeNumberField("start-time", startTime);
    gen.writeNumberField("end-time", endTime);
    gen.writeNumberField("duration", duration);
    MutableIOMetrics counts = new MutableIOMetrics();
    counts.addIOMetrics(execAttempt, fetcher, jobID, vertexID);
    counts.writeIOMetricsAsJson(gen);
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
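The densest part of this method is the start/end/duration arithmetic. The sketch below isolates it with invented timestamps; a startTime of -1 stands for an attempt that never reached DEPLOYING, and a still-running attempt's duration is measured against now.

public class AttemptTimesSketch {

    // Same nested ternary as above: no start time means no duration,
    // no end time means the attempt is still running and is measured against "now".
    static long duration(long startTime, long endTime, long now) {
        return startTime > 0 ? ((endTime > 0 ? endTime : now) - startTime) : -1;
    }

    public static void main(String[] args) {
        long now = 50_000L;
        System.out.println(duration(10_000L, 40_000L, now)); // 30000: finished attempt
        System.out.println(duration(10_000L, -1L, now));     // 40000: still running
        System.out.println(duration(-1L, -1L, now));         // -1: never deployed
    }
}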