Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class JobVertexBackPressureHandler, method handleRequest.
@Override
public String handleRequest(AccessExecutionJobVertex accessJobVertex, Map<String, String> params) throws Exception {
    if (accessJobVertex instanceof ArchivedExecutionJobVertex) {
        return "";
    }
    ExecutionJobVertex jobVertex = (ExecutionJobVertex) accessJobVertex;
    try (StringWriter writer = new StringWriter();
            JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer)) {
        gen.writeStartObject();
        Option<OperatorBackPressureStats> statsOption = backPressureStatsTracker.getOperatorBackPressureStats(jobVertex);
        if (statsOption.isDefined()) {
            OperatorBackPressureStats stats = statsOption.get();
            // Check whether we need to refresh
            if (refreshInterval <= System.currentTimeMillis() - stats.getEndTimestamp()) {
                backPressureStatsTracker.triggerStackTraceSample(jobVertex);
                gen.writeStringField("status", "deprecated");
            } else {
                gen.writeStringField("status", "ok");
            }
            gen.writeStringField("backpressure-level", getBackPressureLevel(stats.getMaxBackPressureRatio()));
            gen.writeNumberField("end-timestamp", stats.getEndTimestamp());
            // Sub tasks
            gen.writeArrayFieldStart("subtasks");
            int numSubTasks = stats.getNumberOfSubTasks();
            for (int i = 0; i < numSubTasks; i++) {
                double ratio = stats.getBackPressureRatio(i);
                gen.writeStartObject();
                gen.writeNumberField("subtask", i);
                gen.writeStringField("backpressure-level", getBackPressureLevel(ratio));
                gen.writeNumberField("ratio", ratio);
                gen.writeEndObject();
            }
            gen.writeEndArray();
        } else {
            backPressureStatsTracker.triggerStackTraceSample(jobVertex);
            gen.writeStringField("status", "deprecated");
        }
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    }
}
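All of the Flink handlers in this section follow the same pattern: obtain a JsonGenerator from a shared JsonFactory over a StringWriter, write the fields, close the generator so buffered output is flushed, and return the writer's contents. A minimal standalone sketch of that pattern (the class and method names here are illustrative, not Flink's):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public class BackPressureJsonSketch {

    // A single JsonFactory instance is thread-safe and can be shared across calls.
    private static final JsonFactory FACTORY = new JsonFactory();

    // Writes an object such as {"status":"ok","backpressure-level":"low"}.
    public static String writeStatus(String status, String level) throws IOException {
        try (StringWriter writer = new StringWriter();
                JsonGenerator gen = FACTORY.createGenerator(writer)) {
            gen.writeStartObject();
            gen.writeStringField("status", status);
            gen.writeStringField("backpressure-level", level);
            gen.writeEndObject();
            // Close before reading the writer so any buffered output is flushed,
            // mirroring the explicit gen.close() in the handler above.
            gen.close();
            return writer.toString();
        }
    }
}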
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class SubtaskExecutionAttemptDetailsHandler, method createAttemptDetailsJson.
public static String createAttemptDetailsJson(AccessExecution execAttempt, String jobID, String vertexID, @Nullable MetricFetcher fetcher) throws IOException {
    StringWriter writer = new StringWriter();
    JsonGenerator gen = JsonFactory.jacksonFactory.createGenerator(writer);
    final ExecutionState status = execAttempt.getState();
    final long now = System.currentTimeMillis();
    TaskManagerLocation location = execAttempt.getAssignedResourceLocation();
    String locationString = location == null ? "(unassigned)" : location.getHostname();
    long startTime = execAttempt.getStateTimestamp(ExecutionState.DEPLOYING);
    if (startTime == 0) {
        startTime = -1;
    }
    long endTime = status.isTerminal() ? execAttempt.getStateTimestamp(status) : -1;
    long duration = startTime > 0 ? ((endTime > 0 ? endTime : now) - startTime) : -1;
    gen.writeStartObject();
    gen.writeNumberField("subtask", execAttempt.getParallelSubtaskIndex());
    gen.writeStringField("status", status.name());
    gen.writeNumberField("attempt", execAttempt.getAttemptNumber());
    gen.writeStringField("host", locationString);
    gen.writeNumberField("start-time", startTime);
    gen.writeNumberField("end-time", endTime);
    gen.writeNumberField("duration", duration);
    MutableIOMetrics counts = new MutableIOMetrics();
    counts.addIOMetrics(execAttempt, fetcher, jobID, vertexID);
    counts.writeIOMetricsAsJson(gen);
    gen.writeEndObject();
    gen.close();
    return writer.toString();
}
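Unlike the back-pressure handler, this method does not wrap the generator in try-with-resources, so an exception between createGenerator and close leaves the generator open. A sketch of the same scalar fields written with try-with-resources (the Flink-specific types and the metrics block are omitted; names here are illustrative):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public final class AttemptDetailsSketch {

    private static final JsonFactory FACTORY = new JsonFactory();

    // Writes only the scalar attempt fields shown above.
    public static String writeAttempt(int subtask, String status, int attempt,
            String host, long startTime, long endTime, long duration) throws IOException {
        StringWriter writer = new StringWriter();
        // try-with-resources guarantees the generator is closed even if a write fails.
        try (JsonGenerator gen = FACTORY.createGenerator(writer)) {
            gen.writeStartObject();
            gen.writeNumberField("subtask", subtask);
            gen.writeStringField("status", status);
            gen.writeNumberField("attempt", attempt);
            gen.writeStringField("host", host);
            gen.writeNumberField("start-time", startTime);
            gen.writeNumberField("end-time", endTime);
            gen.writeNumberField("duration", duration);
            gen.writeEndObject();
        }
        return writer.toString();
    }
}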
Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.
The class CurrentJobsOverviewHandlerTest, method testJsonGeneration.
@Test
public void testJsonGeneration() throws Exception {
    AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob();
    JobDetails expectedDetails = WebMonitorUtils.createDetailsForJob(originalJob);
    StringWriter writer = new StringWriter();
    try (JsonGenerator gen = ArchivedJobGenerationUtils.jacksonFactory.createGenerator(writer)) {
        CurrentJobsOverviewHandler.writeJobDetailOverviewAsJson(expectedDetails, gen, 0);
    }
    compareJobOverview(expectedDetails, writer.toString());
}
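The compareJobOverview helper is not shown here. A common way to compare generated JSON in tests is to parse both documents into trees so that formatting and field order do not matter; a sketch using jackson-databind (this is an assumed helper for illustration, not the Flink test utility):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import static org.junit.Assert.assertEquals;

public final class JsonAssertSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Compares two JSON documents structurally rather than as raw strings.
    public static void assertJsonEquals(String expected, String actual) throws Exception {
        JsonNode expectedTree = MAPPER.readTree(expected);
        JsonNode actualTree = MAPPER.readTree(actual);
        assertEquals(expectedTree, actualTree);
    }
}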
Use of com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.
The class Configuration, method dumpConfiguration.
/**
 * Writes out all properties and their attributes (final and resource) to
 * the given {@link Writer}, the format of the output would be,
 *
 * <pre>
 * { "properties" :
 *     [ { key : "key1",
 *         value : "value1",
 *         isFinal : "key1.isFinal",
 *         resource : "key1.resource" },
 *       { key : "key2",
 *         value : "value2",
 *         isFinal : "key2.isFinal",
 *         resource : "key2.resource" }
 *     ]
 * }
 * </pre>
 *
 * It does not output the properties of the configuration object which
 * is loaded from an input stream.
 * <p>
 *
 * @param config the configuration
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out) throws IOException {
    JsonFactory dumpFactory = new JsonFactory();
    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
    dumpGenerator.writeStartObject();
    dumpGenerator.writeFieldName("properties");
    dumpGenerator.writeStartArray();
    dumpGenerator.flush();
    synchronized (config) {
        for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
            appendJSONProperty(dumpGenerator, config, item.getKey().toString());
        }
    }
    dumpGenerator.writeEndArray();
    dumpGenerator.writeEndObject();
    dumpGenerator.flush();
}
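Given the documented shape of the dump, the output can be read back with jackson-databind; a sketch that prints each property's key and value (assuming each array entry carries the key/value fields shown in the Javadoc above):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.hadoop.conf.Configuration;

import java.io.IOException;
import java.io.StringWriter;

public final class DumpConfigurationExample {

    // Dumps a Configuration to JSON and iterates over the "properties" array.
    public static void printKeys(Configuration conf) throws IOException {
        StringWriter out = new StringWriter();
        Configuration.dumpConfiguration(conf, out);

        JsonNode root = new ObjectMapper().readTree(out.toString());
        for (JsonNode property : root.get("properties")) {
            System.out.println(property.get("key").asText() + " = " + property.get("value").asText());
        }
    }
}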
Use of com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.
The class JMXJsonServlet, method doGet.
/**
 * Process a GET request for the specified resource.
 *
 * @param request
 *          The servlet request we are processing
 * @param response
 *          The servlet response we are creating
 */
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) {
    try {
        // If user is a static user and auth Type is null, that means
        // there is a non-security environment and no need authorization,
        // otherwise, do the authorization.
        final ServletContext servletContext = getServletContext();
        if (!HttpServer2.isStaticUserAndNoneAuthType(servletContext, request) && !isInstrumentationAccessAllowed(request, response)) {
            return;
        }
        JsonGenerator jg = null;
        PrintWriter writer = null;
        try {
            writer = response.getWriter();
            response.setContentType("application/json; charset=utf8");
            response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
            response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
            jg = jsonFactory.createGenerator(writer);
            jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
            jg.useDefaultPrettyPrinter();
            jg.writeStartObject();
            // query per mbean attribute
            String getmethod = request.getParameter("get");
            if (getmethod != null) {
                String[] splitStrings = getmethod.split("\\:\\:");
                if (splitStrings.length != 2) {
                    jg.writeStringField("result", "ERROR");
                    jg.writeStringField("message", "query format is not as expected.");
                    jg.flush();
                    response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
                    return;
                }
                listBeans(jg, new ObjectName(splitStrings[0]), splitStrings[1], response);
                return;
            }
            // query per mbean
            String qry = request.getParameter("qry");
            if (qry == null) {
                qry = "*:*";
            }
            listBeans(jg, new ObjectName(qry), null, response);
        } finally {
            if (jg != null) {
                jg.close();
            }
            if (writer != null) {
                writer.close();
            }
        }
    } catch (IOException e) {
        LOG.error("Caught an exception while processing JMX request", e);
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } catch (MalformedObjectNameException e) {
        LOG.error("Caught an exception while processing JMX request", e);
        response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
    }
}
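Disabling AUTO_CLOSE_TARGET is what allows the finally block to close the generator without also closing the response writer, which the servlet closes separately. A standalone sketch of that Jackson feature (using System.out as the target purely for illustration):

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;

public class AutoCloseTargetExample {

    public static void main(String[] args) throws IOException {
        JsonFactory factory = new JsonFactory();
        JsonGenerator gen = factory.createGenerator(System.out, JsonEncoding.UTF8);

        // By default, closing a generator also closes the underlying OutputStream or Writer.
        // Disabling AUTO_CLOSE_TARGET leaves the target open so its owner controls its lifecycle.
        gen.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
        gen.useDefaultPrettyPrinter();

        gen.writeStartObject();
        gen.writeStringField("result", "OK");
        gen.writeEndObject();
        gen.close();

        // System.out is still open here; without disabling the feature, gen.close()
        // would have closed it as well.
        System.out.println();
    }
}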