Example use of com.fasterxml.jackson.core.JsonGenerator in the Apache Flink project:
class JsonMapper, method getOptimizerPropertiesJson.
/**
 * Renders the optimizer properties of the given plan node as a JSON object string.
 *
 * <p>The resulting object contains up to five arrays of {name, value} property
 * entries: "global_properties", "local_properties", "estimates", "costs" and
 * "compiler_hints". Sections whose source data is {@code null} are omitted.
 *
 * @param jsonFactory factory used to create the JSON generator
 * @param node the plan node whose optimizer properties are serialized
 * @return the JSON object as a string, or the empty object {@code "{}"} if
 *         serialization fails for any reason (best-effort: callers expect
 *         valid JSON, never an exception)
 */
public static String getOptimizerPropertiesJson(JsonFactory jsonFactory, PlanNode node) {
    final StringWriter writer = new StringWriter(256);
    // try-with-resources guarantees the generator is closed (and its buffered
    // output flushed into 'writer') before we read writer.toString().
    try (JsonGenerator gen = jsonFactory.createGenerator(writer)) {
        final OptimizerNode optNode = node.getOptimizerNode();
        gen.writeStartObject();
        // global properties
        if (node.getGlobalProperties() != null) {
            GlobalProperties gp = node.getGlobalProperties();
            gen.writeArrayFieldStart("global_properties");
            addProperty(gen, "Partitioning", gp.getPartitioning().name());
            if (gp.getPartitioningFields() != null) {
                addProperty(gen, "Partitioned on", gp.getPartitioningFields().toString());
            }
            if (gp.getPartitioningOrdering() != null) {
                addProperty(gen, "Partitioning Order", gp.getPartitioningOrdering().toString());
            } else {
                addProperty(gen, "Partitioning Order", "(none)");
            }
            addUniquenessProperty(gen, optNode);
            gen.writeEndArray();
        }
        // local properties
        if (node.getLocalProperties() != null) {
            LocalProperties lp = node.getLocalProperties();
            gen.writeArrayFieldStart("local_properties");
            if (lp.getOrdering() != null) {
                addProperty(gen, "Order", lp.getOrdering().toString());
            } else {
                addProperty(gen, "Order", "(none)");
            }
            if (lp.getGroupedFields() != null && lp.getGroupedFields().size() > 0) {
                addProperty(gen, "Grouped on", lp.getGroupedFields().toString());
            } else {
                addProperty(gen, "Grouping", "not grouped");
            }
            addUniquenessProperty(gen, optNode);
            gen.writeEndArray();
        }
        // output size estimates; -1 is the optimizer's "unknown" sentinel
        gen.writeArrayFieldStart("estimates");
        addProperty(gen, "Est. Output Size", optNode.getEstimatedOutputSize() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedOutputSize(), "B"));
        addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords()));
        gen.writeEndArray();
        // output node cost
        if (node.getNodeCosts() != null) {
            gen.writeArrayFieldStart("costs");
            addProperty(gen, "Network", node.getNodeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Disk I/O", node.getNodeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getDiskCost(), "B"));
            addProperty(gen, "CPU", node.getNodeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getCpuCost(), ""));
            addProperty(gen, "Cumulative Network", node.getCumulativeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Cumulative Disk I/O", node.getCumulativeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getDiskCost(), "B"));
            addProperty(gen, "Cumulative CPU", node.getCumulativeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getCpuCost(), ""));
            gen.writeEndArray();
        }
        // compiler hints; a hint equal to the default is reported as "(none)"
        if (optNode.getOperator().getCompilerHints() != null) {
            CompilerHints hints = optNode.getOperator().getCompilerHints();
            CompilerHints defaults = new CompilerHints();
            String size = hints.getOutputSize() == defaults.getOutputSize() ? "(none)" : String.valueOf(hints.getOutputSize());
            String card = hints.getOutputCardinality() == defaults.getOutputCardinality() ? "(none)" : String.valueOf(hints.getOutputCardinality());
            String width = hints.getAvgOutputRecordSize() == defaults.getAvgOutputRecordSize() ? "(none)" : String.valueOf(hints.getAvgOutputRecordSize());
            String filter = hints.getFilterFactor() == defaults.getFilterFactor() ? "(none)" : String.valueOf(hints.getFilterFactor());
            gen.writeArrayFieldStart("compiler_hints");
            addProperty(gen, "Output Size (bytes)", size);
            addProperty(gen, "Output Cardinality", card);
            addProperty(gen, "Avg. Output Record Size (bytes)", width);
            addProperty(gen, "Filter Factor", filter);
            gen.writeEndArray();
        }
        gen.writeEndObject();
    } catch (Exception e) {
        // Deliberate best-effort: the JSON is informational only, so any
        // failure degrades to an empty object instead of propagating.
        return "{}";
    }
    return writer.toString();
}

/**
 * Writes the "Uniqueness" property for the given optimizer node; emitted
 * identically in both the global and local property sections.
 */
private static void addUniquenessProperty(JsonGenerator gen, OptimizerNode optNode) throws IOException {
    if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
        addProperty(gen, "Uniqueness", "not unique");
    } else {
        addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
    }
}
Example use of com.fasterxml.jackson.core.JsonGenerator in the Apache Hadoop project:
class StartupProgressServlet, method doGet.
/**
 * Streams the namenode startup progress as a JSON document: overall elapsed
 * time and completion percentage, followed by one object per phase, each
 * containing its steps.
 */
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    resp.setContentType("application/json; charset=UTF-8");
    StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(getServletContext());
    StartupProgressView view = prog.createView();
    JsonGenerator gen = new JsonFactory().createGenerator(resp.getWriter());
    try {
        gen.writeStartObject();
        gen.writeNumberField(ELAPSED_TIME, view.getElapsedTime());
        gen.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete());
        gen.writeArrayFieldStart(PHASES);
        for (Phase phase : view.getPhases()) {
            writePhase(gen, view, phase);
        }
        gen.writeEndArray();
        gen.writeEndObject();
    } finally {
        // Closes the generator (and flushes the response writer) even on error.
        IOUtils.cleanup(LOG, gen);
    }
}

/** Emits a single phase object, including its nested array of steps. */
private void writePhase(JsonGenerator gen, StartupProgressView view, Phase phase) throws IOException {
    gen.writeStartObject();
    gen.writeStringField(NAME, phase.getName());
    gen.writeStringField(DESC, phase.getDescription());
    gen.writeStringField(STATUS, view.getStatus(phase).toString());
    gen.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase));
    gen.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase));
    // File/size are optional per phase; helpers skip absent values.
    writeStringFieldIfNotNull(gen, FILE, view.getFile(phase));
    writeNumberFieldIfDefined(gen, SIZE, view.getSize(phase));
    gen.writeArrayFieldStart(STEPS);
    for (Step step : view.getSteps(phase)) {
        writeStep(gen, view, phase, step);
    }
    gen.writeEndArray();
    gen.writeEndObject();
}

/** Emits a single step object within the given phase. */
private void writeStep(JsonGenerator gen, StartupProgressView view, Phase phase, Step step) throws IOException {
    gen.writeStartObject();
    StepType type = step.getType();
    // Name/description come from the step type and may be absent.
    if (type != null) {
        gen.writeStringField(NAME, type.getName());
        gen.writeStringField(DESC, type.getDescription());
    }
    gen.writeNumberField(COUNT, view.getCount(phase, step));
    writeStringFieldIfNotNull(gen, FILE, step.getFile());
    writeNumberFieldIfDefined(gen, SIZE, step.getSize());
    gen.writeNumberField(TOTAL, view.getTotal(phase, step));
    gen.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase, step));
    gen.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase, step));
    gen.writeEndObject();
}
Example use of com.fasterxml.jackson.core.JsonGenerator in the Apache Hadoop project:
class Anonymizer, method anonymizeTrace.
// anonymize the job trace file
private void anonymizeTrace() throws Exception {
if (anonymizeTrace) {
System.out.println("Anonymizing trace file: " + inputTracePath);
JobTraceReader reader = null;
JsonGenerator outGen = null;
Configuration conf = getConf();
try {
// create a generator
outGen = createJsonGenerator(conf, outputTracePath);
// define the input trace reader
reader = new JobTraceReader(inputTracePath, conf);
// read the plain unanonymized logged job
LoggedJob job = reader.getNext();
while (job != null) {
// write it via an anonymizing channel
outGen.writeObject(job);
// read the next job
job = reader.getNext();
}
System.out.println("Anonymized trace file: " + outputTracePath);
} finally {
if (outGen != null) {
outGen.close();
}
if (reader != null) {
reader.close();
}
}
}
}
Example use of com.fasterxml.jackson.core.JsonGenerator in the Apache Hadoop project:
class TestLogInfo, method writeDomainLeaveOpen.
/**
 * Appends one timeline domain to the shared domain log file as a single JSON
 * value followed by a newline, flushing but deliberately leaving the stream
 * open so further writes can append to it.
 */
private void writeDomainLeaveOpen(TimelineDomain domain, Path logPath) throws IOException {
    // Lazily create the shared domain output stream on first use.
    if (outStreamDomain == null) {
        outStreamDomain = PluginStoreTestUtils.createLogFile(logPath, fs);
    }
    // Write domain uses its own json generator to isolate from entity writers
    JsonGenerator domainGen = new JsonFactory().createGenerator(outStreamDomain);
    // One JSON value per line: newline separator between written values.
    domainGen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    objMapper.writeValue(domainGen, domain);
    // Flush to HDFS but keep the stream open (hence "LeaveOpen").
    outStreamDomain.hflush();
}
Example use of com.fasterxml.jackson.core.JsonGenerator in the Apache Storm project:
class JsonSerializer, method write.
/**
 * Serializes one tuple of field values to a UTF-8 encoded JSON object, pairing
 * each configured field name with the value at the same position in {@code data}.
 *
 * @param data the field values; must be non-null and match the schema size
 * @param buffer unused by this implementation
 * @return a ByteBuffer wrapping the UTF-8 bytes of the JSON object
 * @throws RuntimeException wrapping any IOException from JSON generation
 */
@Override
public ByteBuffer write(List<Object> data, ByteBuffer buffer) {
    Preconditions.checkArgument(data != null && data.size() == fieldNames.size(), "Invalid schema");
    StringWriter out = new StringWriter();
    // try-with-resources closes the generator, flushing all output into 'out'.
    try (JsonGenerator gen = jsonFactory.createGenerator(out)) {
        gen.writeStartObject();
        for (int idx = 0, n = fieldNames.size(); idx < n; idx++) {
            gen.writeFieldName(fieldNames.get(idx));
            gen.writeObject(data.get(idx));
        }
        gen.writeEndObject();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return ByteBuffer.wrap(out.toString().getBytes(StandardCharsets.UTF_8));
}
Aggregations