
Example 16 with JsonGenerator

Use of com.fasterxml.jackson.core.JsonGenerator in project flink by apache.

In the class JsonMapper, the method getOptimizerPropertiesJson:

public static String getOptimizerPropertiesJson(JsonFactory jsonFactory, PlanNode node) {
    try {
        final StringWriter writer = new StringWriter(256);
        final JsonGenerator gen = jsonFactory.createGenerator(writer);
        final OptimizerNode optNode = node.getOptimizerNode();
        gen.writeStartObject();
        // global properties
        if (node.getGlobalProperties() != null) {
            GlobalProperties gp = node.getGlobalProperties();
            gen.writeArrayFieldStart("global_properties");
            addProperty(gen, "Partitioning", gp.getPartitioning().name());
            if (gp.getPartitioningFields() != null) {
                addProperty(gen, "Partitioned on", gp.getPartitioningFields().toString());
            }
            if (gp.getPartitioningOrdering() != null) {
                addProperty(gen, "Partitioning Order", gp.getPartitioningOrdering().toString());
            } else {
                addProperty(gen, "Partitioning Order", "(none)");
            }
            if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
                addProperty(gen, "Uniqueness", "not unique");
            } else {
                addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
            }
            gen.writeEndArray();
        }
        // local properties
        if (node.getLocalProperties() != null) {
            LocalProperties lp = node.getLocalProperties();
            gen.writeArrayFieldStart("local_properties");
            if (lp.getOrdering() != null) {
                addProperty(gen, "Order", lp.getOrdering().toString());
            } else {
                addProperty(gen, "Order", "(none)");
            }
            if (lp.getGroupedFields() != null && lp.getGroupedFields().size() > 0) {
                addProperty(gen, "Grouped on", lp.getGroupedFields().toString());
            } else {
                addProperty(gen, "Grouping", "not grouped");
            }
            if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
                addProperty(gen, "Uniqueness", "not unique");
            } else {
                addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
            }
            gen.writeEndArray();
        }
        // output size estimates
        {
            gen.writeArrayFieldStart("estimates");
            addProperty(gen, "Est. Output Size", optNode.getEstimatedOutputSize() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedOutputSize(), "B"));
            addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords()));
            gen.writeEndArray();
        }
        // output node cost
        if (node.getNodeCosts() != null) {
            gen.writeArrayFieldStart("costs");
            addProperty(gen, "Network", node.getNodeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Disk I/O", node.getNodeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getDiskCost(), "B"));
            addProperty(gen, "CPU", node.getNodeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getCpuCost(), ""));
            addProperty(gen, "Cumulative Network", node.getCumulativeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getNetworkCost(), "B"));
            addProperty(gen, "Cumulative Disk I/O", node.getCumulativeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getDiskCost(), "B"));
            addProperty(gen, "Cumulative CPU", node.getCumulativeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getCpuCost(), ""));
            gen.writeEndArray();
        }
        // compiler hints
        if (optNode.getOperator().getCompilerHints() != null) {
            CompilerHints hints = optNode.getOperator().getCompilerHints();
            CompilerHints defaults = new CompilerHints();
            String size = hints.getOutputSize() == defaults.getOutputSize() ? "(none)" : String.valueOf(hints.getOutputSize());
            String card = hints.getOutputCardinality() == defaults.getOutputCardinality() ? "(none)" : String.valueOf(hints.getOutputCardinality());
            String width = hints.getAvgOutputRecordSize() == defaults.getAvgOutputRecordSize() ? "(none)" : String.valueOf(hints.getAvgOutputRecordSize());
            String filter = hints.getFilterFactor() == defaults.getFilterFactor() ? "(none)" : String.valueOf(hints.getFilterFactor());
            gen.writeArrayFieldStart("compiler_hints");
            addProperty(gen, "Output Size (bytes)", size);
            addProperty(gen, "Output Cardinality", card);
            addProperty(gen, "Avg. Output Record Size (bytes)", width);
            addProperty(gen, "Filter Factor", filter);
            gen.writeEndArray();
        }
        gen.writeEndObject();
        gen.close();
        return writer.toString();
    } catch (Exception e) {
        return "{}";
    }
}
Also used : StringWriter(java.io.StringWriter) OptimizerNode(org.apache.flink.optimizer.dag.OptimizerNode) GlobalProperties(org.apache.flink.optimizer.dataproperties.GlobalProperties) CompilerHints(org.apache.flink.api.common.operators.CompilerHints) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) LocalProperties(org.apache.flink.optimizer.dataproperties.LocalProperties) IOException(java.io.IOException)
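
The snippet above relies on two private helpers that are not shown, addProperty and formatNumber. A minimal sketch of addProperty, assuming each property is emitted as a small object with "name" and "value" fields inside the enclosing array (the real Flink helper may differ):

// Requires com.fasterxml.jackson.core.JsonGenerator and java.io.IOException.
// Hedged sketch of the helper used above: writes one {"name": ..., "value": ...}
// entry into the array opened by writeArrayFieldStart(...).
private static void addProperty(JsonGenerator gen, String name, String value) throws IOException {
    gen.writeStartObject();
    gen.writeStringField("name", name);
    gen.writeStringField("value", value);
    gen.writeEndObject();
}

With that shape, "global_properties", "local_properties", "estimates", "costs" and "compiler_hints" each become a JSON array of name/value pairs, presumably so a frontend can render the properties in insertion order.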

Example 17 with JsonGenerator

Use of com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.

In the class StartupProgressServlet, the method doGet:

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    resp.setContentType("application/json; charset=UTF-8");
    StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(getServletContext());
    StartupProgressView view = prog.createView();
    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
    try {
        json.writeStartObject();
        json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());
        json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete());
        json.writeArrayFieldStart(PHASES);
        for (Phase phase : view.getPhases()) {
            json.writeStartObject();
            json.writeStringField(NAME, phase.getName());
            json.writeStringField(DESC, phase.getDescription());
            json.writeStringField(STATUS, view.getStatus(phase).toString());
            json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase));
            json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase));
            writeStringFieldIfNotNull(json, FILE, view.getFile(phase));
            writeNumberFieldIfDefined(json, SIZE, view.getSize(phase));
            json.writeArrayFieldStart(STEPS);
            for (Step step : view.getSteps(phase)) {
                json.writeStartObject();
                StepType type = step.getType();
                if (type != null) {
                    json.writeStringField(NAME, type.getName());
                    json.writeStringField(DESC, type.getDescription());
                }
                json.writeNumberField(COUNT, view.getCount(phase, step));
                writeStringFieldIfNotNull(json, FILE, step.getFile());
                writeNumberFieldIfDefined(json, SIZE, step.getSize());
                json.writeNumberField(TOTAL, view.getTotal(phase, step));
                json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase, step));
                json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase, step));
                json.writeEndObject();
            }
            json.writeEndArray();
            json.writeEndObject();
        }
        json.writeEndArray();
        json.writeEndObject();
    } finally {
        IOUtils.cleanup(LOG, json);
    }
}
Also used : Phase(org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase) StepType(org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType) StartupProgressView(org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressView) JsonFactory(com.fasterxml.jackson.core.JsonFactory) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) Step(org.apache.hadoop.hdfs.server.namenode.startupprogress.Step) StartupProgress(org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress)
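
Two small helpers of the servlet, writeStringFieldIfNotNull and writeNumberFieldIfDefined, are not shown above. A hedged sketch of what they presumably do, assuming an undefined numeric value is signalled by a sentinel such as Long.MIN_VALUE:

// Requires com.fasterxml.jackson.core.JsonGenerator and java.io.IOException.
// Sketch only: the field is written only when a value is actually present, so
// optional fields like FILE and SIZE are simply omitted from the JSON otherwise.
private static void writeStringFieldIfNotNull(JsonGenerator json, String key, String value) throws IOException {
    if (value != null) {
        json.writeStringField(key, value);
    }
}

private static void writeNumberFieldIfDefined(JsonGenerator json, String key, long value) throws IOException {
    // Long.MIN_VALUE as the "undefined" marker is an assumption of this sketch.
    if (value != Long.MIN_VALUE) {
        json.writeNumberField(key, value);
    }
}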

Example 18 with JsonGenerator

Use of com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.

In the class Anonymizer, the method anonymizeTrace:

// anonymize the job trace file
private void anonymizeTrace() throws Exception {
    if (anonymizeTrace) {
        System.out.println("Anonymizing trace file: " + inputTracePath);
        JobTraceReader reader = null;
        JsonGenerator outGen = null;
        Configuration conf = getConf();
        try {
            // create a generator
            outGen = createJsonGenerator(conf, outputTracePath);
            // define the input trace reader
            reader = new JobTraceReader(inputTracePath, conf);
            // read the plain unanonymized logged job
            LoggedJob job = reader.getNext();
            while (job != null) {
                // write it via an anonymizing channel
                outGen.writeObject(job);
                // read the next job
                job = reader.getNext();
            }
            System.out.println("Anonymized trace file: " + outputTracePath);
        } finally {
            if (outGen != null) {
                outGen.close();
            }
            if (reader != null) {
                reader.close();
            }
        }
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator)
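
createJsonGenerator is a private helper of Anonymizer and is not shown. A minimal sketch, assuming the helper simply opens the output path on the Hadoop FileSystem and backs the generator with an ObjectMapper; the real helper additionally wires in the anonymizing serializers and optional output compression. The codec matters because outGen.writeObject(job) needs an ObjectCodec to serialize a POJO such as LoggedJob:

// Requires org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path,
// com.fasterxml.jackson.core.JsonEncoding and com.fasterxml.jackson.databind.ObjectMapper.
// Hedged sketch, not the actual Rumen helper.
private JsonGenerator createJsonGenerator(Configuration conf, Path path) throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    OutputStream out = fs.create(path);
    // Create the generator through an ObjectMapper so it carries a codec
    // and writeObject(...) can serialize job objects.
    ObjectMapper mapper = new ObjectMapper();
    JsonGenerator gen = mapper.getFactory().createGenerator(out, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    return gen;
}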

Example 19 with JsonGenerator

Use of com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.

In the class TestLogInfo, the method writeDomainLeaveOpen:

private void writeDomainLeaveOpen(TimelineDomain domain, Path logPath) throws IOException {
    if (outStreamDomain == null) {
        outStreamDomain = PluginStoreTestUtils.createLogFile(logPath, fs);
    }
    // Write domain uses its own json generator to isolate from entity writers
    JsonGenerator jsonGeneratorLocal = new JsonFactory().createGenerator(outStreamDomain);
    jsonGeneratorLocal.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    objMapper.writeValue(jsonGeneratorLocal, domain);
    outStreamDomain.hflush();
}
Also used : MinimalPrettyPrinter(com.fasterxml.jackson.core.util.MinimalPrettyPrinter) JsonFactory(com.fasterxml.jackson.core.JsonFactory) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator)
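
The generator is intentionally never closed here, which is what "leave open" refers to: with Jackson's default AUTO_CLOSE_TARGET behaviour, closing the generator would also close the shared outStreamDomain stream, so the test only calls hflush(). A hedged sketch of an alternative that closes the generator per write while keeping the stream open (it reuses the test's field names and is otherwise hypothetical):

// Sketch only, not the test's code: with AUTO_CLOSE_TARGET disabled, closing the
// generator flushes it but leaves the underlying stream open for later writers.
JsonFactory factory = new JsonFactory();
factory.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
try (JsonGenerator gen = factory.createGenerator(outStreamDomain)) {
    gen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    objMapper.writeValue(gen, domain);
}
outStreamDomain.hflush();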

Example 20 with JsonGenerator

Use of com.fasterxml.jackson.core.JsonGenerator in project storm by apache.

In the class JsonSerializer, the method write:

@Override
public ByteBuffer write(List<Object> data, ByteBuffer buffer) {
    Preconditions.checkArgument(data != null && data.size() == fieldNames.size(), "Invalid schema");
    StringWriter sw = new StringWriter();
    try (JsonGenerator jg = jsonFactory.createGenerator(sw)) {
        jg.writeStartObject();
        for (int i = 0; i < fieldNames.size(); ++i) {
            jg.writeFieldName(fieldNames.get(i));
            jg.writeObject(data.get(i));
        }
        jg.writeEndObject();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return ByteBuffer.wrap(sw.toString().getBytes(StandardCharsets.UTF_8));
}
Also used : StringWriter(java.io.StringWriter) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) IOException(java.io.IOException)
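
fieldNames and jsonFactory are instance fields of the serializer, and the ByteBuffer argument is not used by this implementation. A hypothetical usage sketch (the constructor taking the field names is an assumption, as is the sample data):

// Requires java.util.Arrays, java.util.List, java.nio.ByteBuffer and
// java.nio.charset.StandardCharsets.
// Hypothetical usage; assumes a JsonSerializer(List<String> fieldNames) constructor.
List<String> fields = Arrays.asList("id", "name");
JsonSerializer serializer = new JsonSerializer(fields);
// The buffer argument is ignored above, so null is fine here.
ByteBuffer buf = serializer.write(Arrays.asList(42, "alice"), null);
// Decodes to {"id":42,"name":"alice"}
String json = StandardCharsets.UTF_8.decode(buf).toString();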

Aggregations

JsonGenerator (com.fasterxml.jackson.core.JsonGenerator) 174
StringWriter (java.io.StringWriter) 75
IOException (java.io.IOException) 48
JsonFactory (com.fasterxml.jackson.core.JsonFactory) 38
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 25
Map (java.util.Map) 16
OutputStream (java.io.OutputStream) 14
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 13
JsonParser (com.fasterxml.jackson.core.JsonParser) 10
ArrayList (java.util.ArrayList) 10
Test (org.junit.Test) 10
HashMap (java.util.HashMap) 9
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException) 8
OutputStreamWriter (java.io.OutputStreamWriter) 6
ServletOutputStream (javax.servlet.ServletOutputStream) 6
TaskManagerLocation (org.apache.flink.runtime.taskmanager.TaskManagerLocation) 6
HeapDataOutputStream (org.apache.geode.internal.HeapDataOutputStream) 6
ExecutionState (org.apache.flink.runtime.execution.ExecutionState) 5
AccessExecutionVertex (org.apache.flink.runtime.executiongraph.AccessExecutionVertex) 5
RemoteSession (org.apache.jackrabbit.oak.remote.RemoteSession) 5