Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory in project wiquery (by WiQuery):
class AutocompleteComponent, method onBeforeRenderAutocomplete.
@Override
protected void onBeforeRenderAutocomplete(Autocomplete<?> autocomplete) {
    StringWriter sw = new StringWriter();
    try {
        // Build the list of suggestion entries, remembering which entry
        // corresponds to the currently selected model object so the visible
        // label and the hidden id field can be pre-populated.
        List<Object> json = new ArrayList<>();
        T defaultValue = AutocompleteComponent.this.getModelObject();
        int index = 0; // ids are 1-based: incremented before first use
        for (T obj : AutocompleteComponent.this.list.getObject()) {
            index++;
            AutocompleteJson value = newAutocompleteJson(index, obj);
            json.add(value);
            if (obj.equals(defaultValue)) {
                autocomplete.setDefaultModelObject(value.getLabel());
                getAutocompleteHidden().setModelObject(value.getValueId());
            }
        }
        // Close the generator (try-with-resources) so all buffered JSON is
        // guaranteed to be flushed into sw before it is read below; the
        // original code never closed it and relied on the mapper's
        // flush-after-write default.
        try (JsonGenerator gen = new JsonFactory().createGenerator(sw)) {
            new ObjectMapper().writeValue(gen, json);
        }
    } catch (IOException e) {
        throw new WicketRuntimeException(e);
    }
    autocomplete.getOptions().put("source", sw.toString());
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory in project jackson-module-afterburner (by FasterXML):
class TestSerializePerf, method main.
public static void main(String[] args) throws Exception {
    // JsonFactory f = new org.codehaus.jackson.smile.SmileFactory();
    final JsonFactory jsonFactory = new JsonFactory();
    // Two mappers sharing one factory; the test harness compares them.
    final ObjectMapper slowMapper = new ObjectMapper(jsonFactory);
    final ObjectMapper fastMapper = new ObjectMapper(jsonFactory);
    // !!! TEST -- to get profile info, comment out:
    slowMapper.registerModule(new AfterburnerModule());
    fastMapper.registerModule(new AfterburnerModule());
    new TestSerializePerf().testWith(slowMapper, fastMapper);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory in project spring-framework (by spring-projects):
class Jackson2TokenizerTests, method createParser.
@BeforeEach
public void createParser() {
    // Create a fresh factory/mapper pair per test so no state leaks between runs.
    JsonFactory factory = new JsonFactory();
    this.jsonFactory = factory;
    this.objectMapper = new ObjectMapper(factory);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory in project flink (by apache):
class JsonMapper, method getOptimizerPropertiesJson.
/**
 * Renders the optimizer properties of the given plan node (global/local
 * properties, size estimates, costs, and compiler hints) as a JSON object
 * string for display purposes.
 *
 * @param jsonFactory factory used to create the JSON generator
 * @param node the plan node whose optimizer properties are rendered
 * @return the JSON string, or {@code "{}"} if rendering fails (best-effort)
 */
public static String getOptimizerPropertiesJson(JsonFactory jsonFactory, PlanNode node) {
    try {
        final StringWriter writer = new StringWriter(256);
        final OptimizerNode optNode = node.getOptimizerNode();
        // try-with-resources ensures the generator is closed (and fully
        // flushed into the writer) even if rendering fails midway.
        try (JsonGenerator gen = jsonFactory.createGenerator(writer)) {
            gen.writeStartObject();
            // global properties
            if (node.getGlobalProperties() != null) {
                GlobalProperties gp = node.getGlobalProperties();
                gen.writeArrayFieldStart("global_properties");
                addProperty(gen, "Partitioning", gp.getPartitioning().name());
                if (gp.getPartitioningFields() != null) {
                    addProperty(gen, "Partitioned on", gp.getPartitioningFields().toString());
                }
                if (gp.getPartitioningOrdering() != null) {
                    addProperty(gen, "Partitioning Order", gp.getPartitioningOrdering().toString());
                } else {
                    addProperty(gen, "Partitioning Order", "(none)");
                }
                addUniquenessProperty(gen, optNode);
                gen.writeEndArray();
            }
            // local properties
            if (node.getLocalProperties() != null) {
                LocalProperties lp = node.getLocalProperties();
                gen.writeArrayFieldStart("local_properties");
                if (lp.getOrdering() != null) {
                    addProperty(gen, "Order", lp.getOrdering().toString());
                } else {
                    addProperty(gen, "Order", "(none)");
                }
                if (lp.getGroupedFields() != null && lp.getGroupedFields().size() > 0) {
                    addProperty(gen, "Grouped on", lp.getGroupedFields().toString());
                } else {
                    addProperty(gen, "Grouping", "not grouped");
                }
                addUniquenessProperty(gen, optNode);
                gen.writeEndArray();
            }
            // output size estimates; -1 marks "unknown" in the optimizer
            gen.writeArrayFieldStart("estimates");
            addProperty(gen, "Est. Output Size", optNode.getEstimatedOutputSize() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedOutputSize(), "B"));
            addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords()));
            gen.writeEndArray();
            // per-node and cumulative costs
            if (node.getNodeCosts() != null) {
                gen.writeArrayFieldStart("costs");
                addProperty(gen, "Network", node.getNodeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getNetworkCost(), "B"));
                addProperty(gen, "Disk I/O", node.getNodeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getDiskCost(), "B"));
                addProperty(gen, "CPU", node.getNodeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getNodeCosts().getCpuCost(), ""));
                addProperty(gen, "Cumulative Network", node.getCumulativeCosts().getNetworkCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getNetworkCost(), "B"));
                addProperty(gen, "Cumulative Disk I/O", node.getCumulativeCosts().getDiskCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getDiskCost(), "B"));
                addProperty(gen, "Cumulative CPU", node.getCumulativeCosts().getCpuCost() == -1 ? "(unknown)" : formatNumber(node.getCumulativeCosts().getCpuCost(), ""));
                gen.writeEndArray();
            }
            // compiler hints; values equal to the defaults render as "(none)"
            if (optNode.getOperator().getCompilerHints() != null) {
                CompilerHints hints = optNode.getOperator().getCompilerHints();
                CompilerHints defaults = new CompilerHints();
                String size = hints.getOutputSize() == defaults.getOutputSize() ? "(none)" : String.valueOf(hints.getOutputSize());
                String card = hints.getOutputCardinality() == defaults.getOutputCardinality() ? "(none)" : String.valueOf(hints.getOutputCardinality());
                String width = hints.getAvgOutputRecordSize() == defaults.getAvgOutputRecordSize() ? "(none)" : String.valueOf(hints.getAvgOutputRecordSize());
                String filter = hints.getFilterFactor() == defaults.getFilterFactor() ? "(none)" : String.valueOf(hints.getFilterFactor());
                gen.writeArrayFieldStart("compiler_hints");
                addProperty(gen, "Output Size (bytes)", size);
                addProperty(gen, "Output Cardinality", card);
                addProperty(gen, "Avg. Output Record Size (bytes)", width);
                addProperty(gen, "Filter Factor", filter);
                gen.writeEndArray();
            }
            gen.writeEndObject();
        }
        return writer.toString();
    } catch (Exception e) {
        // Best-effort rendering for display: fall back to an empty object.
        return "{}";
    }
}

/**
 * Writes the "Uniqueness" entry shared verbatim by the global and local
 * property sections above.
 */
private static void addUniquenessProperty(JsonGenerator gen, OptimizerNode optNode) throws IOException {
    if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) {
        addProperty(gen, "Uniqueness", "not unique");
    } else {
        addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString());
    }
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonFactory in project flink (by apache):
class Runner, method writeJobDetails.
/**
 * Write the following job details as a JSON encoded file: Flink version
 * information, job ID, runtime, parameters, and accumulators.
 *
 * @param env the execution environment
 * @param jobDetailsPath filesystem path to write job details to
 * @throws IOException on error writing to jobDetailsPath
 */
private static void writeJobDetails(ExecutionEnvironment env, String jobDetailsPath) throws IOException {
    JobExecutionResult result = env.getLastJobExecutionResult();
    File jsonFile = new File(jobDetailsPath);
    // Look up the revision information once instead of twice.
    // NOTE(review): assumes the nested type is EnvironmentInformation.RevisionInformation — confirm against the class.
    EnvironmentInformation.RevisionInformation revision = EnvironmentInformation.getRevisionInformation();
    // try-with-resources guarantees the generator (and underlying file) is closed.
    try (JsonGenerator json = new JsonFactory().createGenerator(jsonFile, JsonEncoding.UTF8)) {
        json.writeStartObject();
        json.writeObjectFieldStart("Apache Flink");
        json.writeStringField("version", EnvironmentInformation.getVersion());
        json.writeStringField("commit ID", revision.commitId);
        json.writeStringField("commit date", revision.commitDate);
        json.writeEndObject();
        json.writeStringField("job_id", result.getJobID().toString());
        json.writeNumberField("runtime_ms", result.getNetRuntime());
        json.writeObjectFieldStart("parameters");
        for (Map.Entry<String, String> entry : env.getConfig().getGlobalJobParameters().toMap().entrySet()) {
            json.writeStringField(entry.getKey(), entry.getValue());
        }
        json.writeEndObject();
        json.writeObjectFieldStart("accumulators");
        for (Map.Entry<String, Object> entry : result.getAllAccumulatorResults().entrySet()) {
            json.writeStringField(entry.getKey(), entry.getValue().toString());
        }
        json.writeEndObject();
        json.writeEndObject();
    }
}
Aggregations