Use of org.codehaus.jackson.JsonFactory in the LinkedIn databus project:
class AvroConverter, method convert.
/**
 * Converts the records read from {@code in} and writes them to {@code out} in
 * the configured output format: Avro binary when {@code _outputFormat} is
 * {@code AvroFormat.BINARY}, otherwise JSON (pretty-printed for
 * {@code AvroFormat.JSON}).
 *
 * @param in  the stream holding the input records
 * @param out the stream that receives the converted output
 * @throws IOException if reading, decoding, or writing fails
 */
public void convert(InputStream in, OutputStream out) throws IOException {
// FIX: name the charset explicitly. The no-argument OutputStreamWriter
// constructor encodes with the platform default, which produces corrupt JSON
// for non-ASCII data on non-UTF-8 platforms; JSON text should be UTF-8.
// (UnsupportedEncodingException is an IOException, so the signature is unchanged.)
JsonGenerator jsonGenerator = (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out, "UTF-8"));
if (AvroFormat.JSON == _outputFormat)
jsonGenerator.useDefaultPrettyPrinter();
List<GenericRecord> result = convert(in);
// Binary output bypasses the generator and writes straight to the stream.
Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat) ? new BinaryEncoder(out) : new JsonEncoder(_outputSchema, jsonGenerator);
GenericDatumWriter<GenericRecord> genericWriter = new GenericDatumWriter<GenericRecord>(_outputSchema);
for (GenericRecord r : result) {
genericWriter.write(r, outputEncoder);
}
// Flush the encoder first (it may buffer), then the underlying stream.
outputEncoder.flush();
out.flush();
}
Use of org.codehaus.jackson.JsonFactory in the LinkedIn databus project:
class JsonUtils, method toJsonString.
/**
 * Serializes a bean as a JSON string.
 *
 * @param <T> the bean type
 * @param bean the bean to serialize
 * @param pretty a flag if the output is to be pretty-printed
 * @return the JSON string
 * @throws JsonGenerationException if the bean cannot be rendered as JSON
 * @throws JsonMappingException if the bean's structure cannot be mapped
 * @throws IOException on any other serialization failure
 */
public static <T> String toJsonString(T bean, boolean pretty) throws JsonGenerationException, JsonMappingException, IOException {
// The ObjectMapper is the codec that lets the generator serialize arbitrary beans.
JsonFactory jsonFactory = new JsonFactory(new ObjectMapper());
StringWriter out = new StringWriter(1000);
JsonGenerator jsonGenerator = jsonFactory.createJsonGenerator(out);
if (pretty)
jsonGenerator.useDefaultPrettyPrinter();
jsonGenerator.writeObject(bean);
// FIX: flush the generator, not the writer. JsonGenerator buffers output
// internally, and StringWriter.flush() is a no-op, so the original
// out.flush() could return an empty or truncated string.
jsonGenerator.flush();
return out.toString();
}
Use of org.codehaus.jackson.JsonFactory in the LinkedIn databus project:
class DbusEventAvroDecoder, method dumpEventValueInJSON.
/**
 * Writes the Avro-decoded value of the given event to {@code out} as a single
 * JSON object carrying the source id, opcode, partition id, and the decoded
 * payload under the value field.
 *
 * <p>Any {@link IOException} during serialization is logged and swallowed;
 * partial output may already have been written in that case.
 *
 * <p>NOTE(review): if no schema is registered for the event's schema id,
 * {@code _schemaSet.getById} presumably returns null and the
 * {@code sourceSchema.getSchema()} call below would throw an NPE that the
 * IOException catch does not cover — confirm and consider an explicit check.
 *
 * @param e   the event whose value payload is dumped
 * @param out the stream that receives the JSON text (UTF-8)
 */
public void dumpEventValueInJSON(DbusEvent e, OutputStream out) {
// Look up the writer schema via the 16-byte MD5 schema id stored in the event.
byte[] md5 = new byte[16];
e.schemaId(md5);
SchemaId schemaId = new SchemaId(md5);
VersionedSchema sourceSchema = _schemaSet.getById(schemaId);
// Copy the serialized value bytes out of the event's value buffer.
// (Note: get() advances the position of the buffer returned by e.value().)
ByteBuffer valueBuffer = e.value();
byte[] valueBytes = new byte[valueBuffer.remaining()];
valueBuffer.get(valueBytes);
try {
Schema schema = sourceSchema.getSchema();
DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
// binDecoder holds a reusable decoder (looks like a holder/ThreadLocal — TODO
// confirm); passing the previous instance to createBinaryDecoder recycles it.
binDecoder.set(DecoderFactory.defaultFactory().createBinaryDecoder(valueBytes, binDecoder.get()));
Object datum = reader.read(null, binDecoder.get());
DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
// Write the metadata fields, then the decoded value.
g.writeStartObject();
g.writeFieldName(SRC_ID_FIELD_NAME);
g.writeNumber(e.getSourceId());
g.writeFieldName(OPCODE_FIELD_NAME);
g.writeString(e.getOpcode().toString());
g.writeFieldName("partId");
g.writeNumber(Integer.valueOf(e.getPartitionId()));
g.writeFieldName(VALUE_FIELD_NAME);
// The Avro JsonEncoder emits the decoded datum directly through g.
writer.write(datum, new JsonEncoder(schema, g));
g.writeEndObject();
g.writeEndObject();
try {
g.writeEndObject();
} catch (JsonGenerationException e_json) {
// Deliberate workaround: the Avro JsonEncoder has been observed to sometimes
// leave up to two objects unclosed, so up to three writeEndObject() calls are
// issued and the exception from any superfluous one is intentionally ignored.
}
g.flush();
} catch (IOException e1) {
LOG.error("event value serialization error; event = " + e, e1);
}
}
Use of org.codehaus.jackson.JsonFactory in the LinkedIn Pinot project:
class JSONRecordReader, method init.
/**
 * Opens the data file and positions an iterator over its top-level JSON
 * records, each deserialized into a {@code Map}.
 *
 * @throws Exception if the file cannot be opened or is not parseable JSON
 */
@Override
public void init() throws Exception {
// FIX: let Jackson open the file itself. The old code wrapped it in a
// FileReader, which decodes with the platform-default charset; the
// File-based factory method auto-detects the JSON encoding (UTF-8/16/32)
// instead, and also avoids dangling a Reader if parser construction throws.
_parser = new JsonFactory().createJsonParser(_dataFile);
_iterator = new ObjectMapper().readValues(_parser, Map.class);
}
Use of org.codehaus.jackson.JsonFactory in the Apache Flink project:
class DumpCompiledPlanTest, method dump.
/**
 * Compiles the given plan, dumps the optimized plan as JSON, and verifies the
 * dump is well-formed by tokenizing the entire document with a Jackson parser.
 * Any parse failure (or other error) fails the test.
 *
 * @param p the plan to compile and dump
 */
private void dump(Plan p) {
p.setExecutionConfig(new ExecutionConfig());
try {
final OptimizedPlan optimizedPlan = compileNoStats(p);
final String json = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optimizedPlan);
final JsonParser tokenizer = new JsonFactory().createJsonParser(json);
// Pull every token; a malformed document makes the parser throw.
while (tokenizer.nextToken() != null) {
// tokens are consumed only for validation -- their content is irrelevant
}
} catch (JsonParseException jpe) {
jpe.printStackTrace();
Assert.fail("JSON Generator produced malformatted output: " + jpe.getMessage());
} catch (Exception ex) {
ex.printStackTrace();
Assert.fail("An error occurred in the test: " + ex.getMessage());
}
}
Aggregations