Use of org.codehaus.jackson.JsonGenerator in project cassandra by apache.
The class JsonTransformer, method toJson.
public static void toJson(ISSTableScanner currentScanner, Stream<UnfilteredRowIterator> partitions, boolean rawTime, TableMetadata metadata, OutputStream out) throws IOException {
    try (JsonGenerator json = jsonFactory.createJsonGenerator(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
        JsonTransformer transformer = new JsonTransformer(json, currentScanner, rawTime, metadata);
        json.writeStartArray();
        partitions.forEach(transformer::serializePartition);
        json.writeEndArray();
    }
}
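The method streams each partition as one element of a top-level JSON array, so nothing has to be buffered in memory. Below is a minimal standalone sketch of the same Jackson 1.x streaming pattern; the StreamingArrayExample class and its writeNames method are invented for illustration, and only the org.codehaus.jackson calls are taken from the snippet above.

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class StreamingArrayExample {
    private static final JsonFactory jsonFactory = new JsonFactory();

    // Streams each item as one element of a top-level JSON array, mirroring the
    // writeStartArray / per-element serialize / writeEndArray pattern above.
    public static void writeNames(List<String> names, OutputStream out) throws IOException {
        try (JsonGenerator json = jsonFactory.createJsonGenerator(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
            json.writeStartArray();
            for (String name : names) {
                json.writeStartObject();
                json.writeFieldName("name");
                json.writeString(name);
                json.writeEndObject();
            }
            json.writeEndArray();
        }
    }
}

Closing the generator in the try-with-resources block flushes any buffered JSON to the underlying stream.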
Use of org.codehaus.jackson.JsonGenerator in project databus by linkedin.
The class JsonUtils, method toJsonString.
/**
 * Serializes a bean as JSON.
 *
 * @param <T> the bean type
 * @param bean the bean to serialize
 * @param pretty whether to pretty-print the output
 * @return the JSON string
 */
public static <T> String toJsonString(T bean, boolean pretty) throws JsonGenerationException, JsonMappingException, IOException {
    JsonFactory jsonFactory = new JsonFactory(new ObjectMapper());
    StringWriter out = new StringWriter(1000);
    JsonGenerator jsonGenerator = jsonFactory.createJsonGenerator(out);
    if (pretty) {
        jsonGenerator.useDefaultPrettyPrinter();
    }
    jsonGenerator.writeObject(bean);
    out.flush();
    return out.toString();
}
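Passing new ObjectMapper() into the JsonFactory constructor registers the mapper as the generator's codec, which is what allows jsonGenerator.writeObject(bean) to perform full bean serialization. A hypothetical caller (the Point bean below is invented for illustration) might look like this:

import java.io.IOException;

public class JsonUtilsExample {
    // Trivial bean, invented for this example; ObjectMapper serializes it via its getters.
    public static class Point {
        public int getX() { return 1; }
        public int getY() { return 2; }
    }

    public static void main(String[] args) throws IOException {
        // JsonGenerationException and JsonMappingException both extend IOException.
        System.out.println(JsonUtils.toJsonString(new Point(), true));
        // expected output (pretty-printed), roughly:
        // {
        //   "x" : 1,
        //   "y" : 2
        // }
    }
}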
Use of org.codehaus.jackson.JsonGenerator in project databus by linkedin.
The class AbstractRequestProcesser, method makeJsonResponse.
protected <T> String makeJsonResponse(T obj, DatabusRequest request) throws IOException {
    StringWriter out = new StringWriter(102400);
    ObjectMapper mapper = new ObjectMapper();
    JsonGenerator jsonGen = createJsonGenerator(mapper, out, null != request.getParams().getProperty(PRETTY_PRINT_PARAM));
    mapper.writeValue(jsonGen, obj);
    return out.toString();
}
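The createJsonGenerator helper is defined elsewhere in the class and is not shown here. A plausible minimal sketch of such a helper, based only on the Jackson 1.x API and not on the actual databus source, would be:

import java.io.IOException;
import java.io.Writer;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;

abstract class AbstractRequestProcesserSketch {
    // Hypothetical helper: build a generator from the mapper's own factory and
    // optionally enable the default pretty printer.
    protected JsonGenerator createJsonGenerator(ObjectMapper mapper, Writer out, boolean prettyPrint) throws IOException {
        JsonGenerator jsonGen = mapper.getJsonFactory().createJsonGenerator(out);
        if (prettyPrint) {
            jsonGen.useDefaultPrettyPrinter();
        }
        return jsonGen;
    }
}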
Use of org.codehaus.jackson.JsonGenerator in project databus by linkedin.
The class DbusEventAvroDecoder, method dumpEventValueInJSON.
public void dumpEventValueInJSON(DbusEvent e, OutputStream out) {
    byte[] md5 = new byte[16];
    e.schemaId(md5);
    SchemaId schemaId = new SchemaId(md5);
    VersionedSchema sourceSchema = _schemaSet.getById(schemaId);
    ByteBuffer valueBuffer = e.value();
    byte[] valueBytes = new byte[valueBuffer.remaining()];
    valueBuffer.get(valueBytes);
    try {
        Schema schema = sourceSchema.getSchema();
        DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
        binDecoder.set(DecoderFactory.defaultFactory().createBinaryDecoder(valueBytes, binDecoder.get()));
        Object datum = reader.read(null, binDecoder.get());
        DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
        JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        // write the src ID
        g.writeStartObject();
        g.writeFieldName(SRC_ID_FIELD_NAME);
        g.writeNumber(e.getSourceId());
        g.writeFieldName(OPCODE_FIELD_NAME);
        g.writeString(e.getOpcode().toString());
        g.writeFieldName("partId");
        g.writeNumber(Integer.valueOf(e.getPartitionId()));
        g.writeFieldName(VALUE_FIELD_NAME);
        writer.write(datum, new JsonEncoder(schema, g));
        g.writeEndObject();
        g.writeEndObject();
        try {
            g.writeEndObject();
        } catch (JsonGenerationException e_json) {
            // ignored: the Avro JsonEncoder sometimes omits two closing braces, so the extra
            // writeEndObject calls above compensate; when they are not needed, this one fails harmlessly
        }
        g.flush();
    } catch (IOException e1) {
        LOG.error("event value serialization error; event = " + e, e1);
    }
}
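Stripped of the event plumbing and the header fields, the heart of the method is a binary-Avro-to-JSON round trip: decode the value bytes with a GenericDatumReader, then re-encode the datum through a JsonEncoder layered on a Jackson JsonGenerator. Here is a standalone sketch of just that step, assuming the same older Avro API used above (DecoderFactory.defaultFactory() and the public JsonEncoder constructor); the AvroToJsonSketch class is invented for illustration.

import java.io.IOException;
import java.io.OutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class AvroToJsonSketch {
    // Decodes binary Avro bytes with the given writer schema and dumps the record as JSON.
    public static void dumpAsJson(byte[] valueBytes, Schema schema, OutputStream out) throws IOException {
        DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
        Object datum = reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(valueBytes, null));
        JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
        writer.write(datum, new JsonEncoder(schema, g));
        g.flush();
    }
}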
Use of org.codehaus.jackson.JsonGenerator in project databus by linkedin.
The class AvroConverter, method convert.
public void convert(InputStream in, OutputStream out) throws IOException {
    JsonGenerator jsonGenerator = (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out));
    if (AvroFormat.JSON == _outputFormat) {
        jsonGenerator.useDefaultPrettyPrinter();
    }
    List<GenericRecord> result = convert(in);
    Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat) ? new BinaryEncoder(out) : new JsonEncoder(_outputSchema, jsonGenerator);
    GenericDatumWriter<GenericRecord> genericWriter = new GenericDatumWriter<GenericRecord>(_outputSchema);
    for (GenericRecord r : result) {
        genericWriter.write(r, outputEncoder);
    }
    outputEncoder.flush();
    out.flush();
}
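The interesting part is the choice of Encoder: binary output goes straight to the stream through a BinaryEncoder, while JSON output is layered on a Jackson JsonGenerator through a JsonEncoder. A self-contained sketch of that selection follows; the EncoderSelectionSketch class and its Format enum are invented, while the Avro and Jackson calls mirror the method above (same older Avro API with public encoder constructors).

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.JsonEncoder;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class EncoderSelectionSketch {
    public enum Format { BINARY, JSON }

    // Writes the records either as raw binary Avro or as pretty-printed JSON,
    // depending on the requested output format.
    public static void write(List<GenericRecord> records, Schema schema, Format format, OutputStream out) throws IOException {
        Encoder encoder;
        if (format == Format.BINARY) {
            encoder = new BinaryEncoder(out);
        } else {
            JsonGenerator jsonGenerator = new JsonFactory().createJsonGenerator(new OutputStreamWriter(out));
            jsonGenerator.useDefaultPrettyPrinter();
            encoder = new JsonEncoder(schema, jsonGenerator);
        }
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
        for (GenericRecord r : records) {
            writer.write(r, encoder);
        }
        encoder.flush();
        out.flush();
    }
}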