Use of org.apache.avro.generic.GenericDatumWriter in project databus by LinkedIn.
In class AvroConverter, method convert:
public void convert(InputStream in, OutputStream out) throws IOException {
  JsonGenerator jsonGenerator =
      (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out));
  if (AvroFormat.JSON == _outputFormat) {
    jsonGenerator.useDefaultPrettyPrinter();
  }
  List<GenericRecord> result = convert(in);
  // Pick the encoder that matches the requested output format.
  Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat)
      ? new BinaryEncoder(out)
      : new JsonEncoder(_outputSchema, jsonGenerator);
  GenericDatumWriter<GenericRecord> genericWriter =
      new GenericDatumWriter<GenericRecord>(_outputSchema);
  for (GenericRecord r : result) {
    genericWriter.write(r, outputEncoder);
  }
  outputEncoder.flush();
  out.flush();
}
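For comparison, here is a minimal sketch of the same conversion written against the EncoderFactory API that replaced the direct BinaryEncoder and JsonEncoder constructors in Avro 1.5+. This is not databus code; the class name, field names, and the boolean format flag are stand-ins for _outputFormat and _outputSchema.

import java.io.IOException;
import java.io.OutputStream;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;

public class AvroConverterSketch {
  private final Schema outputSchema;   // stand-in for _outputSchema
  private final boolean binaryOutput;  // stand-in for the AvroFormat.BINARY check

  public AvroConverterSketch(Schema outputSchema, boolean binaryOutput) {
    this.outputSchema = outputSchema;
    this.binaryOutput = binaryOutput;
  }

  public void convert(List<GenericRecord> records, OutputStream out) throws IOException {
    // EncoderFactory replaces the deprecated direct constructors used above.
    Encoder encoder = binaryOutput
        ? EncoderFactory.get().binaryEncoder(out, null)
        : EncoderFactory.get().jsonEncoder(outputSchema, out);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(outputSchema);
    for (GenericRecord r : records) {
      writer.write(r, encoder);
    }
    encoder.flush();
    out.flush();
  }
}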
Use of org.apache.avro.generic.GenericDatumWriter in project databus by LinkedIn.
In class OpenReplicatorAvroEventFactory, method serializeEvent:
protected byte[] serializeEvent(GenericRecord record) throws EventCreationException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer =
        new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    _log.error("Exception for record: " + record + " with schema: " + record.getSchema().getFullName());
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  }
  return serializedValue;
}
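The constructor-built BinaryEncoder used here writes straight through to the ByteArrayOutputStream, so no flush is needed before toByteArray(). With the factory-built encoders of Avro 1.5+ the flush is required, since they buffer their writes. A minimal standalone sketch under that assumption (not databus code):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public final class AvroSerializeSketch {
  // Serialize a GenericRecord with the Avro 1.5+ factory API.
  public static byte[] toBytes(GenericRecord record) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bos, null);
    new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, encoder);
    encoder.flush(); // required: the factory-built binary encoder buffers its writes
    return bos.toByteArray();
  }
}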
Use of org.apache.avro.generic.GenericDatumWriter in project databus by LinkedIn.
In class OracleAvroGenericEventFactory, method serializeEvent:
protected byte[] serializeEvent(GenericRecord record, long scn, long timestamp, ResultSet row,
                                DbusEventBufferAppendable eventBuffer, boolean enableTracing,
                                DbusEventsStatisticsCollector dbusEventsStatisticsCollector)
    throws EventCreationException, UnsupportedKeyException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer =
        new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  }
  return serializedValue;
}
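The two serializeEvent variants above differ only in their error messages. For completeness, here is a sketch of the matching read path that would rebuild the record from those bytes, using the same Avro 1.4-era DecoderFactory call that appears in the decoder example further down; it is not part of either factory class.

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

public final class AvroDeserializeSketch {
  // Rebuild a GenericRecord from the bytes produced by serializeEvent.
  public static GenericRecord fromBytes(byte[] bytes, Schema schema) throws IOException {
    BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(bytes, null);
    return new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
  }
}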
Use of org.apache.avro.generic.GenericDatumWriter in project rest.li by LinkedIn.
In class AvroUtil, method bytesFromGenericRecord:
public static byte[] bytesFromGenericRecord(GenericRecord record) throws IOException {
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>();
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  AvroAdapter avroAdapter = AvroAdapterFinder.getAvroAdapter();
  Encoder binaryEncoder = avroAdapter.createBinaryEncoder(outputStream);
  writer.setSchema(record.getSchema());
  writer.write(record, binaryEncoder);
  binaryEncoder.flush();
  return outputStream.toByteArray();
}
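A hypothetical usage sketch, assuming Avro 1.5+ for Schema.Parser and that AvroUtil is imported from its rest.li package; the toy schema and field name are made up for illustration.

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

public class AvroUtilUsageSketch {
  public static void main(String[] args) throws Exception {
    // Toy schema and field are illustrative only.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Example\",\"fields\":"
            + "[{\"name\":\"id\",\"type\":\"long\"}]}");
    GenericRecord record = new GenericData.Record(schema);
    record.put("id", 42L);
    byte[] bytes = AvroUtil.bytesFromGenericRecord(record);
    System.out.println("serialized " + bytes.length + " bytes");
  }
}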
Use of org.apache.avro.generic.GenericDatumWriter in project databus by LinkedIn.
In class DbusEventAvroDecoder, method dumpEventValueInJSON:
public void dumpEventValueInJSON(DbusEvent e, OutputStream out) {
  byte[] md5 = new byte[16];
  e.schemaId(md5);
  SchemaId schemaId = new SchemaId(md5);
  VersionedSchema sourceSchema = _schemaSet.getById(schemaId);
  ByteBuffer valueBuffer = e.value();
  byte[] valueBytes = new byte[valueBuffer.remaining()];
  valueBuffer.get(valueBytes);
  try {
    Schema schema = sourceSchema.getSchema();
    // Decode the event payload from Avro binary into a generic datum.
    DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
    binDecoder.set(DecoderFactory.defaultFactory().createBinaryDecoder(valueBytes, binDecoder.get()));
    Object datum = reader.read(null, binDecoder.get());
    // Re-encode the datum as JSON, wrapped in an envelope carrying event metadata.
    DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
    JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
    // write the src ID
    g.writeStartObject();
    g.writeFieldName(SRC_ID_FIELD_NAME);
    g.writeNumber(e.getSourceId());
    g.writeFieldName(OPCODE_FIELD_NAME);
    g.writeString(e.getOpcode().toString());
    g.writeFieldName("partId");
    g.writeNumber(Integer.valueOf(e.getPartitionId()));
    g.writeFieldName(VALUE_FIELD_NAME);
    writer.write(datum, new JsonEncoder(schema, g));
    g.writeEndObject();
    g.writeEndObject();
    try {
      g.writeEndObject();
    } catch (JsonGenerationException e_json) {
      // ignore the error: the Avro JsonEncoder sometimes fails to emit the two closing braces itself
    }
    g.flush();
  } catch (IOException e1) {
    LOG.error("event value serialization error; event = " + e, e1);
  }
}
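The repeated writeEndObject() calls above work around generator state left behind by the old JsonEncoder(Schema, JsonGenerator) constructor. A minimal sketch, assuming the Avro 1.5+ factory APIs rather than the databus code path, of decoding the value bytes and re-encoding them as JSON without touching the generator directly:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;

public final class BinaryToJsonSketch {
  // Decode Avro binary bytes and re-encode the datum as JSON text.
  public static String binaryToJson(byte[] valueBytes, Schema schema) throws IOException {
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(valueBytes, null);
    Object datum = new GenericDatumReader<Object>(schema).read(null, decoder);

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, bos);
    new GenericDatumWriter<Object>(schema).write(datum, jsonEncoder);
    jsonEncoder.flush();
    return bos.toString("UTF-8");
  }
}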