Use of org.apache.avro.io.BinaryEncoder in the project cdap by caskdata.
From the class AvroRecordFormatTest, method toStreamEvent.
/**
 * Avro-binary-encodes the given record and wraps it in a {@link StreamEvent}.
 * When {@code writeSchema} is true, the writer schema (and its MD5 hash) are
 * attached as event headers so the reader can locate the matching schema.
 */
private StreamEvent toStreamEvent(GenericRecord record, boolean writeSchema) throws IOException {
  // Serialize the record into an in-memory buffer using Avro binary encoding.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(buffer, null);
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(record.getSchema());
  datumWriter.write(record, binaryEncoder);
  binaryEncoder.flush();
  buffer.close();
  byte[] payload = buffer.toByteArray();

  // Optionally advertise the writer schema via headers keyed by AvroRecordFormat.
  String schemaJson = record.getSchema().toString();
  Map<String, String> eventHeaders = Maps.newHashMap();
  if (writeSchema) {
    eventHeaders.put(AvroRecordFormat.SCHEMA, schemaJson);
    eventHeaders.put(AvroRecordFormat.SCHEMA_HASH,
                     Hashing.md5().hashString(schemaJson, Charsets.UTF_8).toString());
  }
  return new StreamEvent(eventHeaders, ByteBuffer.wrap(payload));
}
Use of org.apache.avro.io.BinaryEncoder in the project cdap by caskdata.
From the class MapReduceStreamInputTestRun, method createEvent.
/**
 * Builds a stock-trade record for the given schema and returns its Avro
 * binary encoding as a byte array.
 */
private byte[] createEvent(Schema schema, String ticker, int count, float price) throws IOException {
  // Assemble the record via the builder, then binary-encode it in memory.
  GenericRecord tradeRecord = new GenericRecordBuilder(schema)
      .set("ticker", ticker)
      .set("num_traded", count)
      .set("price", price)
      .build();
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(buffer, null);
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  datumWriter.write(tradeRecord, binaryEncoder);
  binaryEncoder.flush();
  buffer.close();
  return buffer.toByteArray();
}
Use of org.apache.avro.io.BinaryEncoder in the project hive by apache.
From the class AvroGenericRecordWritable, method write.
/**
 * Serializes this writable: both schemas as UTF strings, the record-reader ID,
 * then the Avro-binary-encoded record bytes.
 */
@Override
public void write(DataOutput out) throws IOException {
  // Write schema since we need it to pull the data out. (see point #1 above)
  String schemaString = record.getSchema().toString(false);
  out.writeUTF(schemaString);
  schemaString = fileSchema.toString(false);
  out.writeUTF(schemaString);
  recordReaderID.write(out);
  // Encode the record into an in-memory buffer and copy it to 'out'.
  // The Writable contract only guarantees a DataOutput; the previous
  // unchecked cast to DataOutputStream could throw ClassCastException
  // for other DataOutput implementations. The emitted bytes are identical.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  GenericDatumWriter<GenericRecord> gdw = new GenericDatumWriter<GenericRecord>();
  BinaryEncoder be = EncoderFactory.get().directBinaryEncoder(buffer, null);
  gdw.setSchema(record.getSchema());
  gdw.write(record, be);
  be.flush(); // direct encoder is unbuffered; flush kept for explicitness
  out.write(buffer.toByteArray());
}
Use of org.apache.avro.io.BinaryEncoder in the project haivvreo by jghoman.
From the class AvroGenericRecordWritable, method write.
/**
 * Serializes this writable: the record schema as a UTF string followed by the
 * Avro-binary-encoded record bytes.
 */
@Override
public void write(DataOutput out) throws IOException {
  // Write schema since we need it to pull the data out. (see point #1 above)
  String schemaString = record.getSchema().toString(false);
  out.writeUTF(schemaString);
  // Encode the record into an in-memory buffer and copy it to 'out'.
  // The Writable contract only guarantees a DataOutput; the previous
  // unchecked cast to DataOutputStream could throw ClassCastException
  // for other DataOutput implementations. The emitted bytes are identical.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  GenericDatumWriter<GenericRecord> gdw = new GenericDatumWriter<GenericRecord>();
  BinaryEncoder be = EncoderFactory.get().directBinaryEncoder(buffer, null);
  gdw.setSchema(record.getSchema());
  gdw.write(record, be);
  be.flush(); // direct encoder is unbuffered; flush kept for explicitness
  out.write(buffer.toByteArray());
}
Use of org.apache.avro.io.BinaryEncoder in the project databus by linkedin.
From the class AvroBinaryDtailPrinter, method printGenericRecord.
/**
 * Binary-encodes the record to {@code _out}, followed by a newline separator,
 * reusing per-schema encoders and datum writers from the instance caches.
 * Returns SUCCESS, or ERROR if encoding or writing fails.
 *
 * @see com.linkedin.databus2.tools.dtail.GenericRecordDtailPrinter#printGenericRecord(org.apache.avro.generic.GenericRecord)
 */
@Override
public ConsumerCallbackResult printGenericRecord(GenericRecord r) {
  ConsumerCallbackResult outcome = ConsumerCallbackResult.SUCCESS;
  try {
    // Look up (or lazily create and cache) the encoder for this schema.
    // NOTE(review): this is the pre-1.5 Avro BinaryEncoder constructor; newer
    // Avro versions create encoders via EncoderFactory — confirm the Avro
    // version before upgrading.
    BinaryEncoder encoder = _binEncoders.get(r.getSchema());
    if (encoder == null) {
      encoder = new BinaryEncoder(_out);
      _binEncoders.put(r.getSchema(), encoder);
    }
    // Same lazy get-or-create for the per-schema datum writer.
    GenericDatumWriter<GenericRecord> writer = binWriters.get(r.getSchema());
    if (writer == null) {
      writer = new GenericDatumWriter<GenericRecord>(r.getSchema());
      binWriters.put(r.getSchema(), writer);
    }
    writer.write(r, encoder);
    encoder.flush();
    _out.write('\n'); // one record per line
  } catch (RuntimeException re) {
    LOG.error("event dump error: " + re.getMessage(), re);
    outcome = ConsumerCallbackResult.ERROR;
  } catch (IOException ioe) {
    LOG.error("event dump error: " + ioe.getMessage(), ioe);
    outcome = ConsumerCallbackResult.ERROR;
  }
  return outcome;
}
Aggregations