Use of org.apache.avro.generic.GenericDatumWriter in project cdap by caskdata.
The class HiveExploreServiceStreamTest, method createAvroEvent.
private byte[] createAvroEvent(org.apache.avro.Schema schema, Object... values) throws IOException {
  GenericRecordBuilder builder = new GenericRecordBuilder(schema);
  int i = 0;
  for (org.apache.avro.Schema.Field field : schema.getFields()) {
    builder.set(field.name(), values[i]);
    i++;
  }
  GenericRecord record = builder.build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  writer.write(record, encoder);
  encoder.flush();
  out.close();
  return out.toByteArray();
}
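For reference, the matching read side uses GenericDatumReader with a BinaryDecoder from org.apache.avro.io.DecoderFactory. A minimal sketch (readAvroEvent is a hypothetical helper, not part of the original test):

private GenericRecord readAvroEvent(org.apache.avro.Schema schema, byte[] bytes) throws IOException {
  // Decode the binary payload produced by createAvroEvent back into a record
  DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return reader.read(null, decoder);
}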
Use of org.apache.avro.generic.GenericDatumWriter in project cdap by caskdata.
The class FileWriterHelper, method generateAvroFile.
/**
* Generate an Avro file of schema (key String, value String) containing the records ("<prefix>i", "#i")
* for start <= i < end. The file is written using the passed-in output stream.
*/
public static void generateAvroFile(OutputStream out, String prefix, int start, int end) throws IOException {
  Schema schema = Schema.createRecord("kv", null, null, false);
  schema.setFields(ImmutableList.of(
    new Schema.Field("key", Schema.create(Schema.Type.STRING), null, null),
    new Schema.Field("value", Schema.create(Schema.Type.STRING), null, null)));
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
  dataFileWriter.create(schema, out);
  try {
    for (int i = start; i < end; i++) {
      GenericRecord kv = new GenericData.Record(schema);
      kv.put("key", prefix + i);
      kv.put("value", "#" + i);
      dataFileWriter.append(kv);
    }
  } finally {
    Closeables.closeQuietly(dataFileWriter);
    Closeables.closeQuietly(out);
  }
}
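Because DataFileWriter produces an Avro container file, the writer schema travels in the file header and the read side needs no schema argument. A minimal sketch, assuming a hypothetical readAvroFile helper alongside generateAvroFile:

public static void readAvroFile(InputStream in) throws IOException {
  // The writer schema is read from the container file header automatically
  DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
  try (DataFileStream<GenericRecord> dataFileStream = new DataFileStream<>(in, datumReader)) {
    for (GenericRecord kv : dataFileStream) {
      System.out.println(kv.get("key") + " -> " + kv.get("value"));
    }
  }
}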
Use of org.apache.avro.generic.GenericDatumWriter in project cdap by caskdata.
The class AvroRecordFormatTest, method toStreamEvent.
private StreamEvent toStreamEvent(GenericRecord record, boolean writeSchema) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema());
  writer.write(record, encoder);
  encoder.flush();
  out.close();
  byte[] serializedRecord = out.toByteArray();
  String schemaString = record.getSchema().toString();
  Map<String, String> headers = Maps.newHashMap();
  if (writeSchema) {
    headers.put(AvroRecordFormat.SCHEMA, schemaString);
    headers.put(AvroRecordFormat.SCHEMA_HASH, Hashing.md5().hashString(schemaString, Charsets.UTF_8).toString());
  }
  return new StreamEvent(headers, ByteBuffer.wrap(serializedRecord));
}
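The inverse direction would parse the schema back out of the event headers and decode the body. A minimal sketch (fromStreamEvent is hypothetical, and assumes the schema was written into the headers as above):

private GenericRecord fromStreamEvent(StreamEvent event) throws IOException {
  // Recover the writer schema from the event headers
  org.apache.avro.Schema schema =
    new org.apache.avro.Schema.Parser().parse(event.getHeaders().get(AvroRecordFormat.SCHEMA));
  // Copy the body ByteBuffer into a byte array for the decoder
  ByteBuffer body = event.getBody();
  byte[] bytes = new byte[body.remaining()];
  body.duplicate().get(bytes);
  DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return reader.read(null, decoder);
}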
Use of org.apache.avro.generic.GenericDatumWriter in project cdap by caskdata.
The class MapReduceStreamInputTestRun, method createEvent.
private byte[] createEvent(Schema schema, String ticker, int count, float price) throws IOException {
  GenericRecord record = new GenericRecordBuilder(schema)
    .set("ticker", ticker)
    .set("num_traded", count)
    .set("price", price)
    .build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  writer.write(record, encoder);
  encoder.flush();
  out.close();
  return out.toByteArray();
}
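A schema this helper could be called with might look like the following, built with Avro's SchemaBuilder. This is an assumed example: only the three field names are fixed by createEvent; the record name and sample values are illustrative.

Schema schema = SchemaBuilder.record("event").fields()
  .requiredString("ticker")
  .requiredInt("num_traded")
  .requiredFloat("price")
  .endRecord();
byte[] event = createEvent(schema, "YHOO", 10, 34.5f);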
Use of org.apache.avro.generic.GenericDatumWriter in project cdap by caskdata.
The class ClientMessagingService, method encodeRollbackDetail.
/**
 * Encodes the given {@link RollbackDetail} as expected by the rollback call. This method is rarely used,
 * as the call to {@link #rollback(TopicId, RollbackDetail)} expects a {@link ClientRollbackDetail} which
 * already contains the encoded bytes.
 *
 * This method looks very similar to {@code StoreHandler.encodeRollbackDetail}, but the two are intentionally
 * kept separate so that client-side classes can be moved to a separate module without any dependency on the
 * server side (this could also be done with a util method in a common module, but that is overkill for a
 * simple method like this for now).
 */
private ByteBuffer encodeRollbackDetail(RollbackDetail rollbackDetail) throws IOException {
  // Constructs the response object as GenericRecord
  Schema schema = Schemas.V1.PublishResponse.SCHEMA;
  GenericRecord record = new GenericData.Record(schema);
  record.put("transactionWritePointer", rollbackDetail.getTransactionWritePointer());
  GenericRecord rollbackRange = new GenericData.Record(schema.getField("rollbackRange").schema());
  rollbackRange.put("startTimestamp", rollbackDetail.getStartTimestamp());
  rollbackRange.put("startSequenceId", rollbackDetail.getStartSequenceId());
  rollbackRange.put("endTimestamp", rollbackDetail.getEndTimestamp());
  rollbackRange.put("endSequenceId", rollbackDetail.getEndSequenceId());
  record.put("rollbackRange", rollbackRange);
  ExposedByteArrayOutputStream os = new ExposedByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().directBinaryEncoder(os, null);
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  datumWriter.write(record, encoder);
  return os.toByteBuffer();
}
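Decoding such a payload mirrors the encoder, using directBinaryDecoder over the buffer. A minimal sketch (decodeRollbackDetail is a hypothetical helper, assuming the same PublishResponse schema and Avro's org.apache.avro.util.ByteBufferInputStream):

private GenericRecord decodeRollbackDetail(ByteBuffer buffer) throws IOException {
  Schema schema = Schemas.V1.PublishResponse.SCHEMA;
  DatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);
  // ByteBufferInputStream exposes the buffer as an InputStream for the decoder
  InputStream is = new ByteBufferInputStream(Collections.singletonList(buffer));
  Decoder decoder = DecoderFactory.get().directBinaryDecoder(is, null);
  return datumReader.read(null, decoder);
}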