Use of org.apache.avro.io.Encoder in project hive by apache.
From class TestHBaseSerDe, method getTestAvroBytesFromClass2:
private byte[] getTestAvroBytesFromClass2(int i) throws IOException {
  Employee employee = new Employee();
  employee.setEmployeeName("Avro Employee" + i);
  employee.setEmployeeID(11111L);
  employee.setGender(Gender.FEMALE);
  employee.setAge(25L);
  Address address = new Address();
  address.setAddress1("Avro First Address" + i);
  address.setAddress2("Avro Second Address" + i);
  address.setCity("Avro City" + i);
  address.setZipcode(123456L);
  Map<CharSequence, CharSequence> metadata = new HashMap<CharSequence, CharSequence>();
  metadata.put("testkey", "testvalue");
  address.setMetadata(metadata);
  HomePhone hPhone = new HomePhone();
  hPhone.setAreaCode(999L);
  hPhone.setNumber(1234567890L);
  OfficePhone oPhone = new OfficePhone();
  oPhone.setAreaCode(999L);
  oPhone.setNumber(1234455555L);
  ContactInfo contact = new ContactInfo();
  List<Address> addresses = new ArrayList<Address>();
  // set value for the union type
  address.setCounty(hPhone);
  addresses.add(address);
  addresses.add(address);
  contact.setAddress(addresses);
  contact.setHomePhone(hPhone);
  contact.setOfficePhone(oPhone);
  employee.setContactInfo(contact);
  DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<Employee>(Employee.class);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  // write out a header for the payload
  out.write(TEST_BYTE_ARRAY);
  employeeWriter.write(employee, encoder);
  encoder.flush();
  return out.toByteArray();
}
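For readers who want to inspect such a payload, a minimal decoding sketch (not part of the Hive test; it assumes the same TEST_BYTE_ARRAY header is skipped first and that the generated Employee class is on the classpath):

private Employee readTestAvroBytes(byte[] payload) throws IOException {
  // Skip the fixed header that was written before the Avro-encoded body
  int offset = TEST_BYTE_ARRAY.length;
  BinaryDecoder decoder =
      DecoderFactory.get().binaryDecoder(payload, offset, payload.length - offset, null);
  DatumReader<Employee> employeeReader = new SpecificDatumReader<Employee>(Employee.class);
  return employeeReader.read(null, decoder);
}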
Use of org.apache.avro.io.Encoder in project databus by linkedin.
From class AvroConverter, method convert:
public void convert(InputStream in, OutputStream out) throws IOException {
  JsonGenerator jsonGenerator =
      (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out));
  if (AvroFormat.JSON == _outputFormat) {
    jsonGenerator.useDefaultPrettyPrinter();
  }
  List<GenericRecord> result = convert(in);
  Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat)
      ? new BinaryEncoder(out)
      : new JsonEncoder(_outputSchema, jsonGenerator);
  GenericDatumWriter<GenericRecord> genericWriter =
      new GenericDatumWriter<GenericRecord>(_outputSchema);
  for (GenericRecord r : result) {
    genericWriter.write(r, outputEncoder);
  }
  outputEncoder.flush();
  out.flush();
}
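The new BinaryEncoder(out) and new JsonEncoder(_outputSchema, jsonGenerator) constructors above come from pre-1.5 Avro. A rough sketch of the same encoder selection against the newer EncoderFactory API (an assumption for illustration, not the databus code) could look like:

// Buffered binary encoder, or a JSON encoder with pretty printing enabled;
// the existing outputEncoder.flush() call is still required afterwards.
Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat)
    ? EncoderFactory.get().binaryEncoder(out, null)
    : EncoderFactory.get().jsonEncoder(_outputSchema, out, true);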
Use of org.apache.avro.io.Encoder in project databus by linkedin.
From class OpenReplicatorAvroEventFactory, method serializeEvent:
protected byte[] serializeEvent(GenericRecord record) throws EventCreationException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    // No explicit encoder flush here: the legacy direct BinaryEncoder writes straight to bos
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    _log.error("Exception for record: " + record + " with schema: " + record.getSchema().getFullName());
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  }
  return serializedValue;
}
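A complementary sketch of the inverse operation using the standard Avro API, assuming the writer's schema is available (the method name is illustrative, not part of the factory):

protected GenericRecord deserializeEvent(byte[] serializedValue, Schema schema) throws IOException {
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(serializedValue, null);
  GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  return reader.read(null, decoder);
}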
Use of org.apache.avro.io.Encoder in project databus by linkedin.
From class OracleAvroGenericEventFactory, method serializeEvent:
protected byte[] serializeEvent(GenericRecord record, long scn, long timestamp, ResultSet row,
                                DbusEventBufferAppendable eventBuffer, boolean enableTracing,
                                DbusEventsStatisticsCollector dbusEventsStatisticsCollector)
    throws EventCreationException, UnsupportedKeyException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  }
  return serializedValue;
}
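Because an event factory serializes many rows, one possible refinement (a sketch against the Avro 1.5+ EncoderFactory API, not the factory's actual code) is to reuse the output buffer and the encoder across calls:

private final ByteArrayOutputStream _bos = new ByteArrayOutputStream();
private BinaryEncoder _reusableEncoder;  // lazily created, reused across events (not thread-safe)

protected byte[] serializeEventReusing(GenericRecord record) throws IOException {
  _bos.reset();
  // Passing the previous encoder lets Avro reuse its internal buffer
  _reusableEncoder = EncoderFactory.get().binaryEncoder(_bos, _reusableEncoder);
  GenericDatumWriter<GenericRecord> writer =
      new GenericDatumWriter<GenericRecord>(record.getSchema());
  writer.write(record, _reusableEncoder);
  // The factory-created encoder is buffered, so flush before reading the bytes
  _reusableEncoder.flush();
  return _bos.toByteArray();
}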
Use of org.apache.avro.io.Encoder in project rest.li by linkedin.
From class AvroUtil, method bytesFromGenericRecord:
public static byte[] bytesFromGenericRecord(GenericRecord record) throws IOException {
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>();
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  AvroAdapter avroAdapter = AvroAdapterFinder.getAvroAdapter();
  Encoder binaryEncoder = avroAdapter.createBinaryEncoder(outputStream);
  writer.setSchema(record.getSchema());
  writer.write(record, binaryEncoder);
  binaryEncoder.flush();
  return outputStream.toByteArray();
}
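As a counterpart (a sketch with plain Avro classes rather than the rest.li AvroAdapter; the method name is illustrative), the bytes can be read back, optionally resolving the writer's schema against a newer reader schema:

public static GenericRecord genericRecordFromBytes(byte[] bytes, Schema writerSchema, Schema readerSchema)
    throws IOException {
  BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  // Avro resolves compatible differences (e.g. fields added with defaults) between the two schemas
  GenericDatumReader<GenericRecord> reader =
      new GenericDatumReader<GenericRecord>(writerSchema, readerSchema);
  return reader.read(null, decoder);
}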