
Example 11 with Encoder

use of org.apache.avro.io.Encoder in project hive by apache.

the class TestHBaseSerDe method getTestAvroBytesFromClass2.

private byte[] getTestAvroBytesFromClass2(int i) throws IOException {
    Employee employee = new Employee();
    employee.setEmployeeName("Avro Employee" + i);
    employee.setEmployeeID(11111L);
    employee.setGender(Gender.FEMALE);
    employee.setAge(25L);
    Address address = new Address();
    address.setAddress1("Avro First Address" + i);
    address.setAddress2("Avro Second Address" + i);
    address.setCity("Avro City" + i);
    address.setZipcode(123456L);
    Map<CharSequence, CharSequence> metadata = new HashMap<CharSequence, CharSequence>();
    metadata.put("testkey", "testvalue");
    address.setMetadata(metadata);
    HomePhone hPhone = new HomePhone();
    hPhone.setAreaCode(999L);
    hPhone.setNumber(1234567890L);
    OfficePhone oPhone = new OfficePhone();
    oPhone.setAreaCode(999L);
    oPhone.setNumber(1234455555L);
    ContactInfo contact = new ContactInfo();
    List<Address> addresses = new ArrayList<Address>();
    // set value for the union type
    address.setCounty(hPhone);
    addresses.add(address);
    addresses.add(address);
    contact.setAddress(addresses);
    contact.setHomePhone(hPhone);
    contact.setOfficePhone(oPhone);
    employee.setContactInfo(contact);
    DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<Employee>(Employee.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    // write out a header for the payload
    out.write(TEST_BYTE_ARRAY);
    employeeWriter.write(employee, encoder);
    encoder.flush();
    return out.toByteArray();
}
Also used : OfficePhone(org.apache.hadoop.hive.hbase.avro.OfficePhone) Address(org.apache.hadoop.hive.hbase.avro.Address) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ByteArrayOutputStream(java.io.ByteArrayOutputStream) SpecificDatumWriter(org.apache.avro.specific.SpecificDatumWriter) Employee(org.apache.hadoop.hive.hbase.avro.Employee) HomePhone(org.apache.hadoop.hive.hbase.avro.HomePhone) Encoder(org.apache.avro.io.Encoder) ContactInfo(org.apache.hadoop.hive.hbase.avro.ContactInfo)
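The bytes produced here can only be decoded by a reader that strips the same fixed-length header first. As a rough companion sketch (not part of the Hive test; the method name is made up, and it relies on org.apache.avro.io.DecoderFactory and org.apache.avro.specific.SpecificDatumReader), the payload could be read back by skipping TEST_BYTE_ARRAY and handing the remainder to a SpecificDatumReader, assuming Employee is the Avro-generated class used above:

private Employee getTestAvroEmployeeFromBytes(byte[] payload) throws IOException {
    // skip the header written before the Avro-encoded body
    int headerLength = TEST_BYTE_ARRAY.length;
    Decoder decoder = DecoderFactory.get().binaryDecoder(payload, headerLength, payload.length - headerLength, null);
    DatumReader<Employee> employeeReader = new SpecificDatumReader<Employee>(Employee.class);
    // decode a single Employee record from the remaining bytes
    return employeeReader.read(null, decoder);
}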

Example 12 with Encoder

use of org.apache.avro.io.Encoder in project databus by linkedin.

the class AvroConverter method convert.

public void convert(InputStream in, OutputStream out) throws IOException {
    JsonGenerator jsonGenerator = (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out));
    if (AvroFormat.JSON == _outputFormat)
        jsonGenerator.useDefaultPrettyPrinter();
    List<GenericRecord> result = convert(in);
    Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat) ? new BinaryEncoder(out) : new JsonEncoder(_outputSchema, jsonGenerator);
    GenericDatumWriter<GenericRecord> genericWriter = new GenericDatumWriter<GenericRecord>(_outputSchema);
    for (GenericRecord r : result) {
        genericWriter.write(r, outputEncoder);
    }
    outputEncoder.flush();
    out.flush();
}
Also used : BinaryEncoder(org.apache.avro.io.BinaryEncoder) JsonEncoder(org.apache.avro.io.JsonEncoder) Encoder(org.apache.avro.io.Encoder) BinaryEncoder(org.apache.avro.io.BinaryEncoder) JsonEncoder(org.apache.avro.io.JsonEncoder) JsonFactory(org.codehaus.jackson.JsonFactory) JsonGenerator(org.codehaus.jackson.JsonGenerator) OutputStreamWriter(java.io.OutputStreamWriter) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) GenericRecord(org.apache.avro.generic.GenericRecord)
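The direct BinaryEncoder and JsonEncoder constructors used here predate Avro 1.5; newer releases route encoder construction through EncoderFactory instead. A hedged sketch of the equivalent encoder selection (not the databus code; the pretty flag stands in for the useDefaultPrettyPrinter() call on the JsonGenerator):

Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat)
    // factory-built binary encoders buffer internally, so the existing outputEncoder.flush() still matters
    ? EncoderFactory.get().binaryEncoder(out, null)
    // jsonEncoder(schema, out, pretty) writes JSON straight to the stream, replacing the Jackson JsonGenerator wiring
    : EncoderFactory.get().jsonEncoder(_outputSchema, out, true);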

Example 13 with Encoder

use of org.apache.avro.io.Encoder in project databus by linkedin.

the class OpenReplicatorAvroEventFactory method serializeEvent.

protected byte[] serializeEvent(GenericRecord record) throws EventCreationException {
    // Serialize the row
    byte[] serializedValue;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Encoder encoder = new BinaryEncoder(bos);
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        writer.write(record, encoder);
        serializedValue = bos.toByteArray();
    } catch (IOException ex) {
        throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
    } catch (RuntimeException ex) {
        // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
        _log.error("Exception for record: " + record + " with schema: " + record.getSchema().getFullName());
        throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
    }
    return serializedValue;
}
Also used : BinaryEncoder(org.apache.avro.io.BinaryEncoder) Encoder(org.apache.avro.io.Encoder) BinaryEncoder(org.apache.avro.io.BinaryEncoder) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) IOException(java.io.IOException) GenericRecord(org.apache.avro.generic.GenericRecord)
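With the pre-1.5 BinaryEncoder used here, bytes go straight to the ByteArrayOutputStream, so returning bos.toByteArray() without a flush works. A factory-built encoder buffers internally, so a port to Avro 1.5+ needs an explicit flush; a minimal sketch of what that body might look like (an assumption about the migration, not the databus code as shipped):

protected byte[] serializeEvent(GenericRecord record) throws EventCreationException {
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // EncoderFactory replaces the direct BinaryEncoder constructor in Avro 1.5+
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bos, null);
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        writer.write(record, encoder);
        // required with the buffered encoder: without it toByteArray() may miss trailing bytes
        encoder.flush();
        return bos.toByteArray();
    } catch (IOException ex) {
        throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
    } catch (RuntimeException ex) {
        // Avro reports some serialization failures as unchecked exceptions
        throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
    }
}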

Example 14 with Encoder

use of org.apache.avro.io.Encoder in project databus by linkedin.

the class OracleAvroGenericEventFactory method serializeEvent.

protected byte[] serializeEvent(GenericRecord record, long scn, long timestamp, ResultSet row, DbusEventBufferAppendable eventBuffer, boolean enableTracing, DbusEventsStatisticsCollector dbusEventsStatisticsCollector) throws EventCreationException, UnsupportedKeyException {
    // Serialize the row
    byte[] serializedValue;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Encoder encoder = new BinaryEncoder(bos);
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
        writer.write(record, encoder);
        serializedValue = bos.toByteArray();
    } catch (IOException ex) {
        throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
    } catch (RuntimeException ex) {
        // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
        throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
    }
    return serializedValue;
}
Also used : BinaryEncoder(org.apache.avro.io.BinaryEncoder) Encoder(org.apache.avro.io.Encoder) BinaryEncoder(org.apache.avro.io.BinaryEncoder) EventCreationException(com.linkedin.databus2.producers.EventCreationException) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) IOException(java.io.IOException) GenericRecord(org.apache.avro.generic.GenericRecord)
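Since a factory like this serializes one row per event, the second argument of binaryEncoder() can recycle the encoder across calls instead of allocating a new buffer each time. A minimal sketch assuming Avro 1.5+; the _reusedEncoder field and the writeRecord name are made up and not part of OracleAvroGenericEventFactory:

// hypothetical field holding the encoder to recycle between calls
private BinaryEncoder _reusedEncoder;

private byte[] writeRecord(GenericRecord record) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // passing the previous encoder re-binds it to the new stream and reuses its internal buffer
    _reusedEncoder = EncoderFactory.get().binaryEncoder(bos, _reusedEncoder);
    new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, _reusedEncoder);
    // the buffered encoder must be flushed before the stream contents are read
    _reusedEncoder.flush();
    return bos.toByteArray();
}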

Example 15 with Encoder

use of org.apache.avro.io.Encoder in project rest.li by linkedin.

the class AvroUtil method bytesFromGenericRecord.

public static byte[] bytesFromGenericRecord(GenericRecord record) throws IOException {
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AvroAdapter avroAdapter = AvroAdapterFinder.getAvroAdapter();
    Encoder binaryEncoder = avroAdapter.createBinaryEncoder(outputStream);
    writer.setSchema(record.getSchema());
    writer.write(record, binaryEncoder);
    binaryEncoder.flush();
    return outputStream.toByteArray();
}
Also used : AvroAdapter(com.linkedin.data.avro.AvroAdapter) Encoder(org.apache.avro.io.Encoder) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericRecord(org.apache.avro.generic.GenericRecord)
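For a round trip, the bytes produced by bytesFromGenericRecord can be turned back into a GenericRecord given the writer's schema. A hedged companion sketch (not part of rest.li; it uses the plain Avro DecoderFactory rather than the AvroAdapter abstraction, and the method name is made up):

public static GenericRecord genericRecordFromBytes(byte[] bytes, Schema schema) throws IOException {
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
    // binary Avro carries no schema, so the caller must supply the schema the bytes were written with
    Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    return reader.read(null, decoder);
}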

Aggregations

Encoder (org.apache.avro.io.Encoder) 20
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 15
GenericDatumWriter (org.apache.avro.generic.GenericDatumWriter) 12
GenericRecord (org.apache.avro.generic.GenericRecord) 12
IOException (java.io.IOException) 10
BinaryEncoder (org.apache.avro.io.BinaryEncoder) 10
Schema (org.apache.avro.Schema) 7
SerializationException (voldemort.serialization.SerializationException) 5
SpecificDatumWriter (org.apache.avro.specific.SpecificDatumWriter) 3
AvroAdapter (com.linkedin.data.avro.AvroAdapter) 2
ArrayList (java.util.ArrayList) 2
HashMap (java.util.HashMap) 2
Properties (java.util.Properties) 2
Producer (kafka.javaapi.producer.Producer) 2
Message (kafka.message.Message) 2
ProducerConfig (kafka.producer.ProducerConfig) 2
Test (org.junit.Test) 2
TopicNotFoundException (co.cask.cdap.api.messaging.TopicNotFoundException) 1
TopicId (co.cask.cdap.proto.id.TopicId) 1
HttpRequest (co.cask.common.http.HttpRequest) 1