Usage of org.apache.avro.specific.SpecificRecord in the Apache Samza project:
class AzureBlobAvroWriter, method encodeRecord.
/**
 * Serializes an Avro record to bytes using the schema embedded in the record itself.
 *
 * <p>A {@link SpecificDatumWriter} is used for generated ({@code SpecificRecord})
 * classes and a {@link GenericDatumWriter} for generic records.
 *
 * @param record the Avro record to encode; its own schema drives the encoding
 * @return the binary Avro encoding of the record
 * @throws SamzaException if serialization fails for any reason (original cause attached)
 */
@VisibleForTesting
byte[] encodeRecord(IndexedRecord record) {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Schema schema = record.getSchema();
  try {
    // EncoderFactory.get() is the shared immutable factory recommended by Avro;
    // allocating a new EncoderFactory per record is wasteful.
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    DatumWriter<IndexedRecord> writer;
    if (record instanceof SpecificRecord) {
      writer = new SpecificDatumWriter<>(schema);
    } else {
      writer = new GenericDatumWriter<>(schema);
    }
    writer.write(record, encoder);
    // BinaryEncoder buffers internally; flush before reading the stream.
    encoder.flush();
  } catch (Exception e) {
    throw new SamzaException("Unable to serialize Avro record using schema within the record: " + schema.toString(), e);
  }
  return out.toByteArray();
}
Usage of org.apache.avro.specific.SpecificRecord in the Apache Samza project:
class TestAzureBlobAvroWriter, method createOME.
/** Builds an outgoing envelope that carries a SpecificRecordEvent for the given stream. */
private OutgoingMessageEnvelope createOME(String streamName) {
  SpecificRecord event = new SpecificRecordEvent();
  return new OutgoingMessageEnvelope(new SystemStream(SYSTEM_NAME, streamName), event);
}
Usage of org.apache.avro.specific.SpecificRecord in the Apache Flink project:
class AvroRowDeSerializationSchemaTest, method testSpecificSerializeDeserializeFromClass.
@Test
public void testSpecificSerializeDeserializeFromClass() throws IOException {
  // Round-trip a Row through Avro (de)serialization schemas built from the generated class.
  final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();
  final AvroRowSerializationSchema serializer = new AvroRowSerializationSchema(testData.f0);
  final AvroRowDeserializationSchema deserializer = new AvroRowDeserializationSchema(testData.f0);

  final Row roundTripped = deserializer.deserialize(serializer.serialize(testData.f2));

  assertEquals(testData.f2, roundTripped);
}
Usage of org.apache.avro.specific.SpecificRecord in the Apache Flink project:
class AvroTestUtils, method getSpecificTestData.
/**
 * Tests all Avro data types as well as nested types for a specific record.
 *
 * <p>Returns a triple of (generated record class, a fully-populated {@code User}
 * record, and the {@code Row} that is expected to result from deserializing that
 * record). The {@code Row} field indices must match the positional order of the
 * fields in the Avro schema of {@code User} — keep them in sync.
 */
public static Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> getSpecificTestData() {
// Nested record: Address, and the Row mirror of it (5 fields, schema order).
final Address addr = Address.newBuilder().setNum(42).setStreet("Main Street 42").setCity("Test City").setState("Test State").setZip("12345").build();
final Row rowAddr = new Row(5);
rowAddr.setField(0, 42); // num
rowAddr.setField(1, "Main Street 42"); // street
rowAddr.setField(2, "Test City"); // city
rowAddr.setField(3, "Test State"); // state
rowAddr.setField(4, "12345"); // zip
// Top-level record exercising every Avro type: primitives, nullable fields,
// arrays, enum, map, fixed, union, nested record, bytes, logical date/time
// types, and decimal (both bytes- and fixed-backed).
final User user = User.newBuilder().setName("Charlie").setFavoriteNumber(null).setFavoriteColor("blue").setTypeLongTest(1337L).setTypeDoubleTest(1.337d).setTypeNullTest(null).setTypeBoolTest(false).setTypeArrayString(Arrays.asList("hello", "world")).setTypeArrayBoolean(Arrays.asList(true, true, false)).setTypeNullableArray(null).setTypeEnum(Colors.RED).setTypeMap(Collections.singletonMap("test", 12L)).setTypeFixed(new Fixed16(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 })).setTypeUnion(12.0).setTypeNested(addr).setTypeBytes(ByteBuffer.allocate(10)).setTypeDate(LocalDate.parse("2014-03-01")).setTypeTimeMillis(LocalTime.parse("12:12:12")).setTypeTimeMicros(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS)).setTypeTimestampMillis(Instant.parse("2014-03-01T12:12:12.321Z")).setTypeTimestampMicros(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS)).setTypeDecimalBytes(ByteBuffer.wrap(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray())).setTypeDecimalFixed(new Fixed2(BigDecimal.valueOf(2000, 2).unscaledValue().toByteArray())).build();
// Row mirror of the User record (23 fields, schema order). Note the expected
// Row-side representations: enums become their String name, fixed/bytes become
// byte[], and the logical date/time types map to java.sql.Date/Time/Timestamp.
final Row rowUser = new Row(23);
rowUser.setField(0, "Charlie"); // name
rowUser.setField(1, null); // favorite_number (nullable)
rowUser.setField(2, "blue"); // favorite_color
rowUser.setField(3, 1337L); // type_long_test
rowUser.setField(4, 1.337d); // type_double_test
rowUser.setField(5, null); // type_null_test
rowUser.setField(6, false); // type_bool_test
rowUser.setField(7, new String[] { "hello", "world" }); // type_array_string
rowUser.setField(8, new Boolean[] { true, true, false }); // type_array_boolean
rowUser.setField(9, null); // type_nullable_array
rowUser.setField(10, "RED"); // type_enum — enum maps to its String name
rowUser.setField(11, Collections.singletonMap("test", 12L)); // type_map
rowUser.setField(12, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }); // type_fixed as byte[]
rowUser.setField(13, 12.0); // type_union
rowUser.setField(14, rowAddr); // type_nested — nested record becomes a nested Row
rowUser.setField(15, new byte[10]); // type_bytes
rowUser.setField(16, Date.valueOf("2014-03-01")); // type_date
rowUser.setField(17, Time.valueOf("12:12:12")); // type_time_millis
rowUser.setField(18, Time.valueOf(LocalTime.ofSecondOfDay(0).plus(123456L, ChronoUnit.MICROS))); // type_time_micros
rowUser.setField(19, Timestamp.valueOf("2014-03-01 12:12:12.321")); // type_timestamp_millis
rowUser.setField(20, Timestamp.from(Instant.ofEpochSecond(0).plus(123456L, ChronoUnit.MICROS))); // type_timestamp_micros
rowUser.setField(21, BigDecimal.valueOf(2000, 2)); // type_decimal_bytes
rowUser.setField(22, BigDecimal.valueOf(2000, 2)); // type_decimal_fixed
final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> t = new Tuple3<>();
t.f0 = User.class;
t.f1 = user;
t.f2 = rowUser;
return t;
}
Usage of org.apache.avro.specific.SpecificRecord in the Apache Flink project:
class AvroRowDeSerializationSchemaTest, method testSpecificSerializeDeserializeFromSchema.
@Test
public void testSpecificSerializeDeserializeFromSchema() throws IOException {
  // Same round-trip as the class-based variant, but the (de)serialization
  // schemas are constructed from the record's Avro schema JSON string.
  final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();
  final String avroSchemaJson = testData.f1.getSchema().toString();
  final AvroRowSerializationSchema serializer = new AvroRowSerializationSchema(avroSchemaJson);
  final AvroRowDeserializationSchema deserializer = new AvroRowDeserializationSchema(avroSchemaJson);

  final Row roundTripped = deserializer.deserialize(serializer.serialize(testData.f2));

  assertEquals(testData.f2, roundTripped);
}
Aggregations