use of org.apache.avro.specific.SpecificDatumWriter in project flink by apache.
the class GlueSchemaRegistryInputStreamDeserializerTest method testGetSchemaAndDeserializedStream_withoutCompression_succeeds.
/**
 * Tests that getSchemaAndDeserializedStream works when compression is not enabled.
 */
@Test
public void testGetSchemaAndDeserializedStream_withoutCompression_succeeds() throws IOException {
    compressionByte = COMPRESSION_DEFAULT_BYTE;
    compressionHandler = new GlueSchemaRegistryDefaultCompression();
    ByteArrayOutputStream byteArrayOutputStream =
            buildByteArrayOutputStream(AWSSchemaRegistryConstants.HEADER_VERSION_BYTE, compressionByte);
    byte[] bytes = writeToExistingStream(
            byteArrayOutputStream, encodeData(userDefinedPojo, new SpecificDatumWriter<>(userSchema)));
    MutableByteArrayInputStream mutableByteArrayInputStream = new MutableByteArrayInputStream();
    mutableByteArrayInputStream.setBuffer(bytes);
    glueSchemaRegistryDeserializationFacade =
            new MockGlueSchemaRegistryDeserializationFacade(bytes, glueSchema, NONE);
    GlueSchemaRegistryInputStreamDeserializer glueSchemaRegistryInputStreamDeserializer =
            new GlueSchemaRegistryInputStreamDeserializer(glueSchemaRegistryDeserializationFacade);
    Schema resultSchema =
            glueSchemaRegistryInputStreamDeserializer.getSchemaAndDeserializedStream(mutableByteArrayInputStream);
    assertThat(resultSchema.toString()).isEqualTo(glueSchema.getSchemaDefinition());
}
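For context, once the registry header bytes are stripped, what remains is plain Avro binary. A minimal sketch of that read path, decoding with a GenericDatumReader against the writer schema; the helper name is hypothetical, not part of the test fixture:

import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

// Hypothetical helper: decode raw Avro binary using the writer schema.
static GenericRecord decode(byte[] avroBytes, Schema writerSchema) throws IOException {
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(writerSchema);
    Decoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, null);
    return reader.read(null, decoder);
}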
use of org.apache.avro.specific.SpecificDatumWriter in project avro-kafka-storm by ransilberman.
the class MainTest method testCompiledDatumRecord.
@Test
public void testCompiledDatumRecord() throws IOException, InterruptedException {
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(getClass().getResourceAsStream("LPEvent.avsc"));
    LPEvent datum = new LPEvent();
    datum.setRevision(1L);
    datum.setSiteId("28280110");
    datum.setEventType("PLine");
    datum.setTimeStamp(System.currentTimeMillis());
    datum.setSessionId("123456II");
    pline plineDatum = new pline();
    plineDatum.setText("Hello, I am your agent");
    plineDatum.setLineType(2);
    plineDatum.setRepId("REPID7777");
    datum.setSubrecord(plineDatum);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<LPEvent> writer = new SpecificDatumWriter<LPEvent>(LPEvent.class);
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush();
    out.close();
    Message message = new Message(out.toByteArray());
    Properties props = new Properties();
    props.put("zk.connect", zkConnection);
    Producer<Message, Message> producer =
            new kafka.javaapi.producer.Producer<Message, Message>(new ProducerConfig(props));
    producer.send(new ProducerData<Message, Message>(topic, message));
}
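The consumer side of this round trip is not shown in the test. A minimal hedged sketch of decoding the message payload back into an LPEvent with SpecificDatumReader; the helper name is illustrative:

import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;

// Hypothetical helper: decode the bytes written by the SpecificDatumWriter above.
static LPEvent decodeLPEvent(byte[] payload) throws IOException {
    // SpecificDatumReader resolves against the schema compiled into LPEvent.
    SpecificDatumReader<LPEvent> reader = new SpecificDatumReader<LPEvent>(LPEvent.class);
    Decoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    return reader.read(null, decoder);
}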
use of org.apache.avro.specific.SpecificDatumWriter in project eiger by wlloyd.
the class SerDeUtils method serializeWithSchema.
/**
 * Serializes a single object along with its Schema. NB: for performance-critical areas, it is <b>much</b>
 * more efficient to store the Schema independently.
 * @param o Object to serialize
 * @return buffer containing the schema string followed by the encoded record
 */
public static <T extends SpecificRecord> ByteBuffer serializeWithSchema(T o) throws IOException {
    OutputBuffer buff = new OutputBuffer();
    BinaryEncoder enc = new BinaryEncoder(buff);
    // write the schema as a leading string, then the record itself
    enc.writeString(new Utf8(o.getSchema().toString()));
    SpecificDatumWriter<T> writer = new SpecificDatumWriter<T>(o.getSchema());
    writer.write(o, enc);
    enc.flush();
    return ByteBuffer.wrap(buff.asByteArray());
}
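The matching read path would first decode the embedded schema string, then use it as the writer schema for the record. A hedged sketch using the current Avro DecoderFactory API; eiger's actual counterpart may differ, and the byte-copy is illustrative:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificRecord;

public static <T extends SpecificRecord> T deserializeWithSchema(ByteBuffer buffer) throws IOException {
    byte[] bytes = new byte[buffer.remaining()];
    buffer.duplicate().get(bytes);
    BinaryDecoder dec = DecoderFactory.get().binaryDecoder(bytes, null);
    // serializeWithSchema wrote the schema string first, then the record
    Schema schema = new Schema.Parser().parse(dec.readString(null).toString());
    SpecificDatumReader<T> reader = new SpecificDatumReader<T>(schema);
    return reader.read(null, dec);
}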
use of org.apache.avro.specific.SpecificDatumWriter in project rocketmq-externals by apache.
the class RocketMQAvroSpecifiedSerializer method serialize.
@Override
public byte[] serialize(T obj) {
    Validate.notNull(obj);
    DatumWriter<T> datumWriter = new SpecificDatumWriter<T>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    try {
        datumWriter.write(obj, encoder);
        encoder.flush();
        byte[] bytes = out.toByteArray();
        out.close();
        return bytes;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
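A deserializer counterpart would mirror this with SpecificDatumReader. A minimal sketch, assuming the surrounding class exposes the same schema field and type parameter T; this is not the project's actual read-side class:

// Hedged sketch of the read side (imports DatumReader, Decoder, DecoderFactory
// from org.apache.avro.io and SpecificDatumReader from org.apache.avro.specific).
public T deserialize(byte[] bytes) {
    Validate.notNull(bytes);
    DatumReader<T> datumReader = new SpecificDatumReader<T>(schema);
    Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    try {
        return datumReader.read(null, decoder);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}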
use of org.apache.avro.specific.SpecificDatumWriter in project spring-cloud-stream by spring-cloud.
the class AvroMessageConverterSerializationTests method testOriginalContentTypeHeaderOnly.
@Test
public void testOriginalContentTypeHeaderOnly() throws Exception {
    User specificRecord = new User();
    specificRecord.setName("joe");
    Schema v1 = new Schema.Parser().parse(
            AvroMessageConverterSerializationTests.class.getClassLoader().getResourceAsStream("schemas/user.avsc"));
    GenericRecord genericRecord = new GenericData.Record(v1);
    genericRecord.put("name", "joe");
    SchemaRegistryClient client = new DefaultSchemaRegistryClient();
    client.register("user", "avro", v1.toString());
    AvroSchemaRegistryClientMessageConverter converter =
            new AvroSchemaRegistryClientMessageConverter(client, new NoOpCacheManager());
    converter.setDynamicSchemaGenerationEnabled(false);
    converter.afterPropertiesSet();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DatumWriter<User> writer = new SpecificDatumWriter<>(User.class);
    Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
    writer.write(specificRecord, encoder);
    encoder.flush();
    Message source = MessageBuilder.withPayload(baos.toByteArray())
            .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_OCTET_STREAM)
            .setHeader(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE, "application/vnd.user.v1+avro")
            .build();
    Object converted = converter.fromMessage(source, User.class);
    Assert.assertNotNull(converted);
    Assert.assertEquals(specificRecord.getName().toString(), ((User) converted).getName().toString());
}
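Independent of the converter, the raw payload produced above can be decoded directly with a SpecificDatumReader. A small hedged sketch; the helper name is hypothetical, not part of the test class:

import java.io.IOException;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;

// Hypothetical helper mirroring the write path in the test above.
static User decodeUser(byte[] payload) throws IOException {
    DatumReader<User> reader = new SpecificDatumReader<>(User.class);
    Decoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    return reader.read(null, decoder);
}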