Use of org.apache.avro.specific.SpecificDatumWriter in project druid by apache.
The class AvroStreamInputRowParserTest, method testParseSchemaless.
@Test
public void testParseSchemaless() throws SchemaValidationException, IOException {
    // serde test
    Repository repository = new InMemoryRepository(null);
    AvroStreamInputRowParser parser = new AvroStreamInputRowParser(
        PARSE_SPEC_SCHEMALESS,
        new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), repository),
        false,
        false
    );
    ByteBufferInputRowParser parser2 = jsonMapper.readValue(
        jsonMapper.writeValueAsString(parser),
        ByteBufferInputRowParser.class
    );
    repository = ((SchemaRepoBasedAvroBytesDecoder) ((AvroStreamInputRowParser) parser2).getAvroBytesDecoder()).getSchemaRepository();

    // prepare data
    GenericRecord someAvroDatum = buildSomeAvroDatum();

    // encode schema id
    Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC);
    TypedSchemaRepository<Integer, Schema, String> repositoryClient = new TypedSchemaRepository<>(
        repository,
        new IntegerConverter(),
        new AvroSchemaConverter(),
        new IdentityConverter()
    );
    Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema());
    ByteBuffer byteBuffer = ByteBuffer.allocate(4);
    converter.putSubjectAndId(id, byteBuffer);

    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        out.write(byteBuffer.array());
        // encode data
        DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(someAvroDatum.getSchema());
        // write avro datum to bytes
        writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
        InputRow inputRow = parser2.parseBatch(ByteBuffer.wrap(out.toByteArray())).get(0);
        assertInputRowCorrect(inputRow, DIMENSIONS_SCHEMALESS, false);
    }
}
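For context, the decode side of this round trip reads the 4-byte schema id back off the front of the buffer, resolves the schema through the repository, and decodes the remaining bytes. A minimal sketch of that pattern using plain Avro classes follows; it is not the actual SchemaRepoBasedAvroBytesDecoder code, the repository lookup is stubbed, and `payload` is assumed to be the byte array built above.

// Sketch: read the 4-byte schema id, then decode the rest of the payload.
ByteBuffer buf = ByteBuffer.wrap(payload);
int schemaId = buf.getInt();  // first 4 bytes written by the converter
Schema schema = SomeAvroDatum.getClassSchema();  // stand-in for a repository lookup by schemaId
GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(
    payload, buf.position(), payload.length - buf.position(), null);
GenericRecord decoded = reader.read(null, decoder);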
Use of org.apache.avro.specific.SpecificDatumWriter in project druid by apache.
The class InlineSchemaAvroBytesDecoderTest, method testParse.
@Test
public void testParse() throws Exception {
    GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    Schema schema = SomeAvroDatum.getClassSchema();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(schema);
    writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
    GenericRecord actual = new InlineSchemaAvroBytesDecoder(schema).parse(ByteBuffer.wrap(out.toByteArray()));
    Assert.assertEquals(someAvroDatum.get("id"), actual.get("id"));
}
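Here the buffer carries no header at all; the decoder is configured with a single fixed schema, so the parse step amounts to reading the whole buffer with a reader for that schema. A rough sketch of the equivalent plain-Avro decode (not the decoder's actual implementation):

// Sketch: decode the raw bytes directly against the known schema.
GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
GenericRecord decoded = reader.read(
    null,
    DecoderFactory.get().binaryDecoder(out.toByteArray(), null)
);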
Use of org.apache.avro.specific.SpecificDatumWriter in project druid by apache.
The class InlineSchemasAvroBytesDecoderTest, method testParse.
@Test
public void testParse() throws Exception {
    GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    Schema schema = SomeAvroDatum.getClassSchema();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // framing: a leading version byte, then the schema id (10, matching the key
    // in the decoder's schema map) as a 4-byte int, then the Avro-encoded datum
    out.write(new byte[]{1});
    out.write(ByteBuffer.allocate(4).putInt(10).array());
    DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(schema);
    writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
    GenericRecord actual = new InlineSchemasAvroBytesDecoder(ImmutableMap.of(10, schema))
        .parse(ByteBuffer.wrap(out.toByteArray()));
    Assert.assertEquals(someAvroDatum.get("id"), actual.get("id"));
}
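Reading that framing back by hand makes the layout explicit. A minimal sketch, assuming `bytes` holds the array built above and `schema` is the one registered under id 10; this mirrors the wire format, not the decoder's internal code:

// Sketch: peel off the version byte and the 4-byte schema id, then decode the rest.
ByteBuffer buf = ByteBuffer.wrap(bytes);
byte version = buf.get();    // the leading 1
int schemaId = buf.getInt(); // 10 in this test
GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
GenericRecord decoded = reader.read(
    null,
    DecoderFactory.get().binaryDecoder(bytes, buf.position(), bytes.length - buf.position(), null)
);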
Use of org.apache.avro.specific.SpecificDatumWriter in project druid by apache.
The class SchemaRegistryBasedAvroBytesDecoderTest, method getAvroDatum.
private byte[] getAvroDatum(Schema schema, GenericRecord someAvroDatum) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(schema);
    writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
    return out.toByteArray();
}
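The bytes this helper produces carry no schema information of their own, so turning them back into the generated class requires supplying the matching schema on the read side. A short sketch, assuming SomeAvroDatum is the generated SpecificRecord class used in the tests above:

// Sketch: decode the helper's output back into the generated class.
byte[] bytes = getAvroDatum(SomeAvroDatum.getClassSchema(), someAvroDatum);
SpecificDatumReader<SomeAvroDatum> reader = new SpecificDatumReader<>(SomeAvroDatum.class);
SomeAvroDatum decoded = reader.read(null, DecoderFactory.get().binaryDecoder(bytes, null));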
Use of org.apache.avro.specific.SpecificDatumWriter in project spring-integration by spring-projects.
The class SimpleToAvroTransformer, method doTransform.
@Override
protected Object doTransform(Message<?> message) {
    Assert.state(message.getPayload() instanceof SpecificRecord,
        "Payload must be an implementation of 'SpecificRecord'");
    SpecificRecord specific = (SpecificRecord) message.getPayload();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = this.encoderFactory.directBinaryEncoder(out, null);
    DatumWriter<Object> writer = new SpecificDatumWriter<>(specific.getSchema());
    try {
        writer.write(specific, encoder);
        encoder.flush();
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    return getMessageBuilderFactory()
        .withPayload(out.toByteArray())
        .copyHeaders(message.getHeaders())
        .setHeader(AvroHeaders.TYPE, this.typeIdExpression.getValue(this.evaluationContext, message))
        .build();
}
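The message returned by this transformer carries the raw Avro bytes as its payload plus a type hint in the AvroHeaders.TYPE header. A consumer that knows the generated class can decode the payload with a SpecificDatumReader. A minimal sketch, where `transformed` stands for the message produced above and MyRecord is a hypothetical generated SpecificRecord class, not something defined in this code:

// Sketch: decode the transformer's byte[] payload back into a specific record.
byte[] payload = (byte[]) transformed.getPayload();
SpecificDatumReader<MyRecord> reader = new SpecificDatumReader<>(MyRecord.class);
MyRecord record = reader.read(null, DecoderFactory.get().binaryDecoder(payload, null));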