Example use of org.apache.avro.specific.SpecificDatumWriter in the Apache druid project:
method testParse of class AvroStreamInputRowParserTest.
/**
 * Round-trips the parser config through JSON serde, then feeds the
 * deserialized parser a schema-repo-framed Avro payload (4-byte schema id
 * followed by the raw datum bytes) and asserts the resulting InputRow.
 */
@Test
public void testParse() throws SchemaValidationException, IOException {
// serde test: serialize the parser to JSON and read it back as the interface type
Repository repository = new InMemoryRepository(null);
AvroStreamInputRowParser parser = new AvroStreamInputRowParser(PARSE_SPEC, new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), repository), false, false);
ByteBufferInputRowParser parser2 = jsonMapper.readValue(jsonMapper.writeValueAsString(parser), ByteBufferInputRowParser.class);
// switch to the repository instance held by the deserialized parser so the
// schema registration below is visible to parser2 when it decodes
repository = ((SchemaRepoBasedAvroBytesDecoder) ((AvroStreamInputRowParser) parser2).getAvroBytesDecoder()).getSchemaRepository();
// prepare data
GenericRecord someAvroDatum = buildSomeAvroDatum();
// encode schema id: register the schema and write the returned id into a
// 4-byte prefix, mirroring what the decoder expects to find before the datum
Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC);
TypedSchemaRepository<Integer, Schema, String> repositoryClient = new TypedSchemaRepository<>(repository, new IntegerConverter(), new AvroSchemaConverter(), new IdentityConverter());
Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema());
ByteBuffer byteBuffer = ByteBuffer.allocate(4);
converter.putSubjectAndId(id, byteBuffer);
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(byteBuffer.array());
// encode data
DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(someAvroDatum.getSchema());
// write avro datum to bytes; directBinaryEncoder is unbuffered, so no flush needed
writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
// decode the [schema id | datum] buffer with the round-tripped parser and verify
InputRow inputRow = parser2.parseBatch(ByteBuffer.wrap(out.toByteArray())).get(0);
assertInputRowCorrect(inputRow, DIMENSIONS, false);
}
Example use of org.apache.avro.specific.SpecificDatumWriter in the opfab operatorfabric-core project:
method serialize of class KafkaAvroWithoutRegistrySerializer.
/**
 * Serializes {@code record} to raw Avro binary (no schema-registry framing),
 * using the record's own schema.
 *
 * @param topic  Kafka topic name (unused; required by the Serializer contract)
 * @param record value to serialize; a {@code null} record yields an empty array
 * @return the Avro binary encoding of {@code record}, or an empty byte array
 * @throws SerializationException wrapping any encoding failure
 */
@Override
public byte[] serialize(String topic, T record) {
    if (record == null) {
        return new byte[0];
    }
    // try-with-resources so the stream is closed even if write() throws
    // (close() is a no-op for ByteArrayOutputStream, but the idiom is uniform);
    // the original only closed on the success path.
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        Schema schema = record.getSchema();
        BinaryEncoder encoder = this.encoderFactory.directBinaryEncoder(out, null);
        DatumWriter<T> writer = new SpecificDatumWriter<>(schema);
        writer.write(record, encoder);
        // push any bytes still buffered in the encoder into the stream
        encoder.flush();
        return out.toByteArray();
    } catch (RuntimeException | IOException ioEx) {
        // preserve the cause so the failure is diagnosable upstream
        throw new SerializationException("Error serializing Avro message", ioEx);
    }
}
Example use of org.apache.avro.specific.SpecificDatumWriter in the companieshouse psc-discrepancies.api.ch.gov.uk project:
method serialize of class AvroSerializer.
/**
 * Serializes a {@code ChipsRestInterfacesSend} message to Avro binary bytes.
 *
 * @param topic Kafka topic name (unused; required by the Serializer contract)
 * @param data  message to serialize; its own schema drives the encoding
 * @return the Avro binary encoding of {@code data}
 * @throws SerializationException wrapping the underlying {@link IOException}
 */
@Override
public byte[] serialize(String topic, ChipsRestInterfacesSend data) {
    DatumWriter<ChipsRestInterfacesSend> datumWriter = new SpecificDatumWriter<>();
    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        datumWriter.setSchema(data.getSchema());
        datumWriter.write(data, encoder);
        // a single flush before toByteArray() is sufficient; the original
        // flushed a second time after snapshotting the bytes, which was dead work
        encoder.flush();
        return out.toByteArray();
    } catch (IOException e) {
        // preserve the cause (the original dropped it) so callers can diagnose
        throw new SerializationException("Error when serializing ChipsRestInterfacesSend to byte[]", e);
    }
}
Example use of org.apache.avro.specific.SpecificDatumWriter in the linkedin avro-util project:
method roundtrip of class AvroCompatibilityHelperGeneratedEnumClassesTest.
/**
 * Encodes the given value to Avro binary and decodes it back, asserting the
 * decoded value equals the original.
 */
private void roundtrip(Object thingie) throws Exception {
    // Derive the writer/reader schema from the runtime class of the value.
    Schema schema = SpecificData.get().getSchema(thingie.getClass());

    // Encode to Avro binary in memory.
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    BinaryEncoder encoder = AvroCompatibilityHelper.newBinaryEncoder(encoded, false, null);
    new SpecificDatumWriter<Object>(schema).write(thingie, encoder);
    encoder.flush();

    // Decode the produced bytes and verify the round trip is lossless.
    ByteArrayInputStream in = new ByteArrayInputStream(encoded.toByteArray());
    BinaryDecoder decoder = AvroCompatibilityHelper.newBinaryDecoder(in, false, null);
    Object decoded = new SpecificDatumReader<Object>(schema).read(null, decoder);
    Assert.assertEquals(decoded, thingie);
}
Example use of org.apache.avro.specific.SpecificDatumWriter in the linkedin avro-util project:
method main of class Generate17TestResources.
/**
 * Generates Avro 1.7 test payloads: writes a sample {@code RecordWithUnion}
 * as both binary and JSON under {@code <outputPath>/by17}.
 *
 * @param args exactly one argument — the output root path; otherwise exits(1)
 */
public static void main(String[] args) {
    if (args == null || args.length != 1) {
        System.err.println("exactly single argument required - output path. instead got " + Arrays.toString(args));
        System.exit(1);
    }
    Path outputRoot = Paths.get(args[0].trim()).toAbsolutePath();
    Path by17Root = outputRoot.resolve("by17");
    // sample value: a record whose union field holds an inner record with f=17
    by17.RecordWithUnion outer = new by17.RecordWithUnion();
    outer.setF(new by17.InnerUnionRecord());
    outer.getF().setF(17);
    try {
        SpecificDatumWriter<by17.RecordWithUnion> writer = new SpecificDatumWriter<>(outer.getSchema());
        // binary payload; try-with-resources closes the file stream (the
        // original never closed either stream, leaking the handles)
        Path binaryRecordWithUnion = TestUtil.getNewFile(by17Root, "RecordWithUnion.binary");
        try (OutputStream binaryOut = Files.newOutputStream(binaryRecordWithUnion)) {
            BinaryEncoder binaryEnc = EncoderFactory.get().binaryEncoder(binaryOut, null);
            writer.write(outer, binaryEnc);
            binaryEnc.flush();
        }
        // JSON payload
        Path jsonRecordWithUnion = TestUtil.getNewFile(by17Root, "RecordWithUnion.json");
        try (OutputStream jsonOut = Files.newOutputStream(jsonRecordWithUnion)) {
            JsonEncoder jsonEnc = EncoderFactory.get().jsonEncoder(outer.getSchema(), jsonOut);
            writer.write(outer, jsonEnc);
            jsonEnc.flush();
        }
    } catch (Exception e) {
        System.err.println("failed to generate payloads");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}
Aggregations