Use of org.apache.avro.specific.SpecificDatumWriter in project spf4j by zolyfarkas: the Converter class, saveLabeledDumps method.
public static void saveLabeledDumps(final File file, final Map<String, SampleNode> pcollected) throws IOException {
    try (OutputStream bos = newOutputStream(file)) {
        final SpecificDatumWriter<StackSampleElement> writer = new SpecificDatumWriter<>(StackSampleElement.SCHEMA$);
        final BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(bos, null);
        // The dump is encoded as an Avro map: label -> array of StackSampleElement records.
        encoder.writeMapStart();
        // Drop entries with null sample trees before sizing the map block.
        final Map<String, SampleNode> collected = pcollected.entrySet().stream()
                .filter((e) -> e.getValue() != null)
                .collect(Collectors.toMap((e) -> e.getKey(), (e) -> e.getValue()));
        encoder.setItemCount(collected.size());
        for (Map.Entry<String, SampleNode> entry : collected.entrySet()) {
            encoder.startItem();
            encoder.writeString(entry.getKey());
            encoder.writeArrayStart();
            Converters.convert(Methods.ROOT, entry.getValue(), -1, 0, (final StackSampleElement object) -> {
                try {
                    encoder.setItemCount(1L);
                    encoder.startItem();
                    writer.write(object, encoder);
                } catch (IOException ex) {
                    throw new UncheckedIOException(ex);
                }
            });
            encoder.writeArrayEnd();
        }
        encoder.writeMapEnd();
        encoder.flush();
    }
}
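A reading-side counterpart is not part of the project snippet; the sketch below mirrors the map/array layout written above, assuming the same generated StackSampleElement class. The method name loadLabeledDumps, the Files.newInputStream call, and the returned collection types are illustrative assumptions.

// Minimal sketch only; relies on org.apache.avro.io.DecoderFactory, org.apache.avro.io.BinaryDecoder,
// org.apache.avro.specific.SpecificDatumReader, java.nio.file.Files and java.util collections.
public static Map<String, List<StackSampleElement>> loadLabeledDumps(final File file) throws IOException {
    try (InputStream bis = Files.newInputStream(file.toPath())) {
        final SpecificDatumReader<StackSampleElement> reader =
                new SpecificDatumReader<>(StackSampleElement.SCHEMA$);
        final BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(bis, null);
        final Map<String, List<StackSampleElement>> result = new HashMap<>();
        // Mirror writeMapStart/writeMapEnd: map blocks end when the decoder reports zero entries.
        long nrEntries = decoder.readMapStart();
        while (nrEntries > 0) {
            for (long i = 0; i < nrEntries; i++) {
                String label = decoder.readString();
                // Mirror writeArrayStart/writeArrayEnd for the per-label samples.
                List<StackSampleElement> samples = new ArrayList<>();
                long nrItems = decoder.readArrayStart();
                while (nrItems > 0) {
                    for (long j = 0; j < nrItems; j++) {
                        samples.add(reader.read(null, decoder));
                    }
                    nrItems = decoder.arrayNext();
                }
                result.put(label, samples);
            }
            nrEntries = decoder.mapNext();
        }
        return result;
    }
}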
Use of org.apache.avro.specific.SpecificDatumWriter in project spf4j by zolyfarkas: the Converter class, save method.
public static void save(final File file, final SampleNode collected) throws IOException {
    try (OutputStream bos = newOutputStream(file)) {
        final SpecificDatumWriter<StackSampleElement> writer = new SpecificDatumWriter<>(StackSampleElement.getClassSchema());
        final BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(bos, null);
        Converters.convert(Methods.ROOT, collected, -1, 0, (StackSampleElement object) -> {
            try {
                writer.write(object, encoder);
            } catch (IOException ex) {
                throw new UncheckedIOException(ex);
            }
        });
        encoder.flush();
    }
}
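Because save(..) writes StackSampleElement records back to back with no Avro container header, a reader has to know the schema up front and consume until end of input. The following is a minimal sketch under that assumption; the load name is hypothetical and not part of the project code.

// Minimal sketch only; uses a buffered BinaryDecoder so that isEnd() can detect end of input.
public static List<StackSampleElement> load(final File file) throws IOException {
    try (InputStream bis = Files.newInputStream(file.toPath())) {
        final SpecificDatumReader<StackSampleElement> reader =
                new SpecificDatumReader<>(StackSampleElement.getClassSchema());
        final BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bis, null);
        final List<StackSampleElement> samples = new ArrayList<>();
        while (!decoder.isEnd()) {
            samples.add(reader.read(null, decoder));
        }
        return samples;
    }
}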
Use of org.apache.avro.specific.SpecificDatumWriter in project spring-cloud-stream by spring-cloud: the AvroMessageConverterSerializationTests class, testOriginalContentTypeHeaderOnly method.
@Test
public void testOriginalContentTypeHeaderOnly() throws Exception {
    User specificRecord = new User();
    specificRecord.setName("joe");
    Schema v1 = new Schema.Parser().parse(AvroMessageConverterSerializationTests.class.getClassLoader()
            .getResourceAsStream("schemas/user.avsc"));
    GenericRecord genericRecord = new GenericData.Record(v1);
    genericRecord.put("name", "joe");
    SchemaRegistryClient client = new DefaultSchemaRegistryClient();
    client.register("user", "avro", v1.toString());
    AvroSchemaRegistryClientMessageConverter converter =
            new AvroSchemaRegistryClientMessageConverter(client, new NoOpCacheManager());
    converter.setDynamicSchemaGenerationEnabled(false);
    converter.afterPropertiesSet();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DatumWriter<User> writer = new SpecificDatumWriter<>(User.class);
    Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
    writer.write(specificRecord, encoder);
    encoder.flush();
    Message source = MessageBuilder.withPayload(baos.toByteArray())
            .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_OCTET_STREAM)
            .setHeader(BinderHeaders.BINDER_ORIGINAL_CONTENT_TYPE, "application/vnd.user.v1+avro")
            .build();
    Object converted = converter.fromMessage(source, User.class);
    Assert.assertNotNull(converted);
    Assert.assertEquals(specificRecord.getName().toString(), ((User) converted).getName().toString());
}
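For comparison, decoding the payload manually (without the registry-aware converter) is a few lines with SpecificDatumReader. The snippet below is an illustrative addition, not part of the original test, and assumes the same generated User class on both sides.

// Hypothetical manual decode of the bytes produced above.
DatumReader<User> userReader = new SpecificDatumReader<>(User.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
User decoded = userReader.read(null, decoder);
Assert.assertEquals("joe", decoded.getName().toString());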
Use of org.apache.avro.specific.SpecificDatumWriter in project divolte-collector by divolte: the AvroGenericRecordMapperTest class, testMapping method.
@Test
public void testMapping() throws Exception {
    /*
     * Test what happens when JSON samples are mapped to a specific Avro schema.
     *
     * The outcome can be either:
     * - the expected JSON result (defaults to the input JSON), or
     * - an expected exception.
     *
     * A fixture can also specify that particular deserialization options be [in]active.
     */
    try {
        final Object avroResult = reader.read(testFixture.jsonToMap, testFixture.avroSchema);
        // If we expected an exception, fail...
        testFixture.expectedException.ifPresent(e -> fail("Expected exception to be thrown: " + e));
        // ...otherwise verify the result...
        final JsonNode avroResultJson = JSON_MAPPER.readTree(GenericData.get().toString(avroResult));
        assertEquals(testFixture.expectedJson, avroResultJson);
        // ...and ensure it can be written out as a record.
        final DatumWriter<Object> writer = new SpecificDatumWriter<>(testFixture.avroSchema);
        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
            final Encoder encoder = EncoderFactory.get().directBinaryEncoder(byteArrayOutputStream, null);
            writer.write(avroResult, encoder);
        }
    } catch (final Exception e) {
        // Suppress the exception if it was expected; otherwise rethrow.
        testFixture.expectedException.filter(ee -> ee.isInstance(e)).orElseThrow(() -> e);
    }
}
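If the encoded bytes also needed verifying, a round-trip decode could be appended right after writer.write(..) inside the try-with-resources block. This is a hypothetical extension, not part of the original test, and assumes a GenericDatumReader over the same fixture schema.

// Hypothetical round-trip check: decode what was just written and compare its JSON form.
final DatumReader<Object> roundTripReader = new GenericDatumReader<>(testFixture.avroSchema);
final Decoder decoder = DecoderFactory.get().binaryDecoder(byteArrayOutputStream.toByteArray(), null);
final Object roundTripped = roundTripReader.read(null, decoder);
assertEquals(avroResultJson, JSON_MAPPER.readTree(GenericData.get().toString(roundTripped)));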
Use of org.apache.avro.specific.SpecificDatumWriter in project avro by apache: the TestSeekableByteArrayInput class, getSerializedMessage method.
private byte[] getSerializedMessage(IndexedRecord message, Schema schema) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    SpecificDatumWriter<IndexedRecord> writer = new SpecificDatumWriter<>();
    try (DataFileWriter<IndexedRecord> dfw = new DataFileWriter<>(writer).create(schema, baos)) {
        dfw.append(message);
    }
    return baos.toByteArray();
}