Use of org.apache.avro.specific.SpecificDatumWriter in project spf4j by zolyfarkas.
The class SpecificRecordAppender, method writeSerializationError:
@SuppressFBWarnings("ITC_INHERITANCE_TYPE_CHECKING")
static void writeSerializationError(final Object object, final StringBuilder sb, final Exception ex) throws IOException {
  if (STRICT_SERIALIZATION) {
    // In strict mode, rethrow instead of degrading to an error envelope.
    if (ex instanceof IOException) {
      throw (IOException) ex;
    } else if (ex instanceof RuntimeException) {
      throw (RuntimeException) ex;
    } else {
      throw new IllegalStateException(ex);
    }
  }
  // Discard any partial output and emit a JSON error envelope instead.
  sb.setLength(0);
  sb.append("{\"SerializationError\":\n");
  try (AppendableOutputStream bos = new AppendableOutputStream(sb, Charsets.UTF_8)) {
    // Convert the exception to its Avro form (JThrowable) and JSON-encode it.
    JThrowable at = Converters.convert(ex);
    Schema schema = at.getSchema();
    SpecificDatumWriter<SpecificRecord> writer = new SpecificDatumWriter<>(schema);
    JsonEncoder jsonEncoder = EF.jsonEncoder(schema, bos, true);
    writer.write(at, jsonEncoder);
    jsonEncoder.flush();
  }
  sb.append(",\n");
  sb.append("\"ObjectAsString\":\n\"");
  // Append the object's toString, escaped for embedding in a JSON string.
  EscapeJsonStringAppendableWrapper escaper = new EscapeJsonStringAppendableWrapper(sb);
  escaper.append(object.toString());
  sb.append("\"}");
}
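EF, AppendableOutputStream, Converters, and EscapeJsonStringAppendableWrapper are spf4j helpers. For comparison, a minimal sketch of the same SpecificDatumWriter-to-JSON step using only the stock Avro API (the toJson name and standalone method are this sketch's own, not spf4j's):

// Minimal sketch: serialize any generated SpecificRecord to pretty-printed Avro JSON.
static String toJson(final SpecificRecord record) throws IOException {
  Schema schema = record.getSchema();
  SpecificDatumWriter<SpecificRecord> writer = new SpecificDatumWriter<>(schema);
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, bos, true); // true = pretty-print
  writer.write(record, jsonEncoder);
  jsonEncoder.flush(); // the JSON encoder buffers; flush before reading the bytes
  return bos.toString("UTF-8");
}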
Use of org.apache.avro.specific.SpecificDatumWriter in project spf4j by zolyfarkas.
The class SpecificRecordJsonAppender, method append:
@Override
public final void append(final SpecificRecord object, final Appendable appendTo) throws IOException {
  final Schema schema = object.getSchema();
  SpecificDatumWriter<SpecificRecord> writer = new SpecificDatumWriter<>(schema);
  // AvroCompatUtils abstracts over Avro version differences in the JSON encoder API.
  Encoder jsonEncoder = AvroCompatUtils.getJsonEncoder(schema, appendTo);
  writer.write(object, jsonEncoder);
  jsonEncoder.flush(); // the encoder buffers internally; flush to push everything to appendTo
}
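A hypothetical usage sketch (myRecord stands in for any generated SpecificRecord instance, and the no-arg constructor is assumed):

StringBuilder sb = new StringBuilder();
new SpecificRecordJsonAppender().append(myRecord, sb); // myRecord: any generated SpecificRecord
System.out.println(sb); // the record rendered as Avro JSON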
Use of org.apache.avro.specific.SpecificDatumWriter in project spf4j by zolyfarkas.
The class AvroTest, method testRw:
@Test
public void testRw() throws IOException {
  DataBlock data = DataBlock.newBuilder().setBaseTimestamp(0).setValues(Collections.emptyList()).build();
  try (ByteArrayBuilder bab = new ByteArrayBuilder()) {
    Schema schema = data.getSchema();
    // Write the record with the unbuffered (direct) binary encoder.
    SpecificDatumWriter<DataBlock> writer = new SpecificDatumWriter<>(schema);
    final BinaryEncoder directBinaryEncoder = EncoderFactory.get().directBinaryEncoder(bab, null);
    writer.write(data, directBinaryEncoder);
    directBinaryEncoder.flush();
    // Read the bytes back and verify the round trip.
    ByteArrayInputStream bis = new ByteArrayInputStream(bab.getBuffer(), 0, bab.size());
    SpecificDatumReader<DataBlock> reader = new SpecificDatumReader<>(schema);
    BinaryDecoder directBinaryDecoder = DecoderFactory.get().directBinaryDecoder(bis, null);
    DataBlock read = reader.read(null, directBinaryDecoder);
    Assert.assertEquals(data, read); // expected value goes first
  }
}
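directBinaryEncoder writes straight through to the underlying stream; EncoderFactory.get().binaryEncoder(out, null) returns a buffered variant that is generally faster for anything beyond tiny payloads. A sketch of the write side with the buffered encoder, under the same DataBlock setup as above:

// Buffered variant: identical API, but bytes sit in the encoder's buffer until flushed.
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bab, null);
writer.write(data, encoder);
encoder.flush(); // mandatory here, or the buffered bytes never reach the stream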
Use of org.apache.avro.specific.SpecificDatumWriter in project incubator-pulsar by apache.
The class JsonConverterTests, method serialize:
public static byte[] serialize(GenericRecord record, Schema schema) throws IOException {
  // SpecificDatumWriter extends GenericDatumWriter, so it can also write GenericRecords.
  SpecificDatumWriter<GenericRecord> datumWriter = new SpecificDatumWriter<>(schema);
  ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
  // Prefer the shared EncoderFactory.get() over instantiating a new factory.
  BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
  datumWriter.write(record, binaryEncoder);
  binaryEncoder.flush(); // the encoder buffers; flush before reading the byte array
  return byteArrayOutputStream.toByteArray();
}
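A minimal companion reader for the bytes this method produces (a sketch, not part of the Pulsar tests; GenericDatumReader is the read-side counterpart, and SpecificDatumReader would return generated classes instead):

public static GenericRecord deserialize(byte[] bytes, Schema schema) throws IOException {
  GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);
  BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return datumReader.read(null, binaryDecoder);
}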
Use of org.apache.avro.specific.SpecificDatumWriter in project druid by apache.
The class AvroStreamInputFormatTest, method testParse:
@Test
public void testParse() throws SchemaValidationException, IOException {
  Repository repository = new InMemoryRepository(null);
  AvroStreamInputFormat inputFormat = new AvroStreamInputFormat(
      flattenSpec,
      new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), repository),
      false,
      false);
  // Round-trip the input format through JSON to exercise its (de)serialization.
  NestedInputFormat inputFormat2 = jsonMapper.readValue(jsonMapper.writeValueAsString(inputFormat), NestedInputFormat.class);
  repository = ((SchemaRepoBasedAvroBytesDecoder) ((AvroStreamInputFormat) inputFormat2).getAvroBytesDecoder()).getSchemaRepository();
  // Prepare data.
  GenericRecord someAvroDatum = buildSomeAvroDatum();
  // Register the schema and encode its id as the message prefix.
  Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC);
  TypedSchemaRepository<Integer, Schema, String> repositoryClient =
      new TypedSchemaRepository<>(repository, new IntegerConverter(), new AvroSchemaConverter(), new IdentityConverter());
  Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema());
  ByteBuffer byteBuffer = ByteBuffer.allocate(4);
  converter.putSubjectAndId(id, byteBuffer);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  out.write(byteBuffer.array());
  // Encode the datum itself: binary Avro bytes appended after the id prefix.
  DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(someAvroDatum.getSchema());
  writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
  // Parse the framed bytes back into an InputRow and verify the dimensions.
  final ByteEntity entity = new ByteEntity(ByteBuffer.wrap(out.toByteArray()));
  InputRow inputRow = inputFormat2.createReader(new InputRowSchema(timestampSpec, dimensionsSpec, null), entity, null).read().next();
  assertInputRowCorrect(inputRow, DIMENSIONS, false);
}
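For reference, the framed message the test assembles appears to be a 4-byte schema id followed by the raw binary-encoded datum (ByteBuffer.allocate(4) plus the encoder output). A hypothetical sketch of that layout, with datumBytes standing in for the encoded record:

// [ 4-byte schema id, big-endian ][ binary-encoded Avro datum ]
ByteBuffer frame = ByteBuffer.allocate(4 + datumBytes.length);
frame.putInt(id);      // id returned by repositoryClient.registerSchema(...)
frame.put(datumBytes); // bytes from SpecificDatumWriter + directBinaryEncoder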