Use of io.confluent.kafka.schemaregistry.ParsedSchema in the druid project by druid-io:
the parse method of the SchemaRegistryBasedAvroBytesDecoder class.
/**
 * Decodes a Confluent wire-format Avro message: one magic byte ({@code 0x00}),
 * a 4-byte big-endian schema-registry id, then the Avro binary payload.
 *
 * @param bytes buffer holding the wire-format message, positioned at the magic byte
 * @return the decoded Avro record
 * @throws ParseException if the buffer is too short, the schema cannot be fetched
 *         from the registry, the registered schema is not an Avro schema, or the
 *         payload fails to decode
 */
@Override
public GenericRecord parse(ByteBuffer bytes) {
// Payload length after the 1-byte magic prefix and the 4-byte schema id.
int length = bytes.limit() - 1 - 4;
if (length < 0) {
throw new ParseException(null, "Failed to decode avro message, not enough bytes to decode (%s)", bytes.limit());
}
// ignore first \0 byte (Confluent wire-format magic byte)
bytes.get();
// extract schema registry id
int id = bytes.getInt();
// NOTE(review): bytes.array()/arrayOffset() require a heap-backed, non-read-only
// buffer — confirm callers never hand in a direct buffer.
int offset = bytes.position() + bytes.arrayOffset();
Schema schema;
try {
ParsedSchema parsedSchema = registry.getSchemaById(id);
// The registry can return non-Avro schema types (e.g. JSON/Protobuf);
// treat those as "schema not found" below.
schema = parsedSchema instanceof AvroSchema ? ((AvroSchema) parsedSchema).rawSchema() : null;
} catch (IOException | RestClientException ex) {
// Preserve the cause so registry/network failures stay diagnosable
// upstream (the original code dropped it, losing the stack trace).
throw new ParseException(null, ex, "Failed to get Avro schema: %s", id);
}
if (schema == null) {
throw new ParseException(null, "Failed to find Avro schema: %s", id);
}
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
try {
return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
} catch (Exception e) {
throw new ParseException(null, e, "Fail to decode Avro message for schema: %s!", id);
}
}
Aggregations