Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.
From the class SchemaRegistryBasedAvroBytesDecoderTest, method testParseCorruptedPartial:
@Test(expected = ParseException.class)
public void testParseCorruptedPartial() throws Exception {
  // Given
  Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234)))
         .thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
  GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
  Schema schema = SomeAvroDatum.getClassSchema();
  byte[] bytes = getAvroDatum(schema, someAvroDatum);
  // Magic byte + 4-byte schema id, followed by only a 4-byte slice of the
  // serialized record: a deliberately truncated (corrupted) payload.
  ByteBuffer bb = ByteBuffer.allocate(4 + 5).put((byte) 0).putInt(1234).put(bytes, 5, 4);
  bb.rewind();
  // When
  new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}
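The getAvroDatum helper called above is not shown in this snippet. A minimal sketch of what such a helper plausibly does, assuming it serializes a GenericRecord to raw Avro binary with no wire-format header (an assumed implementation, not the actual Druid test utility):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;

// Sketch of the assumed test helper: serialize a record to Avro binary bytes.
static byte[] getAvroDatum(Schema schema, GenericRecord datum) throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  writer.write(datum, encoder);
  encoder.flush();
  return out.toByteArray();
}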
Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.
From the class SchemaRegistryBasedAvroBytesDecoderTest, method testParse:
@Test
public void testParse() throws Exception {
  // Given
  Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234)))
         .thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
  GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
  Schema schema = SomeAvroDatum.getClassSchema();
  byte[] bytes = getAvroDatum(schema, someAvroDatum);
  ByteBuffer bb = ByteBuffer.allocate(bytes.length + 5).put((byte) 0).putInt(1234).put(bytes);
  bb.rewind();
  // When
  new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}
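Both tests stub a registry field that the snippets themselves do not declare. A sketch of the assumed fixture, using a Mockito mock of the Confluent client interface:

import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.mockito.Mockito;

// Assumed fixture: a mocked client, stubbed per test via Mockito.when(...).
private final SchemaRegistryClient registry = Mockito.mock(SchemaRegistryClient.class);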
Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project flink by apache.
From the class SQLClientSchemaRegistryITCase, method testReading:
@Test
public void testReading() throws Exception {
  String testCategoryTopic = "test-category-" + UUID.randomUUID().toString();
  String testResultsTopic = "test-results-" + UUID.randomUUID().toString();
  kafkaClient.createTopic(1, 1, testCategoryTopic);
  Schema categoryRecord = SchemaBuilder.record("record")
      .fields()
      .requiredLong("category_id")
      .optionalString("name")
      .endRecord();
  String categorySubject = testCategoryTopic + "-value";
  registryClient.register(categorySubject, new AvroSchema(categoryRecord));
  GenericRecordBuilder categoryBuilder = new GenericRecordBuilder(categoryRecord);
  KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(registryClient);
  kafkaClient.sendMessages(testCategoryTopic, valueSerializer,
      categoryBuilder.set("category_id", 1L).set("name", "electronics").build());
  List<String> sqlLines = Arrays.asList(
      "CREATE TABLE category (",
      " category_id BIGINT,",
      " name STRING,",
      // "description" is a new field: it should create a new schema version, but
      // the query must still be able to read records written with the old version
      " description STRING",
      ") WITH (",
      " 'connector' = 'kafka',",
      " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
      " 'topic' = '" + testCategoryTopic + "',",
      " 'scan.startup.mode' = 'earliest-offset',",
      " 'properties.group.id' = 'test-group',",
      " 'format' = 'avro-confluent',",
      " 'avro-confluent.url' = 'http://" + INTER_CONTAINER_REGISTRY_ALIAS + ":8082'",
      ");",
      "",
      "CREATE TABLE results (",
      " category_id BIGINT,",
      " name STRING,",
      " description STRING",
      ") WITH (",
      " 'connector' = 'kafka',",
      " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
      " 'properties.group.id' = 'test-group',",
      " 'topic' = '" + testResultsTopic + "',",
      " 'format' = 'csv',",
      " 'csv.null-literal' = 'null'",
      ");",
      "",
      "INSERT INTO results SELECT * FROM category;");
  executeSqlStatements(sqlLines);
  List<String> categories = kafkaClient.readMessages(1, "test-group", testResultsTopic, new StringDeserializer());
  assertThat(categories, equalTo(Collections.singletonList("1,electronics,null")));
}
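When debugging a run of this test, the same registryClient can be used to inspect what was actually registered for the subject. A small sketch (not part of the original test):

import io.confluent.kafka.schemaregistry.client.SchemaMetadata;

// Fetch the latest schema version registered under the category subject.
SchemaMetadata latest = registryClient.getLatestSchemaMetadata(categorySubject);
System.out.println("id=" + latest.getId() + ", version=" + latest.getVersion() + ", schema=" + latest.getSchema());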
Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.
From the class SchemaRegistryBasedAvroBytesDecoder, method parse:
@Override
public GenericRecord parse(ByteBuffer bytes) {
  int length = bytes.limit() - 1 - 4;
  if (length < 0) {
    throw new ParseException(null, "Failed to decode avro message, not enough bytes to decode (%s)", bytes.limit());
  }
  // ignore first \0 byte
  bytes.get();
  // extract schema registry id
  int id = bytes.getInt();
  int offset = bytes.position() + bytes.arrayOffset();
  Schema schema;
  try {
    ParsedSchema parsedSchema = registry.getSchemaById(id);
    schema = parsedSchema instanceof AvroSchema ? ((AvroSchema) parsedSchema).rawSchema() : null;
  } catch (IOException | RestClientException ex) {
    throw new ParseException(null, "Failed to get Avro schema: %s", id);
  }
  if (schema == null) {
    throw new ParseException(null, "Failed to find Avro schema: %s", id);
  }
  DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  try {
    return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
  } catch (Exception e) {
    throw new ParseException(null, e, "Failed to decode Avro message for schema: %s!", id);
  }
}
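The tests above construct the decoder directly with a SchemaRegistryClient, so standalone usage might look like the following sketch. Assumptions: a registry reachable at http://localhost:8081 and a wireFormatBytes byte array already in Confluent wire format (magic byte, 4-byte schema id, Avro payload); both names are illustrative:

import java.nio.ByteBuffer;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.avro.generic.GenericRecord;

// Hypothetical wiring: cache up to 100 schemas fetched from the registry.
SchemaRegistryClient client = new CachedSchemaRegistryClient("http://localhost:8081", 100);
SchemaRegistryBasedAvroBytesDecoder decoder = new SchemaRegistryBasedAvroBytesDecoder(client);
GenericRecord record = decoder.parse(ByteBuffer.wrap(wireFormatBytes)); // wireFormatBytes is assumed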