Example 1 with AvroSchema

Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.

Class SchemaRegistryBasedAvroBytesDecoderTest, method testParseCorruptedPartial.

@Test(expected = ParseException.class)
public void testParseCorruptedPartial() throws Exception {
    // Given
    Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234))).thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
    GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    Schema schema = SomeAvroDatum.getClassSchema();
    byte[] bytes = getAvroDatum(schema, someAvroDatum);
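    // deliberately truncated frame: magic byte and schema id are intact, but only
    // 4 payload bytes (taken from offset 5) follow, so decoding must fail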
    ByteBuffer bb = ByteBuffer.allocate(4 + 5).put((byte) 0).putInt(1234).put(bytes, 5, 4);
    bb.rewind();
    // When
    new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}
Also used : AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) Schema(org.apache.avro.Schema) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) GenericRecord(org.apache.avro.generic.GenericRecord) ByteBuffer(java.nio.ByteBuffer) AvroStreamInputRowParserTest(org.apache.druid.data.input.AvroStreamInputRowParserTest) Test(org.junit.Test)
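
The getAvroDatum helper is referenced but not shown on this page. Below is a minimal sketch of what such a helper typically looks like, assuming only the standard Avro APIs (the actual Druid test code may differ in detail): it serializes a GenericRecord to raw Avro binary, with no Confluent framing.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;

// Hedged sketch of the getAvroDatum helper used by the tests above.
static byte[] getAvroDatum(Schema schema, GenericRecord datum) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    // write the record in Avro binary encoding; no magic byte, no schema id
    writer.write(datum, encoder);
    // binaryEncoder buffers internally, so flush before extracting the bytes
    encoder.flush();
    return out.toByteArray();
}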

Example 2 with AvroSchema

Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.

Class SchemaRegistryBasedAvroBytesDecoderTest, method testParse.

@Test
public void testParse() throws Exception {
    // Given
    Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234))).thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
    GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    Schema schema = SomeAvroDatum.getClassSchema();
    byte[] bytes = getAvroDatum(schema, someAvroDatum);
    ByteBuffer bb = ByteBuffer.allocate(bytes.length + 5).put((byte) 0).putInt(1234).put(bytes);
    bb.rewind();
    // When
    new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}
Also used : AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) Schema(org.apache.avro.Schema) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) GenericRecord(org.apache.avro.generic.GenericRecord) ByteBuffer(java.nio.ByteBuffer) AvroStreamInputRowParserTest(org.apache.druid.data.input.AvroStreamInputRowParserTest) Test(org.junit.Test)
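
The 5-byte prefix the test writes by hand mirrors the Confluent wire format: one zero magic byte, a 4-byte big-endian schema id, then the Avro payload. A minimal helper sketch that builds such a frame (frameConfluent is a hypothetical name for illustration, not part of the decoder's API):

import java.nio.ByteBuffer;

// Hedged sketch: frame a raw Avro payload in the Confluent wire format.
static ByteBuffer frameConfluent(int schemaId, byte[] avroPayload) {
    ByteBuffer bb = ByteBuffer.allocate(1 + 4 + avroPayload.length);
    bb.put((byte) 0);      // magic byte
    bb.putInt(schemaId);   // schema registry id; ByteBuffer is big-endian by default
    bb.put(avroPayload);   // Avro-encoded record bytes
    bb.rewind();           // reset position so a reader starts at the magic byte
    return bb;
}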

Example 3 with AvroSchema

Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project flink by apache.

Class SQLClientSchemaRegistryITCase, method testReading.

@Test
public void testReading() throws Exception {
    String testCategoryTopic = "test-category-" + UUID.randomUUID().toString();
    String testResultsTopic = "test-results-" + UUID.randomUUID().toString();
    kafkaClient.createTopic(1, 1, testCategoryTopic);
    Schema categoryRecord = SchemaBuilder.record("record").fields().requiredLong("category_id").optionalString("name").endRecord();
    String categorySubject = testCategoryTopic + "-value";
    registryClient.register(categorySubject, new AvroSchema(categoryRecord));
    GenericRecordBuilder categoryBuilder = new GenericRecordBuilder(categoryRecord);
    KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(registryClient);
    kafkaClient.sendMessages(testCategoryTopic, valueSerializer, categoryBuilder.set("category_id", 1L).set("name", "electronics").build());
    List<String> sqlLines = Arrays.asList(
            "CREATE TABLE category (",
            " category_id BIGINT,",
            " name STRING,",
            // new field: should create a new schema version, but still be able
            // to read records written with the old version
            " description STRING",
            ") WITH (",
            " 'connector' = 'kafka',",
            " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
            " 'topic' = '" + testCategoryTopic + "',",
            " 'scan.startup.mode' = 'earliest-offset',",
            " 'properties.group.id' = 'test-group',",
            " 'format' = 'avro-confluent',",
            " 'avro-confluent.url' = 'http://" + INTER_CONTAINER_REGISTRY_ALIAS + ":8082'",
            ");",
            "",
            "CREATE TABLE results (",
            " category_id BIGINT,",
            " name STRING,",
            " description STRING",
            ") WITH (",
            " 'connector' = 'kafka',",
            " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
            " 'properties.group.id' = 'test-group',",
            " 'topic' = '" + testResultsTopic + "',",
            " 'format' = 'csv',",
            " 'csv.null-literal' = 'null'",
            ");",
            "",
            "INSERT INTO results SELECT * FROM category;");
    executeSqlStatements(sqlLines);
    List<String> categories = kafkaClient.readMessages(1, "test-group", testResultsTopic, new StringDeserializer());
    assertThat(categories, equalTo(Collections.singletonList("1,electronics,null")));
}
Also used : AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) Test(org.junit.Test)
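
The extra description column in the DDL relies on Avro schema evolution: Flink's avro-confluent format registers a new, wider schema version on write, while records under the old version stay readable. The test never registers that version by hand; a hedged sketch of the compatible evolution it implies, using the same SchemaBuilder API as above:

// Hedged sketch: the evolved record schema implied by the 'description' column.
// optionalString produces a nullable field with a null default, which keeps the
// new version backward-compatible with records written under the old schema.
Schema evolvedRecord = SchemaBuilder.record("record").fields()
        .requiredLong("category_id")
        .optionalString("name")
        .optionalString("description")
        .endRecord();
registryClient.register(categorySubject, new AvroSchema(evolvedRecord));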

Example 4 with AvroSchema

Use of io.confluent.kafka.schemaregistry.avro.AvroSchema in project druid by druid-io.

Class SchemaRegistryBasedAvroBytesDecoder, method parse.

@Override
public GenericRecord parse(ByteBuffer bytes) {
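    // Confluent wire format: 1 magic byte + 4-byte schema id precede the Avro
    // payload, so the payload length is the buffer limit minus those 5 header bytes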
    int length = bytes.limit() - 1 - 4;
    if (length < 0) {
        throw new ParseException(null, "Failed to decode avro message, not enough bytes to decode (%s)", bytes.limit());
    }
    // ignore first \0 byte
    bytes.get();
    // extract schema registry id
    int id = bytes.getInt();
    int offset = bytes.position() + bytes.arrayOffset();
    Schema schema;
    try {
        ParsedSchema parsedSchema = registry.getSchemaById(id);
        schema = parsedSchema instanceof AvroSchema ? ((AvroSchema) parsedSchema).rawSchema() : null;
    } catch (IOException | RestClientException ex) {
        throw new ParseException(null, "Failed to get Avro schema: %s", id);
    }
    if (schema == null) {
        throw new ParseException(null, "Failed to find Avro schema: %s", id);
    }
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    try {
        return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
    } catch (Exception e) {
        throw new ParseException(null, e, "Failed to decode Avro message for schema: %s!", id);
    }
}
Also used : AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) GenericDatumReader(org.apache.avro.generic.GenericDatumReader) Schema(org.apache.avro.Schema) ParsedSchema(io.confluent.kafka.schemaregistry.ParsedSchema) RestClientException(io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException) ParseException(org.apache.druid.java.util.common.parsers.ParseException) IOException(java.io.IOException) GenericRecord(org.apache.avro.generic.GenericRecord)
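
For completeness, a hedged usage sketch wiring the decoder to a real registry through Confluent's CachedSchemaRegistryClient. The endpoint and cache capacity are placeholders, confluentFramedBytes is a hypothetical variable holding a Confluent-framed message, and the single-argument constructor is the one the tests above use; Druid's production path builds the client from the ingestion spec instead.

import java.nio.ByteBuffer;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.avro.generic.GenericRecord;

// Hedged sketch: decode one Confluent-framed Kafka message.
SchemaRegistryClient client =
        new CachedSchemaRegistryClient("http://localhost:8081", 100);
SchemaRegistryBasedAvroBytesDecoder decoder =
        new SchemaRegistryBasedAvroBytesDecoder(client);
// confluentFramedBytes: magic byte + 4-byte schema id + Avro payload
GenericRecord record = decoder.parse(ByteBuffer.wrap(confluentFramedBytes));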

Aggregations

AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 4 uses
Schema (org.apache.avro.Schema): 4 uses
ParsedSchema (io.confluent.kafka.schemaregistry.ParsedSchema): 3 uses
GenericRecord (org.apache.avro.generic.GenericRecord): 3 uses
Test (org.junit.Test): 3 uses
ByteBuffer (java.nio.ByteBuffer): 2 uses
AvroStreamInputRowParserTest (org.apache.druid.data.input.AvroStreamInputRowParserTest): 2 uses
RestClientException (io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException): 1 use
KafkaAvroSerializer (io.confluent.kafka.serializers.KafkaAvroSerializer): 1 use
IOException (java.io.IOException): 1 use
GenericDatumReader (org.apache.avro.generic.GenericDatumReader): 1 use
GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder): 1 use
ParseException (org.apache.druid.java.util.common.parsers.ParseException): 1 use
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 1 use