Example 6 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class AvroUtilTest, method shouldPassAvroCheck.

@Test
public void shouldPassAvroCheck() throws Exception {
    SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
    SchemaMetadata schemaMetadata = new SchemaMetadata(1, 1, ordersAveroSchemaStr);
    expect(schemaRegistryClient.getLatestSchemaMetadata(anyString())).andReturn(schemaMetadata);
    replay(schemaRegistryClient);
    AbstractStreamCreateStatement abstractStreamCreateStatement = getAbstractStreamCreateStatement(
        "CREATE STREAM S1 WITH (kafka_topic='s1_topic', value_format='avro' );");
    Pair<AbstractStreamCreateStatement, String> checkResult =
        avroUtil.checkAndSetAvroSchema(abstractStreamCreateStatement, new HashMap<>(), schemaRegistryClient);
    AbstractStreamCreateStatement newAbstractStreamCreateStatement = checkResult.getLeft();
    assertThat(newAbstractStreamCreateStatement.getElements(), equalTo(Arrays.asList(
        new TableElement("ORDERTIME", "BIGINT"),
        new TableElement("ORDERID", "BIGINT"),
        new TableElement("ITEMID", "VARCHAR"),
        new TableElement("ORDERUNITS", "DOUBLE"),
        new TableElement("ARRAYCOL", "ARRAY<DOUBLE>"),
        new TableElement("MAPCOL", "MAP<VARCHAR,DOUBLE>"))));
}
Also used : SchemaMetadata(io.confluent.kafka.schemaregistry.client.SchemaMetadata) AbstractStreamCreateStatement(io.confluent.ksql.parser.tree.AbstractStreamCreateStatement) EasyMock.anyString(org.easymock.EasyMock.anyString) TableElement(io.confluent.ksql.parser.tree.TableElement) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
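
The mock / expect / replay calls above follow EasyMock's record-replay lifecycle: first stub the calls the collaborator should receive, then switch the mock into replay mode, then exercise the code under test. A minimal, self-contained sketch of that pattern, with a hypothetical Greeter interface standing in for SchemaRegistryClient:

import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

public class EasyMockPatternSketch {

    // Hypothetical collaborator, purely for illustration.
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter greeter = mock(Greeter.class);            // record phase
        expect(greeter.greet("ksql")).andReturn("hello"); // stub one call
        replay(greeter);                                  // switch to replay
        System.out.println(greeter.greet("ksql"));        // prints "hello"
        verify(greeter);                                  // all stubs were used
    }
}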

Example 7 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class TopicStreamWriterFormatTest, method shouldMatchJsonFormatter.

@Test
public void shouldMatchJsonFormatter() throws Exception {
    SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
    replay(schemaRegistryClient);
    // Test data
    String json = "{    \"name\": \"myrecord\",    \"type\": \"record\"}";
    ConsumerRecord<String, Bytes> record =
        new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(json.getBytes()));
    assertTrue(TopicStreamWriter.Format.JSON.isFormat("topic", record, schemaRegistryClient));
}
Also used : Bytes(org.apache.kafka.common.utils.Bytes) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
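
Format.JSON.isFormat is handed the raw record plus the registry client; a plausible way to decide whether a payload is JSON is simply to attempt a parse. A minimal sketch of such a check using Jackson; looksLikeJson is a hypothetical helper, not ksql's actual implementation:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.utils.Bytes;

public class JsonFormatCheckSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Hypothetical check: treat the payload as JSON if Jackson can parse it.
    static boolean looksLikeJson(Bytes value) {
        try {
            MAPPER.readTree(value.get());
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(looksLikeJson(new Bytes("{\"a\": 1}".getBytes()))); // true
        System.out.println(looksLikeJson(new Bytes("not json".getBytes())));   // false
    }
}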

Example 8 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class TopicStreamWriterFormatTest, method shouldMatchAvroFormatter.

@Test
public void shouldMatchAvroFormatter() throws Exception {
    // Build an Avro message
    String USER_SCHEMA = "{\n"
        + "    \"fields\": [\n"
        + "        { \"name\": \"str1\", \"type\": \"string\" }\n"
        + "    ],\n"
        + "    \"name\": \"myrecord\",\n"
        + "    \"type\": \"record\"\n"
        + "}";
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(USER_SCHEMA);
    GenericData.Record avroRecord = new GenericData.Record(schema);
    avroRecord.put("str1", "My first string");
    // Set up expectations
    SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
    expect(schemaRegistryClient.register(anyString(), anyObject())).andReturn(1);
    expect(schemaRegistryClient.getById(anyInt())).andReturn(schema);
    replay(schemaRegistryClient);
    Map<String, String> props = new HashMap<>();
    props.put("schema.registry.url", "localhost:9092");
    KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer(schemaRegistryClient, props);
    // Test data
    byte[] testRecordBytes = avroSerializer.serialize("topic", avroRecord);
    ConsumerRecord<String, Bytes> record =
        new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(testRecordBytes));
    // Assert
    assertTrue(TopicStreamWriter.Format.AVRO.isFormat("topic", record, schemaRegistryClient));
}
Also used : HashMap(java.util.HashMap) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) GenericData(org.apache.avro.generic.GenericData) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Bytes(org.apache.kafka.common.utils.Bytes) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
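
The register and getById expectations mirror what KafkaAvroSerializer actually does: it registers the schema and embeds the returned id in Confluent's wire format, a zero magic byte followed by a 4-byte big-endian schema id and then the Avro-encoded payload. A short sketch that pulls the schema id back out of such bytes (schemaIdOf is an illustrative helper):

import java.nio.ByteBuffer;

public class WireFormatSketch {

    // Confluent wire format: [0x0 magic byte][4-byte schema id][Avro payload].
    static int schemaIdOf(byte[] serialized) {
        ByteBuffer buf = ByteBuffer.wrap(serialized);
        if (buf.get() != 0x0) {
            throw new IllegalArgumentException("not Confluent wire format");
        }
        return buf.getInt(); // ByteBuffer reads big-endian by default
    }

    public static void main(String[] args) {
        byte[] fake = ByteBuffer.allocate(6)
            .put((byte) 0x0).putInt(1).put((byte) 0x2).array();
        System.out.println(schemaIdOf(fake)); // 1
    }
}

This is also why checking the magic byte is a cheap first step for AVRO-format detection: a payload that does not start with 0x0 cannot have come from the Confluent serializer.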

Example 9 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeCorrectly.

@Test
@SuppressWarnings("unchecked")
public void shouldDeserializeCorrectly() {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    List columns = Arrays.asList(
        1511897796092L, 1L, "item_1", 10.0,
        new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
        new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
    byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
    GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(row);
    assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
    assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
    assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
    assertThat("Incorrect deserialization", row.getColumns().get(3), equalTo(10.0));
    assertThat("Incorrect deserialization", ((Double[]) row.getColumns().get(4)).length, equalTo(1));
    assertThat("Incorrect deserialization", ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) List(java.util.List) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
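
MockSchemaRegistryClient keeps registered schemas in memory, so serializers and deserializers can round-trip data without a running Schema Registry; the url property is required by the serde configuration but never contacted. A minimal round-trip sketch under those assumptions (topic name and schema are illustrative):

import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.util.Collections;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;

public class MockRegistryRoundTripSketch {

    public static void main(String[] args) {
        SchemaRegistryClient client = new MockSchemaRegistryClient();
        // The url is never contacted; the mock stores schemas in memory.
        Map<String, String> props =
            Collections.singletonMap("schema.registry.url", "http://unused:8081");
        KafkaAvroSerializer serializer = new KafkaAvroSerializer(client, props);
        KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(client, props);

        Schema schema = new Schema.Parser().parse(
            "{\"type\": \"record\", \"name\": \"r\","
            + " \"fields\": [{\"name\": \"s\", \"type\": \"string\"}]}");
        GenericData.Record record = new GenericData.Record(schema);
        record.put("s", "value");

        // serialize() auto-registers the schema with the in-memory mock;
        // deserialize() looks it up again by the embedded schema id.
        byte[] bytes = serializer.serialize("t1", record);
        System.out.println(deserializer.deserialize("t1", bytes)); // {"s": "value"}
    }
}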

Example 10 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeIfThereAreRedundantFields.

@Test
public void shouldDeserializeIfThereAreRedundantFields() {
    org.apache.kafka.connect.data.Schema newSchema = SchemaBuilder.struct()
        .field("ordertime".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
        .field("orderid".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
        .field("itemid".toUpperCase(), org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
        .field("orderunits".toUpperCase(), org.apache.kafka.connect.data.Schema.FLOAT64_SCHEMA)
        .build();
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    List columns = Arrays.asList(
        1511897796092L, 1L, "item_1", 10.0,
        new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
        new KsqlGenericRowAvroDeserializer(newSchema, schemaRegistryClient, false);
    byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
    GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(row);
    assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(4));
    assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
    assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) List(java.util.List) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
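
Tolerating the extra columns on read is standard Avro schema resolution: decode with a reader schema that omits the redundant fields, and the decoder skips them. A sketch of that mechanism with plain Avro, independent of ksql (both schemas are illustrative):

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;

public class SchemaResolutionSketch {

    public static void main(String[] args) throws Exception {
        Schema writer = new Schema.Parser().parse(
            "{\"type\": \"record\", \"name\": \"r\", \"fields\": ["
            + "{\"name\": \"a\", \"type\": \"long\"},"
            + "{\"name\": \"b\", \"type\": \"string\"}]}");
        // The reader schema omits the redundant field "b".
        Schema reader = new Schema.Parser().parse(
            "{\"type\": \"record\", \"name\": \"r\", \"fields\": ["
            + "{\"name\": \"a\", \"type\": \"long\"}]}");

        GenericData.Record rec = new GenericData.Record(writer);
        rec.put("a", 1L);
        rec.put("b", "dropped on read");

        // Encode with the writer schema.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Encoder enc = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(writer).write(rec, enc);
        enc.flush();

        // Decode with writer and reader schemas; field "b" is skipped.
        Decoder dec = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericRecord narrowed =
            new GenericDatumReader<GenericRecord>(writer, reader).read(null, dec);
        System.out.println(narrowed); // {"a": 1}
    }
}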

Aggregations

SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 28
Test (org.junit.Test): 21
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 11
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 10
GenericRow (io.confluent.ksql.GenericRow): 8
List (java.util.List): 8
HashMap (java.util.HashMap): 7
Schema (org.apache.avro.Schema): 6
GenericRecord (org.apache.avro.generic.GenericRecord): 5
RestClientException (io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException): 4
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer): 4
KafkaAvroSerializer (io.confluent.kafka.serializers.KafkaAvroSerializer): 4
IOException (java.io.IOException): 4
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 4
Bytes (org.apache.kafka.common.utils.Bytes): 4
Schema (org.apache.kafka.connect.data.Schema): 4
Map (java.util.Map): 3
GenericData (org.apache.avro.generic.GenericData): 3
Test (org.testng.annotations.Test): 3
SchemaMetadata (io.confluent.kafka.schemaregistry.client.SchemaMetadata): 2