Search in sources:

Example 26 with MockSchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.

From class ProjectNodeTest, method shouldCreateProjectionWithFieldNameExpressionPairs.

@Test
public void shouldCreateProjectionWithFieldNameExpressionPairs() {
    // Stub out the parent node so only the projection wiring is exercised.
    mockSourceNode();
    final BooleanLiteral literalTrue = new BooleanLiteral("true");
    final BooleanLiteral literalFalse = new BooleanLiteral("false");
    // Expect the source stream to be asked to select each schema field paired
    // with its projection expression, in schema-field order.
    EasyMock.expect(
        stream.select(
            Arrays.asList(
                new Pair<>("field1", literalTrue),
                new Pair<>("field2", literalFalse))))
        .andReturn(stream);
    EasyMock.replay(source, stream);
    final Schema projectionSchema = SchemaBuilder.struct()
        .field("field1", Schema.STRING_SCHEMA)
        .field("field2", Schema.STRING_SCHEMA)
        .build();
    final ProjectNode node = new ProjectNode(
        new PlanNodeId("1"),
        source,
        projectionSchema,
        Arrays.asList(literalTrue, literalFalse));
    node.buildStream(builder, ksqlConfig, kafkaTopicClient, functionRegistry, props,
        new MockSchemaRegistryClient());
    EasyMock.verify(stream);
}
Also used : BooleanLiteral(io.confluent.ksql.parser.tree.BooleanLiteral) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Pair(io.confluent.ksql.util.Pair) Test(org.junit.Test)

Example 27 with MockSchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.

From class PhysicalPlanBuilderTest, method buildPhysicalPlanBuilder.

/**
 * Builds a {@code PhysicalPlanBuilder} wired with test doubles (fake topic
 * client, mock schema registry) and a minimal Streams configuration:
 * caching disabled and immediate commits so results surface deterministically.
 */
private PhysicalPlanBuilder buildPhysicalPlanBuilder(Map<String, Object> overrideProperties) {
    final Map<String, Object> baseConfig = new HashMap<>();
    baseConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    baseConfig.put("application.id", "KSQL");
    baseConfig.put("commit.interval.ms", 0);
    baseConfig.put("cache.max.bytes.buffering", 0);
    baseConfig.put("auto.offset.reset", "earliest");
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final FunctionRegistry functionRegistry = new FunctionRegistry();
    return new PhysicalPlanBuilder(
        streamsBuilder,
        new KsqlConfig(baseConfig),
        new FakeKafkaTopicClient(),
        functionRegistry,
        overrideProperties,
        false,
        metaStore,
        new MockSchemaRegistryClient(),
        testKafkaStreamsBuilder);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) FakeKafkaTopicClient(io.confluent.ksql.util.FakeKafkaTopicClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig)

Example 28 with MockSchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.

From class KsqlResourceTest, method setUp.

@Before
public void setUp() throws IOException, RestClientException {
    // Use a mock schema registry, pre-loaded with the test schema, so the
    // engine never talks to a real registry.
    final SchemaRegistryClient registryClient = new MockSchemaRegistryClient();
    registerSchema(registryClient);
    ksqlRestConfig = new KsqlRestConfig(TestKsqlResourceUtil.getDefaultKsqlConfig());
    final KsqlConfig engineConfig = new KsqlConfig(ksqlRestConfig.getKsqlConfigProperties());
    ksqlEngine = new KsqlEngine(
        engineConfig, new MockKafkaTopicClient(), registryClient, new MetaStoreImpl());
}
Also used : KsqlEngine(io.confluent.ksql.KsqlEngine) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlRestConfig(io.confluent.ksql.rest.server.KsqlRestConfig) MockKafkaTopicClient(io.confluent.ksql.rest.server.mock.MockKafkaTopicClient) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlConfig(io.confluent.ksql.util.KsqlConfig) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Before(org.junit.Before)

Example 29 with MockSchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.

From class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeWithMissingFields.

@Test
public void shouldDeserializeWithMissingFields() {
    // The writer (Avro) schema has only the first four columns; the reader
    // schema ('schema' field of the test class) has six, so the deserializer
    // must pad the two trailing columns with nulls.
    final String schemaStr1 = "{" + "\"namespace\": \"kql\"," + " \"name\": \"orders\"," + " \"type\": \"record\"," + " \"fields\": [" + "     {\"name\": \"orderTime\", \"type\": \"long\"}," + "     {\"name\": \"orderId\",  \"type\": \"long\"}," + "     {\"name\": \"itemId\", \"type\": \"string\"}," + "     {\"name\": \"orderUnits\", \"type\": \"double\"}" + " ]" + "}";
    final Schema.Parser parser = new Schema.Parser();
    final Schema avroSchema1 = parser.parse(schemaStr1);
    final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    final List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
    final GenericRow genericRow = new GenericRow(columns);
    final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);
    final KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
    final GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
    // Fixed misspelled assertion messages ("deserializarion" -> "deserialization").
    assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
    assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
    assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
    // Previously unchecked: the fourth written column must round-trip as well.
    assertThat("Incorrect deserialization", (Double) row.getColumns().get(3), equalTo(10.0));
    // Columns absent from the writer schema come back as null.
    Assert.assertNull(row.getColumns().get(4));
    Assert.assertNull(row.getColumns().get(5));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Schema(org.apache.avro.Schema) List(java.util.List) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Test(org.junit.Test)

Example 30 with MockSchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.

From class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.

@Test
public void shouldSerializeRowCorrectly() {
    // Serialize a full row with the KSQL Avro serializer, then read it back
    // with the plain Confluent Avro deserializer and verify every column
    // (primitives, array, map) survived the round trip.
    final SchemaRegistryClient registryClient = new MockSchemaRegistryClient();
    final KsqlGenericRowAvroSerializer serializer =
        new KsqlGenericRowAvroSerializer(schema, registryClient, new KsqlConfig(new HashMap<>()));
    final List rowValues = Arrays.asList(
        1511897796092L, 1L, "item_1", 10.0,
        new Double[] { 100.0 },
        Collections.singletonMap("key1", 100.0));
    final byte[] serializedBytes = serializer.serialize("t1", new GenericRow(rowValues));
    final KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer(registryClient);
    final GenericRecord record = (GenericRecord) avroDeserializer.deserialize("t1", serializedBytes);
    Assert.assertNotNull(record);
    // Field names are upper-cased by the serializer's schema translation.
    assertThat("Incorrect serialization.", record.get("ordertime".toUpperCase()), equalTo(1511897796092L));
    assertThat("Incorrect serialization.", record.get("orderid".toUpperCase()), equalTo(1L));
    assertThat("Incorrect serialization.", record.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
    assertThat("Incorrect serialization.", record.get("orderunits".toUpperCase()), equalTo(10.0));
    final GenericData.Array array = (GenericData.Array) record.get("arraycol".toUpperCase());
    final Map map = (Map) record.get("mapcol".toUpperCase());
    assertThat("Incorrect serialization.", array.size(), equalTo(1));
    assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
    assertThat("Incorrect serialization.", map.size(), equalTo(1));
    // Avro deserializes string map keys as Utf8, not java.lang.String.
    assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Also used : HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) GenericData(org.apache.avro.generic.GenericData) GenericRow(io.confluent.ksql.GenericRow) Utf8(org.apache.avro.util.Utf8) List(java.util.List) GenericRecord(org.apache.avro.generic.GenericRecord) HashMap(java.util.HashMap) Map(java.util.Map) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)

Aggregations

MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)32 Test (org.junit.Test)24 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)11 PlanNode (io.confluent.ksql.planner.plan.PlanNode)11 KsqlConfig (io.confluent.ksql.util.KsqlConfig)11 ArrayList (java.util.ArrayList)11 GenericRow (io.confluent.ksql.GenericRow)9 FunctionRegistry (io.confluent.ksql.function.FunctionRegistry)9 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)8 List (java.util.List)7 HashMap (java.util.HashMap)6 FilterNode (io.confluent.ksql.planner.plan.FilterNode)4 ProjectNode (io.confluent.ksql.planner.plan.ProjectNode)4 KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient)4 Before (org.junit.Before)4 SchemaKStream (io.confluent.ksql.structured.SchemaKStream)3 Map (java.util.Map)3 Schema (org.apache.avro.Schema)3 KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer)2 BooleanLiteral (io.confluent.ksql.parser.tree.BooleanLiteral)2