
Example 21 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class StructuredDataSourceNode, method buildStream:

@Override
public SchemaKStream buildStream(final StreamsBuilder builder, final KsqlConfig ksqlConfig,
                                 final KafkaTopicClient kafkaTopicClient, final FunctionRegistry functionRegistry,
                                 final Map<String, Object> props, final SchemaRegistryClient schemaRegistryClient) {
    if (getTimestampField() != null) {
        // Record which column supplies the row timestamp so the rest of the plan can use it.
        int timestampColumnIndex = getTimeStampColumnIndex();
        ksqlConfig.put(KsqlConfig.KSQL_TIMESTAMP_COLUMN_INDEX, timestampColumnIndex);
    }
    KsqlTopicSerDe ksqlTopicSerDe = getStructuredDataSource().getKsqlTopic().getKsqlTopicSerDe();
    Serde<GenericRow> genericRowSerde = ksqlTopicSerDe.getGenericRowSerde(
        SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(getSchema()), ksqlConfig, false, schemaRegistryClient);
    if (getDataSourceType() == StructuredDataSource.DataSourceType.KTABLE) {
        // Table sources become a SchemaKTable backed by a Kafka Streams KTable.
        final KsqlTable table = (KsqlTable) getStructuredDataSource();
        final KTable kTable = createKTable(builder, getAutoOffsetReset(props), table, genericRowSerde,
            table.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(getSchema(), ksqlConfig, true, schemaRegistryClient));
        return new SchemaKTable(getSchema(), kTable, getKeyField(), new ArrayList<>(), table.isWindowed(),
            SchemaKStream.Type.SOURCE, functionRegistry, schemaRegistryClient);
    }
    // Stream sources: consume the source topic and add the record timestamp column via AddTimestampColumn.
    return new SchemaKStream(
        getSchema(),
        builder.stream(getStructuredDataSource().getKsqlTopic().getKafkaTopicName(),
                Consumed.with(Serdes.String(), genericRowSerde))
            .mapValues(nonWindowedValueMapper)
            .transformValues(new AddTimestampColumn()),
        getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry, schemaRegistryClient);
}
Also used: GenericRow (io.confluent.ksql.GenericRow), SchemaKTable (io.confluent.ksql.structured.SchemaKTable), KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe), KsqlTable (io.confluent.ksql.metastore.KsqlTable), SchemaKStream (io.confluent.ksql.structured.SchemaKStream), KTable (org.apache.kafka.streams.kstream.KTable), AddTimestampColumn (io.confluent.ksql.physical.AddTimestampColumn)
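
The transformValues step above relies on AddTimestampColumn from the KSQL codebase, which is not shown on this page. Below is a minimal sketch of a value transformer in the same spirit, assuming a recent Kafka Streams API and assuming the record timestamp is prepended as the first column; the class name is invented and the real implementation may differ.

import io.confluent.ksql.GenericRow;
import org.apache.kafka.streams.kstream.ValueTransformer;
import org.apache.kafka.streams.processor.ProcessorContext;
import java.util.ArrayList;
import java.util.List;

// Sketch only: prepends the record timestamp to each GenericRow, in the spirit
// of the AddTimestampColumn step above. Not the actual KSQL implementation.
public class PrependTimestampColumn implements ValueTransformer<GenericRow, GenericRow> {

    private ProcessorContext context;

    @Override
    public void init(final ProcessorContext context) {
        this.context = context;
    }

    @Override
    public GenericRow transform(final GenericRow row) {
        if (row == null) {
            return null;
        }
        final List<Object> columns = new ArrayList<>();
        columns.add(context.timestamp());   // record timestamp becomes column 0
        columns.addAll(row.getColumns());
        return new GenericRow(columns);
    }

    @Override
    public void close() {
        // nothing to clean up
    }
}

Because ValueTransformerSupplier is a functional interface, such a transformer could be wired in with .transformValues(PrependTimestampColumn::new).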

Example 22 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class SchemaKGroupedStream, method aggregate:

@SuppressWarnings("unchecked")
public SchemaKTable aggregate(final Initializer initializer, final UdafAggregator aggregator,
                              final WindowExpression windowExpression, final Serde<GenericRow> topicValueSerDe) {
    final KTable aggKtable;
    if (windowExpression != null) {
        // Windowed aggregation: delegate to the KSQL window expression (tumbling, hopping, or session).
        final Materialized<String, GenericRow, ?> materialized =
            Materialized.<String, GenericRow, WindowStore<Bytes, byte[]>>with(Serdes.String(), topicValueSerDe);
        final KsqlWindowExpression ksqlWindowExpression = windowExpression.getKsqlWindowExpression();
        aggKtable = ksqlWindowExpression.applyAggregate(kgroupedStream, initializer, aggregator, materialized);
    } else {
        // Non-windowed aggregation directly over the grouped stream.
        aggKtable = kgroupedStream.aggregate(initializer, aggregator,
            Materialized.with(Serdes.String(), topicValueSerDe));
    }
    return new SchemaKTable(schema, aggKtable, keyField, sourceSchemaKStreams, windowExpression != null,
        SchemaKStream.Type.AGGREGATE, functionRegistry, schemaRegistryClient);
}
Also used: GenericRow (io.confluent.ksql.GenericRow), WindowStore (org.apache.kafka.streams.state.WindowStore), KsqlWindowExpression (io.confluent.ksql.parser.tree.KsqlWindowExpression), KTable (org.apache.kafka.streams.kstream.KTable)
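
The Initializer passed into aggregate is created elsewhere in KSQL and is not shown here. A minimal sketch of what an initializer producing GenericRow aggregates could look like, assuming the aggregate row is simply seeded with one null slot per column; the class name and constructor are illustrative, not the actual KSQL initializer.

import io.confluent.ksql.GenericRow;
import org.apache.kafka.streams.kstream.Initializer;
import java.util.ArrayList;
import java.util.List;

// Sketch only: seeds each aggregate with a GenericRow of nulls that the
// UdafAggregator can then fill in as records arrive.
public class NullSeedInitializer implements Initializer<GenericRow> {

    private final int columnCount;

    public NullSeedInitializer(final int columnCount) {
        this.columnCount = columnCount;
    }

    @Override
    public GenericRow apply() {
        final List<Object> columns = new ArrayList<>(columnCount);
        for (int i = 0; i < columnCount; i++) {
            columns.add(null);
        }
        return new GenericRow(columns);
    }
}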

Example 23 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlRestClientTest, method testStreamRowFromServer:

@Test
public void testStreamRowFromServer() throws InterruptedException {
    MockStreamedQueryResource sqr = mockApplication.getStreamedQueryResource();
    RestResponse<KsqlRestClient.QueryStream> queryResponse = ksqlRestClient.makeQueryRequest("Select *");
    Assert.assertNotNull(queryResponse);
    Assert.assertTrue(queryResponse.isSuccessful());
    // Get the stream writer from the mock server and load it up with a row
    List<MockStreamedQueryResource.TestStreamWriter> writers = sqr.getWriters();
    Assert.assertEquals(1, writers.size());
    MockStreamedQueryResource.TestStreamWriter writer = writers.get(0);
    try {
        writer.enq("hello");
        // Try to receive the row. Do this from another thread to avoid blocking indefinitely
        KsqlRestClient.QueryStream queryStream = queryResponse.getResponse();
        Thread t = new Thread(() -> queryStream.hasNext());
        t.setDaemon(true);
        t.start();
        t.join(10000);
        Assert.assertFalse(t.isAlive());
        Assert.assertTrue(queryStream.hasNext());
        StreamedRow sr = queryStream.next();
        Assert.assertNotNull(sr);
        GenericRow row = sr.getRow();
        Assert.assertEquals(1, row.getColumns().size());
        Assert.assertEquals("hello", row.getColumns().get(0));
    } finally {
        writer.finished();
    }
}
Also used: GenericRow (io.confluent.ksql.GenericRow), StreamedRow (io.confluent.ksql.rest.entity.StreamedRow), MockStreamedQueryResource (io.confluent.ksql.rest.server.mock.MockStreamedQueryResource), Test (org.junit.Test)
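
The daemon-thread-plus-join pattern above keeps a potentially blocking hasNext() call from hanging the test. The same idea, extracted into a small reusable helper, is sketched below; the class and method names are invented for illustration, and it assumes the stream being probed exposes a plain java.util.Iterator-style hasNext().

import java.util.Iterator;

// Sketch only: calls a potentially blocking Iterator.hasNext() on a daemon
// thread and waits at most timeoutMs for it to return.
public final class BlockingIteratorProbe {

    private BlockingIteratorProbe() {
    }

    public static boolean returnedWithin(final Iterator<?> iterator, final long timeoutMs)
            throws InterruptedException {
        final Thread probe = new Thread(() -> iterator.hasNext());
        probe.setDaemon(true);   // never keeps the JVM alive on its own
        probe.start();
        probe.join(timeoutMs);
        // If the probe thread is still alive, hasNext() is still blocked waiting for data.
        return !probe.isAlive();
    }
}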

Example 24 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method shouldCreateCorrectRow:

@Test
public void shouldCreateCorrectRow() {
    KafkaAvroDeserializer kafkaAvroDeserializer = EasyMock.mock(KafkaAvroDeserializer.class);
    EasyMock.expect(kafkaAvroDeserializer.deserialize(EasyMock.anyString(), EasyMock.anyObject())).andReturn(genericRecord);
    expectLastCall();
    replay(kafkaAvroDeserializer);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new KsqlGenericRowAvroDeserializer(schema, kafkaAvroDeserializer, false);
    GenericRow genericRow = ksqlGenericRowAvroDeserializer.deserialize("", new byte[] {});
    assertThat("Column number does not match.", genericRow.getColumns().size(), equalTo(6));
    assertThat("Invalid column value.", genericRow.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Invalid column value.", genericRow.getColumns().get(1), equalTo(1L));
    assertThat("Invalid column value.", ((Double[]) genericRow.getColumns().get(4))[0], equalTo(100.0));
    assertThat("Invalid column value.", ((Map<String, Double>) genericRow.getColumns().get(5)).get("key1"), equalTo(100.0));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer), Test (org.junit.Test)
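
The genericRecord returned by the mocked deserializer is a fixture defined elsewhere in the test class. A hedged sketch of how such a fixture could be built with plain Avro is shown below; the field names are hypothetical, the asserted values (columns 0, 1, 4, and 5) are taken from the test above, and the remaining two columns are filled with plausible placeholders.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import java.util.Arrays;
import java.util.Collections;

// Sketch only: a six-field Avro record whose values line up with the columns
// asserted in shouldCreateCorrectRow. Field names are invented.
public class AvroRowFixture {

    public static GenericRecord sampleRecord() {
        final Schema avroSchema = SchemaBuilder.record("TestRow").fields()
            .requiredLong("ordertime")
            .requiredLong("orderid")
            .requiredString("itemid")
            .requiredDouble("orderunits")
            .name("arraycol").type().array().items().doubleType().noDefault()
            .name("mapcol").type().map().values().doubleType().noDefault()
            .endRecord();

        final GenericRecord record = new GenericData.Record(avroSchema);
        record.put("ordertime", 1511897796092L);
        record.put("orderid", 1L);
        record.put("itemid", "item_1");
        record.put("orderunits", 10.0);
        record.put("arraycol", Arrays.asList(100.0));
        record.put("mapcol", Collections.singletonMap("key1", 100.0));
        return record;
    }
}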

Example 25 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeCorrectly:

@Test
@SuppressWarnings("unchecked")
public void shouldDeserializeCorrectly() {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
    byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
    GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(row);
    assertThat("Incorrect deserializarion", row.getColumns().size(), equalTo(6));
    assertThat("Incorrect deserializarion", row.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Incorrect deserializarion", row.getColumns().get(1), equalTo(1L));
    assertThat("Incorrect deserializarion", row.getColumns().get(2), equalTo("item_1"));
    assertThat("Incorrect deserializarion", row.getColumns().get(3), equalTo(10.0));
    assertThat("Incorrect deserializarion", ((Double[]) row.getColumns().get(4)).length, equalTo(1));
    assertThat("Incorrect deserializarion", ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), List (java.util.List), SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient), Test (org.junit.Test)
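
getSerializedRow is a private helper in the test class and is not shown on this page. A rough sketch of what such a helper might do, assuming the Avro schema's fields are in the same order as the GenericRow columns and that the serializer is backed by the supplied (mock) schema registry client; the class name is invented and the real helper may differ.

import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import io.confluent.ksql.GenericRow;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

// Sketch only: copies GenericRow columns into an Avro record by field position
// and serializes it with the Confluent Avro serializer.
public class RowSerializationSketch {

    public static byte[] getSerializedRow(final String topic,
                                          final SchemaRegistryClient schemaRegistryClient,
                                          final Schema avroSchema,
                                          final GenericRow genericRow) {
        final Map<String, Object> config = new HashMap<>();
        config.put("schema.registry.url", "");        // required key; the supplied client is what is actually used
        config.put("auto.register.schemas", true);
        final KafkaAvroSerializer serializer = new KafkaAvroSerializer(schemaRegistryClient, config);

        final GenericRecord avroRecord = new GenericData.Record(avroSchema);
        for (int i = 0; i < genericRow.getColumns().size(); i++) {
            final Schema.Field field = avroSchema.getFields().get(i);
            final Object column = genericRow.getColumns().get(i);
            // Avro expects java.util.List for array fields, so unwrap Object[] columns.
            if (column instanceof Object[]) {
                avroRecord.put(field.name(), Arrays.asList((Object[]) column));
            } else {
                avroRecord.put(field.name(), column);
            }
        }
        return serializer.serialize(topic, avroRecord);
    }
}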

Aggregations

GenericRow (io.confluent.ksql.GenericRow): 65 usages
Test (org.junit.Test): 38 usages
HashMap (java.util.HashMap): 27 usages
Schema (org.apache.kafka.connect.data.Schema): 19 usages
List (java.util.List): 15 usages
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 15 usages
ArrayList (java.util.ArrayList): 11 usages
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 9 usages
IntegrationTest (io.confluent.common.utils.IntegrationTest): 8 usages
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 8 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5 usages
KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe): 5 usages
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 5 usages
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl): 5 usages
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 5 usages
Map (java.util.Map): 5 usages
GenericRecord (org.apache.avro.generic.GenericRecord): 4 usages
Windowed (org.apache.kafka.streams.kstream.Windowed): 4 usages
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer): 3 usages
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression): 3 usages