Example 56 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

The class SchemaKTable, method into().

@SuppressWarnings("unchecked")
@Override
public SchemaKTable into(final String kafkaTopicName, final Serde<GenericRow> topicValueSerDe, final Set<Integer> rowkeyIndexes) {
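    // Strip the rowkey columns from every row before writing to the sink topic;
    // the two branches below differ only in the key serde they write with.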
    if (isWindowed) {
        final Serde<Windowed<String>> windowedSerde = WindowedSerdes.timeWindowedSerdeFrom(String.class);
        ktable.toStream().mapValues((ValueMapper<GenericRow, GenericRow>) row -> {
            if (row == null) {
                return null;
            }
            List<Object> columns = new ArrayList<>();
            for (int i = 0; i < row.getColumns().size(); i++) {
                if (!rowkeyIndexes.contains(i)) {
                    columns.add(row.getColumns().get(i));
                }
            }
            return new GenericRow(columns);
        }).to(kafkaTopicName, Produced.with(windowedSerde, topicValueSerDe));
    } else {
        ktable.toStream().mapValues((ValueMapper<GenericRow, GenericRow>) row -> {
            if (row == null) {
                return null;
            }
            List<Object> columns = new ArrayList<>();
            for (int i = 0; i < row.getColumns().size(); i++) {
                if (!rowkeyIndexes.contains(i)) {
                    columns.add(row.getColumns().get(i));
                }
            }
            return new GenericRow(columns);
        }).to(kafkaTopicName, Produced.with(Serdes.String(), topicValueSerDe));
    }
    return this;
}
Also used: Windowed (org.apache.kafka.streams.kstream.Windowed), GenericRow (io.confluent.ksql.GenericRow), ValueMapper (org.apache.kafka.streams.kstream.ValueMapper), ArrayList (java.util.ArrayList), List (java.util.List)
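
Both branches of into() apply the same projection; factored out as a standalone helper (a sketch for illustration, not code from the ksql repo), the logic looks like this:

import io.confluent.ksql.GenericRow;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

// Hypothetical helper mirroring the mapValues lambda above: it drops every
// column whose index appears in rowkeyIndexes.
public final class RowProjection {

    public static GenericRow withoutRowkeyColumns(final GenericRow row, final Set<Integer> rowkeyIndexes) {
        if (row == null) {
            return null;
        }
        final List<Object> columns = new ArrayList<>();
        for (int i = 0; i < row.getColumns().size(); i++) {
            if (!rowkeyIndexes.contains(i)) {
                columns.add(row.getColumns().get(i));
            }
        }
        return new GenericRow(columns);
    }
}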

Example 57 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

The class StreamedQueryResourceTest, method testStreamQuery().

@Test
public void testStreamQuery() throws Throwable {
    final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
    final Thread.UncaughtExceptionHandler threadExceptionHandler = (thread, exception) -> threadException.compareAndSet(null, exception);
    final String queryString = "SELECT * FROM test_stream;";
    final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
    final LinkedList<GenericRow> writtenRows = new LinkedList<>();
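    // Background thread that endlessly produces rows, recording each one in
    // writtenRows and handing it to the queue the streaming endpoint drains.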
    final Thread rowQueuePopulatorThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                for (int i = 0; ; i++) {
                    String key = Integer.toString(i);
                    GenericRow value = new GenericRow(Collections.singletonList(i));
                    synchronized (writtenRows) {
                        writtenRows.add(value);
                    }
                    rowQueue.put(new KeyValue<>(key, value));
                }
            } catch (InterruptedException exception) {
            // This should happen during the test, so it's fine
            }
        }
    }, "Row Queue Populator");
    rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
    rowQueuePopulatorThread.start();
    final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
    mockKafkaStreams.start();
    expectLastCall();
    mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
    expectLastCall();
    expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
    mockKafkaStreams.close();
    expectLastCall();
    mockKafkaStreams.cleanUp();
    expectLastCall();
    final OutputNode mockOutputNode = mock(OutputNode.class);
    expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));
    final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
    KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
    KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
    expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
    final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(queryString, mockKafkaStreams, mockOutputNode, "", rowQueue, DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
    expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties)).andReturn(Collections.singletonList(queuedQueryMetadata));
    mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
    expectLastCall();
    StatementParser mockStatementParser = mock(StatementParser.class);
    expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
    replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);
    StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
    Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
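    // Pipe the streaming response through a 1-byte buffer so the writer thread
    // blocks until the test reads each byte. (EOFPipedOutputStream is a
    // test-local subclass not shown in this excerpt.)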
    PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
    PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
    StreamingOutput responseStream = (StreamingOutput) response.getEntity();
    final Thread queryWriterThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                responseStream.write(responseOutputStream);
            } catch (EOFException exception) {
            // It's fine
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            }
        }
    }, "Query Writer");
    queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
    queryWriterThread.start();
    Scanner responseScanner = new Scanner(responseInputStream);
    ObjectMapper objectMapper = new ObjectMapper();
    for (int i = 0; i < 5; i++) {
        if (!responseScanner.hasNextLine()) {
            throw new Exception("Response input stream failed to have expected line available");
        }
        String responseLine = responseScanner.nextLine();
        if (responseLine.trim().isEmpty()) {
            i--;
        } else {
            GenericRow expectedRow;
            synchronized (writtenRows) {
                expectedRow = writtenRows.poll();
            }
            GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
            assertEquals(expectedRow, testRow);
        }
    }
    responseOutputStream.close();
    queryWriterThread.join();
    rowQueuePopulatorThread.interrupt();
    rowQueuePopulatorThread.join();
    // Definitely want to make sure that the Kafka Streams instance has been closed and cleaned up
    verify(mockKafkaStreams);
    // If one of the other threads has somehow managed to throw an exception without breaking things up until this
    // point, we throw that exception now in the main thread and cause the test to fail
    Throwable exception = threadException.get();
    if (exception != null) {
        throw exception;
    }
}
Also used: Query (io.confluent.ksql.parser.tree.Query), StreamedQueryResource (io.confluent.ksql.rest.server.resources.streaming.StreamedQueryResource), StreamedRow (io.confluent.ksql.rest.entity.StreamedRow), KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient), KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl), KsqlEngine (io.confluent.ksql.KsqlEngine), KsqlRequest (io.confluent.ksql.rest.entity.KsqlRequest), QueuedQueryMetadata (io.confluent.ksql.util.QueuedQueryMetadata), DataSource (io.confluent.ksql.serde.DataSource), OutputNode (io.confluent.ksql.planner.plan.OutputNode), StatementParser (io.confluent.ksql.rest.server.StatementParser), GenericRow (io.confluent.ksql.GenericRow), KafkaStreams (org.apache.kafka.streams.KafkaStreams), KeyValue (org.apache.kafka.streams.KeyValue), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), StreamingOutput (javax.ws.rs.core.StreamingOutput), Response (javax.ws.rs.core.Response), Scanner (java.util.Scanner), Map (java.util.Map), LinkedList (java.util.LinkedList), Collections (java.util.Collections), SynchronousQueue (java.util.concurrent.SynchronousQueue), AtomicReference (java.util.concurrent.atomic.AtomicReference), PipedInputStream (java.io.PipedInputStream), PipedOutputStream (java.io.PipedOutputStream), IOException (java.io.IOException), EOFException (java.io.EOFException), EasyMock.mock/anyObject/expect/expectLastCall/replay/verify (org.easymock.EasyMock), Test (org.junit.Test), Assert.assertEquals (org.junit.Assert.assertEquals)

Example 58 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

The class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeWithMissingFields().

@Test
public void shouldDeserializeWithMissingFields() {
    String schemaStr1 = "{"
        + "\"namespace\": \"kql\","
        + " \"name\": \"orders\","
        + " \"type\": \"record\","
        + " \"fields\": ["
        + "     {\"name\": \"orderTime\", \"type\": \"long\"},"
        + "     {\"name\": \"orderId\",  \"type\": \"long\"},"
        + "     {\"name\": \"itemId\", \"type\": \"string\"},"
        + "     {\"name\": \"orderUnits\", \"type\": \"double\"}"
        + " ]"
        + "}";
    Schema.Parser parser = new Schema.Parser();
    Schema avroSchema1 = parser.parse(schemaStr1);
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
    GenericRow genericRow = new GenericRow(columns);
    byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
    GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
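    // avroSchema1 has only 4 fields while the target schema has 6; the
    // deserializer pads the row with nulls for the missing columns.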
    assertThat("Incorrect deserializarion", row.getColumns().size(), equalTo(6));
    assertThat("Incorrect deserializarion", (Long) row.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Incorrect deserializarion", (Long) row.getColumns().get(1), equalTo(1L));
    assertThat("Incorrect deserializarion", (String) row.getColumns().get(2), equalTo("item_1"));
    Assert.assertNull(row.getColumns().get(4));
    Assert.assertNull(row.getColumns().get(5));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient), Schema (org.apache.avro.Schema), List (java.util.List), Test (org.junit.Test)

Example 59 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly().

@Test
public void shouldSerializeRowCorrectly() {
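    // Round-trips a row (including array and map columns) through the KSQL Avro
    // serializer and the plain KafkaAvroDeserializer, then checks each field.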
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(genericRecord);
    assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
    assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
    assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
    assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
    GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
    Map map = (Map) genericRecord.get("mapcol".toUpperCase());
    assertThat("Incorrect serialization.", array.size(), equalTo(1));
    assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
    assertThat("Incorrect serialization.", map.size(), equalTo(1));
    assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Also used: HashMap (java.util.HashMap), Map (java.util.Map), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient), KsqlConfig (io.confluent.ksql.util.KsqlConfig), KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer), GenericData (org.apache.avro.generic.GenericData), GenericRecord (org.apache.avro.generic.GenericRecord), GenericRow (io.confluent.ksql.GenericRow), Utf8 (org.apache.avro.util.Utf8), List (java.util.List), Test (org.junit.Test)

Example 60 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullCorrectly().

@Test
public void shouldSerializeRowWithNullCorrectly() {
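    // Same round-trip as shouldSerializeRowCorrectly, but with a null itemId to
    // confirm null column values survive Avro serialization.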
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    List<Object> columns = Arrays.asList(1511897796092L, 1L, null, 10.0, new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(genericRecord);
    assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
    assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
    assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()), equalTo(null));
    assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
    GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
    Map map = (Map) genericRecord.get("mapcol".toUpperCase());
    assertThat("Incorrect serialization.", array.size(), equalTo(1));
    assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
    assertThat("Incorrect serialization.", map, equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));
}
Also used: HashMap (java.util.HashMap), Map (java.util.Map), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient), KsqlConfig (io.confluent.ksql.util.KsqlConfig), KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer), GenericData (org.apache.avro.generic.GenericData), GenericRecord (org.apache.avro.generic.GenericRecord), GenericRow (io.confluent.ksql.GenericRow), Utf8 (org.apache.avro.util.Utf8), List (java.util.List), Test (org.junit.Test)

Aggregations

Types used alongside GenericRow across the 65 examples, with occurrence counts:

GenericRow (io.confluent.ksql.GenericRow): 65
Test (org.junit.Test): 38
HashMap (java.util.HashMap): 27
Schema (org.apache.kafka.connect.data.Schema): 19
List (java.util.List): 15
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 15
ArrayList (java.util.ArrayList): 11
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 9
IntegrationTest (io.confluent.common.utils.IntegrationTest): 8
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 8
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5
KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe): 5
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 5
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl): 5
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 5
Map (java.util.Map): 5
GenericRecord (org.apache.avro.generic.GenericRecord): 4
Windowed (org.apache.kafka.streams.kstream.Windowed): 4
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer): 3
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression): 3
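
For reference, a minimal standalone sketch of the GenericRow usage pattern that recurs throughout these examples. It assumes only the constructor and getColumns() accessor shown above; the class name GenericRowDemo is made up for illustration:

import io.confluent.ksql.GenericRow;
import java.util.Arrays;
import java.util.List;

public final class GenericRowDemo {

    public static void main(final String[] args) {
        // A row is an ordered list of column values, untyped at compile time.
        final List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
        final GenericRow row = new GenericRow(columns);
        System.out.println(row.getColumns().get(2)); // prints item_1
    }
}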