
Example 21 with Row

Use of io.confluent.ksql.api.client.Row in project ksql by confluentinc.

From class ClientMutationIntegrationTest, method shouldExecuteQueryWithProperties.

@Test
public void shouldExecuteQueryWithProperties() {
    // Given
    final Map<String, Object> properties = new HashMap<>();
    properties.put("auto.offset.reset", "latest");
    final String sql = "SELECT * FROM " + TEST_STREAM + " EMIT CHANGES LIMIT 1;";
    final KsqlObject insertRow = new KsqlObject()
        .put("K", new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties")))
        .put("STR", "Value_shouldExecuteQueryWithProperties")
        .put("LONG", 2000L)
        .put("DEC", new BigDecimal("12.34"))
        .put("BYTES_", new byte[] { 0, 1, 2 })
        .put("ARRAY", new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties"))
        .put("MAP", new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties"))
        .put("STRUCT", new KsqlObject().put("F1", 4))
        .put("COMPLEX", COMPLEX_FIELD_VALUE)
        .put("TIMESTAMP", "1970-01-01T00:00:00.001")
        .put("DATE", "1970-01-01")
        .put("TIME", "00:00:00");
    // When
    final BatchedQueryResult queryResult = client.executeQuery(sql, properties);
    // Then: a newly inserted row arrives
    // Wait for row to arrive
    final AtomicReference<Row> rowRef = new AtomicReference<>();
    new Thread(() -> {
        try {
            final List<Row> rows = queryResult.get();
            assertThat(rows, hasSize(1));
            rowRef.set(rows.get(0));
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }).start();
    // Insert a new row
    final Row row = assertThatEventually(() -> {
        // The insert may need to be retried, in case the query hadn't started by the time of the first attempt
        try {
            client.insertInto(TEST_STREAM, insertRow).get();
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
        return rowRef.get();
    }, is(notNullValue()));
    // Verify received row
    assertThat(row.getKsqlObject("K"), is(new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties"))));
    assertThat(row.getString("STR"), is("Value_shouldExecuteQueryWithProperties"));
    assertThat(row.getLong("LONG"), is(2000L));
    assertThat(row.getDecimal("DEC"), is(new BigDecimal("12.34")));
    assertThat(row.getBytes("BYTES_"), is(new byte[] { 0, 1, 2 }));
    assertThat(row.getKsqlArray("ARRAY"), is(new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties")));
    assertThat(row.getKsqlObject("MAP"), is(new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties")));
    assertThat(row.getKsqlObject("STRUCT"), is(new KsqlObject().put("F1", 4)));
    assertThat(row.getKsqlObject("COMPLEX"), is(EXPECTED_COMPLEX_FIELD_VALUE));
    assertThat(row.getString("TIMESTAMP"), is("1970-01-01T00:00:00.001"));
    assertThat(row.getString("DATE"), is("1970-01-01"));
    assertThat(row.getString("TIME"), is("00:00"));
}
Also used : HashMap(java.util.HashMap) AtomicReference(java.util.concurrent.atomic.AtomicReference) Matchers.containsString(org.hamcrest.Matchers.containsString) BigDecimal(java.math.BigDecimal) ZooKeeperClientException(kafka.zookeeper.ZooKeeperClientException) KsqlClientException(io.confluent.ksql.api.client.exception.KsqlClientException) ExecutionException(java.util.concurrent.ExecutionException) KsqlObject(io.confluent.ksql.api.client.KsqlObject) ConnectorList(io.confluent.ksql.rest.entity.ConnectorList) List(java.util.List) Row(io.confluent.ksql.api.client.Row) KsqlArray(io.confluent.ksql.api.client.KsqlArray) BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)
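
The test above relies on a pre-built client and test constants (TEST_STREAM, COMPLEX_FIELD_VALUE). The standalone sketch below shows roughly how the same executeQuery call with per-request query properties could look outside the test harness; the host, port, and stream name are assumptions, not values taken from the test.

import io.confluent.ksql.api.client.BatchedQueryResult;
import io.confluent.ksql.api.client.Client;
import io.confluent.ksql.api.client.ClientOptions;
import io.confluent.ksql.api.client.Row;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ExecuteQueryWithPropertiesSketch {
    public static void main(String[] args) throws Exception {
        // Assumed connection details; adjust to your ksqlDB server.
        final ClientOptions options = ClientOptions.create()
            .setHost("localhost")
            .setPort(8088);
        final Client client = Client.create(options);

        // Query properties are passed per request, as in the test above.
        final Map<String, Object> properties = new HashMap<>();
        properties.put("auto.offset.reset", "latest");

        // "MY_STREAM" is a placeholder stream name.
        final BatchedQueryResult queryResult =
            client.executeQuery("SELECT * FROM MY_STREAM EMIT CHANGES LIMIT 1;", properties);

        // get() blocks until the LIMIT is reached and all rows are buffered.
        final List<Row> rows = queryResult.get();
        for (final Row row : rows) {
            System.out.println(row.values());
        }

        client.close();
    }
}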

Example 22 with Row

Use of io.confluent.ksql.api.client.Row in project ksql by confluentinc.

From class ClientIntegrationTest, method shouldDeliverBufferedRowsViaPollIfComplete.

@Test
public void shouldDeliverBufferedRowsViaPollIfComplete() throws Exception {
    // Given
    final StreamedQueryResult streamedQueryResult = client.streamQuery(PUSH_QUERY_WITH_LIMIT).get();
    assertThatEventually(streamedQueryResult::isComplete, is(true));
    // When / Then
    for (int i = 0; i < PUSH_QUERY_LIMIT_NUM_ROWS; i++) {
        final Row row = streamedQueryResult.poll();
        verifyStreamRowWithIndex(row, i);
    }
    assertThat(streamedQueryResult.poll(), is(nullValue()));
}
Also used : Row(io.confluent.ksql.api.client.Row) GenericRow(io.confluent.ksql.GenericRow) StreamedQueryResult(io.confluent.ksql.api.client.StreamedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)
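
The test waits for isComplete() so that poll() is guaranteed not to block indefinitely. When completion cannot be assumed, a bounded poll(Duration) is an alternative. A minimal sketch, assuming an already-created Client; the query string and timeout are placeholders:

import io.confluent.ksql.api.client.Client;
import io.confluent.ksql.api.client.Row;
import io.confluent.ksql.api.client.StreamedQueryResult;
import java.time.Duration;

static void drainWithTimeout(final Client client) throws Exception {
    final StreamedQueryResult result =
        client.streamQuery("SELECT * FROM MY_STREAM EMIT CHANGES LIMIT 10;").get();
    Row row;
    // poll(Duration) returns null once the query completes or the timeout elapses,
    // so this loop stops after the LIMIT is reached or after five idle seconds.
    while ((row = result.poll(Duration.ofSeconds(5))) != null) {
        System.out.println(row.values());
    }
}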

Example 23 with Row

Use of io.confluent.ksql.api.client.Row in project ksql by confluentinc.

From class ClientIntegrationTest, method shouldStreamPullQueryOnStreamSync.

@Test
public void shouldStreamPullQueryOnStreamSync() throws Exception {
    // When
    final StreamedQueryResult streamedQueryResult = client.streamQuery(PULL_QUERY_ON_STREAM).get();
    // Then
    assertThat(streamedQueryResult.columnNames(), is(TEST_COLUMN_NAMES));
    assertThat(streamedQueryResult.columnTypes(), is(TEST_COLUMN_TYPES));
    assertThat(streamedQueryResult.queryID(), is(notNullValue()));
    final List<Row> results = new LinkedList<>();
    Row row;
    while (true) {
        row = streamedQueryResult.poll();
        if (row == null) {
            break;
        } else {
            results.add(row);
        }
    }
    verifyStreamRows(results, 6);
    assertThatEventually(streamedQueryResult::isComplete, is(true));
}
Also used : Row(io.confluent.ksql.api.client.Row) GenericRow(io.confluent.ksql.GenericRow) LinkedList(java.util.LinkedList) StreamedQueryResult(io.confluent.ksql.api.client.StreamedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)
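
The poll-until-null loop above is repeated verbatim in Example 25. In application code it could be factored into a small helper, sketched below; drainAll is a hypothetical name, not part of the client API.

import io.confluent.ksql.api.client.Row;
import io.confluent.ksql.api.client.StreamedQueryResult;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: collect rows until poll() returns null.
// Only appropriate once the result is known to be complete, or for queries that
// terminate on their own (such as the pull query on a stream above), since poll()
// blocks while further rows may still arrive.
static List<Row> drainAll(final StreamedQueryResult result) {
    final List<Row> rows = new ArrayList<>();
    Row row;
    while ((row = result.poll()) != null) {
        rows.add(row);
    }
    return rows;
}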

Example 24 with Row

Use of io.confluent.ksql.api.client.Row in project ksql by confluentinc.

From class ClientIntegrationTest, method shouldAllowSubscribeStreamedQueryResultIfComplete.

@Test
public void shouldAllowSubscribeStreamedQueryResultIfComplete() throws Exception {
    // Given
    final StreamedQueryResult streamedQueryResult = client.streamQuery(PUSH_QUERY_WITH_LIMIT).get();
    assertThatEventually(streamedQueryResult::isComplete, is(true));
    // When
    final TestSubscriber<Row> subscriber = subscribeAndWait(streamedQueryResult);
    assertThat(subscriber.getValues(), hasSize(0));
    subscriber.getSub().request(PUSH_QUERY_LIMIT_NUM_ROWS);
    // Then
    assertThatEventually(subscriber::getValues, hasSize(PUSH_QUERY_LIMIT_NUM_ROWS));
    verifyStreamRows(subscriber.getValues(), PUSH_QUERY_LIMIT_NUM_ROWS);
    assertThat(subscriber.getError(), is(nullValue()));
}
Also used : Row(io.confluent.ksql.api.client.Row) GenericRow(io.confluent.ksql.GenericRow) StreamedQueryResult(io.confluent.ksql.api.client.StreamedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)
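
TestSubscriber and subscribeAndWait above are test helpers not shown in this listing. Because StreamedQueryResult implements org.reactivestreams.Publisher<Row>, a plain Reactive Streams subscriber can consume it directly. A minimal sketch; the class name, log messages, and request size of one are illustrative choices:

import io.confluent.ksql.api.client.Row;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

// A minimal Reactive Streams subscriber that prints each Row as it arrives.
public class PrintingRowSubscriber implements Subscriber<Row> {

    private Subscription subscription;

    @Override
    public void onSubscribe(final Subscription subscription) {
        this.subscription = subscription;
        // Request the first row; one more is requested in onNext to keep rows flowing.
        subscription.request(1);
    }

    @Override
    public void onNext(final Row row) {
        System.out.println("Received row: " + row.values());
        subscription.request(1);
    }

    @Override
    public void onError(final Throwable t) {
        System.err.println("Query failed: " + t.getMessage());
    }

    @Override
    public void onComplete() {
        System.out.println("Query complete");
    }
}

It would be attached with streamedQueryResult.subscribe(new PrintingRowSubscriber()), after which rows are delivered asynchronously as they are produced.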

Example 25 with Row

Use of io.confluent.ksql.api.client.Row in project ksql by confluentinc.

From class ClientIntegrationTest, method shouldStreamPullQueryOnTruncatedStreamSync.

@Test
public void shouldStreamPullQueryOnTruncatedStreamSync() throws Exception {
    // double-check to make sure it's really truncated
    truncateTopic(TRUNCATED_TEST_TOPIC);
    // When
    final StreamedQueryResult streamedQueryResult = client.streamQuery("SELECT * FROM " + TRUNCATED_TEST_STREAM + ";").get();
    // Then
    assertThat(streamedQueryResult.columnNames(), is(TEST_COLUMN_NAMES));
    assertThat(streamedQueryResult.columnTypes(), is(TEST_COLUMN_TYPES));
    assertThat(streamedQueryResult.queryID(), is(notNullValue()));
    final List<Row> results = new LinkedList<>();
    Row row;
    while (true) {
        row = streamedQueryResult.poll();
        if (row == null) {
            break;
        } else {
            results.add(row);
        }
    }
    verifyStreamRows(results, 0);
    assertThatEventually(streamedQueryResult::isComplete, is(true));
}
Also used : Row(io.confluent.ksql.api.client.Row) GenericRow(io.confluent.ksql.GenericRow) LinkedList(java.util.LinkedList) StreamedQueryResult(io.confluent.ksql.api.client.StreamedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)

Aggregations

Row (io.confluent.ksql.api.client.Row): 27
Test (org.junit.Test): 16
IntegrationTest (io.confluent.common.utils.IntegrationTest): 13
StreamedQueryResult (io.confluent.ksql.api.client.StreamedQueryResult): 10
GenericRow (io.confluent.ksql.GenericRow): 8
BatchedQueryResult (io.confluent.ksql.api.client.BatchedQueryResult): 7
Matchers.containsString (org.hamcrest.Matchers.containsString): 6
KsqlObject (io.confluent.ksql.api.client.KsqlObject): 5
ExecutionException (java.util.concurrent.ExecutionException): 5
KsqlArray (io.confluent.ksql.api.client.KsqlArray): 4
KsqlClientException (io.confluent.ksql.api.client.exception.KsqlClientException): 4
BigDecimal (java.math.BigDecimal): 4
HashMap (java.util.HashMap): 3
LinkedList (java.util.LinkedList): 3
ZooKeeperClientException (kafka.zookeeper.ZooKeeperClientException): 3
MigrationException (io.confluent.ksql.tools.migrations.MigrationException): 2
JsonArray (io.vertx.core.json.JsonArray): 2
JsonObject (io.vertx.core.json.JsonObject): 2
ArrayList (java.util.ArrayList): 2
AcksPublisher (io.confluent.ksql.api.client.AcksPublisher): 1