Example usage of io.confluent.ksql.api.client.Row from the confluentinc/ksql project: class ClientMutationIntegrationTest, method shouldExecuteQueryWithProperties.
@Test
public void shouldExecuteQueryWithProperties() {
  // Given: a push query issued with 'auto.offset.reset' overridden to 'latest'
  final Map<String, Object> requestProperties = new HashMap<>();
  requestProperties.put("auto.offset.reset", "latest");
  final String query = "SELECT * FROM " + TEST_STREAM + " EMIT CHANGES LIMIT 1;";

  // Row to insert, built field by field for readability. Covers every column type.
  final KsqlObject rowToInsert = new KsqlObject();
  rowToInsert.put("K", new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties")));
  rowToInsert.put("STR", "Value_shouldExecuteQueryWithProperties");
  rowToInsert.put("LONG", 2000L);
  rowToInsert.put("DEC", new BigDecimal("12.34"));
  rowToInsert.put("BYTES_", new byte[] { 0, 1, 2 });
  rowToInsert.put("ARRAY", new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties"));
  rowToInsert.put("MAP", new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties"));
  rowToInsert.put("STRUCT", new KsqlObject().put("F1", 4));
  rowToInsert.put("COMPLEX", COMPLEX_FIELD_VALUE);
  rowToInsert.put("TIMESTAMP", "1970-01-01T00:00:00.001");
  rowToInsert.put("DATE", "1970-01-01");
  rowToInsert.put("TIME", "00:00:00");

  // When
  final BatchedQueryResult queryResult = client.executeQuery(query, requestProperties);

  // Then: the newly inserted row arrives. Collect it on a background thread,
  // since BatchedQueryResult.get() blocks until the query completes.
  final AtomicReference<Row> receivedRow = new AtomicReference<>();
  new Thread(() -> {
    try {
      final List<Row> rows = queryResult.get();
      assertThat(rows, hasSize(1));
      receivedRow.set(rows.get(0));
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }
  }).start();

  // Insert the row, retrying until the query observes it — the query may not
  // have started by the time of the first insert.
  final Row row = assertThatEventually(() -> {
    try {
      client.insertInto(TEST_STREAM, rowToInsert).get();
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }
    return receivedRow.get();
  }, is(notNullValue()));

  // Verify every column of the received row round-tripped as expected.
  assertThat(row.getKsqlObject("K"), is(new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties"))));
  assertThat(row.getString("STR"), is("Value_shouldExecuteQueryWithProperties"));
  assertThat(row.getLong("LONG"), is(2000L));
  assertThat(row.getDecimal("DEC"), is(new BigDecimal("12.34")));
  assertThat(row.getBytes("BYTES_"), is(new byte[] { 0, 1, 2 }));
  assertThat(row.getKsqlArray("ARRAY"), is(new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties")));
  assertThat(row.getKsqlObject("MAP"), is(new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties")));
  assertThat(row.getKsqlObject("STRUCT"), is(new KsqlObject().put("F1", 4)));
  assertThat(row.getKsqlObject("COMPLEX"), is(EXPECTED_COMPLEX_FIELD_VALUE));
  assertThat(row.getString("TIMESTAMP"), is("1970-01-01T00:00:00.001"));
  assertThat(row.getString("DATE"), is("1970-01-01"));
  // NOTE(review): inserted as "00:00:00" but read back as "00:00" — presumably
  // LocalTime.toString() dropping the zero seconds field; confirm against the client.
  assertThat(row.getString("TIME"), is("00:00"));
}
Example usage of io.confluent.ksql.api.client.Row from the confluentinc/ksql project: class ClientIntegrationTest, method shouldDeliverBufferedRowsViaPollIfComplete.
@Test
public void shouldDeliverBufferedRowsViaPollIfComplete() throws Exception {
  // Given: a limited push query that has already completed, leaving its rows buffered
  final StreamedQueryResult queryResult = client.streamQuery(PUSH_QUERY_WITH_LIMIT).get();
  assertThatEventually(queryResult::isComplete, is(true));

  // When / Then: each buffered row is still retrievable via poll(), in order
  for (int rowIndex = 0; rowIndex < PUSH_QUERY_LIMIT_NUM_ROWS; rowIndex++) {
    verifyStreamRowWithIndex(queryResult.poll(), rowIndex);
  }
  // Once drained, poll() returns null rather than blocking
  assertThat(queryResult.poll(), is(nullValue()));
}
Example usage of io.confluent.ksql.api.client.Row from the confluentinc/ksql project: class ClientIntegrationTest, method shouldStreamPullQueryOnStreamSync.
@Test
public void shouldStreamPullQueryOnStreamSync() throws Exception {
  // When: issuing a pull query against a stream via the streaming endpoint
  final StreamedQueryResult queryResult = client.streamQuery(PULL_QUERY_ON_STREAM).get();

  // Then: metadata matches the test stream's schema
  assertThat(queryResult.columnNames(), is(TEST_COLUMN_NAMES));
  assertThat(queryResult.columnTypes(), is(TEST_COLUMN_TYPES));
  assertThat(queryResult.queryID(), is(notNullValue()));

  // Drain all rows synchronously; poll() yields null once the result is exhausted
  final List<Row> receivedRows = new LinkedList<>();
  Row row;
  while ((row = queryResult.poll()) != null) {
    receivedRows.add(row);
  }
  verifyStreamRows(receivedRows, 6);
  assertThatEventually(queryResult::isComplete, is(true));
}
Example usage of io.confluent.ksql.api.client.Row from the confluentinc/ksql project: class ClientIntegrationTest, method shouldAllowSubscribeStreamedQueryResultIfComplete.
@Test
public void shouldAllowSubscribeStreamedQueryResultIfComplete() throws Exception {
  // Given: a push query that has already hit its LIMIT and completed
  final StreamedQueryResult queryResult = client.streamQuery(PUSH_QUERY_WITH_LIMIT).get();
  assertThatEventually(queryResult::isComplete, is(true));

  // When: subscribing after completion
  final TestSubscriber<Row> subscriber = subscribeAndWait(queryResult);
  // Nothing is delivered until the subscriber signals demand
  assertThat(subscriber.getValues(), hasSize(0));
  subscriber.getSub().request(PUSH_QUERY_LIMIT_NUM_ROWS);

  // Then: all buffered rows are delivered, with no error
  assertThatEventually(subscriber::getValues, hasSize(PUSH_QUERY_LIMIT_NUM_ROWS));
  verifyStreamRows(subscriber.getValues(), PUSH_QUERY_LIMIT_NUM_ROWS);
  assertThat(subscriber.getError(), is(nullValue()));
}
Example usage of io.confluent.ksql.api.client.Row from the confluentinc/ksql project: class ClientIntegrationTest, method shouldStreamPullQueryOnTruncatedStreamSync.
@Test
public void shouldStreamPullQueryOnTruncatedStreamSync() throws Exception {
  // Given: the underlying topic is truncated (repeated here to be certain)
  truncateTopic(TRUNCATED_TEST_TOPIC);

  // When: pull-querying the truncated stream
  final StreamedQueryResult queryResult =
      client.streamQuery("SELECT * FROM " + TRUNCATED_TEST_STREAM + ";").get();

  // Then: schema metadata is still reported even though no data remains
  assertThat(queryResult.columnNames(), is(TEST_COLUMN_NAMES));
  assertThat(queryResult.columnTypes(), is(TEST_COLUMN_TYPES));
  assertThat(queryResult.queryID(), is(notNullValue()));

  // Draining yields zero rows; poll() returns null immediately on exhaustion
  final List<Row> receivedRows = new LinkedList<>();
  Row row;
  while ((row = queryResult.poll()) != null) {
    receivedRows.add(row);
  }
  verifyStreamRows(receivedRows, 0);
  assertThatEventually(queryResult::isComplete, is(true));
}
Aggregations