Usage of io.confluent.ksql.api.utils.QueryResponse in the ksql project (confluentinc).
Example from class RestApiTest, method shouldExecutePullQuery_allTypes.
// Verifies that a pull query returns the expected single row across every combination of
// response format, HTTP version, endpoint, and endpoint-migration flag.
@Test
public void shouldExecutePullQuery_allTypes() {
// Response content types under test: the delimited streaming format and the classic KSQL v1 JSON format.
ImmutableList<String> formats = ImmutableList.of("application/vnd.ksqlapi.delimited.v1", KsqlMediaType.KSQL_V1_JSON.mediaType());
ImmutableList<HttpVersion> httpVersions = ImmutableList.of(HTTP_1_1, HTTP_2);
ImmutableList<String> endpoints = ImmutableList.of("/query-stream", "/query");
// Values for the KSQL_ENDPOINT_MIGRATE_QUERY_CONFIG request override.
ImmutableList<Boolean> migrationFlags = ImmutableList.of(true, false);
final String query = "SELECT COUNT, USERID from " + AGG_TABLE + " WHERE USERID='" + AN_AGG_KEY + "';";
for (String format : formats) {
for (HttpVersion version : httpVersions) {
for (String endpoint : endpoints) {
for (Boolean migrationEnabled : migrationFlags) {
// A /query request with the delimited content type is treated as migrated even when the
// migration flag is off — NOTE(review): presumably routed to the query-stream handler
// server-side; confirm against the endpoint dispatch code.
boolean routedToQueryStream = endpoint.equals("/query") && format.equals("application/vnd.ksqlapi.delimited.v1");
boolean migrated = routedToQueryStream || migrationEnabled;
// Skip the legacy (non-migrated) /query endpoint over HTTP/2 — that combination is not exercised.
if (!migrated && endpoint.equals("/query") && version == HTTP_2) {
LOG.info("Skipping pull query combination {} {} {} {}", format, version, endpoint, migrationEnabled);
continue;
}
LOG.info("Trying pull query combination {} {} {} {}", format, version, endpoint, migrationEnabled);
Object requestBody;
// Pass the migration flag as a per-request config override.
ImmutableMap<String, Object> overrides = ImmutableMap.of(KsqlConfig.KSQL_ENDPOINT_MIGRATE_QUERY_CONFIG, migrationEnabled);
// Each endpoint takes a different request body type.
if (endpoint.equals("/query-stream")) {
requestBody = new QueryStreamArgs(query, overrides, Collections.emptyMap(), Collections.emptyMap());
} else if (endpoint.equals("/query")) {
requestBody = new KsqlRequest(query, overrides, Collections.emptyMap(), Collections.emptyMap(), null);
} else {
fail("Unknown endpoint " + endpoint);
return;
}
// The two formats produce structurally different payloads, so each is validated separately.
// this is somewhat hard since they have different data, so we don't try.
if (format.equals("application/vnd.ksqlapi.delimited.v1")) {
// Single-element array so the lambda below can publish the parsed response to the outer scope.
QueryResponse[] queryResponse = new QueryResponse[1];
// Retry until exactly one row comes back; any transport/parse error maps to MAX_VALUE
// so the eventually-assertion keeps polling instead of aborting.
assertThatEventually(() -> {
try {
HttpResponse<Buffer> resp = RestIntegrationTestUtil.rawRestRequest(REST_APP, version, POST, endpoint, requestBody, "application/vnd.ksqlapi.delimited.v1", Optional.empty(), Optional.empty());
queryResponse[0] = new QueryResponse(resp.body().toString());
return queryResponse[0].rows.size();
} catch (Throwable t) {
return Integer.MAX_VALUE;
}
}, is(1));
assertThat(queryResponse[0].rows.get(0).getList(), is(ImmutableList.of(1, "USER_1")));
} else if (format.equals(KsqlMediaType.KSQL_V1_JSON.mediaType())) {
// JSON v1 responses arrive as newline-separated header + row lines.
final Supplier<List<String>> call = () -> {
HttpResponse<Buffer> resp = RestIntegrationTestUtil.rawRestRequest(REST_APP, version, POST, endpoint, requestBody, KsqlMediaType.KSQL_V1_JSON.mediaType(), Optional.empty(), Optional.empty());
final String response = resp.body().toString();
return Arrays.asList(response.split(System.lineSeparator()));
};
// When:
final List<String> messages = assertThatEventually(call, hasSize(HEADER + 1));
// Then: one header line (query id + schema) followed by exactly one row line.
assertThat(messages, hasSize(HEADER + 1));
assertThat(messages.get(0), startsWith("[{\"header\":{\"queryId\":\""));
assertThat(messages.get(0), endsWith("\",\"schema\":\"`COUNT` BIGINT, `USERID` STRING KEY\"}},"));
assertThat(messages.get(1), is("{\"row\":{\"columns\":[1,\"USER_1\"]}}]"));
} else {
fail("Unknown format " + format);
return;
}
}
}
}
}
}
Usage of io.confluent.ksql.api.utils.QueryResponse in the ksql project (confluentinc).
Example from class PullQueryMetricsHttp2FunctionalTest, method shouldVerifyMetrics.
/**
 * Checks that running one pull query against a table and one against a stream bumps
 * every pull-query metric (records returned, latency, response size, total requests
 * and request-duration distribution) for both source types.
 */
@Test
public void shouldVerifyMetrics() {
  // Given: handles to the table-scoped and stream-scoped pull-query metrics.
  final KafkaMetric tableRecords = metrics.metric(recordsReturnedTable);
  final KafkaMetric tableLatency = metrics.metric(latencyTable);
  final KafkaMetric tableResponseSize = metrics.metric(responseSizeTable);
  final KafkaMetric tableRequests = metrics.metric(totalRequestsTable);
  final KafkaMetric tableDistribution = metrics.metric(requestDistributionTable);
  final KafkaMetric streamRecords = metrics.metric(recordsReturnedStream);
  final KafkaMetric streamLatency = metrics.metric(latencyStream);
  final KafkaMetric streamResponseSize = metrics.metric(responseSizeStream);
  final KafkaMetric streamRequests = metrics.metric(totalRequestsStream);
  final KafkaMetric streamDistribution = metrics.metric(requestDistributionStream);

  // When: execute one pull query per source type; each must return a single row.
  final String sqlTable = "SELECT COUNT, USERID from " + AGG_TABLE + " WHERE USERID='" + AN_AGG_KEY + "';";
  final QueryResponse tableResponse = executeQuery(sqlTable);
  assertThat(tableResponse.rows, hasSize(1));
  final String sqlStream = "SELECT * from " + PAGE_VIEW_STREAM + " WHERE PAGEID='" + A_STREAM_KEY + "';";
  final QueryResponse streamResponse = executeQuery(sqlStream);
  assertThat(streamResponse.rows, hasSize(1));

  // Then: each metric reflects exactly one request (counts == 1.0, gauges > 1.0).
  assertThat(tableRecords.metricValue(), is(1.0));
  assertThat((Double) tableLatency.metricValue(), greaterThan(1.0));
  assertThat((Double) tableResponseSize.metricValue(), greaterThan(1.0));
  assertThat(tableRequests.metricValue(), is(1.0));
  assertThat((Double) tableDistribution.metricValue(), greaterThan(1.0));
  assertThat(streamRecords.metricValue(), is(1.0));
  assertThat((Double) streamLatency.metricValue(), greaterThan(1.0));
  assertThat((Double) streamResponseSize.metricValue(), greaterThan(1.0));
  assertThat(streamRequests.metricValue(), is(1.0));
  assertThat((Double) streamDistribution.metricValue(), greaterThan(1.0));
}
Usage of io.confluent.ksql.api.utils.QueryResponse in the ksql project (confluentinc).
Example from class ApiTest, method shouldUseDelimitedFormatWhenNoAcceptHeaderQuery.
/**
 * Verifies that a /query-stream request sent without an Accept header defaults to the
 * delimited response format (newline-separated records).
 */
@Test
public void shouldUseDelimitedFormatWhenNoAcceptHeaderQuery() throws Exception {
  // When: POST the pull query without specifying an Accept header.
  JsonObject requestBody = new JsonObject().put("sql", DEFAULT_PULL_QUERY);
  VertxCompletableFuture<HttpResponse<Buffer>> requestFuture = new VertxCompletableFuture<>();
  client.post("/query-stream").sendBuffer(requestBody.toBuffer(), requestFuture);

  // Then
  HttpResponse<Buffer> response = requestFuture.get();
  // Assert the status first: on a server-side failure this reports the real status code
  // instead of an opaque QueryResponse parse error. Read the body once and reuse it.
  assertThat(response.statusCode(), is(200));
  final String body = response.bodyAsString();
  QueryResponse queryResponse = new QueryResponse(body);
  assertThat(queryResponse.rows, hasSize(DEFAULT_JSON_ROWS.size()));
  // The delimited format separates records with newlines.
  assertThat(body.contains("\n"), is(true));
}
Usage of io.confluent.ksql.api.utils.QueryResponse in the ksql project (confluentinc).
Example from class ApiTest, method shouldExecutePushQuery.
/**
 * Core API test: a push query request reaches the endpoints with the expected SQL and
 * properties, streams back the expected column metadata and rows, and registers exactly
 * one query id on the server.
 */
@Test
@CoreApiTest
public void shouldExecutePushQuery() throws Exception {
  // When
  QueryResponse pushResponse = executePushQueryAndWaitForRows(DEFAULT_PUSH_QUERY_REQUEST_BODY);

  // Then: the endpoints saw the query text and properties verbatim.
  assertThat(testEndpoints.getLastSql(), is(DEFAULT_PUSH_QUERY));
  assertThat(testEndpoints.getLastProperties(), is(DEFAULT_PUSH_QUERY_REQUEST_PROPERTIES));

  // The response header carries the column metadata; the body carries the rows.
  assertThat(pushResponse.responseObject.getJsonArray("columnNames"), is(DEFAULT_COLUMN_NAMES));
  assertThat(pushResponse.responseObject.getJsonArray("columnTypes"), is(DEFAULT_COLUMN_TYPES));
  assertThat(pushResponse.rows, is(DEFAULT_JSON_ROWS));

  // Exactly one query was registered, under the id returned in the header.
  assertThat(server.getQueryIDs(), hasSize(1));
  String queryId = pushResponse.responseObject.getString("queryId");
  assertThat(queryId, is(notNullValue()));
  assertThat(server.getQueryIDs().contains(new PushQueryId(queryId)), is(true));
}
Usage of io.confluent.ksql.api.utils.QueryResponse in the ksql project (confluentinc).
Example from class ApiTest, helper method shouldRejectMalformedJsonInArgs.
/**
 * Shared check: posting truncated JSON to the given uri yields a 400 Bad Request whose
 * error payload describes the JSON parse failure.
 */
private void shouldRejectMalformedJsonInArgs(String uri) throws Exception {
  // Given: a JSON object missing its closing brace.
  Buffer malformedBody = Buffer.buffer().appendString("{\"foo\":1");

  // When
  VertxCompletableFuture<HttpResponse<Buffer>> future = new VertxCompletableFuture<>();
  client.post(uri).sendBuffer(malformedBody, future);
  HttpResponse<Buffer> response = future.get();

  // Then: HTTP-level rejection plus the structured KSQL error payload.
  assertThat(response.statusCode(), is(400));
  assertThat(response.statusMessage(), is("Bad Request"));
  QueryResponse errorResponse = new QueryResponse(response.bodyAsString());
  validateError(ERROR_CODE_BAD_REQUEST, "Invalid JSON in request: Unexpected end-of-input: expected close marker for Object", errorResponse.responseObject);
}
Aggregations