Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
Class EndToEndIntegrationTest, method shouldSelectAllFromDerivedStream:
@Test
public void shouldSelectAllFromDerivedStream() throws Exception {
  executeStatement(
      "CREATE STREAM pageviews_female"
          + " AS SELECT %s.userid AS userid, pageid, regionid, gender "
          + " FROM %s "
          + " LEFT JOIN %s ON %s.userid = %s.userid"
          + " WHERE gender = 'FEMALE';",
      userTable, pageViewStream, userTable, pageViewStream, userTable);
  final QueuedQueryMetadata queryMetadata = executeQuery("SELECT * from pageviews_female;");
  List<KeyValue<String, GenericRow>> results = new ArrayList<>();
  BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();
  // From the mock data, we expect exactly 3 page views from female users.
  List<String> expectedPages = Arrays.asList("PAGE_2", "PAGE_5", "PAGE_5");
  List<String> expectedUsers = Arrays.asList("USER_2", "USER_0", "USER_2");
  List<String> actualPages = new ArrayList<>();
  List<String> actualUsers = new ArrayList<>();
  TestUtils.waitForCondition(() -> {
    try {
      log.debug("polling from pageviews_female");
      KeyValue<String, GenericRow> nextRow = rowQueue.poll(8000, TimeUnit.MILLISECONDS);
      if (nextRow != null) {
        results.add(nextRow);
      } else {
        // If no records arrived on the output topic within 8 seconds, the join probably
        // failed because the table data was not yet populated when the stream data was
        // consumed. Re-publish the stream data so the join can be retried.
        log.warn("repopulating data in {} because the join returned empty results.", pageViewTopic);
        testHarness.publishTestData(pageViewTopic, pageViewDataProvider, System.currentTimeMillis());
      }
    } catch (Exception e) {
      log.error("Got exception when polling from pageviews_female", e);
    }
    return 3 <= results.size();
  }, 30000, "Could not consume any records from " + pageViewTopic + " for 30 seconds");
  for (KeyValue<String, GenericRow> result : results) {
    List<Object> columns = result.value.getColumns();
    log.debug("pageview join: {}", columns);
    // SELECT * yields ROWTIME, ROWKEY, userid, pageid, regionid, gender: six columns.
    assertEquals(6, columns.size());
    String user = (String) columns.get(2);
    actualUsers.add(user);
    String page = (String) columns.get(3);
    actualPages.add(page);
  }
  assertEquals(expectedPages, actualPages);
  assertEquals(expectedUsers, actualUsers);
}
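The bounded poll-and-repopulate loop above is the general pattern these tests use to consume a transient query's results. Below is a minimal, reusable sketch of that pattern, assuming only the getRowQueue() accessor shown on this page; the QueryPolling class, the drainRows helper, and its timeouts are hypothetical, not part of the project.

import io.confluent.ksql.GenericRow;
import io.confluent.ksql.util.QueuedQueryMetadata;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.streams.KeyValue;

final class QueryPolling {

  // Hypothetical helper: poll the query's row queue until `expected` rows
  // arrive or `timeoutMs` elapses, whichever comes first.
  static List<GenericRow> drainRows(final QueuedQueryMetadata query,
                                    final int expected,
                                    final long timeoutMs) throws InterruptedException {
    final BlockingQueue<KeyValue<String, GenericRow>> queue = query.getRowQueue();
    final List<GenericRow> rows = new ArrayList<>(expected);
    final long deadline = System.currentTimeMillis() + timeoutMs;
    while (rows.size() < expected && System.currentTimeMillis() < deadline) {
      // Short bounded poll so the loop can re-check the overall deadline.
      final KeyValue<String, GenericRow> next = queue.poll(500, TimeUnit.MILLISECONDS);
      if (next != null) {
        rows.add(next.value);
      }
    }
    return rows;
  }
}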
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
Class EndToEndIntegrationTest, method executeQuery:
private QueuedQueryMetadata executeQuery(final String statement, final String... args) throws Exception {
  final QueryMetadata queryMetadata = executeStatement(statement, args);
  // A bare SELECT is a transient query, so the engine is expected to return
  // a QueuedQueryMetadata whose row queue the test can poll directly.
  assertThat(queryMetadata, instanceOf(QueuedQueryMetadata.class));
  return (QueuedQueryMetadata) queryMetadata;
}
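For context, callers use this helper as in the tests on this page (a sketch):

// Issue a transient query, then poll its row queue directly.
final QueuedQueryMetadata query = executeQuery("SELECT pageid from %s;", pageViewStream);
final KeyValue<String, GenericRow> firstRow = query.getRowQueue().poll(30, TimeUnit.SECONDS);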
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
Class PhysicalPlanBuilder, method buildPlanForBareQuery:
private QueryMetadata buildPlanForBareQuery(
    final QueuedSchemaKStream schemaKStream,
    final KsqlBareOutputNode bareOutputNode,
    final String serviceId,
    final String transientQueryPrefix,
    final String statement) {
  final String applicationId = addTimeSuffix(getBareQueryApplicationId(serviceId, transientQueryPrefix));
  KafkaStreams streams = buildStreams(builder, applicationId, ksqlConfig, overriddenStreamsProperties);
  SchemaKStream sourceSchemaKstream = schemaKStream.getSourceSchemaKStreams().get(0);
  // Hand the queued stream's row queue to the QueuedQueryMetadata so consumers of
  // this transient query can poll rows as the Kafka Streams topology emits them.
  return new QueuedQueryMetadata(
      statement,
      streams,
      bareOutputNode,
      schemaKStream.getExecutionPlan(""),
      schemaKStream.getQueue(),
      (sourceSchemaKstream instanceof SchemaKTable)
          ? DataSource.DataSourceType.KTABLE
          : DataSource.DataSourceType.KSTREAM,
      applicationId,
      kafkaTopicClient,
      builder.build());
}
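On the consuming side, code that receives this QueryMetadata typically starts the Streams topology and then drains the queue. A minimal sketch, assuming only the accessors visible on this page (getKafkaStreams() and getRowQueue()); the call site is illustrative, since buildPlanForBareQuery is private to PhysicalPlanBuilder:

QueryMetadata query = buildPlanForBareQuery(schemaKStream, bareOutputNode, serviceId, transientQueryPrefix, statement);
query.getKafkaStreams().start();
if (query instanceof QueuedQueryMetadata) {
  // Rows appear on the queue as the topology emits them.
  BlockingQueue<KeyValue<String, GenericRow>> queue = ((QueuedQueryMetadata) query).getRowQueue();
  KeyValue<String, GenericRow> row = queue.poll(10, TimeUnit.SECONDS);
}
query.getKafkaStreams().close();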
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
Class EndToEndIntegrationTest, method shouldSelectFromPageViewsWithSpecificColumn:
@Test
public void shouldSelectFromPageViewsWithSpecificColumn() throws Exception {
  final QueuedQueryMetadata queryMetadata = executeQuery("SELECT pageid from %s;", pageViewStream);
  BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();
  List<String> actualPages = new ArrayList<>();
  List<String> expectedPages = Arrays.asList("PAGE_1", "PAGE_2", "PAGE_3", "PAGE_4", "PAGE_5", "PAGE_5", "PAGE_5");
  while (actualPages.size() < expectedPages.size()) {
    // The zero-argument poll() does not block, so this loop spins until rows arrive.
    KeyValue<String, GenericRow> nextRow = rowQueue.poll();
    if (nextRow != null) {
      List<Object> columns = nextRow.value.getColumns();
      assertEquals(1, columns.size());
      String page = (String) columns.get(0);
      actualPages.add(page);
    }
  }
  assertEquals(expectedPages, actualPages);
  queryMetadata.getKafkaStreams().close();
}
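Because the loop above spins on a non-blocking poll(), a bounded poll inside try/finally is a gentler variant that also guarantees the Streams app is released even if an assertion fails (a sketch, not the project's code):

final QueuedQueryMetadata queryMetadata = executeQuery("SELECT pageid from %s;", pageViewStream);
try {
  // Bounded wait instead of spinning; returns null if no row arrives in time.
  KeyValue<String, GenericRow> nextRow = queryMetadata.getRowQueue().poll(100, TimeUnit.MILLISECONDS);
  // ... collect rows as in the test above ...
} finally {
  queryMetadata.getKafkaStreams().close();  // always release the Kafka Streams app
}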
Use of io.confluent.ksql.util.QueuedQueryMetadata in project ksql by confluentinc.
Class EndToEndIntegrationTest, method shouldSupportDroppingAndRecreatingJoinQuery:
@Test
public void shouldSupportDroppingAndRecreatingJoinQuery() throws Exception {
  final String createStreamStatement = format(
      "create stream cart_event_product as "
          + "select pv.pageid, u.gender "
          + "from %s pv left join %s u on pv.userid=u.userid;",
      pageViewStream, userTable);
  executeStatement(createStreamStatement);
  executeStatement("DROP STREAM cart_event_product;");
  executeStatement(createStreamStatement);
  final QueuedQueryMetadata queryMetadata = executeQuery("SELECT * from cart_event_product;");
  final List<Object> columns = waitForFirstRow(queryMetadata);
  // SELECT * yields ROWTIME, ROWKEY (the join key, a user id), pageid, gender.
  assertThat(columns.get(1).toString(), startsWith("USER_"));
  assertThat(columns.get(2).toString(), startsWith("PAGE_"));
  assertThat(columns.get(3).toString(), either(is("FEMALE")).or(is("MALE")));
}
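waitForFirstRow is not shown on this page; a plausible implementation given the getRowQueue() accessor above (the 30-second timeout and the assertion message are assumptions, not the project's code):

private static List<Object> waitForFirstRow(final QueuedQueryMetadata query) throws InterruptedException {
  // Assumption: block up to 30 seconds for the first row, then expose its columns.
  final KeyValue<String, GenericRow> row = query.getRowQueue().poll(30, TimeUnit.SECONDS);
  assertThat("Expected a row within 30 seconds", row, notNullValue());
  return row.value.getColumns();
}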