Use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc:
class WindowingIntTest, method shouldAggregateHoppingWindow.
/**
 * Verifies that a hopping-window CTAS aggregates ITEM_1 correctly and that closing
 * the query cleans up its internal topics, leaving the sink topic with DELETE cleanup.
 *
 * <p>Fix: the {@link AdminClient} created here is {@code AutoCloseable} but was never
 * closed, leaking broker connections across test runs. It is now managed with
 * try-with-resources.
 */
@Test
public void shouldAggregateHoppingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "HOPPING_AGGTEST";
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;",
        streamName,
        "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)",
        "HOPPING ( SIZE 10 SECONDS, ADVANCE BY 5 SECONDS)");
    ksqlContext.sql(queryString);
    final Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    // Expected row: two ITEM_1 records in the window (count 2, ORDERUNITS sum 20.0).
    // The two leading nulls are columns updateResults does not populate.
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(
            streamName, resultSchema, 1,
            new TimeWindowedDeserializer<>(new StringDeserializer()), 1000);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    // AdminClient is AutoCloseable: close it so the test does not leak broker connections.
    try (AdminClient adminClient =
             AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps())) {
        final KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
        final Set<String> topicBeforeCleanup = topicClient.listTopicNames();
        assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
            topicBeforeCleanup.size(), equalTo(5));
        // Closing the query should delete its internal (changelog/repartition) topics.
        final QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
        queryMetadata.close();
        final Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
        assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
            topicsAfterCleanUp.size(), equalTo(3));
        // The sink topic backs a table-like result, but CTAS sinks here use DELETE cleanup.
        assertThat(topicClient.getTopicCleanupPolicy(streamName),
            equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
    }
}
Use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc:
class PhysicalPlanBuilderTest, method shouldHaveOutputNode.
/** A simple select/filter plan should terminate in a bare (non-persistent) output node. */
@Test
public void shouldHaveOutputNode() throws Exception {
    final QueryMetadata plan = buildPhysicalPlan(simpleSelectFilter);
    final Object outputNode = plan.getOutputNode();
    assertThat(outputNode, instanceOf(KsqlBareOutputNode.class));
}
Use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc:
class PhysicalPlanBuilderTest, method shouldHaveKStreamDataSource.
/** A simple select/filter over a stream source should report a KSTREAM data-source type. */
@Test
public void shouldHaveKStreamDataSource() throws Exception {
    final QueryMetadata plan = buildPhysicalPlan(simpleSelectFilter);
    assertThat(plan.getDataSourceType(), equalTo(DataSource.DataSourceType.KSTREAM));
}
Use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc:
class KsqlEngineTest, method shouldCreatePersistentQueries.
/** Two CTAS statements submitted in one call should yield two persistent queries, in order. */
@Test
public void shouldCreatePersistentQueries() throws Exception {
    final String statements =
        "create table bar as select * from test2;" + "create table foo as select * from test2;";
    final List<QueryMetadata> queries = ksqlEngine.createQueries(statements);
    assertThat(queries.size(), equalTo(2));
    // Queries come back in statement order: BAR first, then FOO.
    assertThat(((PersistentQueryMetadata) queries.get(0)).getEntity(), equalTo("BAR"));
    assertThat(((PersistentQueryMetadata) queries.get(1)).getEntity(), equalTo("FOO"));
}
Use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc:
class StatementExecutor, method handleRunScript.
/**
 * Executes a RUN SCRIPT command by building all statements embedded in the command's
 * KSQL properties and starting the Kafka Streams topology of every persistent query.
 *
 * @param command the command whose properties carry the script body
 * @throws KsqlException if the command carries no script content
 * @throws Exception if building the queries fails
 */
private void handleRunScript(Command command) throws Exception {
    // Guard clause: RUN SCRIPT must ship its statement text inside the command properties.
    if (!command.getKsqlProperties().containsKey(KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT)) {
        throw new KsqlException("No statements received for LOAD FROM FILE.");
    }
    final String statements =
        (String) command.getKsqlProperties().get(KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT);
    // Only persistent (CSAS/CTAS) queries own a Kafka Streams app that must be started.
    for (final QueryMetadata query
        : ksqlEngine.buildMultipleQueries(statements, command.getKsqlProperties())) {
        if (query instanceof PersistentQueryMetadata) {
            ((PersistentQueryMetadata) query).getKafkaStreams().start();
        }
    }
}
Aggregations