Example 16 with QueryMetadata

use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc.

the class QueryEngine method buildPhysicalPlans.

List<QueryMetadata> buildPhysicalPlans(
        final List<Pair<String, PlanNode>> logicalPlans,
        final List<Pair<String, Statement>> statementList,
        final Map<String, Object> overriddenProperties,
        final boolean updateMetastore) throws Exception {
    List<QueryMetadata> physicalPlans = new ArrayList<>();
    for (int i = 0; i < logicalPlans.size(); i++) {
        Pair<String, PlanNode> statementPlanPair = logicalPlans.get(i);
        if (statementPlanPair.getRight() == null) {
            Statement statement = statementList.get(i).getRight();
            if (!(statement instanceof DdlStatement)) {
                throw new KsqlException("expecting a statement implementing DDLStatement but got: " + statement.getClass());
            }
            handleDdlStatement(statementPlanPair.getLeft(), (DdlStatement) statement, overriddenProperties);
        } else {
            buildQueryPhysicalPlan(physicalPlans, statementPlanPair, overriddenProperties, updateMetastore);
        }
    }
    return physicalPlans;
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) PlanNode(io.confluent.ksql.planner.plan.PlanNode) DdlStatement(io.confluent.ksql.parser.tree.DdlStatement) AbstractStreamCreateStatement(io.confluent.ksql.parser.tree.AbstractStreamCreateStatement) Statement(io.confluent.ksql.parser.tree.Statement) ArrayList(java.util.ArrayList) KsqlException(io.confluent.ksql.util.KsqlException)
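
For context, a hedged sketch of how a caller might drive buildPhysicalPlans and start the resulting queries. The queryEngine variable and the buildLogicalPlans helper are assumed names for this illustration; getExecutionPlan() and getKafkaStreams() are QueryMetadata accessors used elsewhere on this page.

// Hedged sketch, not taken from the ksql sources: queryEngine and
// buildLogicalPlans are assumed names used only for illustration.
List<Pair<String, PlanNode>> logicalPlans =
        queryEngine.buildLogicalPlans(metaStore, statementList); // assumed helper
List<QueryMetadata> physicalPlans =
        queryEngine.buildPhysicalPlans(logicalPlans, statementList, overriddenProperties, true);
for (QueryMetadata query : physicalPlans) {
    // The execution plan is a human-readable description of the topology.
    System.out.println(query.getExecutionPlan());
    // Each physical plan wraps a KafkaStreams instance that can be started.
    query.getKafkaStreams().start();
}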

Example 17 with QueryMetadata

use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc.

the class StandaloneExecutorTest method shouldNotExecuteNonPersistentQueries.

@Test
public void shouldNotExecuteNonPersistentQueries() throws Exception {
    final QueryMetadata query = EasyMock.createMock(QueryMetadata.class);
    EasyMock.expect(engine.createQueries(anyString())).andReturn(Collections.singletonList(query));
    EasyMock.expect(query.getStatementString()).andReturn("");
    EasyMock.replay(query, engine);
    executor.start();
    EasyMock.verify(query);
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) Test(org.junit.Test)
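
The behaviour this test pins down, that only persistent queries are executed, can be summarised with a minimal sketch. This is an assumption about the executor's logic, not the actual StandaloneExecutor source; queriesString is a placeholder.

// Minimal sketch of the behaviour verified above (assumed logic).
for (QueryMetadata query : engine.createQueries(queriesString)) {
    if (query instanceof PersistentQueryMetadata) {
        // Persistent queries (CREATE ... AS SELECT) are started.
        query.getKafkaStreams().start();
    } else {
        // Non-persistent queries are skipped, as the test expects.
        System.out.println("Skipping non-persistent query: " + query.getStatementString());
    }
}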

Example 18 with QueryMetadata

use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc.

the class WindowingIntTest method shouldAggregateTumblingWindow.

@Test
public void shouldAggregateTumblingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "TUMBLING_AGGTEST";
    final String queryString = String.format("CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;", streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)", "TUMBLING ( SIZE 10 SECONDS)");
    ksqlContext.sql(queryString);
    Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    final GenericRow expected = new GenericRow(
        Arrays.asList(null, null, "ITEM_1", 2, /* 2 x items */ 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(streamName, resultSchema, 1, new TimeWindowedDeserializer<>(new StringDeserializer()), MAX_POLL_PER_ITERATION);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    AdminClient adminClient = AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    Set<String> topicBeforeCleanup = topicClient.listTopicNames();
    assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(), topicBeforeCleanup.size(), equalTo(5));
    QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
    queryMetadata.close();
    Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
    assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(), topicsAfterCleanUp.size(), equalTo(3));
    assertThat(topicClient.getTopicCleanupPolicy(streamName), equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) GenericRow(io.confluent.ksql.GenericRow) Windowed(org.apache.kafka.streams.kstream.Windowed) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)
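
The updateResults helper referenced in the test is not shown on this page; one plausible implementation (an assumption, the real helper may differ) simply strips the window from each key and keeps the latest row per underlying key.

// Plausible implementation of the updateResults helper used above (assumed).
private static void updateResults(final Map<String, GenericRow> results,
                                  final Map<Windowed<String>, GenericRow> windowedResults) {
    for (final Map.Entry<Windowed<String>, GenericRow> entry : windowedResults.entrySet()) {
        // Windowed#key() returns the underlying record key, e.g. "ITEM_1".
        results.put(entry.getKey().key(), entry.getValue());
    }
}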

Example 19 with QueryMetadata

use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc.

the class PhysicalPlanBuilderTest method shouldCreateExecutionPlan.

@Test
public void shouldCreateExecutionPlan() throws Exception {
    String queryString = "SELECT col0, sum(col3), count(col3) FROM test1 " + "WHERE col0 > 100 GROUP BY col0;";
    final QueryMetadata metadata = buildPhysicalPlan(queryString);
    final String planText = metadata.getExecutionPlan();
    String[] lines = planText.split("\n");
    Assert.assertEquals(lines[0], " > [ SINK ] Schema: [COL0 : INT64 , KSQL_COL_1 : FLOAT64 " + ", KSQL_COL_2 : INT64].");
    Assert.assertEquals(lines[1], "\t\t > [ AGGREGATE ] Schema: [TEST1.COL0 : INT64 , TEST1.COL3 : FLOAT64 , KSQL_AGG_VARIABLE_0 : FLOAT64 , KSQL_AGG_VARIABLE_1 : INT64].");
    Assert.assertEquals(lines[2], "\t\t\t\t > [ PROJECT ] Schema: [TEST1.COL0 : INT64 , TEST1.COL3 : FLOAT64].");
    Assert.assertEquals(lines[3], "\t\t\t\t\t\t > [ FILTER ] Schema: [TEST1.COL0 : INT64 , TEST1.COL1 : STRING , TEST1.COL2 : STRING , TEST1.COL3 : FLOAT64 , TEST1.COL4 : ARRAY , TEST1.COL5 : MAP].");
    Assert.assertEquals(lines[4], "\t\t\t\t\t\t\t\t > [ SOURCE ] Schema: [TEST1.COL0 : INT64 , TEST1.COL1 : STRING , TEST1.COL2 : STRING , TEST1.COL3 : FLOAT64 , TEST1.COL4 : ARRAY , TEST1.COL5 : MAP].");
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) Test(org.junit.Test)
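
Outside of tests, the same getExecutionPlan() accessor is handy for inspecting running queries. A small usage sketch, assuming a configured KsqlContext as in the integration test above:

// Print the originating statement and execution plan of each running query.
for (final QueryMetadata query : ksqlContext.getRunningQueries()) {
    System.out.println(query.getStatementString());
    System.out.println(query.getExecutionPlan());
}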

Example 20 with QueryMetadata

use of io.confluent.ksql.util.QueryMetadata in project ksql by confluentinc.

the class PhysicalPlanBuilderTest method shouldReturnCreatedKafkaStream.

@Test
public void shouldReturnCreatedKafkaStream() throws Exception {
    final QueryMetadata queryMetadata = buildPhysicalPlan(simpleSelectFilter);
    List<TestKafkaStreamsBuilder.Call> calls = testKafkaStreamsBuilder.getCalls();
    Assert.assertEquals(1, calls.size());
    Assert.assertSame(calls.get(0).kafkaStreams, queryMetadata.getKafkaStreams());
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) Test(org.junit.Test)
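
The handle returned by getKafkaStreams() is the standard Kafka Streams client, so the usual lifecycle APIs apply. A brief hedged sketch using plain Kafka Streams calls, not anything ksql-specific; queryMetadata comes from buildPhysicalPlan as above.

final KafkaStreams streams = queryMetadata.getKafkaStreams();
// Register a handler for uncaught exceptions thrown by stream threads.
streams.setUncaughtExceptionHandler((thread, error) ->
    System.err.println("Query failed on " + thread.getName() + ": " + error));
// Inspect the current lifecycle state (CREATED, RUNNING, ...).
System.out.println("Current state: " + streams.state());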

Aggregations

QueryMetadata (io.confluent.ksql.util.QueryMetadata) 23
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata) 13
Test (org.junit.Test) 10
KsqlException (io.confluent.ksql.util.KsqlException) 5
IntegrationTest (io.confluent.common.utils.IntegrationTest) 4
GenericRow (io.confluent.ksql.GenericRow) 4
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient) 4
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl) 4
HashMap (java.util.HashMap) 4
AdminClient (org.apache.kafka.clients.admin.AdminClient) 4
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) 4
Schema (org.apache.kafka.connect.data.Schema) 4
Windowed (org.apache.kafka.streams.kstream.Windowed) 3
QueryId (io.confluent.ksql.query.QueryId) 2
CommandStatus (io.confluent.ksql.rest.entity.CommandStatus) 2
WakeupException (org.apache.kafka.common.errors.WakeupException) 2
CreateStreamCommand (io.confluent.ksql.ddl.commands.CreateStreamCommand) 1
CreateTableCommand (io.confluent.ksql.ddl.commands.CreateTableCommand) 1
DdlCommandExec (io.confluent.ksql.ddl.commands.DdlCommandExec) 1
DropSourceCommand (io.confluent.ksql.ddl.commands.DropSourceCommand) 1