
Example 16 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From the class SelectValueMapperTest, method createMapper.

private SelectValueMapper createMapper(final String query) throws Exception {
    final PlanNode planNode = planBuilder.buildLogicalPlan(query);
    // The output node's single source is the ProjectNode that holds the SELECT expressions.
    final ProjectNode projectNode = (ProjectNode) planNode.getSources().get(0);
    final Schema schema = planNode.getTheSourceNode().getSchema();
    final List<Pair<String, Expression>> expressionPairList = projectNode.getProjectNameExpressionPairList();
    final List<ExpressionMetadata> metadata = createExpressionMetadata(expressionPairList, schema);
    return new SelectValueMapper(new GenericRowValueTypeEnforcer(schema), expressionPairList, metadata);
}
Also used: ExpressionMetadata (io.confluent.ksql.util.ExpressionMetadata), PlanNode (io.confluent.ksql.planner.plan.PlanNode), GenericRowValueTypeEnforcer (io.confluent.ksql.util.GenericRowValueTypeEnforcer), Schema (org.apache.kafka.connect.data.Schema), ProjectNode (io.confluent.ksql.planner.plan.ProjectNode), Pair (io.confluent.ksql.util.Pair)
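
A minimal usage sketch for the helper above, not taken from the project: it assumes SelectValueMapper implements Kafka Streams' ValueMapper<GenericRow, GenericRow>, that GenericRow wraps a List<Object> reachable via getColumns(), and that Arrays.asList, assertThat and equalTo are imported; the column values are placeholders that must match the TEST1 schema registered in the test metastore.

@Test
public void shouldProjectSelectedColumns() throws Exception {
    final SelectValueMapper mapper =
        createMapper("SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;");
    // Placeholder source row; the values must line up with the TEST1 schema.
    final GenericRow row = new GenericRow(Arrays.asList(0L, "key1", 200L, "foo", 10.1));
    final GenericRow projected = mapper.apply(row);
    // Only the three projected columns should remain, in SELECT-list order.
    assertThat(projected.getColumns().size(), equalTo(3));
}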

Example 17 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From the class SqlPredicateTest, method buildLogicalPlan.

private PlanNode buildLogicalPlan(String queryStr) {
    List<Statement> statements = KSQL_PARSER.buildAst(queryStr, metaStore);
    // Analyze the query to resolve the references and extract operations
    Analysis analysis = new Analysis();
    Analyzer analyzer = new Analyzer("sqlExpression", analysis, metaStore);
    analyzer.process(statements.get(0), new AnalysisContext(null));
    AggregateAnalysis aggregateAnalysis = new AggregateAnalysis();
    AggregateAnalyzer aggregateAnalyzer = new AggregateAnalyzer(aggregateAnalysis, analysis, functionRegistry);
    for (Expression expression : analysis.getSelectExpressions()) {
        aggregateAnalyzer.process(expression, new AnalysisContext(null));
    }
    // Build a logical plan
    PlanNode logicalPlan = new LogicalPlanner(analysis, aggregateAnalysis, functionRegistry).buildPlan();
    return logicalPlan;
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), LogicalPlanner (io.confluent.ksql.planner.LogicalPlanner), AggregateAnalyzer (io.confluent.ksql.analyzer.AggregateAnalyzer), Expression (io.confluent.ksql.parser.tree.Expression), Statement (io.confluent.ksql.parser.tree.Statement), Analysis (io.confluent.ksql.analyzer.Analysis), AggregateAnalysis (io.confluent.ksql.analyzer.AggregateAnalysis), AnalysisContext (io.confluent.ksql.analyzer.AnalysisContext), Analyzer (io.confluent.ksql.analyzer.Analyzer)
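
The helper above relies on test fields that are not shown (KSQL_PARSER, metaStore, functionRegistry). A hypothetical setup sketch follows; the fixture method and constructors are assumptions based on other KSQL tests of this vintage, not taken from this file.

private static final KsqlParser KSQL_PARSER = new KsqlParser();
private MetaStore metaStore;
private FunctionRegistry functionRegistry;

@Before
public void init() {
    // Assumed fixture: registers the TEST1/TEST2 sources referenced by the queries.
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
}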

Example 18 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From the class SqlPredicateTest, method testFilter.

@Test
public void testFilter() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    // Plan shape: output node -> ProjectNode -> FilterNode -> source, so the filter sits two levels below the root.
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream, ksqlStream.getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(), initialSchemaKStream.getSchema(), false, functionRegistry);
    Assert.assertTrue(predicate.getFilterExpression().toString().equalsIgnoreCase("(TEST1.COL0 > 100)"));
    Assert.assertTrue(predicate.getColumnIndexes().length == 1);
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), FilterNode (io.confluent.ksql.planner.plan.FilterNode), ArrayList (java.util.ArrayList), Test (org.junit.Test)
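
As a follow-on sketch, not part of this test: in the engine the compiled predicate is applied to the stream roughly as below, assuming SqlPredicate#getPredicate() returns a Kafka Streams Predicate<String, GenericRow> (which is what SchemaKStream#filter appears to expect in this version).

// Apply the generated predicate to the raw stream; rows failing COL0 > 100 are dropped.
final KStream<String, GenericRow> filtered = kStream.filter(predicate.getPredicate());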

Example 19 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From the class SqlPredicateTest, method testFilterBiggerExpression.

@Test
public void testFilterBiggerExpression() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100 AND LEN(col2) = 5;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream, ksqlStream.getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry, new MockSchemaRegistryClient());
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(), initialSchemaKStream.getSchema(), false, functionRegistry);
    Assert.assertTrue(predicate.getFilterExpression().toString().equalsIgnoreCase("((TEST1.COL0 > 100) AND" + " (LEN(TEST1.COL2) = 5))"));
    Assert.assertTrue(predicate.getColumnIndexes().length == 3);
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), FilterNode (io.confluent.ksql.planner.plan.FilterNode), ArrayList (java.util.ArrayList), Test (org.junit.Test)

Example 20 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From the class LogicalPlannerTest, method shouldCreatePlanWithTableAsSource.

@Test
public void shouldCreatePlanWithTableAsSource() {
    PlanNode planNode = buildLogicalPlan("select col0 from TEST2 limit 5;");
    assertThat(planNode.getSources().size(), equalTo(1));
    StructuredDataSource structuredDataSource = ((StructuredDataSourceNode) planNode.getSources().get(0).getSources().get(0)).getStructuredDataSource();
    assertThat(structuredDataSource.getDataSourceType(), equalTo(DataSource.DataSourceType.KTABLE));
    assertThat(structuredDataSource.getName(), equalTo("TEST2"));
}
Also used: StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource), PlanNode (io.confluent.ksql.planner.plan.PlanNode), StructuredDataSourceNode (io.confluent.ksql.planner.plan.StructuredDataSourceNode), Test (org.junit.Test)
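
Across these examples the logical plan is descended with chained getSources().get(0) calls because the plan is a simple chain (output node -> project -> optional filter -> data source). A small helper, ours rather than the project's, makes that traversal explicit using only the PlanNode#getSources() API shown above:

// Walk the leftmost branch of the plan until a node with no sources is reached.
private static PlanNode leftmostLeaf(final PlanNode node) {
    PlanNode current = node;
    while (current.getSources() != null && !current.getSources().isEmpty()) {
        current = current.getSources().get(0);
    }
    return current;
}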

Aggregations

PlanNode (io.confluent.ksql.planner.plan.PlanNode): 23
Test (org.junit.Test): 17
ArrayList (java.util.ArrayList): 13
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 11
ProjectNode (io.confluent.ksql.planner.plan.ProjectNode): 8
FilterNode (io.confluent.ksql.planner.plan.FilterNode): 6
Expression (io.confluent.ksql.parser.tree.Expression): 4
Statement (io.confluent.ksql.parser.tree.Statement): 4
StructuredDataSourceNode (io.confluent.ksql.planner.plan.StructuredDataSourceNode): 4
AggregateAnalysis (io.confluent.ksql.analyzer.AggregateAnalysis): 3
Analysis (io.confluent.ksql.analyzer.Analysis): 3
Pair (io.confluent.ksql.util.Pair): 3
GenericRow (io.confluent.ksql.GenericRow): 2
AggregateAnalyzer (io.confluent.ksql.analyzer.AggregateAnalyzer): 2
AnalysisContext (io.confluent.ksql.analyzer.AnalysisContext): 2
Analyzer (io.confluent.ksql.analyzer.Analyzer): 2
StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource): 2
AbstractStreamCreateStatement (io.confluent.ksql.parser.tree.AbstractStreamCreateStatement): 2
DdlStatement (io.confluent.ksql.parser.tree.DdlStatement): 2
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression): 2