Example 11 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From class SchemaKStreamTest, method testGroupByMultipleColumns.

@Test
public void testGroupByMultipleColumns() {
    String selectQuery = "SELECT col0, col1 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
    initialSchemaKStream = new SchemaKStream(
        logicalPlan.getTheSourceNode().getSchema(),
        kStream,
        ksqlStream.getKeyField(),
        new ArrayList<>(),
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        new MockSchemaRegistryClient());
    Expression col0Expression = new DereferenceExpression(new QualifiedNameReference(QualifiedName.of("TEST1")), "COL0");
    Expression col1Expression = new DereferenceExpression(new QualifiedNameReference(QualifiedName.of("TEST1")), "COL1");
    KsqlTopicSerDe ksqlTopicSerDe = new KsqlJsonTopicSerDe();
    Serde<GenericRow> rowSerde = ksqlTopicSerDe.getGenericRowSerde(initialSchemaKStream.getSchema(), null, false, null);
    List<Expression> groupByExpressions = Arrays.asList(col1Expression, col0Expression);
    SchemaKGroupedStream groupedSchemaKStream = initialSchemaKStream.groupBy(Serdes.String(), rowSerde, groupByExpressions);
    Assert.assertEquals(groupedSchemaKStream.getKeyField().name(), "TEST1.COL1|+|TEST1.COL0");
}
Also used: DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), ArrayList (java.util.ArrayList), GenericRow (io.confluent.ksql.GenericRow), PlanNode (io.confluent.ksql.planner.plan.PlanNode), Expression (io.confluent.ksql.parser.tree.Expression), KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe), KsqlJsonTopicSerDe (io.confluent.ksql.serde.json.KsqlJsonTopicSerDe), QualifiedNameReference (io.confluent.ksql.parser.tree.QualifiedNameReference), Test (org.junit.Test)
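
The assertion above pins down the key-field naming rule for a multi-column GROUP BY: the names of the group-by expressions are joined, in the order given, with the "|+|" separator. A minimal standalone sketch of that rule (the class name and the hard-coded expression names are illustrative, not KSQL API):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Illustration only: reproduces the combined key name checked by the assertion
// above, assuming the "|+|" separator and the expression order used in the test.
public class GroupByKeyNameSketch {
    public static void main(String[] args) {
        List<String> groupByNames = Arrays.asList("TEST1.COL1", "TEST1.COL0");
        String combinedKeyName = groupByNames.stream().collect(Collectors.joining("|+|"));
        System.out.println(combinedKeyName); // prints TEST1.COL1|+|TEST1.COL0
    }
}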

Example 12 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From class SchemaKStreamTest, method testSelectKey.

@Test
public void testSelectKey() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
    initialSchemaKStream = new SchemaKStream(
        logicalPlan.getTheSourceNode().getSchema(),
        kStream,
        ksqlStream.getKeyField(),
        new ArrayList<>(),
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        new MockSchemaRegistryClient());
    SchemaKStream rekeyedSchemaKStream = initialSchemaKStream.selectKey(initialSchemaKStream.getSchema().fields().get(1), true);
    Assert.assertTrue(rekeyedSchemaKStream.getKeyField().name().equalsIgnoreCase("TEST1.COL1"));
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), ArrayList (java.util.ArrayList), Test (org.junit.Test)
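
SchemaKStream.selectKey re-keys the stream on the chosen schema field; the test picks the field at index 1 (TEST1.COL1 in this schema) and checks the new key field name case-insensitively. Below is a hedged sketch of the analogous re-key on a plain Kafka Streams KStream, with the row modelled as an Object[] of column values; it illustrates the idea only and is not SchemaKStream's actual implementation, and the class name is made up.

import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;

// Illustration only: re-key a stream on one column of the row, analogous to
// what selectKey(...) does for the field chosen in the test above.
public class SelectKeySketch {
    public static KStream<String, Object[]> rekeyOnColumn(KStream<String, Object[]> rows, int columnIndex) {
        return rows.selectKey((oldKey, columns) -> String.valueOf(columns[columnIndex]));
    }

    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, Object[]> rows = builder.stream("test1");
        rekeyOnColumn(rows, 1); // index 1 corresponds to COL1 in the test schema
    }
}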

Example 13 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From class SchemaKStreamTest, method testSelectWithExpression.

@Test
public void testSelectWithExpression() throws Exception {
    String selectQuery = "SELECT col0, LEN(UCASE(col2)), col3*3+5 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
    ProjectNode projectNode = (ProjectNode) logicalPlan.getSources().get(0);
    initialSchemaKStream = new SchemaKStream(
        logicalPlan.getTheSourceNode().getSchema(),
        kStream,
        ksqlStream.getKeyField(),
        new ArrayList<>(),
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        new MockSchemaRegistryClient());
    SchemaKStream projectedSchemaKStream = initialSchemaKStream.select(projectNode.getProjectNameExpressionPairList());
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().size() == 3);
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0") == projectedSchemaKStream.getSchema().fields().get(0));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_1") == projectedSchemaKStream.getSchema().fields().get(1));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_2") == projectedSchemaKStream.getSchema().fields().get(2));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0").schema().type() == Schema.Type.INT64);
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(1).schema().type() == Schema.Type.INT32);
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(2).schema().type() == Schema.Type.FLOAT64);
    Assert.assertTrue(projectedSchemaKStream.getSourceSchemaKStreams().get(0) == initialSchemaKStream);
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), ArrayList (java.util.ArrayList), ProjectNode (io.confluent.ksql.planner.plan.ProjectNode), Test (org.junit.Test)
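
Because the second and third select items have no alias, KSQL assigns them the generated names KSQL_COL_1 and KSQL_COL_2, which is exactly what the field-name assertions check; the type assertions reflect LEN(...) producing an INT32 and the arithmetic over the DOUBLE column col3 producing a FLOAT64. A hedged variant of the query with explicit aliases, which would give the projected fields the alias names instead of the generated ones (the aliases col2_len and col3_calc and the class name are illustrative, not taken from the original test):

// Illustration only: the same projection with explicit aliases, so the projected
// schema would carry COL2_LEN / COL3_CALC rather than KSQL_COL_1 / KSQL_COL_2.
public class AliasedProjectionSketch {
    public static void main(String[] args) {
        String aliasedQuery =
            "SELECT col0, LEN(UCASE(col2)) AS col2_len, col3*3+5 AS col3_calc "
                + "FROM test1 WHERE col0 > 100;";
        System.out.println(aliasedQuery);
    }
}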

Example 14 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From class SchemaKTableTest, method testFilter.

@Test
public void testFilter() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKTable = new SchemaKTable(
        logicalPlan.getTheSourceNode().getSchema(),
        kTable,
        ksqlTable.getKeyField(),
        new ArrayList<>(),
        false,
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        new MockSchemaRegistryClient());
    SchemaKTable filteredSchemaKStream = initialSchemaKTable.filter(filterNode.getPredicate());
    Assert.assertTrue(filteredSchemaKStream.getSchema().fields().size() == 6);
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL0") == filteredSchemaKStream.getSchema().fields().get(0));
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL1") == filteredSchemaKStream.getSchema().fields().get(1));
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL2") == filteredSchemaKStream.getSchema().fields().get(2));
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL3") == filteredSchemaKStream.getSchema().fields().get(3));
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL0").schema().type() == Schema.Type.INT64);
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL1").schema().type() == Schema.Type.STRING);
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL2").schema().type() == Schema.Type.STRING);
    Assert.assertTrue(filteredSchemaKStream.getSchema().field("TEST1.COL3").schema().type() == Schema.Type.FLOAT64);
    Assert.assertTrue(filteredSchemaKStream.getSourceSchemaKStreams().get(0) == initialSchemaKTable);
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), FilterNode (io.confluent.ksql.planner.plan.FilterNode), ArrayList (java.util.ArrayList), Test (org.junit.Test)
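
Note that filter keeps the full six-field source schema (the four asserted TEST1 columns plus the remaining two source fields) and only drops rows that fail the predicate taken from the WHERE clause via the FilterNode. A hedged sketch of the analogous plain Kafka Streams filter follows, with the row modelled as an Object[] and COL0 (a BIGINT, so a Long at index 0) as in the test; the class is an illustration, not SchemaKTable's implementation.

import org.apache.kafka.streams.kstream.KTable;

// Illustration only: a plain Kafka Streams filter analogous to the WHERE col0 > 100
// predicate that SchemaKTable.filter(...) applies in the test above.
public class TableFilterSketch {
    public static KTable<String, Object[]> whereCol0GreaterThan(KTable<String, Object[]> table, long threshold) {
        return table.filter((key, columns) -> columns != null && (Long) columns[0] > threshold);
    }
}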

Example 15 with PlanNode

Use of io.confluent.ksql.planner.plan.PlanNode in project ksql by confluentinc.

From class SchemaKTableTest, method testSelectWithExpression.

@Test
public void testSelectWithExpression() throws Exception {
    String selectQuery = "SELECT col0, LEN(UCASE(col2)), col3*3+5 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
    ProjectNode projectNode = (ProjectNode) logicalPlan.getSources().get(0);
    initialSchemaKTable = new SchemaKTable(
        logicalPlan.getTheSourceNode().getSchema(),
        kTable,
        ksqlTable.getKeyField(),
        new ArrayList<>(),
        false,
        SchemaKStream.Type.SOURCE,
        functionRegistry,
        new MockSchemaRegistryClient());
    SchemaKTable projectedSchemaKStream = initialSchemaKTable.select(projectNode.getProjectNameExpressionPairList());
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().size() == 3);
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0") == projectedSchemaKStream.getSchema().fields().get(0));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_1") == projectedSchemaKStream.getSchema().fields().get(1));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("KSQL_COL_2") == projectedSchemaKStream.getSchema().fields().get(2));
    Assert.assertTrue(projectedSchemaKStream.getSchema().field("COL0").schema().type() == Schema.Type.INT64);
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(1).schema() == Schema.INT32_SCHEMA);
    Assert.assertTrue(projectedSchemaKStream.getSchema().fields().get(2).schema() == Schema.FLOAT64_SCHEMA);
    Assert.assertTrue(projectedSchemaKStream.getSourceSchemaKStreams().get(0) == initialSchemaKTable);
}
Also used: PlanNode (io.confluent.ksql.planner.plan.PlanNode), MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), ArrayList (java.util.ArrayList), ProjectNode (io.confluent.ksql.planner.plan.ProjectNode), Test (org.junit.Test)
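
This is the table-side counterpart of Example 13. The one notable difference is that the last two schema assertions compare against the Connect Schema constants by object identity (==) rather than by Schema.Type; that only holds while the projected fields reuse those exact constant instances. The small standalone sketch below (class name made up) shows why comparing by type is the less brittle of the two checks:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Illustration only: an equivalent but freshly built schema fails the identity
// check against Schema.INT32_SCHEMA, while the type check still passes.
public class SchemaComparisonSketch {
    public static void main(String[] args) {
        Schema freshInt32 = SchemaBuilder.int32().build();
        System.out.println(freshInt32 == Schema.INT32_SCHEMA);      // false: different instance
        System.out.println(freshInt32.type() == Schema.Type.INT32); // true: same type
    }
}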

Aggregations

PlanNode (io.confluent.ksql.planner.plan.PlanNode): 23
Test (org.junit.Test): 17
ArrayList (java.util.ArrayList): 13
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 11
ProjectNode (io.confluent.ksql.planner.plan.ProjectNode): 8
FilterNode (io.confluent.ksql.planner.plan.FilterNode): 6
Expression (io.confluent.ksql.parser.tree.Expression): 4
Statement (io.confluent.ksql.parser.tree.Statement): 4
StructuredDataSourceNode (io.confluent.ksql.planner.plan.StructuredDataSourceNode): 4
AggregateAnalysis (io.confluent.ksql.analyzer.AggregateAnalysis): 3
Analysis (io.confluent.ksql.analyzer.Analysis): 3
Pair (io.confluent.ksql.util.Pair): 3
GenericRow (io.confluent.ksql.GenericRow): 2
AggregateAnalyzer (io.confluent.ksql.analyzer.AggregateAnalyzer): 2
AnalysisContext (io.confluent.ksql.analyzer.AnalysisContext): 2
Analyzer (io.confluent.ksql.analyzer.Analyzer): 2
StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource): 2
AbstractStreamCreateStatement (io.confluent.ksql.parser.tree.AbstractStreamCreateStatement): 2
DdlStatement (io.confluent.ksql.parser.tree.DdlStatement): 2
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression): 2