
Example 1 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

From the class TestPlanCompiler, method testCompile.

@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, TestUtils.MockBolt.getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) Test(org.junit.jupiter.api.Test)
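
The same compile-and-run sequence recurs in every test on this page: build the schema, bind the mock data source, plan and compile the SQL, attach the mock bolt, build the topology, and run it on the shared local cluster. A minimal sketch of how that could be factored into a shared helper inside the test class follows; the method name compileAndRun and the factoring itself are assumptions, but every call it makes appears verbatim in the examples here.

private void compileAndRun(String sql, int expectedValueSize) throws Exception {
    // Build the Calcite schema over the dummy FOO table.
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    // Bind the FOO table name to the mock data source.
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    // Plan the SQL against the schema and compile it into a streams processor.
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // Route the query output into the mock bolt so emitted values can be inspected.
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    // Run on the shared local cluster until the expected number of values has been collected.
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), expectedValueSize, proc, topo);
}

With such a helper, testCompile would reduce to compileAndRun("SELECT ID FROM FOO WHERE ID > 2", 2) followed by the assertion on TestUtils.MockBolt.getCollectedValues().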

Example 2 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

From the class TestPlanCompiler, method testDateKeywordsAndFunctions.

/**
 * All the date/time/timestamp related tests are done here, because Avatica converts the results of date functions
 * to java.sql classes, whereas the Streams runtime provides long values, which makes the semantics differ between
 * the two implementations.
 */
@Test
public void testDateKeywordsAndFunctions() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT " + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE, " + "DATE '1970-05-15' AS datefield, TIME '00:00:00' AS timefield, TIMESTAMP '2016-01-01 00:00:00' as timestampfield, " + "EXTRACT(MONTH FROM TIMESTAMP '2010-01-23 12:34:56')," + "FLOOR(DATE '2016-01-23' TO MONTH)," + "CEIL(TIME '12:34:56' TO MINUTE)," + "{fn CURDATE()} = CURRENT_DATE, {fn CURTIME()} = LOCALTIME, {fn NOW()} = LOCALTIMESTAMP," + "{fn QUARTER(DATE '2016-10-07')}, {fn TIMESTAMPADD(MINUTE, 15, TIMESTAMP '2016-10-07 00:00:00')}," + "{fn TIMESTAMPDIFF(SECOND, TIMESTAMP '2016-10-06 00:00:00', TIMESTAMP '2016-10-07 00:00:00')}," + "INTERVAL '1-5' YEAR TO MONTH AS intervalfield, " + "(DATE '1970-01-01', DATE '1970-01-15') AS anchoredinterval_field " + "FROM FOO " + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final DataContext dataContext = proc.getDataContext();
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);
    System.out.println(TestUtils.MockBolt.getCollectedValues());
    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    Assert.assertArrayEquals(new Values[] { new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt, 134, 0, 1451606400000L, 1L, 0L, 45300000, true, true, true, 4L, 1475799300000L, 86400, 17, 0, 14) }, TestUtils.MockBolt.getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) DataContext(org.apache.calcite.DataContext) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) Test(org.junit.jupiter.api.Test)
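
The constants in the expected row follow the encoding the test itself computes: DATE as days since the epoch, TIME as milliseconds since midnight, and TIMESTAMP as epoch milliseconds. A standalone sketch using only java.time reproduces the literal values asserted above; the class and variable names are illustrative.

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class SqlDateEncodingSketch {
    public static void main(String[] args) {
        // DATE '1970-05-15' -> days since 1970-01-01, i.e. the 134 in the expected row.
        long dateField = LocalDate.parse("1970-05-15").toEpochDay();
        // TIMESTAMP '2016-01-01 00:00:00' -> epoch milliseconds in UTC, i.e. 1451606400000L.
        long timestampField = LocalDateTime.parse("2016-01-01T00:00:00").toInstant(ZoneOffset.UTC).toEpochMilli();
        // CEIL(TIME '12:34:56' TO MINUTE) -> 12:35:00 expressed as milliseconds since midnight, i.e. 45300000.
        long ceiledTime = ((12 * 60 + 35) * 60L) * 1000L;
        System.out.println(dateField + " " + timestampField + " " + ceiledTime); // 134 1451606400000 45300000
    }
}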

Example 3 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

From the class TestCompilerUtils, method sqlOverDummyGroupByTable.

public static CalciteState sqlOverDummyGroupByTable(String sql) throws RelConversionException, ValidationException, SqlParseException {
    SchemaPlus schema = Frameworks.createRootSchema(true);
    JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory).field("ID", SqlTypeName.INTEGER, new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO)).field("GRPID", SqlTypeName.INTEGER).field("NAME", typeFactory.createType(String.class)).field("ADDR", typeFactory.createType(String.class)).field("AGE", SqlTypeName.INTEGER).field("SCORE", SqlTypeName.INTEGER).build();
    Table table = streamableTable.stream();
    schema.add("FOO", table);
    schema.add("BAR", table);
    schema.add("MYSTATICSUM", AggregateFunctionImpl.create(MyStaticSumFunction.class));
    schema.add("MYSUM", AggregateFunctionImpl.create(MySumFunction.class));
    QueryPlanner queryPlanner = new QueryPlanner(schema);
    StreamsRel tree = queryPlanner.getPlan(sql);
    System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
    return new CalciteState(schema, tree);
}
Also used : CompilerUtil(org.apache.storm.sql.compiler.CompilerUtil) Table(org.apache.calcite.schema.Table) StreamableTable(org.apache.calcite.schema.StreamableTable) SchemaPlus(org.apache.calcite.schema.SchemaPlus) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) StreamsRel(org.apache.storm.sql.planner.streams.rel.StreamsRel) JavaTypeFactoryImpl(org.apache.calcite.jdbc.JavaTypeFactoryImpl) ColumnConstraint(org.apache.storm.sql.parser.ColumnConstraint) JavaTypeFactory(org.apache.calcite.adapter.java.JavaTypeFactory)
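
A caller would hand this helper an aggregation query over the FOO schema it builds, for example one that exercises the registered MYSUM aggregate. The SQL text below is an assumption, not taken from the Storm test suite, and whether the streams planner requires a windowed form rather than a plain GROUP BY is not shown here.

// Illustrative only: plan a GROUP BY query that uses the user-defined MYSUM aggregate.
TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyGroupByTable(
        "SELECT GRPID, MYSUM(SCORE) FROM FOO GROUP BY GRPID");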

Example 4 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

From the class TestCompilerUtils, method sqlOverSimpleEquiJoinTables.

public static CalciteState sqlOverSimpleEquiJoinTables(String sql) throws RelConversionException, ValidationException, SqlParseException {
    SchemaPlus schema = Frameworks.createRootSchema(true);
    JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory).field("EMPID", SqlTypeName.INTEGER, new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO)).field("EMPNAME", SqlTypeName.VARCHAR).field("DEPTID", SqlTypeName.INTEGER).build();
    StreamableTable streamableTable2 = new CompilerUtil.TableBuilderInfo(typeFactory).field("DEPTID", SqlTypeName.INTEGER, new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO)).field("DEPTNAME", SqlTypeName.VARCHAR).build();
    Table table = streamableTable.stream();
    Table table2 = streamableTable2.stream();
    schema.add("EMP", table);
    schema.add("DEPT", table2);
    QueryPlanner queryPlanner = new QueryPlanner(schema);
    StreamsRel tree = queryPlanner.getPlan(sql);
    System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
    return new CalciteState(schema, tree);
}
Also used : CompilerUtil(org.apache.storm.sql.compiler.CompilerUtil) Table(org.apache.calcite.schema.Table) StreamableTable(org.apache.calcite.schema.StreamableTable) SchemaPlus(org.apache.calcite.schema.SchemaPlus) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) StreamsRel(org.apache.storm.sql.planner.streams.rel.StreamsRel) JavaTypeFactoryImpl(org.apache.calcite.jdbc.JavaTypeFactoryImpl) ColumnConstraint(org.apache.storm.sql.parser.ColumnConstraint) JavaTypeFactory(org.apache.calcite.adapter.java.JavaTypeFactory)
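
A caller would pass this helper an equi-join over the EMP and DEPT tables it registers, for example as below; the SQL text is an assumption based on the schema built above.

// Illustrative only: plan a join on the shared DEPTID column.
TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverSimpleEquiJoinTables(
        "SELECT EMP.EMPNAME, DEPT.DEPTNAME FROM EMP JOIN DEPT ON EMP.DEPTID = DEPT.DEPTID");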

Example 5 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

From the class TestCompilerUtils, method sqlOverNestedTable.

public static CalciteState sqlOverNestedTable(String sql) throws RelConversionException, ValidationException, SqlParseException {
    SchemaPlus schema = Frameworks.createRootSchema(true);
    JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory).field("ID", SqlTypeName.INTEGER, new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO)).field("MAPFIELD", typeFactory.createTypeWithNullability(typeFactory.createMapType(typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR), true), typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.INTEGER), true)), true)).field("NESTEDMAPFIELD", typeFactory.createTypeWithNullability(typeFactory.createMapType(typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR), true), typeFactory.createTypeWithNullability(typeFactory.createMapType(typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR), true), typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.INTEGER), true)), true)), true)).field("ARRAYFIELD", typeFactory.createTypeWithNullability(typeFactory.createArrayType(typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.INTEGER), true), -1L), true)).build();
    Table table = streamableTable.stream();
    schema.add("FOO", table);
    schema.add("BAR", table);
    schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));
    QueryPlanner queryPlanner = new QueryPlanner(schema);
    StreamsRel tree = queryPlanner.getPlan(sql);
    System.out.println(StormRelUtils.explain(tree, SqlExplainLevel.ALL_ATTRIBUTES));
    return new CalciteState(schema, tree);
}
Also used : CompilerUtil(org.apache.storm.sql.compiler.CompilerUtil) Table(org.apache.calcite.schema.Table) StreamableTable(org.apache.calcite.schema.StreamableTable) SchemaPlus(org.apache.calcite.schema.SchemaPlus) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) StreamsRel(org.apache.storm.sql.planner.streams.rel.StreamsRel) JavaTypeFactoryImpl(org.apache.calcite.jdbc.JavaTypeFactoryImpl) ColumnConstraint(org.apache.storm.sql.parser.ColumnConstraint) JavaTypeFactory(org.apache.calcite.adapter.java.JavaTypeFactory)
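
A caller would pass this helper a query that touches the nested MAP/ARRAY columns or the registered MYPLUS scalar function, for example as below; the SQL text is an assumption based on the schema built above.

// Illustrative only: index into the map and array fields and call the MYPLUS UDF.
TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(
        "SELECT ID, MAPFIELD['b'], NESTEDMAPFIELD['a']['b'], ARRAYFIELD[2], MYPLUS(ID, 1) FROM FOO WHERE ID > 0");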

Aggregations

QueryPlanner (org.apache.storm.sql.planner.streams.QueryPlanner): 10
HashMap (java.util.HashMap): 6
StormTopology (org.apache.storm.generated.StormTopology): 6
AbstractStreamsProcessor (org.apache.storm.sql.AbstractStreamsProcessor): 6
TestUtils (org.apache.storm.sql.TestUtils): 6
ISqlStreamsDataSource (org.apache.storm.sql.runtime.ISqlStreamsDataSource): 6
Values (org.apache.storm.tuple.Values): 6
Test (org.junit.jupiter.api.Test): 6
JavaTypeFactory (org.apache.calcite.adapter.java.JavaTypeFactory): 4
JavaTypeFactoryImpl (org.apache.calcite.jdbc.JavaTypeFactoryImpl): 4
SchemaPlus (org.apache.calcite.schema.SchemaPlus): 4
StreamableTable (org.apache.calcite.schema.StreamableTable): 4
Table (org.apache.calcite.schema.Table): 4
StreamsRel (org.apache.storm.sql.planner.streams.rel.StreamsRel): 4
CompilerUtil (org.apache.storm.sql.compiler.CompilerUtil): 3
ColumnConstraint (org.apache.storm.sql.parser.ColumnConstraint): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 1
Map (java.util.Map): 1
DataContext (org.apache.calcite.DataContext): 1