Search in sources :

Example 6 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

The class TestPlanCompiler, method testNested.

/**
 * Verifies projection and filtering over nested column types: a map lookup
 * (MAPFIELD['c']), a nested-map equality filter, and an array-index filter.
 * Exactly one row is expected to survive the WHERE clause.
 */
@Test
public void testNested() throws Exception {
    final int expectedRowCount = 1;
    String sql = "SELECT ID, MAPFIELD['c'], NESTEDMAPFIELD, ARRAYFIELD "
            + "FROM FOO "
            + "WHERE NESTEDMAPFIELD['a']['b'] = 2 AND ARRAYFIELD[2] = 200";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(sql);

    final Map<String, ISqlStreamsDataSource> sources = new HashMap<>();
    sources.put("FOO", new TestUtils.MockSqlStreamsNestedDataSource());

    QueryPlanner queryPlanner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor processor = queryPlanner.compile(sources, sql);
    // Route the query output into a collecting mock bolt so we can assert on it.
    processor.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topology = processor.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), expectedRowCount, processor, topology);

    Map<String, Integer> innerMap = ImmutableMap.of("b", 2, "c", 4);
    Map<String, Map<String, Integer>> expectedNestedMap = ImmutableMap.of("a", innerMap);
    Assert.assertArrayEquals(
            new Values[] { new Values(2, 4, expectedNestedMap, Arrays.asList(100, 200, 300)) },
            TestUtils.MockBolt.getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) Map(java.util.Map) Test(org.junit.jupiter.api.Test)

Example 7 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

The class TestPlanCompiler, method testBinaryStringFunctions.

/**
 * Exercises binary-string literals and functions: concatenation ({@code ||}),
 * {@code POSITION}, {@code OVERLAY}, and both {@code SUBSTRING} forms.
 * All binary-literal tests are grouped here because Avatica converts such
 * results to {@code byte[]}, whereas the Streams runtime produces
 * {@code ByteString}, so the observed semantics differ between the two
 * implementations.
 */
@Test
public void testBinaryStringFunctions() throws Exception {
    final int expectedRowCount = 1;
    String sql = "SELECT x'45F0AB' || x'45F0AB', "
            + "POSITION(x'F0' IN x'453423F0ABBC'), "
            + "OVERLAY(x'453423F0ABBC45' PLACING x'4534' FROM 3), "
            + "SUBSTRING(x'453423F0ABBC' FROM 3), "
            + "SUBSTRING(x'453423F0ABBC453423F0ABBC' FROM 3 FOR 4) "
            + "FROM FOO "
            + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);

    final Map<String, ISqlStreamsDataSource> sources = new HashMap<>();
    sources.put("FOO", new TestUtils.MockSqlStreamsDataSource());

    AbstractStreamsProcessor processor = new QueryPlanner(state.schema()).compile(sources, sql);
    // Route the query output into a collecting mock bolt so we can assert on it.
    processor.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topology = processor.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), expectedRowCount, processor, topology);

    Values row = TestUtils.MockBolt.getCollectedValues().get(0);
    assertEquals("45f0ab45f0ab", row.get(0).toString());   // x'45F0AB' || x'45F0AB'
    assertEquals(4, row.get(1));                           // POSITION is 1-based
    assertEquals("45344534abbc45", row.get(2).toString()); // OVERLAY ... FROM 3
    assertEquals("23f0abbc", row.get(3).toString());       // SUBSTRING FROM 3
    assertEquals("23f0abbc", row.get(4).toString());       // SUBSTRING FROM 3 FOR 4
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) Test(org.junit.jupiter.api.Test)

Example 8 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

The class TestCompilerUtils, method sqlOverDummyTable.

/**
 * Builds a root Calcite schema containing a streamable dummy table
 * (registered as both FOO and BAR) plus the MYPLUS scalar UDF, plans the
 * given SQL against it, and returns the resulting planner state.
 *
 * @param sql the SQL statement to parse, validate, and plan
 * @return the schema together with the planned relational tree
 * @throws SqlParseException if the SQL cannot be parsed
 * @throws ValidationException if the SQL fails validation
 * @throws RelConversionException if the validated SQL cannot be converted to a plan
 */
public static CalciteState sqlOverDummyTable(String sql) throws RelConversionException, ValidationException, SqlParseException {
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    JavaTypeFactory javaTypeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

    // ID is a monotonic primary key; NAME and ADDR are plain string columns.
    StreamableTable streamable = new CompilerUtil.TableBuilderInfo(javaTypeFactory)
            .field("ID", SqlTypeName.INTEGER,
                    new ColumnConstraint.PrimaryKey(SqlMonotonicity.MONOTONIC, SqlParserPos.ZERO))
            .field("NAME", javaTypeFactory.createType(String.class))
            .field("ADDR", javaTypeFactory.createType(String.class))
            .build();
    Table streamTable = streamable.stream();

    // The same table backs both the source (FOO) and the sink (BAR) in tests.
    rootSchema.add("FOO", streamTable);
    rootSchema.add("BAR", streamTable);
    rootSchema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));

    StreamsRel plan = new QueryPlanner(rootSchema).getPlan(sql);
    System.out.println(StormRelUtils.explain(plan, SqlExplainLevel.ALL_ATTRIBUTES));
    return new CalciteState(rootSchema, plan);
}
Also used : StreamableTable(org.apache.calcite.schema.StreamableTable) Table(org.apache.calcite.schema.Table) StreamableTable(org.apache.calcite.schema.StreamableTable) JavaTypeFactoryImpl(org.apache.calcite.jdbc.JavaTypeFactoryImpl) SchemaPlus(org.apache.calcite.schema.SchemaPlus) JavaTypeFactory(org.apache.calcite.adapter.java.JavaTypeFactory) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) StreamsRel(org.apache.storm.sql.planner.streams.rel.StreamsRel)

Example 9 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

The class TestPlanCompiler, method testUdf.

/**
 * Verifies that a user-defined scalar function (MYPLUS, registered on the
 * schema by TestCompilerUtils.sqlOverDummyTable) is compiled and evaluated:
 * for the row with ID = 2, MYPLUS(ID, 3) must produce 5.
 */
@Test
public void testUdf() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    // Keep an explicit separator after the projection list. The previous
    // "SELECT MYPLUS(ID, 3)" + "FROM FOO " concatenation produced
    // "...MYPLUS(ID, 3)FROM FOO..." and only parsed because ')' is a token
    // boundary; a trailing space matches the sibling tests in this class.
    String sql = "SELECT MYPLUS(ID, 3) " + "FROM FOO " + "WHERE ID = 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt so the query result can be collected and asserted on
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(5) }, TestUtils.MockBolt.getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) Test(org.junit.jupiter.api.Test)

Example 10 with QueryPlanner

Use of org.apache.storm.sql.planner.streams.QueryPlanner in project storm by apache.

The class TestPlanCompiler, method testInsert.

/**
 * Verifies an INSERT INTO ... SELECT pipeline: rows from the FOO source
 * matching ID > 3 are routed to the BAR sink and captured by MockInsertBolt.
 * Exactly one (key, values) pair is expected.
 */
@Test
public void testInsert() throws Exception {
    final int expectedRowCount = 1;
    String sql = "INSERT INTO BAR SELECT ID, NAME, ADDR FROM FOO WHERE ID > 3";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);

    final Map<String, ISqlStreamsDataSource> sources = new HashMap<>();
    sources.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    sources.put("BAR", new TestUtils.MockSqlStreamsOutputDataSource());

    AbstractStreamsProcessor processor = new QueryPlanner(state.schema()).compile(sources, sql);
    final StormTopology topology = processor.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockInsertBolt.getCollectedValues(), expectedRowCount, processor, topology);

    Assert.assertArrayEquals(
            new Pair[] { Pair.of(4, new Values(4, "abcde", "y")) },
            TestUtils.MockInsertBolt.getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) Values(org.apache.storm.tuple.Values) QueryPlanner(org.apache.storm.sql.planner.streams.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractStreamsProcessor(org.apache.storm.sql.AbstractStreamsProcessor) ISqlStreamsDataSource(org.apache.storm.sql.runtime.ISqlStreamsDataSource) Test(org.junit.jupiter.api.Test)

Aggregations

QueryPlanner (org.apache.storm.sql.planner.streams.QueryPlanner)10 HashMap (java.util.HashMap)6 StormTopology (org.apache.storm.generated.StormTopology)6 AbstractStreamsProcessor (org.apache.storm.sql.AbstractStreamsProcessor)6 TestUtils (org.apache.storm.sql.TestUtils)6 ISqlStreamsDataSource (org.apache.storm.sql.runtime.ISqlStreamsDataSource)6 Values (org.apache.storm.tuple.Values)6 Test (org.junit.jupiter.api.Test)6 JavaTypeFactory (org.apache.calcite.adapter.java.JavaTypeFactory)4 JavaTypeFactoryImpl (org.apache.calcite.jdbc.JavaTypeFactoryImpl)4 SchemaPlus (org.apache.calcite.schema.SchemaPlus)4 StreamableTable (org.apache.calcite.schema.StreamableTable)4 Table (org.apache.calcite.schema.Table)4 StreamsRel (org.apache.storm.sql.planner.streams.rel.StreamsRel)4 CompilerUtil (org.apache.storm.sql.compiler.CompilerUtil)3 ColumnConstraint (org.apache.storm.sql.parser.ColumnConstraint)3 ImmutableMap (com.google.common.collect.ImmutableMap)1 Map (java.util.Map)1 DataContext (org.apache.calcite.DataContext)1