Search in sources:

Example 1 with ISqlTridentDataSource

Use of org.apache.storm.sql.runtime.ISqlTridentDataSource in the Apache Storm project.

From the class StormSqlImpl, method handleCreateTableForTrident.

/**
 * Registers a Trident data source for the table declared by the given
 * {@code CREATE TABLE} statement.
 *
 * @param n           parsed CREATE TABLE statement carrying table name, location URI,
 *                    input/output format classes and properties
 * @param dataSources registry mapping table name to data source; updated in place
 * @throws RuntimeException if the table is already defined, or if no data source
 *                          can be constructed for the statement's location URI
 */
private void handleCreateTableForTrident(SqlCreateTable n, Map<String, ISqlTridentDataSource> dataSources) {
    // Reject duplicates up front, so redefining a table does not pay the data
    // source construction cost or trigger connector-side effects first.
    if (dataSources.containsKey(n.tableName())) {
        throw new RuntimeException("Duplicated definition for table " + n.tableName());
    }
    List<FieldInfo> fields = updateSchema(n);
    ISqlTridentDataSource ds = DataSourcesRegistry.constructTridentDataSource(n.location(), n.inputFormatClass(), n.outputFormatClass(), n.properties(), fields);
    if (ds == null) {
        throw new RuntimeException("Failed to find data source for " + n.tableName() + " URI: " + n.location());
    }
    dataSources.put(n.tableName(), ds);
}
Also used : ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) FieldInfo(org.apache.storm.sql.runtime.FieldInfo)

Example 2 with ISqlTridentDataSource

Use of org.apache.storm.sql.runtime.ISqlTridentDataSource in the Apache Storm project.

From the class StormSqlImpl, method submit.

/**
 * Parses each SQL statement and either registers a table/function or compiles
 * the query into a Trident topology and submits it to the cluster.
 *
 * @param name             topology name to submit under
 * @param statements       SQL statements processed in order; CREATE TABLE and
 *                         CREATE FUNCTION populate state used by later queries
 * @param stormConf        topology configuration passed to the submitter
 * @param opts             submit options forwarded to StormSubmitter
 * @param progressListener callback for upload progress
 * @param asUser           user to impersonate on submission
 * @throws Exception on parse, compile, packaging, or submission failure
 */
@Override
public void submit(String name, Iterable<String> statements, Map<String, ?> stormConf, SubmitOptions opts, StormSubmitter.ProgressListener progressListener, String asUser) throws Exception {
    // Accumulates data sources from CREATE TABLE statements; queries compiled
    // later in the same call resolve their tables against this map.
    Map<String, ISqlTridentDataSource> dataSources = new HashMap<>();
    for (String sql : statements) {
        StormParser parser = new StormParser(sql);
        SqlNode node = parser.impl().parseSqlStmtEof();
        if (node instanceof SqlCreateTable) {
            handleCreateTableForTrident((SqlCreateTable) node, dataSources);
        } else if (node instanceof SqlCreateFunction) {
            handleCreateFunction((SqlCreateFunction) node);
        } else {
            // Any other statement is treated as a query: plan it and submit.
            QueryPlanner planner = new QueryPlanner(schema);
            AbstractTridentProcessor processor = planner.compile(dataSources, sql);
            TridentTopology topo = processor.build();
            Path jarPath = null;
            try {
                // QueryPlanner on Trident mode configures the topology with compiled classes,
                // so we need to add new classes into topology jar
                // Topology will be serialized and sent to Nimbus, and deserialized and executed in workers.
                jarPath = Files.createTempFile("storm-sql", ".jar");
                // NOTE(review): mutates global JVM state; concurrent submits in one
                // JVM would race on this property — confirm single-threaded use.
                System.setProperty("storm.jar", jarPath.toString());
                packageTopology(jarPath, processor);
                StormSubmitter.submitTopologyAs(name, stormConf, topo.build(), opts, progressListener, asUser);
            } finally {
                // Temp jar is only needed for upload; always clean it up.
                if (jarPath != null) {
                    Files.delete(jarPath);
                }
            }
        }
    }
}
Also used : Path(java.nio.file.Path) HashMap(java.util.HashMap) TridentTopology(org.apache.storm.trident.TridentTopology) SqlCreateFunction(org.apache.storm.sql.parser.SqlCreateFunction) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) StormParser(org.apache.storm.sql.parser.StormParser) SqlCreateTable(org.apache.storm.sql.parser.SqlCreateTable) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) SqlNode(org.apache.calcite.sql.SqlNode)

Example 3 with ISqlTridentDataSource

Use of org.apache.storm.sql.runtime.ISqlTridentDataSource in the Apache Storm project.

From the class TestPlanCompiler, method testCaseStatement.

@Test
public void testCaseStatement() throws Exception {
    // Number of result tuples expected before the topology is torn down.
    int expectedValueCount = 5;
    String sql = "SELECT CASE WHEN NAME IN ('a', 'abc', 'abcde') THEN UPPER('a') " + "WHEN UPPER(NAME) = 'AB' THEN 'b' ELSE {fn CONCAT(NAME, '#')} END FROM FOO";
    TestCompilerUtils.CalciteState calciteState = TestCompilerUtils.sqlOverDummyTable(sql);
    // Back the FOO table with the mock Trident data source.
    final Map<String, ISqlTridentDataSource> sources = new HashMap<>();
    sources.put("FOO", new TestUtils.MockSqlTridentDataSource());
    AbstractTridentProcessor processor = new QueryPlanner(calciteState.schema()).compile(sources, sql);
    final TridentTopology topology = processor.build();
    // Persist the query output through the mock state so results get collected.
    Fields outputFields = processor.outputStream().getOutputFields();
    processor.outputStream().partitionPersist(new TestUtils.MockStateFactory(), outputFields, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(expectedValueCount, processor, topology);
    Assert.assertArrayEquals(new Values[] { new Values("A"), new Values("b"), new Values("A"), new Values("abcd#"), new Values("A") }, getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) Values(org.apache.storm.tuple.Values) MockState.getCollectedValues(org.apache.storm.sql.TestUtils.MockState.getCollectedValues) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractTridentProcessor(org.apache.storm.sql.AbstractTridentProcessor) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Test(org.junit.Test)

Example 4 with ISqlTridentDataSource

Use of org.apache.storm.sql.runtime.ISqlTridentDataSource in the Apache Storm project.

From the class TestPlanCompiler, method testCompile.

@Test
public void testCompile() throws Exception {
    // Number of result tuples expected before the topology is torn down.
    final int expectedValueCount = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState calciteState = TestCompilerUtils.sqlOverDummyTable(sql);
    // Back the FOO table with the mock Trident data source.
    final Map<String, ISqlTridentDataSource> sources = new HashMap<>();
    sources.put("FOO", new TestUtils.MockSqlTridentDataSource());
    AbstractTridentProcessor processor = new QueryPlanner(calciteState.schema()).compile(sources, sql);
    final TridentTopology topology = processor.build();
    // Persist the query output through the mock state so results get collected.
    Fields outputFields = processor.outputStream().getOutputFields();
    processor.outputStream().partitionPersist(new TestUtils.MockStateFactory(), outputFields, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(expectedValueCount, processor, topology);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) Values(org.apache.storm.tuple.Values) MockState.getCollectedValues(org.apache.storm.sql.TestUtils.MockState.getCollectedValues) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractTridentProcessor(org.apache.storm.sql.AbstractTridentProcessor) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Test(org.junit.Test)

Example 5 with ISqlTridentDataSource

Use of org.apache.storm.sql.runtime.ISqlTridentDataSource in the Apache Storm project.

From the class TestSocketDataSourceProvider, method testSocketSink.

@Test
public void testSocketSink() throws IOException {
    // Resolve the socket-backed data source from the registry by URI scheme.
    ISqlTridentDataSource dataSource = DataSourcesRegistry.constructTridentDataSource(URI.create("socket://localhost:8888"), null, null, new Properties(), FIELDS);
    Assert.assertNotNull(dataSource);
    ISqlTridentDataSource.SqlTridentConsumer consumer = dataSource.getConsumer();
    Assert.assertEquals(SocketState.Factory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(SocketStateUpdater.class, consumer.getStateUpdater().getClass());
    // makeState() fails on creating State so we just mock SocketState anyway
    SocketState state = mock(SocketState.class);
    StateUpdater updater = consumer.getStateUpdater();
    List<TridentTuple> tuples = mockTupleList();
    updater.updateState(state, tuples, null);
    // Each tuple must have been written as its serialized form plus a newline.
    for (TridentTuple tuple : tuples) {
        String expectedLine = new String(SERIALIZER.write(tuple.getValues(), null).array());
        verify(state).write(expectedLine + "\n");
    }
}
Also used : ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) Properties(java.util.Properties) SocketState(org.apache.storm.sql.runtime.datasource.socket.trident.SocketState) StateUpdater(org.apache.storm.trident.state.StateUpdater) SocketStateUpdater(org.apache.storm.sql.runtime.datasource.socket.trident.SocketStateUpdater) TridentTuple(org.apache.storm.trident.tuple.TridentTuple) Test(org.junit.Test)

Aggregations

ISqlTridentDataSource (org.apache.storm.sql.runtime.ISqlTridentDataSource)10 HashMap (java.util.HashMap)7 QueryPlanner (org.apache.storm.sql.planner.trident.QueryPlanner)7 TridentTopology (org.apache.storm.trident.TridentTopology)7 Test (org.junit.Test)7 AbstractTridentProcessor (org.apache.storm.sql.AbstractTridentProcessor)6 TestUtils (org.apache.storm.sql.TestUtils)6 MockState.getCollectedValues (org.apache.storm.sql.TestUtils.MockState.getCollectedValues)6 Values (org.apache.storm.tuple.Values)6 Fields (org.apache.storm.tuple.Fields)5 ImmutableMap (com.google.common.collect.ImmutableMap)1 Path (java.nio.file.Path)1 Map (java.util.Map)1 Properties (java.util.Properties)1 DataContext (org.apache.calcite.DataContext)1 SqlNode (org.apache.calcite.sql.SqlNode)1 SqlCreateFunction (org.apache.storm.sql.parser.SqlCreateFunction)1 SqlCreateTable (org.apache.storm.sql.parser.SqlCreateTable)1 StormParser (org.apache.storm.sql.parser.StormParser)1 FieldInfo (org.apache.storm.sql.runtime.FieldInfo)1