Use of org.apache.storm.sql.AbstractStreamsProcessor in project storm by apache.
From class TestPlanCompiler, method testCompile.
@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) },
        TestUtils.MockBolt.getCollectedValues().toArray());
}
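For context, the "FOO" entry registered above supplies both endpoints of the pipeline. Below is a hedged sketch of a custom data source; it assumes ISqlStreamsDataSource pairs a producer spout with a consumer bolt (the shape the TestUtils mock appears to implement), and FixedRowsSpout / CollectingBolt are hypothetical placeholder classes, not part of Storm.

import org.apache.storm.sql.runtime.ISqlStreamsDataSource;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.IRichSpout;

public class InMemoryDataSource implements ISqlStreamsDataSource {

    @Override
    public IRichSpout getProducer() {
        // hypothetical spout emitting rows; for the assertion above to hold,
        // the source must emit rows with ID 3 and ID 4 (and no other IDs above 2)
        return new FixedRowsSpout();
    }

    @Override
    public IRichBolt getConsumer() {
        // hypothetical sink bolt, used when the data source acts as an output table
        return new CollectingBolt();
    }
}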
Use of org.apache.storm.sql.AbstractStreamsProcessor in project storm by apache.
From class TestPlanCompiler, method testDateKeywordsAndFunctions.
/**
 * All the date/time/timestamp related tests are done here, because Avatica converts the results of date
 * functions to java.sql classes, whereas the Streams implementation provides long values, giving the two
 * different semantics.
 */
@Test
public void testDateKeywordsAndFunctions() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT "
        + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE, "
        + "DATE '1970-05-15' AS datefield, TIME '00:00:00' AS timefield, TIMESTAMP '2016-01-01 00:00:00' as timestampfield, "
        + "EXTRACT(MONTH FROM TIMESTAMP '2010-01-23 12:34:56'),"
        + "FLOOR(DATE '2016-01-23' TO MONTH),"
        + "CEIL(TIME '12:34:56' TO MINUTE),"
        + "{fn CURDATE()} = CURRENT_DATE, {fn CURTIME()} = LOCALTIME, {fn NOW()} = LOCALTIMESTAMP,"
        + "{fn QUARTER(DATE '2016-10-07')}, {fn TIMESTAMPADD(MINUTE, 15, TIMESTAMP '2016-10-07 00:00:00')},"
        + "{fn TIMESTAMPDIFF(SECOND, TIMESTAMP '2016-10-06 00:00:00', TIMESTAMP '2016-10-07 00:00:00')},"
        + "INTERVAL '1-5' YEAR TO MONTH AS intervalfield, "
        + "(DATE '1970-01-01', DATE '1970-01-15') AS anchoredinterval_field "
        + "FROM FOO "
        + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final DataContext dataContext = proc.getDataContext();
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);
    System.out.println(TestUtils.MockBolt.getCollectedValues());
    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    Assert.assertArrayEquals(new Values[] {
        new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt, 134, 0, 1451606400000L,
            1L, 0L, 45300000, true, true, true, 4L, 1475799300000L, 86400, 17, 0, 14) },
        TestUtils.MockBolt.getCollectedValues().toArray());
}
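The javadoc above points at the semantic gap: the Streams implementation hands back primitives (int epoch days for DATE, int milliseconds-of-day for TIME, long epoch milliseconds for TIMESTAMP, all UTC-based, as the asserted literals 134, 45300000, and 1451606400000L show) rather than java.sql classes. As a point of reference, a self-contained java.time sketch (no Storm or Calcite APIs) decoding those asserted literals:

import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;

public final class TemporalDecodeSketch {

    public static void main(String[] args) {
        // DATE: days since 1970-01-01; 134 is the datefield literal DATE '1970-05-15'
        LocalDate date = LocalDate.ofEpochDay(134);
        // TIME: milliseconds into the day; 45300000 is CEIL(TIME '12:34:56' TO MINUTE) = 12:35
        LocalTime time = LocalTime.ofNanoOfDay(45_300_000 * 1_000_000L);
        // TIMESTAMP: milliseconds since the epoch; 1451606400000 is TIMESTAMP '2016-01-01 00:00:00'
        LocalDateTime ts = Instant.ofEpochMilli(1_451_606_400_000L).atOffset(ZoneOffset.UTC).toLocalDateTime();
        System.out.println(date + " " + time + " " + ts); // 1970-05-15 12:35 2016-01-01T00:00
    }
}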
Use of org.apache.storm.sql.AbstractStreamsProcessor in project storm by apache.
From class QueryPlanner, method compile.
public AbstractStreamsProcessor compile(Map<String, ISqlStreamsDataSource> sources, String query) throws Exception {
    StreamsRel relNode = getPlan(query);
    StreamsPlanCreator streamsPlanCreator = new StreamsPlanCreator(sources, new RexBuilder(typeFactory));
    relNode.streamsPlan(streamsPlanCreator);
    final StreamBuilder streamBuilder = streamsPlanCreator.getStreamBuilder();
    final Stream<Values> lastStream = streamsPlanCreator.pop();
    final DataContext dc = streamsPlanCreator.getDataContext();
    final List<CompilingClassLoader> cls = streamsPlanCreator.getClassLoaders();
    return new AbstractStreamsProcessor() {

        @Override
        public StormTopology build() {
            return streamBuilder.build();
        }

        @Override
        public Stream<Values> outputStream() {
            return lastStream;
        }

        @Override
        public DataContext getDataContext() {
            return dc;
        }

        @Override
        public List<CompilingClassLoader> getClassLoaders() {
            return cls;
        }
    };
}
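The planner outputs are captured in an anonymous AbstractStreamsProcessor: the StreamBuilder assembles the topology on demand, the final Stream<Values> is exposed so a caller can attach a sink before building, the DataContext carries the runtime variables read in the date test above, and the CompilingClassLoaders hold the classes generated during compilation. A short consumption sketch, mirroring the test setup above (the data map and planner are as in those tests):

AbstractStreamsProcessor processor = planner.compile(data, "SELECT ID FROM FOO WHERE ID > 2");
processor.outputStream().to(new TestUtils.MockBolt()); // attach the sink before building
StormTopology topology = processor.build();            // StreamBuilder resolves the DAG here
DataContext context = processor.getDataContext();      // variables used during expression evaluation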
Use of org.apache.storm.sql.AbstractStreamsProcessor in project storm by apache.
From class TestPlanCompiler, method testNested.
@Test
public void testNested() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT ID, MAPFIELD['c'], NESTEDMAPFIELD, ARRAYFIELD "
        + "FROM FOO "
        + "WHERE NESTEDMAPFIELD['a']['b'] = 2 AND ARRAYFIELD[2] = 200";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsNestedDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    Map<String, Integer> map = ImmutableMap.of("b", 2, "c", 4);
    Map<String, Map<String, Integer>> nestedMap = ImmutableMap.of("a", map);
    Assert.assertArrayEquals(new Values[] { new Values(2, 4, nestedMap, Arrays.asList(100, 200, 300)) },
        TestUtils.MockBolt.getCollectedValues().toArray());
}
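One detail worth noting: the predicate ARRAYFIELD[2] = 200 matches the second element of Arrays.asList(100, 200, 300) because SQL array subscripts are 1-based. A tiny plain-Java illustration (no Storm APIs involved):

import java.util.Arrays;
import java.util.List;

List<Integer> arrayField = Arrays.asList(100, 200, 300);
int sqlSubscript = 2;                           // as written in the WHERE clause
assert arrayField.get(sqlSubscript - 1) == 200; // SQL is 1-based, Java lists are 0-based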
Use of org.apache.storm.sql.AbstractStreamsProcessor in project storm by apache.
From class TestPlanCompiler, method testBinaryStringFunctions.
/**
 * All the binary literal tests are done here, because Avatica converts the results to byte[], whereas the
 * Streams implementation provides ByteString, giving the two different semantics.
 */
@Test
public void testBinaryStringFunctions() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT x'45F0AB' || x'45F0AB', "
        + "POSITION(x'F0' IN x'453423F0ABBC'), "
        + "OVERLAY(x'453423F0ABBC45' PLACING x'4534' FROM 3), "
        + "SUBSTRING(x'453423F0ABBC' FROM 3), "
        + "SUBSTRING(x'453423F0ABBC453423F0ABBC' FROM 3 FOR 4) "
        + "FROM FOO "
        + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlStreamsDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlStreamsDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractStreamsProcessor proc = planner.compile(data, sql);
    // inject output bolt
    proc.outputStream().to(new TestUtils.MockBolt());
    final StormTopology topo = proc.build();
    SqlTestUtil.runStormTopology(cluster, TestUtils.MockBolt.getCollectedValues(), EXPECTED_VALUE_SIZE, proc, topo);
    Values v = TestUtils.MockBolt.getCollectedValues().get(0);
    assertEquals("45f0ab45f0ab", v.get(0).toString());
    assertEquals(4, v.get(1));
    assertEquals("45344534abbc45", v.get(2).toString());
    assertEquals("23f0abbc", v.get(3).toString());
    assertEquals("23f0abbc", v.get(4).toString());
}
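The hex strings asserted above are the toString() form of Calcite's ByteString, which the comment contrasts with the byte[] Avatica hands to JDBC clients. A small sketch of the bridge between the two (org.apache.calcite.avatica.util.ByteString; only the constructor, toString(), and getBytes() are used, assumed here to be part of its public API):

import org.apache.calcite.avatica.util.ByteString;

ByteString bs = new ByteString(new byte[] { 0x45, (byte) 0xF0, (byte) 0xAB });
String hex = bs.toString(); // "45f0ab" -- the lowercase hex form asserted above
byte[] raw = bs.getBytes(); // a copy of the raw bytes, the shape JDBC clients see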