Use of org.apache.storm.sql.AbstractTridentProcessor in project storm by apache.
The class QueryPlanner, method compile:

public AbstractTridentProcessor compile(Map<String, ISqlTridentDataSource> sources, String query) throws Exception {
    // Plan the SQL query into a Trident-specific relational tree.
    TridentRel relNode = getPlan(query);

    // Walk the plan; the creator builds the equivalent Trident topology as it goes.
    TridentPlanCreator tridentPlanCreator = new TridentPlanCreator(sources, new RexBuilder(typeFactory));
    relNode.tridentPlan(tridentPlanCreator);

    // Capture the planner's products so the anonymous processor below can expose them.
    final TridentTopology topology = tridentPlanCreator.getTopology();
    final IAggregatableStream lastStream = tridentPlanCreator.pop();
    final DataContext dc = tridentPlanCreator.getDataContext();
    final List<CompilingClassLoader> cls = tridentPlanCreator.getClassLoaders();

    return new AbstractTridentProcessor() {

        @Override
        public TridentTopology build() {
            return topology;
        }

        @Override
        public Stream outputStream() {
            return lastStream.toStream();
        }

        @Override
        public DataContext getDataContext() {
            return dc;
        }

        @Override
        public List<CompilingClassLoader> getClassLoaders() {
            return cls;
        }
    };
}
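
A hedged sketch of how a caller might drive compile() end to end; the source name "ORDERS", the myOrdersDataSource instance, the schema variable, and the query string are illustrative assumptions, not part of QueryPlanner (the tests below show the real pattern):

// Illustrative only: wire a data source, compile a query, and build the
// final Storm topology from the returned processor.
Map<String, ISqlTridentDataSource> sources = new HashMap<>();
sources.put("ORDERS", myOrdersDataSource);            // hypothetical ISqlTridentDataSource
QueryPlanner planner = new QueryPlanner(schema);      // schema from the Calcite setup
AbstractTridentProcessor proc = planner.compile(sources, "SELECT ID FROM ORDERS");
StormTopology stormTopology = proc.build().build();   // TridentTopology -> StormTopology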
Use of org.apache.storm.sql.AbstractTridentProcessor in project storm by apache.
The class TestPlanCompiler, method testCaseStatement:

@Test
public void testCaseStatement() throws Exception {
    int EXPECTED_VALUE_SIZE = 5;
    String sql = "SELECT CASE WHEN NAME IN ('a', 'abc', 'abcde') THEN UPPER('a') "
            + "WHEN UPPER(NAME) = 'AB' THEN 'b' ELSE {fn CONCAT(NAME, '#')} END FROM FOO";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    // Persist the query output through a mock state so the emitted values can be collected and asserted.
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values("A"), new Values("b"), new Values("A"), new Values("abcd#"), new Values("A") },
            getCollectedValues().toArray());
}
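
To make the expected values concrete, here is a plain-Java sketch of the row-level logic the CASE expression encodes; the input names a, ab, abc, abcd, abcde are an assumption inferred from the asserted output, and this mirrors the SQL branch by branch rather than reproducing the generated code:

// "a" -> "A", "ab" -> "b", "abc" -> "A", "abcd" -> "abcd#", "abcde" -> "A"
static String evalCase(String name) {
    if (name.equals("a") || name.equals("abc") || name.equals("abcde")) {
        return "a".toUpperCase();              // UPPER('a') -> "A"
    } else if (name.toUpperCase().equals("AB")) {
        return "b";
    } else {
        return name + "#";                     // {fn CONCAT(NAME, '#')}
    }
}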
Use of org.apache.storm.sql.AbstractTridentProcessor in project storm by apache.
The class TestPlanCompiler, method testCompile:

@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, getCollectedValues().toArray());
}
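
A hedged stand-in for the predicate this query compiles to; that the mock source emits IDs 0 through 4 is an assumption inferred from the asserted values, so only 3 and 4 survive the filter:

// Hypothetical illustration of WHERE ID > 2 over the assumed input range.
java.util.stream.IntStream.rangeClosed(0, 4)
        .filter(id -> id > 2)
        .forEach(id -> System.out.println(new Values(id)));  // prints [3] then [4]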
Use of org.apache.storm.sql.AbstractTridentProcessor in project storm by apache.
The class TestPlanCompiler, method testUdf:

@Test
public void testUdf() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT MYPLUS(ID, 3) " + "FROM FOO " + "WHERE ID = 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(5) }, getCollectedValues().toArray());
}
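
The MYPLUS function is registered by the test utilities; as a hedged sketch, a Calcite-compatible scalar UDF is typically a class exposing an eval method Calcite can bind (the class name and signature here are assumptions, not the project's actual definition):

// A minimal scalar UDF; MYPLUS(ID, 3) with ID = 2 evaluates to 5,
// matching the asserted Values(5).
public static class MyPlus {
    public static Integer eval(Integer x, Integer y) {
        return x + y;
    }
}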
Use of org.apache.storm.sql.AbstractTridentProcessor in project storm by apache.
The class TestPlanCompiler, method testDateKeywords:

@Test
public void testDateKeywords() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT " + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE "
            + "FROM FOO " + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final DataContext dataContext = proc.getDataContext();
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    // Read back the timestamps the DataContext fixed at execution time.
    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);
    System.out.println(getCollectedValues());
    // Derive the expected values: DATE surfaces as days since the epoch,
    // TIME values as the milliseconds-into-day remainder of the timestamps.
    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    Assert.assertArrayEquals(new Values[] { new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt) },
            getCollectedValues().toArray());
}
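
The assertion leans on Calcite's internal value encodings: DATE is an int count of days since 1970-01-01 and TIME an int count of milliseconds since midnight. A hedged worked example with a fixed instant (the constants are illustrative):

// 1970-01-02T01:00:00Z as epoch milliseconds.
long ts = DateTimeUtils.MILLIS_PER_DAY + 3_600_000L;
int dateInt = (int) (ts / DateTimeUtils.MILLIS_PER_DAY);    // 1  -> 1970-01-02
int timeInt = (int) (ts % DateTimeUtils.MILLIS_PER_DAY);    // 3600000 -> 01:00:00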