Use of org.apache.storm.tuple.Values in project storm by apache.
In class TestPlanCompiler, method testUdf:
@Test
public void testUdf() throws Exception {
    // Note the explicit spaces: the concatenated fragments must form valid SQL.
    String sql = "SELECT MYPLUS(ID, 3) " + "FROM FOO " + "WHERE ID = 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(sql);
    PlanCompiler compiler = new PlanCompiler(typeFactory);
    AbstractValuesProcessor proc = compiler.compile(state.tree());
    Map<String, DataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockDataSource());
    List<Values> values = new ArrayList<>();
    ChannelHandler h = new TestUtils.CollectDataChannelHandler(values);
    proc.initialize(data, h);
    // MYPLUS(2, 3) = 5 for the single row matching WHERE ID = 2.
    Assert.assertEquals(new Values(5), values.get(0));
}
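MYPLUS is a scalar UDF registered by the sqlOverNestedTable helper. A minimal sketch of such a UDF, assuming it is exposed to Calcite as a class with a static evaluate method (the pattern Storm SQL's test utilities use):

// Sketch of a scalar UDF usable as MYPLUS; the helper would register it
// with the Calcite schema so the planner can resolve the call.
public static class MyPlus {
    public static Integer evaluate(Integer x, Integer y) {
        return x + y;
    }
}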
Use of org.apache.storm.tuple.Values in project storm by apache.
In class TestPlanCompiler, method testNested:
@Test
public void testNested() throws Exception {
    String sql = "SELECT ID, MAPFIELD['c'], NESTEDMAPFIELD, ARRAYFIELD " + "FROM FOO " + "WHERE NESTEDMAPFIELD['a']['b'] = 2 AND ARRAYFIELD[2] = 200";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(sql);
    PlanCompiler compiler = new PlanCompiler(typeFactory);
    AbstractValuesProcessor proc = compiler.compile(state.tree());
    Map<String, DataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockNestedDataSource());
    List<Values> values = new ArrayList<>();
    ChannelHandler h = new TestUtils.CollectDataChannelHandler(values);
    proc.initialize(data, h);
    Map<String, Integer> map = ImmutableMap.of("b", 2, "c", 4);
    Map<String, Map<String, Integer>> nestedMap = ImmutableMap.of("a", map);
    // SQL arrays are 1-based, so ARRAYFIELD[2] selects 200 from [100, 200, 300].
    Assert.assertEquals(new Values(2, 4, nestedMap, Arrays.asList(100, 200, 300)), values.get(0));
}
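For the assertion to hold, the mock source must contain a row where ID is 2, MAPFIELD maps 'c' to 4, NESTEDMAPFIELD['a']['b'] is 2, and ARRAYFIELD is [100, 200, 300]. A hypothetical row matching those constraints (the real MockNestedDataSource lives in TestUtils and is not shown here):

// One row that satisfies the WHERE clause and produces the asserted output.
Map<String, Integer> mapField = ImmutableMap.of("b", 2, "c", 4);
Values row = new Values(2, mapField, ImmutableMap.of("a", mapField), Arrays.asList(100, 200, 300));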
Use of org.apache.storm.tuple.Values in project storm by apache.
In class TestPlanCompiler, method testCaseStatement:
@Test
public void testCaseStatement() throws Exception {
    final int EXPECTED_VALUE_SIZE = 5;
    String sql = "SELECT CASE WHEN NAME IN ('a', 'abc', 'abcde') THEN UPPER('a') " + "WHEN UPPER(NAME) = 'AB' THEN 'b' ELSE {fn CONCAT(NAME, '#')} END FROM FOO";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values("A"), new Values("b"), new Values("A"), new Values("abcd#"), new Values("A") }, getCollectedValues().toArray());
}
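The five asserted rows imply the mock source emits NAME values 'a', 'ab', 'abc', 'abcd', and 'abcde'. The CASE expression is equivalent to the following plain Java, shown here only to illustrate the branch logic:

// Illustrative Java equivalent of the CASE expression, for one NAME value.
String result;
if (Arrays.asList("a", "abc", "abcde").contains(name)) {
    result = "a".toUpperCase();   // UPPER('a') -> "A"
} else if (name.toUpperCase().equals("AB")) {
    result = "b";
} else {
    result = name + "#";          // {fn CONCAT(NAME, '#')}
}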
Use of org.apache.storm.tuple.Values in project storm by apache.
In class TestPlanCompiler, method testCompile:
@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, getCollectedValues().toArray());
}
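runTridentTopology and getCollectedValues are helpers in the test class and are not shown in these excerpts. A hypothetical sketch of what the runner could look like, assuming an in-process LocalCluster and polling until the expected number of rows has been collected:

// Hypothetical helper; the real implementation lives in the test class.
// The proc parameter mirrors the call sites above, even though this sketch
// only needs the built topology and the collected row count.
private void runTridentTopology(int expectedValueSize, AbstractTridentProcessor proc, TridentTopology topo) throws Exception {
    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("storm-sql-test", conf, topo.build());
    while (getCollectedValues().size() < expectedValueSize) {
        Thread.sleep(100);
    }
    cluster.shutdown();
}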
Use of org.apache.storm.tuple.Values in project storm by apache.
In class TridentTopologySource, method getTopology:
public StormTopology getTopology(Config config) {
    this.spout = new FixedBatchSpout(new Fields("sentence"), 20,
            new Values("one two"), new Values("two three"), new Values("three four"),
            new Values("four five"), new Values("five six"));
    TridentTopology trident = new TridentTopology();
    trident.newStream("wordcount", spout).name("sentence").parallelismHint(1)
           .shuffle()
           .each(new Fields("sentence"), new Split(), new Fields("word")).parallelismHint(1)
           .groupBy(new Fields("word"))
           .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"))
           .parallelismHint(1);
    return trident.build();
}
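The Split function referenced above is not shown in this excerpt. A typical implementation, following the standard Trident word-count example, splits each sentence on whitespace and emits one tuple per word:

// Splits the "sentence" field into individual "word" tuples.
public static class Split extends BaseFunction {
    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        for (String word : tuple.getString(0).split(" ")) {
            collector.emit(new Values(word));
        }
    }
}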