Use of org.apache.storm.trident.TridentTopology in project storm by apache.
From the class TestPlanCompiler, method testUdf.
@Test
public void testUdf() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT MYPLUS(ID, 3) " + "FROM FOO " + "WHERE ID = 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    // Sink the output stream into a mock state so the emitted values can be collected.
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(5) }, getCollectedValues().toArray());
}
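For context, MYPLUS is registered by TestCompilerUtils as a Calcite scalar function over the dummy table's schema. A minimal sketch of how such a UDF is typically declared for Calcite, assuming the reflective "evaluate" convention (the class name and the registration line are illustrative, not the exact test fixture):

// Hypothetical sketch of a scalar UDF comparable to MYPLUS. Calcite resolves
// the implementation by reflecting on the public evaluate method.
public static class MyPlus {
    public static Integer evaluate(Integer x, Integer y) {
        return x + y;
    }
}
// Assumed registration against the Calcite schema, as TestCompilerUtils would do:
// schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "evaluate"));

With ID = 2 in the matching row, MYPLUS(ID, 3) evaluates to 5, which is exactly what the assertion checks.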
Use of org.apache.storm.trident.TridentTopology in project storm by apache.
From the class TestPlanCompiler, method testDateKeywords.
@Test
public void testDateKeywords() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT " + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE " + "FROM FOO " + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final DataContext dataContext = proc.getDataContext();
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);
    System.out.println(getCollectedValues());
    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    Assert.assertArrayEquals(new Values[] { new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt) }, getCollectedValues().toArray());
}
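The assertion works because Calcite encodes the date/time keywords from a single snapshot of the evaluation-time DataContext: CURRENT_DATE as days since the epoch, and LOCALTIME / CURRENT_TIME as milliseconds since midnight. A compact sketch of that derivation, assuming one UTC timestamp (utcMillis is an illustrative name, not a variable from the test):

// Hypothetical sketch of Calcite's date/time keyword encoding.
long utcMillis = System.currentTimeMillis();                         // UTC_TIMESTAMP variable
int currentDate = (int) (utcMillis / DateTimeUtils.MILLIS_PER_DAY);  // CURRENT_DATE: days since epoch
int currentTime = (int) (utcMillis % DateTimeUtils.MILLIS_PER_DAY);  // CURRENT_TIME: millis since midnight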
Use of org.apache.storm.trident.TridentTopology in project storm by apache.
From the class TestPlanCompiler, method testInsert.
@Test
public void testInsert() throws Exception {
    final int EXPECTED_VALUE_SIZE = 1;
    String sql = "INSERT INTO BAR SELECT ID, NAME, ADDR FROM FOO WHERE ID > 3";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    data.put("BAR", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(4, "abcde", "y") }, getCollectedValues().toArray());
}
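Unlike the SELECT-only tests, the INSERT statement exercises the consumer side of the BAR data source: the planner compiles the write into a partitionPersist against BAR's state factory and updater. A rough sketch of the shape such a data source takes, hedged since the exact interface may vary across Storm versions (MockSpout and the consumer wiring here are illustrative):

// Hypothetical sketch: a storm-sql Trident table is backed by a producer
// (for reads) and a consumer (for writes such as INSERT INTO).
public class MockSqlTridentDataSource implements ISqlTridentDataSource {
    @Override
    public ITridentDataSource getProducer() {
        return new MockSpout();  // illustrative: emits the FOO rows
    }

    @Override
    public SqlTridentConsumer getConsumer() {
        // illustrative: records written rows so the test can assert on them
        return new SimpleSqlTridentConsumer(new TestUtils.MockStateFactory(),
                                            new TestUtils.MockStateUpdater());
    }
}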
Use of org.apache.storm.trident.TridentTopology in project storm by apache.
From the class TestPlanCompiler, method testNested.
@Test
public void testNested() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT ID, MAPFIELD['c'], NESTEDMAPFIELD, ARRAYFIELD " + "FROM FOO " + "WHERE NESTEDMAPFIELD['a']['b'] = 2 AND ARRAYFIELD[2] = 200";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverNestedTable(sql);
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentNestedDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Map<String, Integer> map = ImmutableMap.of("b", 2, "c", 4);
    Map<String, Map<String, Integer>> nestedMap = ImmutableMap.of("a", map);
    Assert.assertArrayEquals(new Values[] { new Values(2, 4, nestedMap, Arrays.asList(100, 200, 300)) }, getCollectedValues().toArray());
}
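The nested accessors (MAPFIELD['c'], NESTEDMAPFIELD['a']['b'], ARRAYFIELD[2]) assume rows whose fields hold Java maps and lists; note that SQL array indexes are 1-based, so ARRAYFIELD[2] selects 200. A sketch of the kind of row the nested mock plausibly emits, with values chosen to match the assertion (the emit call is illustrative, not the actual fixture code):

// Hypothetical sketch of one FOO row in the nested mock data source.
Map<String, Integer> mapField = ImmutableMap.of("b", 2, "c", 4);
Map<String, Map<String, Integer>> nestedMapField = ImmutableMap.of("a", mapField);
List<Integer> arrayField = Arrays.asList(100, 200, 300);
// collector.emit(new Values(2, mapField, nestedMapField, arrayField));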
Use of org.apache.storm.trident.TridentTopology in project storm by apache.
From the class SampleDruidBoltTridentTopology, method main.
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        throw new IllegalArgumentException("There should be at least one argument. Run as `SampleDruidBoltTridentTopology <zk-url>`");
    }
    TridentTopology tridentTopology = new TridentTopology();
    DruidBeamFactory druidBeamFactory = new SampleDruidBeamFactoryImpl(new HashMap<String, Object>());
    ITupleDruidEventMapper<Map<String, Object>> eventMapper = new TupleDruidEventMapper<>(TupleDruidEventMapper.DEFAULT_FIELD_NAME);
    final Stream stream = tridentTopology.newStream("batch-event-gen", new SimpleBatchSpout(10));
    stream.peek(new Consumer() {
        @Override
        public void accept(TridentTuple input) {
            LOG.info("########### Received tuple: [{}]", input);
        }
    }).partitionPersist(new DruidBeamStateFactory<Map<String, Object>>(druidBeamFactory, eventMapper), new Fields("event"), new DruidBeamStateUpdater());
    Config conf = new Config();
    conf.setDebug(true);
    conf.put("druid.tranquility.zk.connect", args[0]);
    if (args.length > 1) {
        // Remote submission: args[1] is the topology name.
        conf.setNumWorkers(3);
        StormSubmitter.submitTopologyWithProgressBar(args[1], conf, tridentTopology.build());
    } else {
        // Local mode: run for 30 seconds, then shut down.
        conf.setMaxTaskParallelism(3);
        try (LocalCluster cluster = new LocalCluster();
             LocalTopology topo = cluster.submitTopology("druid-test", conf, tridentTopology.build())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    }
}
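SimpleBatchSpout is the example's event generator. A minimal sketch of such a spout, assuming Storm 2.x's IBatchSpout signatures; the single output field "event" matches the Fields("event") used by partitionPersist above, while the payload keys are illustrative:

import java.util.HashMap;
import java.util.Map;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.spout.IBatchSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

// Hypothetical sketch of a batch spout like SimpleBatchSpout: emits a fixed
// number of map-shaped events per batch under the single field "event".
public class SimpleBatchSpout implements IBatchSpout {
    private final int batchSize;

    public SimpleBatchSpout(int batchSize) {
        this.batchSize = batchSize;
    }

    @Override
    public void open(Map<String, Object> conf, TopologyContext context) {
    }

    @Override
    public void emitBatch(long batchId, TridentCollector collector) {
        for (int i = 0; i < batchSize; i++) {
            Map<String, Object> event = new HashMap<>();
            event.put("timestamp", System.currentTimeMillis());
            event.put("count", i);
            collector.emit(new Values(event));
        }
    }

    @Override
    public void ack(long batchId) {
    }

    @Override
    public void close() {
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }

    @Override
    public Fields getOutputFields() {
        return new Fields("event");
    }
}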