
Example 26 with Fields

use of org.apache.storm.tuple.Fields in project storm by apache.

the class TestPlanCompiler method testCaseStatement.

@Test
public void testCaseStatement() throws Exception {
    final int EXPECTED_VALUE_SIZE = 5;
    String sql = "SELECT CASE WHEN NAME IN ('a', 'abc', 'abcde') THEN UPPER('a') " + "WHEN UPPER(NAME) = 'AB' THEN 'b' ELSE {fn CONCAT(NAME, '#')} END FROM FOO";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    // Register the mock data source backing the FOO table referenced in the query.
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    // Compile the SQL statement into a Trident topology.
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    // Persist the query's output fields into a mock state so the emitted rows can be inspected.
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values("A"), new Values("b"), new Values("A"), new Values("abcd#"), new Values("A") }, getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) Values(org.apache.storm.tuple.Values) MockState.getCollectedValues(org.apache.storm.sql.TestUtils.MockState.getCollectedValues) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractTridentProcessor(org.apache.storm.sql.AbstractTridentProcessor) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Test(org.junit.Test)

Example 27 with Fields

use of org.apache.storm.tuple.Fields in project storm by apache.

the class TestPlanCompiler method testCompile.

@Test
public void testCompile() throws Exception {
    final int EXPECTED_VALUE_SIZE = 2;
    String sql = "SELECT ID FROM FOO WHERE ID > 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);
    // Register the mock data source backing the FOO table.
    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    QueryPlanner planner = new QueryPlanner(state.schema());
    AbstractTridentProcessor proc = planner.compile(data, sql);
    final TridentTopology topo = proc.build();
    // Route the query output into a mock state so the filtered rows can be asserted on.
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(), new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);
    Assert.assertArrayEquals(new Values[] { new Values(3), new Values(4) }, getCollectedValues().toArray());
}
Also used : HashMap(java.util.HashMap) Values(org.apache.storm.tuple.Values) MockState.getCollectedValues(org.apache.storm.sql.TestUtils.MockState.getCollectedValues) ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) QueryPlanner(org.apache.storm.sql.planner.trident.QueryPlanner) TestUtils(org.apache.storm.sql.TestUtils) AbstractTridentProcessor(org.apache.storm.sql.AbstractTridentProcessor) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Test(org.junit.Test)

Example 28 with Fields

use of org.apache.storm.tuple.Fields in project storm by apache.

the class TridentTopologySource method getTopology.

public StormTopology getTopology(Config config) {
    // Fixed test spout that emits two-word sentences on the single field "sentence".
    this.spout = new FixedBatchSpout(new Fields("sentence"), 20, new Values("one two"), new Values("two three"), new Values("three four"), new Values("four five"), new Values("five six"));
    TridentTopology trident = new TridentTopology();
    // Word count: split each sentence into words, group by word, and keep a persistent per-word count in an in-memory map state.
    trident.newStream("wordcount", spout).name("sentence").parallelismHint(1)
            .shuffle()
            .each(new Fields("sentence"), new Split(), new Fields("word")).parallelismHint(1)
            .groupBy(new Fields("word"))
            .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count")).parallelismHint(1);
    return trident.build();
}
Also used : FixedBatchSpout(org.apache.storm.trident.testing.FixedBatchSpout) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Values(org.apache.storm.tuple.Values) Count(org.apache.storm.trident.operation.builtin.Count)
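
The Split function used in the stream definition above is not part of this snippet. A minimal sketch of such a Trident split function, assuming the standard BaseFunction API (the project's own Split class may differ in detail):

import org.apache.storm.trident.operation.BaseFunction;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Values;

public class Split extends BaseFunction {
    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        // Emit one "word" tuple per whitespace-separated token of the incoming "sentence".
        for (String word : tuple.getString(0).split(" ")) {
            if (!word.isEmpty()) {
                collector.emit(new Values(word));
            }
        }
    }
}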

Example 29 with Fields

use of org.apache.storm.tuple.Fields in project storm by apache.

the class Murmur3StreamGrouping method prepare.

/**
     * {@inheritDoc}
     */
@Override
public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
    this.targetTasks = targetTasks;
    this.partitionKeyIndexes = new ArrayList<>();
    // Resolve each configured partition-key field name to its index in the upstream component's output fields.
    Fields componentOutputFields = context.getComponentOutputFields(stream);
    for (String partitionKeyName : partitionKeyNames) {
        partitionKeyIndexes.add(componentOutputFields.fieldIndex(partitionKeyName));
    }
}
Also used : Fields(org.apache.storm.tuple.Fields)
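
The snippet only shows prepare; how the resolved indexes feed the grouping decision is not included. A hypothetical sketch of a chooseTasks implementation that consumes partitionKeyIndexes, assuming Guava's Hashing utilities are on the classpath (the project's actual Murmur3StreamGrouping may hash differently):

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import com.google.common.hash.Hashing;

// Hypothetical sketch, not the project's actual implementation.
@Override
public List<Integer> chooseTasks(int taskId, List<Object> values) {
    StringBuilder key = new StringBuilder();
    for (int index : partitionKeyIndexes) {
        // Pull the partition-key columns out of the tuple by the indexes resolved in prepare().
        key.append(values.get(index));
    }
    long hash = Hashing.murmur3_128().hashBytes(key.toString().getBytes(StandardCharsets.UTF_8)).asLong();
    int chosen = (int) Math.floorMod(hash, (long) targetTasks.size());
    return Collections.singletonList(targetTasks.get(chosen));
}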

Example 30 with Fields

use of org.apache.storm.tuple.Fields in project storm by apache.

the class OpaqueTridentEventCount method buildTopology.

@Override
protected StormTopology buildTopology(EventHubSpout eventHubSpout) {
    TridentTopology topology = new TridentTopology();
    OpaqueTridentEventHubSpout spout = new OpaqueTridentEventHubSpout(spoutConfig);
    // Per-batch partial counts are aggregated, then rolled up into a persistent
    // global count held in an in-memory map state.
    TridentState state = topology.newStream("stream-" + spoutConfig.getTopologyName(), spout)
            .parallelismHint(spoutConfig.getPartitionCount())
            .aggregate(new Count(), new Fields("partial-count"))
            .persistentAggregate(new MemoryMapState.Factory(), new Fields("partial-count"), new Sum(), new Fields("count"));
    // Log the running count as new values are written to the state.
    state.newValuesStream().each(new Fields("count"), new LoggingFilter("got count: ", 10000));
    return topology.build();
}
Also used : Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) OpaqueTridentEventHubSpout(org.apache.storm.eventhubs.trident.OpaqueTridentEventHubSpout) TridentState(org.apache.storm.trident.TridentState) MemoryMapState(org.apache.storm.trident.testing.MemoryMapState) LoggingFilter(org.apache.storm.eventhubs.samples.TransactionalTridentEventCount.LoggingFilter) Sum(org.apache.storm.trident.operation.builtin.Sum) Count(org.apache.storm.trident.operation.builtin.Count)
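
LoggingFilter comes from the event hub samples and its body is not shown here. A minimal sketch of a filter with that shape, assuming Trident's BaseFilter API and a simple count-based logging interval (the sample's actual class may behave differently):

import org.apache.storm.trident.operation.BaseFilter;
import org.apache.storm.trident.tuple.TridentTuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingFilter extends BaseFilter {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingFilter.class);
    private final String prefix;
    private final int logInterval;
    private long seen = 0;

    public LoggingFilter(String prefix, int logInterval) {
        this.prefix = prefix;
        this.logInterval = logInterval;
    }

    @Override
    public boolean isKeep(TridentTuple tuple) {
        // Log every logInterval-th tuple, but never drop anything.
        if (++seen % logInterval == 0) {
            LOG.info("{}{}", prefix, tuple);
        }
        return true;
    }
}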

Aggregations

Fields (org.apache.storm.tuple.Fields) 170
Test (org.junit.Test) 44
Values (org.apache.storm.tuple.Values) 38
TopologyBuilder (org.apache.storm.topology.TopologyBuilder) 36
TridentTopology (org.apache.storm.trident.TridentTopology) 32
HashMap (java.util.HashMap) 31
Config (org.apache.storm.Config) 31
Stream (org.apache.storm.trident.Stream) 25
LocalCluster (org.apache.storm.LocalCluster) 19
LocalTopology (org.apache.storm.LocalCluster.LocalTopology) 17
TridentState (org.apache.storm.trident.TridentState) 17
FixedBatchSpout (org.apache.storm.trident.testing.FixedBatchSpout) 16
ArrayList (java.util.ArrayList) 14
Map (java.util.Map) 14
HiveOptions (org.apache.storm.hive.common.HiveOptions) 14
AbstractTest (org.apache.flink.storm.util.AbstractTest) 13
DelimitedRecordHiveMapper (org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper) 12
IRichBolt (org.apache.storm.topology.IRichBolt) 12
StateFactory (org.apache.storm.trident.state.StateFactory) 12
Tuple (org.apache.storm.tuple.Tuple) 12
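
For reference, a minimal standalone sketch of the Fields operations the examples above lean on most: declaring a named schema, looking up a field's index, and projecting a subset of a tuple's values. The class and variable names here are illustrative only; the method names are those of org.apache.storm.tuple.Fields.

import java.util.List;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

public class FieldsSketch {
    public static void main(String[] args) {
        // Declare an ordered, named schema, as the spouts and bolts above do.
        Fields schema = new Fields("word", "count");

        // Name-to-index lookup, as used in Murmur3StreamGrouping.prepare above.
        int countIndex = schema.fieldIndex("count"); // 1

        // Project a subset of a tuple's values by field name.
        List<Object> tuple = new Values("storm", 3);
        List<Object> counted = schema.select(new Fields("count"), tuple); // [3]

        System.out.println(countIndex + " -> " + counted);
    }
}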