Search in sources :

Example 6 with OutputFieldsDeclarer

use of org.apache.storm.topology.OutputFieldsDeclarer in project kafka-spout by HolmesNL.

The following example is taken from the class KafkaSpoutConstructorTest, method testDelegateCustomScheme.

@Test
public void testDelegateCustomScheme() {
    // A minimal custom scheme that splits each message into a one-byte
    // "head" and the remaining "tail".
    final Scheme scheme = new Scheme() {

        @Override
        public List<Object> deserialize(final ByteBuffer bytes) {
            // BUG FIX: the previous implementation called
            // bytes.get(result, 1, bytes.limit()) on an array of size
            // limit - 1; offset 1 + length limit always exceeds the array,
            // so every invocation threw IndexOutOfBoundsException (and the
            // head byte was read after the tail). Read the head first,
            // then copy whatever remains into the tail array.
            final byte head = bytes.get();
            final byte[] tail = new byte[bytes.remaining()];
            bytes.get(tail);
            return Arrays.<Object>asList(new byte[] { head }, tail);
        }

        @Override
        public Fields getOutputFields() {
            return new Fields("head", "tail");
        }
    };
    final OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
    // test for both constructors that accept a scheme
    new KafkaSpout(scheme).declareOutputFields(declarer);
    new KafkaSpout("topic", scheme).declareOutputFields(declarer);
    // Fields doesn't implement equals; match it manually
    verify(declarer, times(2)).declare(argThat(new ArgumentMatcher<Fields>() {

        @Override
        public boolean matches(final Object argument) {
            final Fields fields = (Fields) argument;
            return fields.size() == 2 && fields.get(0).equals("head") && fields.get(1).equals("tail");
        }
    }));
}
Also used : Scheme(org.apache.storm.spout.Scheme) Fields(org.apache.storm.tuple.Fields) ArgumentMatcher(org.mockito.ArgumentMatcher) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Example 7 with OutputFieldsDeclarer

use of org.apache.storm.topology.OutputFieldsDeclarer in project flink by apache.

The following example is taken from the class NullTerminatingSpoutTest, method testMethodCalls.

@Test
public void testMethodCalls() {
    // Mock the wrapped spout and stub its component configuration.
    final IRichSpout delegate = mock(IRichSpout.class);
    final Map<String, Object> componentConfig = new HashMap<String, Object>();
    when(delegate.getComponentConfiguration()).thenReturn(componentConfig);

    // Mocks for every collaborator the wrapper's methods receive.
    final Map<?, ?> stormConf = mock(Map.class);
    final TopologyContext topologyContext = mock(TopologyContext.class);
    final Object messageId = mock(Object.class);
    final OutputFieldsDeclarer fieldsDeclarer = mock(OutputFieldsDeclarer.class);

    // Exercise the full lifecycle of the wrapper.
    final NullTerminatingSpout wrapper = new NullTerminatingSpout(delegate);
    wrapper.open(stormConf, topologyContext, null);
    wrapper.close();
    wrapper.activate();
    wrapper.deactivate();
    wrapper.ack(messageId);
    wrapper.fail(messageId);
    wrapper.declareOutputFields(fieldsDeclarer);
    final Map<String, Object> returnedConfig = delegate.getComponentConfiguration();

    // Every call must have been forwarded to the wrapped spout exactly once,
    // with the original arguments (the collector is supplied by the wrapper).
    verify(delegate).open(same(stormConf), same(topologyContext), any(SpoutOutputCollector.class));
    verify(delegate).close();
    verify(delegate).activate();
    verify(delegate).deactivate();
    verify(delegate).ack(same(messageId));
    verify(delegate).fail(same(messageId));
    verify(delegate).declareOutputFields(same(fieldsDeclarer));
    Assert.assertSame(componentConfig, returnedConfig);
}
Also used : IRichSpout(org.apache.storm.topology.IRichSpout) HashMap(java.util.HashMap) SpoutOutputCollector(org.apache.storm.spout.SpoutOutputCollector) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) TopologyContext(org.apache.storm.task.TopologyContext) Test(org.junit.Test)

Example 8 with OutputFieldsDeclarer

use of org.apache.storm.topology.OutputFieldsDeclarer in project storm by apache.

The following example is taken from the class IntermediateRankingsBoltTest, method shouldDeclareOutputFields.

@Test
public void shouldDeclareOutputFields() {
    // given: a mocked declarer to capture the bolt's declaration
    OutputFieldsDeclarer mockDeclarer = mock(OutputFieldsDeclarer.class);

    // when: a fresh bolt declares its output fields
    new IntermediateRankingsBolt().declareOutputFields(mockDeclarer);

    // then: exactly one Fields declaration must have been registered
    verify(mockDeclarer, times(1)).declare(any(Fields.class));
}
Also used : Fields(org.apache.storm.tuple.Fields) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) Test(org.testng.annotations.Test)

Example 9 with OutputFieldsDeclarer

use of org.apache.storm.topology.OutputFieldsDeclarer in project IndyCar by DSC-SPIDAL.

The following example is taken from the class StormTest, method main.

/**
 * Builds and runs (on an in-process LocalCluster) a Storm streams topology
 * that simulates IndyCar telemetry processing: one random-data spout feeds
 * 33 per-car branches; each branch joins its raw metrics with one simulated
 * anomaly-score stream per metric and prints the combined list.
 *
 * NOTE(review): the "anomaly score" is a placeholder (rawData + 10000 plus a
 * random sleep); presumably an HTM-based detector is meant to replace it —
 * see the comment inside the PairFunction below.
 */
public static void main(String[] args) throws Exception {
    StreamBuilder streamBuilder = new StreamBuilder();
    // start with source: a spout emitting random (car, speed, rpm, throttle) tuples
    Stream<Tuple> sourceStream = streamBuilder.newStream(new BaseRichSpout() {

        private SpoutOutputCollector collector;

        private Random random;

        public void open(Map<String, Object> map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.collector = spoutOutputCollector;
            this.random = new Random(System.currentTimeMillis());
        }

        public void nextTuple() {
            // field 0 is a car number in [0, 33); fields 1-3 are random float metrics
            Object[] tuple = new Object[] { this.random.nextInt(33), random.nextFloat(), random.nextFloat(), random.nextFloat() };
            this.collector.emit(Arrays.asList(tuple));
        }

        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("car", "speed", "rpm", "throttle"));
        }
    }, 1);
    // split for 33 cars: one predicate (and hence one branch) per car number
    Predicate<Tuple>[] branchingPredicates = new Predicate[33];
    for (int i = 0; i < 33; i++) {
        final int index = i;
        branchingPredicates[i] = tuple -> tuple.getInteger(0) == index;
    }
    Stream<Tuple>[] carBranches = sourceStream.branch(branchingPredicates);
    for (Stream<Tuple> carBranch : carBranches) {
        // key each branch by car number; value is the mutable list [speed, rpm, throttle]
        PairStream<Integer, List<Float>> carBranchPaired = carBranch.mapToPair((PairFunction<Tuple, Integer, List<Float>>) tuple -> {
            List<Float> values = new ArrayList<>();
            values.add(tuple.getFloat(1));
            values.add(tuple.getFloat(2));
            values.add(tuple.getFloat(3));
            return Pair.of(tuple.getInteger(0), values);
        });
        // car branch has all the raw data
        PairStream joinedStream = carBranchPaired;
        // join one simulated anomaly-score stream per metric (speed, rpm, throttle)
        for (int i = 0; i < 3; i++) {
            int metricIndex = i;
            PairStream<Integer, Float> anomalyScoreStream = carBranch.mapToPair(new PairFunction<Tuple, Integer, Float>() {

                // here we should initialize htm java and FIFO blocking mechanism should be created
                private Random htm = new Random(System.currentTimeMillis());

                @Override
                public Pair<Integer, Float> apply(Tuple tuple) {
                    // + 1 because field 0 is the car number
                    Float rawData = tuple.getFloat(metricIndex + 1);
                    try {
                        // random sleep to simulate processing time
                        Thread.sleep(htm.nextInt(6));
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    // placeholder score: offset raw value so it is recognizable in output
                    return Pair.of(tuple.getInteger(0), rawData + 10000);
                }
            });
            // append this metric's score to the per-car value list
            // (mutates the joined list in place; raw types — unchecked cast below)
            joinedStream = joinedStream.join(anomalyScoreStream, new ValueJoiner() {

                @Override
                public Object apply(Object o, Object o2) {
                    List<Float> combined = (List<Float>) o;
                    combined.add((Float) o2);
                    return combined;
                }
            });
        }
        joinedStream.forEach(new Consumer() {

            @Override
            public void accept(Object o) {
                // publish to websockets or MQTT
                System.out.println(o);
            }
        });
    }
    // run the topology on an in-process cluster, capped at 33 parallel tasks,
    // and keep the JVM alive long enough to observe output
    try (LocalCluster cluster = new LocalCluster()) {
        cluster.submitTopology("indycar-stream", Collections.singletonMap(Config.TOPOLOGY_MAX_TASK_PARALLELISM, 33), streamBuilder.build());
        Thread.sleep(10000000);
    }
}
Also used : Consumer(org.apache.storm.streams.operations.Consumer) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) BaseRichSpout(org.apache.storm.topology.base.BaseRichSpout) java.util(java.util) Pair(org.apache.storm.streams.Pair) PairStream(org.apache.storm.streams.PairStream) StreamBuilder(org.apache.storm.streams.StreamBuilder) TopologyContext(org.apache.storm.task.TopologyContext) Fields(org.apache.storm.tuple.Fields) LocalCluster(org.apache.storm.LocalCluster) ValueJoiner(org.apache.storm.streams.operations.ValueJoiner) Tuple(org.apache.storm.tuple.Tuple) PairFunction(org.apache.storm.streams.operations.PairFunction) Predicate(org.apache.storm.streams.operations.Predicate) Stream(org.apache.storm.streams.Stream) Config(org.apache.storm.Config) SpoutOutputCollector(org.apache.storm.spout.SpoutOutputCollector) LocalCluster(org.apache.storm.LocalCluster) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) StreamBuilder(org.apache.storm.streams.StreamBuilder) Predicate(org.apache.storm.streams.operations.Predicate) ValueJoiner(org.apache.storm.streams.operations.ValueJoiner) Consumer(org.apache.storm.streams.operations.Consumer) PairStream(org.apache.storm.streams.PairStream) Stream(org.apache.storm.streams.Stream) TopologyContext(org.apache.storm.task.TopologyContext) Pair(org.apache.storm.streams.Pair) PairStream(org.apache.storm.streams.PairStream) Fields(org.apache.storm.tuple.Fields) SpoutOutputCollector(org.apache.storm.spout.SpoutOutputCollector) Tuple(org.apache.storm.tuple.Tuple) BaseRichSpout(org.apache.storm.topology.base.BaseRichSpout)

Aggregations

OutputFieldsDeclarer (org.apache.storm.topology.OutputFieldsDeclarer)9 Fields (org.apache.storm.tuple.Fields)8 Test (org.junit.Test)4 ArgumentMatcher (org.mockito.ArgumentMatcher)3 Test (org.testng.annotations.Test)3 SpoutOutputCollector (org.apache.storm.spout.SpoutOutputCollector)2 TopologyContext (org.apache.storm.task.TopologyContext)2 ByteBuffer (java.nio.ByteBuffer)1 java.util (java.util)1 HashMap (java.util.HashMap)1 Config (org.apache.storm.Config)1 LocalCluster (org.apache.storm.LocalCluster)1 Scheme (org.apache.storm.spout.Scheme)1 Pair (org.apache.storm.streams.Pair)1 PairStream (org.apache.storm.streams.PairStream)1 Stream (org.apache.storm.streams.Stream)1 StreamBuilder (org.apache.storm.streams.StreamBuilder)1 Consumer (org.apache.storm.streams.operations.Consumer)1 PairFunction (org.apache.storm.streams.operations.PairFunction)1 Predicate (org.apache.storm.streams.operations.Predicate)1