Use of org.apache.storm.topology.OutputFieldsDeclarer in the project kafka-spout by HolmesNL: class KafkaSpoutConstructorTest, method testDelegateCustomScheme.
@Test
public void testDelegateCustomScheme() {
    // Scheme that splits a message into its first byte ("head") and the remaining bytes ("tail").
    final Scheme scheme = new Scheme() {
        @Override
        public List<Object> deserialize(final ByteBuffer bytes) {
            // Read the head byte first, then copy the remaining bytes into the tail.
            // The original code sized the array as limit-1 but requested limit bytes at
            // offset 1 (offset + length > dst.length), which ByteBuffer.get rejects with
            // an IndexOutOfBoundsException; it also called bytes.get() after the buffer
            // had been drained.
            final byte head = bytes.get();
            final byte[] tail = new byte[bytes.remaining()];
            bytes.get(tail);
            return Arrays.<Object>asList(new byte[] { head }, tail);
        }

        @Override
        public Fields getOutputFields() {
            return new Fields("head", "tail");
        }
    };
    final OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
    // test for both constructors that accept a scheme
    new KafkaSpout(scheme).declareOutputFields(declarer);
    new KafkaSpout("topic", scheme).declareOutputFields(declarer);
    // Fields doesn't implement equals; match it manually
    verify(declarer, times(2)).declare(argThat(new ArgumentMatcher<Fields>() {
        @Override
        public boolean matches(final Object argument) {
            final Fields fields = (Fields) argument;
            return fields.size() == 2 && fields.get(0).equals("head") && fields.get(1).equals("tail");
        }
    }));
}
Use of org.apache.storm.topology.OutputFieldsDeclarer in the project flink by apache: class NullTerminatingSpoutTest, method testMethodCalls.
@Test
public void testMethodCalls() {
    // Stub the wrapped spout's configuration so we can assert it is passed through.
    final Map<String, Object> componentConfig = new HashMap<String, Object>();
    final IRichSpout wrappedSpout = mock(IRichSpout.class);
    when(wrappedSpout.getComponentConfiguration()).thenReturn(componentConfig);

    final Map<?, ?> stormConf = mock(Map.class);
    final TopologyContext topologyContext = mock(TopologyContext.class);
    final Object messageId = mock(Object.class);
    final OutputFieldsDeclarer outputDeclarer = mock(OutputFieldsDeclarer.class);

    // Exercise every lifecycle method on the wrapper...
    final NullTerminatingSpout wrapper = new NullTerminatingSpout(wrappedSpout);
    wrapper.open(stormConf, topologyContext, null);
    wrapper.close();
    wrapper.activate();
    wrapper.deactivate();
    wrapper.ack(messageId);
    wrapper.fail(messageId);
    wrapper.declareOutputFields(outputDeclarer);
    final Map<String, Object> returnedConfig = wrappedSpout.getComponentConfiguration();

    // ...and verify each call was forwarded to the wrapped spout exactly once.
    verify(wrappedSpout).open(same(stormConf), same(topologyContext), any(SpoutOutputCollector.class));
    verify(wrappedSpout).close();
    verify(wrappedSpout).activate();
    verify(wrappedSpout).deactivate();
    verify(wrappedSpout).ack(same(messageId));
    verify(wrappedSpout).fail(same(messageId));
    verify(wrappedSpout).declareOutputFields(same(outputDeclarer));
    Assert.assertSame(componentConfig, returnedConfig);
}
Use of org.apache.storm.topology.OutputFieldsDeclarer in the project storm by apache: class IntermediateRankingsBoltTest, method shouldDeclareOutputFields.
@Test
public void shouldDeclareOutputFields() {
    // given: a bolt under test and a mocked declarer to capture its declaration
    final OutputFieldsDeclarer outputDeclarer = mock(OutputFieldsDeclarer.class);
    final IntermediateRankingsBolt rankingsBolt = new IntermediateRankingsBolt();

    // when
    rankingsBolt.declareOutputFields(outputDeclarer);

    // then: exactly one field declaration must have been made
    verify(outputDeclarer, times(1)).declare(any(Fields.class));
}
Use of org.apache.storm.topology.OutputFieldsDeclarer in the project IndyCar by DSC-SPIDAL: class StormTest, method main.
/**
 * Builds and locally runs a demo Storm stream topology simulating IndyCar telemetry:
 * a spout emits random (car, speed, rpm, throttle) tuples, the stream is branched
 * into 33 per-car streams, each metric is mapped to a simulated "anomaly score"
 * (with a random sleep standing in for HTM processing), and the raw values joined
 * with the scores are printed to stdout.
 *
 * @param args unused command-line arguments
 * @throws Exception from topology submission or the blocking sleep
 */
public static void main(String[] args) throws Exception {
    StreamBuilder streamBuilder = new StreamBuilder();
    // start with source: an anonymous spout emitting random telemetry tuples
    Stream<Tuple> sourceStream = streamBuilder.newStream(new BaseRichSpout() {

        private SpoutOutputCollector collector;
        private Random random;

        @Override
        public void open(Map<String, Object> map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
            this.collector = spoutOutputCollector;
            this.random = new Random(System.currentTimeMillis());
        }

        @Override
        public void nextTuple() {
            // car id in [0, 33) plus three random float metrics
            Object[] tuple = new Object[] { this.random.nextInt(33), random.nextFloat(), random.nextFloat(), random.nextFloat() };
            this.collector.emit(Arrays.asList(tuple));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            outputFieldsDeclarer.declare(new Fields("car", "speed", "rpm", "throttle"));
        }
    }, 1);
    // split for 33 cars: one predicate per car id
    Predicate<Tuple>[] branchingPredicates = new Predicate[33];
    for (int i = 0; i < 33; i++) {
        final int index = i;
        branchingPredicates[i] = tuple -> tuple.getInteger(0) == index;
    }
    Stream<Tuple>[] carBranches = sourceStream.branch(branchingPredicates);
    for (Stream<Tuple> carBranch : carBranches) {
        // pair each tuple as (carId, [speed, rpm, throttle])
        PairStream<Integer, List<Float>> carBranchPaired = carBranch.mapToPair((PairFunction<Tuple, Integer, List<Float>>) tuple -> {
            List<Float> values = new ArrayList<>();
            values.add(tuple.getFloat(1));
            values.add(tuple.getFloat(2));
            values.add(tuple.getFloat(3));
            return Pair.of(tuple.getInteger(0), values);
        });
        // car branch has all the raw data
        PairStream joinedStream = carBranchPaired;
        for (int i = 0; i < 3; i++) {
            // effectively-final copy of the loop index for capture in the anonymous class below
            int metricIndex = i;
            PairStream<Integer, Float> anomalyScoreStream = carBranch.mapToPair(new PairFunction<Tuple, Integer, Float>() {

                // here we should initialize htm java and FIFO blocking mechanism should be created
                private Random htm = new Random(System.currentTimeMillis());

                @Override
                public Pair<Integer, Float> apply(Tuple tuple) {
                    // + 1 because field 0 is the car number
                    Float rawData = tuple.getFloat(metricIndex + 1);
                    try {
                        // random sleep to simulate processing time
                        Thread.sleep(htm.nextInt(6));
                    } catch (InterruptedException e) {
                        // restore the interrupt flag instead of swallowing it, so the
                        // worker thread can shut down cleanly
                        Thread.currentThread().interrupt();
                    }
                    return Pair.of(tuple.getInteger(0), rawData + 10000);
                }
            });
            // successively join each metric's anomaly score onto the raw-value list
            joinedStream = joinedStream.join(anomalyScoreStream, new ValueJoiner() {

                @Override
                public Object apply(Object o, Object o2) {
                    List<Float> combined = (List<Float>) o;
                    combined.add((Float) o2);
                    return combined;
                }
            });
        }
        joinedStream.forEach(new Consumer() {

            @Override
            public void accept(Object o) {
                // publish to websockets or MQTT
                System.out.println(o);
            }
        });
    }
    try (LocalCluster cluster = new LocalCluster()) {
        cluster.submitTopology("indycar-stream", Collections.singletonMap(Config.TOPOLOGY_MAX_TASK_PARALLELISM, 33), streamBuilder.build());
        // keep the local cluster alive long enough to observe output
        Thread.sleep(10000000);
    }
}
Aggregations