Use of org.apache.storm.task.TopologyContext in project storm by apache: class KafkaBoltTest, method testSimpleWithError.
@Test
public void testSimpleWithError() {
    MockProducer<String, String> producer = new MockProducer<>(Cluster.empty(), false, null, null, null);
    KafkaBolt<String, String> bolt = makeBolt(producer);
    OutputCollector collector = mock(OutputCollector.class);
    TopologyContext context = mock(TopologyContext.class);
    Map<String, Object> conf = new HashMap<>();
    bolt.prepare(conf, context, collector);
    String key = "KEY";
    String value = "VALUE";
    Tuple testTuple = createTestTuple(key, value);
    bolt.execute(testTuple);
    assertThat(producer.history().size(), is(1));
    ProducerRecord<String, String> arg = producer.history().get(0);
    LOG.info("GOT {} ->", arg);
    LOG.info("{}, {}, {}", arg.topic(), arg.key(), arg.value());
    assertThat(arg.topic(), is("MY_TOPIC"));
    assertThat(arg.key(), is(key));
    assertThat(arg.value(), is(value));
    // Force a send error
    KafkaException ex = new KafkaException();
    producer.errorNext(ex);
    verify(collector).reportError(ex);
    verify(collector).fail(testTuple);
}
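The makeBolt and createTestTuple helpers are defined elsewhere in KafkaBoltTest and are not shown above. A minimal sketch of what makeBolt might look like, assuming KafkaBolt exposes an overridable mkProducer factory hook and a withTopicSelector configuration method (both assumptions about the storm-kafka-client API), with imports for Producer, Properties, and DefaultTopicSelector taken as given:

// Sketch only: injects the MockProducer by overriding the bolt's producer factory
// and routes every tuple to the topic the test asserts on ("MY_TOPIC").
private KafkaBolt<String, String> makeBolt(MockProducer<String, String> producer) {
    KafkaBolt<String, String> bolt = new KafkaBolt<String, String>() {
        @Override
        protected Producer<String, String> mkProducer(Properties props) {
            // Bypass a real Kafka connection in the unit test.
            return producer;
        }
    };
    bolt.withTopicSelector(new DefaultTopicSelector("MY_TOPIC"));
    return bolt;
}

Because the MockProducer is created with autoComplete set to false, the sends stay pending until the test explicitly completes them, which is what lets producer.errorNext(ex) simulate the failed send that the bolt must report and fail.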
Use of org.apache.storm.task.TopologyContext in project storm by apache: class RoundRobinManualPartitionerTest, method testRoundRobinPartitioning.
@Test
public void testRoundRobinPartitioning() {
    List<TopicPartition> allPartitions = new ArrayList<>();
    for (int i = 0; i < 11; i++) {
        allPartitions.add(createTp(i));
    }
    List<TopologyContext> contextMocks = new ArrayList<>();
    String thisComponentId = "A spout";
    List<Integer> allTasks = Arrays.asList(new Integer[] { 0, 1, 2 });
    for (int i = 0; i < 3; i++) {
        TopologyContext contextMock = mock(TopologyContext.class);
        when(contextMock.getThisTaskIndex()).thenReturn(i);
        when(contextMock.getThisComponentId()).thenReturn(thisComponentId);
        when(contextMock.getComponentTasks(thisComponentId)).thenReturn(allTasks);
        contextMocks.add(contextMock);
    }
    RoundRobinManualPartitioner partitioner = new RoundRobinManualPartitioner();
    Set<TopicPartition> partitionsForFirstTask = partitioner.getPartitionsForThisTask(allPartitions, contextMocks.get(0));
    assertThat(partitionsForFirstTask, is(partitionsToTps(new int[] { 0, 3, 6, 9 })));
    Set<TopicPartition> partitionsForSecondTask = partitioner.getPartitionsForThisTask(allPartitions, contextMocks.get(1));
    assertThat(partitionsForSecondTask, is(partitionsToTps(new int[] { 1, 4, 7, 10 })));
    Set<TopicPartition> partitionsForThirdTask = partitioner.getPartitionsForThisTask(allPartitions, contextMocks.get(2));
    assertThat(partitionsForThirdTask, is(partitionsToTps(new int[] { 2, 5, 8 })));
}
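The assertions amount to assigning partition index i of the 11 partitions to task i % 3, giving the 4/4/3 split checked above. The createTp and partitionsToTps helpers are not shown; a plausible sketch, assuming they build org.apache.kafka.common.TopicPartition instances for a single test topic (the topic name "test" is an assumption):

// Sketch only: helpers for constructing the expected TopicPartition sets.
private TopicPartition createTp(int partition) {
    return new TopicPartition("test", partition);
}

private Set<TopicPartition> partitionsToTps(int[] partitions) {
    Set<TopicPartition> tps = new HashSet<>();
    for (int partition : partitions) {
        tps.add(createTp(partition));
    }
    return tps;
}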
Use of org.apache.storm.task.TopologyContext in project storm by apache: class BoltExecutor, method init.
public void init(ArrayList<Task> idToTask, int idToTaskBase) throws InterruptedException {
    executorTransfer.initLocalRecvQueues();
    workerReady.await();
    while (!stormActive.get()) {
        // Topology may be deployed in deactivated mode, wait for activation
        Utils.sleepNoSimulation(100);
    }
    LOG.info("Preparing bolt {}:{}", componentId, getTaskIds());
    for (Task taskData : idToTask) {
        if (taskData == null) {
            // This happens if the min id is too small
            continue;
        }
        IBolt boltObject = (IBolt) taskData.getTaskObject();
        TopologyContext userContext = taskData.getUserContext();
        if (boltObject instanceof ICredentialsListener) {
            ((ICredentialsListener) boltObject).setCredentials(credentials);
        }
        if (Constants.SYSTEM_COMPONENT_ID.equals(componentId)) {
            BuiltinMetricsUtil.registerIconnectionServerMetric(workerData.getReceiver(), topoConf, userContext);
            // add any autocredential expiry metrics from the worker
            if (workerData.getAutoCredentials() != null) {
                for (IAutoCredentials autoCredential : workerData.getAutoCredentials()) {
                    if (autoCredential instanceof IMetricsRegistrant) {
                        IMetricsRegistrant registrant = (IMetricsRegistrant) autoCredential;
                        registrant.registerMetrics(userContext, topoConf);
                    }
                }
            }
        }
        this.outputCollector = new BoltOutputCollectorImpl(this, taskData, rand, hasEventLoggers, ackingEnabled, isDebug);
        boltObject.prepare(topoConf, userContext, new OutputCollector(outputCollector));
    }
    openOrPrepareWasCalled.set(true);
    LOG.info("Prepared bolt {}:{}", componentId, taskIds);
    setupTicks(false);
    setupMetrics();
}
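Note that init() hands the current credentials to any bolt implementing ICredentialsListener before its prepare() method runs, using each task's own TopologyContext. A minimal sketch of a user bolt that opts into this hook (the class and field names are illustrative, not from the Storm codebase):

// Illustrative only: a bolt that receives credentials from the executor via
// ICredentialsListener.setCredentials, which is called before prepare() and
// again whenever the topology's credentials are renewed.
public class CredentialsAwareBolt extends BaseRichBolt implements ICredentialsListener {
    private Map<String, String> credentials;
    private OutputCollector collector;

    @Override
    public void setCredentials(Map<String, String> credentials) {
        this.credentials = credentials;
    }

    @Override
    public void prepare(Map<String, Object> topoConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // Use this.credentials as needed before acking.
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // No output streams.
    }
}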
Use of org.apache.storm.task.TopologyContext in project storm by apache: class StreamBuilderTest, method testMultiPartitionByKeyWithRepartition.
@Test
public void testMultiPartitionByKeyWithRepartition() {
    TopologyContext mockContext = Mockito.mock(TopologyContext.class);
    OutputCollector mockCollector = Mockito.mock(OutputCollector.class);
    Map<GlobalStreamId, Grouping> expected = new HashMap<>();
    expected.put(new GlobalStreamId("bolt2", "s3"), Grouping.fields(Collections.singletonList("key")));
    expected.put(new GlobalStreamId("bolt2", "s3__punctuation"), Grouping.all(new NullStruct()));
    Stream<Integer> stream = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID), new ValueMapper<>(0));
    stream.mapToPair(x -> Pair.of(x, x))
          .window(TumblingWindows.of(BaseWindowedBolt.Count.of(10)))
          .reduceByKey((x, y) -> x + y)
          .repartition(10)
          .reduceByKey((x, y) -> 0)
          .print();
    StormTopology topology = streamBuilder.build();
    assertEquals(3, topology.get_bolts_size());
    assertEquals(expected, topology.get_bolts().get("bolt3").get_common().get_inputs());
}
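The newSpout helper is defined elsewhere in StreamBuilderTest. A rough sketch, assuming it returns a mocked IRichSpout that declares a single output field on the requested stream (the field name "value" is an assumption):

// Sketch only: a mock spout whose declareOutputFields declares the given stream,
// so ValueMapper<>(0) can extract the first value from each emitted tuple.
private static IRichSpout newSpout(String streamId) {
    IRichSpout spout = Mockito.mock(IRichSpout.class);
    Mockito.doAnswer(invocation -> {
        OutputFieldsDeclarer declarer = invocation.getArgument(0);
        declarer.declareStream(streamId, new Fields("value"));
        return null;
    }).when(spout).declareOutputFields(Mockito.any(OutputFieldsDeclarer.class));
    return spout;
}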
Use of org.apache.storm.task.TopologyContext in project storm by apache: class StreamBuilderTest, method testBranchAndJoin.
@Test
public void testBranchAndJoin() throws Exception {
    TopologyContext mockContext = Mockito.mock(TopologyContext.class);
    OutputCollector mockCollector = Mockito.mock(OutputCollector.class);
    Stream<Integer> stream = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID), new ValueMapper<>(0), 2);
    Stream<Integer>[] streams = stream.branch(x -> x % 2 == 0, x -> x % 2 == 1);
    PairStream<Integer, Pair<Integer, Integer>> joined =
        streams[0].mapToPair(x -> Pair.of(x, 1))
                  .join(streams[1].mapToPair(x -> Pair.of(x, 1)));
    assertTrue(joined.getNode() instanceof ProcessorNode);
    StormTopology topology = streamBuilder.build();
    assertEquals(2, topology.get_bolts_size());
}
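The test only verifies the shape of the built topology; the joined stream is never consumed. A hedged sketch of how it could be terminated with a sink before building, mirroring the print() sink used in testMultiPartitionByKeyWithRepartition:

// Hypothetical continuation, not part of the original test: attach a sink so the
// joined pairs (key -> Pair of left and right values) are consumed at runtime.
// This would go before the streamBuilder.build() call above.
joined.print();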