Example 31 with TopologyContext

use of org.apache.storm.task.TopologyContext in project storm by apache.

the class BoltExecutor method init.

public void init(Map<Integer, Task> idToTask) {
    while (!stormActive.get()) {
        Utils.sleep(100);
    }
    LOG.info("Preparing bolt {}:{}", componentId, idToTask.keySet());
    for (Map.Entry<Integer, Task> entry : idToTask.entrySet()) {
        Task taskData = entry.getValue();
        IBolt boltObject = (IBolt) taskData.getTaskObject();
        TopologyContext userContext = taskData.getUserContext();
        taskData.getBuiltInMetrics().registerAll(stormConf, userContext);
        if (boltObject instanceof ICredentialsListener) {
            ((ICredentialsListener) boltObject).setCredentials(credentials);
        }
        if (Constants.SYSTEM_COMPONENT_ID.equals(componentId)) {
            Map<String, DisruptorQueue> map = ImmutableMap.of("sendqueue", transferQueue, "receive", receiveQueue, "transfer", workerData.getTransferQueue());
            BuiltinMetricsUtil.registerQueueMetrics(map, stormConf, userContext);
            Map cachedNodePortToSocket = (Map) workerData.getCachedNodeToPortSocket().get();
            BuiltinMetricsUtil.registerIconnectionClientMetrics(cachedNodePortToSocket, stormConf, userContext);
            BuiltinMetricsUtil.registerIconnectionServerMetric(workerData.getReceiver(), stormConf, userContext);
        } else {
            Map<String, DisruptorQueue> map = ImmutableMap.of("sendqueue", transferQueue, "receive", receiveQueue);
            BuiltinMetricsUtil.registerQueueMetrics(map, stormConf, userContext);
        }
        IOutputCollector outputCollector = new BoltOutputCollectorImpl(this, taskData, entry.getKey(), rand, hasEventLoggers, isDebug);
        boltObject.prepare(stormConf, userContext, new OutputCollector(outputCollector));
    }
    openOrPrepareWasCalled.set(true);
    LOG.info("Prepared bolt {}:{}", componentId, idToTask.keySet());
    setupMetrics();
}
Also used : IOutputCollector(org.apache.storm.task.IOutputCollector) OutputCollector(org.apache.storm.task.OutputCollector) Task(org.apache.storm.daemon.Task) ICredentialsListener(org.apache.storm.ICredentialsListener) IOutputCollector(org.apache.storm.task.IOutputCollector) DisruptorQueue(org.apache.storm.utils.DisruptorQueue) IBolt(org.apache.storm.task.IBolt) TopologyContext(org.apache.storm.task.TopologyContext) ImmutableMap(com.google.common.collect.ImmutableMap) Map(java.util.Map)
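
For orientation, a minimal sketch (not part of the Storm code base; the class name and behavior are invented) of a bolt that exercises both branches wired up by init() above: because it implements ICredentialsListener, BoltExecutor pushes credentials into it before prepare() hands it the per-task TopologyContext and collector.

import java.util.Map;

import org.apache.storm.ICredentialsListener;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

// Illustrative only; not taken from the Storm sources.
public class CredentialAwareBolt extends BaseRichBolt implements ICredentialsListener {

    private transient OutputCollector collector;
    private transient Map<String, String> credentials;

    @Override
    public void setCredentials(Map<String, String> credentials) {
        // Invoked by BoltExecutor.init() before prepare() whenever credentials are available.
        this.credentials = credentials;
    }

    @Override
    public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
        // The context here is the same userContext on which init() registered the built-in metrics.
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // no output streams in this sketch
    }
}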

Example 32 with TopologyContext

use of org.apache.storm.task.TopologyContext in project flink by apache.

the class NullTerminatingSpoutTest method testMethodCalls.

@Test
public void testMethodCalls() {
    Map<String, Object> compConfig = new HashMap<String, Object>();
    IRichSpout spoutMock = mock(IRichSpout.class);
    when(spoutMock.getComponentConfiguration()).thenReturn(compConfig);
    Map<?, ?> conf = mock(Map.class);
    TopologyContext context = mock(TopologyContext.class);
    Object msgId = mock(Object.class);
    OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
    NullTerminatingSpout spout = new NullTerminatingSpout(spoutMock);
    spout.open(conf, context, null);
    spout.close();
    spout.activate();
    spout.deactivate();
    spout.ack(msgId);
    spout.fail(msgId);
    spout.declareOutputFields(declarer);
    Map<String, Object> c = spoutMock.getComponentConfiguration();
    verify(spoutMock).open(same(conf), same(context), any(SpoutOutputCollector.class));
    verify(spoutMock).close();
    verify(spoutMock).activate();
    verify(spoutMock).deactivate();
    verify(spoutMock).ack(same(msgId));
    verify(spoutMock).fail(same(msgId));
    verify(spoutMock).declareOutputFields(same(declarer));
    Assert.assertSame(compConfig, c);
}
Also used : IRichSpout(org.apache.storm.topology.IRichSpout) HashMap(java.util.HashMap) SpoutOutputCollector(org.apache.storm.spout.SpoutOutputCollector) OutputFieldsDeclarer(org.apache.storm.topology.OutputFieldsDeclarer) TopologyContext(org.apache.storm.task.TopologyContext) Test(org.junit.Test)
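
The same Mockito pattern can drive any component against a fake TopologyContext. A minimal sketch, with an invented test class name and stubbed values:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.storm.task.TopologyContext;
import org.junit.Assert;
import org.junit.Test;

// Illustrative only; not part of the Flink test suite.
public class TopologyContextMockSketch {

    @Test
    public void stubbedContextAnswersBasicQueries() {
        TopologyContext context = mock(TopologyContext.class);
        when(context.getThisComponentId()).thenReturn("spout1");
        when(context.getThisTaskIndex()).thenReturn(2);

        // The mock can now be handed to a spout's open(conf, context, collector)
        // or a bolt's prepare(...) exactly as in the test above.
        Assert.assertEquals("spout1", context.getThisComponentId());
        Assert.assertEquals(2, context.getThisTaskIndex());
    }
}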

Example 33 with TopologyContext

use of org.apache.storm.task.TopologyContext in project flink by apache.

the class WrapperSetupInLocalClusterTest method testCreateTopologyContext.

@Test
public void testCreateTopologyContext() {
    HashMap<String, Integer> dops = new HashMap<String, Integer>();
    dops.put("spout1", 1);
    dops.put("spout2", 3);
    dops.put("bolt1", 1);
    dops.put("bolt2", 2);
    dops.put("sink", 1);
    HashMap<String, Integer> taskCounter = new HashMap<String, Integer>();
    taskCounter.put("spout1", 0);
    taskCounter.put("spout2", 0);
    taskCounter.put("bolt1", 0);
    taskCounter.put("bolt2", 0);
    taskCounter.put("sink", 0);
    HashMap<String, IComponent> operators = new HashMap<String, IComponent>();
    operators.put("spout1", new TestDummySpout());
    operators.put("spout2", new TestDummySpout());
    operators.put("bolt1", new TestDummyBolt());
    operators.put("bolt2", new TestDummyBolt());
    operators.put("sink", new TestSink());
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("spout1", (IRichSpout) operators.get("spout1"), dops.get("spout1"));
    builder.setSpout("spout2", (IRichSpout) operators.get("spout2"), dops.get("spout2"));
    builder.setBolt("bolt1", (IRichBolt) operators.get("bolt1"), dops.get("bolt1")).shuffleGrouping("spout1");
    builder.setBolt("bolt2", (IRichBolt) operators.get("bolt2"), dops.get("bolt2")).allGrouping("spout2");
    builder.setBolt("sink", (IRichBolt) operators.get("sink"), dops.get("sink")).shuffleGrouping("bolt1", TestDummyBolt.groupingStreamId).shuffleGrouping("bolt1", TestDummyBolt.shuffleStreamId).shuffleGrouping("bolt2", TestDummyBolt.groupingStreamId).shuffleGrouping("bolt2", TestDummyBolt.shuffleStreamId);
    LocalCluster cluster = new LocalCluster();
    Config c = new Config();
    c.setNumAckers(0);
    cluster.submitTopology("test", c, builder.createTopology());
    while (TestSink.result.size() != 8) {
        Utils.sleep(100);
    }
    cluster.shutdown();
    final FlinkTopology flinkBuilder = FlinkTopology.createTopology(builder);
    StormTopology stormTopology = flinkBuilder.getStormTopology();
    Set<Integer> taskIds = new HashSet<Integer>();
    for (TopologyContext expectedContext : TestSink.result) {
        final String thisComponentId = expectedContext.getThisComponentId();
        int index = taskCounter.get(thisComponentId);
        StreamingRuntimeContext context = mock(StreamingRuntimeContext.class);
        when(context.getTaskName()).thenReturn(thisComponentId);
        when(context.getNumberOfParallelSubtasks()).thenReturn(dops.get(thisComponentId));
        when(context.getIndexOfThisSubtask()).thenReturn(index);
        taskCounter.put(thisComponentId, ++index);
        Config stormConfig = new Config();
        stormConfig.put(WrapperSetupHelper.TOPOLOGY_NAME, "test");
        TopologyContext topologyContext = WrapperSetupHelper.createTopologyContext(context, operators.get(thisComponentId), thisComponentId, stormTopology, stormConfig);
        ComponentCommon expectedCommon = expectedContext.getComponentCommon(thisComponentId);
        ComponentCommon common = topologyContext.getComponentCommon(thisComponentId);
        Assert.assertNull(topologyContext.getCodeDir());
        Assert.assertNull(common.get_json_conf());
        Assert.assertNull(topologyContext.getExecutorData(null));
        Assert.assertNull(topologyContext.getPIDDir());
        Assert.assertNull(topologyContext.getResource(null));
        Assert.assertNull(topologyContext.getSharedExecutor());
        Assert.assertNull(expectedContext.getTaskData(null));
        Assert.assertNull(topologyContext.getThisWorkerPort());
        Assert.assertTrue(expectedContext.getStormId().startsWith(topologyContext.getStormId()));
        Assert.assertEquals(expectedCommon.get_inputs(), common.get_inputs());
        Assert.assertEquals(expectedCommon.get_parallelism_hint(), common.get_parallelism_hint());
        Assert.assertEquals(expectedCommon.get_streams(), common.get_streams());
        Assert.assertEquals(expectedContext.getComponentIds(), topologyContext.getComponentIds());
        Assert.assertEquals(expectedContext.getComponentStreams(thisComponentId), topologyContext.getComponentStreams(thisComponentId));
        Assert.assertEquals(thisComponentId, topologyContext.getThisComponentId());
        Assert.assertEquals(expectedContext.getThisSources(), topologyContext.getThisSources());
        Assert.assertEquals(expectedContext.getThisStreams(), topologyContext.getThisStreams());
        Assert.assertEquals(expectedContext.getThisTargets(), topologyContext.getThisTargets());
        Assert.assertEquals(0, topologyContext.getThisWorkerTasks().size());
        for (int taskId : topologyContext.getComponentTasks(thisComponentId)) {
            Assert.assertEquals(thisComponentId, topologyContext.getComponentId(taskId));
        }
        for (String componentId : expectedContext.getComponentIds()) {
            Assert.assertEquals(expectedContext.getSources(componentId), topologyContext.getSources(componentId));
            Assert.assertEquals(expectedContext.getTargets(componentId), topologyContext.getTargets(componentId));
            for (String streamId : expectedContext.getComponentStreams(componentId)) {
                Assert.assertEquals(expectedContext.getComponentOutputFields(componentId, streamId).toList(), topologyContext.getComponentOutputFields(componentId, streamId).toList());
            }
        }
        for (String streamId : expectedContext.getThisStreams()) {
            Assert.assertEquals(expectedContext.getThisOutputFields(streamId).toList(), topologyContext.getThisOutputFields(streamId).toList());
        }
        HashMap<Integer, String> taskToComponents = new HashMap<Integer, String>();
        Set<Integer> allTaskIds = new HashSet<Integer>();
        for (String componentId : expectedContext.getComponentIds()) {
            List<Integer> possibleTasks = expectedContext.getComponentTasks(componentId);
            List<Integer> tasks = topologyContext.getComponentTasks(componentId);
            Iterator<Integer> p_it = possibleTasks.iterator();
            Iterator<Integer> t_it = tasks.iterator();
            while (p_it.hasNext()) {
                Assert.assertTrue(t_it.hasNext());
                Assert.assertNull(taskToComponents.put(p_it.next(), componentId));
                Assert.assertTrue(allTaskIds.add(t_it.next()));
            }
            Assert.assertFalse(t_it.hasNext());
        }
        Assert.assertEquals(taskToComponents, expectedContext.getTaskToComponent());
        Assert.assertTrue(taskIds.add(topologyContext.getThisTaskId()));
        try {
            topologyContext.getHooks();
            Assert.fail();
        } catch (UnsupportedOperationException e) {
        /* expected */
        }
        try {
            topologyContext.getRegisteredMetricByName(null);
            Assert.fail();
        } catch (UnsupportedOperationException e) {
        /* expected */
        }
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) StreamingRuntimeContext(org.apache.flink.streaming.api.operators.StreamingRuntimeContext) TopologyBuilder(org.apache.storm.topology.TopologyBuilder) IComponent(org.apache.storm.topology.IComponent) Config(org.apache.storm.Config) StormTopology(org.apache.storm.generated.StormTopology) TestDummySpout(org.apache.flink.storm.util.TestDummySpout) TestSink(org.apache.flink.storm.util.TestSink) TopologyContext(org.apache.storm.task.TopologyContext) IRichBolt(org.apache.storm.topology.IRichBolt) ComponentCommon(org.apache.storm.generated.ComponentCommon) FlinkTopology(org.apache.flink.storm.api.FlinkTopology) TestDummyBolt(org.apache.flink.storm.util.TestDummyBolt) Test(org.junit.Test) AbstractTest(org.apache.flink.storm.util.AbstractTest)
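
All of the assertions above go through TopologyContext's read-only accessors. As a hedged illustration (the helper class is invented, not part of Flink or Storm), the same queries can be collapsed into a one-line summary of a component's wiring:

import java.util.Map;

import org.apache.storm.generated.GlobalStreamId;
import org.apache.storm.generated.Grouping;
import org.apache.storm.task.TopologyContext;

// Illustrative helper; the class name is invented.
public final class TopologyContextSummary {

    private TopologyContextSummary() {
    }

    public static String describe(TopologyContext context) {
        String componentId = context.getThisComponentId();
        // Upstream streams feeding this component, with their groupings.
        Map<GlobalStreamId, Grouping> sources = context.getThisSources();
        // Downstream consumers, keyed by this component's output stream id.
        Map<String, Map<String, Grouping>> targets = context.getThisTargets();
        return componentId
                + " tasks=" + context.getComponentTasks(componentId)
                + " sources=" + sources.keySet()
                + " targetStreams=" + targets.keySet();
    }
}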

Example 34 with TopologyContext

use of org.apache.storm.task.TopologyContext in project heron by twitter.

the class IRichBoltDelegate method prepare.

@Override
@SuppressWarnings("rawtypes")
public void prepare(Map conf, com.twitter.heron.api.topology.TopologyContext context, com.twitter.heron.api.bolt.OutputCollector collector) {
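    // Wrap Heron's native context and collector in their Storm-compatible
    // counterparts, so the delegated bolt only ever sees org.apache.storm types.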
    topologyContextImpl = new TopologyContext(context);
    outputCollectorImpl = new OutputCollectorImpl(collector);
    delegate.prepare(conf, topologyContextImpl, outputCollectorImpl);
}
Also used : TopologyContext(org.apache.storm.task.TopologyContext) OutputCollectorImpl(org.apache.storm.task.OutputCollectorImpl)

Example 35 with TopologyContext

use of org.apache.storm.task.TopologyContext in project heron by twitter.

the class ITaskHookDelegate method prepare.

@Override
public void prepare(Map<String, Object> newConf, com.twitter.heron.api.topology.TopologyContext context) {
    this.conf = newConf;
    if (!newConf.containsKey(Config.STORMCOMPAT_TOPOLOGY_AUTO_TASK_HOOKS)) {
        throw new RuntimeException("StormCompat Translation not done for task hooks");
    }
    List<String> hookClassNames = TypeUtils.getListOfStrings(newConf.get(Config.STORMCOMPAT_TOPOLOGY_AUTO_TASK_HOOKS));
    for (String className : hookClassNames) {
        ITaskHook hook;
        try {
            hook = (ITaskHook) Class.forName(className).newInstance();
        } catch (ClassNotFoundException ex) {
            throw new RuntimeException(ex + " ITaskHook class must be in class path.");
        } catch (InstantiationException ex) {
            throw new RuntimeException(ex + " ITaskHook class must be concrete.");
        } catch (IllegalAccessException ex) {
            throw new RuntimeException(ex + " ITaskHook class must have a no-arg constructor.");
        }
        hooks.add(hook);
    }
    // Invoke the prepare() for all ITaskHooks
    TopologyContext ctxt = new TopologyContext(context);
    for (ITaskHook hook : hooks) {
        hook.prepare(newConf, ctxt);
    }
}
Also used : TopologyContext(org.apache.storm.task.TopologyContext)
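
For reference, a hedged sketch of a hook that satisfies the reflective-construction requirements checked above (public, concrete, no-arg constructor). The class name is invented, and it assumes the compatibility layer provides org.apache.storm.hooks.BaseTaskHook with no-op defaults, as Apache Storm itself does:

import java.util.Map;

import org.apache.storm.hooks.BaseTaskHook;
import org.apache.storm.task.TopologyContext;

// Illustrative only. A hook must be public, concrete, and have a no-arg
// constructor so the Class.forName(className).newInstance() call above can build it.
public class LoggingTaskHook extends BaseTaskHook {

    @Override
    public void prepare(Map conf, TopologyContext context) {
        System.out.println("task hook prepared for task " + context.getThisTaskId());
    }

    @Override
    public void cleanup() {
        System.out.println("task hook cleaned up");
    }
}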

Aggregations

TopologyContext (org.apache.storm.task.TopologyContext): 62
Test (org.junit.Test): 29
HashMap (java.util.HashMap): 25
OutputCollector (org.apache.storm.task.OutputCollector): 19
Tuple (org.apache.storm.tuple.Tuple): 16
SpoutOutputCollector (org.apache.storm.spout.SpoutOutputCollector): 15
Map (java.util.Map): 14
GlobalStreamId (org.apache.storm.generated.GlobalStreamId): 8
ClientConfiguration (org.apache.pulsar.client.api.ClientConfiguration): 7
Test (org.testng.annotations.Test): 7
WriterConfiguration (org.apache.metron.common.configuration.writer.WriterConfiguration): 6
BulkWriterResponse (org.apache.metron.common.writer.BulkWriterResponse): 6
Collections (java.util.Collections): 5
Grouping (org.apache.storm.generated.Grouping): 5
StormTopology (org.apache.storm.generated.StormTopology): 5
GeneralTopologyContext (org.apache.storm.task.GeneralTopologyContext): 5
OutputCollectorImpl (org.apache.storm.task.OutputCollectorImpl): 5
IRichBolt (org.apache.storm.topology.IRichBolt): 5
IRichSpout (org.apache.storm.topology.IRichSpout): 5
ParserConfigurations (org.apache.metron.common.configuration.ParserConfigurations): 4