
Example 6 with Bolt

use of org.apache.storm.generated.Bolt in project incubator-atlas by apache.

the class StormTopologyUtil method getTerminalUserBoltNames.

public static Set<String> getTerminalUserBoltNames(StormTopology topology) {
    Set<String> terminalBolts = new HashSet<>();
    Set<String> inputs = new HashSet<>();
    // Collect every component id that feeds a user (non-system) bolt.
    for (Map.Entry<String, Bolt> entry : topology.get_bolts().entrySet()) {
        String name = entry.getKey();
        Set<GlobalStreamId> inputsForBolt = entry.getValue().get_common().get_inputs().keySet();
        if (!isSystemComponent(name)) {
            for (GlobalStreamId streamId : inputsForBolt) {
                inputs.add(streamId.get_componentId());
            }
        }
    }
    // A user bolt that appears in no other user bolt's inputs is terminal.
    for (String boltName : topology.get_bolts().keySet()) {
        if (!isSystemComponent(boltName) && !inputs.contains(boltName)) {
            terminalBolts.add(boltName);
        }
    }
    return terminalBolts;
}
Also used : GlobalStreamId(org.apache.storm.generated.GlobalStreamId) Bolt(org.apache.storm.generated.Bolt) HashMap(java.util.HashMap) Map(java.util.Map) HashSet(java.util.HashSet)
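
A usage sketch (not part of the excerpt above): a terminal bolt is simply a user bolt that no other user bolt lists among its inputs. MySpout and MyBolt below are hypothetical stand-in components, and StormTopologyUtil is the Atlas class shown above (its import is omitted here).

import java.util.Set;

import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;

public class TerminalBoltSketch {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("events", new MySpout());                            // hypothetical spout
        builder.setBolt("parse", new MyBolt(), 2).shuffleGrouping("events");  // hypothetical bolt
        builder.setBolt("persist", new MyBolt(), 1).shuffleGrouping("parse");

        StormTopology topology = builder.createTopology();
        // "parse" feeds "persist", so only "persist" has no downstream consumer.
        Set<String> terminals = StormTopologyUtil.getTerminalUserBoltNames(topology);
        System.out.println(terminals); // expected: [persist]
    }
}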

Example 7 with Bolt

use of org.apache.storm.generated.Bolt in project flink by apache.

the class WrapperSetupHelper method createTopologyContext.

/**
	 * Creates a {@link TopologyContext} for a Spout or Bolt instance (i.e., a Flink task / Storm executor).
	 * 
	 * @param context
	 *            The Flink runtime context.
	 * @param spoutOrBolt
	 *            The Spout or Bolt this context is created for.
	 * @param stormTopology
	 *            The original Storm topology.
	 * @param stormConfig
	 *            The user provided configuration.
	 * @return The created {@link TopologyContext}.
	 */
@SuppressWarnings({ "rawtypes", "unchecked" })
static synchronized TopologyContext createTopologyContext(final StreamingRuntimeContext context, final IComponent spoutOrBolt, final String operatorName, StormTopology stormTopology, final Map stormConfig) {
    final int dop = context.getNumberOfParallelSubtasks();
    final Map<Integer, String> taskToComponents = new HashMap<Integer, String>();
    final Map<String, List<Integer>> componentToSortedTasks = new HashMap<String, List<Integer>>();
    final Map<String, Map<String, Fields>> componentToStreamToFields = new HashMap<String, Map<String, Fields>>();
    String stormId = (String) stormConfig.get(TOPOLOGY_NAME);
    // not supported
    String codeDir = null;
    // not supported
    String pidDir = null;
    Integer taskId = -1;
    // not supported
    Integer workerPort = null;
    List<Integer> workerTasks = new ArrayList<Integer>();
    final Map<String, Object> defaultResources = new HashMap<String, Object>();
    final Map<String, Object> userResources = new HashMap<String, Object>();
    final Map<String, Object> executorData = new HashMap<String, Object>();
    final Map registeredMetrics = new HashMap();
    Atom openOrPrepareWasCalled = null;
    if (stormTopology == null) {
        // embedded mode
        ComponentCommon common = new ComponentCommon();
        common.set_parallelism_hint(dop);
        HashMap<String, SpoutSpec> spouts = new HashMap<String, SpoutSpec>();
        HashMap<String, Bolt> bolts = new HashMap<String, Bolt>();
        if (spoutOrBolt instanceof IRichSpout) {
            spouts.put(operatorName, new SpoutSpec(null, common));
        } else {
            assert (spoutOrBolt instanceof IRichBolt);
            bolts.put(operatorName, new Bolt(null, common));
        }
        stormTopology = new StormTopology(spouts, bolts, new HashMap<String, StateSpoutSpec>());
        List<Integer> sortedTasks = new ArrayList<Integer>(dop);
        for (int i = 1; i <= dop; ++i) {
            taskToComponents.put(i, operatorName);
            sortedTasks.add(i);
        }
        componentToSortedTasks.put(operatorName, sortedTasks);
        SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
        spoutOrBolt.declareOutputFields(declarer);
        componentToStreamToFields.put(operatorName, declarer.outputStreams);
    } else {
        // whole topology is built (i.e. FlinkTopology is used)
        Map<String, SpoutSpec> spouts = stormTopology.get_spouts();
        Map<String, Bolt> bolts = stormTopology.get_bolts();
        Map<String, StateSpoutSpec> stateSpouts = stormTopology.get_state_spouts();
        // 'tid' is a static task-id counter declared on WrapperSetupHelper (not shown in this
        // excerpt); processSingleOperator uses it to assign consecutive task ids per component.
        tid = 1;
        for (Entry<String, SpoutSpec> spout : spouts.entrySet()) {
            Integer rc = processSingleOperator(spout.getKey(), spout.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
            if (rc != null) {
                taskId = rc;
            }
        }
        for (Entry<String, Bolt> bolt : bolts.entrySet()) {
            Integer rc = processSingleOperator(bolt.getKey(), bolt.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
            if (rc != null) {
                taskId = rc;
            }
        }
        for (Entry<String, StateSpoutSpec> stateSpout : stateSpouts.entrySet()) {
            Integer rc = processSingleOperator(stateSpout.getKey(), stateSpout.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
            if (rc != null) {
                taskId = rc;
            }
        }
        assert (taskId != null);
    }
    if (!stormConfig.containsKey(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)) {
        // Storm default value
        stormConfig.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30);
    }
    return new FlinkTopologyContext(stormTopology, stormConfig, taskToComponents, componentToSortedTasks, componentToStreamToFields, stormId, codeDir, pidDir, taskId, workerPort, workerTasks, defaultResources, userResources, executorData, registeredMetrics, openOrPrepareWasCalled);
}
Also used : HashMap(java.util.HashMap) StormTopology(org.apache.storm.generated.StormTopology) ArrayList(java.util.ArrayList) StateSpoutSpec(org.apache.storm.generated.StateSpoutSpec) List(java.util.List) ComponentCommon(org.apache.storm.generated.ComponentCommon) IRichBolt(org.apache.storm.topology.IRichBolt) Bolt(org.apache.storm.generated.Bolt) Atom(clojure.lang.Atom) Fields(org.apache.storm.tuple.Fields) IRichSpout(org.apache.storm.topology.IRichSpout) SpoutSpec(org.apache.storm.generated.SpoutSpec) Map(java.util.Map)
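
The interesting branch above is the embedded mode, where a one-component StormTopology is synthesized around the single Spout or Bolt being wrapped. Below is a minimal standalone sketch of that construction using only the Thrift-generated classes; the operator name and parallelism are illustrative values, not taken from the excerpt.

import java.util.HashMap;

import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.ComponentCommon;
import org.apache.storm.generated.SpoutSpec;
import org.apache.storm.generated.StateSpoutSpec;
import org.apache.storm.generated.StormTopology;

public class EmbeddedTopologySketch {
    public static StormTopology singleBoltTopology(String operatorName, int dop) {
        ComponentCommon common = new ComponentCommon();
        common.set_parallelism_hint(dop);

        HashMap<String, SpoutSpec> spouts = new HashMap<>();
        HashMap<String, Bolt> bolts = new HashMap<>();
        // The serialized component object is left null, exactly as in the
        // embedded-mode branch of createTopologyContext above.
        bolts.put(operatorName, new Bolt(null, common));

        return new StormTopology(spouts, bolts, new HashMap<String, StateSpoutSpec>());
    }
}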

Example 8 with Bolt

use of org.apache.storm.generated.Bolt in project storm by apache.

the class StormCommon method addSystemComponents.

@SuppressWarnings("unused")
public static void addSystemComponents(Map conf, StormTopology topology) {
    Map<String, StreamInfo> outputStreams = new HashMap<>();
    outputStreams.put(Constants.SYSTEM_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("rate_secs")));
    outputStreams.put(Constants.METRICS_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("interval")));
    outputStreams.put(Constants.CREDENTIALS_CHANGED_STREAM_ID, Thrift.outputFields(Arrays.asList("creds")));
    Map<String, Object> boltConf = new HashMap<>();
    boltConf.put(Config.TOPOLOGY_TASKS, 0);
    Bolt systemBoltSpec = Thrift.prepareSerializedBoltDetails(null, new SystemBolt(), outputStreams, 0, boltConf);
    topology.put_to_bolts(Constants.SYSTEM_COMPONENT_ID, systemBoltSpec);
}
Also used : HashMap(java.util.HashMap) StreamInfo(org.apache.storm.generated.StreamInfo) Bolt(org.apache.storm.generated.Bolt) MetricsConsumerBolt(org.apache.storm.metric.MetricsConsumerBolt) IBolt(org.apache.storm.task.IBolt) EventLoggerBolt(org.apache.storm.metric.EventLoggerBolt) SystemBolt(org.apache.storm.metric.SystemBolt)
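
A small sketch of how the effect of addSystemComponents could be observed. It assumes an already-built StormTopology and that StormCommon lives in org.apache.storm.daemon, as in this codebase; the inspection code itself is illustrative.

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.Constants;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.StormTopology;

public class SystemComponentsSketch {
    public static void addAndInspect(StormTopology topology) {
        Map<String, Object> conf = new HashMap<>();
        StormCommon.addSystemComponents(conf, topology);

        // The system bolt is registered under Constants.SYSTEM_COMPONENT_ID and
        // declares the tick, metrics-tick and credentials streams, but with zero tasks.
        Bolt systemBolt = topology.get_bolts().get(Constants.SYSTEM_COMPONENT_ID);
        System.out.println(systemBolt.get_common().get_streams().keySet());
    }
}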

Example 9 with Bolt

use of org.apache.storm.generated.Bolt in project storm by apache.

the class StormCommon method addEventLogger.

public static void addEventLogger(Map conf, StormTopology topology) {
    Integer numExecutors = Utils.getInt(conf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS), Utils.getInt(conf.get(Config.TOPOLOGY_WORKERS)));
    HashMap<String, Object> componentConf = new HashMap<>();
    componentConf.put(Config.TOPOLOGY_TASKS, numExecutors);
    componentConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Utils.getInt(conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)));
    Bolt eventLoggerBolt = Thrift.prepareSerializedBoltDetails(eventLoggerInputs(topology), new EventLoggerBolt(), null, numExecutors, componentConf);
    for (Object component : allComponents(topology).values()) {
        ComponentCommon common = getComponentCommon(component);
        common.put_to_streams(EVENTLOGGER_STREAM_ID, Thrift.outputFields(eventLoggerBoltFields()));
    }
    topology.put_to_bolts(EVENTLOGGER_COMPONENT_ID, eventLoggerBolt);
}
Also used : ComponentCommon(org.apache.storm.generated.ComponentCommon) HashMap(java.util.HashMap) EventLoggerBolt(org.apache.storm.metric.EventLoggerBolt) Bolt(org.apache.storm.generated.Bolt) MetricsConsumerBolt(org.apache.storm.metric.MetricsConsumerBolt) IBolt(org.apache.storm.task.IBolt) SystemBolt(org.apache.storm.metric.SystemBolt)
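
The executor count for the event logger falls back to TOPOLOGY_WORKERS when TOPOLOGY_EVENTLOGGER_EXECUTORS is unset. A minimal sketch of just that fallback, mirroring the first line of addEventLogger (the values are illustrative):

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.Config;
import org.apache.storm.utils.Utils;

public class EventLoggerConfSketch {
    public static void main(String[] args) {
        Map<String, Object> conf = new HashMap<>();
        conf.put(Config.TOPOLOGY_WORKERS, 4);
        // TOPOLOGY_EVENTLOGGER_EXECUTORS is not set, so the worker count is used.
        Integer numExecutors = Utils.getInt(conf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS),
                Utils.getInt(conf.get(Config.TOPOLOGY_WORKERS)));
        System.out.println(numExecutors); // 4
    }
}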

Example 10 with Bolt

use of org.apache.storm.generated.Bolt in project storm by apache.

the class StormCommon method metricsConsumerBoltSpecs.

@SuppressWarnings("unchecked")
public static Map<String, Bolt> metricsConsumerBoltSpecs(Map conf, StormTopology topology) {
    Map<String, Bolt> metricsConsumerBolts = new HashMap<>();
    Set<String> componentIdsEmitMetrics = new HashSet<>();
    componentIdsEmitMetrics.addAll(allComponents(topology).keySet());
    componentIdsEmitMetrics.add(Constants.SYSTEM_COMPONENT_ID);
    Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
    for (String componentId : componentIdsEmitMetrics) {
        inputs.put(Utils.getGlobalStreamId(componentId, Constants.METRICS_STREAM_ID), Thrift.prepareShuffleGrouping());
    }
    List<Map<String, Object>> registerInfo = (List<Map<String, Object>>) conf.get(Config.TOPOLOGY_METRICS_CONSUMER_REGISTER);
    if (registerInfo != null) {
        Map<String, Integer> classOccurrencesMap = new HashMap<String, Integer>();
        for (Map<String, Object> info : registerInfo) {
            String className = (String) info.get(TOPOLOGY_METRICS_CONSUMER_CLASS);
            Object argument = info.get(TOPOLOGY_METRICS_CONSUMER_ARGUMENT);
            Integer maxRetainMetricTuples = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_MAX_RETAIN_METRIC_TUPLES), 100);
            Integer phintNum = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_PARALLELISM_HINT), 1);
            Map<String, Object> metricsConsumerConf = new HashMap<String, Object>();
            metricsConsumerConf.put(Config.TOPOLOGY_TASKS, phintNum);
            List<String> whitelist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_WHITELIST);
            List<String> blacklist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_BLACKLIST);
            FilterByMetricName filterPredicate = new FilterByMetricName(whitelist, blacklist);
            Boolean expandMapType = Utils.getBoolean(info.get(TOPOLOGY_METRICS_CONSUMER_EXPAND_MAP_TYPE), false);
            String metricNameSeparator = Utils.getString(info.get(TOPOLOGY_METRICS_CONSUMER_METRIC_NAME_SEPARATOR), ".");
            DataPointExpander expander = new DataPointExpander(expandMapType, metricNameSeparator);
            MetricsConsumerBolt boltInstance = new MetricsConsumerBolt(className, argument, maxRetainMetricTuples, filterPredicate, expander);
            Bolt metricsConsumerBolt = Thrift.prepareSerializedBoltDetails(inputs, boltInstance, null, phintNum, metricsConsumerConf);
            String id = className;
            if (classOccurrencesMap.containsKey(className)) {
                // e.g. ["a", "b", "a"] => ["a", "b", "a#2"]
                int occurrenceNum = classOccurrencesMap.get(className);
                occurrenceNum++;
                classOccurrencesMap.put(className, occurrenceNum);
                id = Constants.METRICS_COMPONENT_ID_PREFIX + className + "#" + occurrenceNum;
            } else {
                classOccurrencesMap.put(className, 1);
            }
            metricsConsumerBolts.put(id, metricsConsumerBolt);
        }
    }
    return metricsConsumerBolts;
}
Also used : FilterByMetricName(org.apache.storm.metric.filter.FilterByMetricName) HashMap(java.util.HashMap) Bolt(org.apache.storm.generated.Bolt) MetricsConsumerBolt(org.apache.storm.metric.MetricsConsumerBolt) IBolt(org.apache.storm.task.IBolt) EventLoggerBolt(org.apache.storm.metric.EventLoggerBolt) SystemBolt(org.apache.storm.metric.SystemBolt) Grouping(org.apache.storm.generated.Grouping) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) ArrayList(java.util.ArrayList) List(java.util.List) DataPointExpander(org.apache.storm.metric.util.DataPointExpander) Map(java.util.Map) TreeMap(java.util.TreeMap) HashSet(java.util.HashSet)
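
A sketch of how duplicate consumer registrations end up with distinct bolt ids. It assumes a built StormTopology is available, uses Config.registerMetricsConsumer to populate TOPOLOGY_METRICS_CONSUMER_REGISTER, and takes StormCommon from org.apache.storm.daemon as in this codebase.

import java.util.Map;

import org.apache.storm.Config;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.metric.LoggingMetricsConsumer;

public class MetricsConsumerSketch {
    public static void printConsumerIds(StormTopology topology) {
        Config conf = new Config();
        conf.registerMetricsConsumer(LoggingMetricsConsumer.class, 1);
        conf.registerMetricsConsumer(LoggingMetricsConsumer.class, 2);

        Map<String, Bolt> consumers = StormCommon.metricsConsumerBoltSpecs(conf, topology);
        // Expected keys: the plain class name for the first registration and a
        // "#2"-suffixed id (prefixed with Constants.METRICS_COMPONENT_ID_PREFIX)
        // for the duplicate, per the occurrence-counting logic above.
        System.out.println(consumers.keySet());
    }
}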

Aggregations

Bolt (org.apache.storm.generated.Bolt): 17
HashMap (java.util.HashMap): 12
Map (java.util.Map): 9
SpoutSpec (org.apache.storm.generated.SpoutSpec): 8
StormTopology (org.apache.storm.generated.StormTopology): 8
GlobalStreamId (org.apache.storm.generated.GlobalStreamId): 7
Grouping (org.apache.storm.generated.Grouping): 6
EventLoggerBolt (org.apache.storm.metric.EventLoggerBolt): 4
MetricsConsumerBolt (org.apache.storm.metric.MetricsConsumerBolt): 4
SystemBolt (org.apache.storm.metric.SystemBolt): 4
IBolt (org.apache.storm.task.IBolt): 4
IRichBolt (org.apache.storm.topology.IRichBolt): 4
BaseRichBolt (org.apache.storm.topology.base.BaseRichBolt): 4
ArrayList (java.util.ArrayList): 3
ComponentCommon (org.apache.storm.generated.ComponentCommon): 3
NullStruct (org.apache.storm.generated.NullStruct): 3
StateSpoutSpec (org.apache.storm.generated.StateSpoutSpec): 3
StreamInfo (org.apache.storm.generated.StreamInfo): 3
BaseWindowedBolt (org.apache.storm.topology.base.BaseWindowedBolt): 3
Test (org.junit.Test): 3