Use of org.apache.storm.generated.Bolt in project incubator-atlas by Apache: the class StormTopologyUtil, method getTerminalUserBoltNames.
/**
 * Returns the names of all user (non-system) bolts that no other user bolt
 * consumes from, i.e. the sinks of the user portion of the topology.
 */
public static Set<String> getTerminalUserBoltNames(StormTopology topology) {
    // Every component id that feeds at least one non-system bolt.
    Set<String> upstreamComponents = new HashSet<>();
    for (Map.Entry<String, Bolt> boltEntry : topology.get_bolts().entrySet()) {
        if (isSystemComponent(boltEntry.getKey())) {
            continue;
        }
        for (GlobalStreamId input : boltEntry.getValue().get_common().get_inputs().keySet()) {
            upstreamComponents.add(input.get_componentId());
        }
    }
    // A terminal user bolt is a non-system bolt nobody consumes from.
    Set<String> terminalBolts = new HashSet<>();
    for (String boltName : topology.get_bolts().keySet()) {
        if (!isSystemComponent(boltName) && !upstreamComponents.contains(boltName)) {
            terminalBolts.add(boltName);
        }
    }
    return terminalBolts;
}
Use of org.apache.storm.generated.Bolt in project flink by Apache: the class WrapperSetupHelper, method createTopologyContext.
/**
 * Creates a {@link TopologyContext} for a Spout or Bolt instance (ie, Flink task / Storm executor).
 *
 * @param context
 * The Flink runtime context.
 * @param spoutOrBolt
 * The Spout or Bolt this context is created for.
 * @param operatorName
 * The name of the operator this context is created for.
 * @param stormTopology
 * The original Storm topology; may be {@code null}, in which case a single-component
 * topology is synthesized around {@code spoutOrBolt} (embedded mode).
 * @param stormConfig
 * The user provided configuration.
 * @return The created {@link TopologyContext}.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
static synchronized TopologyContext createTopologyContext(final StreamingRuntimeContext context, final IComponent spoutOrBolt, final String operatorName, StormTopology stormTopology, final Map stormConfig) {
final int dop = context.getNumberOfParallelSubtasks();
// Bookkeeping handed to the TopologyContext: task-id <-> component mappings and
// the declared output streams (with their fields) per component.
final Map<Integer, String> taskToComponents = new HashMap<Integer, String>();
final Map<String, List<Integer>> componentToSortedTasks = new HashMap<String, List<Integer>>();
final Map<String, Map<String, Fields>> componentToStreamToFields = new HashMap<String, Map<String, Fields>>();
String stormId = (String) stormConfig.get(TOPOLOGY_NAME);
// not supported
String codeDir = null;
// not supported
String pidDir = null;
// -1 acts as "unset"; overwritten below when this subtask's component is matched.
Integer taskId = -1;
// not supported
Integer workerPort = null;
List<Integer> workerTasks = new ArrayList<Integer>();
final Map<String, Object> defaultResources = new HashMap<String, Object>();
final Map<String, Object> userResources = new HashMap<String, Object>();
final Map<String, Object> executorData = new HashMap<String, Object>();
final Map registeredMetrics = new HashMap();
Atom openOrPrepareWasCalled = null;
if (stormTopology == null) {
// embedded mode: synthesize a topology containing only this operator,
// with task ids 1..dop all assigned to it.
ComponentCommon common = new ComponentCommon();
common.set_parallelism_hint(dop);
HashMap<String, SpoutSpec> spouts = new HashMap<String, SpoutSpec>();
HashMap<String, Bolt> bolts = new HashMap<String, Bolt>();
if (spoutOrBolt instanceof IRichSpout) {
spouts.put(operatorName, new SpoutSpec(null, common));
} else {
assert (spoutOrBolt instanceof IRichBolt);
bolts.put(operatorName, new Bolt(null, common));
}
stormTopology = new StormTopology(spouts, bolts, new HashMap<String, StateSpoutSpec>());
List<Integer> sortedTasks = new ArrayList<Integer>(dop);
for (int i = 1; i <= dop; ++i) {
taskToComponents.put(i, operatorName);
sortedTasks.add(i);
}
componentToSortedTasks.put(operatorName, sortedTasks);
// Capture the operator's declared output streams and their fields.
SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
spoutOrBolt.declareOutputFields(declarer);
componentToStreamToFields.put(operatorName, declarer.outputStreams);
} else {
// whole topology is built (i.e. FlinkTopology is used)
Map<String, SpoutSpec> spouts = stormTopology.get_spouts();
Map<String, Bolt> bolts = stormTopology.get_bolts();
Map<String, StateSpoutSpec> stateSpouts = stormTopology.get_state_spouts();
// NOTE(review): 'tid' is not declared in this method — presumably a static field of the
// enclosing class that processSingleOperator advances as it numbers tasks; confirm.
tid = 1;
// Walk every component; processSingleOperator fills the bookkeeping maps and returns
// a non-null task id only for the component matching operatorName/this subtask.
for (Entry<String, SpoutSpec> spout : spouts.entrySet()) {
Integer rc = processSingleOperator(spout.getKey(), spout.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
if (rc != null) {
taskId = rc;
}
}
for (Entry<String, Bolt> bolt : bolts.entrySet()) {
Integer rc = processSingleOperator(bolt.getKey(), bolt.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
if (rc != null) {
taskId = rc;
}
}
for (Entry<String, StateSpoutSpec> stateSpout : stateSpouts.entrySet()) {
Integer rc = processSingleOperator(stateSpout.getKey(), stateSpout.getValue().get_common(), operatorName, context.getIndexOfThisSubtask(), dop, taskToComponents, componentToSortedTasks, componentToStreamToFields);
if (rc != null) {
taskId = rc;
}
}
// NOTE(review): taskId is initialized to -1 above, so this assert can never fire;
// it presumably predates that initialization (originally null).
assert (taskId != null);
}
if (!stormConfig.containsKey(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)) {
// Storm default value
stormConfig.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30);
}
return new FlinkTopologyContext(stormTopology, stormConfig, taskToComponents, componentToSortedTasks, componentToStreamToFields, stormId, codeDir, pidDir, taskId, workerPort, workerTasks, defaultResources, userResources, executorData, registeredMetrics, openOrPrepareWasCalled);
}
Use of org.apache.storm.generated.Bolt in project storm by Apache: the class StormCommon, method addSystemComponents.
/**
 * Registers the system bolt on the topology under the system component id,
 * declaring its tick, metrics-tick, and credentials-changed output streams.
 */
@SuppressWarnings("unused")
public static void addSystemComponents(Map conf, StormTopology topology) {
    // Streams the system bolt emits, each with its single declared field.
    Map<String, StreamInfo> systemStreams = new HashMap<>();
    systemStreams.put(Constants.SYSTEM_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("rate_secs")));
    systemStreams.put(Constants.METRICS_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("interval")));
    systemStreams.put(Constants.CREDENTIALS_CHANGED_STREAM_ID, Thrift.outputFields(Arrays.asList("creds")));

    // The system bolt runs zero dedicated tasks.
    Map<String, Object> systemBoltConf = new HashMap<>();
    systemBoltConf.put(Config.TOPOLOGY_TASKS, 0);

    Bolt systemBolt = Thrift.prepareSerializedBoltDetails(null, new SystemBolt(), systemStreams, 0, systemBoltConf);
    topology.put_to_bolts(Constants.SYSTEM_COMPONENT_ID, systemBolt);
}
Use of org.apache.storm.generated.Bolt in project storm by Apache: the class StormCommon, method addEventLogger.
/**
 * Adds the event-logger bolt to the topology and wires every existing
 * component's event stream into it.
 */
public static void addEventLogger(Map conf, StormTopology topology) {
    // Executor count falls back to the worker count when not configured explicitly.
    Integer executorCount = Utils.getInt(conf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS),
            Utils.getInt(conf.get(Config.TOPOLOGY_WORKERS)));

    HashMap<String, Object> loggerConf = new HashMap<>();
    loggerConf.put(Config.TOPOLOGY_TASKS, executorCount);
    loggerConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Utils.getInt(conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)));

    Bolt eventLogger = Thrift.prepareSerializedBoltDetails(
            eventLoggerInputs(topology), new EventLoggerBolt(), null, executorCount, loggerConf);

    // Declare the event-logger output stream on every existing component so they can feed it.
    for (Object component : allComponents(topology).values()) {
        getComponentCommon(component).put_to_streams(EVENTLOGGER_STREAM_ID, Thrift.outputFields(eventLoggerBoltFields()));
    }
    topology.put_to_bolts(EVENTLOGGER_COMPONENT_ID, eventLogger);
}
Use of org.apache.storm.generated.Bolt in project storm by Apache: the class StormCommon, method metricsConsumerBoltSpecs.
/**
 * Builds one metrics-consumer bolt per entry registered under
 * {@link Config#TOPOLOGY_METRICS_CONSUMER_REGISTER}, keyed by a unique component id.
 *
 * <p>Each consumer bolt shuffle-subscribes to the metrics stream of every topology
 * component plus the system component. Duplicate registrations of the same consumer
 * class are disambiguated with a {@code #<n>} suffix.
 *
 * @param conf the topology configuration holding the consumer registrations.
 * @param topology the topology whose components emit metrics.
 * @return map from consumer-bolt component id to its {@link Bolt} spec; empty if no consumers registered.
 */
@SuppressWarnings("unchecked")
public static Map<String, Bolt> metricsConsumerBoltSpecs(Map conf, StormTopology topology) {
    Map<String, Bolt> metricsConsumerBolts = new HashMap<>();

    // Every component (plus the system component) emits on the metrics stream;
    // each consumer bolt shuffle-subscribes to all of them.
    Set<String> componentIdsEmitMetrics = new HashSet<>();
    componentIdsEmitMetrics.addAll(allComponents(topology).keySet());
    componentIdsEmitMetrics.add(Constants.SYSTEM_COMPONENT_ID);
    Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
    for (String componentId : componentIdsEmitMetrics) {
        inputs.put(Utils.getGlobalStreamId(componentId, Constants.METRICS_STREAM_ID), Thrift.prepareShuffleGrouping());
    }

    List<Map<String, Object>> registerInfo = (List<Map<String, Object>>) conf.get(Config.TOPOLOGY_METRICS_CONSUMER_REGISTER);
    if (registerInfo != null) {
        // Tracks how many times each consumer class has been registered so far.
        Map<String, Integer> classOccurrencesMap = new HashMap<String, Integer>();
        for (Map<String, Object> info : registerInfo) {
            String className = (String) info.get(TOPOLOGY_METRICS_CONSUMER_CLASS);
            Object argument = info.get(TOPOLOGY_METRICS_CONSUMER_ARGUMENT);
            Integer maxRetainMetricTuples = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_MAX_RETAIN_METRIC_TUPLES), 100);
            Integer phintNum = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_PARALLELISM_HINT), 1);
            Map<String, Object> metricsConsumerConf = new HashMap<String, Object>();
            metricsConsumerConf.put(Config.TOPOLOGY_TASKS, phintNum);
            List<String> whitelist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_WHITELIST);
            List<String> blacklist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_BLACKLIST);
            FilterByMetricName filterPredicate = new FilterByMetricName(whitelist, blacklist);
            Boolean expandMapType = Utils.getBoolean(info.get(TOPOLOGY_METRICS_CONSUMER_EXPAND_MAP_TYPE), false);
            String metricNameSeparator = Utils.getString(info.get(TOPOLOGY_METRICS_CONSUMER_METRIC_NAME_SEPARATOR), ".");
            DataPointExpander expander = new DataPointExpander(expandMapType, metricNameSeparator);
            MetricsConsumerBolt boltInstance = new MetricsConsumerBolt(className, argument, maxRetainMetricTuples, filterPredicate, expander);
            Bolt metricsConsumerBolt = Thrift.prepareSerializedBoltDetails(inputs, boltInstance, null, phintNum, metricsConsumerConf);

            // FIX: always apply the system-component prefix. Previously the first occurrence
            // of a class used the bare class name while duplicates were prefixed, producing
            // inconsistent ids (and an id that is not recognizable as a system component).
            String id = Constants.METRICS_COMPONENT_ID_PREFIX + className;
            if (classOccurrencesMap.containsKey(className)) {
                // Repeat registration of the same class: disambiguate with "#<n>",
                // e.g. ["a", "b", "a"] => ids suffixed [..a, ..b, ..a#2].
                int occurrenceNum = classOccurrencesMap.get(className);
                occurrenceNum++;
                classOccurrencesMap.put(className, occurrenceNum);
                id = Constants.METRICS_COMPONENT_ID_PREFIX + className + "#" + occurrenceNum;
            } else {
                classOccurrencesMap.put(className, 1);
            }
            metricsConsumerBolts.put(id, metricsConsumerBolt);
        }
    }
    return metricsConsumerBolts;
}
Aggregations