Example 11 with Grouping

use of org.apache.storm.generated.Grouping in project flink by apache.

the class BoltWrapper method open.

@Override
public void open() throws Exception {
    super.open();
    this.flinkCollector = new TimestampedCollector<>(this.output);
    // Rebuild the Storm configuration from Flink's global job parameters
    GlobalJobParameters config = getExecutionConfig().getGlobalJobParameters();
    StormConfig stormConfig = new StormConfig();
    if (config != null) {
        if (config instanceof StormConfig) {
            stormConfig = (StormConfig) config;
        } else {
            stormConfig.putAll(config.toMap());
        }
    }
    // Create the Storm TopologyContext and an OutputCollector that forwards emitted tuples to Flink
    this.topologyContext = WrapperSetupHelper.createTopologyContext(
            getRuntimeContext(), this.bolt, this.name, this.stormTopology, stormConfig);
    final OutputCollector stormCollector = new OutputCollector(
            new BoltCollector<OUT>(this.numberOfAttributes, this.topologyContext.getThisTaskId(), this.flinkCollector));
    if (this.stormTopology != null) {
        // Cache the component id, stream id, and output schema of every upstream task
        Map<GlobalStreamId, Grouping> inputs = this.topologyContext.getThisSources();
        for (GlobalStreamId inputStream : inputs.keySet()) {
            for (Integer tid : this.topologyContext.getComponentTasks(inputStream.get_componentId())) {
                this.inputComponentIds.put(tid, inputStream.get_componentId());
                this.inputStreamIds.put(tid, inputStream.get_streamId());
                this.inputSchemas.put(tid, this.topologyContext.getComponentOutputFields(inputStream));
            }
        }
    }
    this.bolt.prepare(stormConfig, this.topologyContext, stormCollector);
}
Also used : StormConfig(org.apache.flink.storm.util.StormConfig) OutputCollector(org.apache.storm.task.OutputCollector) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) Grouping(org.apache.storm.generated.Grouping) GlobalJobParameters(org.apache.flink.api.common.ExecutionConfig.GlobalJobParameters)
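
For reference, the sources map consumed in this snippet maps GlobalStreamId keys to Grouping values. Below is a minimal hedged sketch of building such a map by hand, assuming the Thrift-generated union factories Grouping.shuffle and Grouping.fields and made-up component/stream ids ("spout", "default", "signals"):

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.storm.generated.GlobalStreamId;
import org.apache.storm.generated.Grouping;
import org.apache.storm.generated.NullStruct;

public class SourcesMapSketch {
    // Builds a sources map shaped like the one returned by TopologyContext.getThisSources()
    public static Map<GlobalStreamId, Grouping> buildSources() {
        Map<GlobalStreamId, Grouping> sources = new HashMap<>();
        // "spout", "default" and "signals" are illustrative ids, not taken from the example above
        sources.put(new GlobalStreamId("spout", "default"), Grouping.shuffle(new NullStruct()));
        sources.put(new GlobalStreamId("spout", "signals"), Grouping.fields(Arrays.asList("key")));
        return sources;
    }
}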

Example 12 with Grouping

use of org.apache.storm.generated.Grouping in project storm by apache.

the class TopologyContext method toJSONString.

@Override
public String toJSONString() {
    Map<String, Object> obj = new HashMap<>();
    obj.put("task->component", this.getTaskToComponent());
    obj.put("taskid", this.getThisTaskId());
    obj.put("componentid", this.getThisComponentId());
    List<String> streamList = new ArrayList<>();
    streamList.addAll(this.getThisStreams());
    obj.put("streams", streamList);
    obj.put("stream->outputfields", this.getThisOutputFieldsForStreams());
    // Convert targets to a JSON serializable format
    Map<String, Map<String, Object>> stringTargets = new HashMap<>();
    for (Map.Entry<String, Map<String, Grouping>> entry : this.getThisTargets().entrySet()) {
        Map<String, Object> stringTargetMap = new HashMap<>();
        for (Map.Entry<String, Grouping> innerEntry : entry.getValue().entrySet()) {
            stringTargetMap.put(innerEntry.getKey(), groupingToJSONableMap(innerEntry.getValue()));
        }
        stringTargets.put(entry.getKey(), stringTargetMap);
    }
    obj.put("stream->target->grouping", stringTargets);
    // Convert sources to a JSON serializable format
    Map<String, Map<String, Object>> stringSources = new HashMap<>();
    for (Map.Entry<GlobalStreamId, Grouping> entry : this.getThisSources().entrySet()) {
        GlobalStreamId gid = entry.getKey();
        Map<String, Object> stringSourceMap = stringSources.get(gid.get_componentId());
        if (stringSourceMap == null) {
            stringSourceMap = new HashMap<>();
            stringSources.put(gid.get_componentId(), stringSourceMap);
        }
        stringSourceMap.put(gid.get_streamId(), groupingToJSONableMap(entry.getValue()));
    }
    obj.put("source->stream->grouping", stringSources);
    obj.put("source->stream->fields", this.getThisInputFields());
    return JSONValue.toJSONString(obj);
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Grouping(org.apache.storm.generated.Grouping) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) HashMap(java.util.HashMap) Map(java.util.Map)
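
The two loops in toJSONString flatten the Thrift Grouping values into plain nested maps so they can be serialized. A small hedged sketch of the resulting "source->stream->grouping" shape, with made-up ids and a placeholder grouping map standing in for whatever groupingToJSONableMap returns (the JSONValue import path varies across Storm versions; plain json-simple is assumed here):

import java.util.HashMap;
import java.util.Map;
import org.json.simple.JSONValue;

public class SourcesJsonShapeSketch {
    public static void main(String[] args) {
        // source component -> stream id -> JSON-friendly grouping description
        Map<String, Map<String, Object>> sources = new HashMap<>();
        Map<String, Object> streams = new HashMap<>();
        Map<String, Object> grouping = new HashMap<>();
        grouping.put("type", "SHUFFLE"); // placeholder key; the real ones come from groupingToJSONableMap
        streams.put("default", grouping);
        sources.put("spout", streams);
        Map<String, Object> obj = new HashMap<>();
        obj.put("source->stream->grouping", sources);
        // prints {"source->stream->grouping":{"spout":{"default":{"type":"SHUFFLE"}}}}
        System.out.println(JSONValue.toJSONString(obj));
    }
}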

Example 13 with Grouping

use of org.apache.storm.generated.Grouping in project storm by apache.

the class StormCommon method validateStructure.

public static void validateStructure(StormTopology topology) throws InvalidTopologyException {
    Map<String, Object> componentMap = allComponents(topology);
    for (Map.Entry<String, Object> entry : componentMap.entrySet()) {
        String componentId = entry.getKey();
        ComponentCommon common = getComponentCommon(entry.getValue());
        Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
        for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
            String sourceStreamId = input.getKey().get_streamId();
            String sourceComponentId = input.getKey().get_componentId();
            if (!componentMap.keySet().contains(sourceComponentId)) {
                throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent component [" + sourceComponentId + "]");
            }
            ComponentCommon sourceComponent = getComponentCommon(componentMap.get(sourceComponentId));
            if (!sourceComponent.get_streams().containsKey(sourceStreamId)) {
                throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent stream: " + "[" + sourceStreamId + "] of component [" + sourceComponentId + "]");
            }
            Grouping grouping = input.getValue();
            // For a fields grouping, every referenced field must exist in the source stream's output fields
            if (Thrift.groupingType(grouping) == Grouping._Fields.FIELDS) {
                List<String> fields = new ArrayList<>(grouping.get_fields());
                Map<String, StreamInfo> streams = sourceComponent.get_streams();
                Set<String> sourceOutputFields = getStreamOutputFields(streams);
                fields.removeAll(sourceOutputFields);
                if (fields.size() != 0) {
                    throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from stream: [" + sourceStreamId + "] of component " + "[" + sourceComponentId + "] + with non-existent fields: " + fields);
                }
            }
        }
    }
}
Also used : ComponentCommon(org.apache.storm.generated.ComponentCommon) InvalidTopologyException(org.apache.storm.generated.InvalidTopologyException) ArrayList(java.util.ArrayList) Grouping(org.apache.storm.generated.Grouping) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) StreamInfo(org.apache.storm.generated.StreamInfo) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)
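
As a usage illustration, the fields-grouping check above rejects subscriptions that name a field the source stream never declares. A hedged sketch, assuming hypothetical MySpout/MyBolt components where the spout's default stream declares only the field "word":

import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;

public class ValidateStructureSketch {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("spout", new MySpout()); // hypothetical spout declaring only "word"
        builder.setBolt("bolt", new MyBolt()) // hypothetical bolt
                .fieldsGrouping("spout", new Fields("count")); // "count" is not declared by the spout
        StormTopology topology = builder.createTopology();
        try {
            StormCommon.validateStructure(topology);
        } catch (InvalidTopologyException e) {
            // expected: Component: [bolt] subscribes from stream: [default] ... with non-existent fields: [count]
            System.out.println(e.get_msg());
        }
    }
}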

Example 14 with Grouping

use of org.apache.storm.generated.Grouping in project storm by apache.

the class StormCommon method metricsConsumerBoltSpecs.

@SuppressWarnings("unchecked")
public static Map<String, Bolt> metricsConsumerBoltSpecs(Map conf, StormTopology topology) {
    Map<String, Bolt> metricsConsumerBolts = new HashMap<>();
    Set<String> componentIdsEmitMetrics = new HashSet<>();
    componentIdsEmitMetrics.addAll(allComponents(topology).keySet());
    componentIdsEmitMetrics.add(Constants.SYSTEM_COMPONENT_ID);
    // Every component (plus the system component) feeds its metrics stream to the consumer via shuffle grouping
    Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
    for (String componentId : componentIdsEmitMetrics) {
        inputs.put(Utils.getGlobalStreamId(componentId, Constants.METRICS_STREAM_ID), Thrift.prepareShuffleGrouping());
    }
    List<Map<String, Object>> registerInfo = (List<Map<String, Object>>) conf.get(Config.TOPOLOGY_METRICS_CONSUMER_REGISTER);
    if (registerInfo != null) {
        Map<String, Integer> classOccurrencesMap = new HashMap<String, Integer>();
        for (Map<String, Object> info : registerInfo) {
            String className = (String) info.get(TOPOLOGY_METRICS_CONSUMER_CLASS);
            Object argument = info.get(TOPOLOGY_METRICS_CONSUMER_ARGUMENT);
            Integer maxRetainMetricTuples = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_MAX_RETAIN_METRIC_TUPLES), 100);
            Integer phintNum = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_PARALLELISM_HINT), 1);
            Map<String, Object> metricsConsumerConf = new HashMap<String, Object>();
            metricsConsumerConf.put(Config.TOPOLOGY_TASKS, phintNum);
            List<String> whitelist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_WHITELIST);
            List<String> blacklist = (List<String>) info.get(TOPOLOGY_METRICS_CONSUMER_BLACKLIST);
            FilterByMetricName filterPredicate = new FilterByMetricName(whitelist, blacklist);
            Boolean expandMapType = Utils.getBoolean(info.get(TOPOLOGY_METRICS_CONSUMER_EXPAND_MAP_TYPE), false);
            String metricNameSeparator = Utils.getString(info.get(TOPOLOGY_METRICS_CONSUMER_METRIC_NAME_SEPARATOR), ".");
            DataPointExpander expander = new DataPointExpander(expandMapType, metricNameSeparator);
            MetricsConsumerBolt boltInstance = new MetricsConsumerBolt(className, argument, maxRetainMetricTuples, filterPredicate, expander);
            Bolt metricsConsumerBolt = Thrift.prepareSerializedBoltDetails(inputs, boltInstance, null, phintNum, metricsConsumerConf);
            String id = className;
            if (classOccurrencesMap.containsKey(className)) {
                // e.g. [\"a\", \"b\", \"a\"]) => [\"a\", \"b\", \"a#2\"]"
                int occurrenceNum = classOccurrencesMap.get(className);
                occurrenceNum++;
                classOccurrencesMap.put(className, occurrenceNum);
                id = Constants.METRICS_COMPONENT_ID_PREFIX + className + "#" + occurrenceNum;
            } else {
                classOccurrencesMap.put(className, 1);
            }
            metricsConsumerBolts.put(id, metricsConsumerBolt);
        }
    }
    return metricsConsumerBolts;
}
Also used : FilterByMetricName(org.apache.storm.metric.filter.FilterByMetricName) HashMap(java.util.HashMap) Bolt(org.apache.storm.generated.Bolt) MetricsConsumerBolt(org.apache.storm.metric.MetricsConsumerBolt) IBolt(org.apache.storm.task.IBolt) EventLoggerBolt(org.apache.storm.metric.EventLoggerBolt) SystemBolt(org.apache.storm.metric.SystemBolt) Grouping(org.apache.storm.generated.Grouping) MetricsConsumerBolt(org.apache.storm.metric.MetricsConsumerBolt) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) ArrayList(java.util.ArrayList) List(java.util.List) DataPointExpander(org.apache.storm.metric.util.DataPointExpander) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) HashSet(java.util.HashSet)
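
The registerInfo list read from Config.TOPOLOGY_METRICS_CONSUMER_REGISTER is normally populated via Config.registerMetricsConsumer rather than built by hand. A minimal sketch, assuming the built-in LoggingMetricsConsumer:

import org.apache.storm.Config;
import org.apache.storm.metric.LoggingMetricsConsumer;

public class MetricsConsumerConfigSketch {
    public static Config buildConf() {
        Config conf = new Config();
        // Adds an entry (class name, argument, parallelism hint) to TOPOLOGY_METRICS_CONSUMER_REGISTER,
        // which metricsConsumerBoltSpecs later turns into a MetricsConsumerBolt
        conf.registerMetricsConsumer(LoggingMetricsConsumer.class, 2);
        return conf;
    }
}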

Example 15 with Grouping

use of org.apache.storm.generated.Grouping in project storm by apache.

the class StatefulWindowedBoltExecutorTest method testRecovery.

@Test
public void testRecovery() throws Exception {
    mockStormConf.put(Config.TOPOLOGY_BOLTS_MESSAGE_ID_FIELD_NAME, "msgid");
    mockStormConf.put(Config.TOPOLOGY_BOLTS_WINDOW_LENGTH_COUNT, 5);
    mockStormConf.put(Config.TOPOLOGY_BOLTS_SLIDING_INTERVAL_COUNT, 5);
    KeyValueState<TaskStream, WindowState> mockState;
    mockState = Mockito.mock(KeyValueState.class);
    // Mock a single upstream source (component "a", stream "s") handled by task 1
    Map<GlobalStreamId, Grouping> mockMap = Mockito.mock(Map.class);
    Mockito.when(mockTopologyContext.getThisSources()).thenReturn(mockMap);
    Mockito.when(mockTopologyContext.getComponentTasks(Mockito.anyString())).thenReturn(Collections.singletonList(1));
    Mockito.when(mockMap.keySet()).thenReturn(Collections.singleton(new GlobalStreamId("a", "s")));
    WindowState mockWindowState = new WindowState(4, 4);
    Mockito.when(mockState.get(Mockito.any(TaskStream.class))).thenReturn(mockWindowState);
    executor.prepare(mockStormConf, mockTopologyContext, mockOutputCollector, mockState);
    executor.initState(null);
    List<Tuple> tuples = getMockTuples(10);
    for (Tuple tuple : tuples) {
        executor.execute(tuple);
    }
    WindowState expectedState = new WindowState(4, 9);
    Mockito.verify(mockState, Mockito.times(1)).put(Mockito.any(TaskStream.class), Mockito.eq(expectedState));
}
Also used : WindowState(org.apache.storm.topology.StatefulWindowedBoltExecutor.WindowState) TaskStream(org.apache.storm.topology.StatefulWindowedBoltExecutor.TaskStream) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) KeyValueState(org.apache.storm.state.KeyValueState) Grouping(org.apache.storm.generated.Grouping) Tuple(org.apache.storm.tuple.Tuple) Test(org.junit.Test)
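
getMockTuples is a helper defined elsewhere in this test class; the sketch below is a plausible reconstruction, not the project's actual implementation, and assumes the executor reads the message id via getLongByField("msgid") from source task 1 on stream "s":

import java.util.ArrayList;
import java.util.List;
import org.apache.storm.tuple.Tuple;
import org.mockito.Mockito;

public class MockTupleSketch {
    // Produces count mock tuples with increasing "msgid" values from task 1, stream "s"
    static List<Tuple> getMockTuples(int count) {
        List<Tuple> tuples = new ArrayList<>();
        for (int i = 0; i < count; i++) {
            Tuple tuple = Mockito.mock(Tuple.class);
            Mockito.when(tuple.getSourceTask()).thenReturn(1);
            Mockito.when(tuple.getSourceStreamId()).thenReturn("s");
            Mockito.when(tuple.getLongByField("msgid")).thenReturn((long) i);
            tuples.add(tuple);
        }
        return tuples;
    }
}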

Aggregations

Grouping (org.apache.storm.generated.Grouping)18 GlobalStreamId (org.apache.storm.generated.GlobalStreamId)16 HashMap (java.util.HashMap)14 Map (java.util.Map)9 Bolt (org.apache.storm.generated.Bolt)6 StormTopology (org.apache.storm.generated.StormTopology)5 Test (org.junit.Test)5 ComponentCommon (org.apache.storm.generated.ComponentCommon)4 NullStruct (org.apache.storm.generated.NullStruct)4 SpoutSpec (org.apache.storm.generated.SpoutSpec)4 StreamInfo (org.apache.storm.generated.StreamInfo)4 Tuple (org.apache.storm.tuple.Tuple)4 ArrayList (java.util.ArrayList)3 TreeMap (java.util.TreeMap)3 IRichBolt (org.apache.storm.topology.IRichBolt)3 Fields (org.apache.storm.tuple.Fields)3 EventLoggerBolt (org.apache.storm.metric.EventLoggerBolt)2 MetricsConsumerBolt (org.apache.storm.metric.MetricsConsumerBolt)2 SystemBolt (org.apache.storm.metric.SystemBolt)2 BranchProcessor (org.apache.storm.streams.processors.BranchProcessor)2