Example 11 with TopologyInfo

Use of org.apache.storm.generated.TopologyInfo in the apache/storm project.

Source: class FastWordCountTopology, method printMetrics.

public static void printMetrics(Nimbus.Iface client, String name) throws Exception {
    TopologyInfo info = client.getTopologyInfoByName(name);
    int uptime = info.get_uptime_secs();
    long acked = 0;
    long failed = 0;
    double weightedAvgTotal = 0.0;
    for (ExecutorSummary exec : info.get_executors()) {
        if ("spout".equals(exec.get_component_id())) {
            SpoutStats stats = exec.get_stats().get_specific().get_spout();
            Map<String, Long> failedMap = stats.get_failed().get(":all-time");
            Map<String, Long> ackedMap = stats.get_acked().get(":all-time");
            Map<String, Double> avgLatMap = stats.get_complete_ms_avg().get(":all-time");
            for (String key : ackedMap.keySet()) {
                if (failedMap != null) {
                    Long tmp = failedMap.get(key);
                    if (tmp != null) {
                        failed += tmp;
                    }
                }
                long ackVal = ackedMap.get(key);
                double latVal = avgLatMap.get(key) * ackVal;
                acked += ackVal;
                weightedAvgTotal += latVal;
            }
        }
    }
    double avgLatency = weightedAvgTotal / acked;
    System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " + (((double) acked) / uptime + " failed: " + failed));
}
Also used : SpoutStats(org.apache.storm.generated.SpoutStats) TopologyInfo(org.apache.storm.generated.TopologyInfo) ExecutorSummary(org.apache.storm.generated.ExecutorSummary)
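
A minimal caller sketch for this helper, assuming the standard NimbusClient/Utils wiring from org.apache.storm.utils and a recent Storm release where NimbusClient is auto-closeable; the topology name and poll interval are illustrative, and the import of FastWordCountTopology itself is omitted:

import java.util.Map;
import org.apache.storm.utils.NimbusClient;
import org.apache.storm.utils.Utils;

public class PrintMetricsDriver {
    public static void main(String[] args) throws Exception {
        // Read storm.yaml plus defaults so NimbusClient knows where Nimbus is running.
        Map<String, Object> conf = Utils.readStormConfig();
        try (NimbusClient nimbus = NimbusClient.getConfiguredClient(conf)) {
            // Poll the spout metrics every 10 seconds; the topology name is illustrative.
            while (true) {
                FastWordCountTopology.printMetrics(nimbus.getClient(), "fast-word-count");
                Thread.sleep(10_000);
            }
        }
    }
}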

Example 12 with TopologyInfo

Use of org.apache.storm.generated.TopologyInfo in the apache/storm project.

Source: class MetricsSample, method factory.

public static MetricsSample factory(Nimbus.Iface client, String topologyName) throws Exception {
    // "************ Sampling Metrics *****************
    // get topology info
    TopologySummary topSummary = client.getTopologySummaryByName(topologyName);
    int topologyExecutors = topSummary.get_num_executors();
    int topologyWorkers = topSummary.get_num_workers();
    int topologyTasks = topSummary.get_num_tasks();
    TopologyInfo topInfo = client.getTopologyInfo(topSummary.get_id());
    MetricsSample sample = getMetricsSample(topInfo);
    sample.numWorkers = topologyWorkers;
    sample.numExecutors = topologyExecutors;
    sample.numTasks = topologyTasks;
    return sample;
}
Also used : TopologySummary(org.apache.storm.generated.TopologySummary) TopologyInfo(org.apache.storm.generated.TopologyInfo)
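
A hedged usage sketch: take two samples a fixed interval apart and compare them. The topology name and the 60-second gap are assumptions, and the Nimbus.Iface client is expected to be obtained elsewhere (for example as in the printMetrics driver above):

static void sampleTwice(Nimbus.Iface client) throws Exception {
    MetricsSample before = MetricsSample.factory(client, "my-topology");
    Thread.sleep(60_000);
    MetricsSample after = MetricsSample.factory(client, "my-topology");
    // Comparing 'before' and 'after' over the interval gives a rough throughput/latency trend.
}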

Example 13 with TopologyInfo

Use of org.apache.storm.generated.TopologyInfo in the apache/storm project.

Source: class Nimbus, method getTopologyInfoWithOptsImpl.

private TopologyInfo getTopologyInfoWithOptsImpl(String topoId, GetInfoOptions options) throws NotAliveException, AuthorizationException, InvalidTopologyException, Exception {
    CommonTopoInfo common = getCommonTopoInfo(topoId, "getTopologyInfo");
    if (common.base == null) {
        throw new WrappedNotAliveException(topoId);
    }
    IStormClusterState state = stormClusterState;
    NumErrorsChoice numErrChoice = Utils.OR(options.get_num_err_choice(), NumErrorsChoice.ALL);
    Map<String, List<ErrorInfo>> errors = new HashMap<>();
    for (String component : common.allComponents) {
        switch(numErrChoice) {
            case NONE:
                errors.put(component, Collections.emptyList());
                break;
            case ONE:
                List<ErrorInfo> errList = new ArrayList<>();
                ErrorInfo info = state.lastError(topoId, component);
                if (info != null) {
                    errList.add(info);
                }
                errors.put(component, errList);
                break;
            case ALL:
                errors.put(component, state.errors(topoId, component));
                break;
            default:
                LOG.warn("Got invalid NumErrorsChoice '{}'", numErrChoice);
                errors.put(component, state.errors(topoId, component));
                break;
        }
    }
    List<ExecutorSummary> summaries = new ArrayList<>();
    if (common.assignment != null) {
        for (Entry<List<Long>, NodeInfo> entry : common.assignment.get_executor_node_port().entrySet()) {
            NodeInfo ni = entry.getValue();
            ExecutorInfo execInfo = toExecInfo(entry.getKey());
            Map<String, String> nodeToHost = common.assignment.get_node_host();
            Map<String, Object> heartbeat = common.beats.get(ClientStatsUtil.convertExecutor(entry.getKey()));
            if (heartbeat == null) {
                heartbeat = Collections.emptyMap();
            }
            ExecutorSummary summ = new ExecutorSummary(execInfo, common.taskToComponent.get(execInfo.get_task_start()), nodeToHost.get(ni.get_node()), ni.get_port_iterator().next().intValue(), (Integer) heartbeat.getOrDefault("uptime", 0));
            // heartbeats "stats"
            Map ex = (Map) heartbeat.get("stats");
            if (ex != null) {
                ExecutorStats stats = StatsUtil.thriftifyExecutorStats(ex);
                summ.set_stats(stats);
            }
            summaries.add(summ);
        }
    }
    TopologyInfo topoInfo = new TopologyInfo(topoId, common.topoName, Time.deltaSecs(common.launchTimeSecs), summaries, extractStatusStr(common.base), errors);
    if (common.topology.is_set_storm_version()) {
        topoInfo.set_storm_version(common.topology.get_storm_version());
    }
    if (common.base.is_set_owner()) {
        topoInfo.set_owner(common.base.get_owner());
    }
    String schedStatus = idToSchedStatus.get().get(topoId);
    if (schedStatus != null) {
        topoInfo.set_sched_status(schedStatus);
    }
    TopologyResources resources = getResourcesForTopology(topoId, common.base);
    if (resources != null && underlyingScheduler instanceof ResourceAwareScheduler) {
        topoInfo.set_requested_memonheap(resources.getRequestedMemOnHeap());
        topoInfo.set_requested_memoffheap(resources.getRequestedMemOffHeap());
        topoInfo.set_requested_cpu(resources.getRequestedCpu());
        topoInfo.set_assigned_memonheap(resources.getAssignedMemOnHeap());
        topoInfo.set_assigned_memoffheap(resources.getAssignedMemOffHeap());
        topoInfo.set_assigned_cpu(resources.getAssignedCpu());
    }
    if (common.base.is_set_component_debug()) {
        topoInfo.set_component_debug(common.base.get_component_debug());
    }
    topoInfo.set_replication_count(getBlobReplicationCount(ConfigUtils.masterStormCodeKey(topoId)));
    return topoInfo;
}
Also used : HashMap(java.util.HashMap) ExecutorStats(org.apache.storm.generated.ExecutorStats) ErrorInfo(org.apache.storm.generated.ErrorInfo) ArrayList(java.util.ArrayList) WrappedNotAliveException(org.apache.storm.utils.WrappedNotAliveException) ExecutorSummary(org.apache.storm.generated.ExecutorSummary) ResourceAwareScheduler(org.apache.storm.scheduler.resource.ResourceAwareScheduler) ExecutorInfo(org.apache.storm.generated.ExecutorInfo) NodeInfo(org.apache.storm.generated.NodeInfo) List(java.util.List) IStormClusterState(org.apache.storm.cluster.IStormClusterState) NumErrorsChoice(org.apache.storm.generated.NumErrorsChoice) Map(java.util.Map) NavigableMap(java.util.NavigableMap) RotatingMap(org.apache.storm.utils.RotatingMap) ImmutableMap(org.apache.storm.shade.com.google.common.collect.ImmutableMap) TimeCacheMap(org.apache.storm.utils.TimeCacheMap) TopologyInfo(org.apache.storm.generated.TopologyInfo)
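
On the client side, this implementation is reached through the Thrift method getTopologyInfoWithOpts. The sketch below is an assumed typical call, not code from the project; it asks Nimbus to return at most one error per component:

import org.apache.storm.generated.GetInfoOptions;
import org.apache.storm.generated.Nimbus;
import org.apache.storm.generated.NumErrorsChoice;
import org.apache.storm.generated.TopologyInfo;

class TopologyInfoFetcher {
    static TopologyInfo fetchWithOneErrorPerComponent(Nimbus.Iface client, String topologyId) throws Exception {
        GetInfoOptions opts = new GetInfoOptions();
        // NumErrorsChoice.ONE corresponds to the ONE branch in the switch above: keep only the latest error.
        opts.set_num_err_choice(NumErrorsChoice.ONE);
        return client.getTopologyInfoWithOpts(topologyId, opts);
    }
}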

Example 14 with TopologyInfo

Use of org.apache.storm.generated.TopologyInfo in the apache/storm project.

Source: class CaptureLoad, method captureTopology.

static TopologyLoadConf captureTopology(Nimbus.Iface client, TopologySummary topologySummary) throws Exception {
    String topologyName = topologySummary.get_name();
    LOG.info("Capturing {}...", topologyName);
    String topologyId = topologySummary.get_id();
    TopologyInfo info = client.getTopologyInfo(topologyId);
    TopologyPageInfo tpinfo = client.getTopologyPageInfo(topologyId, ":all-time", false);
    @SuppressWarnings("checkstyle:VariableDeclarationUsageDistance") StormTopology topo = client.getUserTopology(topologyId);
    // Done capturing topology information...
    Map<String, Object> savedTopoConf = new HashMap<>();
    Map<String, Object> topoConf = (Map<String, Object>) JSONValue.parse(client.getTopologyConf(topologyId));
    for (String key : TopologyLoadConf.IMPORTANT_CONF_KEYS) {
        Object o = topoConf.get(key);
        if (o != null) {
            savedTopoConf.put(key, o);
            LOG.info("with config {}: {}", key, o);
        }
    }
    // Let's use the number of actually scheduled workers as a way to bridge RAS and non-RAS
    int numWorkers = tpinfo.get_num_workers();
    if (savedTopoConf.containsKey(Config.TOPOLOGY_WORKERS)) {
        numWorkers = Math.max(numWorkers, ((Number) savedTopoConf.get(Config.TOPOLOGY_WORKERS)).intValue());
    }
    savedTopoConf.put(Config.TOPOLOGY_WORKERS, numWorkers);
    Map<String, LoadCompConf.Builder> boltBuilders = new HashMap<>();
    Map<String, LoadCompConf.Builder> spoutBuilders = new HashMap<>();
    List<InputStream.Builder> inputStreams = new ArrayList<>();
    Map<GlobalStreamId, OutputStream.Builder> outStreams = new HashMap<>();
    // Bolts
    if (topo.get_bolts() != null) {
        for (Map.Entry<String, Bolt> boltSpec : topo.get_bolts().entrySet()) {
            String boltComp = boltSpec.getKey();
            LOG.info("Found bolt {}...", boltComp);
            Bolt bolt = boltSpec.getValue();
            ComponentCommon common = bolt.get_common();
            Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
            if (inputs != null) {
                for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
                    GlobalStreamId id = input.getKey();
                    LOG.info("with input {}...", id);
                    Grouping grouping = input.getValue();
                    InputStream.Builder builder = new InputStream.Builder().withId(id.get_streamId()).withFromComponent(id.get_componentId()).withToComponent(boltComp).withGroupingType(grouping);
                    inputStreams.add(builder);
                }
            }
            Map<String, StreamInfo> outputs = common.get_streams();
            if (outputs != null) {
                for (String name : outputs.keySet()) {
                    GlobalStreamId id = new GlobalStreamId(boltComp, name);
                    LOG.info("and output {}...", id);
                    OutputStream.Builder builder = new OutputStream.Builder().withId(name);
                    outStreams.put(id, builder);
                }
            }
            LoadCompConf.Builder builder = new LoadCompConf.Builder().withParallelism(common.get_parallelism_hint()).withId(boltComp);
            boltBuilders.put(boltComp, builder);
        }
        Map<String, Map<String, Double>> boltResources = getBoltsResources(topo, topoConf);
        for (Map.Entry<String, Map<String, Double>> entry : boltResources.entrySet()) {
            LoadCompConf.Builder bd = boltBuilders.get(entry.getKey());
            if (bd != null) {
                Map<String, Double> resources = entry.getValue();
                Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
                if (cpu != null) {
                    bd.withCpuLoad(cpu);
                }
                Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
                if (mem != null) {
                    bd.withMemoryLoad(mem);
                }
            }
        }
    }
    // Spouts
    if (topo.get_spouts() != null) {
        for (Map.Entry<String, SpoutSpec> spoutSpec : topo.get_spouts().entrySet()) {
            String spoutComp = spoutSpec.getKey();
            LOG.info("Found Spout {}...", spoutComp);
            SpoutSpec spout = spoutSpec.getValue();
            ComponentCommon common = spout.get_common();
            Map<String, StreamInfo> outputs = common.get_streams();
            if (outputs != null) {
                for (String name : outputs.keySet()) {
                    GlobalStreamId id = new GlobalStreamId(spoutComp, name);
                    LOG.info("with output {}...", id);
                    OutputStream.Builder builder = new OutputStream.Builder().withId(name);
                    outStreams.put(id, builder);
                }
            }
            LoadCompConf.Builder builder = new LoadCompConf.Builder().withParallelism(common.get_parallelism_hint()).withId(spoutComp);
            spoutBuilders.put(spoutComp, builder);
        }
        Map<String, Map<String, Double>> spoutResources = getSpoutsResources(topo, topoConf);
        for (Map.Entry<String, Map<String, Double>> entry : spoutResources.entrySet()) {
            LoadCompConf.Builder sd = spoutBuilders.get(entry.getKey());
            if (sd != null) {
                Map<String, Double> resources = entry.getValue();
                Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
                if (cpu != null) {
                    sd.withCpuLoad(cpu);
                }
                Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
                if (mem != null) {
                    sd.withMemoryLoad(mem);
                }
            }
        }
    }
    // Stats...
    Map<String, List<ExecutorSummary>> byComponent = new HashMap<>();
    for (ExecutorSummary executor : info.get_executors()) {
        String component = executor.get_component_id();
        List<ExecutorSummary> list = byComponent.get(component);
        if (list == null) {
            list = new ArrayList<>();
            byComponent.put(component, list);
        }
        list.add(executor);
    }
    List<InputStream> streams = new ArrayList<>(inputStreams.size());
    // Compute the stats for the different input streams
    for (InputStream.Builder builder : inputStreams) {
        GlobalStreamId streamId = new GlobalStreamId(builder.getFromComponent(), builder.getId());
        List<ExecutorSummary> summaries = byComponent.get(builder.getToComponent());
        // Execute and process latency...
        builder.withProcessTime(new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_process_ms_avg)));
        builder.withExecTime(new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_execute_ms_avg)));
        // InputStream is done
        streams.add(builder.build());
    }
    // There is a bug in some versions that returns 0 for the uptime.
    // To work around it, we get the value in an alternative (working) way.
    Map<String, Integer> workerToUptime = new HashMap<>();
    for (WorkerSummary ws : tpinfo.get_workers()) {
        workerToUptime.put(ws.get_supervisor_id() + ":" + ws.get_port(), ws.get_uptime_secs());
    }
    LOG.debug("WORKER TO UPTIME {}", workerToUptime);
    for (Map.Entry<GlobalStreamId, OutputStream.Builder> entry : outStreams.entrySet()) {
        OutputStream.Builder builder = entry.getValue();
        GlobalStreamId id = entry.getKey();
        List<Double> emittedRate = new ArrayList<>();
        List<ExecutorSummary> summaries = byComponent.get(id.get_componentId());
        if (summaries != null) {
            for (ExecutorSummary summary : summaries) {
                if (summary.is_set_stats()) {
                    int uptime = summary.get_uptime_secs();
                    LOG.debug("UPTIME {}", uptime);
                    if (uptime <= 0) {
                        // Likely it is because of a bug, so try to get it another way
                        String key = summary.get_host() + ":" + summary.get_port();
                        uptime = workerToUptime.getOrDefault(key, 1);
                        LOG.debug("Getting uptime for worker {}, {}", key, uptime);
                    }
                    for (Map.Entry<String, Map<String, Long>> statEntry : summary.get_stats().get_emitted().entrySet()) {
                        String timeWindow = statEntry.getKey();
                        long timeSecs = uptime;
                        try {
                            timeSecs = Long.valueOf(timeWindow);
                        } catch (NumberFormatException e) {
                            // Ignored...
                        }
                        timeSecs = Math.min(timeSecs, uptime);
                        Long count = statEntry.getValue().get(id.get_streamId());
                        if (count != null) {
                            LOG.debug("{} emitted {} for {} secs or {} tuples/sec", id, count, timeSecs, count.doubleValue() / timeSecs);
                            emittedRate.add(count.doubleValue() / timeSecs);
                        }
                    }
                }
            }
        }
        builder.withRate(new NormalDistStats(emittedRate));
        // The OutputStream is done
        LoadCompConf.Builder comp = boltBuilders.get(id.get_componentId());
        if (comp == null) {
            comp = spoutBuilders.get(id.get_componentId());
        }
        comp.withStream(builder.build());
    }
    List<LoadCompConf> spouts = spoutBuilders.values().stream().map((b) -> b.build()).collect(Collectors.toList());
    List<LoadCompConf> bolts = boltBuilders.values().stream().map((b) -> b.build()).collect(Collectors.toList());
    return new TopologyLoadConf(topologyName, savedTopoConf, spouts, bolts, streams);
}
Also used : Options(org.apache.commons.cli.Options) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) HelpFormatter(org.apache.commons.cli.HelpFormatter) Function(java.util.function.Function) ComponentCommon(org.apache.storm.generated.ComponentCommon) ArrayList(java.util.ArrayList) DefaultParser(org.apache.commons.cli.DefaultParser) TopologyPageInfo(org.apache.storm.generated.TopologyPageInfo) Bolt(org.apache.storm.generated.Bolt) NimbusClient(org.apache.storm.utils.NimbusClient) Nimbus(org.apache.storm.generated.Nimbus) StormTopology(org.apache.storm.generated.StormTopology) JSONValue(org.json.simple.JSONValue) Map(java.util.Map) CommandLine(org.apache.commons.cli.CommandLine) Option(org.apache.commons.cli.Option) Logger(org.slf4j.Logger) CommandLineParser(org.apache.commons.cli.CommandLineParser) JSONParser(org.json.simple.parser.JSONParser) WorkerSummary(org.apache.storm.generated.WorkerSummary) Grouping(org.apache.storm.generated.Grouping) StreamInfo(org.apache.storm.generated.StreamInfo) TopologyInfo(org.apache.storm.generated.TopologyInfo) Collectors(java.util.stream.Collectors) File(java.io.File) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) ExecutorSummary(org.apache.storm.generated.ExecutorSummary) List(java.util.List) ObjectReader(org.apache.storm.utils.ObjectReader) JSONObject(org.json.simple.JSONObject) ParseException(org.apache.commons.cli.ParseException) SpoutSpec(org.apache.storm.generated.SpoutSpec) Config(org.apache.storm.Config) TopologySummary(org.apache.storm.generated.TopologySummary) BoltStats(org.apache.storm.generated.BoltStats) ClusterSummary(org.apache.storm.generated.ClusterSummary)
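
A rough outline of how captureTopology might be driven for every live topology; captureTopology is package-private, so this sketch assumes code living alongside CaptureLoad, and the persistence step is only described in a comment:

// Capture every topology currently known to Nimbus (standard Nimbus Thrift calls).
ClusterSummary cluster = client.getClusterInfo();
for (TopologySummary topologySummary : cluster.get_topologies()) {
    TopologyLoadConf loadConf = captureTopology(client, topologySummary);
    // Each TopologyLoadConf would then be written out for later replay by the load-generation tooling.
}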

Example 15 with TopologyInfo

Use of org.apache.storm.generated.TopologyInfo in the apache/storm project.

Source: class HdfsSpoutTopology, method printMetrics.

static void printMetrics(Nimbus.Iface client, String name) throws Exception {
    TopologyInfo info = client.getTopologyInfoByName(name);
    int uptime = info.get_uptime_secs();
    long acked = 0;
    long failed = 0;
    double weightedAvgTotal = 0.0;
    for (ExecutorSummary exec : info.get_executors()) {
        if ("spout".equals(exec.get_component_id())) {
            SpoutStats stats = exec.get_stats().get_specific().get_spout();
            Map<String, Long> failedMap = stats.get_failed().get(":all-time");
            Map<String, Long> ackedMap = stats.get_acked().get(":all-time");
            Map<String, Double> avgLatMap = stats.get_complete_ms_avg().get(":all-time");
            for (String key : ackedMap.keySet()) {
                if (failedMap != null) {
                    Long tmp = failedMap.get(key);
                    if (tmp != null) {
                        failed += tmp;
                    }
                }
                long ackVal = ackedMap.get(key);
                double latVal = avgLatMap.get(key) * ackVal;
                acked += ackVal;
                weightedAvgTotal += latVal;
            }
        }
    }
    double avgLatency = weightedAvgTotal / acked;
    System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " + (((double) acked) / uptime + " failed: " + failed));
}
Also used : SpoutStats(org.apache.storm.generated.SpoutStats) TopologyInfo(org.apache.storm.generated.TopologyInfo) ExecutorSummary(org.apache.storm.generated.ExecutorSummary)
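
One caveat that applies to both printMetrics variants: if nothing has been acked yet, the final division yields NaN, and a zero uptime makes the acked/sec figure infinite. A small defensive tweak, offered only as a suggestion:

    // Guarded averages (sketch): avoid NaN/Infinity on a freshly started topology.
    double avgLatency = acked > 0 ? weightedAvgTotal / acked : 0.0;
    double ackedPerSec = uptime > 0 ? ((double) acked) / uptime : 0.0;
    System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency
            + " acked/sec: " + ackedPerSec + " failed: " + failed);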

Aggregations

TopologyInfo (org.apache.storm.generated.TopologyInfo): 23
ExecutorSummary (org.apache.storm.generated.ExecutorSummary): 12
GetInfoOptions (org.apache.storm.generated.GetInfoOptions): 9
Map (java.util.Map): 8
HashMap (java.util.HashMap): 7
List (java.util.List): 7
ArrayList (java.util.ArrayList): 6
TopologySummary (org.apache.storm.generated.TopologySummary): 6
ExecutorStats (org.apache.storm.generated.ExecutorStats): 5
SpoutStats (org.apache.storm.generated.SpoutStats): 5
ImmutableMap (com.google.common.collect.ImmutableMap): 4
HashSet (java.util.HashSet): 4
AlreadyAliveException (org.apache.storm.generated.AlreadyAliveException): 4
AuthorizationException (org.apache.storm.generated.AuthorizationException): 4
InvalidTopologyException (org.apache.storm.generated.InvalidTopologyException): 4
StormTopology (org.apache.storm.generated.StormTopology): 4
File (java.io.File): 3
IOException (java.io.IOException): 3
Collectors (java.util.stream.Collectors): 3
Config (org.apache.storm.Config): 3