Use of backtype.storm.generated.MetricInfo in project jstorm by alibaba.
Class ServiceHandler, method getTaskAndStreamMetrics:
@Override
public List<MetricInfo> getTaskAndStreamMetrics(String topologyId, int taskId) throws TException {
    List<MetricInfo> taskMetricList = getMetrics(topologyId, MetaType.TASK.getT());
    List<MetricInfo> streamMetricList = getMetrics(topologyId, MetaType.STREAM.getT());
    String taskIdStr = taskId + "";

    MetricInfo taskMetricInfo;
    if (taskMetricList != null && taskMetricList.size() > 0) {
        taskMetricInfo = taskMetricList.get(0);
        Map<String, Map<Integer, MetricSnapshot>> metrics = taskMetricInfo.get_metrics();
        for (Iterator<String> itr = metrics.keySet().iterator(); itr.hasNext(); ) {
            String metricName = itr.next();
            String[] parts = metricName.split(MetricUtils.DELIM);
            if (parts.length < 7 || !parts[3].equals(taskIdStr)) {
                itr.remove();
            }
        }
    } else {
        taskMetricInfo = MetricUtils.mkMetricInfo();
    }

    MetricInfo streamMetricInfo;
    if (streamMetricList != null && streamMetricList.size() > 0) {
        streamMetricInfo = streamMetricList.get(0);
        Map<String, Map<Integer, MetricSnapshot>> metrics = streamMetricInfo.get_metrics();
        for (Iterator<String> itr = metrics.keySet().iterator(); itr.hasNext(); ) {
            String metricName = itr.next();
            String[] parts = metricName.split(MetricUtils.DELIM);
            if (parts.length < 7 || !parts[3].equals(taskIdStr)) {
                itr.remove();
            }
        }
    } else {
        streamMetricInfo = MetricUtils.mkMetricInfo();
    }

    return Lists.newArrayList(taskMetricInfo, streamMetricInfo);
}
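The two filtering loops above (and the one in getTaskMetrics below) differ only in which MetricInfo they walk and which name segment they compare. A minimal refactoring sketch, assuming only what the checks already imply (names split by MetricUtils.DELIM into at least 7 segments, task id at index 3, component id at index 2); the helper name is hypothetical, not part of ServiceHandler:

// Hypothetical helper: drop every metric whose name does not carry the expected value
// at the given segment index (3 = task id, 2 = component id in the methods shown here).
private static void retainByNameSegment(MetricInfo metricInfo, int segmentIndex, String expected) {
    Map<String, Map<Integer, MetricSnapshot>> metrics = metricInfo.get_metrics();
    for (Iterator<String> itr = metrics.keySet().iterator(); itr.hasNext(); ) {
        String[] parts = itr.next().split(MetricUtils.DELIM);
        if (parts.length < 7 || !parts[segmentIndex].equals(expected)) {
            itr.remove();
        }
    }
}

With such a helper, both branches above reduce to retainByNameSegment(taskMetricInfo, 3, taskIdStr) and retainByNameSegment(streamMetricInfo, 3, taskIdStr).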
Use of backtype.storm.generated.MetricInfo in project jstorm by alibaba.
Class ServiceHandler, method getTaskMetrics:
@Override
public MetricInfo getTaskMetrics(String topologyId, String component) throws TException {
    List<MetricInfo> taskMetricList = getMetrics(topologyId, MetaType.TASK.getT());
    if (taskMetricList != null && taskMetricList.size() > 0) {
        MetricInfo metricInfo = taskMetricList.get(0);
        Map<String, Map<Integer, MetricSnapshot>> metrics = metricInfo.get_metrics();
        for (Iterator<String> itr = metrics.keySet().iterator(); itr.hasNext(); ) {
            String metricName = itr.next();
            String[] parts = metricName.split(MetricUtils.DELIM);
            if (parts.length < 7 || !parts[2].equals(component)) {
                itr.remove();
            }
        }
        LOG.info("taskMetric, total size:{}", metricInfo.get_metrics_size());
        return metricInfo;
    }
    return MetricUtils.mkMetricInfo();
}
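A hedged client-side sketch, assuming getTaskMetrics is exposed on the Nimbus thrift interface that ServiceHandler implements (the @Override and TException hint at this). The method name and the topology/component values used in the usage note are placeholders:

// Sketch only. Needs backtype.storm.utils.Utils, backtype.storm.utils.NimbusClient,
// backtype.storm.generated.MetricInfo and java.util.Map on the classpath.
public static MetricInfo fetchComponentTaskMetrics(String topologyId, String component) throws Exception {
    Map conf = Utils.readStormConfig();
    NimbusClient client = NimbusClient.getConfiguredClient(conf);
    try {
        // remote call handled by ServiceHandler.getTaskMetrics on the nimbus side
        return client.getClient().getTaskMetrics(topologyId, component);
    } finally {
        client.close();
    }
}

Usage would look like fetchComponentTaskMetrics("SequenceTest-1-1478935723", "SequenceSpout"), where both the topology id and the component name are placeholder values.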
Use of backtype.storm.generated.MetricInfo in project jstorm by alibaba.
Class MergeEvent, method mergeAndUploadClusterMetrics:
private void mergeAndUploadClusterMetrics() {
    TopologyMetricContext clusterContext = context.getClusterTopologyMetricContext();
    TopologyMetric tpMetric = clusterContext.mergeMetrics();
    if (tpMetric == null) {
        tpMetric = MetricUtils.mkTopologyMetric();
        tpMetric.set_topologyMetric(MetricUtils.mkMetricInfo());
    }

    // reset snapshot metric ids
    MetricInfo clusterMetrics = tpMetric.get_topologyMetric();
    Map<String, Long> metricName2Id = clusterContext.getMemMeta();
    for (Map.Entry<String, Map<Integer, MetricSnapshot>> entry : clusterMetrics.get_metrics().entrySet()) {
        String metricName = entry.getKey();
        MetricType metricType = MetricUtils.metricType(metricName);
        Long metricId = metricName2Id.get(metricName);
        for (Map.Entry<Integer, MetricSnapshot> metric : entry.getValue().entrySet()) {
            MetricSnapshot snapshot = metric.getValue();
            snapshot.set_metricId(metricId);
            if (metricType == MetricType.HISTOGRAM) {
                snapshot.set_points(new byte[0]);
            }
            // entry.getValue().put(metric.getKey(), snapshot);
        }
    }

    // fill the metrics that were not acquired with zero values
    long ts = System.currentTimeMillis();
    for (Map.Entry<String, Long> entry : metricName2Id.entrySet()) {
        String name = entry.getKey();
        if (!clusterMetrics.get_metrics().containsKey(name)) {
            Map<Integer, MetricSnapshot> metric = new HashMap<>();
            MetricType type = MetricUtils.metricType(name);
            metric.put(AsmWindow.M1_WINDOW, new MetricSnapshot(entry.getValue(), ts, type.getT()));
            clusterMetrics.put_to_metrics(name, metric);
        }
    }

    // upload to cache
    UpdateEvent.pushEvent(JStormMetrics.CLUSTER_METRIC_KEY, tpMetric);
    LOG.debug("send update event for cluster metrics, size : {}", clusterMetrics.get_metrics_size());
}
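The zero-fill step is the key invariant here: every metric name registered in the meta map ends up with at least one snapshot (id, timestamp and type only) for the 1-minute window, presumably so that downstream consumers see a complete metric set even when no data was collected. A minimal sketch of that step extracted into a helper; the helper name is hypothetical and not part of MergeEvent:

// Hypothetical helper mirroring the zero-fill loop above: for every registered metric name
// missing from the merged MetricInfo, insert an empty MetricSnapshot for the 1-minute window.
private static void fillMissingWithEmptySnapshots(MetricInfo metrics, Map<String, Long> metricName2Id) {
    long ts = System.currentTimeMillis();
    for (Map.Entry<String, Long> entry : metricName2Id.entrySet()) {
        String name = entry.getKey();
        if (!metrics.get_metrics().containsKey(name)) {
            MetricType type = MetricUtils.metricType(name);
            Map<Integer, MetricSnapshot> windows = new HashMap<>();
            windows.put(AsmWindow.M1_WINDOW, new MetricSnapshot(entry.getValue(), ts, type.getT()));
            metrics.put_to_metrics(name, windows);
        }
    }
}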
Use of backtype.storm.generated.MetricInfo in project jstorm by alibaba.
Class UpdateEvent, method run:
/**
 * put metric data to metric cache.
 */
@Override
public void run() {
    if (!context.getTopologyMetricContexts().containsKey(topologyId)) {
        LOG.warn("topology {} has been killed or has not started, skip update.", topologyId);
        return;
    }
    // double check and reset stream metrics if disabled
    if (!JStormMetrics.enableStreamMetrics) {
        topologyMetrics.set_streamMetric(new MetricInfo());
    }
    if (!JStormMetrics.CLUSTER_METRIC_KEY.equals(topologyId)) {
        updateClusterMetrics(topologyId, topologyMetrics);
    }

    // overwrite nimbus-local metrics data
    context.getMetricCache().putMetricData(topologyId, topologyMetrics);

    // The block below is kind of a transaction: first we lock an empty slot and mark it as PRE_SET.
    // At this point the slot is not yet ready for uploading, because the upload thread looks for SET slots only.
    // After all metric data has been saved, we mark the slot as SET and it becomes ready for uploading.
    int idx = context.getAndPresetFirstEmptyIndex();
    if (idx < 0) {
        LOG.error("Exceeding maxPendingUploadMetrics(too much metrics in local rocksdb), "
                + "skip caching metrics data for topology:{}", topologyId);
        return;
    }

    TopologyMetricDataInfo summary = new TopologyMetricDataInfo();
    int total = 0;
    summary.topologyId = topologyId;
    summary.timestamp = timestamp;
    if (topologyId.equals(JStormMetrics.NIMBUS_METRIC_KEY) || topologyId.equals(JStormMetrics.CLUSTER_METRIC_KEY)) {
        summary.type = MetricUploader.METRIC_TYPE_TOPLOGY;
    } else {
        total += topologyMetrics.get_topologyMetric().get_metrics_size()
                + topologyMetrics.get_componentMetric().get_metrics_size();
        if (total > 0) {
            int sub = topologyMetrics.get_taskMetric().get_metrics_size()
                    + topologyMetrics.get_workerMetric().get_metrics_size()
                    + topologyMetrics.get_nettyMetric().get_metrics_size()
                    + topologyMetrics.get_streamMetric().get_metrics_size();
            if (sub > 0) {
                total += sub;
                summary.type = MetricUploader.METRIC_TYPE_ALL;
            } else {
                summary.type = MetricUploader.METRIC_TYPE_TOPLOGY;
            }
        } else {
            summary.type = MetricUploader.METRIC_TYPE_TASK;
            total += topologyMetrics.get_taskMetric().get_metrics_size();
        }
    }

    context.getMetricCache().put(ClusterMetricsContext.PENDING_UPLOAD_METRIC_DATA_INFO + idx, summary);
    context.getMetricCache().put(ClusterMetricsContext.PENDING_UPLOAD_METRIC_DATA + idx, topologyMetrics);
    context.markSet(idx);
    LOG.debug("Put metric data to local cache, topology:{}, idx:{}, total:{}", topologyId, idx, total);
}
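The slot hand-off around getAndPresetFirstEmptyIndex and markSet is easier to see in isolation. Below is an illustrative sketch of the EMPTY / PRE_SET / SET life cycle, assuming a fixed-size atomic state array; it is not the actual ClusterMetricsContext implementation, only a model of the protocol described in the comments above:

// Illustrative only: a writer reserves a slot (PRE_SET), fills it, then publishes it (SET);
// the upload thread consumes SET slots and frees them again (EMPTY).
import java.util.concurrent.atomic.AtomicIntegerArray;

class UploadSlots {
    static final int EMPTY = 0, PRE_SET = 1, SET = 2;
    private final AtomicIntegerArray states;

    UploadSlots(int maxPendingUploads) {
        states = new AtomicIntegerArray(maxPendingUploads);
    }

    /** reserve the first empty slot, or return -1 if all slots are pending upload */
    int getAndPresetFirstEmptyIndex() {
        for (int i = 0; i < states.length(); i++) {
            if (states.compareAndSet(i, EMPTY, PRE_SET)) {
                return i;
            }
        }
        return -1;
    }

    /** publish a slot once its data has been written to the cache */
    void markSet(int idx) {
        states.set(idx, SET);
    }

    /** called by the upload thread after a SET slot has been uploaded */
    void markEmpty(int idx) {
        states.set(idx, EMPTY);
    }
}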
Use of backtype.storm.generated.MetricInfo in project jstorm by alibaba.
Class JStormMetricCache, method getMetricData:
public List<MetricInfo> getMetricData(String topologyId, MetaType metaType) {
    Map<Long, MetricInfo> retMap = new TreeMap<Long, MetricInfo>();
    String key = null;
    if (metaType == MetaType.COMPONENT) {
        key = METRIC_DATA_30M_COMPONENT + topologyId;
    } else if (metaType == MetaType.TASK) {
        key = METRIC_DATA_30M_TASK + topologyId;
    } else if (metaType == MetaType.STREAM) {
        key = METRIC_DATA_30M_STREAM + topologyId;
    } else if (metaType == MetaType.WORKER) {
        key = METRIC_DATA_30M_WORKER + topologyId;
    } else if (metaType == MetaType.NETTY) {
        key = METRIC_DATA_30M_NETTY + topologyId;
    } else if (metaType == MetaType.TOPOLOGY) {
        // topology metrics are spread over 30 cache slots keyed by "<prefix><topologyId>-<i>"
        String keyPrefix = METRIC_DATA_30M_TOPOLOGY + topologyId + "-";
        for (int i = 1; i <= 30; i++) {
            Object obj = cache.get(keyPrefix + i);
            if (obj != null) {
                // each cached value is an Object[]{timestamp, MetricInfo}
                Object[] objects = (Object[]) obj;
                retMap.put((Long) objects[0], (MetricInfo) objects[1]);
            }
        }
    }
    if (key != null) {
        Object obj = cache.get(key);
        if (obj != null) {
            Object[] objects = (Object[]) obj;
            retMap.put((Long) objects[0], (MetricInfo) objects[1]);
        }
    }

    List<MetricInfo> ret = Lists.newArrayList(retMap.values());
    int cnt = 0;
    for (MetricInfo metricInfo : ret) {
        cnt += metricInfo.get_metrics_size();
    }
    LOG.info("getMetricData, topology:{}, meta type:{}, metric info size:{}, total metric size:{}",
            topologyId, metaType, ret.size(), cnt);
    return ret;
}
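For completeness, a hedged usage sketch of the method above. The topology id is a placeholder and metricCache stands for an initialized JStormMetricCache instance:

// Placeholder topology id; metricCache is assumed to be an initialized JStormMetricCache.
List<MetricInfo> taskMetrics = metricCache.getMetricData("SequenceTest-1-1478935723", MetaType.TASK);
for (MetricInfo info : taskMetrics) {
    // each MetricInfo maps a full metric name to a (window -> MetricSnapshot) map
    for (Map.Entry<String, Map<Integer, MetricSnapshot>> e : info.get_metrics().entrySet()) {
        System.out.println(e.getKey() + " windows:" + e.getValue().keySet());
    }
}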