Use of org.apache.flink.runtime.metrics.dump.QueryScopeInfo in project flink by apache.
The class MetricStore, method add.
// -----------------------------------------------------------------------------------------------------------------
// Adding metrics
// -----------------------------------------------------------------------------------------------------------------
public void add(MetricDump metric) {
    try {
        QueryScopeInfo info = metric.scopeInfo;
        TaskManagerMetricStore tm;
        JobMetricStore job;
        TaskMetricStore task;
        SubtaskMetricStore subtask;

        String name = info.scope.isEmpty() ? metric.name : info.scope + "." + metric.name;
        if (name.isEmpty()) {
            // malformed transmission
            return;
        }

        switch (info.getCategory()) {
            case INFO_CATEGORY_JM:
                addMetric(jobManager.metrics, name, metric);
                break;
            case INFO_CATEGORY_TM:
                String tmID = ((QueryScopeInfo.TaskManagerQueryScopeInfo) info).taskManagerID;
                tm = taskManagers.get(tmID);
                if (tm == null) {
                    tm = new TaskManagerMetricStore();
                    taskManagers.put(tmID, tm);
                }
                if (name.contains("GarbageCollector")) {
                    String gcName = name.substring("Status.JVM.GarbageCollector.".length(), name.lastIndexOf('.'));
                    tm.addGarbageCollectorName(gcName);
                }
                addMetric(tm.metrics, name, metric);
                break;
            case INFO_CATEGORY_JOB:
                QueryScopeInfo.JobQueryScopeInfo jobInfo = (QueryScopeInfo.JobQueryScopeInfo) info;
                job = jobs.get(jobInfo.jobID);
                if (job == null) {
                    job = new JobMetricStore();
                    jobs.put(jobInfo.jobID, job);
                }
                addMetric(job.metrics, name, metric);
                break;
            case INFO_CATEGORY_TASK:
                QueryScopeInfo.TaskQueryScopeInfo taskInfo = (QueryScopeInfo.TaskQueryScopeInfo) info;
                job = jobs.get(taskInfo.jobID);
                if (job == null) {
                    job = new JobMetricStore();
                    jobs.put(taskInfo.jobID, job);
                }
                task = job.tasks.get(taskInfo.vertexID);
                if (task == null) {
                    task = new TaskMetricStore();
                    job.tasks.put(taskInfo.vertexID, task);
                }
                subtask = task.subtasks.get(taskInfo.subtaskIndex);
                if (subtask == null) {
                    subtask = new SubtaskMetricStore();
                    task.subtasks.put(taskInfo.subtaskIndex, subtask);
                }
                /*
                 * The duplication is intended. Metrics scoped by subtask are useful for several
                 * job/task handlers, while the WebInterface task metric queries currently do not
                 * account for subtasks, so we don't divide by subtask and instead use the
                 * concatenation of subtask index and metric name as the name for those.
                 */
                addMetric(subtask.metrics, name, metric);
                addMetric(task.metrics, taskInfo.subtaskIndex + "." + name, metric);
                break;
            case INFO_CATEGORY_OPERATOR:
                QueryScopeInfo.OperatorQueryScopeInfo operatorInfo = (QueryScopeInfo.OperatorQueryScopeInfo) info;
                job = jobs.get(operatorInfo.jobID);
                if (job == null) {
                    job = new JobMetricStore();
                    jobs.put(operatorInfo.jobID, job);
                }
                task = job.tasks.get(operatorInfo.vertexID);
                if (task == null) {
                    task = new TaskMetricStore();
                    job.tasks.put(operatorInfo.vertexID, task);
                }
                /*
                 * As the WebInterface does not account for operators (because it can't), we don't
                 * divide by operator and instead use the concatenation of subtask index, operator
                 * name and metric name as the name.
                 */
                addMetric(task.metrics, operatorInfo.subtaskIndex + "." + operatorInfo.operatorName + "." + name, metric);
                break;
            default:
                LOG.debug("Invalid metric dump category: " + info.getCategory());
        }
    } catch (Exception e) {
        LOG.debug("Malformed metric dump.", e);
    }
}
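For context, a minimal sketch of how this method is typically driven. It assumes a public no-arg MetricStore constructor and the MetricDump.CounterDump(QueryScopeInfo, String, long) and TaskManagerQueryScopeInfo(String, String) constructors from the same Flink version; treat the exact signatures as assumptions.

MetricStore store = new MetricStore();

// A TaskManager-scoped counter whose scope marks it as a GC metric.
QueryScopeInfo.TaskManagerQueryScopeInfo tmScope =
    new QueryScopeInfo.TaskManagerQueryScopeInfo("tmId", "Status.JVM.GarbageCollector.G1");
store.add(new MetricDump.CounterDump(tmScope, "Count", 7L));

// The counter now lives in the TaskManagerMetricStore for "tmId" under the key
// "Status.JVM.GarbageCollector.G1.Count", and "G1" is registered as a GC name
// via the GarbageCollector branch above.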
Use of org.apache.flink.runtime.metrics.dump.QueryScopeInfo in project flink by apache.
The class MetricFetcherTest, method createRequestDumpAnswer.
private static MetricDumpSerialization.MetricSerializationResult createRequestDumpAnswer(InstanceID tmID, JobID jobID) throws IOException {
    Map<Counter, Tuple2<QueryScopeInfo, String>> counters = new HashMap<>();
    Map<Gauge<?>, Tuple2<QueryScopeInfo, String>> gauges = new HashMap<>();
    Map<Histogram, Tuple2<QueryScopeInfo, String>> histograms = new HashMap<>();
    Map<Meter, Tuple2<QueryScopeInfo, String>> meters = new HashMap<>();

    SimpleCounter c1 = new SimpleCounter();
    SimpleCounter c2 = new SimpleCounter();
    c1.inc(1);
    c2.inc(2);

    counters.put(c1, new Tuple2<QueryScopeInfo, String>(new QueryScopeInfo.OperatorQueryScopeInfo(jobID.toString(), "taskid", 2, "opname", "abc"), "oc"));
    counters.put(c2, new Tuple2<QueryScopeInfo, String>(new QueryScopeInfo.TaskQueryScopeInfo(jobID.toString(), "taskid", 2, "abc"), "tc"));

    meters.put(new Meter() {
        @Override
        public void markEvent() {
        }

        @Override
        public void markEvent(long n) {
        }

        @Override
        public double getRate() {
            return 5;
        }

        @Override
        public long getCount() {
            return 10;
        }
    }, new Tuple2<QueryScopeInfo, String>(new QueryScopeInfo.JobQueryScopeInfo(jobID.toString(), "abc"), "jc"));

    gauges.put(new Gauge<String>() {
        @Override
        public String getValue() {
            return "x";
        }
    }, new Tuple2<QueryScopeInfo, String>(new QueryScopeInfo.TaskManagerQueryScopeInfo(tmID.toString(), "abc"), "gauge"));

    histograms.put(new TestingHistogram(), new Tuple2<QueryScopeInfo, String>(new QueryScopeInfo.JobManagerQueryScopeInfo("abc"), "hist"));

    MetricDumpSerialization.MetricDumpSerializer serializer = new MetricDumpSerialization.MetricDumpSerializer();
    MetricDumpSerialization.MetricSerializationResult dump = serializer.serialize(counters, gauges, histograms, meters);
    serializer.close();
    return dump;
}
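A hedged sketch of the consuming side of this test helper: round-tripping the serialized dump back into MetricDump objects that MetricStore.add(...) can ingest. It assumes MetricDumpSerialization.MetricDumpDeserializer#deserialize(MetricSerializationResult) returns List<MetricDump>, as in the Flink version under test, and that tmID and jobID are in-scope InstanceID/JobID values.

MetricDumpSerialization.MetricSerializationResult result = createRequestDumpAnswer(tmID, jobID);

MetricDumpSerialization.MetricDumpDeserializer deserializer =
    new MetricDumpSerialization.MetricDumpDeserializer();
List<MetricDump> dumps = deserializer.deserialize(result);

// Feed each deserialized dump into a store; add(...) routes it by its
// QueryScopeInfo category, as shown in MetricStore.add above.
MetricStore store = new MetricStore();
for (MetricDump d : dumps) {
    store.add(d);
}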
Use of org.apache.flink.runtime.metrics.dump.QueryScopeInfo in project flink by apache.
The class MetricStore, method add.
@VisibleForTesting
public void add(MetricDump metric) {
    try {
        QueryScopeInfo info = metric.scopeInfo;
        TaskManagerMetricStore tm;
        JobMetricStore job;
        TaskMetricStore task;
        ComponentMetricStore subtask;

        String name = info.scope.isEmpty() ? metric.name : info.scope + "." + metric.name;
        if (name.isEmpty()) {
            // malformed transmission
            return;
        }

        switch (info.getCategory()) {
            case INFO_CATEGORY_JM:
                addMetric(jobManager.metrics, name, metric);
                break;
            case INFO_CATEGORY_TM:
                String tmID = ((QueryScopeInfo.TaskManagerQueryScopeInfo) info).taskManagerID;
                tm = taskManagers.computeIfAbsent(tmID, k -> new TaskManagerMetricStore());
                if (name.contains("GarbageCollector")) {
                    String gcName = name.substring("Status.JVM.GarbageCollector.".length(), name.lastIndexOf('.'));
                    tm.addGarbageCollectorName(gcName);
                }
                addMetric(tm.metrics, name, metric);
                break;
            case INFO_CATEGORY_JOB:
                QueryScopeInfo.JobQueryScopeInfo jobInfo = (QueryScopeInfo.JobQueryScopeInfo) info;
                job = jobs.computeIfAbsent(jobInfo.jobID, k -> new JobMetricStore());
                addMetric(job.metrics, name, metric);
                break;
            case INFO_CATEGORY_TASK:
                QueryScopeInfo.TaskQueryScopeInfo taskInfo = (QueryScopeInfo.TaskQueryScopeInfo) info;
                job = jobs.computeIfAbsent(taskInfo.jobID, k -> new JobMetricStore());
                task = job.tasks.computeIfAbsent(taskInfo.vertexID, k -> new TaskMetricStore());
                subtask = task.subtasks.computeIfAbsent(taskInfo.subtaskIndex, k -> new ComponentMetricStore());
                /*
                 * The duplication is intended. Metrics scoped by subtask are useful for several
                 * job/task handlers, while the WebInterface task metric queries currently do not
                 * account for subtasks, so we don't divide by subtask and instead use the
                 * concatenation of subtask index and metric name as the name for those.
                 */
                addMetric(subtask.metrics, name, metric);
                addMetric(task.metrics, taskInfo.subtaskIndex + "." + name, metric);
                break;
            case INFO_CATEGORY_OPERATOR:
                QueryScopeInfo.OperatorQueryScopeInfo operatorInfo = (QueryScopeInfo.OperatorQueryScopeInfo) info;
                job = jobs.computeIfAbsent(operatorInfo.jobID, k -> new JobMetricStore());
                task = job.tasks.computeIfAbsent(operatorInfo.vertexID, k -> new TaskMetricStore());
                subtask = task.subtasks.computeIfAbsent(operatorInfo.subtaskIndex, k -> new ComponentMetricStore());
                /*
                 * As the WebInterface does not account for operators (because it can't), we don't
                 * divide by operator and instead use the concatenation of subtask index, operator
                 * name and metric name as the name.
                 */
                addMetric(subtask.metrics, operatorInfo.operatorName + "." + name, metric);
                addMetric(task.metrics, operatorInfo.subtaskIndex + "." + operatorInfo.operatorName + "." + name, metric);
                break;
            default:
                LOG.debug("Invalid metric dump category: " + info.getCategory());
        }
    } catch (Exception e) {
        LOG.debug("Malformed metric dump.", e);
    }
}
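To make the naming convention described in the comments concrete, here is a hedged sketch of where a single operator-scoped counter ends up in this later revision. It assumes the 4-argument OperatorQueryScopeInfo constructor (the variant of the 5-argument one used in the test above, minus the trailing scope) and a constructible MetricStore.

MetricStore store = new MetricStore();

QueryScopeInfo.OperatorQueryScopeInfo opScope =
    new QueryScopeInfo.OperatorQueryScopeInfo("jobId", "vertexId", 3, "myOperator");
store.add(new MetricDump.CounterDump(opScope, "numRecordsIn", 42L));

// After add(...), the counter is stored under two keys:
//   subtask store (index 3): "myOperator.numRecordsIn"
//   task store:              "3.myOperator.numRecordsIn"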