Use of org.apache.hive.spark.client.metrics.Metrics in project hive by apache.
From the class TestMetricsCollection, method testMetricsAggregation.
@Test
public void testMetricsAggregation() {
  MetricsCollection collection = new MetricsCollection();
  // 2 jobs, 2 stages per job, 2 tasks per stage.
  for (int i : Arrays.asList(1, 2)) {
    for (int j : Arrays.asList(1, 2)) {
      for (long k : Arrays.asList(1L, 2L)) {
        collection.addMetrics(i, j, k, makeMetrics(i, j, k));
      }
    }
  }

  assertEquals(ImmutableSet.of(1, 2), collection.getJobIds());
  assertEquals(ImmutableSet.of(1, 2), collection.getStageIds(1));
  assertEquals(ImmutableSet.of(1L, 2L), collection.getTaskIds(1, 1));

  Metrics task112 = collection.getTaskMetrics(1, 1, 2);
  checkMetrics(task112, taskValue(1, 1, 2));

  Metrics stage21 = collection.getStageMetrics(2, 1);
  checkMetrics(stage21, stageValue(2, 1, 2));

  Metrics job1 = collection.getJobMetrics(1);
  checkMetrics(job1, jobValue(1, 2, 2));

  Metrics global = collection.getAllMetrics();
  checkMetrics(global, globalValue(2, 2, 2));
}
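The helpers makeMetrics, taskValue, stageValue, jobValue, globalValue, and checkMetrics are not shown in this excerpt. Below is a minimal sketch of what they could look like, consistent with the Metrics constructor signatures used elsewhere on this page, assuming every metric field of a task carries the same encoded value so that stage, job, and global aggregates reduce to plain sums over tasks. These bodies are assumptions, not the verbatim test code.

// Sketch of the test helpers (assumed, not verbatim from TestMetricsCollection).
// Every metric field of a task carries taskValue(job, stage, task), so the
// expected stage/job/global aggregates are simple sums over the tasks involved.
private long taskValue(int jobId, int stageId, long taskId) {
  return 1000000L * jobId + 1000L * stageId + taskId;
}

private long stageValue(int jobId, int stageId, long taskCount) {
  long sum = 0L;
  for (long t = 1; t <= taskCount; t++) {
    sum += taskValue(jobId, stageId, t);
  }
  return sum;
}

private long jobValue(int jobId, int stageCount, long tasksPerStage) {
  long sum = 0L;
  for (int s = 1; s <= stageCount; s++) {
    sum += stageValue(jobId, s, tasksPerStage);
  }
  return sum;
}

private long globalValue(int jobCount, int stagesPerJob, long tasksPerStage) {
  long sum = 0L;
  for (int j = 1; j <= jobCount; j++) {
    sum += jobValue(j, stagesPerJob, tasksPerStage);
  }
  return sum;
}

private Metrics makeMetrics(int jobId, int stageId, long taskId) {
  long value = taskValue(jobId, stageId, taskId);
  return new Metrics(value, value, value, value, value, value, value, value,
      new InputMetrics(value),
      new ShuffleReadMetrics((int) value, (int) value, value, value),
      new ShuffleWriteMetrics(value, value));
}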
Use of org.apache.hive.spark.client.metrics.Metrics in project hive by apache.
From the class LocalSparkJobStatus, method getSparkStatistics.
@Override
public SparkStatistics getSparkStatistics() {
  SparkStatisticsBuilder sparkStatisticsBuilder = new SparkStatisticsBuilder();
  // Add Hive operator-level statistics.
  sparkStatisticsBuilder.add(sparkCounters);
  // Add Spark job metrics.
  Map<Integer, List<Map.Entry<TaskMetrics, TaskInfo>>> jobMetric = jobMetricsListener.getJobMetric(jobId);
  if (jobMetric == null) {
    return null;
  }
  MetricsCollection metricsCollection = new MetricsCollection();
  Set<Integer> stageIds = jobMetric.keySet();
  for (int stageId : stageIds) {
    List<Map.Entry<TaskMetrics, TaskInfo>> taskMetrics = jobMetric.get(stageId);
    for (Map.Entry<TaskMetrics, TaskInfo> taskMetric : taskMetrics) {
      Metrics metrics = new Metrics(taskMetric.getKey(), taskMetric.getValue());
      // Every entry is added under task id 0; only the aggregated view
      // (getAllMetrics) is consumed below.
      metricsCollection.addMetrics(jobId, stageId, 0, metrics);
    }
  }
  Map<String, Long> flatJobMetric = SparkMetricsUtils.collectMetrics(metricsCollection.getAllMetrics());
  for (Map.Entry<String, Long> entry : flatJobMetric.entrySet()) {
    sparkStatisticsBuilder.add(SparkStatisticsNames.SPARK_GROUP_NAME, entry.getKey(),
        Long.toString(entry.getValue()));
  }
  return sparkStatisticsBuilder.build();
}
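SparkMetricsUtils.collectMetrics flattens the aggregated Metrics into name/value pairs for the statistics builder; its implementation and key names are not shown in this excerpt. A hypothetical flattener illustrating the shape of the Map<String, Long> it returns (the key strings below are illustrative, not Hive's actual names):

// Illustrative stand-in for SparkMetricsUtils.collectMetrics: turn the
// aggregated Metrics into the Map<String, Long> consumed by the loop above.
// The key names here are assumptions for illustration only.
private static Map<String, Long> flattenMetrics(Metrics allMetrics) {
  Map<String, Long> flat = new java.util.LinkedHashMap<>();
  flat.put("ExecutorDeserializeTime", allMetrics.executorDeserializeTime);
  flat.put("ExecutorRunTime", allMetrics.executorRunTime);
  flat.put("ResultSize", allMetrics.resultSize);
  flat.put("JvmGCTime", allMetrics.jvmGCTime);
  flat.put("ResultSerializationTime", allMetrics.resultSerializationTime);
  flat.put("MemoryBytesSpilled", allMetrics.memoryBytesSpilled);
  flat.put("DiskBytesSpilled", allMetrics.diskBytesSpilled);
  flat.put("TaskDurationTime", allMetrics.taskDurationTime);
  // The optional groups may be null, so guard before reading them.
  if (allMetrics.inputMetrics != null) {
    flat.put("BytesRead", allMetrics.inputMetrics.bytesRead);
  }
  return flat;
}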
Use of org.apache.hive.spark.client.metrics.Metrics in project hive by apache.
From the class MetricsCollection, method aggregate.
private Metrics aggregate(Predicate<TaskInfo> filter) {
  lock.readLock().lock();
  try {
    // Task metrics.
    long executorDeserializeTime = 0L;
    long executorRunTime = 0L;
    long resultSize = 0L;
    long jvmGCTime = 0L;
    long resultSerializationTime = 0L;
    long memoryBytesSpilled = 0L;
    long diskBytesSpilled = 0L;
    long taskDurationTime = 0L;

    // Input metrics.
    boolean hasInputMetrics = false;
    long bytesRead = 0L;

    // Shuffle read metrics.
    boolean hasShuffleReadMetrics = false;
    int remoteBlocksFetched = 0;
    int localBlocksFetched = 0;
    long fetchWaitTime = 0L;
    long remoteBytesRead = 0L;

    // Shuffle write metrics.
    boolean hasShuffleWriteMetrics = false;
    long shuffleBytesWritten = 0L;
    long shuffleWriteTime = 0L;

    for (TaskInfo info : Collections2.filter(taskMetrics, filter)) {
      Metrics m = info.metrics;
      executorDeserializeTime += m.executorDeserializeTime;
      executorRunTime += m.executorRunTime;
      resultSize += m.resultSize;
      jvmGCTime += m.jvmGCTime;
      resultSerializationTime += m.resultSerializationTime;
      memoryBytesSpilled += m.memoryBytesSpilled;
      diskBytesSpilled += m.diskBytesSpilled;
      taskDurationTime += m.taskDurationTime;
      if (m.inputMetrics != null) {
        hasInputMetrics = true;
        bytesRead += m.inputMetrics.bytesRead;
      }
      if (m.shuffleReadMetrics != null) {
        hasShuffleReadMetrics = true;
        remoteBlocksFetched += m.shuffleReadMetrics.remoteBlocksFetched;
        localBlocksFetched += m.shuffleReadMetrics.localBlocksFetched;
        fetchWaitTime += m.shuffleReadMetrics.fetchWaitTime;
        remoteBytesRead += m.shuffleReadMetrics.remoteBytesRead;
      }
      if (m.shuffleWriteMetrics != null) {
        hasShuffleWriteMetrics = true;
        shuffleBytesWritten += m.shuffleWriteMetrics.shuffleBytesWritten;
        shuffleWriteTime += m.shuffleWriteMetrics.shuffleWriteTime;
      }
    }

    // Optional metric groups stay null unless at least one task reported them.
    InputMetrics inputMetrics = null;
    if (hasInputMetrics) {
      inputMetrics = new InputMetrics(bytesRead);
    }
    ShuffleReadMetrics shuffleReadMetrics = null;
    if (hasShuffleReadMetrics) {
      shuffleReadMetrics = new ShuffleReadMetrics(remoteBlocksFetched, localBlocksFetched,
          fetchWaitTime, remoteBytesRead);
    }
    ShuffleWriteMetrics shuffleWriteMetrics = null;
    if (hasShuffleWriteMetrics) {
      shuffleWriteMetrics = new ShuffleWriteMetrics(shuffleBytesWritten, shuffleWriteTime);
    }
    return new Metrics(executorDeserializeTime, executorRunTime, resultSize, jvmGCTime,
        resultSerializationTime, memoryBytesSpilled, diskBytesSpilled, taskDurationTime,
        inputMetrics, shuffleReadMetrics, shuffleWriteMetrics);
  } finally {
    lock.readLock().unlock();
  }
}
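aggregate is private; the public accessors (getAllMetrics, getJobMetrics, getStageMetrics, getTaskMetrics) presumably route through it with Guava predicates over TaskInfo, matching the Collections2.filter call above. A sketch of one such accessor, assuming TaskInfo exposes a jobId field (the field name is an assumption):

// Sketch: routing getJobMetrics through aggregate() with a Guava Predicate.
// The TaskInfo field name jobId is assumed for illustration.
public Metrics getJobMetrics(final int jobId) {
  return aggregate(new Predicate<TaskInfo>() {
    @Override
    public boolean apply(TaskInfo input) {
      return input.jobId == jobId;
    }
  });
}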
Use of org.apache.hive.spark.client.metrics.Metrics in project hive by apache.
From the class TestMetricsCollection, method testOptionalMetrics.
@Test
public void testOptionalMetrics() {
  long value = taskValue(1, 1, 1L);
  Metrics metrics = new Metrics(value, value, value, value, value, value, value, value,
      null, null, null);

  MetricsCollection collection = new MetricsCollection();
  for (int i : Arrays.asList(1, 2)) {
    collection.addMetrics(i, 1, 1, metrics);
  }

  Metrics global = collection.getAllMetrics();
  assertNull(global.inputMetrics);
  assertNull(global.shuffleReadMetrics);
  assertNull(global.shuffleWriteMetrics);

  collection.addMetrics(3, 1, 1, makeMetrics(3, 1, 1));

  Metrics global2 = collection.getAllMetrics();
  assertNotNull(global2.inputMetrics);
  assertEquals(taskValue(3, 1, 1), global2.inputMetrics.bytesRead);
  assertNotNull(global2.shuffleReadMetrics);
  assertNotNull(global2.shuffleWriteMetrics);
}
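The practical consequence for callers: the three optional groups are null until at least one task reports them, so any consumer has to guard its reads. A minimal defensive-access sketch:

// Optional metric groups may legitimately be null, as the test above verifies,
// so read them through null checks rather than unconditionally.
Metrics m = collection.getAllMetrics();
long bytesRead = (m.inputMetrics != null) ? m.inputMetrics.bytesRead : 0L;
long shuffleBytesWritten =
    (m.shuffleWriteMetrics != null) ? m.shuffleWriteMetrics.shuffleBytesWritten : 0L;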
Use of org.apache.hive.spark.client.metrics.Metrics in project hive by apache.
From the class TestMetricsCollection, method testInputReadMethodAggregation.
@Test
public void testInputReadMethodAggregation() {
  MetricsCollection collection = new MetricsCollection();
  long value = taskValue(1, 1, 1);
  Metrics metrics1 = new Metrics(value, value, value, value, value, value, value, value,
      new InputMetrics(value), null, null);
  Metrics metrics2 = new Metrics(value, value, value, value, value, value, value, value,
      new InputMetrics(value), null, null);
  collection.addMetrics(1, 1, 1, metrics1);
  collection.addMetrics(1, 1, 2, metrics2);
  Metrics global = collection.getAllMetrics();
  assertNotNull(global.inputMetrics);
}
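The test only asserts that the merged InputMetrics exists. Given the bytesRead summation in the aggregate method shown above, the two tasks' values should also combine additively; an extra assertion one could add (not part of the original test):

// Follows from the bytesRead summation in MetricsCollection.aggregate():
// two tasks each reporting `value` bytes read merge to 2 * value.
assertEquals(2 * value, global.inputMetrics.bytesRead);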