Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric in project hadoop by apache.
The class TestFileSystemTimelineReaderImpl, method loadEntityData.
private static void loadEntityData(String rootDir) throws Exception {
  File appDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1", "app1", "app");

  // Entity "id_1", first write: info map, one event, a single-value and a
  // time-series metric, one config, and relatesTo/isRelatedTo relations.
  TimelineEntity entity11 = new TimelineEntity();
  entity11.setId("id_1");
  entity11.setType("app");
  entity11.setCreatedTime(1425016502000L);
  Map<String, Object> info1 = new HashMap<String, Object>();
  info1.put("info1", "val1");
  info1.put("info2", "val5");
  entity11.addInfo(info1);
  TimelineEvent event = new TimelineEvent();
  event.setId("event_1");
  event.setTimestamp(1425016502003L);
  entity11.addEvent(event);
  Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
  TimelineMetric metric1 = new TimelineMetric();
  metric1.setId("metric1");
  metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric1.addValue(1425016502006L, 113);
  metrics.add(metric1);
  TimelineMetric metric2 = new TimelineMetric();
  metric2.setId("metric2");
  metric2.setType(TimelineMetric.Type.TIME_SERIES);
  metric2.addValue(1425016502016L, 34);
  metrics.add(metric2);
  entity11.setMetrics(metrics);
  Map<String, String> configs = new HashMap<String, String>();
  configs.put("config_1", "127");
  entity11.setConfigs(configs);
  entity11.addRelatesToEntity("flow", "flow1");
  entity11.addIsRelatedToEntity("type1", "tid1_1");
  writeEntityFile(entity11, appDir);

  // Entity "id_1", second write for the same id; the reader is expected to
  // merge it with the first. The configs and metrics collections are
  // cleared and reused.
  TimelineEntity entity12 = new TimelineEntity();
  entity12.setId("id_1");
  entity12.setType("app");
  configs.clear();
  configs.put("config_2", "23");
  configs.put("config_3", "abc");
  entity12.addConfigs(configs);
  metrics.clear();
  TimelineMetric metric12 = new TimelineMetric();
  metric12.setId("metric2");
  metric12.setType(TimelineMetric.Type.TIME_SERIES);
  metric12.addValue(1425016502032L, 48);
  metric12.addValue(1425016502054L, 51);
  metrics.add(metric12);
  TimelineMetric metric3 = new TimelineMetric();
  metric3.setId("metric3");
  metric3.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric3.addValue(1425016502060L, 23L);
  metrics.add(metric3);
  entity12.setMetrics(metrics);
  entity12.addIsRelatedToEntity("type1", "tid1_2");
  entity12.addIsRelatedToEntity("type2", "tid2_1`");
  TimelineEvent event15 = new TimelineEvent();
  event15.setId("event_5");
  event15.setTimestamp(1425016502017L);
  entity12.addEvent(event15);
  writeEntityFile(entity12, appDir);

  // Entity "id_2".
  TimelineEntity entity2 = new TimelineEntity();
  entity2.setId("id_2");
  entity2.setType("app");
  entity2.setCreatedTime(1425016501050L);
  Map<String, Object> info2 = new HashMap<String, Object>();
  info2.put("info2", 4);
  entity2.addInfo(info2);
  Map<String, String> configs2 = new HashMap<String, String>();
  configs2.put("config_1", "129");
  configs2.put("config_3", "def");
  entity2.setConfigs(configs2);
  TimelineEvent event2 = new TimelineEvent();
  event2.setId("event_2");
  event2.setTimestamp(1425016501003L);
  entity2.addEvent(event2);
  Set<TimelineMetric> metrics2 = new HashSet<TimelineMetric>();
  TimelineMetric metric21 = new TimelineMetric();
  metric21.setId("metric1");
  metric21.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric21.addValue(1425016501006L, 300);
  metrics2.add(metric21);
  TimelineMetric metric22 = new TimelineMetric();
  metric22.setId("metric2");
  metric22.setType(TimelineMetric.Type.TIME_SERIES);
  metric22.addValue(1425016501056L, 31);
  metric22.addValue(1425016501084L, 70);
  metrics2.add(metric22);
  TimelineMetric metric23 = new TimelineMetric();
  metric23.setId("metric3");
  metric23.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric23.addValue(1425016502060L, 23L);
  metrics2.add(metric23);
  entity2.setMetrics(metrics2);
  entity2.addRelatesToEntity("flow", "flow2");
  writeEntityFile(entity2, appDir);

  // Entity "id_3".
  TimelineEntity entity3 = new TimelineEntity();
  entity3.setId("id_3");
  entity3.setType("app");
  entity3.setCreatedTime(1425016501050L);
  Map<String, Object> info3 = new HashMap<String, Object>();
  info3.put("info2", 3.5);
  info3.put("info4", 20);
  entity3.addInfo(info3);
  Map<String, String> configs3 = new HashMap<String, String>();
  configs3.put("config_1", "123");
  configs3.put("config_3", "abc");
  entity3.setConfigs(configs3);
  TimelineEvent event3 = new TimelineEvent();
  event3.setId("event_2");
  event3.setTimestamp(1425016501003L);
  entity3.addEvent(event3);
  TimelineEvent event4 = new TimelineEvent();
  event4.setId("event_4");
  event4.setTimestamp(1425016502006L);
  entity3.addEvent(event4);
  Set<TimelineMetric> metrics3 = new HashSet<TimelineMetric>();
  TimelineMetric metric31 = new TimelineMetric();
  metric31.setId("metric1");
  metric31.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric31.addValue(1425016501006L, 124);
  metrics3.add(metric31);
  TimelineMetric metric32 = new TimelineMetric();
  metric32.setId("metric2");
  metric32.setType(TimelineMetric.Type.TIME_SERIES);
  metric32.addValue(1425016501056L, 31);
  metric32.addValue(1425016501084L, 74);
  metrics3.add(metric32);
  entity3.setMetrics(metrics3);
  entity3.addIsRelatedToEntity("type1", "tid1_2");
  writeEntityFile(entity3, appDir);

  // Entity "id_4": only a creation time and one event.
  TimelineEntity entity4 = new TimelineEntity();
  entity4.setId("id_4");
  entity4.setType("app");
  entity4.setCreatedTime(1425016502050L);
  TimelineEvent event44 = new TimelineEvent();
  event44.setId("event_4");
  event44.setTimestamp(1425016502003L);
  entity4.addEvent(event44);
  writeEntityFile(entity4, appDir);

  // Application-attempt entities for the same app.
  File attemptDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1",
      "app1", TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString());
  ApplicationAttemptEntity attempt1 = new ApplicationAttemptEntity();
  attempt1.setId("app-attempt-1");
  attempt1.setCreatedTime(1425017502003L);
  writeEntityFile(attempt1, attemptDir);
  ApplicationAttemptEntity attempt2 = new ApplicationAttemptEntity();
  attempt2.setId("app-attempt-2");
  attempt2.setCreatedTime(1425017502004L);
  writeEntityFile(attempt2, attemptDir);

  // Container entities parented to the two attempts.
  File entityDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1",
      "app1", TimelineEntityType.YARN_CONTAINER.toString());
  ContainerEntity containerEntity1 = new ContainerEntity();
  containerEntity1.setId("container_1_1");
  containerEntity1.setParent(attempt1.getIdentifier());
  containerEntity1.setCreatedTime(1425017502003L);
  writeEntityFile(containerEntity1, entityDir);
  ContainerEntity containerEntity2 = new ContainerEntity();
  containerEntity2.setId("container_2_1");
  containerEntity2.setParent(attempt2.getIdentifier());
  containerEntity2.setCreatedTime(1425018502003L);
  writeEntityFile(containerEntity2, entityDir);
  ContainerEntity containerEntity3 = new ContainerEntity();
  containerEntity3.setId("container_2_2");
  containerEntity3.setParent(attempt2.getIdentifier());
  containerEntity3.setCreatedTime(1425018502003L);
  writeEntityFile(containerEntity3, entityDir);

  // A second app directory, under the flow name "flow1,flow".
  File appDir2 = getAppDir(rootDir, "cluster1", "user1", "flow1,flow", "1", "app2", "app");
  TimelineEntity entity5 = new TimelineEntity();
  entity5.setId("id_5");
  entity5.setType("app");
  entity5.setCreatedTime(1425016502050L);
  writeEntityFile(entity5, appDir2);
}
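As a side note on the fixture above: the two TimelineMetric.Type values govern how addValue is used. A minimal sketch using only the API calls already shown in this section (the snippet itself is illustrative, not from the Hadoop sources):

// SINGLE_VALUE: one (timestamp, value) pair, e.g. a final counter.
TimelineMetric single = new TimelineMetric();
single.setId("metric1");
single.setType(TimelineMetric.Type.SINGLE_VALUE);
single.addValue(1425016502006L, 113);

// TIME_SERIES: repeated addValue calls accumulate (timestamp, value) points.
TimelineMetric series = new TimelineMetric();
series.setId("metric2");
series.setType(TimelineMetric.Type.TIME_SERIES);
series.addValue(1425016502032L, 48);
series.addValue(1425016502054L, 51);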
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric in project hadoop by apache.
The class TestFileSystemTimelineWriterImpl, method testWriteEntityToFile.
/**
 * Unit test for PoC YARN-3264.
 *
 * @throws Exception if writing or verifying the entities fails
 */
@Test
public void testWriteEntityToFile() throws Exception {
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "hello";
  String type = "world";
  entity.setId(id);
  entity.setType(type);
  entity.setCreatedTime(1425016501000L);
  te.addEntity(entity);

  TimelineMetric metric = new TimelineMetric();
  String metricId = "CPU";
  metric.setId(metricId);
  metric.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
  metric.addValue(1425016501000L, 1234567L);

  TimelineEntity entity2 = new TimelineEntity();
  String id2 = "metric";
  String type2 = "app";
  entity2.setId(id2);
  entity2.setType(type2);
  entity2.setCreatedTime(1425016503000L);
  entity2.addMetric(metric);
  te.addEntity(entity2);

  Map<String, TimelineMetric> aggregatedMetrics =
      new HashMap<String, TimelineMetric>();
  aggregatedMetrics.put(metricId, metric);

  FileSystemTimelineWriterImpl fsi = null;
  try {
    fsi = new FileSystemTimelineWriterImpl();
    Configuration conf = new YarnConfiguration();
    String outputRoot = tmpFolder.newFolder().getAbsolutePath();
    conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
        outputRoot);
    fsi.init(conf);
    fsi.start();
    fsi.write("cluster_id", "user_id", "flow_name", "flow_version", 12345678L,
        "app_id", te);

    String fileName = fsi.getOutputRoot() + File.separator + "entities"
        + File.separator + "cluster_id" + File.separator + "user_id"
        + File.separator + "flow_name" + File.separator + "flow_version"
        + File.separator + "12345678" + File.separator + "app_id"
        + File.separator + type + File.separator + id
        + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
    Path path = Paths.get(fileName);
    File f = new File(fileName);
    assertTrue(f.exists() && !f.isDirectory());
    List<String> data = Files.readAllLines(path, StandardCharsets.UTF_8);
    // ensure there's only one entity + 1 new line
    assertTrue("data size is:" + data.size(), data.size() == 2);
    String d = data.get(0);
    // confirm the contents are the same as what was written
    assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));

    // verify aggregated metrics
    String fileName2 = fsi.getOutputRoot() + File.separator + "entities"
        + File.separator + "cluster_id" + File.separator + "user_id"
        + File.separator + "flow_name" + File.separator + "flow_version"
        + File.separator + "12345678" + File.separator + "app_id"
        + File.separator + type2 + File.separator + id2
        + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
    Path path2 = Paths.get(fileName2);
    File file = new File(fileName2);
    assertTrue(file.exists() && !file.isDirectory());
    List<String> data2 = Files.readAllLines(path2, StandardCharsets.UTF_8);
    // ensure there's only one entity + 1 new line
    assertTrue("data size is:" + data2.size(), data2.size() == 2);
    String metricToString = data2.get(0);
    // confirm the contents are the same as what was written
    assertEquals(metricToString, TimelineUtils.dumpTimelineRecordtoJSON(entity2));
  } finally {
    if (fsi != null) {
      fsi.close();
    }
  }
}
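The two file lookups above assemble the same directory layout by hand. Below is a minimal sketch of a helper capturing that pattern; the helper itself is hypothetical (not part of Hadoop), and the only constant assumed is FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION, which the test already references:

// Hypothetical helper mirroring the path the test builds inline:
// <root>/entities/<cluster>/<user>/<flowName>/<flowVersion>/<flowRunId>/
//   <appId>/<entityType>/<entityId><extension>
private static String entityFilePath(String root, String cluster, String user,
    String flowName, String flowVersion, long flowRunId, String appId,
    String entityType, String entityId) {
  return String.join(File.separator, root, "entities", cluster, user,
      flowName, flowVersion, String.valueOf(flowRunId), appId, entityType,
      entityId + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION);
}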
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric in project hadoop by apache.
The class TestTimelineServiceClientIntegration, method testPutEntities.
@Test
public void testPutEntities() throws Exception {
  TimelineV2Client client =
      TimelineV2Client.createTimelineClient(ApplicationId.newInstance(0, 1));
  try {
    // set the timeline service address manually
    client.setTimelineServiceAddress(
        collectorManager.getRestServerBindAddress());
    client.init(conf);
    client.start();
    TimelineEntity entity = new TimelineEntity();
    entity.setType("test entity type");
    entity.setId("test entity id");
    TimelineMetric metric = new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
    metric.setId("test metric id");
    metric.addValue(1L, 1.0D);
    metric.addValue(2L, 2.0D);
    entity.addMetric(metric);
    client.putEntities(entity);
    client.putEntitiesAsync(entity);
  } finally {
    client.stop();
  }
}
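For context, the test exercises both publication paths of TimelineV2Client; an annotated sketch of the distinction as I understand it from the timeline service v2 API (the comments below are an assumption, not taken from the test):

client.putEntities(entity);      // blocking: returns after delivery to the collector
client.putEntitiesAsync(entity); // non-blocking: enqueues and returns immediately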
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric in project hadoop by apache.
The class NMTimelinePublisher, method reportContainerResourceUsage.
public void reportContainerResourceUsage(Container container, Long pmemUsage,
    Float cpuUsagePercentPerCore) {
  if (pmemUsage != ResourceCalculatorProcessTree.UNAVAILABLE
      || cpuUsagePercentPerCore != ResourceCalculatorProcessTree.UNAVAILABLE) {
    ContainerEntity entity = createContainerEntity(container.getContainerId());
    long currentTimeMillis = System.currentTimeMillis();
    if (pmemUsage != ResourceCalculatorProcessTree.UNAVAILABLE) {
      TimelineMetric memoryMetric = new TimelineMetric();
      memoryMetric.setId(ContainerMetric.MEMORY.toString());
      memoryMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
      memoryMetric.addValue(currentTimeMillis, pmemUsage);
      entity.addMetric(memoryMetric);
    }
    if (cpuUsagePercentPerCore != ResourceCalculatorProcessTree.UNAVAILABLE) {
      TimelineMetric cpuMetric = new TimelineMetric();
      cpuMetric.setId(ContainerMetric.CPU.toString());
      // TODO: support average
      cpuMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
      cpuMetric.addValue(currentTimeMillis, Math.round(cpuUsagePercentPerCore));
      entity.addMetric(cpuMetric);
    }
    ApplicationId appId =
        container.getContainerId().getApplicationAttemptId().getApplicationId();
    try {
      // No need to queue inside the publisher; the timeline client already
      // queues the entities it is given.
      TimelineV2Client timelineClient = getTimelineClient(appId);
      if (timelineClient != null) {
        timelineClient.putEntitiesAsync(entity);
      } else {
        LOG.error("Seems like client has been removed before the container"
            + " metric could be published for " + container.getContainerId());
      }
    } catch (IOException | YarnException e) {
      LOG.error("Failed to publish Container metrics for container "
          + container.getContainerId(), e);
    }
  }
}
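A hedged sketch of how a monitor might feed this method. The wiring below is an illustrative assumption (the real call site lives in the NodeManager's container monitor); only ResourceCalculatorProcessTree's getRssMemorySize/getCpuUsagePercent accessors and the UNAVAILABLE sentinel are taken as given:

// Illustrative caller, not the actual monitor code: read current usage from
// the process tree and hand it to the publisher. Both accessors return
// ResourceCalculatorProcessTree.UNAVAILABLE when no data is available, which
// the publisher's guards above filter out.
void sampleAndPublish(NMTimelinePublisher publisher, Container container,
    ResourceCalculatorProcessTree processTree, int vcores) {
  long pmemBytes = processTree.getRssMemorySize();
  float cpuPercent = processTree.getCpuUsagePercent();
  // Normalize total CPU percentage to a per-core figure, preserving the
  // UNAVAILABLE sentinel (an assumption about how the caller normalizes).
  float cpuPerCore = cpuPercent == ResourceCalculatorProcessTree.UNAVAILABLE
      ? ResourceCalculatorProcessTree.UNAVAILABLE
      : cpuPercent / vcores;
  publisher.reportContainerResourceUsage(container, pmemBytes, cpuPerCore);
}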
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric in project hadoop by apache.
The class TestNMTimelinePublisher, method verifyPublishedResourceUsageMetrics.
private void verifyPublishedResourceUsageMetrics(
    DummyTimelineClient timelineClient, long memoryUsage, int cpuUsage) {
  // Poll for up to ~1.5 seconds for the asynchronously published entities.
  TimelineEntity[] entities = null;
  for (int i = 0; i < 10; i++) {
    entities = timelineClient.getLastPublishedEntities();
    if (entities != null) {
      break;
    }
    try {
      Thread.sleep(150L);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
  int numberOfResourceMetrics = 0;
  numberOfResourceMetrics +=
      (memoryUsage == ResourceCalculatorProcessTree.UNAVAILABLE) ? 0 : 1;
  numberOfResourceMetrics +=
      (cpuUsage == ResourceCalculatorProcessTree.UNAVAILABLE) ? 0 : 1;
  assertNotNull("entities are expected to be published", entities);
  assertEquals("Expected number of metrics not published",
      numberOfResourceMetrics, entities[0].getMetrics().size());
  Iterator<TimelineMetric> metrics = entities[0].getMetrics().iterator();
  while (metrics.hasNext()) {
    TimelineMetric metric = metrics.next();
    Iterator<Entry<Long, Number>> entrySet;
    switch (metric.getId()) {
      case CPU_ID:
        if (cpuUsage == ResourceCalculatorProcessTree.UNAVAILABLE) {
          Assert.fail("Not expecting CPU metric to be published");
        }
        entrySet = metric.getValues().entrySet().iterator();
        assertEquals("CPU usage metric not matching", cpuUsage,
            entrySet.next().getValue());
        break;
      case MEMORY_ID:
        if (memoryUsage == ResourceCalculatorProcessTree.UNAVAILABLE) {
          Assert.fail("Not expecting memory metric to be published");
        }
        entrySet = metric.getValues().entrySet().iterator();
        assertEquals("Memory usage metric not matching", memoryUsage,
            entrySet.next().getValue());
        break;
      default:
        Assert.fail("Invalid resource usage metric");
        break;
    }
  }
}
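The bounded polling loop at the top of this method is a reusable idiom for asserting on asynchronously published data. A minimal refactoring sketch (the helper name is illustrative; it uses only the getLastPublishedEntities call already shown above):

// Poll up to ten times, 150 ms apart, for entities published asynchronously;
// returns null if nothing arrived within the ~1.5 s budget.
private static TimelineEntity[] awaitPublishedEntities(
    DummyTimelineClient timelineClient) {
  for (int i = 0; i < 10; i++) {
    TimelineEntity[] entities = timelineClient.getLastPublishedEntities();
    if (entities != null) {
      return entities;
    }
    try {
      Thread.sleep(150L);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      break;
    }
  }
  return null;
}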