Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testNonIntegralMetricValues:
@Test
public void testNonIntegralMetricValues() throws IOException {
TimelineEntities teApp = new TimelineEntities();
ApplicationEntity entityApp = new ApplicationEntity();
String appId = "application_1000178881110_2002";
entityApp.setId(appId);
entityApp.setCreatedTime(1425016501000L);
// add metrics with floating point values
Set<TimelineMetric> metricsApp = new HashSet<>();
TimelineMetric mApp = new TimelineMetric();
mApp.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricAppValues = new HashMap<Long, Number>();
long ts = System.currentTimeMillis();
metricAppValues.put(ts - 20, 10.5);
metricAppValues.put(ts - 10, 20.5);
mApp.setType(Type.TIME_SERIES);
mApp.setValues(metricAppValues);
metricsApp.add(mApp);
entityApp.addMetrics(metricsApp);
teApp.addEntity(entityApp);
TimelineEntities teEntity = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
entity.setId("hello");
entity.setType("world");
entity.setCreatedTime(1425016501000L);
// add metrics with floating point values
Set<TimelineMetric> metricsEntity = new HashSet<>();
TimelineMetric mEntity = new TimelineMetric();
mEntity.setId("MAP_SLOT_MILLIS");
mEntity.addValue(ts - 20, 10.5);
metricsEntity.add(mEntity);
entity.addMetrics(metricsEntity);
teEntity.addEntity(entity);
HBaseTimelineWriterImpl hbi = null;
try {
Configuration c1 = util.getConfiguration();
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
hbi.start();
// Writing application entity.
try {
hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teApp);
Assert.fail("Expected an exception as metric values are non integral");
} catch (IOException e) {
}
// Writing generic entity.
try {
hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teEntity);
Assert.fail("Expected an exception as metric values are non integral");
} catch (IOException e) {
}
hbi.stop();
} finally {
if (hbi != null) {
hbi.stop();
hbi.close();
}
}
}
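For contrast, here is a minimal sketch (not part of the Hadoop test) of the metric shape the HBase writer does accept: the same TimelineMetric API, but with integral long values. The metric id and timestamps are illustrative.

    // Hypothetical sketch: an integral-valued time series the HBase writer accepts.
    TimelineMetric accepted = new TimelineMetric();
    accepted.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> integralValues = new HashMap<Long, Number>();
    long now = System.currentTimeMillis();
    integralValues.put(now - 20, 10L); // long values, not doubles
    integralValues.put(now - 10, 20L);
    accepted.setType(Type.TIME_SERIES);
    accepted.setValues(integralValues);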
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
From the class TestTimelineReaderWebServicesHBaseStorage, method loadData:
private static void loadData() throws Exception {
String cluster = "cluster1";
String user = "user1";
String flow = "flow_name";
String flowVersion = "CF7022C10F1354";
Long runid = 1002345678919L;
Long runid1 = 1002345678920L;
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
String id = "application_1111111111_1111";
String type = TimelineEntityType.YARN_APPLICATION.toString();
entity.setId(id);
entity.setType(type);
Long cTime = 1425016501000L;
entity.setCreatedTime(cTime);
entity.addConfig("cfg2", "value1");
// add metrics
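// note: 'ts' used below is presumably a timestamp field initialized on the test class (e.g. via System.currentTimeMillis()); it is not declared in this snippet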
Set<TimelineMetric> metrics = new HashSet<>();
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues = ImmutableMap.of(ts - 100000, (Number) 2, ts - 90000, 7, ts - 80000, 40);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("MAP1_SLOT_MILLIS");
metricValues = ImmutableMap.of(ts - 100000, (Number) 2, ts - 90000, 9, ts - 80000, 40);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("HDFS_BYTES_READ");
metricValues = ImmutableMap.of(ts - 100000, (Number) 31, ts - 80000, 57);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
entity.addMetrics(metrics);
TimelineEvent event = new TimelineEvent();
event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
event.setTimestamp(cTime);
String expKey = "foo_event";
Object expVal = "test";
event.addInfo(expKey, expVal);
entity.addEvent(event);
TimelineEvent event11 = new TimelineEvent();
event11.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
Long expTs = 1425019501000L;
event11.setTimestamp(expTs);
entity.addEvent(event11);
te.addEntity(entity);
// write another application with same metric to this flow
TimelineEntities te1 = new TimelineEntities();
TimelineEntity entity1 = new TimelineEntity();
id = "application_1111111111_2222";
type = TimelineEntityType.YARN_APPLICATION.toString();
entity1.setId(id);
entity1.setType(type);
cTime = 1425016501000L;
entity1.setCreatedTime(cTime);
entity1.addConfig("cfg1", "value1");
// add metrics
metrics.clear();
TimelineMetric m2 = new TimelineMetric();
m2.setId("MAP_SLOT_MILLIS");
metricValues = new HashMap<Long, Number>();
metricValues.put(ts - 100000, 5L);
metricValues.put(ts - 80000, 101L);
m2.setType(Type.TIME_SERIES);
m2.setValues(metricValues);
metrics.add(m2);
entity1.addMetrics(metrics);
TimelineEvent event1 = new TimelineEvent();
event1.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
event1.setTimestamp(cTime);
event1.addInfo(expKey, expVal);
entity1.addEvent(event1);
te1.addEntity(entity1);
String flow2 = "flow_name2";
String flowVersion2 = "CF7022C10F1454";
Long runid2 = 2102356789046L;
TimelineEntities te3 = new TimelineEntities();
TimelineEntity entity3 = new TimelineEntity();
id = "application_11111111111111_2223";
entity3.setId(id);
entity3.setType(type);
cTime = 1425016501037L;
entity3.setCreatedTime(cTime);
TimelineEvent event2 = new TimelineEvent();
event2.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
event2.setTimestamp(cTime);
event2.addInfo("foo_event", "test");
entity3.addEvent(event2);
te3.addEntity(entity3);
TimelineEntities te4 = new TimelineEntities();
TimelineEntity entity4 = new TimelineEntity();
id = "application_1111111111_2224";
entity4.setId(id);
entity4.setType(type);
cTime = 1425016501034L;
entity4.setCreatedTime(cTime);
TimelineEvent event4 = new TimelineEvent();
event4.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
event4.setTimestamp(cTime);
event4.addInfo("foo_event", "test");
entity4.addEvent(event4);
metrics.clear();
m2 = new TimelineMetric();
m2.setId("MAP_SLOT_MILLIS");
metricValues = ImmutableMap.of(ts - 100000, (Number) 5L, ts - 80000, 101L);
m2.setType(Type.TIME_SERIES);
m2.setValues(metricValues);
metrics.add(m2);
entity4.addMetrics(metrics);
te4.addEntity(entity4);
TimelineEntities te5 = new TimelineEntities();
TimelineEntity entity5 = new TimelineEntity();
entity5.setId("entity1");
entity5.setType("type1");
entity5.setCreatedTime(1425016501034L);
// add some config entries
entity5.addConfigs(ImmutableMap.of("config_param1", "value1", "config_param2", "value2", "cfg_param1", "value3"));
entity5.addInfo(ImmutableMap.of("info1", (Object) "cluster1", "info2", 2.0, "info3", 35000, "info4", 36000));
metrics = new HashSet<>();
m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
metricValues = ImmutableMap.of(ts - 100000, (Number) 2, ts - 80000, 40);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("HDFS_BYTES_READ");
metricValues = ImmutableMap.of(ts - 100000, (Number) 31, ts - 80000, 57);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
entity5.addMetrics(metrics);
TimelineEvent event51 = new TimelineEvent();
event51.setId("event1");
event51.setTimestamp(cTime);
entity5.addEvent(event51);
TimelineEvent event52 = new TimelineEvent();
event52.setId("event2");
event52.setTimestamp(cTime);
entity5.addEvent(event52);
TimelineEvent event53 = new TimelineEvent();
event53.setId("event3");
event53.setTimestamp(cTime);
entity5.addEvent(event53);
TimelineEvent event54 = new TimelineEvent();
event54.setId("event4");
event54.setTimestamp(cTime);
entity5.addEvent(event54);
Map<String, Set<String>> isRelatedTo1 = new HashMap<String, Set<String>>();
isRelatedTo1.put("type2", Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
isRelatedTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
isRelatedTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
isRelatedTo1.put("type3", Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
entity5.addIsRelatedToEntities(isRelatedTo1);
Map<String, Set<String>> relatesTo1 = new HashMap<String, Set<String>>();
relatesTo1.put("type2", Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
relatesTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
relatesTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
relatesTo1.put("type3", Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
entity5.addRelatesToEntities(relatesTo1);
te5.addEntity(entity5);
TimelineEntity entity6 = new TimelineEntity();
entity6.setId("entity2");
entity6.setType("type1");
entity6.setCreatedTime(1425016501034L);
entity6.addConfigs(ImmutableMap.of("cfg_param3", "value1", "configuration_param2", "value2", "config_param1", "value3"));
entity6.addInfo(ImmutableMap.of("info1", (Object) "cluster2", "info2", 2.0, "info4", 35000));
metrics = new HashSet<>();
m1 = new TimelineMetric();
m1.setId("MAP1_SLOT_MILLIS");
metricValues = ImmutableMap.of(ts - 100000, (Number) 12, ts - 80000, 140);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("HDFS_BYTES_READ");
metricValues = ImmutableMap.of(ts - 100000, (Number) 78, ts - 80000, 157);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("MAP11_SLOT_MILLIS");
m1.setType(Type.SINGLE_VALUE);
m1.addValue(ts - 100000, 122);
metrics.add(m1);
entity6.addMetrics(metrics);
TimelineEvent event61 = new TimelineEvent();
event61.setId("event1");
event61.setTimestamp(cTime);
entity6.addEvent(event61);
TimelineEvent event62 = new TimelineEvent();
event62.setId("event5");
event62.setTimestamp(cTime);
entity6.addEvent(event62);
TimelineEvent event63 = new TimelineEvent();
event63.setId("event3");
event63.setTimestamp(cTime);
entity6.addEvent(event63);
TimelineEvent event64 = new TimelineEvent();
event64.setId("event6");
event64.setTimestamp(cTime);
entity6.addEvent(event64);
Map<String, Set<String>> isRelatedTo2 = new HashMap<String, Set<String>>();
isRelatedTo2.put("type2", Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
isRelatedTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
isRelatedTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
isRelatedTo2.put("type3", Sets.newHashSet("entity31"));
entity6.addIsRelatedToEntities(isRelatedTo2);
Map<String, Set<String>> relatesTo2 = new HashMap<String, Set<String>>();
relatesTo2.put("type2", Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
relatesTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
relatesTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
relatesTo2.put("type3", Sets.newHashSet("entity31"));
entity6.addRelatesToEntities(relatesTo2);
te5.addEntity(entity6);
HBaseTimelineWriterImpl hbi = null;
Configuration c1 = util.getConfiguration();
try {
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
hbi.write(cluster, user, flow, flowVersion, runid, entity.getId(), te);
hbi.write(cluster, user, flow, flowVersion, runid, entity1.getId(), te1);
hbi.write(cluster, user, flow, flowVersion, runid1, entity4.getId(), te4);
hbi.write(cluster, user, flow2, flowVersion2, runid2, entity3.getId(), te3);
hbi.write(cluster, user, flow, flowVersion, runid, "application_1111111111_1111", te5);
hbi.flush();
} finally {
if (hbi != null) {
hbi.close();
}
}
}
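The metric construction above repeats the same three-call pattern several times. A small helper, sketched below, could factor it out; the helper name is illustrative and is not part of the Hadoop test.

    // Hypothetical helper: builds a TIME_SERIES TimelineMetric from an id and value map.
    private static TimelineMetric newTimeSeriesMetric(String id, Map<Long, Number> values) {
      TimelineMetric metric = new TimelineMetric();
      metric.setId(id);
      metric.setType(Type.TIME_SERIES);
      metric.setValues(values);
      return metric;
    }

With such a helper, each metric in loadData would reduce to a single call, e.g. metrics.add(newTimeSeriesMetric("MAP_SLOT_MILLIS", ImmutableMap.of(ts - 100000, (Number) 2, ts - 90000, 7, ts - 80000, 40)));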
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
From the class TestFileSystemTimelineWriterImpl, method testWriteEntityToFile:
/**
* Unit test for the YARN-3264 proof of concept (PoC).
*
* @throws Exception
*/
@Test
public void testWriteEntityToFile() throws Exception {
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
String id = "hello";
String type = "world";
entity.setId(id);
entity.setType(type);
entity.setCreatedTime(1425016501000L);
te.addEntity(entity);
TimelineMetric metric = new TimelineMetric();
String metricId = "CPU";
metric.setId(metricId);
metric.setType(TimelineMetric.Type.SINGLE_VALUE);
metric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
metric.addValue(1425016501000L, 1234567L);
TimelineEntity entity2 = new TimelineEntity();
String id2 = "metric";
String type2 = "app";
entity2.setId(id2);
entity2.setType(type2);
entity2.setCreatedTime(1425016503000L);
entity2.addMetric(metric);
te.addEntity(entity2);
Map<String, TimelineMetric> aggregatedMetrics = new HashMap<String, TimelineMetric>();
aggregatedMetrics.put(metricId, metric);
FileSystemTimelineWriterImpl fsi = null;
try {
fsi = new FileSystemTimelineWriterImpl();
Configuration conf = new YarnConfiguration();
String outputRoot = tmpFolder.newFolder().getAbsolutePath();
conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT, outputRoot);
fsi.init(conf);
fsi.start();
fsi.write("cluster_id", "user_id", "flow_name", "flow_version", 12345678L, "app_id", te);
String fileName = fsi.getOutputRoot() + File.separator + "entities" + File.separator +
    "cluster_id" + File.separator + "user_id" + File.separator + "flow_name" + File.separator +
    "flow_version" + File.separator + "12345678" + File.separator + "app_id" + File.separator +
    type + File.separator + id + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path = Paths.get(fileName);
File f = new File(fileName);
assertTrue(f.exists() && !f.isDirectory());
List<String> data = Files.readAllLines(path, StandardCharsets.UTF_8);
// ensure there's only one entity + 1 new line
assertTrue("data size is:" + data.size(), data.size() == 2);
String d = data.get(0);
// confirm the contents same as what was written
assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
// verify aggregated metrics
String fileName2 = fsi.getOutputRoot() + File.separator + "entities" + File.separator +
    "cluster_id" + File.separator + "user_id" + File.separator + "flow_name" + File.separator +
    "flow_version" + File.separator + "12345678" + File.separator + "app_id" + File.separator +
    type2 + File.separator + id2 + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
Path path2 = Paths.get(fileName2);
File file = new File(fileName2);
assertTrue(file.exists() && !file.isDirectory());
List<String> data2 = Files.readAllLines(path2, StandardCharsets.UTF_8);
// ensure there's only one entity + 1 new line
assertTrue("data size is:" + data.size(), data2.size() == 2);
String metricToString = data2.get(0);
// confirm the contents same as what was written
assertEquals(metricToString, TimelineUtils.dumpTimelineRecordtoJSON(entity2));
} finally {
if (fsi != null) {
fsi.close();
}
}
}
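As a side note, the expected output path could also be assembled with java.nio.file.Paths instead of repeated string concatenation; a sketch, assuming the same directory layout the writer uses above:

    // Hypothetical alternative to the concatenation above; same layout, easier to read.
    Path expected = Paths.get(fsi.getOutputRoot(), "entities", "cluster_id", "user_id",
        "flow_name", "flow_version", "12345678", "app_id", type,
        id + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION);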
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
From the class JobHistoryFileReplayMapperV2, method writePerEntity:
private void writePerEntity(AppLevelTimelineCollector collector, List<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException {
for (TimelineEntity entity : entitySet) {
TimelineEntities entities = new TimelineEntities();
entities.addEntity(entity);
collector.putEntities(entities, ugi);
LOG.info("wrote entity " + entity.getId());
}
}
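For comparison, a sketch (not from the Hadoop source) of a batched variant that publishes the whole list through a single TimelineEntities object and one putEntities call; whether per-entity or batched writes are appropriate depends on what the replay mapper is trying to exercise.

    // Hypothetical batched variant of writePerEntity.
    private void writeAllEntities(AppLevelTimelineCollector collector,
        List<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException {
      TimelineEntities entities = new TimelineEntities();
      for (TimelineEntity entity : entitySet) {
        entities.addEntity(entity);
      }
      collector.putEntities(entities, ugi);
      LOG.info("wrote " + entitySet.size() + " entities in one batch");
    }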
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
From the class TestTimelineClientV2Impl, method testASyncCallMerge:
@Test
public void testASyncCallMerge() throws Exception {
client.setSleepBeforeReturn(true);
try {
client.putEntitiesAsync(generateEntity("1"));
Thread.sleep(TIME_TO_SLEEP / 2);
// by the time the first put response comes back, push 2 entities into the queue
client.putEntitiesAsync(generateEntity("2"));
client.putEntitiesAsync(generateEntity("3"));
} catch (YarnException e) {
Assert.fail("Exception is not expected");
}
for (int i = 0; i < 4; i++) {
if (client.getNumOfTimelineEntitiesPublished() == 2) {
break;
}
Thread.sleep(TIME_TO_SLEEP);
}
Assert.assertEquals("two merged TimelineEntities needs to be published", 2, client.getNumOfTimelineEntitiesPublished());
TimelineEntities secondPublishedEntities = client.getPublishedEntities(1);
Assert.assertEquals("Merged TimelineEntities Object needs to 2 TimelineEntity Object", 2, secondPublishedEntities.getEntities().size());
Assert.assertEquals("Order of Async Events Needs to be FIFO", "2", secondPublishedEntities.getEntities().get(0).getId());
Assert.assertEquals("Order of Async Events Needs to be FIFO", "3", secondPublishedEntities.getEntities().get(1).getId());
}
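generateEntity is a helper defined elsewhere in TestTimelineClientV2Impl and not shown here; a hedged sketch of what such a helper might look like (the actual implementation may set different fields):

    // Hypothetical sketch of the generateEntity test helper; the real one may differ.
    private static TimelineEntity generateEntity(String id) {
      TimelineEntity entity = new TimelineEntity();
      entity.setId(id);
      entity.setType("testEntity");
      entity.setCreatedTime(System.currentTimeMillis());
      return entity;
    }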