Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testWriteFlowRunMetricsPrefix.
@Test
public void testWriteFlowRunMetricsPrefix() throws Exception {
String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
String user = "testWriteFlowRunMetricsPrefix_user1";
String flow = "testWriteFlowRunMetricsPrefix_flow_name";
String flowVersion = "CF7022C10F1354";
TimelineEntities te = new TimelineEntities();
TimelineEntity entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
te.addEntity(entityApp1);
HBaseTimelineWriterImpl hbi = null;
Configuration c1 = util.getConfiguration();
try {
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
String appName = "application_11111111111111_1111";
hbi.write(cluster, user, flow, flowVersion, 1002345678919L, appName, te);
// write another application with same metric to this flow
te = new TimelineEntities();
TimelineEntity entityApp2 = TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
te.addEntity(entityApp2);
appName = "application_11111111111111_2222";
hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
hbi.flush();
} finally {
if (hbi != null) {
hbi.close();
}
}
// use the timeline reader to verify data
HBaseTimelineReaderImpl hbr = null;
try {
hbr = new HBaseTimelineReaderImpl();
hbr.init(c1);
hbr.start();
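// retrieve only metrics whose id starts with METRIC1's prefix, i.e. everything
// up to and including the first '_' in the metric id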
TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
    new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
        METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
TimelineEntity entity = hbr.getEntity(
    new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
        TimelineEntityType.YARN_FLOW_RUN.toString(), null),
    new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
Set<TimelineMetric> metrics = entity.getMetrics();
assertEquals(1, metrics.size());
for (TimelineMetric metric : metrics) {
String id = metric.getId();
Map<Long, Number> values = metric.getValues();
assertEquals(1, values.size());
Number value = null;
for (Number n : values.values()) {
value = n;
}
switch(id) {
case METRIC1:
assertEquals(40L, value);
break;
default:
fail("unrecognized metric: " + id);
}
}
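// query every flow run in this flow with the same prefix filter; both runs
// written above should come back, together exposing two metrics that match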
Set<TimelineEntity> entities = hbr.getEntities(
    new TimelineReaderContext(cluster, user, flow, null, null,
        TimelineEntityType.YARN_FLOW_RUN.toString(), null),
    new TimelineEntityFilters(),
    new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity timelineEntity : entities) {
metricCnt += timelineEntity.getMetrics().size();
}
assertEquals(2, metricCnt);
} finally {
if (hbr != null) {
hbr.close();
}
}
}
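The prefix filter above is built from a METRIC1 constant defined elsewhere in the test class and not shown in this excerpt. A minimal sketch of the assumed setup (the constant's value here is illustrative, not taken from the actual test):
// Assumed test-class constant; the real test defines its own metric id.
private static final String METRIC1 = "MAP_SLOT_MILLIS";
// The filter keeps metric ids that start with everything up to and including
// the first underscore, e.g. "MAP_" for "MAP_SLOT_MILLIS".
String metricPrefix = METRIC1.substring(0, METRIC1.indexOf("_") + 1);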
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestHBaseStorageFlowRunCompaction, method testWriteFlowRunCompaction.
@Test
public void testWriteFlowRunCompaction() throws Exception {
String cluster = "kompaction_cluster1";
String user = "kompaction_FlowRun__user1";
String flow = "kompaction_flowRun_flow_name";
String flowVersion = "AF1021C19F1351";
long runid = 1449526652000L;
int start = 10;
int count = 2000;
int appIdSuffix = 1;
HBaseTimelineWriterImpl hbi = null;
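// start metric timestamps slightly in the past; insertTs is bumped before
// every write so each written entity gets a distinct timestamp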
long insertTs = System.currentTimeMillis() - count;
Configuration c1 = util.getConfiguration();
TimelineEntities te1 = null;
TimelineEntity entityApp1 = null;
try {
hbi = new HBaseTimelineWriterImpl();
hbi.init(c1);
// each call to getEntityMetricsApp1/getEntityMetricsApp2 returns an entity
// carrying a batch of values of metric1 and of metric2, so this loop inserts
// count * (values per entity) metric cells into the flow run
for (int i = start; i < start + count; i++) {
String appName = "application_10240000000000_" + appIdSuffix;
insertTs++;
te1 = new TimelineEntities();
entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(insertTs, c1);
te1.addEntity(entityApp1);
hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
appName = "application_2048000000000_7" + appIdSuffix;
insertTs++;
te1 = new TimelineEntities();
entityApp1 = TestFlowDataGenerator.getEntityMetricsApp2(insertTs);
te1.addEntity(entityApp1);
hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
}
} finally {
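// write one last entity whose metrics are marked complete, then flush and
// close the writer before compaction is triggered below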
String appName = "application_10240000000000_" + appIdSuffix;
te1 = new TimelineEntities();
entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1Complete(insertTs + 1, c1);
te1.addEntity(entityApp1);
if (hbi != null) {
hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
hbi.flush();
hbi.close();
}
}
// check in flow run table
HRegionServer server = util.getRSForFirstRegionInTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
List<Region> regions = server.getOnlineRegions(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
assertTrue("Didn't find any regions for primary table!", regions.size() > 0);
// flush and compact all the regions of the primary table
for (Region region : regions) {
region.flush(true);
region.compact(true);
}
// check flow run for one flow many apps
checkFlowRunTable(cluster, user, flow, runid, c1, 4);
}
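checkFlowRunTable(...) is a helper defined in the same test class and not shown in this excerpt. A rough sketch of the kind of verification such a helper might perform, using only standard HBase client calls (the method name, cell-count argument and per-row assertion here are illustrative, not the actual helper):
// Hypothetical sketch: scan the flow run table and verify the cell count per row.
private void checkFlowRunTableSketch(Configuration conf, int expectedCellCount)
    throws IOException {
  try (Connection conn = ConnectionFactory.createConnection(conf);
      Table table = conn.getTable(
          TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
      ResultScanner scanner = table.getScanner(new Scan())) {
    int rowCount = 0;
    for (Result result : scanner) {
      rowCount++;
      // after flush and compaction, each flow run row is expected to hold
      // only the aggregated metric cells
      assertEquals(expectedCellCount, result.rawCells().length);
    }
    assertTrue("expected at least one flow run row", rowCount > 0);
  }
}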
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestTimelineCollector, method generateTestEntities.
private TimelineEntities generateTestEntities(int groups, int entities) {
TimelineEntities te = new TimelineEntities();
for (int j = 0; j < groups; j++) {
for (int i = 0; i < entities; i++) {
TimelineEntity entity = new TimelineEntity();
String containerId = "container_1000178881110_2002_" + i;
entity.setId(containerId);
String entityType = "TEST_" + j;
entity.setType(entityType);
long cTime = 1425016501000L;
entity.setCreatedTime(cTime);
// add metrics
Set<TimelineMetric> metrics = new HashSet<>();
TimelineMetric m1 = new TimelineMetric();
m1.setId("HDFS_BYTES_WRITE");
m1.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
long ts = System.currentTimeMillis();
m1.addValue(ts - 20000, 100L);
metrics.add(m1);
TimelineMetric m2 = new TimelineMetric();
m2.setId("VCORES_USED");
m2.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
m2.addValue(ts - 20000, 3L);
metrics.add(m2);
// m3 should not show up in the aggregation
TimelineMetric m3 = new TimelineMetric();
m3.setId("UNRELATED_VALUES");
m3.addValue(ts - 20000, 3L);
metrics.add(m3);
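// TXN_FINISH_TIME uses MAX aggregation, so the aggregated value tracks the
// largest per-entity value (here the entity index i)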
TimelineMetric m4 = new TimelineMetric();
m4.setId("TXN_FINISH_TIME");
m4.setRealtimeAggregationOp(TimelineMetricOperation.MAX);
m4.addValue(ts - 20000, i);
metrics.add(m4);
entity.addMetrics(metrics);
te.addEntity(entity);
}
}
return te;
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestTimelineCollector, method testAggregation.
@Test
public void testAggregation() throws Exception {
// Test aggregation with multiple groups.
int groups = 3;
int n = 50;
TimelineEntities testEntities = generateTestEntities(groups, n);
TimelineEntity resultEntity = TimelineCollector.aggregateEntities(testEntities, "test_result", "TEST_AGGR", true);
assertEquals(resultEntity.getMetrics().size(), groups * 3);
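// three aggregated metrics per group: HDFS_BYTES_WRITE, VCORES_USED and
// TXN_FINISH_TIME carry a real-time aggregation op; UNRELATED_VALUES does not
// and is therefore excluded from the result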
for (int i = 0; i < groups; i++) {
Set<TimelineMetric> metrics = resultEntity.getMetrics();
for (TimelineMetric m : metrics) {
if (m.getId().startsWith("HDFS_BYTES_WRITE")) {
assertEquals(100 * n, m.getSingleDataValue().intValue());
} else if (m.getId().startsWith("VCORES_USED")) {
assertEquals(3 * n, m.getSingleDataValue().intValue());
} else if (m.getId().startsWith("TXN_FINISH_TIME")) {
assertEquals(n - 1, m.getSingleDataValue());
} else {
fail("Unrecognized metric! " + m.getId());
}
}
}
// Test aggregation with a single group.
TimelineEntities testEntities1 = generateTestEntities(1, n);
TimelineEntity resultEntity1 = TimelineCollector.aggregateEntities(testEntities1, "test_result", "TEST_AGGR", false);
assertEquals(resultEntity1.getMetrics().size(), 3);
Set<TimelineMetric> metrics = resultEntity1.getMetrics();
for (TimelineMetric m : metrics) {
if (m.getId().equals("HDFS_BYTES_WRITE")) {
assertEquals(100 * n, m.getSingleDataValue().intValue());
} else if (m.getId().equals("VCORES_USED")) {
assertEquals(3 * n, m.getSingleDataValue().intValue());
} else if (m.getId().equals("TXN_FINISH_TIME")) {
assertEquals(n - 1, m.getSingleDataValue());
} else {
fail("Unrecognized metric! " + m.getId());
}
}
}
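With n = 50, the expected aggregated values follow directly from the generator above: HDFS_BYTES_WRITE sums 100 over 50 entities to 5000, VCORES_USED sums 3 over 50 entities to 150, and TXN_FINISH_TIME takes the maximum of the entity indices 0..49, which is 49 (n - 1).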
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class DataGeneratorForTest, method loadApps.
static void loadApps(HBaseTestingUtility util) throws IOException {
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
String id = "application_1111111111_2222";
entity.setId(id);
entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
Long cTime = 1425016502000L;
entity.setCreatedTime(cTime);
// add the info map in Timeline Entity
Map<String, Object> infoMap = new HashMap<>();
infoMap.put("infoMapKey1", "infoMapValue2");
infoMap.put("infoMapKey2", 20);
infoMap.put("infoMapKey3", 85.85);
entity.addInfo(infoMap);
// add the isRelatedToEntity info
Set<String> isRelatedToSet = new HashSet<>();
isRelatedToSet.add("relatedto1");
Map<String, Set<String>> isRelatedTo = new HashMap<>();
isRelatedTo.put("task", isRelatedToSet);
entity.setIsRelatedToEntities(isRelatedTo);
// add the relatesTo info
Set<String> relatesToSet = new HashSet<>();
relatesToSet.add("relatesto1");
relatesToSet.add("relatesto3");
Map<String, Set<String>> relatesTo = new HashMap<>();
relatesTo.put("container", relatesToSet);
Set<String> relatesToSet11 = new HashSet<>();
relatesToSet11.add("relatesto4");
relatesTo.put("container1", relatesToSet11);
entity.setRelatesToEntities(relatesTo);
// add some config entries
Map<String, String> conf = new HashMap<>();
conf.put("config_param1", "value1");
conf.put("config_param2", "value2");
conf.put("cfg_param1", "value3");
entity.addConfigs(conf);
// add metrics
Set<TimelineMetric> metrics = new HashSet<>();
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues = new HashMap<>();
long ts = System.currentTimeMillis();
metricValues.put(ts - 120000, 100000000);
metricValues.put(ts - 100000, 200000000);
metricValues.put(ts - 80000, 300000000);
metricValues.put(ts - 60000, 400000000);
metricValues.put(ts - 40000, 50000000000L);
metricValues.put(ts - 20000, 60000000000L);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
TimelineMetric m12 = new TimelineMetric();
m12.setId("MAP1_BYTES");
m12.addValue(ts, 50);
metrics.add(m12);
entity.addMetrics(metrics);
TimelineEvent event = new TimelineEvent();
event.setId("start_event");
event.setTimestamp(ts);
entity.addEvent(event);
te.addEntity(entity);
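// second application entity for the same flow, with its own info map,
// relations, configs, metrics and events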
TimelineEntities te1 = new TimelineEntities();
TimelineEntity entity1 = new TimelineEntity();
String id1 = "application_1111111111_3333";
entity1.setId(id1);
entity1.setType(TimelineEntityType.YARN_APPLICATION.toString());
entity1.setCreatedTime(cTime + 20L);
// add the info map in Timeline Entity
Map<String, Object> infoMap1 = new HashMap<>();
infoMap1.put("infoMapKey1", "infoMapValue1");
infoMap1.put("infoMapKey2", 10);
entity1.addInfo(infoMap1);
// add the isRelatedToEntity info
Set<String> isRelatedToSet1 = new HashSet<>();
isRelatedToSet1.add("relatedto3");
isRelatedToSet1.add("relatedto5");
Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
isRelatedTo1.put("task1", isRelatedToSet1);
Set<String> isRelatedToSet11 = new HashSet<>();
isRelatedToSet11.add("relatedto4");
isRelatedTo1.put("task2", isRelatedToSet11);
entity1.setIsRelatedToEntities(isRelatedTo1);
// add the relatesTo info
Set<String> relatesToSet1 = new HashSet<>();
relatesToSet1.add("relatesto1");
relatesToSet1.add("relatesto2");
Map<String, Set<String>> relatesTo1 = new HashMap<>();
relatesTo1.put("container", relatesToSet1);
entity1.setRelatesToEntities(relatesTo1);
// add some config entries
Map<String, String> conf1 = new HashMap<>();
conf1.put("cfg_param1", "value1");
conf1.put("cfg_param2", "value2");
entity1.addConfigs(conf1);
// add metrics
Set<TimelineMetric> metrics1 = new HashSet<>();
TimelineMetric m2 = new TimelineMetric();
m2.setId("MAP1_SLOT_MILLIS");
Map<Long, Number> metricValues1 = new HashMap<>();
long ts1 = System.currentTimeMillis();
metricValues1.put(ts1 - 120000, 100000000);
metricValues1.put(ts1 - 100000, 200000000);
metricValues1.put(ts1 - 80000, 300000000);
metricValues1.put(ts1 - 60000, 400000000);
metricValues1.put(ts1 - 40000, 50000000000L);
metricValues1.put(ts1 - 20000, 60000000000L);
m2.setType(Type.TIME_SERIES);
m2.setValues(metricValues1);
metrics1.add(m2);
entity1.addMetrics(metrics1);
TimelineEvent event11 = new TimelineEvent();
event11.setId("end_event");
event11.setTimestamp(ts);
entity1.addEvent(event11);
TimelineEvent event12 = new TimelineEvent();
event12.setId("update_event");
event12.setTimestamp(ts - 10);
entity1.addEvent(event12);
te1.addEntity(entity1);
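// third application entity: minimal, carrying only an update event and
// relation information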
TimelineEntities te2 = new TimelineEntities();
TimelineEntity entity2 = new TimelineEntity();
String id2 = "application_1111111111_4444";
entity2.setId(id2);
entity2.setType(TimelineEntityType.YARN_APPLICATION.toString());
entity2.setCreatedTime(cTime + 40L);
TimelineEvent event21 = new TimelineEvent();
event21.setId("update_event");
event21.setTimestamp(ts - 20);
entity2.addEvent(event21);
Set<String> isRelatedToSet2 = new HashSet<String>();
isRelatedToSet2.add("relatedto3");
Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
isRelatedTo2.put("task1", isRelatedToSet2);
entity2.setIsRelatedToEntities(isRelatedTo2);
Map<String, Set<String>> relatesTo3 = new HashMap<>();
Set<String> relatesToSet14 = new HashSet<String>();
relatesToSet14.add("relatesto7");
relatesTo3.put("container2", relatesToSet14);
entity2.setRelatesToEntities(relatesTo3);
te2.addEntity(entity2);
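// write all three application entities to HBase under the same flow run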
HBaseTimelineWriterImpl hbi = null;
try {
hbi = new HBaseTimelineWriterImpl();
hbi.init(util.getConfiguration());
hbi.start();
String cluster = "cluster1";
String user = "user1";
String flow = "some_flow_name";
String flowVersion = "AB7822C10F1111";
long runid = 1002345678919L;
String appName = "application_1111111111_2222";
hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
appName = "application_1111111111_3333";
hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
appName = "application_1111111111_4444";
hbi.write(cluster, user, flow, flowVersion, runid, appName, te2);
hbi.stop();
} finally {
if (hbi != null) {
hbi.stop();
hbi.close();
}
}
}
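loadApps expects an already running HBaseTestingUtility. A minimal sketch of how a reader test might drive it from JUnit setup and teardown hooks (the schema-creation step is elided because its exact API is not shown in this excerpt):
// Hypothetical test harness around DataGeneratorForTest.loadApps.
private static HBaseTestingUtility util;

@BeforeClass
public static void setupBeforeClass() throws Exception {
  util = new HBaseTestingUtility();
  util.startMiniCluster();
  // ... create the timeline service schema on the mini cluster here ...
  DataGeneratorForTest.loadApps(util);
}

@AfterClass
public static void tearDownAfterClass() throws Exception {
  if (util != null) {
    util.shutdownMiniCluster();
  }
}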