Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, the method getEntityGreaterStartTime:
static TimelineEntity getEntityGreaterStartTime(long startTs) {
  TimelineEntity entity = new TimelineEntity();
  entity.setCreatedTime(startTs);
  entity.setId("flowRunHello with greater start time");
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setType(type);
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(startTs);
  String expKey = "foo_event_greater";
  String expVal = "test_app_greater";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
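A minimal sketch of how the returned entity might be inspected. The class name InspectGreaterStartTime is hypothetical, and it assumes it lives in the same package as TestFlowDataGenerator, since the generator methods are package-private.

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;

public class InspectGreaterStartTime {
  public static void main(String[] args) {
    long startTs = System.currentTimeMillis();
    // hypothetical caller; assumes the same package as TestFlowDataGenerator
    TimelineEntity entity = TestFlowDataGenerator.getEntityGreaterStartTime(startTs);
    // the created time echoes the supplied start timestamp
    System.out.println("createdTime=" + entity.getCreatedTime());
    // the single CREATED event carries the id, timestamp and info map set above
    for (TimelineEvent event : entity.getEvents()) {
      System.out.println(event.getId() + " @ " + event.getTimestamp()
          + " info=" + event.getInfo());
    }
  }
}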
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, the method getMinFlushEntity:
static TimelineEntity getMinFlushEntity(long startTs) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunHelloFlushEntityMin";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  entity.setCreatedTime(startTs);
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(startTs);
  entity.addEvent(event);
  return entity;
}
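A hedged sketch of how a generated entity like this is commonly grouped before being handed to a writer: the TimelineEntities batch below is illustrative only, and the class name BatchMinFlushEntities is not part of the original test.

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

public class BatchMinFlushEntities {
  public static void main(String[] args) {
    // collect a generated entity into a TimelineEntities batch (illustrative only)
    TimelineEntities batch = new TimelineEntities();
    TimelineEntity entity =
        TestFlowDataGenerator.getMinFlushEntity(System.currentTimeMillis());
    batch.addEntity(entity);
    System.out.println("batched " + batch.getEntities().size() + " entity");
  }
}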
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, the method getEntityMetricsApp2:
static TimelineEntity getEntityMetricsApp2(long insertTs) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunMetrics_test";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  // add a single time-series metric with two data points
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId(METRIC_1);
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  long ts = insertTs;
  metricValues.put(ts - 100000, 5L);
  metricValues.put(ts - 80000, 101L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity.addMetrics(metrics);
  // add a CREATED event carrying a small info map
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  long endTs = 1439379885000L;
  event.setTimestamp(endTs);
  String expKey = "foo_event_greater";
  String expVal = "test_app_greater";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
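To make the shape of the generated data concrete, here is a short hypothetical snippet (the class name DumpApp2Metrics is ours) that walks the TIME_SERIES metric values attached above:

import java.util.Map;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

public class DumpApp2Metrics {
  public static void main(String[] args) {
    TimelineEntity entity =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    for (TimelineMetric metric : entity.getMetrics()) {
      System.out.println("metric " + metric.getId() + " type=" + metric.getType());
      // keys are metric timestamps, values are the recorded readings
      for (Map.Entry<Long, Number> point : metric.getValues().entrySet()) {
        System.out.println("  " + point.getKey() + " -> " + point.getValue());
      }
    }
  }
}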
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, the method getEntityMetricsApp1Complete:
static TimelineEntity getEntityMetricsApp1Complete(long insertTs, Configuration c1) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunMetrics_test";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  // add two time-series metrics, each with a single data point
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId(METRIC_1);
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  long ts = insertTs;
  metricValues.put(ts - 80000, 40L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m2 = new TimelineMetric();
  m2.setId(METRIC_2);
  metricValues = new HashMap<Long, Number>();
  ts = insertTs;
  metricValues.put(ts - 80000, 57L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues);
  metrics.add(m2);
  entity.addMetrics(metrics);
  // mark the application as finished at insertTs
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  event.setTimestamp(insertTs);
  event.addInfo("done", "insertTs=" + insertTs);
  entity.addEvent(event);
  return entity;
}
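A small hedged sketch showing how a caller could confirm the FINISHED event added above. FindFinishedEvent is a hypothetical class, and a fresh Configuration is passed only to satisfy the signature shown.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;

public class FindFinishedEvent {
  public static void main(String[] args) {
    long insertTs = System.currentTimeMillis();
    TimelineEntity entity =
        TestFlowDataGenerator.getEntityMetricsApp1Complete(insertTs, new Configuration());
    for (TimelineEvent event : entity.getEvents()) {
      if (ApplicationMetricsConstants.FINISHED_EVENT_TYPE.equals(event.getId())) {
        // the "done" info entry records the insert timestamp
        System.out.println("finished at " + event.getTimestamp()
            + ", info=" + event.getInfo());
      }
    }
  }
}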
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, the method getAFullEntity:
static TimelineEntity getAFullEntity(long ts, long endTs) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunFullEntity";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  entity.setCreatedTime(ts);
  // add two time-series metrics
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId(METRIC_1);
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  metricValues.put(ts - 120000, 100000000L);
  metricValues.put(ts - 100000, 200000000L);
  metricValues.put(ts - 80000, 300000000L);
  metricValues.put(ts - 60000, 400000000L);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 60000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m2 = new TimelineMetric();
  m2.setId(METRIC_2);
  metricValues = new HashMap<Long, Number>();
  metricValues.put(ts - 900000, 31L);
  metricValues.put(ts - 30000, 57L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues);
  metrics.add(m2);
  entity.addMetrics(metrics);
  // add a CREATED event at the start time
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(ts);
  String expKey = "foo_event";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  // add a FINISHED event at start time + 6 hrs (21600000 ms)
  event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  long expTs = ts + 21600000;
  event.setTimestamp(expTs);
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
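Since the generator places the CREATED event at ts and the FINISHED event six hours later, a hypothetical snippet (FullEntityDuration is our name, not part of the original test) can recover the run duration from the two events:

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;

public class FullEntityDuration {
  public static void main(String[] args) {
    long ts = System.currentTimeMillis();
    TimelineEntity entity = TestFlowDataGenerator.getAFullEntity(ts, ts + 21600000L);
    long created = 0L;
    long finished = 0L;
    for (TimelineEvent event : entity.getEvents()) {
      if (ApplicationMetricsConstants.CREATED_EVENT_TYPE.equals(event.getId())) {
        created = event.getTimestamp();
      } else if (ApplicationMetricsConstants.FINISHED_EVENT_TYPE.equals(event.getId())) {
        finished = event.getTimestamp();
      }
    }
    // expected: 21600000 ms, i.e. the 6-hour gap set by getAFullEntity
    System.out.println("run duration ms = " + (finished - created));
  }
}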