Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in the Apache Hadoop project.
From class TestFlowDataGenerator, method getFlowApp1.
static TimelineEntity getFlowApp1(long appCreatedTime) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowActivity_test";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  entity.setCreatedTime(appCreatedTime);
  // attach a CREATED event carrying a single info key/value pair
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(appCreatedTime);
  String expKey = "foo_event";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
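For context, an entity from this generator is normally wrapped in a TimelineEntities batch and handed to the HBase writer, as the tests later in this section do. A minimal sketch, assuming an HBaseTestingUtility field named util; the cluster, user, flow, and application identifiers below are illustrative placeholders, not fixed by the generator:

TimelineEntities te = new TimelineEntities();
te.addEntity(getFlowApp1(System.currentTimeMillis()));
HBaseTimelineWriterImpl hbi = new HBaseTimelineWriterImpl();
try {
  hbi.init(util.getConfiguration());
  hbi.start();
  // placeholder flow-run coordinates, mirroring the write calls below
  hbi.write("cluster1", "user1", "some_flow_name", "AB7822C10F1111",
      1002345678919L, "application_1231111111_1111", te);
} finally {
  hbi.stop();
  hbi.close();
}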
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in the Apache Hadoop project.
From class DataGeneratorForTest, method loadEntities.
static void loadEntities(HBaseTestingUtility util) throws IOException {
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "hello";
  String type = "world";
  entity.setId(id);
  entity.setType(type);
  Long cTime = 1425016502000L;
  entity.setCreatedTime(cTime);
  // add the info map to the timeline entity
  Map<String, Object> infoMap = new HashMap<>();
  infoMap.put("infoMapKey1", "infoMapValue2");
  infoMap.put("infoMapKey2", 20);
  infoMap.put("infoMapKey3", 71.4);
  entity.addInfo(infoMap);
  // add the isRelatedToEntity info
  Set<String> isRelatedToSet = new HashSet<>();
  isRelatedToSet.add("relatedto1");
  Map<String, Set<String>> isRelatedTo = new HashMap<>();
  isRelatedTo.put("task", isRelatedToSet);
  entity.setIsRelatedToEntities(isRelatedTo);
  // add the relatesTo info
  Set<String> relatesToSet = new HashSet<>();
  relatesToSet.add("relatesto1");
  relatesToSet.add("relatesto3");
  Map<String, Set<String>> relatesTo = new HashMap<>();
  relatesTo.put("container", relatesToSet);
  Set<String> relatesToSet11 = new HashSet<>();
  relatesToSet11.add("relatesto4");
  relatesTo.put("container1", relatesToSet11);
  entity.setRelatesToEntities(relatesTo);
  // add some config entries
  Map<String, String> conf = new HashMap<>();
  conf.put("config_param1", "value1");
  conf.put("config_param2", "value2");
  conf.put("cfg_param1", "value3");
  entity.addConfigs(conf);
  // add a time-series metric and a single-value metric
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues = new HashMap<>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000);
  metricValues.put(ts - 100000, 200000000);
  metricValues.put(ts - 80000, 300000000);
  metricValues.put(ts - 60000, 400000000);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 70000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m12 = new TimelineMetric();
  m12.setId("MAP1_BYTES");
  m12.addValue(ts, 50);
  metrics.add(m12);
  entity.addMetrics(metrics);
  TimelineEvent event = new TimelineEvent();
  event.setId("start_event");
  event.setTimestamp(ts);
  entity.addEvent(event);
  te.addEntity(entity);

  TimelineEntity entity1 = new TimelineEntity();
  String id1 = "hello1";
  entity1.setId(id1);
  entity1.setType(type);
  entity1.setCreatedTime(cTime + 20L);
  // add the info map to the timeline entity
  Map<String, Object> infoMap1 = new HashMap<>();
  infoMap1.put("infoMapKey1", "infoMapValue1");
  infoMap1.put("infoMapKey2", 10);
  entity1.addInfo(infoMap1);
  // add events
  TimelineEvent event11 = new TimelineEvent();
  event11.setId("end_event");
  event11.setTimestamp(ts);
  entity1.addEvent(event11);
  TimelineEvent event12 = new TimelineEvent();
  event12.setId("update_event");
  event12.setTimestamp(ts - 10);
  entity1.addEvent(event12);
  // add the isRelatedToEntity info
  Set<String> isRelatedToSet1 = new HashSet<>();
  isRelatedToSet1.add("relatedto3");
  isRelatedToSet1.add("relatedto5");
  Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
  isRelatedTo1.put("task1", isRelatedToSet1);
  Set<String> isRelatedToSet11 = new HashSet<>();
  isRelatedToSet11.add("relatedto4");
  isRelatedTo1.put("task2", isRelatedToSet11);
  entity1.setIsRelatedToEntities(isRelatedTo1);
  // add the relatesTo info
  Set<String> relatesToSet1 = new HashSet<>();
  relatesToSet1.add("relatesto1");
  relatesToSet1.add("relatesto2");
  Map<String, Set<String>> relatesTo1 = new HashMap<>();
  relatesTo1.put("container", relatesToSet1);
  entity1.setRelatesToEntities(relatesTo1);
  // add some config entries
  Map<String, String> conf1 = new HashMap<>();
  conf1.put("cfg_param1", "value1");
  conf1.put("cfg_param2", "value2");
  entity1.addConfigs(conf1);
  // add a time-series metric
  Set<TimelineMetric> metrics1 = new HashSet<>();
  TimelineMetric m2 = new TimelineMetric();
  m2.setId("MAP1_SLOT_MILLIS");
  Map<Long, Number> metricValues1 = new HashMap<>();
  long ts1 = System.currentTimeMillis();
  metricValues1.put(ts1 - 120000, 100000000);
  metricValues1.put(ts1 - 100000, 200000000);
  metricValues1.put(ts1 - 80000, 300000000);
  metricValues1.put(ts1 - 60000, 400000000);
  metricValues1.put(ts1 - 40000, 50000000000L);
  metricValues1.put(ts1 - 20000, 60000000000L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues1);
  metrics1.add(m2);
  entity1.addMetrics(metrics1);
  te.addEntity(entity1);

  TimelineEntity entity2 = new TimelineEntity();
  String id2 = "hello2";
  entity2.setId(id2);
  entity2.setType(type);
  entity2.setCreatedTime(cTime + 40L);
  TimelineEvent event21 = new TimelineEvent();
  event21.setId("update_event");
  event21.setTimestamp(ts - 20);
  entity2.addEvent(event21);
  Set<String> isRelatedToSet2 = new HashSet<>();
  isRelatedToSet2.add("relatedto3");
  Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
  isRelatedTo2.put("task1", isRelatedToSet2);
  entity2.setIsRelatedToEntities(isRelatedTo2);
  Map<String, Set<String>> relatesTo3 = new HashMap<>();
  Set<String> relatesToSet14 = new HashSet<>();
  relatesToSet14.add("relatesto7");
  relatesTo3.put("container2", relatesToSet14);
  entity2.setRelatesToEntities(relatesTo3);
  te.addEntity(entity2);

  // write all three entities into HBase under one flow run
  HBaseTimelineWriterImpl hbi = null;
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(util.getConfiguration());
    hbi.start();
    String cluster = "cluster1";
    String user = "user1";
    String flow = "some_flow_name";
    String flowVersion = "AB7822C10F1111";
    long runid = 1002345678919L;
    String appName = "application_1231111111_1111";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.stop();
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
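Assuming an initialized HBaseTimelineReaderImpl (the reader field the storage tests below rely on), the three entities written by loadEntities can be read back using the same context values; a hedged sketch:

Set<TimelineEntity> es = reader.getEntities(
    new TimelineReaderContext("cluster1", "user1", "some_flow_name",
        1002345678919L, "application_1231111111_1111", "world", null),
    new TimelineEntityFilters(),
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
// loadEntities stores three entities of type "world": hello, hello1, hello2
assertEquals(3, es.size());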
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in the Apache Hadoop project.
From class TestHBaseTimelineStorageEntities, method testEventsEscapeTs.
@Test
public void testEventsEscapeTs() throws IOException {
  TimelineEvent event = new TimelineEvent();
  String eventId = ApplicationMetricsConstants.CREATED_EVENT_TYPE;
  event.setId(eventId);
  long expTs = 1463567041056L;
  event.setTimestamp(expTs);
  // the info key deliberately contains separator-like characters (=, space, tab)
  String expKey = "f==o o_e ve\tnt";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  final TimelineEntity entity = new ApplicationEntity();
  entity.setId(ApplicationId.newInstance(0, 1).toString());
  entity.addEvent(event);
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    // the cluster name also contains characters that must survive escaping
    String cluster = "clus!ter_\ttest_ev ents";
    String user = "user2";
    String flow = "other_flow_name";
    String flowVersion = "1111F01C2287BA";
    long runid = 1009876543218L;
    String appName = "application_123465899910_2001";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, entities);
    hbi.stop();
    // read the timeline entity back using the reader this time
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    assertNotNull(e1);
    // check the events; there should be only one
    NavigableSet<TimelineEvent> events = e1.getEvents();
    assertEquals(1, events.size());
    for (TimelineEvent e : events) {
      assertEquals(eventId, e.getId());
      assertEquals(expTs, e.getTimestamp());
      Map<String, Object> info = e.getInfo();
      assertEquals(1, info.size());
      for (Map.Entry<String, Object> infoEntry : info.entrySet()) {
        assertEquals(expKey, infoEntry.getKey());
        assertEquals(expVal, infoEntry.getValue());
      }
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in the Apache Hadoop project.
From class TestHBaseTimelineStorageEntities, method testEventsWithEmptyInfo.
@Test
public void testEventsWithEmptyInfo() throws IOException {
  TimelineEvent event = new TimelineEvent();
  String eventId = "foo_ev e nt_id";
  event.setId(eventId);
  Long expTs = 1436512802000L;
  event.setTimestamp(expTs);
  // note: no info is added to this event
  final TimelineEntity entity = new TimelineEntity();
  entity.setId("attempt_1329348432655_0001_m_000008_18");
  entity.setType("FOO_ATTEMPT");
  entity.addEvent(event);
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    String cluster = "cluster_test_empty_eventkey";
    String user = "user_emptyeventkey";
    String flow = "other_flow_name";
    String flowVersion = "1111F01C2287BA";
    long runid = 1009876543218L;
    String appName = ApplicationId.newInstance(
        System.currentTimeMillis() + 9000000L, 1).toString();
    byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid,
        appName).getRowKeyPrefix();
    hbi.write(cluster, user, flow, flowVersion, runid, appName, entities);
    hbi.stop();
    // scan the table and verify that the entity exists
    Scan s = new Scan();
    s.setStartRow(startRow);
    s.addFamily(EntityColumnFamily.INFO.getBytes());
    Connection conn = ConnectionFactory.createConnection(c1);
    ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
    int rowCount = 0;
    for (Result result : scanner) {
      if (result != null && !result.isEmpty()) {
        rowCount++;
        // check the row key
        byte[] row1 = result.getRow();
        assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName,
            entity));
        Map<EventColumnName, Object> eventsResult =
            EntityColumnPrefix.EVENT.readResults(result,
                new EventColumnNameConverter());
        // there should be only one event
        assertEquals(1, eventsResult.size());
        for (Map.Entry<EventColumnName, Object> e : eventsResult.entrySet()) {
          EventColumnName eventColumnName = e.getKey();
          // the qualifier is a compound key, so match its parts individually
          assertEquals(eventId, eventColumnName.getId());
          assertEquals(expTs, eventColumnName.getTimestamp());
          // the info key must be empty
          assertNull(eventColumnName.getInfoKey());
          // and the stored value should be empty as well
          Object value = e.getValue();
          assertEquals("", value.toString());
        }
      }
    }
    assertEquals(1, rowCount);
    // read the timeline entity back using the reader this time
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    Set<TimelineEntity> es1 = reader.getEntities(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    assertNotNull(e1);
    assertEquals(1, es1.size());
    // check the events; there should be only one, carrying no info
    NavigableSet<TimelineEvent> events = e1.getEvents();
    assertEquals(1, events.size());
    for (TimelineEvent e : events) {
      assertEquals(eventId, e.getId());
      assertEquals(expTs, Long.valueOf(e.getTimestamp()));
      Map<String, Object> info = e.getInfo();
      assertTrue(info == null || info.isEmpty());
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in the Apache Hadoop project.
From class TestFlowDataGenerator, method getEntity1.
static TimelineEntity getEntity1() {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunHello";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  long cTime = 1425026901000L;
  entity.setCreatedTime(cTime);
  // add a time-series metric
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId(METRIC_1);
  Map<Long, Number> metricValues = new HashMap<>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000L);
  metricValues.put(ts - 100000, 200000000L);
  metricValues.put(ts - 80000, 300000000L);
  metricValues.put(ts - 60000, 400000000L);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 60000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity.addMetrics(metrics);
  // add a CREATED event at the creation time
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(cTime);
  String expKey = "foo_event";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  // add a FINISHED event at start time + 6 hrs
  event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  long expTs = cTime + 21600000;
  event.setTimestamp(expTs);
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
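A quick hedged check of what this generator returns, assuming JUnit's assertEquals is in scope:

TimelineEntity app = getEntity1();
// the generator attaches a CREATED and a FINISHED event, six hours apart,
// each carrying the "foo_event" -> "test" info entry
assertEquals(2, app.getEvents().size());
for (TimelineEvent e : app.getEvents()) {
  assertEquals("test", e.getInfo().get("foo_event"));
}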