Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project Hadoop by Apache.
The class TestHBaseTimelineStorageEntities, method testReadEntitiesRelationsAndEventFiltersDefaultView.
@Test
public void testReadEntitiesRelationsAndEventFiltersDefaultView()
    throws Exception {
  // Exclude any entity that has an "end_event".
  TimelineFilterList eventFilter = new TimelineFilterList();
  eventFilter.addFilter(
      new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  // relatesTo: container2 -> relatesto7 OR container1 -> relatesto4.
  TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
  relatesTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container2", new HashSet<Object>(Arrays.asList("relatesto7"))));
  relatesTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  // isRelatedTo: task1 -> relatedto3 AND NOT task1 -> relatedto5.
  TimelineFilterList isRelatedTo = new TimelineFilterList();
  isRelatedTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "task1", new HashSet<Object>(Arrays.asList("relatedto3"))));
  isRelatedTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
      "task1", new HashSet<Object>(Arrays.asList("relatedto5"))));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, relatesTo, isRelatedTo,
          null, null, null, eventFilter),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  int eventCnt = 0;
  int isRelatedToCnt = 0;
  int relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello2")) {
      Assert.fail("Entity id should have been hello2");
    }
  }
  // The default view does not retrieve events or relations.
  assertEquals(0, eventCnt);
  assertEquals(0, isRelatedToCnt);
  assertEquals(0, relatesToCnt);
}
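The nine positional arguments of the TimelineEntityFilters constructor are easy to misplace. Below is a minimal method-level sketch naming each slot as the tests in this section use it; the parameter names are inferred from usage, not quoted from the library's Javadoc.

  import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;

  // Sketch only: names each positional slot of the nine-argument
  // constructor, inferred from the calls in these tests.
  static TimelineEntityFilters eventFiltersOnly(TimelineFilterList eventFilters) {
    Long limit = null;             // max entities to return
    Long createdTimeBegin = null;  // creation-time lower bound
    Long createdTimeEnd = null;    // creation-time upper bound
    TimelineFilterList relatesTo = null;      // 4th slot in the test above
    TimelineFilterList isRelatedTo = null;    // 5th slot in the test above
    TimelineFilterList infoFilters = null;
    TimelineFilterList configFilters = null;  // used by testReadEntitiesConfigFilters below
    TimelineFilterList metricFilters = null;
    return new TimelineEntityFilters(limit, createdTimeBegin, createdTimeEnd,
        relatesTo, isRelatedTo, infoFilters, configFilters, metricFilters,
        eventFilters);
  }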
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project Hadoop by Apache.
The class TestHBaseTimelineStorageEntities, method testReadEntitiesConfigFilters.
@Test
public void testReadEntitiesConfigFilters() throws Exception {
  // (cfg_param1 == value1 AND cfg_param2 == value2) OR
  // (cfg_param1 == value3 AND config_param2 == value2)
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param2", "value2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value3"));
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_param2", "value2"));
  TimelineFilterList confFilterList = new TimelineFilterList(Operator.OR, list1, list2);
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(2, entities.size());
  int cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
  }
  assertEquals(5, cfgCnt);
  // The same query with Field.ALL returns the same config entries.
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(2, entities.size());
  cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
  }
  assertEquals(5, cfgCnt);
  // cfg_param1 != value1 matches exactly one entity.
  TimelineFilterList confFilterList1 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "cfg_param1", "value1"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(1, entities.size());
  cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
  }
  assertEquals(3, cfgCnt);
  // ANDing a second NOT_EQUAL filter leaves no matches.
  TimelineFilterList confFilterList2 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "cfg_param1", "value1"),
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "config_param2", "value2"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList2, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(0, entities.size());
  // EQUAL on a config key no entity has matches nothing.
  TimelineFilterList confFilterList3 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_config", "value1"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList3, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(0, entities.size());
  // NOT_EQUAL on a missing key also matches nothing by default.
  TimelineFilterList confFilterList4 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList4, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(0, entities.size());
  // With the key-must-exist flag false, all three entities match.
  TimelineFilterList confFilterList5 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1", false));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList5, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), null));
  assertEquals(3, entities.size());
}
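The confFilterList built at the top of this test is an OR of two AND sub-lists. A hedged sketch of the same composition, with hypothetical keys and values:

  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;

  // Sketch: (a == 1 AND b == 2) OR (a == 3). Keys and values are
  // hypothetical; only the composition pattern mirrors the test above.
  static TimelineFilterList orOfAnds() {
    TimelineFilterList branch1 = new TimelineFilterList(  // no Operator given: sub-filters are ANDed
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "a", "1"),
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "b", "2"));
    TimelineFilterList branch2 = new TimelineFilterList(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "a", "3"));
    return new TimelineFilterList(Operator.OR, branch1, branch2);
  }

Note also the four-argument TimelineKeyValueFilter overload used for confFilterList5: judging by the assertions (zero matches with the default, three with false), the trailing boolean appears to control whether the key must exist for a NOT_EQUAL comparison to match.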
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project Hadoop by Apache.
The class TestHBaseTimelineStorageEntities, method testEventsWithEmptyInfo.
@Test
public void testEventsWithEmptyInfo() throws IOException {
  TimelineEvent event = new TimelineEvent();
  // note: the event id contains spaces
  String eventId = "foo_ev e nt_id";
  event.setId(eventId);
  Long expTs = 1436512802000L;
  event.setTimestamp(expTs);
  final TimelineEntity entity = new TimelineEntity();
  entity.setId("attempt_1329348432655_0001_m_000008_18");
  entity.setType("FOO_ATTEMPT");
  entity.addEvent(event);
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    String cluster = "cluster_test_empty_eventkey";
    String user = "user_emptyeventkey";
    String flow = "other_flow_name";
    String flowVersion = "1111F01C2287BA";
    long runid = 1009876543218L;
    String appName = ApplicationId.newInstance(
        System.currentTimeMillis() + 9000000L, 1).toString();
    byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid, appName)
        .getRowKeyPrefix();
    hbi.write(cluster, user, flow, flowVersion, runid, appName, entities);
    hbi.stop();
    // scan the table and see that the entity exists
    Scan s = new Scan();
    s.setStartRow(startRow);
    s.addFamily(EntityColumnFamily.INFO.getBytes());
    Connection conn = ConnectionFactory.createConnection(c1);
    ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
    int rowCount = 0;
    for (Result result : scanner) {
      if (result != null && !result.isEmpty()) {
        rowCount++;
        // check the row key
        byte[] row1 = result.getRow();
        assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName, entity));
        Map<EventColumnName, Object> eventsResult =
            EntityColumnPrefix.EVENT.readResults(result, new EventColumnNameConverter());
        // there should be only one event
        assertEquals(1, eventsResult.size());
        for (Map.Entry<EventColumnName, Object> e : eventsResult.entrySet()) {
          EventColumnName eventColumnName = e.getKey();
          // the qualifier is a compound key, hence match the individual values
          assertEquals(eventId, eventColumnName.getId());
          assertEquals(expTs, eventColumnName.getTimestamp());
          // the info key must be empty
          assertNull(eventColumnName.getInfoKey());
          Object value = e.getValue();
          // the value should be empty
          assertEquals("", value.toString());
        }
      }
    }
    assertEquals(1, rowCount);
    // read the timeline entity using the reader this time
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    Set<TimelineEntity> es1 = reader.getEntities(
        new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    assertNotNull(e1);
    assertEquals(1, es1.size());
    // check the events; there should be only one
    NavigableSet<TimelineEvent> events = e1.getEvents();
    assertEquals(1, events.size());
    for (TimelineEvent e : events) {
      assertEquals(eventId, e.getId());
      assertEquals(expTs, Long.valueOf(e.getTimestamp()));
      Map<String, Object> info = e.getInfo();
      assertTrue(info == null || info.isEmpty());
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
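Both write-path tests in this section follow the same writer lifecycle: init with the mini-cluster configuration, start, write, then stop before reading back. A condensed sketch of that pattern; the cluster, user, flow, and app identifiers are hypothetical placeholders:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
  import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;

  // Sketch of the writer lifecycle used in the tests above.
  static void writeOnce(Configuration conf, TimelineEntities data) throws Exception {
    HBaseTimelineWriterImpl writer = new HBaseTimelineWriterImpl();
    try {
      writer.init(conf);  // service-style init/start before any write
      writer.start();
      writer.write("some_cluster", "some_user", "some_flow", "some_flow_version",
          1L, "application_0000000000000_0001", data);
    } finally {
      writer.stop();      // the tests call stop() before reading data back
      writer.close();
    }
  }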
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project Hadoop by Apache.
The class TestHBaseTimelineStorageApps, method testReadAppsMetricPrefix.
@Test
public void testReadAppsMetricPrefix() throws Exception {
  TimelineFilterList list = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_"));
  TimelineEntity e1 = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1111111111_2222", TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineDataToRetrieve(null, list, null, null));
  assertNotNull(e1);
  assertEquals(1, e1.getMetrics().size());
  Set<TimelineEntity> es1 = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          null, TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(),
      new TimelineDataToRetrieve(null, list, null, null));
  int metricCnt = 0;
  for (TimelineEntity entity : es1) {
    metricCnt += entity.getMetrics().size();
    for (TimelineMetric metric : entity.getMetrics()) {
      assertTrue("Metric Id returned should start with MAP1_",
          metric.getId().startsWith("MAP1_"));
    }
  }
  assertEquals(2, metricCnt);
}
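Here the TimelinePrefixFilter is passed as the second argument of TimelineDataToRetrieve, which, by the usage in this test, selects which metrics are returned rather than which entities match. A small sketch of the inferred argument order; the slot names are assumptions drawn from these calls, not quoted documentation:

  import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
  import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;

  // Sketch: return only metrics whose id starts with "MAP1_". The slot
  // meanings (confsToRetrieve, metricsToRetrieve, fieldsToRetrieve,
  // metricsLimit) are inferred from the calls in this section.
  static TimelineDataToRetrieve map1MetricsOnly() {
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_"));
    return new TimelineDataToRetrieve(null, metricsToRetrieve, null, null);
  }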
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project Hadoop by Apache.
The class TestHBaseTimelineStorageApps, method testEvents.
@Test
public void testEvents() throws IOException {
  TimelineEvent event = new TimelineEvent();
  String eventId = ApplicationMetricsConstants.CREATED_EVENT_TYPE;
  event.setId(eventId);
  Long expTs = 1436512802000L;
  event.setTimestamp(expTs);
  String expKey = "foo_event";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  final TimelineEntity entity = new ApplicationEntity();
  entity.setId(ApplicationId.newInstance(0, 1).toString());
  entity.addEvent(event);
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    String cluster = "cluster_test_events";
    String user = "user2";
    String flow = "other_flow_name";
    String flowVersion = "1111F01C2287BA";
    long runid = 1009876543218L;
    String appName = "application_123465899910_1001";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, entities);
    hbi.stop();
    // retrieve the row
    ApplicationRowKey applicationRowKey =
        new ApplicationRowKey(cluster, user, flow, runid, appName);
    byte[] rowKey = applicationRowKey.getRowKey();
    Get get = new Get(rowKey);
    get.setMaxVersions(Integer.MAX_VALUE);
    Connection conn = ConnectionFactory.createConnection(c1);
    Result result = new ApplicationTable().getResult(c1, conn, get);
    assertTrue(result != null);
    // check the row key
    byte[] row1 = result.getRow();
    assertTrue(isApplicationRowKeyCorrect(row1, cluster, user, flow, runid, appName));
    Map<EventColumnName, Object> eventsResult =
        ApplicationColumnPrefix.EVENT.readResults(result, new EventColumnNameConverter());
    // there should be only one event
    assertEquals(1, eventsResult.size());
    for (Map.Entry<EventColumnName, Object> e : eventsResult.entrySet()) {
      EventColumnName eventColumnName = e.getKey();
      // the qualifier is a compound key, hence match the individual values
      assertEquals(eventId, eventColumnName.getId());
      assertEquals(expTs, eventColumnName.getTimestamp());
      assertEquals(expKey, eventColumnName.getInfoKey());
      Object value = e.getValue();
      // there should be only one timestamp and value
      assertEquals(expVal, value.toString());
    }
    // read the timeline entity using the reader this time
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    TimelineEntity e2 = reader.getEntity(
        new TimelineReaderContext(cluster, user, null, null, appName, entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    assertNotNull(e1);
    assertNotNull(e2);
    assertEquals(e1, e2);
    // check the events; there should be only one
    NavigableSet<TimelineEvent> events = e1.getEvents();
    assertEquals(1, events.size());
    for (TimelineEvent e : events) {
      assertEquals(eventId, e.getId());
      assertEquals(expTs, Long.valueOf(e.getTimestamp()));
      Map<String, Object> info = e.getInfo();
      assertEquals(1, info.size());
      for (Map.Entry<String, Object> infoEntry : info.entrySet()) {
        assertEquals(expKey, infoEntry.getKey());
        assertEquals(expVal, infoEntry.getValue());
      }
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
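Note the two reads near the end of testEvents: e1 uses the full flow context while e2 passes null for the flow name and run id, and the test asserts the results are equal, which indicates the reader can resolve the flow context from the app id alone. A minimal sketch of that pattern, with hypothetical identifiers:

  import java.util.EnumSet;
  import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
  import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
  import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;

  // Sketch: the same entity addressed with and without the flow context.
  // All identifiers are placeholders.
  static TimelineReaderContext fullContext() {
    return new TimelineReaderContext("some_cluster", "some_user", "some_flow",
        1L, "application_0000000000000_0001", "YARN_APPLICATION",
        "application_0000000000000_0001");
  }

  static TimelineReaderContext appOnlyContext() {
    // flowName and flowRunId left null; the reader resolves them from the app id,
    // as the e1/e2 equality check above demonstrates.
    return new TimelineReaderContext("some_cluster", "some_user", null,
        null, "application_0000000000000_0001", "YARN_APPLICATION",
        "application_0000000000000_0001");
  }

  static TimelineDataToRetrieve everything() {
    return new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null);
  }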