Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.
From the class TestHBaseStorageFlowRun, method testMetricFilters:
@Test
public void testMetricFilters() throws Exception {
  String cluster = "cluster1";
  String user = "user1";
  String flow = "flow_name1";
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678919L,
        "application_11111111111111_1111", te);
    // Write another application with the same metric to this flow.
    te = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    te.addEntity(entityApp2);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678918L,
        "application_11111111111111_2222", te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // Use the timeline reader to verify the data.
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // (METRIC1 >= 101) OR (METRIC1 < 43 AND METRIC2 == 57): matches both flow runs.
    TimelineFilterList list1 = new TimelineFilterList();
    list1.addFilter(new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
    TimelineFilterList list2 = new TimelineFilterList();
    list2.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 43));
    list2.addFilter(new TimelineCompareFilter(TimelineCompareOp.EQUAL, METRIC2, 57));
    TimelineFilterList metricFilterList = new TimelineFilterList(Operator.OR, list1, list2);
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
    assertEquals(2, entities.size());
    int metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(3, metricCnt);
    // METRIC1 <= 127 AND METRIC2 != 30: matches a single flow run.
    TimelineFilterList metricFilterList1 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, METRIC1, 127),
        new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 30));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
    assertEquals(1, entities.size());
    metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(2, metricCnt);
    // METRIC1 < 32 AND METRIC2 != 57: matches no flow run.
    TimelineFilterList metricFilterList2 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 32),
        new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 57));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList2, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
    assertEquals(0, entities.size());
    // A filter on a metric that does not exist matches nothing.
    TimelineFilterList metricFilterList3 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.EQUAL, "s_metric", 32));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList3, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
    assertEquals(0, entities.size());
    // Same OR filter as above, but additionally restrict the metrics returned
    // to those whose id starts with the METRIC2 prefix.
    TimelineFilterList list3 = new TimelineFilterList();
    list3.addFilter(new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
    TimelineFilterList list4 = new TimelineFilterList();
    list4.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 43));
    list4.addFilter(new TimelineCompareFilter(TimelineCompareOp.EQUAL, METRIC2, 57));
    TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR, list3, list4);
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
            METRIC2.substring(0, METRIC2.indexOf("_") + 1)));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList4, null),
        new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.ALL), null));
    assertEquals(2, entities.size());
    // Both flow runs match, but only one metric across them carries the prefix.
    metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(1, metricCnt);
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
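The core pattern above, isolated: filters added to the same TimelineFilterList are combined with AND by default, and lists can themselves be nested under an explicit top-level operator. A minimal sketch, using string literals "METRIC1" and "METRIC2" as stand-ins for the metric-id constants the test defines elsewhere:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;

public class NestedMetricFilterSketch {
  // Builds (METRIC1 >= 101) OR (METRIC1 < 43 AND METRIC2 == 57).
  public static TimelineFilterList buildFilter() {
    // Sublist with a single condition: METRIC1 >= 101.
    TimelineFilterList highMetric1 = new TimelineFilterList();
    highMetric1.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_OR_EQUAL, "METRIC1", 101));
    // Two filters in one list are ANDed by the list's default operator.
    TimelineFilterList lowMetric1WithMetric2 = new TimelineFilterList();
    lowMetric1WithMetric2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, "METRIC1", 43));
    lowMetric1WithMetric2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.EQUAL, "METRIC2", 57));
    // Nest the two sublists under an explicit OR.
    return new TimelineFilterList(Operator.OR, highMetric1, lowMetric1WithMetric2);
  }
}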
Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsConfigFilterPrefix:
@Test
public void testReadAppsConfigFilterPrefix() throws Exception {
  // Match on cfg_param1 == value1 and retrieve only configs prefixed "cfg_".
  TimelineFilterList confFilterList = new TimelineFilterList();
  confFilterList.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  TimelineFilterList list = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null),
      new TimelineDataToRetrieve(list, null, null, null));
  assertEquals(1, entities.size());
  int cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_", confKey.startsWith("cfg_"));
    }
  }
  assertEquals(2, cfgCnt);
  // (cfg_param1 == value1 AND cfg_param2 == value2) OR
  // (cfg_param1 == value3 AND config_param2 == value2),
  // retrieving only configs prefixed "config_".
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param2", "value2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value3"));
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_param2", "value2"));
  TimelineFilterList confsToRetrieve = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_"));
  TimelineFilterList confFilterList1 = new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null),
      new TimelineDataToRetrieve(confsToRetrieve, null, null, null));
  assertEquals(2, entities.size());
  cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with config_", confKey.startsWith("config_"));
    }
  }
  assertEquals(2, cfgCnt);
}
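The key point this test exercises: the filter that selects entities and the filter that prunes the returned config map are independent, which is why the second query matches on cfg_-prefixed keys yet returns only config_-prefixed ones. A minimal sketch of the two roles, with illustrative key names:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;

public class ConfigPrefixSketch {
  // Selection: entities must carry config cfg_param1 == "value1".
  // This goes into the configFilters slot of TimelineEntityFilters.
  public static TimelineFilterList selectionFilter() {
    TimelineFilterList confFilterList = new TimelineFilterList();
    confFilterList.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
    return confFilterList;
  }

  // Retrieval: of the matched entities' configs, return only keys
  // starting with "cfg_". This goes into TimelineDataToRetrieve.
  public static TimelineFilterList retrievalFilter() {
    return new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  }
}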
Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsInfoFilters:
@Test
public void testReadAppsInfoFilters() throws Exception {
  // (infoMapKey3 == 85.85 AND infoMapKey1 == "infoMapValue2") OR
  // (infoMapKey1 == "infoMapValue1" AND infoMapKey2 == 10).
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "infoMapKey3", 85.85));
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "infoMapKey1", "infoMapValue2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "infoMapKey1", "infoMapValue1"));
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "infoMapKey2", 10));
  TimelineFilterList infoFilterList = new TimelineFilterList(Operator.OR, list1, list2);
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(2, entities.size());
  int infoCnt = 0;
  for (TimelineEntity entity : entities) {
    infoCnt += entity.getInfo().size();
  }
  assertEquals(5, infoCnt);
  // infoMapKey1 != "infoMapValue1": matches one app.
  TimelineFilterList infoFilterList1 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey1", "infoMapValue1"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(1, entities.size());
  infoCnt = 0;
  for (TimelineEntity entity : entities) {
    infoCnt += entity.getInfo().size();
  }
  assertEquals(3, infoCnt);
  // infoMapKey1 != "infoMapValue2" AND infoMapKey3 != 85.85: matches nothing.
  TimelineFilterList infoFilterList2 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey1", "infoMapValue2"),
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey3", 85.85));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  // EQUAL on a key no entity has: matches nothing.
  TimelineFilterList infoFilterList3 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", "some_value"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  // NOT_EQUAL on a missing key also matches nothing while keyMustExist
  // keeps its default of true.
  TimelineFilterList infoFilterList4 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  // With keyMustExist == false, entities missing the key match as well.
  TimelineFilterList infoFilterList5 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value", false));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList5, null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(3, entities.size());
}
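The last two queries differ only in the keyMustExist flag, which is worth isolating: with NOT_EQUAL and the flag at its default of true, an entity must carry the key with a different value to match, so a filter on a nonexistent key matches nothing; passing false lets entities that lack the key match as well. A minimal sketch of the two constructions, mirroring the test's expected counts:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;

public class KeyMustExistSketch {
  // Key must exist and differ from "some_value": entities without a
  // "dummy_info" key do not match (the query above returns 0 entities).
  public static TimelineFilterList strict() {
    return new TimelineFilterList(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value"));
  }

  // keyMustExist == false: entities missing "dummy_info" also match
  // (the query above returns all 3 entities).
  public static TimelineFilterList lenient() {
    return new TimelineFilterList(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value", false));
  }
}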
Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsEventFilters:
@Test
public void testReadAppsEventFilters() throws Exception {
  // Has an "update_event" AND does not have an "end_event".
  TimelineFilterList ef = new TimelineFilterList();
  ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "update_event"));
  ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(1, entities.size());
  int eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity id should have been application_1111111111_4444");
    }
  }
  assertEquals(1, eventCnt);
  // Same filter, but with the default TimelineDataToRetrieve: the entity
  // still matches, yet no events are returned with it.
  TimelineFilterList ef1 = new TimelineFilterList();
  ef1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "update_event"));
  ef1.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef1),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity id should have been application_1111111111_4444");
    }
  }
  assertEquals(0, eventCnt);
  // Only the absence condition: two apps lack an "end_event".
  TimelineFilterList ef2 = new TimelineFilterList();
  ef2.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef2),
      new TimelineDataToRetrieve());
  assertEquals(2, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")
        && !timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity ids' should have been application_1111111111_2222"
          + " and application_1111111111_4444");
    }
  }
  assertEquals(0, eventCnt);
  // Requiring an event that no app has matches nothing.
  TimelineFilterList ef3 = new TimelineFilterList();
  ef3.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "update_event"));
  ef3.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "dummy_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef3),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());
  // (update_event AND dummy_event) OR start_event.
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "update_event"));
  list1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "dummy_event"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "start_event"));
  TimelineFilterList ef4 = new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef4),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")) {
      Assert.fail("Entity id should have been application_1111111111_2222");
    }
  }
  assertEquals(0, eventCnt);
  // Has neither an "update_event" nor an "end_event".
  TimelineFilterList ef5 = new TimelineFilterList();
  ef5.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "update_event"));
  ef5.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef5),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")) {
      Assert.fail("Entity id should have been application_1111111111_2222");
    }
  }
  assertEquals(0, eventCnt);
}
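TimelineExistsFilter checks only for an event's presence or absence: EQUAL means "must exist" and NOT_EQUAL means "must not exist". A minimal sketch of the combination the test opens with:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;

public class EventExistsSketch {
  // Matches entities that have an "update_event" but no "end_event";
  // the two conditions are ANDed by the list's default operator.
  public static TimelineFilterList updatedButNotEnded() {
    TimelineFilterList ef = new TimelineFilterList();
    ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "update_event"));
    ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
    return ef;
  }
}

Note also that the event filter only selects entities; whether the events themselves come back depends on TimelineDataToRetrieve, which is why the queries issued with the no-argument TimelineDataToRetrieve() match entities but report an event count of zero.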
Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testReadEntitiesMetricFilters:
@Test
public void testReadEntitiesMetricFilters() throws Exception {
  // (MAP1_SLOT_MILLIS >= 50000000900) OR
  // (MAP_SLOT_MILLIS < 80000000000 AND MAP1_BYTES == 50).
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineCompareFilter(
      TimelineCompareOp.GREATER_OR_EQUAL, "MAP1_SLOT_MILLIS", 50000000900L));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineCompareFilter(
      TimelineCompareOp.LESS_THAN, "MAP_SLOT_MILLIS", 80000000000L));
  list2.addFilter(new TimelineCompareFilter(TimelineCompareOp.EQUAL, "MAP1_BYTES", 50));
  TimelineFilterList metricFilterList = new TimelineFilterList(Operator.OR, list1, list2);
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(2, entities.size());
  int metricCnt = 0;
  for (TimelineEntity entity : entities) {
    metricCnt += entity.getMetrics().size();
  }
  assertEquals(3, metricCnt);
  // The same filter with Field.ALL returns the same entities and metrics.
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(2, entities.size());
  metricCnt = 0;
  for (TimelineEntity entity : entities) {
    metricCnt += entity.getMetrics().size();
  }
  assertEquals(3, metricCnt);
  // MAP_SLOT_MILLIS <= 80000000000 AND MAP1_BYTES != 30: one entity.
  TimelineFilterList metricFilterList1 = new TimelineFilterList(
      new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "MAP_SLOT_MILLIS", 80000000000L),
      new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(1, entities.size());
  metricCnt = 0;
  for (TimelineEntity entity : entities) {
    metricCnt += entity.getMetrics().size();
  }
  assertEquals(2, metricCnt);
  // MAP_SLOT_MILLIS < 40000000000 AND MAP1_BYTES != 30: no entities.
  TimelineFilterList metricFilterList2 = new TimelineFilterList(
      new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "MAP_SLOT_MILLIS", 40000000000L),
      new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList2, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(0, entities.size());
  // EQUAL on a nonexistent metric matches nothing.
  TimelineFilterList metricFilterList3 = new TimelineFilterList(
      new TimelineCompareFilter(TimelineCompareOp.EQUAL, "dummy_metric", 5));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList3, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(0, entities.size());
  // NOT_EQUAL on a nonexistent metric also matches nothing while
  // keyMustExist keeps its default of true.
  TimelineFilterList metricFilterList4 = new TimelineFilterList(
      new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList4, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(0, entities.size());
  // With keyMustExist == false, entities missing the metric match too.
  TimelineFilterList metricFilterList5 = new TimelineFilterList(
      new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5, false));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L,
          "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList5, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null));
  assertEquals(3, entities.size());
}
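For reference, a hedged breakdown of the three-argument getEntities call these tests keep repeating. Parameter positions are inferred from the calls above, and the package locations are assumed from the Hadoop source tree; treat this as a sketch, not an authoritative API reference:

import java.io.IOException;
import java.util.EnumSet;
import java.util.Set;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;

public class GetEntitiesSketch {
  public static Set<TimelineEntity> queryByMetrics(TimelineReader reader,
      TimelineFilterList metricFilterList) throws IOException {
    // Context: which rows to scan, in the order used throughout the tests:
    // cluster, user, flowName, flowRunId, appId, entityType, entityId.
    TimelineReaderContext context = new TimelineReaderContext("cluster1", "user1",
        "some_flow_name", 1002345678919L, "application_1231111111_1111", "world", null);
    // Filters select entities. Slots, judging by the calls above: limit,
    // createdTimeBegin, createdTimeEnd, relatesTo, isRelatedTo, infoFilters,
    // configFilters, metricFilters, eventFilters.
    TimelineEntityFilters filters = new TimelineEntityFilters(
        null, null, null, null, null, null, null, metricFilterList, null);
    // Retrieval controls what comes back on each matched entity:
    // confsToRetrieve, metricsToRetrieve, fieldsToRetrieve, metricsLimit.
    TimelineDataToRetrieve toRetrieve =
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), null);
    return reader.getEntities(context, filters, toRetrieve);
  }
}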