Usage of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in the Apache Hadoop project.
From class TestTimelineReaderWebServicesUtils, method testDataToRetrieve:
@Test
public void testDataToRetrieve() throws Exception {
  // A plain comma-separated list parses to an OR list of EQUAL prefix filters.
  String expr = "abc,def";
  TimelineFilterList expectedList = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "def"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Enclosing brackets and surrounding whitespace must not change the result.
  expr = "(abc,def)";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " ( abc , def ) ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " abc , def ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // A "!(...)" expression negates each prefix into a NOT_EQUAL filter.
  expr = "!(abc,def)";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "def"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " ! ( abc , def ) ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Single negated prefix.
  expr = "!(abc)";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Single prefix, with and without brackets.
  expr = "(abc)";
  expectedList = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "abc";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Three negated prefixes, with and without extra whitespace.
  expr = " ! ( abc , def , xyz) ";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "def"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "xyz"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "!(abc,def,xyz)";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Malformed input: missing closing bracket must be rejected.
  expr = "!(abc,def,xyz";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("No closing bracket. Exception should have been thrown");
  } catch (TimelineParseException e) {
    // Expected.
  }
  // Malformed input: "!" must be immediately followed by an opening bracket.
  // NOTE(review): the original test repeated this exact case twice; the
  // redundant duplicate has been removed.
  expr = "!abc,def,xyz";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("NOT(!) should be followed by opening bracket. Exception should "
        + "have been thrown");
  } catch (TimelineParseException e) {
    // Expected.
  }
  // Malformed input: stray token between "!" and the opening bracket.
  expr = "! r( abc,def,xyz)";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("NOT(!) should be followed by opening bracket. Exception should "
        + "have been thrown");
  } catch (TimelineParseException e) {
    // Expected.
  }
  // Null, blank, and empty bracket expressions all parse to null.
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(null));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(" "));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("()"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("!()"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("( )"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("!( )"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("! ( )"));
}
Usage of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in the Apache Hadoop project.
From class TestTimelineReaderWebServicesUtils, method testInfoFiltersParsing:
@Test
public void testInfoFiltersParsing() throws Exception {
  // Fully parenthesized expression mixing AND and OR at several levels of
  // nesting. Build the expected tree from named sub-lists for readability.
  String expr = "(((key11 ne 234 AND key12 eq val12) AND " + "(key13 ene val13 OR key14 eq 567)) OR (key21 eq val_21 OR key22 eq " + "5.0))";
  TimelineFilterList key11And12 = new TimelineFilterList(Operator.AND,
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "key11", 234,
          false),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key12", "val12",
          true));
  TimelineFilterList key13Or14 = new TimelineFilterList(Operator.OR,
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "key13", "val13",
          true),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key14", 567, true));
  TimelineFilterList leftBranch =
      new TimelineFilterList(Operator.AND, key11And12, key13Or14);
  TimelineFilterList rightBranch = new TimelineFilterList(Operator.OR,
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key21", "val_21",
          true),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "key22", 5.0, true));
  TimelineFilterList expectedList =
      new TimelineFilterList(Operator.OR, leftBranch, rightBranch);
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseKVFilters(expr, false), expectedList);
  // Unparenthesized mix of AND and OR: verify the grouping the parser
  // produces for a flat expression.
  expr = "abc ne 234 AND def eq 23 OR rst ene 24 OR xyz eq 456 AND pqr eq " + "val.1234";
  TimelineFilterList abcAndDef = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234,
          false),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "def", 23, true));
  TimelineFilterList orChain = new TimelineFilterList(Operator.OR, abcAndDef,
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "rst", 24, true),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "xyz", 456, true));
  expectedList = new TimelineFilterList(orChain,
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "pqr", "val.1234",
          true));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseKVFilters(expr, false), expectedList);
  // Test with unnecessary spaces: whitespace padding must not change the
  // parsed structure.
  expr = " abc ne 234 AND def eq 23 OR rst ene " + " 24 OR xyz eq 456 AND pqr eq 2 ";
  abcAndDef = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234,
          false),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "def", 23, true));
  orChain = new TimelineFilterList(Operator.OR, abcAndDef,
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "rst", 24, true),
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "xyz", 456, true));
  expectedList = new TimelineFilterList(orChain,
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "pqr", 2, true));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseKVFilters(expr, false), expectedList);
}
Usage of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in the Apache Hadoop project.
From class TestHBaseTimelineStorageApps, method testReadAppsRelationsAndEventFiltersDefaultView:
@Test
public void testReadAppsRelationsAndEventFiltersDefaultView() throws Exception {
  // Event filter: entity must NOT have an "end_event" event.
  TimelineFilterList evFilters = new TimelineFilterList();
  evFilters.addFilter(
      new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "end_event"));
  // relatesTo filter with OR semantics: either container relation matches.
  TimelineFilterList relations = new TimelineFilterList(Operator.OR);
  relations.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container2", new HashSet<Object>(Arrays.asList("relatesto7"))));
  relations.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  // isRelatedTo filter: related to "relatedto3" but not "relatedto5".
  TimelineFilterList related = new TimelineFilterList();
  related.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "task1", new HashSet<Object>(Arrays.asList("relatedto3"))));
  related.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
      "task1", new HashSet<Object>(Arrays.asList("relatedto5"))));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, null, relations, related, null,
          null, null, evFilters),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  int totalEvents = 0;
  int totalIsRelatedTo = 0;
  int totalRelatesTo = 0;
  for (TimelineEntity entity : entities) {
    totalEvents += entity.getEvents().size();
    totalIsRelatedTo += entity.getIsRelatedToEntities().size();
    totalRelatesTo += entity.getRelatesToEntities().size();
    if (!entity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity id should have been application_1111111111_4444");
    }
  }
  // The default view (no Fields requested) returns no events or relations.
  assertEquals(0, totalEvents);
  assertEquals(0, totalIsRelatedTo);
  assertEquals(0, totalRelatesTo);
}
Usage of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in the Apache Hadoop project.
From class TestHBaseTimelineStorageApps, method testReadAppsConfigPrefix:
@Test
public void testReadAppsConfigPrefix() throws Exception {
  // Restrict returned configs to keys starting with "cfg_".
  TimelineFilterList cfgPrefixFilter = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  // Single-entity read: one matching config expected.
  TimelineEntity appEntity = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1111111111_2222",
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineDataToRetrieve(cfgPrefixFilter, null, null, null));
  assertNotNull(appEntity);
  assertEquals(1, appEntity.getConfigs().size());
  // Multi-entity read: every returned config key must carry the prefix.
  Set<TimelineEntity> appEntities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(),
      new TimelineDataToRetrieve(cfgPrefixFilter, null, null, null));
  int totalConfigs = 0;
  for (TimelineEntity entity : appEntities) {
    totalConfigs += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_",
          confKey.startsWith("cfg_"));
    }
  }
  assertEquals(3, totalConfigs);
}
Usage of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in the Apache Hadoop project.
From class TestHBaseTimelineStorageApps, method testReadAppsIsRelatedTo:
@Test
public void testReadAppsIsRelatedTo() throws Exception {
// Exercises the isRelatedTo entity filter across seven scenarios: OR lists,
// AND (default) lists, multi-value sets, non-matching values, and nested
// filter lists. Each scenario checks both the matched entity ids and the
// number of isRelatedTo entries actually returned.
// Scenario 1: OR of two relations — expect both matching apps (2222, 3333),
// read with Field.ALL so relation data is populated.
TimelineFilterList irt = new TimelineFilterList(Operator.OR);
irt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task", new HashSet<Object>(Arrays.asList("relatedto1"))));
irt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2", new HashSet<Object>(Arrays.asList("relatedto4"))));
Set<TimelineEntity> entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt, null, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
int isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
if (!timelineEntity.getId().equals("application_1111111111_2222") && !timelineEntity.getId().equals("application_1111111111_3333")) {
Assert.fail("Entity ids' should have been application_1111111111_2222" + " and application_1111111111_3333");
}
}
assertEquals(3, isRelatedToCnt);
// Scenario 2: default (non-OR) list combining EQUAL and NOT_EQUAL on the
// same key — only app 4444 qualifies; default view returns no relations.
TimelineFilterList irt1 = new TimelineFilterList();
irt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1", new HashSet<Object>(Arrays.asList("relatedto3"))));
irt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "task1", new HashSet<Object>(Arrays.asList("relatedto5"))));
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt1, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(1, entities.size());
isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
if (!timelineEntity.getId().equals("application_1111111111_4444")) {
Assert.fail("Entity id should have been application_1111111111_4444");
}
}
assertEquals(0, isRelatedToCnt);
// Scenario 3: same OR filter as scenario 1 but with the default view —
// same two entities match, but no isRelatedTo data is fetched.
TimelineFilterList irt2 = new TimelineFilterList(Operator.OR);
irt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task", new HashSet<Object>(Arrays.asList("relatedto1"))));
irt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2", new HashSet<Object>(Arrays.asList("relatedto4"))));
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt2, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(2, entities.size());
isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
if (!timelineEntity.getId().equals("application_1111111111_2222") && !timelineEntity.getId().equals("application_1111111111_3333")) {
Assert.fail("Entity ids' should have been application_1111111111_2222" + " and application_1111111111_3333");
}
}
assertEquals(0, isRelatedToCnt);
// Scenario 4: one filter with a multi-value set — only app 3333 relates
// to both "relatedto3" and "relatedto5" under key "task1".
TimelineFilterList irt3 = new TimelineFilterList();
irt3.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1", new HashSet<Object>(Arrays.asList("relatedto3", "relatedto5"))));
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt3, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(1, entities.size());
isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
if (!timelineEntity.getId().equals("application_1111111111_3333")) {
Assert.fail("Entity id should have been application_1111111111_3333");
}
}
assertEquals(0, isRelatedToCnt);
// Scenario 5: conjunction includes a key ("dummy_task") no entity has —
// nothing should match.
TimelineFilterList irt4 = new TimelineFilterList();
irt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1", new HashSet<Object>(Arrays.asList("relatedto3"))));
irt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "dummy_task", new HashSet<Object>(Arrays.asList("relatedto5"))));
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt4, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(0, entities.size());
// Scenario 6: multi-value set containing a value ("relatedto7") that no
// entity has for "task1" — nothing should match.
TimelineFilterList irt5 = new TimelineFilterList();
irt5.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1", new HashSet<Object>(Arrays.asList("relatedto3", "relatedto7"))));
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt5, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(0, entities.size());
// Scenario 7: OR of two nested filter lists — list1 cannot match (dummy
// key), so only list2 ("task2" -> "relatedto4", app 3333) selects.
TimelineFilterList list1 = new TimelineFilterList();
list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task", new HashSet<Object>(Arrays.asList("relatedto1"))));
list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "dummy_task", new HashSet<Object>(Arrays.asList("relatedto4"))));
TimelineFilterList list2 = new TimelineFilterList();
list2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2", new HashSet<Object>(Arrays.asList("relatedto4"))));
TimelineFilterList irt6 = new TimelineFilterList(Operator.OR, list1, list2);
entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), new TimelineEntityFilters(null, null, null, null, irt6, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(1, entities.size());
isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
if (!timelineEntity.getId().equals("application_1111111111_3333")) {
Assert.fail("Entity id should have been application_1111111111_3333");
}
}
assertEquals(0, isRelatedToCnt);
}
Aggregations