use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, the method testReadEntitiesConfigFilterPrefix:
@Test
public void testReadEntitiesConfigFilterPrefix() throws Exception {
  // Match entities whose config cfg_param1 equals "value1", but retrieve
  // only the config keys that start with "cfg_".
  TimelineFilterList exactMatch = new TimelineFilterList();
  exactMatch.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  TimelineFilterList cfgPrefix = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          exactMatch, null, null),
      new TimelineDataToRetrieve(cfgPrefix, null, null, null));
  assertEquals(1, entities.size());
  int totalConfigs = 0;
  for (TimelineEntity match : entities) {
    for (String confKey : match.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_",
          confKey.startsWith("cfg_"));
    }
    totalConfigs += match.getConfigs().size();
  }
  assertEquals(2, totalConfigs);
  // Match on (cfg_param1=value1 AND cfg_param2=value2) OR
  // (cfg_param1=value3 AND config_param2=value2), retrieving only the
  // config keys that start with "config_".
  TimelineFilterList firstPair = new TimelineFilterList();
  firstPair.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  firstPair.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param2", "value2"));
  TimelineFilterList secondPair = new TimelineFilterList();
  secondPair.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value3"));
  secondPair.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "config_param2", "value2"));
  TimelineFilterList eitherPair =
      new TimelineFilterList(Operator.OR, firstPair, secondPair);
  TimelineFilterList configPrefix = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          eitherPair, null, null),
      new TimelineDataToRetrieve(configPrefix, null, null, null));
  assertEquals(2, entities.size());
  totalConfigs = 0;
  for (TimelineEntity match : entities) {
    for (String confKey : match.getConfigs().keySet()) {
      assertTrue("Config key returned should start with config_",
          confKey.startsWith("config_"));
    }
    totalConfigs += match.getConfigs().size();
  }
  assertEquals(2, totalConfigs);
}
use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter in project hadoop by apache.
From the class TestHBaseStorageFlowRun, the method testWriteFlowRunMetricsPrefix:
@Test
public void testWriteFlowRunMetricsPrefix() throws Exception {
  // Write two applications with the same metric into one flow, then read
  // the flow run(s) back with a metric-prefix filter and check that only
  // the matching metric comes back.
  String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
  String user = "testWriteFlowRunMetricsPrefix_user1";
  String flow = "testWriteFlowRunMetricsPrefix_flow_name";
  String flowVersion = "CF7022C10F1354";
  TimelineEntities firstBatch = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  firstBatch.addEntity(entityApp1);
  HBaseTimelineWriterImpl writer = null;
  Configuration conf = util.getConfiguration();
  try {
    writer = new HBaseTimelineWriterImpl();
    writer.init(conf);
    writer.write(cluster, user, flow, flowVersion, 1002345678919L,
        "application_11111111111111_1111", firstBatch);
    // write another application with same metric to this flow
    TimelineEntities secondBatch = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    secondBatch.addEntity(entityApp2);
    writer.write(cluster, user, flow, flowVersion, 1002345678918L,
        "application_11111111111111_2222", secondBatch);
    writer.flush();
  } finally {
    if (writer != null) {
      writer.close();
    }
  }
  // use the timeline reader to verify data
  HBaseTimelineReaderImpl flowReader = null;
  try {
    flowReader = new HBaseTimelineReaderImpl();
    flowReader.init(conf);
    flowReader.start();
    // Only metrics whose id starts with METRIC1's leading segment
    // (everything up to and including the first underscore) are retrieved.
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
            METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
    TimelineEntity entity = flowReader.getEntity(
        new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
    assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
    Set<TimelineMetric> metrics = entity.getMetrics();
    assertEquals(1, metrics.size());
    for (TimelineMetric metric : metrics) {
      String id = metric.getId();
      Map<Long, Number> values = metric.getValues();
      assertEquals(1, values.size());
      // Pull out the single value (the size assertion above guarantees
      // exactly one entry).
      Number value = null;
      for (Number n : values.values()) {
        value = n;
      }
      if (id.equals(METRIC1)) {
        assertEquals(40L, value);
      } else {
        fail("unrecognized metric: " + id);
      }
    }
    // Reading all flow runs of the flow should yield both runs, each with
    // exactly one metric surviving the prefix filter.
    Set<TimelineEntity> flowRuns = flowReader.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
    assertEquals(2, flowRuns.size());
    int metricCnt = 0;
    for (TimelineEntity flowRun : flowRuns) {
      metricCnt += flowRun.getMetrics().size();
    }
    assertEquals(2, metricCnt);
  } finally {
    if (flowReader != null) {
      flowReader.close();
    }
  }
}
use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter in project hadoop by apache.
From the class TimelineParserForDataToRetrieve, the method parse:
/**
 * Parses a confs/metrics "to retrieve" expression into prefix filters.
 *
 * Accepted shapes: a comma-separated list of prefixes ("abc,def"), the same
 * list wrapped in brackets ("(abc,def)"), or a negated bracketed list
 * ("!(abc,def)").  A plain or bracketed list yields EQUAL prefix filters
 * combined with OR; a "!(...)" list yields NOT_EQUAL prefix filters
 * combined with AND.
 *
 * @return the parsed filter list, or null if the expression is null, empty,
 *         or the brackets enclose only whitespace.
 * @throws TimelineParseException if the expression is malformed.
 */
@Override
public TimelineFilterList parse() throws TimelineParseException {
  // NOTE(review): expr and exprLength are fields of the enclosing parser
  // class, presumably set from the raw query parameter — confirm there.
  if (expr == null || exprLength == 0) {
    return null;
  }
  TimelineCompareOp compareOp = null;
  int openingBracketIndex =
      expr.indexOf(TimelineParseConstants.OPENING_BRACKET_CHAR);
  if (expr.charAt(0) == TimelineParseConstants.NOT_CHAR) {
    // A '!' must be followed, ignoring whitespace, by an opening bracket.
    if (openingBracketIndex == -1) {
      throw new TimelineParseException("Invalid config/metric to retrieve " +
          "expression");
    }
    if (openingBracketIndex != 1 &&
        expr.substring(1, openingBracketIndex + 1).trim().length() != 1) {
      // Something other than whitespace sits between '!' and '('.
      throw new TimelineParseException("Invalid config/metric to retrieve " +
          "expression");
    }
    compareOp = TimelineCompareOp.NOT_EQUAL;
  } else if (openingBracketIndex <= 0) {
    // No bracket at all, or the expression starts with one: inclusion list.
    compareOp = TimelineCompareOp.EQUAL;
  } else {
    // An opening bracket anywhere else (e.g. "ab(cd") is malformed.
    // Previously this fell through with compareOp still null and built
    // filters with a null op; fail fast instead.
    throw new TimelineParseException("Invalid config/metric to retrieve " +
        "expression");
  }
  char lastChar = expr.charAt(exprLength - 1);
  if (compareOp == TimelineCompareOp.NOT_EQUAL &&
      lastChar != TimelineParseConstants.CLOSING_BRACKET_CHAR) {
    // "!(" was opened but never closed.
    throw new TimelineParseException("Invalid config/metric to retrieve " +
        "expression");
  }
  if (openingBracketIndex != -1 &&
      expr.charAt(exprLength - 1) ==
          TimelineParseConstants.CLOSING_BRACKET_CHAR) {
    // Strip the surrounding brackets and any padding whitespace.
    expr = expr.substring(openingBracketIndex + 1, exprLength - 1).trim();
  }
  if (expr.isEmpty()) {
    // "()", "!( )" and friends carry no prefixes.
    return null;
  }
  // NOT_EQUAL prefixes must all hold (AND); EQUAL prefixes are
  // alternatives (OR).
  Operator op =
      (compareOp == TimelineCompareOp.NOT_EQUAL) ? Operator.AND : Operator.OR;
  TimelineFilterList list = new TimelineFilterList(op);
  String[] splits = expr.split(TimelineParseConstants.COMMA_DELIMITER);
  for (String split : splits) {
    list.addFilter(new TimelinePrefixFilter(compareOp, split.trim()));
  }
  return list;
}
use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter in project hadoop by apache.
From the class TestTimelineReaderWebServicesUtils, the method testDataToRetrieve:
@Test
public void testDataToRetrieve() throws Exception {
  // Plain comma-separated prefixes become OR-ed EQUAL prefix filters.
  String expr = "abc,def";
  TimelineFilterList expectedList = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "def"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Surrounding brackets and incidental whitespace do not change the result.
  expr = "(abc,def)";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " ( abc , def ) ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " abc , def ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // "!(...)" negates: the prefixes become NOT_EQUAL filters.
  expr = "!(abc,def)";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "def"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " ! ( abc , def ) ";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "!(abc)";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "(abc)";
  expectedList = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "abc"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "abc";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = " ! ( abc , def , xyz) ";
  expectedList = new TimelineFilterList(
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "abc"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "def"),
      new TimelinePrefixFilter(TimelineCompareOp.NOT_EQUAL, "xyz"));
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  expr = "!(abc,def,xyz)";
  verifyFilterList(expr,
      TimelineReaderWebServicesUtils.parseDataToRetrieve(expr), expectedList);
  // Malformed expressions must be rejected with TimelineParseException.
  expr = "!(abc,def,xyz";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("No closing bracket. Exception should have been thrown");
  } catch (TimelineParseException e) {
    // expected
  }
  // A '!' not followed by '(' is invalid.  (This case was previously
  // asserted twice verbatim; the duplicate block has been removed.)
  expr = "!abc,def,xyz";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("NOT(!) should be followed by opening bracket. Exception should " +
        "have been thrown");
  } catch (TimelineParseException e) {
    // expected
  }
  expr = "! r( abc,def,xyz)";
  try {
    TimelineReaderWebServicesUtils.parseDataToRetrieve(expr);
    fail("NOT(!) should be followed by opening bracket. Exception should " +
        "have been thrown");
  } catch (TimelineParseException e) {
    // expected
  }
  // Null, blank and empty-bracket expressions all parse to null.
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(null));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve(" "));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("()"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("!()"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("( )"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("!( )"));
  assertNull(TimelineReaderWebServicesUtils.parseDataToRetrieve("! ( )"));
}
use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, the method testReadAppsConfigPrefix:
@Test
public void testReadAppsConfigPrefix() throws Exception {
  // Retrieve only config keys beginning with "cfg_", first for a single
  // application and then for every application in the flow run.
  TimelineFilterList cfgPrefixOnly = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  TimelineEntity app = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1111111111_2222",
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineDataToRetrieve(cfgPrefixOnly, null, null, null));
  assertNotNull(app);
  assertEquals(1, app.getConfigs().size());
  Set<TimelineEntity> apps = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(),
      new TimelineDataToRetrieve(cfgPrefixOnly, null, null, null));
  int totalConfigs = 0;
  for (TimelineEntity appEntity : apps) {
    for (String confKey : appEntity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_",
          confKey.startsWith("cfg_"));
    }
    totalConfigs += appEntity.getConfigs().size();
  }
  assertEquals(3, totalConfigs);
}
Aggregations