
Example 21 with TimelineFilterList

Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.

From the class GenericEntityReader, method constructFilterListBasedOnFilters:

@Override
protected FilterList constructFilterListBasedOnFilters() throws IOException {
    // Filters here cannot be null for multiple entity reads as they are set in
    // augmentParams if null.
    FilterList listBasedOnFilters = new FilterList();
    TimelineEntityFilters filters = getFilters();
    // Create filter list based on created time range and add it to
    // listBasedOnFilters.
    long createdTimeBegin = filters.getCreatedTimeBegin();
    long createdTimeEnd = filters.getCreatedTimeEnd();
    if (createdTimeBegin != 0 || createdTimeEnd != Long.MAX_VALUE) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createSingleColValueFiltersByRange(EntityColumn.CREATED_TIME, createdTimeBegin, createdTimeEnd));
    }
    // Create filter list based on metric filters and add it to
    // listBasedOnFilters.
    TimelineFilterList metricFilters = filters.getMetricFilters();
    if (metricFilters != null && !metricFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.METRIC, metricFilters));
    }
    // Create filter list based on config filters and add it to
    // listBasedOnFilters.
    TimelineFilterList configFilters = filters.getConfigFilters();
    if (configFilters != null && !configFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.CONFIG, configFilters));
    }
    // Create filter list based on info filters and add it to listBasedOnFilters
    TimelineFilterList infoFilters = filters.getInfoFilters();
    if (infoFilters != null && !infoFilters.getFilterList().isEmpty()) {
        listBasedOnFilters.addFilter(TimelineFilterUtils.createHBaseFilterList(EntityColumnPrefix.INFO, infoFilters));
    }
    return listBasedOnFilters;
}
Also used: TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters), TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), FilterList (org.apache.hadoop.hbase.filter.FilterList)
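
For context, a minimal sketch (not taken from the Hadoop sources above) of how the filter lists consumed by this method could be assembled on the caller side. The constructors and the nine-argument TimelineEntityFilters signature are the ones used in the test example further down this page; imports follow the "Also used" lists, and everything else is illustrative only.

// Illustrative sketch only: builds the config and metric filter lists that
// constructFilterListBasedOnFilters later converts into HBase filters.
TimelineFilterList configFilters = new TimelineFilterList(Operator.OR,
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"),
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
TimelineFilterList metricFilters = new TimelineFilterList(
    new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
// Positions 7 and 8 of this constructor carry the config and metric filter
// lists, matching the calls in TestFileSystemTimelineReaderImpl below.
TimelineEntityFilters filters = new TimelineEntityFilters(
    null, null, null, null, null, null, configFilters, metricFilters, null);
// With these set, the method above adds one HBase filter per non-empty list;
// the returned FilterList combines them with AND semantics (HBase FilterList
// defaults to MUST_PASS_ALL).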

Example 22 with TimelineFilterList

Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.

From the class TimelineStorageUtils, method matchFilters:

/**
   * Common routine to match different filters. Iterates over a filter list and
   * calls routines based on filter type.
   *
   * @param entity Timeline entity.
   * @param filters filter list.
   * @param entityFiltersType type of filters which are being matched.
   * @return true if the filters match the entity, false otherwise.
   * @throws IOException if an unsupported filter type is encountered while
   *     matching.
   */
private static boolean matchFilters(TimelineEntity entity, TimelineFilterList filters, TimelineEntityFiltersType entityFiltersType) throws IOException {
    if (filters == null || filters.getFilterList().isEmpty()) {
        return false;
    }
    TimelineFilterList.Operator operator = filters.getOperator();
    for (TimelineFilter filter : filters.getFilterList()) {
        TimelineFilterType filterType = filter.getFilterType();
        if (!entityFiltersType.isValidFilter(filterType)) {
            throw new IOException("Unsupported filter " + filterType);
        }
        boolean matched = false;
        switch(filterType) {
            case LIST:
                matched = matchFilters(entity, (TimelineFilterList) filter, entityFiltersType);
                break;
            case COMPARE:
                matched = matchCompareFilter(entity, (TimelineCompareFilter) filter, entityFiltersType);
                break;
            case EXISTS:
                matched = matchExistsFilter(entity, (TimelineExistsFilter) filter, entityFiltersType);
                break;
            case KEY_VALUE:
                matched = matchKeyValueFilter(entity, (TimelineKeyValueFilter) filter, entityFiltersType);
                break;
            case KEY_VALUES:
                matched = matchKeyValuesFilter(entity, (TimelineKeyValuesFilter) filter, entityFiltersType);
                break;
            default:
                throw new IOException("Unsupported filter " + filterType);
        }
        if (!matched) {
            if (operator == TimelineFilterList.Operator.AND) {
                return false;
            }
        } else {
            if (operator == TimelineFilterList.Operator.OR) {
                return true;
            }
        }
    }
    return operator == TimelineFilterList.Operator.AND;
}
Also used: TimelineKeyValueFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter), TimelineKeyValuesFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter), TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), TimelineFilterType (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType), TimelineCompareFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter), TimelineFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter), IOException (java.io.IOException), TimelineExistsFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter)
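
To make the AND/OR handling above concrete, here is a hypothetical input (built only from constructors that appear in the examples on this page); the comments describe how matchFilters would walk it. This is a sketch, not code from the Hadoop sources.

// Hypothetical filter list for matchFilters, illustrative only.
// The outer list uses OR, so evaluation returns true as soon as one child
// matches; the nested list uses the default AND operator, so the recursive
// matchFilters call for it only succeeds if both exists-filters match.
TimelineFilterList eventChecks = new TimelineFilterList(Operator.OR,
    new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"),
    new TimelineFilterList(
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_1"),
        new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "event_4")));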

Example 23 with TimelineFilterList

Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.

From the class TestFileSystemTimelineReaderImpl, method testGetFilteredEntities:

@Test
public void testGetFilteredEntities() throws Exception {
    // Get entities based on info filters.
    TimelineFilterList infoFilterList = new TimelineFilterList();
    infoFilterList.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    Set<TimelineEntity> result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    // Only one entity with ID id_3 should be returned.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on info filters");
        }
    }
    // Get entities based on config filters.
    TimelineFilterList confFilterList = new TimelineFilterList();
    confFilterList.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"));
    confFilterList.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on config filters");
        }
    }
    // Get entities based on event filters.
    TimelineFilterList eventFilters = new TimelineFilterList();
    eventFilters.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
    eventFilters.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, eventFilters), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on event filters");
        }
    }
    // Get entities based on metric filters.
    TimelineFilterList metricFilterList = new TimelineFilterList();
    metricFilterList.addFilter(new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    // Two entities with IDs id_1 and id_2 should be returned.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on metric filters");
        }
    }
    // Get entities based on complex config filters.
    TimelineFilterList list1 = new TimelineFilterList();
    list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "129"));
    list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
    TimelineFilterList list2 = new TimelineFilterList();
    list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
    list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
    TimelineFilterList confFilterList1 = new TimelineFilterList(Operator.OR, list1, list2);
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on config filters");
        }
    }
    TimelineFilterList list3 = new TimelineFilterList();
    list3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "config_1", "123"));
    list3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
    TimelineFilterList list4 = new TimelineFilterList();
    list4.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
    TimelineFilterList confFilterList2 = new TimelineFilterList(Operator.OR, list3, list4);
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList2, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on config filters");
        }
    }
    TimelineFilterList confFilterList3 = new TimelineFilterList();
    confFilterList3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "config_1", "127"));
    confFilterList3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList3, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on config filters");
        }
    }
    TimelineFilterList confFilterList4 = new TimelineFilterList();
    confFilterList4.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
    confFilterList4.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList4, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(0, result.size());
    TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR);
    confFilterList5.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
    confFilterList5.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList5, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on config filters");
        }
    }
    // Get entities based on complex metric filters.
    TimelineFilterList list6 = new TimelineFilterList();
    list6.addFilter(new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "metric1", 200));
    list6.addFilter(new TimelineCompareFilter(TimelineCompareOp.EQUAL, "metric3", 23));
    TimelineFilterList list7 = new TimelineFilterList();
    list7.addFilter(new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "metric2", 74));
    TimelineFilterList metricFilterList1 = new TimelineFilterList(Operator.OR, list6, list7);
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    // Two entities with IDs id_2 and id_3 should be returned.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on metric filters");
        }
    }
    TimelineFilterList metricFilterList2 = new TimelineFilterList();
    metricFilterList2.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "metric2", 70));
    metricFilterList2.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList2, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1")) {
            Assert.fail("Incorrect filtering based on metric filters");
        }
    }
    TimelineFilterList metricFilterList3 = new TimelineFilterList();
    metricFilterList3.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
    metricFilterList3.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList3, null), new TimelineDataToRetrieve());
    Assert.assertEquals(0, result.size());
    TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR);
    metricFilterList4.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
    metricFilterList4.addFilter(new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList4, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on metric filters");
        }
    }
    TimelineFilterList metricFilterList5 = new TimelineFilterList(new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "metric2", 74));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList5, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
            Assert.fail("Incorrect filtering based on metric filters");
        }
    }
    TimelineFilterList infoFilterList1 = new TimelineFilterList();
    infoFilterList1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    infoFilterList1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(0, result.size());
    TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR);
    infoFilterList2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    infoFilterList2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on info filters");
        }
    }
    TimelineFilterList infoFilterList3 = new TimelineFilterList();
    infoFilterList3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
    infoFilterList3.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(0, result.size());
    TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR);
    infoFilterList4.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
    infoFilterList4.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1")) {
            Assert.fail("Incorrect filtering based on info filters");
        }
    }
}
Also used: TimelineKeyValueFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter), TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters), TimelineCompareFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineExistsFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter), TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), Test (org.junit.Test)
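
A condensed sketch of the same query pattern, assuming the reader fixture and test data of the test above (reader, cluster1, user1, flow1, app1 and the "app" entity type come from that test). It is illustrative only: it combines a config filter list and a metric filter list in one call, and both lists must match because separate filter lists inside TimelineEntityFilters are ANDed together.

// Condensed sketch, reusing the fixture of the test above.
TimelineFilterList conf = new TimelineFilterList(Operator.OR,
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"),
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
TimelineFilterList metrics = new TimelineFilterList(
    new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
Set<TimelineEntity> hits = reader.getEntities(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null),
    new TimelineEntityFilters(null, null, null, null, null, null, conf, metrics, null),
    new TimelineDataToRetrieve());
// Only entities satisfying the OR-ed config condition AND the metric
// condition are returned.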

Example 24 with TimelineFilterList

Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.

From the class TestTimelineReaderWebServicesUtils, method testEventFiltersParsing:

@Test
public void testEventFiltersParsing() throws Exception {
    String expr = "abc,def";
    TimelineFilterList expectedList = new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "abc"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "def"));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "(abc,def)";
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "(abc,def) OR (rst, uvx)";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "abc"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "def")), new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "rst"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "uvx")));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "!(abc,def,uvc) OR (rst, uvx)";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "abc"), new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "def"), new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "uvc")), new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "rst"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "uvx")));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "(((!(abc,def,uvc) OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb))";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "abc"), new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "def"), new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "uvc")), new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "rst"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "uvx"))), new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "abcdefg")), new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "ghj"), new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL, "tyu")))), new TimelineFilterList(new TimelineFilterList(new TimelineExistsFilter(TimelineCompareOp.EQUAL, "bcd"), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "tyu")), new TimelineExistsFilter(TimelineCompareOp.EQUAL, "uvb")));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "  (  (  (  !  (  abc , def  ,   uvc)   OR   (   rst  ,   uvx )  )" + "  AND   (  !  (  abcdefg ) OR  !   (  ghj,  tyu)  ) )  OR   (   (" + "   bcd   ,   tyu  )   AND   uvb  )   )";
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseEventFilters(expr), expectedList);
    expr = "(((!(abc,def,uvc) OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb)";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Improper brackets. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(((!(abc,def,uvc) (OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb))";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected opening bracket. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(((!(abc,def,uvc) OR) (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb))";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected closing bracket. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(((!(abc,def,uvc) PI (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb))";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Invalid op. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(((!(abc,def,uvc) !OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu)))" + " OR ((bcd,tyu) AND uvb))";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected ! char. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "abc,def,uvc) OR (rst, uvx)";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected closing bracket. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "abc,def,uvc OR )rst, uvx)";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected closing bracket. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "abc,def,uvc OR ,rst, uvx)";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected delimiter. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "abc,def,uvc OR !  ";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unexpected not char. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(abc,def,uvc)) OR (rst, uvx)";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("Unbalanced brackets. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    expr = "(((! ,(abc,def,uvc) OR (rst, uvx)) AND (!(abcdefg) OR !(ghj,tyu" + "))) OR ((bcd,tyu) AND uvb))";
    try {
        TimelineReaderWebServicesUtils.parseEventFilters(expr);
        fail("( should follow ! char. Exception should have been thrown");
    } catch (TimelineParseException e) {
    }
    assertNull(TimelineReaderWebServicesUtils.parseEventFilters(null));
    assertNull(TimelineReaderWebServicesUtils.parseEventFilters("   "));
}
Also used: TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), TimelineExistsFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter), Test (org.junit.Test)
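
For orientation, an illustrative fragment (not part of the test above, and assumed to run inside a method that declares throws TimelineParseException) showing how a parsed expression can be inspected. parseEventFilters, getOperator() and getFilterList() all appear in the examples on this page; the expected shape mirrors the structure the test verifies for a very similar expression.

// Illustrative fragment only.
TimelineFilterList parsed =
    TimelineReaderWebServicesUtils.parseEventFilters("!(abc,def) OR (rst, uvx)");
// Top level is an OR over two sub-lists: NOT_EQUAL exists-filters for abc and
// def, and EQUAL exists-filters for rst and uvx.
assert parsed.getOperator() == TimelineFilterList.Operator.OR;
assert parsed.getFilterList().size() == 2;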

Example 25 with TimelineFilterList

Use of org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList in project hadoop by apache.

From the class TestTimelineReaderWebServicesUtils, method testRelationFiltersParsing:

@Test
public void testRelationFiltersParsing() throws Exception {
    String expr = "type1:entity11,type2:entity21:entity22";
    TimelineFilterList expectedList = new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", Sets.newHashSet((Object) "entity11")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type2", Sets.newHashSet((Object) "entity21", "entity22")));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "(type1:entity11,type2:entity21:entity22)";
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "(type1:entity11,type2:entity21:entity22) OR (type3:entity31:" + "entity32:entity33,type1:entity11:entity12)";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", Sets.newHashSet((Object) "entity11")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type2", Sets.newHashSet((Object) "entity21", "entity22"))), new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type3", Sets.newHashSet((Object) "entity31", "entity32", "entity33")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", Sets.newHashSet((Object) "entity11", "entity12"))));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "!(type1:entity11,type2:entity21:entity22,type5:entity51) OR " + "(type3:entity31:entity32:entity33,type1:entity11:entity12)";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type1", Sets.newHashSet((Object) "entity11")), new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type2", Sets.newHashSet((Object) "entity21", "entity22")), new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type5", Sets.newHashSet((Object) "entity51"))), new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type3", Sets.newHashSet((Object) "entity31", "entity32", "entity33")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", Sets.newHashSet((Object) "entity11", "entity12"))));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "(((!(type1:entity11,type2:entity21:entity22,type5:entity51) OR " + "(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND " + "(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," + "type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))";
    expectedList = new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type1", Sets.newHashSet((Object) "entity11")), new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type2", Sets.newHashSet((Object) "entity21", "entity22")), new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type5", Sets.newHashSet((Object) "entity51"))), new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type3", Sets.newHashSet((Object) "entity31", "entity32", "entity33")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", Sets.newHashSet((Object) "entity11", "entity12")))), new TimelineFilterList(Operator.OR, new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type11", Sets.newHashSet((Object) "entity111"))), new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type4", Sets.newHashSet((Object) "entity43", "entity44", "entity47", "entity49")), new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "type7", Sets.newHashSet((Object) "entity71"))))), new TimelineFilterList(new TimelineFilterList(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type2", Sets.newHashSet((Object) "entity2")), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type8", Sets.newHashSet((Object) "entity88"))), new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "t9", Sets.newHashSet((Object) "e", "e1"))));
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "   (   (  (   !   (   type1:entity11  ,  type2:entity21:entity22" + "  ,  type5:entity51  )   OR  (   type3:entity31:entity32:entity33  " + "     ,   type1:entity11:entity12)) AND (!(  type11:entity111  )  OR " + "    !   (   type4:entity43:entity44:entity47:entity49 , " + "type7:entity71  )  )  ) OR  (  (  type2:entity2 , type8:entity88) " + "AND  t9:e:e1 )    ) ";
    verifyFilterList(expr, TimelineReaderWebServicesUtils.parseRelationFilters(expr), expectedList);
    expr = "(((!(type1 : entity11,type2:entity21:entity22,type5:entity51) OR " + "(type3:entity31:entity32:entity33,type1:entity11:entity12)) AND " + "(!(type11:entity111) OR !(type4:entity43:entity44:entity47:entity49," + "type7:entity71))) OR ((type2:entity2,type8:entity88) AND t9:e:e1))";
    try {
        TimelineReaderWebServicesUtils.parseRelationFilters(expr);
        fail("Space not allowed in relation expression. Exception should have " + "been thrown");
    } catch (TimelineParseException e) {
    }
}
Also used: TimelineKeyValuesFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter), TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), Test (org.junit.Test)
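
For reference, a single-term sketch (illustrative only) of what one relation term parses into; the constructor and Guava's Sets.newHashSet are the same ones used in the expected lists of the test above.

// Illustrative only: the term "type2:entity21:entity22" parses into a single
// TimelineKeyValuesFilter keyed by the entity type, with the related entity
// ids collected into a set.
TimelineKeyValuesFilter term = new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
    "type2", Sets.newHashSet((Object) "entity21", "entity22"));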

Aggregations

TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 47 usages
Test (org.junit.Test): 32 usages
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 28 usages
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 27 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 26 usages
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 26 usages
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter): 12 usages
TimelineKeyValueFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter): 10 usages
TimelineKeyValuesFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter): 9 usages
TimelineCompareFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter): 8 usages
HashSet (java.util.HashSet): 7 usages
TimelineExistsFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter): 7 usages
FilterList (org.apache.hadoop.hbase.filter.FilterList): 6 usages
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 5 usages
Operator (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator): 3 usages
EnumSet (java.util.EnumSet): 2 usages
Set (java.util.Set): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 2 usages
HBaseTimelineReaderImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl): 2 usages