Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testReadEntitiesIsRelatedTo.
@Test
public void testReadEntitiesIsRelatedTo() throws Exception {
  TimelineFilterList irt = new TimelineFilterList(Operator.OR);
  irt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task",
      new HashSet<Object>(Arrays.asList("relatedto1"))));
  irt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2",
      new HashSet<Object>(Arrays.asList("relatedto4"))));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt, null, null,
          null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(2, entities.size());
  int isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    if (!timelineEntity.getId().equals("hello") &&
        !timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity ids' should have been hello and hello1");
    }
  }
  assertEquals(3, isRelatedToCnt);

  TimelineFilterList irt1 = new TimelineFilterList();
  irt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1",
      new HashSet<Object>(Arrays.asList("relatedto3"))));
  irt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
      "task1", new HashSet<Object>(Arrays.asList("relatedto5"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt1, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    if (!timelineEntity.getId().equals("hello2")) {
      Assert.fail("Entity id should have been hello2");
    }
  }
  assertEquals(0, isRelatedToCnt);

  TimelineFilterList irt2 = new TimelineFilterList(Operator.OR);
  irt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task",
      new HashSet<Object>(Arrays.asList("relatedto1"))));
  irt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2",
      new HashSet<Object>(Arrays.asList("relatedto4"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt2, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(2, entities.size());
  isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    if (!timelineEntity.getId().equals("hello") &&
        !timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity ids' should have been hello and hello1");
    }
  }
  assertEquals(0, isRelatedToCnt);

  TimelineFilterList irt3 = new TimelineFilterList();
  irt3.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1",
      new HashSet<Object>(Arrays.asList("relatedto3", "relatedto5"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt3, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    if (!timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity id should have been hello1");
    }
  }
  assertEquals(0, isRelatedToCnt);

  TimelineFilterList irt4 = new TimelineFilterList();
  irt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1",
      new HashSet<Object>(Arrays.asList("relatedto3"))));
  irt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "dummy_task", new HashSet<Object>(Arrays.asList("relatedto5"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt4, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());

  TimelineFilterList irt5 = new TimelineFilterList();
  irt5.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task1",
      new HashSet<Object>(Arrays.asList("relatedto3", "relatedto7"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt5, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());

  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task",
      new HashSet<Object>(Arrays.asList("relatedto1"))));
  list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "dummy_task", new HashSet<Object>(Arrays.asList("relatedto4"))));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "task2",
      new HashSet<Object>(Arrays.asList("relatedto4"))));
  TimelineFilterList irt6 = new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, null, irt6, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    isRelatedToCnt += timelineEntity.getIsRelatedToEntities().size();
    if (!timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity id should have been hello1");
    }
  }
  assertEquals(0, isRelatedToCnt);
}
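For orientation, the sketch below shows roughly how an entity ends up carrying isRelatedTo relationships of the kind these filters match against. It assumes the addIsRelatedToEntity(String, String) helper on org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; the entity and relationship ids are illustrative, not the actual fixture written by this test class.

// Illustrative sketch, not the test fixture: the isRelatedTo map is keyed by
// the related entity's type, and the filter keys above ("task", "task2", ...)
// are matched against that key.
TimelineEntity entity = new TimelineEntity();
entity.setType("world");
entity.setId("hello");
entity.addIsRelatedToEntity("task", "relatedto1");
entity.addIsRelatedToEntity("task", "relatedto3");
// With the OR list irt above, matching either filter is enough for the
// entity to be returned by the reader.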
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testReadEntitiesRelatesTo.
@Test
public void testReadEntitiesRelatesTo() throws Exception {
  TimelineFilterList rt = new TimelineFilterList(Operator.OR);
  rt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container2", new HashSet<Object>(Arrays.asList("relatesto7"))));
  rt.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt, null, null, null,
          null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(2, entities.size());
  int relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello") &&
        !timelineEntity.getId().equals("hello2")) {
      Assert.fail("Entity ids' should have been hello and hello2");
    }
  }
  assertEquals(3, relatesToCnt);

  TimelineFilterList rt1 = new TimelineFilterList();
  rt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto1"))));
  rt1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto3"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt1, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity id should have been hello1");
    }
  }
  assertEquals(0, relatesToCnt);

  TimelineFilterList rt2 = new TimelineFilterList(Operator.OR);
  rt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container2", new HashSet<Object>(Arrays.asList("relatesto7"))));
  rt2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt2, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(2, entities.size());
  relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello") &&
        !timelineEntity.getId().equals("hello2")) {
      Assert.fail("Entity ids' should have been hello and hello2");
    }
  }
  assertEquals(0, relatesToCnt);

  TimelineFilterList rt3 = new TimelineFilterList();
  rt3.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container",
      new HashSet<Object>(Arrays.asList("relatesto1", "relatesto3"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt3, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello")) {
      Assert.fail("Entity id should have been hello");
    }
  }
  assertEquals(0, relatesToCnt);

  TimelineFilterList rt4 = new TimelineFilterList();
  rt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto1"))));
  rt4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "dummy_container", new HashSet<Object>(Arrays.asList("relatesto5"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt4, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());

  TimelineFilterList rt5 = new TimelineFilterList();
  rt5.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container",
      new HashSet<Object>(Arrays.asList("relatedto1", "relatesto8"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt5, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());

  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container2", new HashSet<Object>(Arrays.asList("relatesto7"))));
  list1.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "dummy_container", new HashSet<Object>(Arrays.asList("relatesto4"))));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  TimelineFilterList rt6 = new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt6, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello")) {
      Assert.fail("Entity id should have been hello");
    }
  }
  assertEquals(0, relatesToCnt);

  TimelineFilterList list3 = new TimelineFilterList();
  list3.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto1"))));
  list3.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container1", new HashSet<Object>(Arrays.asList("relatesto4"))));
  TimelineFilterList list4 = new TimelineFilterList();
  list4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto1"))));
  list4.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "container", new HashSet<Object>(Arrays.asList("relatesto2"))));
  TimelineFilterList combinedList =
      new TimelineFilterList(Operator.OR, list3, list4);
  TimelineFilterList rt7 = new TimelineFilterList(Operator.AND, combinedList,
      new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL, "container",
          new HashSet<Object>(Arrays.asList("relatesto3"))));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(null, null, null, rt7, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  relatesToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    relatesToCnt += timelineEntity.getRelatesToEntities().size();
    if (!timelineEntity.getId().equals("hello1")) {
      Assert.fail("Entity id should have been hello1");
    }
  }
  assertEquals(0, relatesToCnt);
}
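Two details worth noting in the cases above: the no-arg TimelineFilterList constructor behaves as a conjunction (rt1 only matches entities satisfying both its EQUAL and NOT_EQUAL filters), apparently equivalent to passing Operator.AND, while Operator.OR has to be requested explicitly; and, as rt6 and rt7 show, filter lists are themselves filters and can be nested. A minimal sketch of that composition, with illustrative type and id values:

// Conjunction: both conditions must hold (no-arg constructor, as in rt1/rt4).
TimelineFilterList mustHold = new TimelineFilterList();
mustHold.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
    "container", new HashSet<Object>(Arrays.asList("relatesto1"))));
mustHold.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
    "container", new HashSet<Object>(Arrays.asList("relatesto3"))));
// Disjunction plus nesting: either the conjunction above holds, or the single
// filter below does (compare rt6/rt7 above).
TimelineFilterList either = new TimelineFilterList(Operator.OR, mustHold,
    new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "container1",
        new HashSet<Object>(Arrays.asList("relatesto4"))));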
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
From the class TestFileSystemTimelineReaderImpl, method testGetEntitiesByRelations.
@Test
public void testGetEntitiesByRelations() throws Exception {
  // Get entities based on relatesTo.
  TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
  Set<Object> relatesToIds =
      new HashSet<Object>(Arrays.asList((Object) "flow1"));
  relatesTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "flow", relatesToIds));
  Set<TimelineEntity> result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, null, null, relatesTo, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(1, result.size());
  // Only one entity with ID id_1 should be returned.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_1")) {
      Assert.fail("Incorrect filtering based on relatesTo");
    }
  }

  // Get entities based on isRelatedTo.
  TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
  Set<Object> isRelatedToIds =
      new HashSet<Object>(Arrays.asList((Object) "tid1_2"));
  isRelatedTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
      "type1", isRelatedToIds));
  result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, null, null, null, isRelatedTo, null,
          null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(2, result.size());
  // Two entities with IDs' id_1 and id_3 should be returned.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
      Assert.fail("Incorrect filtering based on isRelatedTo");
    }
  }
}
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
From the class ApplicationEntityReader, method parseEntity.
@Override
protected TimelineEntity parseEntity(Result result) throws IOException {
  if (result == null || result.isEmpty()) {
    return null;
  }
  TimelineEntity entity = new TimelineEntity();
  entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
  String entityId = ApplicationColumn.ID.readResult(result).toString();
  entity.setId(entityId);
  TimelineEntityFilters filters = getFilters();
  // fetch created time
  Long createdTime = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
  entity.setCreatedTime(createdTime);
  EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
  // fetch is related to entities and match isRelatedTo filter. If isRelatedTo
  // filters do not match, entity would be dropped. We have to match filters
  // locally as relevant HBase filters to filter out rows on the basis of
  // isRelatedTo are not set in HBase scan.
  boolean checkIsRelatedTo = !isSingleEntityRead()
      && filters.getIsRelatedTo() != null
      && filters.getIsRelatedTo().getFilterList().size() > 0;
  if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) {
    readRelationship(entity, result, ApplicationColumnPrefix.IS_RELATED_TO,
        true);
    if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity,
        filters.getIsRelatedTo())) {
      return null;
    }
    if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
      entity.getIsRelatedToEntities().clear();
    }
  }
  // fetch relates to entities and match relatesTo filter. If relatesTo
  // filters do not match, entity would be dropped. We have to match filters
  // locally as relevant HBase filters to filter out rows on the basis of
  // relatesTo are not set in HBase scan.
  boolean checkRelatesTo = !isSingleEntityRead()
      && filters.getRelatesTo() != null
      && filters.getRelatesTo().getFilterList().size() > 0;
  if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) {
    readRelationship(entity, result, ApplicationColumnPrefix.RELATES_TO,
        false);
    if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity,
        filters.getRelatesTo())) {
      return null;
    }
    if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
      entity.getRelatesToEntities().clear();
    }
  }
  // fetch info if fieldsToRetrieve contains INFO or ALL.
  if (hasField(fieldsToRetrieve, Field.INFO)) {
    readKeyValuePairs(entity, result, ApplicationColumnPrefix.INFO, false);
  }
  // fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
  if (hasField(fieldsToRetrieve, Field.CONFIGS)) {
    readKeyValuePairs(entity, result, ApplicationColumnPrefix.CONFIG, true);
  }
  // fetch events and match event filters if they exist. If event filters do
  // not match, entity would be dropped. We have to match filters locally
  // as relevant HBase filters to filter out rows on the basis of events
  // are not set in HBase scan.
  boolean checkEvents = !isSingleEntityRead()
      && filters.getEventFilters() != null
      && filters.getEventFilters().getFilterList().size() > 0;
  if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) {
    readEvents(entity, result, ApplicationColumnPrefix.EVENT);
    if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity,
        filters.getEventFilters())) {
      return null;
    }
    if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
      entity.getEvents().clear();
    }
  }
  // fetch metrics if fieldsToRetrieve contains METRICS or ALL.
  if (hasField(fieldsToRetrieve, Field.METRICS)) {
    readMetrics(entity, result, ApplicationColumnPrefix.METRIC);
  }
  return entity;
}
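The comments in parseEntity note that row filters for relationships and events are not set in the HBase scan, so those filters are re-checked locally against the parsed entity. The helper below is not the TimelineStorageUtils implementation, only a sketch of what a single EQUAL isRelatedTo filter amounts to, assuming the entity's isRelatedTo map has the Map<String, Set<String>> shape returned by getIsRelatedToEntities(); NOT_EQUAL inverts the result, and the per-filter outcomes are then combined with the filter list's AND/OR operator.

// Sketch only: an entity passes an EQUAL isRelatedTo filter iff, for the
// filter's key (the related entity type), its isRelatedTo map contains every
// id the filter asks for (compare irt3 vs. irt5 in the first test above).
static boolean matchesEqualIsRelatedTo(Map<String, Set<String>> isRelatedTo,
    String requiredType, Set<String> requiredIds) {
  Set<String> actualIds = isRelatedTo.get(requiredType);
  return actualIds != null && actualIds.containsAll(requiredIds);
}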
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
From the class GenericEntityReader, method needCreateFilterListBasedOnFields.
/**
* Check if we need to create filter list based on fields. We need to create a
* filter list iff all fields need not be retrieved or we have some specific
* fields or metrics to retrieve. We also need to create a filter list if we
* have relationships(relatesTo/isRelatedTo) and event filters specified for
* the query.
*
* @return true if we need to create the filter list, false otherwise.
*/
protected boolean needCreateFilterListBasedOnFields() {
  TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
  // Check if all fields are to be retrieved or not. If all fields have to
  // be retrieved, also check if we have some metrics or configs to
  // retrieve specified for the query because then a filter list will have
  // to be created.
  boolean flag = !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL)
      || (dataToRetrieve.getConfsToRetrieve() != null
          && !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty())
      || (dataToRetrieve.getMetricsToRetrieve() != null
          && !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty());
  // If the condition above does not hold and we are reading multiple
  // entities, also check whether there are relationship (relatesTo/
  // isRelatedTo) and event filters specified for the query.
  if (!flag && !isSingleEntityRead()) {
    TimelineEntityFilters filters = getFilters();
    flag = (filters.getEventFilters() != null
        && !filters.getEventFilters().getFilterList().isEmpty())
        || (filters.getIsRelatedTo() != null
            && !filters.getIsRelatedTo().getFilterList().isEmpty())
        || (filters.getRelatesTo() != null
            && !filters.getRelatesTo().getFilterList().isEmpty());
  }
  return flag;
}
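To make the decision concrete, the combinations below mirror the constructors used in the tests earlier and note in comments what the method above would report for each; this is an illustration of the javadoc, not additional library behavior.

// All fields, no confs/metrics, no relationship/event filters:
// no filter list is needed.
TimelineDataToRetrieve allFields =
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null);
// Only specific fields requested: a filter list is needed to restrict
// the columns that are fetched.
TimelineDataToRetrieve someFields =
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null);
// All fields, but an isRelatedTo filter on a multi-entity read: per the
// javadoc above, a filter list is needed here as well.
TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
isRelatedTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
    "task", new HashSet<Object>(Arrays.asList("relatedto1"))));
TimelineEntityFilters withRelation = new TimelineEntityFilters(
    null, null, null, null, isRelatedTo, null, null, null, null);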