
Example 51 with TimelineDataToRetrieve

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve in the Apache Hadoop project.

From class TestFileSystemTimelineReaderImpl, method testAppFlowMappingCsv.

/** This test checks whether we can handle commas in app flow mapping csv. */
@Test
public void testAppFlowMappingCsv() throws Exception {
    // Test getting an entity by cluster and app where flow entry
    // in app flow mapping csv has commas.
    TimelineEntity result = reader.getEntity(new TimelineReaderContext("cluster1", null, null, null, "app2", "app", "id_5"), new TimelineDataToRetrieve(null, null, null, null));
    Assert.assertEquals((new TimelineEntity.Identifier("app", "id_5")).toString(), result.getIdentifier().toString());
    Assert.assertEquals((Long) 1425016502050L, result.getCreatedTime());
}
Also used : TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) Test(org.junit.Test)
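
For reference, the four nulls passed to TimelineDataToRetrieve above leave it at its defaults, which is why only the identifier and created time are asserted. Below is a minimal sketch (not from the original test) of the same lookup asking for a fuller view; it assumes the four-argument constructor is (confsToRetrieve, metricsToRetrieve, fieldsToRetrieve, metricsLimit) and that the Field enum comes from TimelineReader in org.apache.hadoop.yarn.server.timelineservice.storage, so verify both against the Hadoop version in use.

// Sketch only: request the full view of the entity instead of the default
// (id, type, created time). Field.ALL asks for configs, metrics, info,
// events and relations, if the backend stored them.
TimelineReaderContext context =
    new TimelineReaderContext("cluster1", null, null, null, "app2", "app", "id_5");
TimelineDataToRetrieve fullView =
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null);
TimelineEntity entity = reader.getEntity(context, fullView);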

Example 52 with TimelineDataToRetrieve

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve in the Apache Hadoop project.

From class TestFileSystemTimelineReaderImpl, method testGetEntityDefaultView.

@Test
public void testGetEntityDefaultView() throws Exception {
    // If no fields are specified, entity is returned with default view i.e.
    // only the id, type and created time.
    TimelineEntity result = reader.getEntity(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", "id_1"), new TimelineDataToRetrieve(null, null, null, null));
    Assert.assertEquals((new TimelineEntity.Identifier("app", "id_1")).toString(), result.getIdentifier().toString());
    Assert.assertEquals((Long) 1425016502000L, result.getCreatedTime());
    Assert.assertEquals(0, result.getConfigs().size());
    Assert.assertEquals(0, result.getMetrics().size());
}
Also used : TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) Test(org.junit.Test)
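
As a hedged contrast to the default view above, the sketch below passes an explicit fieldsToRetrieve set so that the config and metric maps the assertions expect to be empty would instead be populated. It assumes the same (confsToRetrieve, metricsToRetrieve, fieldsToRetrieve, metricsLimit) argument order, java.util.EnumSet, and the TimelineReader.Field enum.

// Sketch only: explicitly ask for CONFIGS and METRICS so that
// getConfigs() and getMetrics() are filled in rather than empty.
TimelineDataToRetrieve configsAndMetrics =
    new TimelineDataToRetrieve(null, null,
        EnumSet.of(Field.CONFIGS, Field.METRICS), null);
TimelineEntity entity = reader.getEntity(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", "id_1"),
    configsAndMetrics);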

Example 53 with TimelineDataToRetrieve

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve in the Apache Hadoop project.

From class TestFileSystemTimelineReaderImpl, method testGetEntitiesByRelations.

@Test
public void testGetEntitiesByRelations() throws Exception {
    // Get entities based on relatesTo.
    TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
    Set<Object> relatesToIds = new HashSet<Object>(Arrays.asList((Object) "flow1"));
    relatesTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "flow", relatesToIds));
    Set<TimelineEntity> result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, relatesTo, null, null, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(1, result.size());
    // Only one entity with ID id_1 should be returned.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1")) {
            Assert.fail("Incorrect filtering based on relatesTo");
        }
    }
    // Get entities based on isRelatedTo.
    TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
    Set<Object> isRelatedToIds = new HashSet<Object>(Arrays.asList((Object) "tid1_2"));
    isRelatedTo.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL, "type1", isRelatedToIds));
    result = reader.getEntities(new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null), new TimelineEntityFilters(null, null, null, null, isRelatedTo, null, null, null, null), new TimelineDataToRetrieve());
    Assert.assertEquals(2, result.size());
    // Two entities with IDs id_1 and id_3 should be returned.
    for (TimelineEntity entity : result) {
        if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
            Assert.fail("Incorrect filtering based on isRelatedTo");
        }
    }
}
Also used : TimelineKeyValuesFilter(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) HashSet(java.util.HashSet) Test(org.junit.Test)
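
The two queries above apply relatesTo and isRelatedTo separately. As the calls suggest, the nine-argument TimelineEntityFilters constructor takes relatesTo in the fourth position and isRelatedTo in the fifth, so both filters can also be combined in a single call; the sketch below reuses the filter lists built in the test and is illustrative rather than part of it.

// Sketch only: apply both relation filters at once, returning only entities
// that relate to flow "flow1" and are related to entity "tid1_2" of type "type1".
Set<TimelineEntity> both = reader.getEntities(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1", "app", null),
    new TimelineEntityFilters(null, null, null, relatesTo, isRelatedTo,
        null, null, null, null),
    new TimelineDataToRetrieve());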

Example 54 with TimelineDataToRetrieve

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve in the Apache Hadoop project.

From class TestFileSystemTimelineReaderImpl, method testGetEntityByClusterAndApp.

@Test
public void testGetEntityByClusterAndApp() throws Exception {
    // Cluster and AppId should be enough to get an entity.
    TimelineEntity result = reader.getEntity(new TimelineReaderContext("cluster1", null, null, null, "app1", "app", "id_1"), new TimelineDataToRetrieve(null, null, null, null));
    Assert.assertEquals((new TimelineEntity.Identifier("app", "id_1")).toString(), result.getIdentifier().toString());
    Assert.assertEquals((Long) 1425016502000L, result.getCreatedTime());
    Assert.assertEquals(0, result.getConfigs().size());
    Assert.assertEquals(0, result.getMetrics().size());
}
Also used : TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) Test(org.junit.Test)
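
The three nulls in the context above are the user, flow name and flow run id, which the reader resolves from the app flow mapping (see example 51). A labelled sketch of the same call follows, with the argument order taken from the tests in this file.

// Sketch only: the same lookup with each TimelineReaderContext argument named.
String clusterId = "cluster1";
String userId = null;     // resolved by the reader from the app flow mapping
String flowName = null;   // resolved by the reader from the app flow mapping
Long flowRunId = null;    // resolved by the reader from the app flow mapping
String appId = "app1";
String entityType = "app";
String entityId = "id_1";
TimelineEntity entity = reader.getEntity(
    new TimelineReaderContext(clusterId, userId, flowName, flowRunId,
        appId, entityType, entityId),
    new TimelineDataToRetrieve(null, null, null, null));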

Example 55 with TimelineDataToRetrieve

Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve in the Apache Hadoop project.

From class FlowRunEntityReader, method constructFilterListBasedOnFields.

@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
    // Metrics are always returned if we are reading a single entity.
    if (!isSingleEntityRead() && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
        FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
        infoColFamilyList.addFilter(infoColumnFamily);
        infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
        list.addFilter(infoColFamilyList);
    } else {
        // Check if metricsToRetrieve are specified and if they are, create a
        // filter list for info column family by adding flow run tables columns
        // and a list for metrics to retrieve. Please note that fieldsToRetrieve
        // will have METRICS added to it if metricsToRetrieve are specified
        // (in augmentParams()).
        TimelineFilterList metricsToRetrieve = dataToRetrieve.getMetricsToRetrieve();
        if (metricsToRetrieve != null && !metricsToRetrieve.getFilterList().isEmpty()) {
            FilterList infoColFamilyList = new FilterList();
            infoColFamilyList.addFilter(infoColumnFamily);
            FilterList columnsList = updateFixedColumns();
            columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(FlowRunColumnPrefix.METRIC, metricsToRetrieve));
            infoColFamilyList.addFilter(columnsList);
            list.addFilter(infoColFamilyList);
        }
    }
    return list;
}
Also used : BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)
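
The outer list above uses Operator.MUST_PASS_ONE, so a row or cell is kept if any of the added filters passes. The sketch below shows how a filter list produced this way would typically be attached to an HBase scan; the table handle and scan setup are illustrative and not taken from FlowRunEntityReader.

// Sketch only: applying the constructed FilterList to a scan over the flow
// run table. 'flowRunTable' is a hypothetical org.apache.hadoop.hbase.client.Table.
FilterList filters = constructFilterListBasedOnFields();
Scan scan = new Scan();
scan.setFilter(filters);
try (ResultScanner scanner = flowRunTable.getScanner(scan)) {
    for (Result row : scanner) {
        // process each flow run row that passed the filters
    }
}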

Aggregations

TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 56 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 54 usages
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 54 usages
Test (org.junit.Test): 54 usages
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 45 usages
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 27 usages
Configuration (org.apache.hadoop.conf.Configuration): 14 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 14 usages
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter): 10 usages
HashSet (java.util.HashSet): 9 usages
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 9 usages
HBaseTimelineReaderImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl): 9 usages
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl): 9 usages
TimelineKeyValueFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter): 7 usages
TimelineKeyValuesFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter): 7 usages
Connection (org.apache.hadoop.hbase.client.Connection): 6 usages
Result (org.apache.hadoop.hbase.client.Result): 6 usages
TimelineCompareFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter): 6 usages
HashMap (java.util.HashMap): 5 usages
Map (java.util.Map): 5 usages