Example 46 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

The class TestHBaseStorageFlowRun, method testWriteFlowRunMetricsPrefix.

@Test
public void testWriteFlowRunMetricsPrefix() throws Exception {
    String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
    String user = "testWriteFlowRunMetricsPrefix_user1";
    String flow = "testWriteFlowRunMetricsPrefix_flow_name";
    String flowVersion = "CF7022C10F1354";
    TimelineEntities te = new TimelineEntities();
    TimelineEntity entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
    te.addEntity(entityApp1);
    HBaseTimelineWriterImpl hbi = null;
    Configuration c1 = util.getConfiguration();
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        String appName = "application_11111111111111_1111";
        hbi.write(cluster, user, flow, flowVersion, 1002345678919L, appName, te);
        // write another application with the same metric to this flow
        te = new TimelineEntities();
        TimelineEntity entityApp2 = TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
        te.addEntity(entityApp2);
        appName = "application_11111111111111_2222";
        hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
        hbi.flush();
    } finally {
        if (hbi != null) {
            hbi.close();
        }
    }
    // use the timeline reader to verify data
    HBaseTimelineReaderImpl hbr = null;
    try {
        hbr = new HBaseTimelineReaderImpl();
        hbr.init(c1);
        hbr.start();
        TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
            new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
                METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
        TimelineEntity entity = hbr.getEntity(
            new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
                TimelineEntityType.YARN_FLOW_RUN.toString(), null),
            new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
        assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
        Set<TimelineMetric> metrics = entity.getMetrics();
        assertEquals(1, metrics.size());
        for (TimelineMetric metric : metrics) {
            String id = metric.getId();
            Map<Long, Number> values = metric.getValues();
            assertEquals(1, values.size());
            Number value = null;
            for (Number n : values.values()) {
                value = n;
            }
            switch(id) {
                case METRIC1:
                    assertEquals(40L, value);
                    break;
                default:
                    fail("unrecognized metric: " + id);
            }
        }
        Set<TimelineEntity> entities = hbr.getEntities(
            new TimelineReaderContext(cluster, user, flow, null, null,
                TimelineEntityType.YARN_FLOW_RUN.toString(), null),
            new TimelineEntityFilters(),
            new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
        assertEquals(2, entities.size());
        int metricCnt = 0;
        for (TimelineEntity timelineEntity : entities) {
            metricCnt += timelineEntity.getMetrics().size();
        }
        assertEquals(2, metricCnt);
    } finally {
        if (hbr != null) {
            hbr.close();
        }
    }
}
Also used : TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) Configuration(org.apache.hadoop.conf.Configuration) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) HBaseTimelineWriterImpl(org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl) HBaseTimelineReaderImpl(org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl) TimelineEntities(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities) TimelinePrefixFilter(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter) Test(org.junit.Test)
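
The metric prefix handed to TimelinePrefixFilter above is everything in the metric id up to and including its first underscore. A minimal sketch of that derivation, assuming METRIC1 is "MAP_SLOT_MILLIS" (the constant is defined elsewhere in TestHBaseStorageFlowRun, so the value here is an assumption):

public class MetricPrefixSketch {
    // Assumed value; the real constant lives in the test class.
    private static final String METRIC1 = "MAP_SLOT_MILLIS";

    public static void main(String[] args) {
        // Everything up to and including the first '_' becomes the prefix.
        String prefix = METRIC1.substring(0, METRIC1.indexOf("_") + 1);
        System.out.println(prefix); // prints "MAP_"
    }
}

With TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP_"), only metric ids beginning with "MAP_" come back, which is what the assertEquals(1, metrics.size()) check in the test relies on.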

Example 47 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

The class TestHBaseStorageFlowRunCompaction, method testWriteFlowRunCompaction.

@Test
public void testWriteFlowRunCompaction() throws Exception {
    String cluster = "kompaction_cluster1";
    String user = "kompaction_FlowRun__user1";
    String flow = "kompaction_flowRun_flow_name";
    String flowVersion = "AF1021C19F1351";
    long runid = 1449526652000L;
    int start = 10;
    int count = 2000;
    int appIdSuffix = 1;
    HBaseTimelineWriterImpl hbi = null;
    long insertTs = System.currentTimeMillis() - count;
    Configuration c1 = util.getConfiguration();
    TimelineEntities te1 = null;
    TimelineEntity entityApp1 = null;
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        // write many entities, reusing the same two application ids so the
        // flow run row accumulates a large number of metric cells
        for (int i = start; i < start + count; i++) {
            String appName = "application_10240000000000_" + appIdSuffix;
            insertTs++;
            te1 = new TimelineEntities();
            entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(insertTs, c1);
            te1.addEntity(entityApp1);
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
            appName = "application_2048000000000_7" + appIdSuffix;
            insertTs++;
            te1 = new TimelineEntities();
            entityApp1 = TestFlowDataGenerator.getEntityMetricsApp2(insertTs);
            te1.addEntity(entityApp1);
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
        }
    } finally {
        String appName = "application_10240000000000_" + appIdSuffix;
        te1 = new TimelineEntities();
        entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1Complete(insertTs + 1, c1);
        te1.addEntity(entityApp1);
        if (hbi != null) {
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
            hbi.flush();
            hbi.close();
        }
    }
    // check in flow run table
    HRegionServer server = util.getRSForFirstRegionInTable(
        TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    List<Region> regions = server.getOnlineRegions(
        TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    assertTrue("Didn't find any regions for primary table!", regions.size() > 0);
    // flush and compact all the regions of the primary table
    for (Region region : regions) {
        region.flush(true);
        region.compact(true);
    }
    // check flow run for one flow many apps
    checkFlowRunTable(cluster, user, flow, runid, c1, 4);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) TimelineEntities(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities) Region(org.apache.hadoop.hbase.regionserver.Region) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) HBaseTimelineWriterImpl(org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl) HRegionServer(org.apache.hadoop.hbase.regionserver.HRegionServer) Test(org.junit.Test)
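
Two details above are easy to miss: the loop reuses the same two application ids on every iteration, so a single flow run row accumulates thousands of metric cells, and the explicit flush(true)/compact(true) calls are what push those cells through the compaction path. A standalone sketch of that flush-then-compact idiom, assuming an HBaseTestingUtility instance like the test's util field:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;

public class FlushCompactSketch {
    // Sketch only: force every online region of a table through a flush
    // and a compaction, mirroring the loop in the test above.
    static void flushAndCompact(HBaseTestingUtility util, TableName table)
            throws IOException {
        HRegionServer server = util.getRSForFirstRegionInTable(table);
        List<Region> regions = server.getOnlineRegions(table);
        for (Region region : regions) {
            region.flush(true);   // move memstore contents into HFiles
            region.compact(true); // rewrite HFiles; table coprocessors can
                                  // observe and rewrite cells during this pass
        }
    }
}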

Example 48 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

The class TimelineEntityReader, method readEntities.

/**
   * Reads and deserializes a set of timeline entities from HBase storage.
   * It goes through all the results available and returns at most as many
   * entries as the limit allows, in the entities' natural sort order.
   *
   * @param hbaseConf HBase Configuration.
   * @param conn HBase Connection.
   * @return a set of <cite>TimelineEntity</cite> objects.
   * @throws IOException if any exception is encountered while reading entities.
   */
public Set<TimelineEntity> readEntities(Configuration hbaseConf, Connection conn) throws IOException {
    validateParams();
    augmentParams(hbaseConf, conn);
    NavigableSet<TimelineEntity> entities = new TreeSet<>();
    FilterList filterList = createFilterList();
    if (LOG.isDebugEnabled() && filterList != null) {
        LOG.debug("FilterList created for scan is - " + filterList);
    }
    ResultScanner results = getResults(hbaseConf, conn, filterList);
    try {
        for (Result result : results) {
            TimelineEntity entity = parseEntity(result);
            if (entity == null) {
                continue;
            }
            entities.add(entity);
            if (!sortedKeys) {
                if (entities.size() > filters.getLimit()) {
                    entities.pollLast();
                }
            } else {
                if (entities.size() == filters.getLimit()) {
                    break;
                }
            }
        }
        return entities;
    } finally {
        results.close();
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) TreeSet(java.util.TreeSet) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) Result(org.apache.hadoop.hbase.client.Result)
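
The limit handling in readEntities is a small pattern worth isolating: when rows do not arrive pre-sorted (sortedKeys is false), the reader keeps a TreeSet and evicts the largest element whenever the set grows past the limit; when rows are already sorted, it simply stops reading at the limit. A self-contained sketch of the unsorted case, with integers standing in for entities:

import java.util.NavigableSet;
import java.util.TreeSet;

public class TopNSketch {
    public static void main(String[] args) {
        int limit = 3;
        NavigableSet<Integer> smallest = new TreeSet<>();
        for (int value : new int[] {9, 1, 7, 3, 8, 2}) {
            smallest.add(value);
            if (smallest.size() > limit) {
                // Evict the current maximum, as entities.pollLast() does above.
                smallest.pollLast();
            }
        }
        System.out.println(smallest); // prints [1, 2, 3]
    }
}

The result is the limit smallest elements in natural order, which is exactly the shape of the entities/pollLast loop above.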

Example 49 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

The class TimelineReaderManager, method getEntity.

/**
   * Gets a single timeline entity by calling the backend storage
   * implementation. Each argument is documented in detail at
   * {@link TimelineReader#getEntity}. If the client did not supply a cluster
   * ID, the cluster id from the config is filled in before the backend call.
   * After fetching the entity from the backend, the appropriate UID is
   * filled in based on the entity type.
   *
   * @param context Timeline context within the scope of which entity has to be
   *     fetched.
   * @param dataToRetrieve Data to carry in the entity fetched.
   * @return A <cite>TimelineEntity</cite> object if found, null otherwise.
   * @throws IOException  if any problem occurs while getting entity.
   * @see TimelineReader#getEntity
   */
public TimelineEntity getEntity(TimelineReaderContext context, TimelineDataToRetrieve dataToRetrieve) throws IOException {
    context.setClusterId(getClusterID(context.getClusterId(), getConfig()));
    TimelineEntity entity = reader.getEntity(new TimelineReaderContext(context), dataToRetrieve);
    if (entity != null) {
        TimelineEntityType type = getTimelineEntityType(context.getEntityType());
        fillUID(type, entity, context);
    }
    return entity;
}
Also used : TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) TimelineEntityType(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType)
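
A caller sketch to make the flow concrete, assuming an already-started TimelineReaderManager named manager and placeholder user/flow values (all hypothetical). Passing null for the cluster id exercises the config-fallback path described in the javadoc:

// Sketch only: "manager" is a hypothetical, already-started
// TimelineReaderManager; the user and flow values are placeholders.
TimelineEntity fetchFlowRun(TimelineReaderManager manager) throws IOException {
    TimelineReaderContext context = new TimelineReaderContext(
        // cluster id is null here; getEntity fills it in from the config
        null, "user1", "flow_name", 1002345678919L, null,
        TimelineEntityType.YARN_FLOW_RUN.toString(), null);
    // Retrieve no additional fields; metric/config filters could go here.
    return manager.getEntity(context,
        new TimelineDataToRetrieve(null, null, null, null));
}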

Example 50 with TimelineEntity

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.

The class GenericEntityReader, method parseEntity.

@Override
protected TimelineEntity parseEntity(Result result) throws IOException {
    if (result == null || result.isEmpty()) {
        return null;
    }
    TimelineEntity entity = new TimelineEntity();
    String entityType = EntityColumn.TYPE.readResult(result).toString();
    entity.setType(entityType);
    String entityId = EntityColumn.ID.readResult(result).toString();
    entity.setId(entityId);
    TimelineEntityFilters filters = getFilters();
    // fetch created time
    Long createdTime = (Long) EntityColumn.CREATED_TIME.readResult(result);
    entity.setCreatedTime(createdTime);
    EnumSet<Field> fieldsToRetrieve = getDataToRetrieve().getFieldsToRetrieve();
    // fetch is related to entities and match isRelatedTo filter. If isRelatedTo
    // filters do not match, entity would be dropped. We have to match filters
    // locally as relevant HBase filters to filter out rows on the basis of
    // isRelatedTo are not set in HBase scan.
    boolean checkIsRelatedTo = !isSingleEntityRead()
        && filters.getIsRelatedTo() != null
        && filters.getIsRelatedTo().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.IS_RELATED_TO) || checkIsRelatedTo) {
        readRelationship(entity, result, EntityColumnPrefix.IS_RELATED_TO, true);
        if (checkIsRelatedTo && !TimelineStorageUtils.matchIsRelatedTo(entity, filters.getIsRelatedTo())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.IS_RELATED_TO)) {
            entity.getIsRelatedToEntities().clear();
        }
    }
    // fetch relates to entities and match relatesTo filter. If relatesTo
    // filters do not match, entity would be dropped. We have to match filters
    // locally as relevant HBase filters to filter out rows on the basis of
    // relatesTo are not set in HBase scan.
    boolean checkRelatesTo = !isSingleEntityRead()
        && filters.getRelatesTo() != null
        && filters.getRelatesTo().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.RELATES_TO) || checkRelatesTo) {
        readRelationship(entity, result, EntityColumnPrefix.RELATES_TO, false);
        if (checkRelatesTo && !TimelineStorageUtils.matchRelatesTo(entity, filters.getRelatesTo())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.RELATES_TO)) {
            entity.getRelatesToEntities().clear();
        }
    }
    // fetch info if fieldsToRetrieve contains INFO or ALL.
    if (hasField(fieldsToRetrieve, Field.INFO)) {
        readKeyValuePairs(entity, result, EntityColumnPrefix.INFO, false);
    }
    // fetch configs if fieldsToRetrieve contains CONFIGS or ALL.
    if (hasField(fieldsToRetrieve, Field.CONFIGS)) {
        readKeyValuePairs(entity, result, EntityColumnPrefix.CONFIG, true);
    }
    // fetch events and match event filters if they exist. If event filters do
    // not match, entity would be dropped. We have to match filters locally
    // as relevant HBase filters to filter out rows on the basis of events
    // are not set in HBase scan.
    boolean checkEvents = !isSingleEntityRead()
        && filters.getEventFilters() != null
        && filters.getEventFilters().getFilterList().size() > 0;
    if (hasField(fieldsToRetrieve, Field.EVENTS) || checkEvents) {
        readEvents(entity, result, EntityColumnPrefix.EVENT);
        if (checkEvents && !TimelineStorageUtils.matchEventFilters(entity, filters.getEventFilters())) {
            return null;
        }
        if (!hasField(fieldsToRetrieve, Field.EVENTS)) {
            entity.getEvents().clear();
        }
    }
    // fetch metrics if fieldsToRetrieve contains METRICS or ALL.
    if (hasField(fieldsToRetrieve, Field.METRICS)) {
        readMetrics(entity, result, EntityColumnPrefix.METRIC);
    }
    return entity;
}
Also used : Field(org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field) TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity)
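
Each relationship and event branch above follows the same fetch-match-clear shape: read the column only if it is requested or needed for filtering, drop the entity on a filter mismatch, and clear the field again if it was fetched purely for matching. A generic sketch of that shape (all names hypothetical):

import java.util.function.BooleanSupplier;

public class FetchMatchClearSketch {
    // Returns false when the entity should be dropped for a filter mismatch.
    static boolean handleField(boolean requested, boolean mustMatch,
            Runnable read, BooleanSupplier matches, Runnable clear) {
        if (requested || mustMatch) {
            read.run();                       // populate the field from storage
            if (mustMatch && !matches.getAsBoolean()) {
                return false;                 // filter mismatch: drop entity
            }
            if (!requested) {
                clear.run();                  // fetched only to evaluate filter
            }
        }
        return true;
    }
}

The same three steps repeat for IS_RELATED_TO, RELATES_TO, and EVENTS above, differing only in the column prefix read and the filter matched.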

Aggregations

TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 155 usages
Test (org.junit.Test): 98 usages
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 54 usages
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 54 usages
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 46 usages
HashSet (java.util.HashSet): 37 usages
Client (com.sun.jersey.api.client.Client): 36 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 36 usages
URI (java.net.URI): 36 usages
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 33 usages
Set (java.util.Set): 32 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 27 usages
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent): 26 usages
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList): 26 usages
HashMap (java.util.HashMap): 23 usages
Configuration (org.apache.hadoop.conf.Configuration): 21 usages
GenericType (com.sun.jersey.api.client.GenericType): 14 usages
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl): 12 usages
TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter): 10 usages
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 9 usages