Example 11 with TimelineEntities

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.

From the class TestHBaseTimelineStorageEntities, method testWriteEntityToHBase.

@Test
public void testWriteEntityToHBase() throws Exception {
    TimelineEntities te = new TimelineEntities();
    TimelineEntity entity = new TimelineEntity();
    String id = "hello";
    String type = "world";
    entity.setId(id);
    entity.setType(type);
    Long cTime = 1425016501000L;
    entity.setCreatedTime(cTime);
    // add the info map to the TimelineEntity
    Map<String, Object> infoMap = new HashMap<String, Object>();
    infoMap.put("infoMapKey1", "infoMapValue1");
    infoMap.put("infoMapKey2", 10);
    entity.addInfo(infoMap);
    // add the isRelatedToEntity info
    String key = "task";
    String value = "is_related_to_entity_id_here";
    Set<String> isRelatedToSet = new HashSet<String>();
    isRelatedToSet.add(value);
    Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
    isRelatedTo.put(key, isRelatedToSet);
    entity.setIsRelatedToEntities(isRelatedTo);
    // add the relatesTo info
    key = "container";
    value = "relates_to_entity_id_here";
    Set<String> relatesToSet = new HashSet<String>();
    relatesToSet.add(value);
    value = "relates_to_entity_id_here_Second";
    relatesToSet.add(value);
    Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
    relatesTo.put(key, relatesToSet);
    entity.setRelatesToEntities(relatesTo);
    // add some config entries
    Map<String, String> conf = new HashMap<String, String>();
    conf.put("config_param1", "value1");
    conf.put("config_param2", "value2");
    entity.addConfigs(conf);
    // add metrics
    Set<TimelineMetric> metrics = new HashSet<>();
    TimelineMetric m1 = new TimelineMetric();
    m1.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> metricValues = new HashMap<Long, Number>();
    long ts = System.currentTimeMillis();
    metricValues.put(ts - 120000, 100000000);
    metricValues.put(ts - 100000, 200000000);
    metricValues.put(ts - 80000, 300000000);
    metricValues.put(ts - 60000, 400000000);
    metricValues.put(ts - 40000, 50000000000L);
    metricValues.put(ts - 20000, 60000000000L);
    m1.setType(Type.TIME_SERIES);
    m1.setValues(metricValues);
    metrics.add(m1);
    entity.addMetrics(metrics);
    te.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        String cluster = "cluster_test_write_entity";
        String user = "user1";
        String flow = "some_flow_name";
        String flowVersion = "AB7822C10F1111";
        long runid = 1002345678919L;
        String appName = ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1).toString();
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        hbi.stop();
        // scan the table and see that entity exists
        Scan s = new Scan();
        byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid, appName).getRowKeyPrefix();
        s.setStartRow(startRow);
        s.setMaxVersions(Integer.MAX_VALUE);
        Connection conn = ConnectionFactory.createConnection(c1);
        ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
        int rowCount = 0;
        int colCount = 0;
        KeyConverter<String> stringKeyConverter = new StringKeyConverter();
        for (Result result : scanner) {
            if (result != null && !result.isEmpty()) {
                rowCount++;
                colCount += result.size();
                byte[] row1 = result.getRow();
                assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName, entity));
                // check info column family
                String id1 = EntityColumn.ID.readResult(result).toString();
                assertEquals(id, id1);
                String type1 = EntityColumn.TYPE.readResult(result).toString();
                assertEquals(type, type1);
                Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result);
                assertEquals(cTime, cTime1);
                Map<String, Object> infoColumns = EntityColumnPrefix.INFO.readResults(result, stringKeyConverter);
                assertEquals(infoMap, infoColumns);
                // Remember isRelatedTo is of type Map<String, Set<String>>
                for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
                    Object isRelatedToValue = EntityColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
                    String compoundValue = isRelatedToValue.toString();
                    // compound value, e.g. id7?id9?id6 (ids joined by Separator.VALUES)
                    Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
                    assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
                    for (String v : isRelatedToEntry.getValue()) {
                        assertTrue(isRelatedToValues.contains(v));
                    }
                }
                // RelatesTo
                for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
                    String compoundValue = EntityColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
                    // compound value, e.g. id3?id4?id5 (ids joined by Separator.VALUES)
                    Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
                    assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
                    for (String v : relatesToEntry.getValue()) {
                        assertTrue(relatesToValues.contains(v));
                    }
                }
                // Configuration
                Map<String, Object> configColumns = EntityColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
                assertEquals(conf, configColumns);
                NavigableMap<String, NavigableMap<Long, Number>> metricsResult = EntityColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
                NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
                matchMetrics(metricValues, metricMap);
            }
        }
        assertEquals(1, rowCount);
        assertEquals(16, colCount);
        // read the timeline entity using the reader this time
        TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
        Set<TimelineEntity> es1 = reader.getEntities(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
        assertNotNull(e1);
        assertEquals(1, es1.size());
        // verify attributes
        assertEquals(id, e1.getId());
        assertEquals(type, e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        Map<String, Object> infoMap2 = e1.getInfo();
        assertEquals(infoMap, infoMap2);
        Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
        assertEquals(isRelatedTo, isRelatedTo2);
        Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
        assertEquals(relatesTo, relatesTo2);
        Map<String, String> conf2 = e1.getConfigs();
        assertEquals(conf, conf2);
        Set<TimelineMetric> metrics2 = e1.getMetrics();
        assertEquals(metrics, metrics2);
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            matchMetrics(metricValues, metricValues2);
        }
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
        assertNotNull(e1);
        assertEquals(id, e1.getId());
        assertEquals(type, e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        assertEquals(infoMap, e1.getInfo());
        assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
        assertEquals(relatesTo, e1.getRelatesToEntities());
        assertEquals(conf, e1.getConfigs());
        for (TimelineMetric metric : e1.getMetrics()) {
            assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
            assertEquals(1, metric.getValues().size());
            assertTrue(metric.getValues().containsKey(ts - 20000));
            assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
        }
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used: StringKeyConverter (org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter), TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), EnumSet (java.util.EnumSet), Set (java.util.Set), NavigableSet (java.util.NavigableSet), HashSet (java.util.HashSet), Configuration (org.apache.hadoop.conf.Configuration), NavigableMap (java.util.NavigableMap), HashMap (java.util.HashMap), TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), EntityRowKeyPrefix (org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix), Result (org.apache.hadoop.hbase.client.Result), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), EntityTable (org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters), Connection (org.apache.hadoop.hbase.client.Connection), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), Scan (org.apache.hadoop.hbase.client.Scan), Map (java.util.Map), Test (org.junit.Test)
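
A note on the final read in this test: the second getEntity call passes a TimelineDataToRetrieve whose metricsLimit is null, and the assertions confirm that the reader then collapses each stored TIME_SERIES metric into a SINGLE_VALUE metric carrying only the latest datapoint (the value written at ts - 20000). A minimal sketch of the two metric shapes, assuming TimelineMetric#addValue from the timeline service v2 records API:

// What the writer stored: a time series with several datapoints.
TimelineMetric series = new TimelineMetric();
series.setId("MAP_SLOT_MILLIS");
series.setType(TimelineMetric.Type.TIME_SERIES);
series.addValue(System.currentTimeMillis() - 40000, 50000000000L);
series.addValue(System.currentTimeMillis() - 20000, 60000000000L);
// What the reader returns when no metrics limit is requested: a single-value
// metric holding only the most recent datapoint.
TimelineMetric latestOnly = new TimelineMetric();
latestOnly.setId("MAP_SLOT_MILLIS");
latestOnly.setType(TimelineMetric.Type.SINGLE_VALUE);
latestOnly.addValue(System.currentTimeMillis(), 60000000000L);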

Example 12 with TimelineEntities

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.

From the class TimelineServiceV2Publisher, method putEntity.

private void putEntity(TimelineEntity entity, ApplicationId appId) {
    try {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Publishing the entity " + entity + ", JSON-style content: " + TimelineUtils.dumpTimelineRecordtoJSON(entity));
        }
        TimelineCollector timelineCollector = rmTimelineCollectorManager.get(appId);
        TimelineEntities entities = new TimelineEntities();
        entities.addEntity(entity);
        timelineCollector.putEntities(entities, UserGroupInformation.getCurrentUser());
    } catch (Exception e) {
        LOG.error("Error when publishing entity " + entity, e);
    }
}
Also used: TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), TimelineCollector (org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector)
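
A hypothetical caller sketch (the entity construction below is illustrative, not taken from the publisher source): callers build a timeline record for the application and hand it to putEntity together with its ApplicationId.

// Illustrative only: ApplicationEntity usage and field values are assumptions.
ApplicationEntity appEntity = new ApplicationEntity();
appEntity.setId(appId.toString());
appEntity.setCreatedTime(System.currentTimeMillis());
putEntity(appEntity, appId);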

Example 13 with TimelineEntities

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.

From the class TestTimelineClientV2Impl, method testConfigurableNumberOfMerges.

@Test
public void testConfigurableNumberOfMerges() throws Exception {
    client.setSleepBeforeReturn(true);
    try {
        // at most 3 entities should be merged into any single publish
        client.putEntitiesAsync(generateEntity("1"));
        client.putEntitiesAsync(generateEntity("2"));
        client.putEntitiesAsync(generateEntity("3"));
        client.putEntitiesAsync(generateEntity("4"));
        client.putEntities(generateEntity("5"));
        client.putEntitiesAsync(generateEntity("6"));
        client.putEntitiesAsync(generateEntity("7"));
        client.putEntitiesAsync(generateEntity("8"));
        client.putEntitiesAsync(generateEntity("9"));
        client.putEntitiesAsync(generateEntity("10"));
    } catch (YarnException e) {
        Assert.fail("No exception expected");
    }
    // no retry/wait loop here, since the check below does not depend on how
    // many times events are published.
    Thread.sleep(2 * TIME_TO_SLEEP);
    printReceivedEntities();
    for (TimelineEntities publishedEntities : client.publishedEntities) {
        Assert.assertTrue("Number of entities should not be greater than 3 for each publish," + " but was " + publishedEntities.getEntities().size(), publishedEntities.getEntities().size() <= 3);
    }
}
Also used: TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), Test (org.junit.Test)
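
The bound of 3 checked above is the client's configurable async-merge limit. A sketch of how such a limit is typically set, assuming the YarnConfiguration key used by the v2 timeline client (yarn.timeline-service.timeline-client.number-of-async-entities-to-merge); the test presumably configures an equivalent bound of 3 in its setup:

// Assumption: NUMBER_OF_ASYNC_ENTITIES_TO_MERGE is the v2 client's merge-limit key.
Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.NUMBER_OF_ASYNC_ENTITIES_TO_MERGE, 3);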

Example 14 with TimelineEntities

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.

From the class TestHBaseStorageFlowRun, method testWriteFlowRunMetricsPrefix.

@Test
public void testWriteFlowRunMetricsPrefix() throws Exception {
    String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
    String user = "testWriteFlowRunMetricsPrefix_user1";
    String flow = "testWriteFlowRunMetricsPrefix_flow_name";
    String flowVersion = "CF7022C10F1354";
    TimelineEntities te = new TimelineEntities();
    TimelineEntity entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
    te.addEntity(entityApp1);
    HBaseTimelineWriterImpl hbi = null;
    Configuration c1 = util.getConfiguration();
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        String appName = "application_11111111111111_1111";
        hbi.write(cluster, user, flow, flowVersion, 1002345678919L, appName, te);
        // write another application with same metric to this flow
        te = new TimelineEntities();
        TimelineEntity entityApp2 = TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
        te.addEntity(entityApp2);
        appName = "application_11111111111111_2222";
        hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
        hbi.flush();
    } finally {
        if (hbi != null) {
            hbi.close();
        }
    }
    // use the timeline reader to verify data
    HBaseTimelineReaderImpl hbr = null;
    try {
        hbr = new HBaseTimelineReaderImpl();
        hbr.init(c1);
        hbr.start();
        TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR, new TimelinePrefixFilter(TimelineCompareOp.EQUAL, METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
        TimelineEntity entity = hbr.getEntity(new TimelineReaderContext(cluster, user, flow, 1002345678919L, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
        assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
        Set<TimelineMetric> metrics = entity.getMetrics();
        assertEquals(1, metrics.size());
        for (TimelineMetric metric : metrics) {
            String id = metric.getId();
            Map<Long, Number> values = metric.getValues();
            assertEquals(1, values.size());
            Number value = null;
            for (Number n : values.values()) {
                value = n;
            }
            switch(id) {
                case METRIC1:
                    assertEquals(40L, value);
                    break;
                default:
                    fail("unrecognized metric: " + id);
            }
        }
        Set<TimelineEntity> entities = hbr.getEntities(new TimelineReaderContext(cluster, user, flow, null, null, TimelineEntityType.YARN_FLOW_RUN.toString(), null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
        assertEquals(2, entities.size());
        int metricCnt = 0;
        for (TimelineEntity timelineEntity : entities) {
            metricCnt += timelineEntity.getMetrics().size();
        }
        assertEquals(2, metricCnt);
    } finally {
        if (hbr != null) {
            hbr.close();
        }
    }
}
Also used: TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), Configuration (org.apache.hadoop.conf.Configuration), TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList), TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl), HBaseTimelineReaderImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), TimelinePrefixFilter (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter), Test (org.junit.Test)
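
The metricsToRetrieve filter above matches by prefix: METRIC1.substring(0, METRIC1.indexOf("_") + 1) keeps everything up to and including the first underscore. Illustrative only, supposing METRIC1 were "MAP_SLOT_MILLIS" (its actual value lives in the test class):

String metric1 = "MAP_SLOT_MILLIS"; // assumed value, for illustration
String prefix = metric1.substring(0, metric1.indexOf("_") + 1); // "MAP_"
// A TimelinePrefixFilter built with this prefix matches every metric id
// beginning with "MAP_".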

Example 15 with TimelineEntities

Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.

From the class TestHBaseStorageFlowRunCompaction, method testWriteFlowRunCompaction.

@Test
public void testWriteFlowRunCompaction() throws Exception {
    String cluster = "kompaction_cluster1";
    String user = "kompaction_FlowRun__user1";
    String flow = "kompaction_flowRun_flow_name";
    String flowVersion = "AF1021C19F1351";
    long runid = 1449526652000L;
    int start = 10;
    int count = 2000;
    int appIdSuffix = 1;
    HBaseTimelineWriterImpl hbi = null;
    long insertTs = System.currentTimeMillis() - count;
    Configuration c1 = util.getConfiguration();
    TimelineEntities te1 = null;
    TimelineEntity entityApp1 = null;
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        // write two applications per iteration, each entity carrying metric
        // values generated by TestFlowDataGenerator
        for (int i = start; i < start + count; i++) {
            String appName = "application_10240000000000_" + appIdSuffix;
            insertTs++;
            te1 = new TimelineEntities();
            entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1(insertTs, c1);
            te1.addEntity(entityApp1);
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
            appName = "application_2048000000000_7" + appIdSuffix;
            insertTs++;
            te1 = new TimelineEntities();
            entityApp1 = TestFlowDataGenerator.getEntityMetricsApp2(insertTs);
            te1.addEntity(entityApp1);
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
        }
    } finally {
        String appName = "application_10240000000000_" + appIdSuffix;
        te1 = new TimelineEntities();
        entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1Complete(insertTs + 1, c1);
        te1.addEntity(entityApp1);
        if (hbi != null) {
            hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
            hbi.flush();
            hbi.close();
        }
    }
    // check in flow run table
    HRegionServer server = util.getRSForFirstRegionInTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    List<Region> regions = server.getOnlineRegions(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    assertTrue("Didn't find any regions for primary table!", regions.size() > 0);
    // flush and compact all the regions of the primary table
    for (Region region : regions) {
        region.flush(true);
        region.compact(true);
    }
    // check flow run for one flow many apps
    checkFlowRunTable(cluster, user, flow, runid, c1, 4);
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), Region (org.apache.hadoop.hbase.regionserver.Region), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl), HRegionServer (org.apache.hadoop.hbase.regionserver.HRegionServer), Test (org.junit.Test)
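
Across Examples 11, 14, and 15 the same write-path skeleton recurs. A distilled sketch (test scaffolding and error handling omitted; this summarizes the pattern in the examples above, not canonical API documentation):

// Build the payload: one or more entities wrapped in TimelineEntities.
TimelineEntities te = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
entity.setId("entity_id");
entity.setType("entity_type");
entity.setCreatedTime(System.currentTimeMillis());
te.addEntity(entity);
// Write through the HBase-backed writer; conf is a Hadoop Configuration
// pointing at the HBase cluster.
HBaseTimelineWriterImpl writer = new HBaseTimelineWriterImpl();
try {
    writer.init(conf);
    writer.start();
    writer.write("cluster", "user", "flow_name", "flow_version",
            1002345678919L, "application_11111111111111_1111", te);
    writer.flush();
} finally {
    writer.stop();
    writer.close();
}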

Aggregations

Usage counts across the indexed examples:

TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities): 33
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity): 27
Test (org.junit.Test): 24
Configuration (org.apache.hadoop.conf.Configuration): 21
TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve): 14
TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext): 14
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric): 13
HBaseTimelineWriterImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl): 12
HashMap (java.util.HashMap): 11
TimelineEntityFilters (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters): 9
HBaseTimelineReaderImpl (org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl): 9
HashSet (java.util.HashSet): 7
Connection (org.apache.hadoop.hbase.client.Connection): 7
Result (org.apache.hadoop.hbase.client.Result): 7
TimelineEvent (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent): 7
Map (java.util.Map): 5
NavigableMap (java.util.NavigableMap): 5
Set (java.util.Set): 5
ApplicationEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity): 5
Get (org.apache.hadoop.hbase.client.Get): 4