Search in sources :

Example 1 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

From the class EntityGroupFSTimelineStore, method getEntities:

@Override
public TimelineEntities getEntities(String entityType, Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters, EnumSet<Field> fieldsToRetrieve, CheckAcl checkAcl) throws IOException {
    LOG.debug("getEntities type={} primary={}", entityType, primaryFilter);
    // Cache items touched while resolving the candidate stores for this read.
    final List<EntityCacheItem> touchedCacheItems = new ArrayList<>();
    final List<TimelineStore> candidateStores = getTimelineStoresForRead(entityType, primaryFilter, secondaryFilters, touchedCacheItems);
    // Merge the results from every candidate store into one response;
    // a store returning null contributes nothing.
    final TimelineEntities merged = new TimelineEntities();
    for (TimelineStore candidate : candidateStores) {
        LOG.debug("Try timeline store {} for the request", candidate.getName());
        final TimelineEntities partial = candidate.getEntities(entityType, limit, windowStart, windowEnd, fromId, fromTs, primaryFilter, secondaryFilters, fieldsToRetrieve, checkAcl);
        if (partial != null) {
            merged.addEntities(partial.getEntities());
        }
    }
    return merged;
}
Also used : TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) ArrayList(java.util.ArrayList)

Example 2 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

From the class TestLogInfo, method testParseEntity:

@Test
public void testParseEntity() throws Exception {
    // Parse a pre-written entity log into a memory-backed store and verify the
    // entities, then append one more entity and verify that a second parse pass
    // picks up the incremental data.
    // Load test data
    TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithMemStore(config);
    EntityLogInfo testLogInfo = new EntityLogInfo(TEST_ATTEMPT_DIR_NAME, TEST_ENTITY_FILE_NAME, UserGroupInformation.getLoginUser().getUserName());
    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper, fs);
    // Verify for the first batch
    PluginStoreTestUtils.verifyTestEntities(tdm);
    // Load new data. Fixed: use an uppercase 'L' long-literal suffix — the
    // original lowercase 'l' (789l) is easily misread as the digit '1'.
    TimelineEntity entityNew = PluginStoreTestUtils.createEntity("id_3", "type_3", 789L, null, null, null, null, "domain_id_1");
    TimelineEntities entityList = new TimelineEntities();
    entityList.addEntity(entityNew);
    // NOTE(review): helper name suggests the log stream is intentionally left
    // open to simulate a file still being written — confirm against the helper.
    writeEntitiesLeaveOpen(entityList, new Path(getTestRootPath(TEST_ATTEMPT_DIR_NAME), TEST_ENTITY_FILE_NAME));
    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper, fs);
    // Verify the newly added data
    TimelineEntity entity3 = tdm.getEntity(entityNew.getEntityType(), entityNew.getEntityId(), EnumSet.allOf(TimelineReader.Field.class), UserGroupInformation.getLoginUser());
    assertNotNull(entity3);
    assertEquals("Failed to read out entity new", entityNew.getStartTime(), entity3.getStartTime());
    tdm.close();
}
Also used : Path(org.apache.hadoop.fs.Path) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) Test(org.junit.Test)

Example 3 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

From the class TestEntityGroupFSTimelineStore, method testPluginRead:

@Test
public void testPluginRead() throws Exception {
    // Verify precondition: the test plugin class is configured on the store.
    assertEquals(EntityGroupPlugInForTest.class.getName(), store.getConfig().get(YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
    List<TimelineEntityGroupPlugin> currPlugins = store.getPlugins();
    for (TimelineEntityGroupPlugin plugin : currPlugins) {
        // Each plugin must be loaded through an ApplicationClassLoader whose
        // URL classpath includes the test JAR the plugin came from.
        ClassLoader pluginClassLoader = plugin.getClass().getClassLoader();
        assertTrue("Should set up ApplicationClassLoader", pluginClassLoader instanceof ApplicationClassLoader);
        URL[] paths = ((URLClassLoader) pluginClassLoader).getURLs();
        boolean foundJAR = false;
        for (URL path : paths) {
            if (path.toString().contains(testJar.getAbsolutePath())) {
                foundJAR = true;
                // Fixed: stop scanning once the JAR is found instead of
                // uselessly iterating the remaining classpath URLs.
                break;
            }
        }
        assertTrue("Not found path " + testJar.getAbsolutePath() + " for plugin " + plugin.getClass().getName(), foundJAR);
    }
    // Load data and cache item, prepare timeline store by making a cache item
    EntityGroupFSTimelineStore.AppLogs appLogs = store.new AppLogs(mainTestAppId, mainTestAppDirPath, AppState.COMPLETED);
    EntityCacheItem cacheItem = new EntityCacheItem(EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId), config);
    cacheItem.setAppLogs(appLogs);
    store.setCachedLogs(EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId), cacheItem);
    // Snapshot metrics so the assertions below measure only this test's reads.
    MutableCounterLong detailLogEntityRead = store.metrics.getGetEntityToDetailOps();
    MutableStat cacheRefresh = store.metrics.getCacheRefresh();
    long numEntityReadBefore = detailLogEntityRead.value();
    long cacheRefreshBefore = cacheRefresh.lastStat().numSamples();
    // Generate TDM
    TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithStore(config, store);
    // Verify single entity read
    TimelineEntity entity3 = tdm.getEntity("type_3", mainTestAppId.toString(), EnumSet.allOf(TimelineReader.Field.class), UserGroupInformation.getLoginUser());
    assertNotNull(entity3);
    assertEquals(entityNew.getStartTime(), entity3.getStartTime());
    // Verify multiple entities read
    NameValuePair primaryFilter = new NameValuePair(EntityGroupPlugInForTest.APP_ID_FILTER_NAME, mainTestAppId.toString());
    TimelineEntities entities = tdm.getEntities("type_3", primaryFilter, null, null, null, null, null, null, EnumSet.allOf(TimelineReader.Field.class), UserGroupInformation.getLoginUser());
    assertEquals(1, entities.getEntities().size());
    for (TimelineEntity entity : entities.getEntities()) {
        assertEquals(entityNew.getStartTime(), entity.getStartTime());
    }
    // Verify metrics: the two reads above should add two detail-log entity
    // reads and exactly one cache refresh sample relative to the snapshot.
    assertEquals(numEntityReadBefore + 2L, detailLogEntityRead.value());
    assertEquals(cacheRefreshBefore + 1L, cacheRefresh.lastStat().numSamples());
}
Also used : MutableCounterLong(org.apache.hadoop.metrics2.lib.MutableCounterLong) MutableStat(org.apache.hadoop.metrics2.lib.MutableStat) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) ApplicationClassLoader(org.apache.hadoop.util.ApplicationClassLoader) URL(java.net.URL) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) URLClassLoader(java.net.URLClassLoader) ApplicationClassLoader(org.apache.hadoop.util.ApplicationClassLoader) URLClassLoader(java.net.URLClassLoader) Test(org.junit.Test)

Example 4 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

From the class TestEntityGroupFSTimelineStore, method createTestFiles:

private void createTestFiles(ApplicationId appId, Path attemptDirPath) throws IOException {
    // Summary log: the standard generated test entities.
    TimelineEntities summaryEntities = PluginStoreTestUtils.generateTestEntities();
    PluginStoreTestUtils.writeEntities(summaryEntities, new Path(attemptDirPath, TEST_SUMMARY_LOG_FILE_NAME), fs);
    // Primary filter mapping the app-id filter name to this application's id.
    Set<Object> appIdValues = new HashSet<Object>();
    appIdValues.add(appId.toString());
    Map<String, Set<Object>> primaryFilters = new HashMap<>();
    primaryFilters.put(EntityGroupPlugInForTest.APP_ID_FILTER_NAME, appIdValues);
    // Detail log: a single new entity carrying the primary filter above;
    // kept in the entityNew field so tests can assert against it later.
    entityNew = PluginStoreTestUtils.createEntity(appId.toString(), "type_3", 789L, null, null, primaryFilters, null, "domain_id_1");
    TimelineEntities detailEntities = new TimelineEntities();
    detailEntities.addEntity(entityNew);
    PluginStoreTestUtils.writeEntities(detailEntities, new Path(attemptDirPath, mainEntityLogFileName), fs);
    // Domain log: created empty.
    fs.create(new Path(attemptDirPath, TEST_DOMAIN_LOG_FILE_NAME)).close();
}
Also used : Path(org.apache.hadoop.fs.Path) HashSet(java.util.HashSet) EnumSet(java.util.EnumSet) Set(java.util.Set) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) HashMap(java.util.HashMap) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HashSet(java.util.HashSet)

Example 5 with TimelineEntities

use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.

From the class TestMRTimelineEventHandling, method testMRTimelineEventHandling:

@Test
public void testMRTimelineEventHandling() throws Exception {
    // End-to-end check: run one successful and one failing MR job on a mini
    // YARN cluster and verify each publishes a MAPREDUCE_JOB entity, with the
    // expected first/last events, to the timeline store.
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    MiniMRYarnCluster cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestMRTimelineEventHandling.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        // Point clients at the timeline web app address actually bound by the
        // started mini cluster (port is only known after start()).
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
        String localPathRoot = System.getProperty("test.build.data", "build/test/data");
        Path inDir = new Path(localPathRoot, "input");
        Path outDir = new Path(localPathRoot, "output");
        // First job: runs to success and should emit one MAPREDUCE_JOB entity.
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        // The assertions expect events newest-first: index 0 is JOB_FINISHED
        // and the last element is the earliest event, AM_STARTED.
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(0).getEventType());
        // Second job: deliberately fails and should add a second entity whose
        // newest event is JOB_FAILED.
        job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(2, entities.getEntities().size());
        // NOTE(review): index 0 is presumed to be the most recent (failed) job
        // since its id is compared against the failing job — confirm ordering.
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FAILED.toString(), tEntity.getEvents().get(0).getEventType());
    } finally {
        // Always tear down the mini cluster, even if an assertion failed.
        if (cluster != null) {
            cluster.stop();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) MiniMRYarnCluster(org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster) TimelineEntities(org.apache.hadoop.yarn.api.records.timeline.TimelineEntities) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) TimelineStore(org.apache.hadoop.yarn.server.timeline.TimelineStore) Test(org.junit.Test)

Aggregations

TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities)43 TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity)30 Test (org.junit.Test)23 TimelinePutResponse (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse)14 ClientResponse (com.sun.jersey.api.client.ClientResponse)9 WebResource (com.sun.jersey.api.client.WebResource)8 AdminACLsManager (org.apache.hadoop.yarn.security.AdminACLsManager)6 ArrayList (java.util.ArrayList)5 Set (java.util.Set)5 Path (org.apache.hadoop.fs.Path)5 IOException (java.io.IOException)4 HashMap (java.util.HashMap)4 HashSet (java.util.HashSet)4 Configuration (org.apache.hadoop.conf.Configuration)4 TimelineDomain (org.apache.hadoop.yarn.api.records.timeline.TimelineDomain)4 TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent)4 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)4 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)4 EnumSet (java.util.EnumSet)3 LinkedHashMap (java.util.LinkedHashMap)3