Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class EntityGroupFSTimelineStore, the method getEntities:
@Override
public TimelineEntities getEntities(String entityType, Long limit,
    Long windowStart, Long windowEnd, String fromId, Long fromTs,
    NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
    EnumSet<Field> fieldsToRetrieve, CheckAcl checkAcl) throws IOException {
  LOG.debug("getEntities type={} primary={}", entityType, primaryFilter);
  List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
  List<TimelineStore> stores = getTimelineStoresForRead(entityType,
      primaryFilter, secondaryFilters, relatedCacheItems);
  TimelineEntities returnEntities = new TimelineEntities();
  for (TimelineStore store : stores) {
    LOG.debug("Try timeline store {} for the request", store.getName());
    TimelineEntities entities = store.getEntities(entityType, limit,
        windowStart, windowEnd, fromId, fromTs, primaryFilter,
        secondaryFilters, fieldsToRetrieve, checkAcl);
    if (entities != null) {
      returnEntities.addEntities(entities.getEntities());
    }
  }
  return returnEntities;
}
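For orientation, here is a minimal standalone sketch of the merge pattern the method above relies on, using only the public TimelineEntities/TimelineEntity API (the class name, IDs, and timestamps are illustrative):

import java.util.Arrays;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

public class MergeSketch {
  public static void main(String[] args) {
    // Two per-store result sets, shaped like those getEntities collects.
    TimelineEntities fromStoreA = new TimelineEntities();
    TimelineEntity e1 = new TimelineEntity();
    e1.setEntityId("id_1");
    e1.setEntityType("type_1");
    e1.setStartTime(123L);
    fromStoreA.addEntity(e1);

    TimelineEntities fromStoreB = new TimelineEntities();
    TimelineEntity e2 = new TimelineEntity();
    e2.setEntityId("id_2");
    e2.setEntityType("type_1");
    e2.setStartTime(456L);
    fromStoreB.addEntity(e2);

    // Merge into one container, mirroring the loop in getEntities above.
    TimelineEntities merged = new TimelineEntities();
    for (TimelineEntities partial : Arrays.asList(fromStoreA, fromStoreB)) {
      if (partial != null) {
        merged.addEntities(partial.getEntities());
      }
    }
    System.out.println("merged " + merged.getEntities().size() + " entities");
  }
}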
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestLogInfo, the method testParseEntity:
@Test
public void testParseEntity() throws Exception {
  // Load test data
  TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithMemStore(config);
  EntityLogInfo testLogInfo = new EntityLogInfo(TEST_ATTEMPT_DIR_NAME,
      TEST_ENTITY_FILE_NAME,
      UserGroupInformation.getLoginUser().getUserName());
  testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory,
      objMapper, fs);
  // Verify for the first batch
  PluginStoreTestUtils.verifyTestEntities(tdm);
  // Load new data
  TimelineEntity entityNew = PluginStoreTestUtils.createEntity("id_3",
      "type_3", 789L, null, null, null, null, "domain_id_1");
  TimelineEntities entityList = new TimelineEntities();
  entityList.addEntity(entityNew);
  writeEntitiesLeaveOpen(entityList,
      new Path(getTestRootPath(TEST_ATTEMPT_DIR_NAME), TEST_ENTITY_FILE_NAME));
  testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory,
      objMapper, fs);
  // Verify the newly added data
  TimelineEntity entity3 = tdm.getEntity(entityNew.getEntityType(),
      entityNew.getEntityId(), EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertNotNull(entity3);
  assertEquals("Failed to read out entity new", entityNew.getStartTime(),
      entity3.getStartTime());
  tdm.close();
}
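The test leans on PluginStoreTestUtils.createEntity to assemble the entity it writes. As a rough sketch, such a helper plausibly reduces to the plain TimelineEntity setters below; this is a hypothetical mirror, not the real utility, whose signature carries extra parameters (events, related entities, filters, other info) that the call above passes as null:

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

public final class EntityFactorySketch {

  // Hypothetical stand-in for PluginStoreTestUtils.createEntity; the real
  // helper also accepts events, related entities, primary filters, and
  // other info, all null in the call above.
  static TimelineEntity createEntity(String id, String type, long startTime,
      String domainId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId(id);
    entity.setEntityType(type);
    entity.setStartTime(startTime);
    entity.setDomainId(domainId);
    return entity;
  }

  public static void main(String[] args) {
    TimelineEntity e = createEntity("id_3", "type_3", 789L, "domain_id_1");
    System.out.println(e.getEntityId() + " @ " + e.getStartTime());
  }
}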
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestEntityGroupFSTimelineStore, the method testPluginRead:
@Test
public void testPluginRead() throws Exception {
  // Verify precondition
  assertEquals(EntityGroupPlugInForTest.class.getName(),
      store.getConfig().get(
          YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
  List<TimelineEntityGroupPlugin> currPlugins = store.getPlugins();
  for (TimelineEntityGroupPlugin plugin : currPlugins) {
    ClassLoader pluginClassLoader = plugin.getClass().getClassLoader();
    assertTrue("Should set up ApplicationClassLoader",
        pluginClassLoader instanceof ApplicationClassLoader);
    URL[] paths = ((URLClassLoader) pluginClassLoader).getURLs();
    boolean foundJAR = false;
    for (URL path : paths) {
      if (path.toString().contains(testJar.getAbsolutePath())) {
        foundJAR = true;
      }
    }
    assertTrue("Not found path " + testJar.getAbsolutePath()
        + " for plugin " + plugin.getClass().getName(), foundJAR);
  }
  // Load data and cache item, prepare timeline store by making a cache item
  EntityGroupFSTimelineStore.AppLogs appLogs = store.new AppLogs(
      mainTestAppId, mainTestAppDirPath, AppState.COMPLETED);
  EntityCacheItem cacheItem = new EntityCacheItem(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      config);
  cacheItem.setAppLogs(appLogs);
  store.setCachedLogs(
      EntityGroupPlugInForTest.getStandardTimelineGroupId(mainTestAppId),
      cacheItem);
  MutableCounterLong detailLogEntityRead =
      store.metrics.getGetEntityToDetailOps();
  MutableStat cacheRefresh = store.metrics.getCacheRefresh();
  long numEntityReadBefore = detailLogEntityRead.value();
  long cacheRefreshBefore = cacheRefresh.lastStat().numSamples();
  // Generate TDM
  TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithStore(config, store);
  // Verify single entity read
  TimelineEntity entity3 = tdm.getEntity("type_3", mainTestAppId.toString(),
      EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertNotNull(entity3);
  assertEquals(entityNew.getStartTime(), entity3.getStartTime());
  // Verify multiple entities read
  NameValuePair primaryFilter = new NameValuePair(
      EntityGroupPlugInForTest.APP_ID_FILTER_NAME, mainTestAppId.toString());
  TimelineEntities entities = tdm.getEntities("type_3", primaryFilter, null,
      null, null, null, null, null,
      EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals(entityNew.getStartTime(), entity.getStartTime());
  }
  // Verify metrics
  assertEquals(numEntityReadBefore + 2L, detailLogEntityRead.value());
  assertEquals(cacheRefreshBefore + 1L, cacheRefresh.lastStat().numSamples());
}
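The multiple-entity read hinges on the primary filter: only entities indexed under that name/value pair come back. A minimal sketch of building and inspecting one, assuming the NameValuePair from org.apache.hadoop.yarn.server.timeline (the filter name below is a stand-in for the plugin constant EntityGroupPlugInForTest.APP_ID_FILTER_NAME used above):

import org.apache.hadoop.yarn.server.timeline.NameValuePair;

public class FilterSketch {
  public static void main(String[] args) {
    String appId = "application_1234567890123_0001";
    // Query-side filter: a name plus the single expected value.
    NameValuePair primaryFilter = new NameValuePair("appRelatedEntity", appId);
    System.out.println(primaryFilter.getName() + " = "
        + primaryFilter.getValue());
  }
}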
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestEntityGroupFSTimelineStore, the method createTestFiles:
private void createTestFiles(ApplicationId appId, Path attemptDirPath)
    throws IOException {
  TimelineEntities entities = PluginStoreTestUtils.generateTestEntities();
  PluginStoreTestUtils.writeEntities(entities,
      new Path(attemptDirPath, TEST_SUMMARY_LOG_FILE_NAME), fs);
  Map<String, Set<Object>> primaryFilters = new HashMap<>();
  Set<Object> appSet = new HashSet<Object>();
  appSet.add(appId.toString());
  primaryFilters.put(EntityGroupPlugInForTest.APP_ID_FILTER_NAME, appSet);
  entityNew = PluginStoreTestUtils.createEntity(appId.toString(), "type_3",
      789L, null, null, primaryFilters, null, "domain_id_1");
  TimelineEntities entityList = new TimelineEntities();
  entityList.addEntity(entityNew);
  PluginStoreTestUtils.writeEntities(entityList,
      new Path(attemptDirPath, mainEntityLogFileName), fs);
  FSDataOutputStream out = fs.create(
      new Path(attemptDirPath, TEST_DOMAIN_LOG_FILE_NAME));
  out.close();
}
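The Map<String, Set<Object>> built above is the write-side counterpart of the NameValuePair read filter in the previous example: the entity is indexed under every value in each set. A minimal sketch of attaching such a filter directly through the public API (the key "appRelatedEntity" again stands in for EntityGroupPlugInForTest.APP_ID_FILTER_NAME):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

public class PrimaryFilterSketch {
  public static void main(String[] args) {
    Map<String, Set<Object>> primaryFilters = new HashMap<>();
    Set<Object> appSet = new HashSet<>();
    appSet.add("application_1234567890123_0001");
    primaryFilters.put("appRelatedEntity", appSet);

    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("application_1234567890123_0001");
    entity.setEntityType("type_3");
    entity.setStartTime(789L);
    // Attach the write-side filter map; readers can now locate the entity
    // by this name/value pair.
    entity.setPrimaryFilters(primaryFilters);
    entity.setDomainId("domain_id_1");
    System.out.println(entity.getPrimaryFilters());
  }
}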
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestMRTimelineEventHandling, the method testMRTimelineEventHandling:
@Test
public void testMRTimelineEventHandling() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
  MiniMRYarnCluster cluster = null;
  try {
    cluster = new MiniMRYarnCluster(
        TestMRTimelineEventHandling.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts = cluster.getApplicationHistoryServer()
        .getTimelineStore();
    String localPathRoot = System.getProperty("test.build.data",
        "build/test/data");
    Path inDir = new Path(localPathRoot, "input");
    Path outDir = new Path(localPathRoot, "output");
    RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir,
        outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
    Assert.assertEquals(EventType.AM_STARTED.toString(),
        tEntity.getEvents().get(tEntity.getEvents().size() - 1)
            .getEventType());
    Assert.assertEquals(EventType.JOB_FINISHED.toString(),
        tEntity.getEvents().get(0).getEventType());
    job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.FAILED,
        job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null,
        null, null, null, null);
    Assert.assertEquals(2, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
    Assert.assertEquals(EventType.AM_STARTED.toString(),
        tEntity.getEvents().get(tEntity.getEvents().size() - 1)
            .getEventType());
    Assert.assertEquals(EventType.JOB_FAILED.toString(),
        tEntity.getEvents().get(0).getEventType());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
  }
}
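Note that the assertions above read events newest-first: index 0 holds JOB_FINISHED (or JOB_FAILED), while the last element is AM_STARTED. Everything else in the test depends on two configuration switches being flipped before the mini cluster starts; a minimal standalone sketch of just that setup, using the same keys the test uses:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineConfSketch {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    // Enable the YARN timeline service itself...
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    // ...and have MapReduce jobs publish their events to it.
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    System.out.println(conf.get(YarnConfiguration.TIMELINE_SERVICE_ENABLED));
  }
}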