Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestRollingLevelDBTimelineStore, method testRelatingToOldEntityWithoutDomainId.
@Test
public void testRelatingToOldEntityWithoutDomainId() throws IOException {
  // New entity is put in the default domain
  TimelineEntity entityToStore = new TimelineEntity();
  entityToStore.setEntityType("NEW_ENTITY_TYPE_1");
  entityToStore.setEntityId("NEW_ENTITY_ID_1");
  entityToStore.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
  entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entityToStore);
  store.put(entities);
  TimelineEntity entityToGet =
      store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
  Assert.assertNotNull(entityToGet);
  Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
  Assert.assertEquals("NEW_ENTITY_TYPE_1",
      entityToGet.getRelatedEntities().keySet().iterator().next());
  Assert.assertEquals("NEW_ENTITY_ID_1",
      entityToGet.getRelatedEntities().values().iterator().next()
          .iterator().next());

  // New entity is not put in the default domain
  entityToStore = new TimelineEntity();
  entityToStore.setEntityType("NEW_ENTITY_TYPE_2");
  entityToStore.setEntityId("NEW_ENTITY_ID_2");
  entityToStore.setDomainId("NON_DEFAULT");
  entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
  entities = new TimelineEntities();
  entities.addEntity(entityToStore);
  TimelinePutResponse response = store.put(entities);
  Assert.assertEquals(1, response.getErrors().size());
  Assert.assertEquals(TimelinePutError.FORBIDDEN_RELATION,
      response.getErrors().get(0).getErrorCode());
  entityToGet = store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
  Assert.assertNotNull(entityToGet);
  Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
  // Still have one related entity
  Assert.assertEquals(1, entityToGet.getRelatedEntities().keySet().size());
  Assert.assertEquals(1,
      entityToGet.getRelatedEntities().values().iterator().next().size());
}
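For context, here is a minimal standalone sketch of the same put-then-get round trip. Using the in-tree MemoryTimelineStore, along with the entity type and id names, is an illustrative assumption, not part of the test above:

// Sketch only: MemoryTimelineStore is a YARN service, so it must be
// initialized and started before use.
TimelineStore store = new MemoryTimelineStore();
store.init(new YarnConfiguration());
store.start();

TimelineEntity entity = new TimelineEntity();
entity.setEntityType("MY_TYPE");
entity.setEntityId("MY_ID");
entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);

TimelineEntities batch = new TimelineEntities();
batch.addEntity(entity);
// An empty error list means every entity in the batch was accepted.
TimelinePutResponse response = store.put(batch);
System.out.println("errors: " + response.getErrors().size());

TimelineEntity fetched = store.getEntity("MY_ID", "MY_TYPE", null);
System.out.println("domain: " + fetched.getDomainId());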
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestMRTimelineEventHandling, method testMRTimelineEventHandling.
@Test
public void testMRTimelineEventHandling() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
  MiniMRYarnCluster cluster = null;
  try {
    cluster = new MiniMRYarnCluster(
        TestMRTimelineEventHandling.class.getSimpleName(), 1);
    cluster.init(conf);
    cluster.start();
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        MiniYARNCluster.getHostname() + ":"
            + cluster.getApplicationHistoryServer().getPort());
    TimelineStore ts =
        cluster.getApplicationHistoryServer().getTimelineStore();

    String localPathRoot =
        System.getProperty("test.build.data", "build/test/data");
    Path inDir = new Path(localPathRoot, "input");
    Path outDir = new Path(localPathRoot, "output");
    RunningJob job =
        UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.SUCCEEDED,
        job.getJobStatus().getState().getValue());
    TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null,
        null, null, null, null, null, null, null);
    Assert.assertEquals(1, entities.getEntities().size());
    TimelineEntity tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
    // Events come back newest first: JOB_FINISHED at index 0,
    // AM_STARTED last.
    Assert.assertEquals(EventType.AM_STARTED.toString(),
        tEntity.getEvents().get(tEntity.getEvents().size() - 1)
            .getEventType());
    Assert.assertEquals(EventType.JOB_FINISHED.toString(),
        tEntity.getEvents().get(0).getEventType());

    job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
    Assert.assertEquals(JobStatus.FAILED,
        job.getJobStatus().getState().getValue());
    entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null,
        null, null, null, null, null);
    Assert.assertEquals(2, entities.getEntities().size());
    tEntity = entities.getEntities().get(0);
    Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
    Assert.assertEquals(EventType.AM_STARTED.toString(),
        tEntity.getEvents().get(tEntity.getEvents().size() - 1)
            .getEventType());
    Assert.assertEquals(EventType.JOB_FAILED.toString(),
        tEntity.getEvents().get(0).getEventType());
  } finally {
    if (cluster != null) {
      cluster.stop();
    }
  }
}
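The nine null arguments in ts.getEntities above correspond to limit, windowStart, windowEnd, fromId, fromTs, primaryFilter, secondaryFilters, fieldsToRetrieve, and checkAcl; passing null accepts the store defaults (see the KeyValueBasedTimelineStore implementation below for how those defaults are filled in). A hedged sketch of the same query with the arguments spelled out; the limit value is an illustrative choice:

// Sketch: the MAPREDUCE_JOB query with explicit arguments.
// `ts` is the TimelineStore obtained from the cluster above.
TimelineEntities jobs = ts.getEntities(
    "MAPREDUCE_JOB",            // entityType
    10L,                        // limit: at most 10 entities
    null, null,                 // windowStart, windowEnd: unbounded
    null, null,                 // fromId, fromTs: no pagination
    null, null,                 // primaryFilter, secondaryFilters: none
    EnumSet.allOf(Field.class), // fieldsToRetrieve: everything
    null);                      // checkAcl: skip ACL checks
for (TimelineEntity e : jobs.getEntities()) {
  System.out.println(e.getEntityId() + ": "
      + e.getEvents().size() + " events");
}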
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class KeyValueBasedTimelineStore, method getEntities.
@Override
public synchronized TimelineEntities getEntities(String entityType,
    Long limit, Long windowStart, Long windowEnd, String fromId,
    Long fromTs, NameValuePair primaryFilter,
    Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
    CheckAcl checkAcl) throws IOException {
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    return null;
  }
  // Fill in defaults for the optional arguments.
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }
  // If fromId is given, start iterating from that entity (pagination).
  Iterator<TimelineEntity> entityIterator = null;
  if (fromId != null) {
    TimelineEntity firstEntity =
        entities.get(new EntityIdentifier(fromId, entityType));
    if (firstEntity == null) {
      return new TimelineEntities();
    } else {
      entityIterator = entities.valueSetIterator(firstEntity);
    }
  }
  if (entityIterator == null) {
    entityIterator = entities.valueSetIterator();
  }
  List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
  while (entityIterator.hasNext()) {
    TimelineEntity entity = entityIterator.next();
    if (entitiesSelected.size() >= limit) {
      break;
    }
    if (!entity.getEntityType().equals(entityType)) {
      continue;
    }
    if (entity.getStartTime() <= windowStart) {
      continue;
    }
    if (entity.getStartTime() > windowEnd) {
      continue;
    }
    if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
        entity.getEntityId(), entity.getEntityType())) > fromTs) {
      continue;
    }
    if (primaryFilter != null && !KeyValueBasedTimelineStoreUtils
        .matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
      continue;
    }
    if (secondaryFilters != null) {
      // AND logic: every secondary filter must match either a primary
      // filter or an otherInfo entry.
      boolean flag = true;
      for (NameValuePair secondaryFilter : secondaryFilters) {
        if (secondaryFilter != null && !KeyValueBasedTimelineStoreUtils
            .matchPrimaryFilter(entity.getPrimaryFilters(), secondaryFilter)
            && !KeyValueBasedTimelineStoreUtils.matchFilter(
                entity.getOtherInfo(), secondaryFilter)) {
          flag = false;
          break;
        }
      }
      if (!flag) {
        continue;
      }
    }
    if (entity.getDomainId() == null) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    if (checkAcl == null || checkAcl.check(entity)) {
      entitiesSelected.add(entity);
    }
  }
  // Mask out the fields the caller did not ask for, then sort.
  List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
  for (TimelineEntity entitySelected : entitiesSelected) {
    entitiesToReturn.add(
        KeyValueBasedTimelineStoreUtils.maskFields(entitySelected, fields));
  }
  Collections.sort(entitiesToReturn);
  TimelineEntities entitiesWrapper = new TimelineEntities();
  entitiesWrapper.setEntities(entitiesToReturn);
  return entitiesWrapper;
}
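Because every secondary filter must match (the AND block above), a query sketch makes the semantics concrete. The filter names and values below are illustrative assumptions; `store` is a KeyValueBasedTimelineStore instance:

// Sketch: entities must carry user=alice as a primary filter, AND
// both queue=default and state=FINISHED in either their primary
// filters or their otherInfo map.
NameValuePair primary = new NameValuePair("user", "alice");
Collection<NameValuePair> secondary = Arrays.asList(
    new NameValuePair("queue", "default"),
    new NameValuePair("state", "FINISHED"));
TimelineEntities result = store.getEntities(
    "MY_TYPE",        // entityType
    100L,             // limit
    null, null,       // windowStart, windowEnd: unbounded
    null, null,       // fromId, fromTs: no pagination
    primary, secondary,
    EnumSet.allOf(Field.class),
    null);            // checkAcl: no ACL check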
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class ApplicationHistoryManagerOnTimelineStore, method getApplicationAttempts.
@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport>
    getApplicationAttempts(ApplicationId appId)
    throws YarnException, IOException {
  ApplicationReportExt app =
      getApplication(appId, ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  TimelineEntities entities = timelineDataManager.getEntities(
      AppAttemptMetricsConstants.ENTITY_TYPE,
      new NameValuePair(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER,
          appId.toString()),
      null, null, null, null, null, Long.MAX_VALUE,
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts =
      new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
  for (TimelineEntity entity : entities.getEntities()) {
    ApplicationAttemptReport appAttempt =
        convertToApplicationAttemptReport(entity);
    appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
  }
  return appAttempts;
}
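A hedged caller-side sketch of this API; the historyManager and appId variables are assumed to be in scope:

// Sketch: list all attempts recorded for an application.
Map<ApplicationAttemptId, ApplicationAttemptReport> attempts =
    historyManager.getApplicationAttempts(appId);
for (ApplicationAttemptReport report : attempts.values()) {
  System.out.println(report.getApplicationAttemptId() + " -> "
      + report.getYarnApplicationAttemptState());
}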
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class ApplicationHistoryManagerOnTimelineStore, method getContainers.
@Override
public Map<ContainerId, ContainerReport> getContainers(
    ApplicationAttemptId appAttemptId) throws YarnException, IOException {
  ApplicationReportExt app = getApplication(
      appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  // Note: PARENT_PRIMARIY_FILTER is the constant's actual (misspelled)
  // name in ContainerMetricsConstants.
  TimelineEntities entities = timelineDataManager.getEntities(
      ContainerMetricsConstants.ENTITY_TYPE,
      new NameValuePair(ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
          appAttemptId.toString()),
      null, null, null, null, null, Long.MAX_VALUE,
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  Map<ContainerId, ContainerReport> containers =
      new LinkedHashMap<ContainerId, ContainerReport>();
  if (entities != null && entities.getEntities() != null) {
    for (TimelineEntity entity : entities.getEntities()) {
      ContainerReport container = convertToContainerReport(
          entity, serverHttpAddress, app.appReport.getUser());
      containers.put(container.getContainerId(), container);
    }
  }
  return containers;
}
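Unlike getApplicationAttempts above, this method guards against a null TimelineEntities result. A matching caller sketch; historyManager and appAttemptId are assumed to be in scope:

// Sketch: list containers for one attempt and print their exit status.
Map<ContainerId, ContainerReport> containers =
    historyManager.getContainers(appAttemptId);
for (ContainerReport container : containers.values()) {
  System.out.println(container.getContainerId() + " exit="
      + container.getContainerExitStatus());
}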