Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId in project tez by Apache.
In class TestTimelineCachePluginImpl, the method testGetTimelineEntityGroupIdByIdWithOldGroupIdsSingle:
@Test
public void testGetTimelineEntityGroupIdByIdWithOldGroupIdsSingle() {
  TimelineCachePluginImpl plugin = createPlugin(100, "50");
  for (Entry<String, String> entry : typeIdMap2.entrySet()) {
    Set<TimelineEntityGroupId> groupIds = plugin.getTimelineEntityGroupId(entry.getValue(), entry.getKey());
    if (entry.getKey().equals(EntityTypes.TEZ_DAG_ID.name())) {
      Assert.assertNull(groupIds);
      continue;
    }
    Assert.assertEquals(3, groupIds.size());
    Iterator<TimelineEntityGroupId> iter = groupIds.iterator();
    while (iter.hasNext()) {
      TimelineEntityGroupId groupId = iter.next();
      Assert.assertEquals(appId2, groupId.getApplicationId());
      Assert.assertTrue(getGroupIds(dagID2, 100, 50).contains(groupId.getTimelineEntityGroupId()));
    }
  }
}
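For reference, a TimelineEntityGroupId pairs a YARN ApplicationId with a free-form group string, and the assertions above exercise exactly those two accessors. The following minimal, self-contained sketch shows how such an instance is built and compared; the literal timestamp, sequence number, and group string are illustrative values, not the ones produced by typeIdMap2 or the test's getGroupIds helper.

import java.util.Collections;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;

public class TimelineGroupIdSketch {
  public static void main(String[] args) {
    // An ApplicationId is built from a cluster timestamp and a sequence number.
    ApplicationId appId = ApplicationId.newInstance(1000L, 1);

    // The second argument is a free-form group name; Tez derives it from the DAG id,
    // but "dag_1000_0001_1" here is just an illustrative value.
    TimelineEntityGroupId groupId =
        TimelineEntityGroupId.newInstance(appId, "dag_1000_0001_1");

    // The two accessors exercised by the assertions in the tests above.
    System.out.println(groupId.getApplicationId());          // application_1000_0001
    System.out.println(groupId.getTimelineEntityGroupId());  // dag_1000_0001_1

    // Membership checks like getGroupIds(...).contains(...) compare the string form.
    Set<String> expected = Collections.singleton("dag_1000_0001_1");
    System.out.println(expected.contains(groupId.getTimelineEntityGroupId())); // true
  }
}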
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId in project tez by Apache.
In class TestTimelineCachePluginImpl, the method testGetTimelineEntityGroupIdByIdDefaultConfig:
@Test
public void testGetTimelineEntityGroupIdByIdDefaultConfig() {
  TimelineCachePluginImpl plugin = createPlugin(-1, null);
  for (Entry<String, String> entry : typeIdMap1.entrySet()) {
    Set<TimelineEntityGroupId> groupIds = plugin.getTimelineEntityGroupId(entry.getValue(), entry.getKey());
    if (entry.getKey().equals(EntityTypes.TEZ_DAG_ID.name())) {
      Assert.assertNull(groupIds);
      continue;
    }
    Assert.assertEquals(1, groupIds.size());
    Iterator<TimelineEntityGroupId> iter = groupIds.iterator();
    while (iter.hasNext()) {
      TimelineEntityGroupId groupId = iter.next();
      Assert.assertEquals(appId1, groupId.getApplicationId());
      Assert.assertTrue(getGroupIds(dagID1).contains(groupId.getTimelineEntityGroupId()));
    }
  }
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId in project tez by Apache.
In class TestTimelineCachePluginImpl, the method testGetTimelineEntityGroupIdByIdWithOldGroupIdsMultiple:
@Test
public void testGetTimelineEntityGroupIdByIdWithOldGroupIdsMultiple() {
  TimelineCachePluginImpl plugin = createPlugin(100, "25, 50");
  for (Entry<String, String> entry : typeIdMap2.entrySet()) {
    Set<TimelineEntityGroupId> groupIds = plugin.getTimelineEntityGroupId(entry.getValue(), entry.getKey());
    if (entry.getKey().equals(EntityTypes.TEZ_DAG_ID.name())) {
      Assert.assertNull(groupIds);
      continue;
    }
    Assert.assertEquals(4, groupIds.size());
    Iterator<TimelineEntityGroupId> iter = groupIds.iterator();
    while (iter.hasNext()) {
      TimelineEntityGroupId groupId = iter.next();
      Assert.assertEquals(appId2, groupId.getApplicationId());
      Assert.assertTrue(getGroupIds(dagID2, 100, 25, 50).contains(groupId.getTimelineEntityGroupId()));
    }
  }
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId in project tez by Apache.
In class TestTimelineCachePluginImpl, the method testGetTimelineEntityGroupIdByIdNoGroupingConf:
@Test
public void testGetTimelineEntityGroupIdByIdNoGroupingConf() {
  TimelineCachePluginImpl plugin = createPlugin(1, null);
  for (Entry<String, String> entry : typeIdMap1.entrySet()) {
    Set<TimelineEntityGroupId> groupIds = plugin.getTimelineEntityGroupId(entry.getValue(), entry.getKey());
    if (entry.getKey().equals(EntityTypes.TEZ_DAG_ID.name())) {
      Assert.assertNull(groupIds);
      continue;
    }
    Assert.assertEquals(1, groupIds.size());
    Iterator<TimelineEntityGroupId> iter = groupIds.iterator();
    while (iter.hasNext()) {
      TimelineEntityGroupId groupId = iter.next();
      Assert.assertEquals(appId1, groupId.getApplicationId());
      Assert.assertTrue(getGroupIds(dagID1).contains(groupId.getTimelineEntityGroupId()));
    }
  }
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId in project tez by Apache.
In class ATSV15HistoryLoggingService, the method handleEvents:
private void handleEvents(DAGHistoryEvent event) {
  String domainId = getDomainForEvent(event);
  // skippedDAGs is updated in the above call, so check again.
  if (event.getDagID() != null && skippedDAGs.contains(event.getDagID())) {
    return;
  }
  TimelineEntityGroupId groupId = getGroupId(event);
  List<TimelineEntity> entities = HistoryEventTimelineConversion.convertToTimelineEntities(event.getHistoryEvent());
  for (TimelineEntity entity : entities) {
    logEntity(groupId, entity, domainId);
  }
}
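The group id computed by getGroupId(event) accompanies each converted entity when it is written to the timeline server. Below is a minimal sketch of what a logEntity-style method could look like, assuming the ATS v1.5 TimelineClient overload that accepts a TimelineEntityGroupId; the class, field, and parameter names are assumptions for illustration, not the actual Tez implementation.

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.exceptions.YarnException;

// Illustrative only: the real logEntity in ATSV15HistoryLoggingService may differ.
class TimelineSinkSketch {
  private final TimelineClient timelineClient;
  private final ApplicationAttemptId attemptId;

  TimelineSinkSketch(TimelineClient timelineClient, ApplicationAttemptId attemptId) {
    this.timelineClient = timelineClient;
    this.attemptId = attemptId;
  }

  void logEntity(TimelineEntityGroupId groupId, TimelineEntity entity, String domainId)
      throws IOException, YarnException {
    if (domainId != null && !domainId.isEmpty()) {
      entity.setDomainId(domainId);
    }
    // The v1.5 overload files the entity under the given entity group, which is what
    // TimelineCachePluginImpl.getTimelineEntityGroupId() later resolves on reads.
    timelineClient.putEntities(attemptId, groupId, entity);
  }
}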