Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestTimelineClientV2Impl, method testSyncCall.
@Test
public void testSyncCall() throws Exception {
  try {
    // sync entities should not be merged with async entities
    client.putEntities(generateEntity("1"));
    client.putEntitiesAsync(generateEntity("2"));
    client.putEntitiesAsync(generateEntity("3"));
    // apart from the sync calls, the two async entities above should be merged
    client.putEntities(generateEntity("4"));
  } catch (YarnException e) {
    Assert.fail("Exception is not expected");
  }
  for (int i = 0; i < 4; i++) {
    if (client.getNumOfTimelineEntitiesPublished() == 3) {
      break;
    }
    Thread.sleep(TIME_TO_SLEEP);
  }
  printReceivedEntities();
  Assert.assertEquals("TimelineEntities not published as desired", 3,
      client.getNumOfTimelineEntitiesPublished());
  TimelineEntities firstPublishedEntities = client.getPublishedEntities(0);
  Assert.assertEquals("sync entities should not be merged with async", 1,
      firstPublishedEntities.getEntities().size());
  // the async entities are merged and pushed before the second sync put
  TimelineEntities secondPublishedEntities = client.getPublishedEntities(1);
  Assert.assertEquals("async entities should be merged before publishing sync", 2,
      secondPublishedEntities.getEntities().size());
  Assert.assertEquals("Order of async events needs to be FIFO", "2",
      secondPublishedEntities.getEntities().get(0).getId());
  Assert.assertEquals("Order of async events needs to be FIFO", "3",
      secondPublishedEntities.getEntities().get(1).getId());
  // the last entities published should contain only the second sync put
  TimelineEntities thirdPublishedEntities = client.getPublishedEntities(2);
  Assert.assertEquals("sync entity should be published last", 1,
      thirdPublishedEntities.getEntities().size());
  Assert.assertEquals("Expected last sync event is not proper", "4",
      thirdPublishedEntities.getEntities().get(0).getId());
}
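
The test relies on a generateEntity helper that is not part of the snippet above. A minimal sketch of such a helper, assuming only the standard TimelineEntity setters (id, type, created time), could look like this; the entity type "testEntity" is an arbitrary placeholder:

// Minimal sketch of the generateEntity helper assumed by the tests above.
private static TimelineEntity generateEntity(String id) {
  TimelineEntity entity = new TimelineEntity();
  entity.setId(id);
  entity.setType("testEntity"); // placeholder type, not taken from the source
  entity.setCreatedTime(System.currentTimeMillis());
  return entity;
}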
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestTimelineClientV2Impl, method testAfterStop.
@Test
public void testAfterStop() throws Exception {
  client.setSleepBeforeReturn(true);
  try {
    // at most 3 entities need to be merged
    client.putEntities(generateEntity("1"));
    for (int i = 2; i < 20; i++) {
      client.putEntitiesAsync(generateEntity("" + i));
    }
    client.stop();
    try {
      client.putEntitiesAsync(generateEntity("50"));
      Assert.fail("Exception expected");
    } catch (YarnException e) {
      // expected: puts after stop() must be rejected
    }
  } catch (YarnException e) {
    Assert.fail("No exception expected");
  }
  // wait (up to 5 rounds) until the last queued async entity ("19") is published
  for (int i = 0; i < 5; i++) {
    TimelineEntities publishedEntities =
        client.publishedEntities.get(client.publishedEntities.size() - 1);
    TimelineEntity timelineEntity = publishedEntities.getEntities()
        .get(publishedEntities.getEntities().size() - 1);
    if (!timelineEntity.getId().equals("19")) {
      Thread.sleep(2 * TIME_TO_SLEEP);
    }
  }
  printReceivedEntities();
  TimelineEntities publishedEntities =
      client.publishedEntities.get(client.publishedEntities.size() - 1);
  TimelineEntity timelineEntity = publishedEntities.getEntities()
      .get(publishedEntities.getEntities().size() - 1);
  Assert.assertEquals("Last published entity should be the last async put",
      "19", timelineEntity.getId());
}
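
The same sync/async publishing pattern applies outside the test harness through the public TimelineV2Client API. Below is a hedged sketch of typical application-side usage, not taken from the source above: the appId and conf parameters and the entity id/type are placeholders, and the wiring that hands the client the timeline collector address (normally received by the AM from the RM) is omitted.

// Sketch only: typical application-side use of the v2 timeline client.
// Collector-address setup is omitted; names below are placeholders.
void publishExample(ApplicationId appId, Configuration conf)
    throws IOException, YarnException {
  TimelineV2Client timelineClient = TimelineV2Client.createTimelineClient(appId);
  timelineClient.init(conf);
  timelineClient.start();
  try {
    TimelineEntity entity = new TimelineEntity();
    entity.setId("entity_1");
    entity.setType("exampleType");
    timelineClient.putEntitiesAsync(entity); // buffered; may be merged with later async puts
    timelineClient.putEntities(entity);      // blocking; pending async puts are published first
  } finally {
    timelineClient.stop();                   // further puts fail with YarnException
  }
}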
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class JobHistoryFileReplayMapperV2, method writePerEntity.
private void writePerEntity(AppLevelTimelineCollector collector,
    List<TimelineEntity> entitySet, UserGroupInformation ugi)
    throws IOException {
  for (TimelineEntity entity : entitySet) {
    TimelineEntities entities = new TimelineEntities();
    entities.addEntity(entity);
    collector.putEntities(entities, ugi);
    LOG.info("wrote entity " + entity.getId());
  }
}
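
For comparison, the same entities can also be published as one batch rather than one put per entity. The following is a sketch, not taken from the source above; it mirrors the signature of writePerEntity and assumes TimelineEntities.setEntities:

// Hedged sketch of a batch variant: all entities go into a single
// TimelineEntities container and are published with one put.
private void writeAllEntities(AppLevelTimelineCollector collector,
    List<TimelineEntity> entitySet, UserGroupInformation ugi)
    throws IOException {
  TimelineEntities entities = new TimelineEntities();
  entities.setEntities(entitySet);
  collector.putEntities(entities, ugi);
}

Publishing per entity gives finer-grained logging and isolates failures to individual entities, while the batch form reduces the number of collector calls.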