Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TimelineServiceV2Publisher, method putEntity.
private void putEntity(TimelineEntity entity, ApplicationId appId) {
    try {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Publishing the entity " + entity + ", JSON-style content: "
                + TimelineUtils.dumpTimelineRecordtoJSON(entity));
        }
        TimelineCollector timelineCollector = rmTimelineCollectorManager.get(appId);
        TimelineEntities entities = new TimelineEntities();
        entities.addEntity(entity);
        timelineCollector.putEntities(entities, UserGroupInformation.getCurrentUser());
    } catch (Exception e) {
        LOG.error("Error when publishing entity " + entity, e);
    }
}
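The core pattern here is wrapping a single TimelineEntity in a TimelineEntities container before handing it to the collector. A minimal standalone sketch of building that container; the id, type, and surrounding setup are illustrative assumptions, not taken from the Hadoop source:

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

// Build one entity and wrap it in the container type the collector expects.
TimelineEntity entity = new TimelineEntity();
entity.setId("entity_0001");                        // illustrative id
entity.setType("DEMO_ENTITY");                      // illustrative type
entity.setCreatedTime(System.currentTimeMillis());

TimelineEntities entities = new TimelineEntities();
entities.addEntity(entity);
// 'entities' is now ready to pass to TimelineCollector#putEntities(entities, ugi)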
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestTimelineClientV2Impl, method testConfigurableNumberOfMerges.
@Test
public void testConfigurableNumberOfMerges() throws Exception {
    client.setSleepBeforeReturn(true);
    try {
        // at most 3 entities should be merged per publish
        client.putEntitiesAsync(generateEntity("1"));
        client.putEntitiesAsync(generateEntity("2"));
        client.putEntitiesAsync(generateEntity("3"));
        client.putEntitiesAsync(generateEntity("4"));
        client.putEntities(generateEntity("5"));
        client.putEntitiesAsync(generateEntity("6"));
        client.putEntitiesAsync(generateEntity("7"));
        client.putEntitiesAsync(generateEntity("8"));
        client.putEntitiesAsync(generateEntity("9"));
        client.putEntitiesAsync(generateEntity("10"));
    } catch (YarnException e) {
        Assert.fail("No exception expected");
    }
    // A fixed sleep suffices here, since the assertion below does not depend
    // on how many times events are published.
    Thread.sleep(2 * TIME_TO_SLEEP);
    printReceivedEntities();
    for (TimelineEntities publishedEntities : client.publishedEntities) {
        Assert.assertTrue(
            "Number of entities should not be greater than 3 for each publish,"
                + " but was " + publishedEntities.getEntities().size(),
            publishedEntities.getEntities().size() <= 3);
    }
}
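generateEntity is a private helper of the test class whose body is not shown here. A plausible sketch of such a helper; the type name and the created-time field are assumptions:

private static TimelineEntity generateEntity(String id) {
    TimelineEntity entity = new TimelineEntity();
    entity.setId(id);                                  // the numeric string passed by the test
    entity.setType("testEntity");                      // assumed type name
    entity.setCreatedTime(System.currentTimeMillis());
    return entity;
}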
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class SimpleEntityWriterV2, method writeEntities.
protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    // simulate the app id with the task id
    int taskId = context.getTaskAttemptID().getTaskID().getId();
    long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
    ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
    // create the app level timeline collector
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    manager.putIfAbsent(appId, collector);
    try {
        // set the context
        // flow name: job name, flow run id: timestamp, user id
        TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
        tlContext.setFlowName(context.getJobName());
        tlContext.setFlowRunId(timestamp);
        tlContext.setUserId(context.getUser());
        final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
        long totalTime = 0;
        final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
        final Random rand = new Random();
        final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
        final char[] payLoad = new char[kbs * 1024];
        for (int i = 0; i < testtimes; i++) {
            // generate a fixed-length random payload
            for (int xx = 0; xx < kbs * 1024; xx++) {
                int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
                payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
            }
            String entId = taskAttemptId + "_" + Integer.toString(i);
            final TimelineEntity entity = new TimelineEntity();
            entity.setId(entId);
            entity.setType("FOO_ATTEMPT");
            entity.addInfo("PERF_TEST", payLoad);
            // add an event
            TimelineEvent event = new TimelineEvent();
            event.setId("foo_event_id");
            event.setTimestamp(System.currentTimeMillis());
            event.addInfo("foo_event", "test");
            entity.addEvent(event);
            // add a metric
            TimelineMetric metric = new TimelineMetric();
            metric.setId("foo_metric");
            metric.addValue(System.currentTimeMillis(), 123456789L);
            entity.addMetric(metric);
            // add a config
            entity.addConfig("foo", "bar");
            TimelineEntities entities = new TimelineEntities();
            entities.addEntity(entity);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                collector.putEntities(entities, ugi);
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
        }
        LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes + " kB) in "
            + totalTime + " ms");
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(testtimes);
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).increment(kbs * testtimes);
    } finally {
        // clean up
        manager.remove(appId);
    }
}
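The metric written above holds a single timestamped value. TimelineMetric can also carry a time series of values for one metric id. A short sketch; the metric id and sample values are illustrative, not from the Hadoop source:

TimelineMetric cpuMetric = new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
cpuMetric.setId("CPU_USAGE");                 // illustrative metric id
long now = System.currentTimeMillis();
cpuMetric.addValue(now - 2000, 40L);          // two-second-old sample
cpuMetric.addValue(now - 1000, 55L);
cpuMetric.addValue(now, 60L);                 // latest sample
entity.addMetric(cpuMetric);                  // attach to the entity as above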
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class JobHistoryFileReplayMapperV2, method writeAllEntities.
private void writeAllEntities(AppLevelTimelineCollector collector, List<TimelineEntity> entitySet, UserGroupInformation ugi) throws IOException {
    TimelineEntities entities = new TimelineEntities();
    entities.setEntities(entitySet);
    collector.putEntities(entities, ugi);
}
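Where the earlier snippets call addEntity once per entity, this helper installs an entire list in one shot via setEntities. A sketch of assembling such a batch; the ids and type are illustrative:

List<TimelineEntity> batch = new ArrayList<>();
for (int i = 0; i < 3; i++) {
    TimelineEntity e = new TimelineEntity();
    e.setId("job_history_event_" + i);        // illustrative ids
    e.setType("JOB_HISTORY_EVENT");           // illustrative type
    batch.add(e);
}
TimelineEntities entities = new TimelineEntities();
entities.setEntities(batch);                  // replaces any previously added entities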
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testWriteFlowActivityOneFlow.
/**
 * Writes one application entity and checks the record for today in the
 * flow activity table.
 */
@Test
public void testWriteFlowActivityOneFlow() throws Exception {
    String cluster = "testWriteFlowActivityOneFlow_cluster1";
    String user = "testWriteFlowActivityOneFlow_user1";
    String flow = "flow_activity_test_flow_name";
    String flowVersion = "A122110F135BC4";
    long runid = 1001111178919L;
    TimelineEntities te = new TimelineEntities();
    long appCreatedTime = 1425016501000L;
    TimelineEntity entityApp1 = TestFlowDataGenerator.getFlowApp1(appCreatedTime);
    te.addEntity(entityApp1);
    HBaseTimelineWriterImpl hbi = null;
    Configuration c1 = util.getConfiguration();
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        String appName = "application_1111999999_1234";
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        hbi.flush();
    } finally {
        if (hbi != null) {
            hbi.close();
        }
    }
    // check flow activity
    checkFlowActivityTable(cluster, user, flow, flowVersion, runid, c1, appCreatedTime);
    // use the reader to verify the data
    HBaseTimelineReaderImpl hbr = null;
    try {
        hbr = new HBaseTimelineReaderImpl();
        hbr.init(c1);
        hbr.start();
        Set<TimelineEntity> entities = hbr.getEntities(
            new TimelineReaderContext(cluster, user, flow, null, null,
                TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
            new TimelineEntityFilters(10L, null, null, null, null, null, null, null, null),
            new TimelineDataToRetrieve());
        assertEquals(1, entities.size());
        for (TimelineEntity e : entities) {
            FlowActivityEntity entity = (FlowActivityEntity) e;
            NavigableSet<FlowRunEntity> flowRuns = entity.getFlowRuns();
            assertEquals(1, flowRuns.size());
            for (FlowRunEntity flowRun : flowRuns) {
                assertEquals(runid, flowRun.getRunId());
                assertEquals(flowVersion, flowRun.getVersion());
            }
        }
    } finally {
        if (hbr != null) {
            hbr.close();
        }
    }
}
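The read side of the test packs everything into one null-heavy getEntities call. Spelled out with one parameter per line; the comments are my reading of the constructor positions used above, not documentation from the source:

// Scope the query: cluster/user/flow, any run, any app, flow-activity entities.
TimelineReaderContext ctx = new TimelineReaderContext(
    cluster, user, flow,
    null,                                             // flowRunId: match any run
    null,                                             // appId: not scoped to one app
    TimelineEntityType.YARN_FLOW_ACTIVITY.toString(),
    null);                                            // entityId: fetch all
// Only the limit is set; the remaining filters stay null (no filtering).
TimelineEntityFilters filters = new TimelineEntityFilters(
    10L, null, null, null, null, null, null, null, null);
Set<TimelineEntity> result = hbr.getEntities(ctx, filters, new TimelineDataToRetrieve());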