Example usage of org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity in the Apache Hadoop project.
Class TestTimelineServiceClientIntegration, method testPutExtendedEntities.
@Test
public void testPutExtendedEntities() throws Exception {
  ApplicationId applicationId = ApplicationId.newInstance(0, 1);
  TimelineV2Client timelineClient =
      TimelineV2Client.createTimelineClient(applicationId);
  try {
    // Point the client at the collector's REST endpoint by hand.
    timelineClient.setTimelineServiceAddress(
        collectorManager.getRestServerBindAddress());
    timelineClient.init(conf);
    timelineClient.start();

    // Build the entity hierarchy: cluster -> flow run -> application,
    // plus attempt, container, user and queue entities.
    ClusterEntity clusterEntity = new ClusterEntity();
    clusterEntity.setId(YarnConfiguration.DEFAULT_RM_CLUSTER_ID);

    FlowRunEntity flowRun = new FlowRunEntity();
    flowRun.setUser(
        UserGroupInformation.getCurrentUser().getShortUserName());
    flowRun.setName("test_flow_name");
    flowRun.setVersion("test_flow_version");
    flowRun.setRunId(1L);
    flowRun.setParent(clusterEntity.getType(), clusterEntity.getId());

    ApplicationEntity appEntity = new ApplicationEntity();
    appEntity.setId(applicationId.toString());
    flowRun.addChild(appEntity.getType(), appEntity.getId());

    ApplicationAttemptId attemptId =
        ApplicationAttemptId.newInstance(applicationId, 1);
    ApplicationAttemptEntity attemptEntity = new ApplicationAttemptEntity();
    attemptEntity.setId(attemptId.toString());

    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
    ContainerEntity containerEntity = new ContainerEntity();
    containerEntity.setId(containerId.toString());

    UserEntity userEntity = new UserEntity();
    userEntity.setId(
        UserGroupInformation.getCurrentUser().getShortUserName());

    QueueEntity queueEntity = new QueueEntity();
    queueEntity.setId("default_queue");

    // Publish the whole hierarchy both synchronously and asynchronously.
    timelineClient.putEntities(clusterEntity, flowRun, appEntity,
        attemptEntity, containerEntity, userEntity, queueEntity);
    timelineClient.putEntitiesAsync(clusterEntity, flowRun, appEntity,
        attemptEntity, containerEntity, userEntity, queueEntity);
  } finally {
    timelineClient.stop();
  }
}
Example usage of org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity in the Apache Hadoop project.
Class TestHBaseStorageFlowActivity, method testWriteFlowActivityOneFlow.
/**
 * Writes one application entity and checks today's record in the flow
 * activity table, first via a direct table check and then through the
 * timeline reader.
 */
@Test
public void testWriteFlowActivityOneFlow() throws Exception {
  String cluster = "testWriteFlowActivityOneFlow_cluster1";
  String user = "testWriteFlowActivityOneFlow_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion = "A122110F135BC4";
  long runid = 1001111178919L;
  long appCreatedTime = 1425016501000L;

  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getFlowApp1(appCreatedTime);
  TimelineEntities te = new TimelineEntities();
  te.addEntity(entityApp1);

  Configuration c1 = util.getConfiguration();
  HBaseTimelineWriterImpl writer = null;
  try {
    writer = new HBaseTimelineWriterImpl();
    writer.init(c1);
    String appName = "application_1111999999_1234";
    writer.write(cluster, user, flow, flowVersion, runid, appName, te);
    writer.flush();
  } finally {
    if (writer != null) {
      writer.close();
    }
  }

  // Verify the raw flow activity table contents.
  checkFlowActivityTable(cluster, user, flow, flowVersion, runid, c1,
      appCreatedTime);

  // Verify the same data through the reader API.
  HBaseTimelineReaderImpl reader = null;
  try {
    reader = new HBaseTimelineReaderImpl();
    reader.init(c1);
    reader.start();
    Set<TimelineEntity> entities = reader.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null,
            null, null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity entity = (FlowActivityEntity) e;
      NavigableSet<FlowRunEntity> flowRuns = entity.getFlowRuns();
      // Exactly one run, matching the run id and version we wrote.
      assertEquals(1, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        assertEquals(runid, flowRun.getRunId());
        assertEquals(flowVersion, flowRun.getVersion());
      }
    }
  } finally {
    if (reader != null) {
      reader.close();
    }
  }
}
Example usage of org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity in the Apache Hadoop project.
Class TestHBaseStorageFlowActivity, method testFlowActivityTableOneFlowMultipleRunIds.
/**
 * Writes 3 applications, each with a different run id and version, for the
 * same {cluster, user, flow}.
 *
 * They should all be inserted into one record in the flow activity table,
 * with 3 columns — one per run id.
 */
@Test
public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
  String cluster = "testManyRunsFlowActivity_cluster1";
  String user = "testManyRunsFlowActivity_c_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion1 = "A122110F135BC4";
  long runid1 = 11111111111L;
  String flowVersion2 = "A12222222222C4";
  long runid2 = 2222222222222L;
  String flowVersion3 = "A1333333333C4";
  long runid3 = 3333333333333L;
  long appCreatedTime = 1425016501000L;
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getFlowApp1(appCreatedTime);

  // The same app entity is written under three distinct run id / version
  // pairs to the same flow.
  String[] appNames = {"application_11888888888_1111",
      "application_11888888888_2222", "application_11888888888_3333"};
  String[] flowVersions = {flowVersion1, flowVersion2, flowVersion3};
  long[] runids = {runid1, runid2, runid3};

  Configuration c1 = util.getConfiguration();
  HBaseTimelineWriterImpl writer = null;
  try {
    writer = new HBaseTimelineWriterImpl();
    writer.init(c1);
    for (int i = 0; i < appNames.length; i++) {
      TimelineEntities te = new TimelineEntities();
      te.addEntity(entityApp1);
      writer.write(cluster, user, flow, flowVersions[i], runids[i],
          appNames[i], te);
    }
    writer.flush();
  } finally {
    if (writer != null) {
      writer.close();
    }
  }

  // Verify the raw flow activity table contents across the three runs.
  checkFlowActivityTableSeveralRuns(cluster, user, flow, c1, flowVersion1,
      runid1, flowVersion2, runid2, flowVersion3, runid3, appCreatedTime);

  // Verify through the timeline reader as well.
  HBaseTimelineReaderImpl reader = null;
  try {
    reader = new HBaseTimelineReaderImpl();
    reader.init(c1);
    reader.start();
    Set<TimelineEntity> entities = reader.getEntities(
        new TimelineReaderContext(cluster, null, null, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null,
            null, null, null),
        new TimelineDataToRetrieve());
    // A single activity record aggregating all three runs.
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity flowActivity = (FlowActivityEntity) e;
      assertEquals(cluster, flowActivity.getCluster());
      assertEquals(user, flowActivity.getUser());
      assertEquals(flow, flowActivity.getFlowName());
      long dayTs =
          HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
      assertEquals(dayTs, flowActivity.getDate().getTime());
      Set<FlowRunEntity> flowRuns = flowActivity.getFlowRuns();
      assertEquals(3, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        long runId = flowRun.getRunId();
        String version = flowRun.getVersion();
        if (runId == runid1) {
          assertEquals(flowVersion1, version);
        } else if (runId == runid2) {
          assertEquals(flowVersion2, version);
        } else if (runId == runid3) {
          assertEquals(flowVersion3, version);
        } else {
          fail("unknown run id: " + runId);
        }
      }
    }
  } finally {
    if (reader != null) {
      reader.close();
    }
  }
}
Example usage of org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity in the Apache Hadoop project.
Class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowRuns.
/**
 * Exercises the flow runs REST endpoint: unfiltered listing, limit,
 * created-time range filters, metric retrieval via the fields parameter,
 * and rejection of an unsupported fields value.
 *
 * The repeated fetch/verify boilerplate from the original is factored out
 * into {@link #fetchFlowRunEntities(Client, URI, int)}.
 */
@Test
public void testGetFlowRuns() throws Exception {
  Client client = createClient();
  try {
    // All runs of the flow: expect both run ids with no metrics inlined.
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs");
    Set<FlowRunEntity> entities = fetchFlowRunEntities(client, uri, 2);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          ((entity.getId().equals("user1@flow_name/1002345678919")) &&
          (entity.getRunId() == 1002345678919L) &&
          (entity.getStartTime() == 1425016501000L)) ||
          ((entity.getId().equals("user1@flow_name/1002345678920")) &&
          (entity.getRunId() == 1002345678920L) &&
          (entity.getStartTime() == 1425016501034L)));
      assertEquals(0, entity.getMetrics().size());
    }

    // limit=1: only the most recent run should be returned.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
        "clusters/cluster1/users/user1/flows/flow_name/runs?limit=1");
    entities = fetchFlowRunEntities(client, uri, 1);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          entity.getId().equals("user1@flow_name/1002345678920") &&
          entity.getRunId() == 1002345678920L &&
          entity.getStartTime() == 1425016501034L);
      assertEquals(0, entity.getMetrics().size());
    }

    // createdtimestart filters out the earlier run.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "createdtimestart=1425016501030");
    entities = fetchFlowRunEntities(client, uri, 1);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          entity.getId().equals("user1@flow_name/1002345678920") &&
          entity.getRunId() == 1002345678920L &&
          entity.getStartTime() == 1425016501034L);
      assertEquals(0, entity.getMetrics().size());
    }

    // A range wide enough to include both runs.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "createdtimestart=1425016500999&createdtimeend=1425016501035");
    entities = fetchFlowRunEntities(client, uri, 2);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          ((entity.getId().equals("user1@flow_name/1002345678919")) &&
          (entity.getRunId() == 1002345678919L) &&
          (entity.getStartTime() == 1425016501000L)) ||
          ((entity.getId().equals("user1@flow_name/1002345678920")) &&
          (entity.getRunId() == 1002345678920L) &&
          (entity.getStartTime() == 1425016501034L)));
      assertEquals(0, entity.getMetrics().size());
    }

    // createdtimeend filters out the later run.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "createdtimeend=1425016501030");
    entities = fetchFlowRunEntities(client, uri, 1);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          entity.getId().equals("user1@flow_name/1002345678919") &&
          entity.getRunId() == 1002345678919L &&
          entity.getStartTime() == 1425016501000L);
      assertEquals(0, entity.getMetrics().size());
    }

    // fields=metrics inlines the metrics (3 on one run, 1 on the other).
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "fields=metrics");
    entities = fetchFlowRunEntities(client, uri, 2);
    for (FlowRunEntity entity : entities) {
      assertTrue("Id, run id or start time does not match.",
          ((entity.getId().equals("user1@flow_name/1002345678919")) &&
          (entity.getRunId() == 1002345678919L) &&
          (entity.getStartTime() == 1425016501000L) &&
          (entity.getMetrics().size() == 3)) ||
          ((entity.getId().equals("user1@flow_name/1002345678920")) &&
          (entity.getRunId() == 1002345678920L) &&
          (entity.getStartTime() == 1425016501034L) &&
          (entity.getMetrics().size() == 1)));
    }

    // fields as CONFIGS will lead to a HTTP 400 as it makes no sense for
    // flow runs.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "fields=CONFIGS");
    verifyHttpResponse(client, uri, Status.BAD_REQUEST);
  } finally {
    client.destroy();
  }
}

/**
 * Issues a GET for the given URI, verifies the JSON (utf-8) media type and
 * the expected number of flow run entities, and returns the entity set for
 * further per-entity assertions.
 */
private Set<FlowRunEntity> fetchFlowRunEntities(Client client, URI uri,
    int expectedCount) throws Exception {
  ClientResponse resp = getResponse(client, uri);
  Set<FlowRunEntity> entities =
      resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
      });
  assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
      resp.getType().toString());
  assertNotNull(entities);
  assertEquals(expectedCount, entities.size());
  return entities;
}
Example usage of org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity in the Apache Hadoop project.
Class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowRunsMetricsToRetrieve.
/**
 * Verifies the metricstoretrieve query parameter on the flow runs REST
 * endpoint, using both an include-prefix list and a negated prefix list.
 */
@Test
public void testGetFlowRunsMetricsToRetrieve() throws Exception {
  Client client = createClient();
  try {
    // Only metrics whose ids start with MAP_ or HDFS_ should come back.
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "metricstoretrieve=MAP_,HDFS_");
    ClientResponse resp = getResponse(client, uri);
    Set<FlowRunEntity> entities =
        resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
        });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(2, entities.size());
    int metricCnt = 0;
    for (FlowRunEntity entity : entities) {
      for (TimelineMetric metric : entity.getMetrics()) {
        metricCnt++;
        assertTrue(metric.getId().startsWith("MAP_")
            || metric.getId().startsWith("HDFS_"));
      }
    }
    assertEquals(3, metricCnt);

    // Negated list: everything except MAP_- and HDFS_-prefixed metrics.
    // Note MAP1_ does not match the MAP_ prefix, so it survives the filter.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "metricstoretrieve=!(MAP_,HDFS_)");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
    });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(2, entities.size());
    metricCnt = 0;
    for (FlowRunEntity entity : entities) {
      for (TimelineMetric metric : entity.getMetrics()) {
        metricCnt++;
        assertTrue(metric.getId().startsWith("MAP1_"));
      }
    }
    assertEquals(1, metricCnt);
  } finally {
    client.destroy();
  }
}
Aggregations