Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testWriteFlowActivityOneFlow.
/**
 * Writes one application entity and checks the record for today in the flow
 * activity table.
 */
@Test
public void testWriteFlowActivityOneFlow() throws Exception {
  String cluster = "testWriteFlowActivityOneFlow_cluster1";
  String user = "testWriteFlowActivityOneFlow_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion = "A122110F135BC4";
  long runid = 1001111178919L;
  TimelineEntities te = new TimelineEntities();
  long appCreatedTime = 1425016501000L;
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getFlowApp1(appCreatedTime);
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_1111999999_1234";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // check flow activity
  checkFlowActivityTable(cluster, user, flow, flowVersion, runid, c1,
      appCreatedTime);
  // use the reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null, null,
            null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity entity = (FlowActivityEntity) e;
      NavigableSet<FlowRunEntity> flowRuns = entity.getFlowRuns();
      assertEquals(1, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        assertEquals(runid, flowRun.getRunId());
        assertEquals(flowVersion, flowRun.getVersion());
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
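The helper checkFlowActivityTable is defined elsewhere in the test class. As a rough sketch of what such a check involves, the fragment below scans the flow activity table and asserts that exactly one row exists; the table name and the bare row count are assumptions for illustration, not the exact constants and per-column checks of the real helper.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

// Sketch only: scan the flow activity table and expect exactly one row for
// the single {cluster, day, user, flow} written above. The table name is an
// assumed default, not necessarily FlowActivityTable's actual constant.
private void checkFlowActivityTableSketch(Configuration c1)
    throws IOException {
  try (Connection conn = ConnectionFactory.createConnection(c1);
      Table table =
          conn.getTable(TableName.valueOf("timelineservice.flowactivity"));
      ResultScanner scanner = table.getScanner(new Scan())) {
    int rowCount = 0;
    for (Result result : scanner) {
      assertTrue(!result.isEmpty());
      rowCount++;
    }
    assertEquals(1, rowCount);
  }
}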
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testFlowActivityTableOneFlowMultipleRunIds.
/**
 * Writes 3 applications, each with a different run id and version, for the
 * same {cluster, user, flow}.
 *
 * They should all be inserted into one record in the flow activity table,
 * with 3 columns, one per run id.
 */
@Test
public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
  String cluster = "testManyRunsFlowActivity_cluster1";
  String user = "testManyRunsFlowActivity_c_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion1 = "A122110F135BC4";
  long runid1 = 11111111111L;
  String flowVersion2 = "A12222222222C4";
  long runid2 = 2222222222222L;
  String flowVersion3 = "A1333333333C4";
  long runid3 = 3333333333333L;
  TimelineEntities te = new TimelineEntities();
  long appCreatedTime = 1425016501000L;
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getFlowApp1(appCreatedTime);
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_11888888888_1111";
    hbi.write(cluster, user, flow, flowVersion1, runid1, appName, te);
    // write another application to this flow, but with a different
    // run id and version
    te = new TimelineEntities();
    te.addEntity(entityApp1);
    appName = "application_11888888888_2222";
    hbi.write(cluster, user, flow, flowVersion2, runid2, appName, te);
    // write a third application to this flow, again with a different
    // run id and version
    te = new TimelineEntities();
    te.addEntity(entityApp1);
    appName = "application_11888888888_3333";
    hbi.write(cluster, user, flow, flowVersion3, runid3, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // check flow activity
  checkFlowActivityTableSeveralRuns(cluster, user, flow, c1, flowVersion1,
      runid1, flowVersion2, runid2, flowVersion3, runid3, appCreatedTime);
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, null, null, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null, null,
            null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity flowActivity = (FlowActivityEntity) e;
      assertEquals(cluster, flowActivity.getCluster());
      assertEquals(user, flowActivity.getUser());
      assertEquals(flow, flowActivity.getFlowName());
      long dayTs =
          HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
      assertEquals(dayTs, flowActivity.getDate().getTime());
      Set<FlowRunEntity> flowRuns = flowActivity.getFlowRuns();
      assertEquals(3, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        long runId = flowRun.getRunId();
        String version = flowRun.getVersion();
        if (runId == runid1) {
          assertEquals(flowVersion1, version);
        } else if (runId == runid2) {
          assertEquals(flowVersion2, version);
        } else if (runId == runid3) {
          assertEquals(flowVersion3, version);
        } else {
          fail("unknown run id: " + runId);
        }
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
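The dayTs assertion above relies on HBaseTimelineStorageUtils.getTopOfTheDayTimestamp, which truncates a millisecond timestamp to midnight (UTC) of its day. A minimal sketch of the arithmetic, assuming plain modulo truncation (the constant name is illustrative):

private static final long MILLIS_ONE_DAY = 86400000L;

// Truncate a millisecond timestamp to the top of its (UTC) day.
public static long getTopOfTheDayTimestamp(long ts) {
  // e.g. 1425016501000L (the appCreatedTime above) -> 1424995200000L
  return ts - (ts % MILLIS_ONE_DAY);
}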
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestTimelineReaderWebServicesHBaseStorage, method testGetFlows.
@Test
public void testGetFlows() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows");
    ClientResponse resp = getResponse(client, uri);
    Set<FlowActivityEntity> entities =
        resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (FlowActivityEntity entity : entities) {
      assertTrue((entity.getId().endsWith("@flow_name") &&
          entity.getFlowRuns().size() == 2) ||
          (entity.getId().endsWith("@flow_name2") &&
          entity.getFlowRuns().size() == 1));
    }

    // Query without specifying cluster ID.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/flows/");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(2, entities.size());

    // Query with a limit of 1.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?limit=1");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(1, entities.size());

    // Query with an explicit start-end date range.
    long firstFlowActivity =
        HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(1425016501000L);
    DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get();
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=" +
        fmt.format(firstFlowActivity) + "-" + fmt.format(dayTs));
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (FlowActivityEntity entity : entities) {
      assertTrue((entity.getId().endsWith("@flow_name") &&
          entity.getFlowRuns().size() == 2) ||
          (entity.getId().endsWith("@flow_name2") &&
          entity.getFlowRuns().size() == 1));
    }

    // A single date 4 days in the future should match nothing.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=" +
        fmt.format(dayTs + (4 * 86400000L)));
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(0, entities.size());

    // Open-ended range: everything up to the end date.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=-" + fmt.format(dayTs));
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(2, entities.size());

    // Open-ended range: everything from the start date on.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=" +
        fmt.format(firstFlowActivity) + "-");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertNotNull(entities);
    assertEquals(2, entities.size());

    // Malformed or inverted date ranges should be rejected with
    // 400 Bad Request.
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=20150711:20150714");
    verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=20150714-20150711");
    verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=2015071129-20150712");
    verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/flows?daterange=20150711-2015071243");
    verifyHttpResponse(client, uri, Status.BAD_REQUEST);
  } finally {
    client.destroy();
  }
}
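Taken together, the daterange cases above imply the parameter accepts dates formatted as yyyyMMdd and four shapes: a single date (that one day only), "start-end", "-end" (everything up to end), and "start-" (everything from start on); a colon separator, an inverted range, or a date with extra digits yields 400 Bad Request. A hedged sketch of how a client might build such a query (the GMT time zone is an assumption consistent with the day-boundary arithmetic used in these tests):

import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMdd");
fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
// 1424995200000L is the top of the day for the appCreatedTime used in these
// tests; in GMT it formats as "20150227".
String from = fmt.format(new Date(1424995200000L));
// Open-ended range: flows active on or after the start date.
URI uri = URI.create("http://localhost:" + serverPort +
    "/ws/v2/timeline/clusters/cluster1/flows?daterange=" + from + "-");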
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowsNotPresent.
@Test
public void testGetFlowsNotPresent() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster2/flows");
    ClientResponse resp = getResponse(client, uri);
    Set<FlowActivityEntity> entities =
        resp.getEntity(new GenericType<Set<FlowActivityEntity>>(){});
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(0, entities.size());
  } finally {
    client.destroy();
  }
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TimelineReaderManager, method fillUID.
/**
 * Fills in the UID in the entity's info field, based on the query
 * (identified by the entity type).
 * @param entityType Entity type of the query.
 * @param entity Timeline entity.
 * @param context Context defining the query.
 */
private static void fillUID(TimelineEntityType entityType,
    TimelineEntity entity, TimelineReaderContext context) {
  if (entityType != null) {
    switch (entityType) {
    case YARN_FLOW_ACTIVITY:
      FlowActivityEntity activityEntity = (FlowActivityEntity) entity;
      context.setUserId(activityEntity.getUser());
      context.setFlowName(activityEntity.getFlowName());
      entity.setUID(UID_KEY,
          TimelineUIDConverter.FLOW_UID.encodeUID(context));
      return;
    case YARN_FLOW_RUN:
      FlowRunEntity runEntity = (FlowRunEntity) entity;
      context.setFlowRunId(runEntity.getRunId());
      entity.setUID(UID_KEY,
          TimelineUIDConverter.FLOWRUN_UID.encodeUID(context));
      return;
    case YARN_APPLICATION:
      context.setAppId(entity.getId());
      entity.setUID(UID_KEY,
          TimelineUIDConverter.APPLICATION_UID.encodeUID(context));
      return;
    default:
      break;
    }
  }
  context.setEntityType(entity.getType());
  context.setEntityId(entity.getId());
  entity.setUID(UID_KEY,
      TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(context));
}
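A hypothetical illustration of the YARN_FLOW_ACTIVITY branch (the entity values and the rendered UID string below are made-up assumptions; the actual delimiter and escaping are owned by TimelineUIDConverter.FLOW_UID):

// Inside TimelineReaderManager (fillUID is private), or a test in the same
// package. Entity values are for illustration only.
TimelineReaderContext context = new TimelineReaderContext(
    "cluster1", null, null, null, null, null, null);
FlowActivityEntity flowEntity = new FlowActivityEntity(
    "cluster1", 1424995200000L, "user1", "flow_name");
fillUID(TimelineEntityType.YARN_FLOW_ACTIVITY, flowEntity, context);
// The user and flow name are copied from the entity into the context, and
// the encoded UID lands in the entity's info map under UID_KEY; the result
// would look something like "cluster1!user1!flow_name".
String uid = (String) flowEntity.getInfo().get(UID_KEY);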