Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class FlowActivityEntityReader, method parseEntity.
@Override
protected TimelineEntity parseEntity(Result result) throws IOException {
    // Decode the HBase row key into its flow-activity components:
    // (cluster-scoped) day timestamp, user, and flow name.
    FlowActivityRowKey activityRowKey =
        FlowActivityRowKey.parseRowKey(result.getRow());
    Long dayTimestamp = activityRowKey.getDayTimestamp();
    String userId = activityRowKey.getUserId();
    String flow = activityRowKey.getFlowName();

    FlowActivityEntity activity = new FlowActivityEntity(
        getContext().getClusterId(), dayTimestamp, userId, flow);
    // getId() derives the id from the fields set above; setId() stores
    // that derived value on the entity so it is serialized with it.
    activity.setId(activity.getId());

    // Read the run-id -> flow-version mapping recorded for this flow on
    // this day, and attach one FlowRunEntity per recorded run.
    Map<Long, Object> runVersions =
        FlowActivityColumnPrefix.RUN_ID.readResults(result, longKeyConverter);
    for (Map.Entry<Long, Object> entry : runVersions.entrySet()) {
        FlowRunEntity run = new FlowRunEntity();
        run.setUser(userId);
        run.setName(flow);
        run.setRunId(entry.getKey());
        run.setVersion((String) entry.getValue());
        // Materialize the derived id, as done for the parent activity.
        run.setId(run.getId());
        activity.addFlowRun(run);
    }
    return activity;
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testWriteFlowRunMinMax.
/**
 * Writes 4 timeline entities belonging to one flow run through the
 * {@link HBaseTimelineWriterImpl}.
 *
 * Checks the flow run table contents.
 *
 * The first entity has a created event, metrics and a finish event.
 *
 * The second entity has a created event and this is the entity with smallest
 * start time. This should be the start time for the flow run.
 *
 * The third entity has a finish event and this is the entity with the max end
 * time. This should be the end time for the flow run.
 *
 * The fourth entity has a created event which has a start time that is
 * greater than min start time.
 *
 * The test also checks in the flow activity table that one entry has been
 * made for all of these 4 application entities since they belong to the same
 * flow run.
 */
@Test
public void testWriteFlowRunMinMax() throws Exception {
    TimelineEntities te = new TimelineEntities();
    te.addEntity(TestFlowDataGenerator.getEntity1());
    HBaseTimelineWriterImpl hbi = null;
    Configuration c1 = util.getConfiguration();
    String cluster = "testWriteFlowRunMinMaxToHBase_cluster1";
    String user = "testWriteFlowRunMinMaxToHBase_user1";
    String flow = "testing_flowRun_flow_name";
    String flowVersion = "CF7022C10F1354";
    long runid = 1002345678919L;
    String appName = "application_100000000000_1111";
    long minStartTs = 1424995200300L;
    long greaterStartTs = 1424995200300L + 864000L;
    long endTs = 1424995200300L + 86000000L;
    TimelineEntity entityMinStartTime =
        TestFlowDataGenerator.getEntityMinStartTime(minStartTs);
    try {
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        // write another entity with the right min start time
        te = new TimelineEntities();
        te.addEntity(entityMinStartTime);
        appName = "application_100000000000_3333";
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        // write another entity for max end time
        TimelineEntity entityMaxEndTime =
            TestFlowDataGenerator.getEntityMaxEndTime(endTs);
        te = new TimelineEntities();
        te.addEntity(entityMaxEndTime);
        appName = "application_100000000000_4444";
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        // write another entity with greater start time
        TimelineEntity entityGreaterStartTime =
            TestFlowDataGenerator.getEntityGreaterStartTime(greaterStartTs);
        te = new TimelineEntities();
        te.addEntity(entityGreaterStartTime);
        appName = "application_1000000000000000_2222";
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        // flush everything to hbase
        hbi.flush();
    } finally {
        if (hbi != null) {
            hbi.close();
        }
    }

    // All 4 apps share one flow on one day, so the activity table must hold
    // exactly one row keyed by (cluster, top-of-day(minStartTs), user, flow).
    // Hoisted out of the try block below because the reader verification at
    // the end of the test also asserts against it.
    Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(minStartTs);

    // FIX: Connection and Table were previously never closed, leaking HBase
    // client resources; both are Closeable, so use try-with-resources.
    try (Connection conn = ConnectionFactory.createConnection(c1);
         Table table1 = conn.getTable(
             TableName.valueOf(FlowActivityTable.DEFAULT_TABLE_NAME))) {
        // check in flow activity table
        byte[] startRow =
            new FlowActivityRowKey(cluster, minStartTs, user, flow).getRowKey();
        Get g = new Get(startRow);
        Result r1 = table1.get(g);
        assertNotNull(r1);
        assertTrue(!r1.isEmpty());
        Map<byte[], byte[]> values =
            r1.getFamilyMap(FlowActivityColumnFamily.INFO.getBytes());
        // Exactly one run-id column should have been recorded.
        // (A duplicate of this assertion was removed.)
        assertEquals(1, values.size());
        byte[] row = r1.getRow();
        FlowActivityRowKey flowActivityRowKey = FlowActivityRowKey.parseRowKey(row);
        assertNotNull(flowActivityRowKey);
        assertEquals(cluster, flowActivityRowKey.getClusterId());
        assertEquals(user, flowActivityRowKey.getUserId());
        assertEquals(flow, flowActivityRowKey.getFlowName());
        assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
        checkFlowActivityRunId(runid, flowVersion, values);
    }

    // use the timeline reader to verify data
    HBaseTimelineReaderImpl hbr = null;
    try {
        hbr = new HBaseTimelineReaderImpl();
        hbr.init(c1);
        hbr.start();
        // get the flow activity entity
        Set<TimelineEntity> entities = hbr.getEntities(
            new TimelineReaderContext(cluster, null, null, null, null,
                TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
            new TimelineEntityFilters(10L, null, null, null, null, null,
                null, null, null),
            new TimelineDataToRetrieve());
        assertEquals(1, entities.size());
        for (TimelineEntity e : entities) {
            FlowActivityEntity flowActivity = (FlowActivityEntity) e;
            assertEquals(cluster, flowActivity.getCluster());
            assertEquals(user, flowActivity.getUser());
            assertEquals(flow, flowActivity.getFlowName());
            assertEquals(dayTs, Long.valueOf(flowActivity.getDate().getTime()));
            Set<FlowRunEntity> flowRuns = flowActivity.getFlowRuns();
            assertEquals(1, flowRuns.size());
        }
    } finally {
        if (hbr != null) {
            hbr.close();
        }
    }
}
Use of org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity in project hadoop by apache.
The class TestTimelineReaderWebServicesHBaseStorage, method testGetEntitiesByUID.
/**
 * End-to-end walk of the v2 timeline REST API using UIDs: queries all flows,
 * then drills down flow -> flow runs -> apps -> type1 entities, asserting at
 * each level that the UID stored in the returned entity's info map equals the
 * UID re-encoded from that entity's context. Finally verifies that malformed
 * UIDs are rejected with HTTP 400 (BAD_REQUEST).
 */
@Test
public void testGetEntitiesByUID() throws Exception {
Client client = createClient();
try {
// Query all flows.
URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/flows");
ClientResponse resp = getResponse(client, uri);
Set<FlowActivityEntity> flowEntities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
});
assertNotNull(flowEntities);
assertEquals(2, flowEntities.size());
// Collect each flow's UID (from its info map) and check it round-trips
// through TimelineUIDConverter.FLOW_UID.
List<String> listFlowUIDs = new ArrayList<String>();
for (FlowActivityEntity entity : flowEntities) {
String flowUID = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
listFlowUIDs.add(flowUID);
assertEquals(TimelineUIDConverter.FLOW_UID.encodeUID(new TimelineReaderContext(entity.getCluster(), entity.getUser(), entity.getFlowName(), null, null, null, null)), flowUID);
assertTrue((entity.getId().endsWith("@flow_name") && entity.getFlowRuns().size() == 2) || (entity.getId().endsWith("@flow_name2") && entity.getFlowRuns().size() == 1));
}
// Query flowruns based on UID returned in query above.
List<String> listFlowRunUIDs = new ArrayList<String>();
for (String flowUID : listFlowUIDs) {
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/flow-uid/" + flowUID + "/runs");
resp = getResponse(client, uri);
Set<FlowRunEntity> frEntities = resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
});
assertNotNull(frEntities);
for (FlowRunEntity entity : frEntities) {
String flowRunUID = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
listFlowRunUIDs.add(flowRunUID);
assertEquals(TimelineUIDConverter.FLOWRUN_UID.encodeUID(new TimelineReaderContext("cluster1", entity.getUser(), entity.getName(), entity.getRunId(), null, null, null)), flowRunUID);
}
}
// 2 runs under one flow + 1 under the other (asserted above) = 3 total.
assertEquals(3, listFlowRunUIDs.size());
// Query single flowrun based on UIDs' returned in query to get flowruns.
for (String flowRunUID : listFlowRunUIDs) {
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/run-uid/" + flowRunUID);
resp = getResponse(client, uri);
FlowRunEntity entity = resp.getEntity(FlowRunEntity.class);
assertNotNull(entity);
}
// Query apps based on UIDs' returned in query to get flowruns.
List<String> listAppUIDs = new ArrayList<String>();
for (String flowRunUID : listFlowRunUIDs) {
// Decode the run UID back into a context so the expected app UID can be
// rebuilt from its cluster/user/flow/run components below.
TimelineReaderContext context = TimelineUIDConverter.FLOWRUN_UID.decodeUID(flowRunUID);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/run-uid/" + flowRunUID + "/apps");
resp = getResponse(client, uri);
Set<TimelineEntity> appEntities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
});
assertNotNull(appEntities);
for (TimelineEntity entity : appEntities) {
String appUID = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
listAppUIDs.add(appUID);
assertEquals(TimelineUIDConverter.APPLICATION_UID.encodeUID(new TimelineReaderContext(context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId(), entity.getId(), null, null)), appUID);
}
}
assertEquals(4, listAppUIDs.size());
// Query single app based on UIDs' returned in query to get apps.
for (String appUID : listAppUIDs) {
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/app-uid/" + appUID);
resp = getResponse(client, uri);
TimelineEntity entity = resp.getEntity(TimelineEntity.class);
assertNotNull(entity);
}
// Query entities based on UIDs' returned in query to get apps and
// a specific entity type(in this case type1).
List<String> listEntityUIDs = new ArrayList<String>();
for (String appUID : listAppUIDs) {
TimelineReaderContext context = TimelineUIDConverter.APPLICATION_UID.decodeUID(appUID);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/app-uid/" + appUID + "/entities/type1");
resp = getResponse(client, uri);
Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
});
assertNotNull(entities);
for (TimelineEntity entity : entities) {
String entityUID = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
listEntityUIDs.add(entityUID);
assertEquals(TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(new TimelineReaderContext(context.getClusterId(), context.getUserId(), context.getFlowName(), context.getFlowRunId(), context.getAppId(), "type1", entity.getId())), entityUID);
}
}
assertEquals(2, listEntityUIDs.size());
// Query single entity based on UIDs' returned in query to get entities.
for (String entityUID : listEntityUIDs) {
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/entity-uid/" + entityUID);
resp = getResponse(client, uri);
TimelineEntity entity = resp.getEntity(TimelineEntity.class);
assertNotNull(entity);
}
// Negative cases: malformed UIDs at every endpoint must yield 400.
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/flow-uid/dummy:flow/runs");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/run-uid/dummy:flowrun");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
// Run Id is not a numerical value.
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/run-uid/some:dummy:flow:123v456");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/run-uid/dummy:flowrun/apps");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/app-uid/dummy:app");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/app-uid/dummy:app/entities/type1");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/entity-uid/dummy:entity");
verifyHttpResponse(client, uri, Status.BAD_REQUEST);
} finally {
client.destroy();
}
}
Aggregations