Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
Class TestFileSystemTimelineReaderImpl, method testAppFlowMappingCsv.
/** This test checks whether we can handle commas in app flow mapping csv. */
@Test
public void testAppFlowMappingCsv() throws Exception {
  // Test getting an entity by cluster and app where flow entry
  // in app flow mapping csv has commas.
  TimelineEntity result = reader.getEntity(
      new TimelineReaderContext("cluster1", null, null, null, "app2",
          "app", "id_5"),
      new TimelineDataToRetrieve(null, null, null, null));
  Assert.assertEquals(
      (new TimelineEntity.Identifier("app", "id_5")).toString(),
      result.getIdentifier().toString());
  Assert.assertEquals((Long) 1425016502050L, result.getCreatedTime());
}
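For context, the fixture behind this test is an app-to-flow mapping file in which one flow name itself contains commas. The row below is a hypothetical illustration only; the column layout and quoting convention are assumptions, not the actual test fixture. It shows why a naive split on "," would break and why the reader must honor quoted fields:

// Hypothetical app_flow_mapping.csv row (assumed layout:
// cluster,user,flow name,flow run id,app id). The quoted flow name
// contains commas, so the parser must treat "flow name,with,commas"
// as one field instead of splitting on every comma.
//
//   cluster1,user1,"flow name,with,commas",1002345678919,app2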
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
Class TestFileSystemTimelineReaderImpl, method testGetEntityDefaultView.
@Test
public void testGetEntityDefaultView() throws Exception {
  // If no fields are specified, entity is returned with default view i.e.
  // only the id, type and created time.
  TimelineEntity result = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", "id_1"),
      new TimelineDataToRetrieve(null, null, null, null));
  Assert.assertEquals(
      (new TimelineEntity.Identifier("app", "id_1")).toString(),
      result.getIdentifier().toString());
  Assert.assertEquals((Long) 1425016502000L, result.getCreatedTime());
  Assert.assertEquals(0, result.getConfigs().size());
  Assert.assertEquals(0, result.getMetrics().size());
}
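By contrast, a caller can widen the view by passing a field set in TimelineDataToRetrieve. A minimal sketch, assuming the Field enum from TimelineReader (with values such as CONFIGS and METRICS) and the same four-argument (confs, metrics, fieldsToRetrieve, metricsLimit) constructor shape used above:

// Sketch: request configs and metrics in addition to the default view.
// Field is assumed to be TimelineReader.Field; EnumSet is
// java.util.EnumSet.
TimelineEntity full = reader.getEntity(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
        "app", "id_1"),
    new TimelineDataToRetrieve(null, null,
        EnumSet.of(Field.CONFIGS, Field.METRICS), null));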
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
Class TestFileSystemTimelineReaderImpl, method testGetEntitiesByRelations.
@Test
public void testGetEntitiesByRelations() throws Exception {
  // Get entities based on relatesTo.
  TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
  Set<Object> relatesToIds =
      new HashSet<Object>(Arrays.asList((Object) "flow1"));
  relatesTo.addFilter(new TimelineKeyValuesFilter(
      TimelineCompareOp.EQUAL, "flow", relatesToIds));
  Set<TimelineEntity> result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, null, null, relatesTo, null, null,
          null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(1, result.size());
  // Only one entity with ID id_1 should be returned.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_1")) {
      Assert.fail("Incorrect filtering based on relatesTo");
    }
  }
  // Get entities based on isRelatedTo.
  TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
  Set<Object> isRelatedToIds =
      new HashSet<Object>(Arrays.asList((Object) "tid1_2"));
  isRelatedTo.addFilter(new TimelineKeyValuesFilter(
      TimelineCompareOp.EQUAL, "type1", isRelatedToIds));
  result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, null, null, null, isRelatedTo, null,
          null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(2, result.size());
  // Two entities with IDs id_1 and id_3 should be returned.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
      Assert.fail("Incorrect filtering based on isRelatedTo");
    }
  }
}
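The two filter lists above are each built with Operator.OR and applied in separate queries; nothing in this test combines them. As a sketch using only the types already shown here, a caller could require several relations to hold simultaneously by constructing the list with Operator.AND instead (the second flow name is made up purely for illustration):

// Sketch: require BOTH relatesTo conditions by combining two
// TimelineKeyValuesFilter instances under Operator.AND.
TimelineFilterList bothFlows = new TimelineFilterList(Operator.AND);
bothFlows.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
    "flow", new HashSet<Object>(Arrays.asList((Object) "flow1"))));
bothFlows.addFilter(new TimelineKeyValuesFilter(TimelineCompareOp.EQUAL,
    "flow", new HashSet<Object>(Arrays.asList((Object) "flow2"))));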
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
Class TestFileSystemTimelineReaderImpl, method testGetEntityByClusterAndApp.
@Test
public void testGetEntityByClusterAndApp() throws Exception {
  // Cluster and AppId should be enough to get an entity.
  TimelineEntity result = reader.getEntity(
      new TimelineReaderContext("cluster1", null, null, null, "app1",
          "app", "id_1"),
      new TimelineDataToRetrieve(null, null, null, null));
  Assert.assertEquals(
      (new TimelineEntity.Identifier("app", "id_1")).toString(),
      result.getIdentifier().toString());
  Assert.assertEquals((Long) 1425016502000L, result.getCreatedTime());
  Assert.assertEquals(0, result.getConfigs().size());
  Assert.assertEquals(0, result.getMetrics().size());
}
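For comparison, the same entity can also be fetched with a fully specified context, as testGetEntityDefaultView above does. The point of this test is that when user, flow and flow run are left null, the reader resolves them itself (for the file system reader, via the app flow mapping exercised earlier):

// Equivalent lookup with the flow context filled in explicitly;
// no app-to-flow resolution is needed in this case.
TimelineEntity sameEntity = reader.getEntity(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
        "app", "id_1"),
    new TimelineDataToRetrieve(null, null, null, null));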
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
Class ApplicationEntityReader, method getResult.
@Override
protected Result getResult(Configuration hbaseConf, Connection conn,
    FilterList filterList) throws IOException {
  TimelineReaderContext context = getContext();
  // Compose the application table row key from the context fields.
  ApplicationRowKey applicationRowKey = new ApplicationRowKey(
      context.getClusterId(), context.getUserId(), context.getFlowName(),
      context.getFlowRunId(), context.getAppId());
  byte[] rowKey = applicationRowKey.getRowKey();
  Get get = new Get(rowKey);
  // Metric time series are stored as multiple cell versions; cap how
  // many versions the Get will read.
  get.setMaxVersions(getDataToRetrieve().getMetricsLimit());
  // Attach the filter list only if it actually contains filters.
  if (filterList != null && !filterList.getFilters().isEmpty()) {
    get.setFilter(filterList);
  }
  return getTable().getResult(hbaseConf, conn, get);
}
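The filterList parameter here is a standard HBase org.apache.hadoop.hbase.filter.FilterList. A minimal sketch of the kind of filter a caller could pass, assuming HBase 1.x-style APIs (SingleColumnValueFilter, CompareFilter.CompareOp) and hypothetical family/qualifier names; in practice the reader framework builds these filters itself:

// Sketch only: restrict the Get to rows whose (hypothetical)
// "info:created_time" cell equals a given timestamp. The family and
// qualifier names are illustrative, not the real table schema.
FilterList list = new FilterList(FilterList.Operator.MUST_PASS_ALL);
list.addFilter(new SingleColumnValueFilter(
    Bytes.toBytes("info"), Bytes.toBytes("created_time"),
    CompareFilter.CompareOp.EQUAL, Bytes.toBytes(1425016502000L)));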