Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineCollector, method aggregate.
/**
 * Aggregate internal status and generate timeline entities for the
 * aggregation results.
 *
 * @param aggregationGroups Aggregation status table
 * @param resultEntityId Id of the result entity
 * @param resultEntityType Type of the result entity
 * @return A timeline entity that contains all aggregated TimelineMetrics.
 */
static TimelineEntity aggregate(
    Map<String, AggregationStatusTable> aggregationGroups,
    String resultEntityId, String resultEntityType) {
  TimelineEntity result = new TimelineEntity();
  result.setId(resultEntityId);
  result.setType(resultEntityType);
  // Fold every group's accumulated state into the single result entity,
  // keyed by that group's id.
  for (Map.Entry<String, AggregationStatusTable> entry
      : aggregationGroups.entrySet()) {
    entry.getValue().aggregateAllTo(result, entry.getKey());
  }
  return result;
}
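The pattern here is a simple fold: each AggregationStatusTable flushes its accumulated metrics into one shared result entity under its group id. Below is a minimal, self-contained sketch of the same pattern; the GroupTable interface, the id "flow_run_1", and the type "YARN_FLOW_RUN" are hypothetical stand-ins, since this excerpt does not show how AggregationStatusTable is built.

import java.util.Map;

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

public final class AggregateSketch {

  // Hypothetical stand-in for AggregationStatusTable: anything that can
  // fold its accumulated state into a shared result entity.
  interface GroupTable {
    void aggregateAllTo(TimelineEntity result, String groupId);
  }

  static TimelineEntity aggregate(Map<String, GroupTable> groups) {
    TimelineEntity result = new TimelineEntity();
    result.setId("flow_run_1");      // hypothetical result id
    result.setType("YARN_FLOW_RUN"); // hypothetical result type
    // Same fold as TimelineCollector.aggregate(): every group merges its
    // state into the single result entity under its own group key.
    for (Map.Entry<String, GroupTable> e : groups.entrySet()) {
      e.getValue().aggregateAllTo(result, e.getKey());
    }
    return result;
  }
}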
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class FileSystemTimelineReaderImpl, method getEntities.
private Set<TimelineEntity> getEntities(File dir, String entityType,
    TimelineEntityFilters filters, TimelineDataToRetrieve dataToRetrieve)
    throws IOException {
  // First sort the selected entities based on created/start time,
  // newest first.
  Map<Long, Set<TimelineEntity>> sortedEntities =
      new TreeMap<>(new Comparator<Long>() {
        @Override
        public int compare(Long l1, Long l2) {
          return l2.compareTo(l1);
        }
      });
  for (File entityFile : dir.listFiles()) {
    if (!entityFile.getName().contains(TIMELINE_SERVICE_STORAGE_EXTENSION)) {
      continue;
    }
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
        new FileInputStream(entityFile), Charset.forName("UTF-8")))) {
      TimelineEntity entity = readEntityFromFile(reader);
      if (!entity.getType().equals(entityType)) {
        continue;
      }
      if (!isTimeInRange(entity.getCreatedTime(),
          filters.getCreatedTimeBegin(), filters.getCreatedTimeEnd())) {
        continue;
      }
      // Apply each optional filter; the entity is skipped as soon as it
      // fails any filter that has a non-empty filter list.
      if (filters.getRelatesTo() != null &&
          !filters.getRelatesTo().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchRelatesTo(entity,
              filters.getRelatesTo())) {
        continue;
      }
      if (filters.getIsRelatedTo() != null &&
          !filters.getIsRelatedTo().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchIsRelatedTo(entity,
              filters.getIsRelatedTo())) {
        continue;
      }
      if (filters.getInfoFilters() != null &&
          !filters.getInfoFilters().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchInfoFilters(entity,
              filters.getInfoFilters())) {
        continue;
      }
      if (filters.getConfigFilters() != null &&
          !filters.getConfigFilters().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchConfigFilters(entity,
              filters.getConfigFilters())) {
        continue;
      }
      if (filters.getMetricFilters() != null &&
          !filters.getMetricFilters().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchMetricFilters(entity,
              filters.getMetricFilters())) {
        continue;
      }
      if (filters.getEventFilters() != null &&
          !filters.getEventFilters().getFilterList().isEmpty() &&
          !TimelineStorageUtils.matchEventFilters(entity,
              filters.getEventFilters())) {
        continue;
      }
      TimelineEntity entityToBeReturned = createEntityToBeReturned(
          entity, dataToRetrieve.getFieldsToRetrieve());
      Set<TimelineEntity> entitiesCreatedAtSameTime =
          sortedEntities.get(entityToBeReturned.getCreatedTime());
      if (entitiesCreatedAtSameTime == null) {
        entitiesCreatedAtSameTime = new HashSet<TimelineEntity>();
      }
      entitiesCreatedAtSameTime.add(entityToBeReturned);
      sortedEntities.put(entityToBeReturned.getCreatedTime(),
          entitiesCreatedAtSameTime);
    }
  }
  // Flatten the time-ordered buckets into the result set, stopping once
  // the configured limit is reached.
  Set<TimelineEntity> entities = new HashSet<TimelineEntity>();
  long entitiesAdded = 0;
  for (Set<TimelineEntity> entitySet : sortedEntities.values()) {
    for (TimelineEntity entity : entitySet) {
      entities.add(entity);
      ++entitiesAdded;
      if (entitiesAdded >= filters.getLimit()) {
        return entities;
      }
    }
  }
  return entities;
}
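Stripped of the timeline-specific filtering, the sorting logic above is a reusable pattern: bucket items in a TreeMap whose comparator reverses the natural key order, then flatten the buckets newest-first until a limit is hit. Here is a self-contained sketch of just that pattern; the Item class and method names are made up for illustration, while the limit handling mirrors the code above.

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;

public final class NewestFirstLimiter {

  static final class Item {
    final String id;
    final long createdTime;
    Item(String id, long createdTime) {
      this.id = id;
      this.createdTime = createdTime;
    }
  }

  static List<Item> newestFirst(Collection<Item> items, long limit) {
    // A reversed comparator makes the TreeMap iterate largest key first,
    // just like the anonymous Comparator<Long> in getEntities().
    NavigableMap<Long, Set<Item>> buckets =
        new TreeMap<>(Comparator.<Long>reverseOrder());
    for (Item item : items) {
      Set<Item> bucket = buckets.get(item.createdTime);
      if (bucket == null) {
        bucket = new HashSet<>();
        buckets.put(item.createdTime, bucket);
      }
      bucket.add(item);
    }
    List<Item> out = new ArrayList<>();
    for (Set<Item> bucket : buckets.values()) {
      for (Item item : bucket) {
        out.add(item);
        if (out.size() >= limit) {
          return out; // stop as soon as the limit is reached
        }
      }
    }
    return out;
  }
}

Note that items sharing a created time land in the same unordered HashSet bucket, so a limit can cut ties arbitrarily; that is exactly the behavior testGetEntitiesWithLimit below exercises.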
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestFileSystemTimelineReaderImpl, method testGetEntitiesWithLimit.
@Test
public void testGetEntitiesWithLimit() throws Exception {
  Set<TimelineEntity> result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(2L, null, null, null, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(2, result.size());
  // Entities are returned in descending order of created time, so with a
  // limit of 2 only the two most recently created entities should appear.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_1") && !entity.getId().equals("id_4")) {
      Assert.fail("Entity not sorted by created time");
    }
  }
  result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(3L, null, null, null, null, null, null,
          null, null),
      new TimelineDataToRetrieve());
  // Even though two of the four entities have the same created time, one
  // of them is still left out because of the limit of 3.
  Assert.assertEquals(3, result.size());
}
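Because getEntities() collects its results into a HashSet, the descending creation-time order used internally is not preserved in iteration, which is why the test checks membership rather than position. A caller that wants the order back could re-sort, for example (a sketch, with java.util imports assumed in scope):

// Restore descending created-time order on the caller's side.
List<TimelineEntity> ordered = new ArrayList<>(result);
ordered.sort(Comparator.comparing(TimelineEntity::getCreatedTime,
    Comparator.nullsLast(Comparator.<Long>reverseOrder())));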
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestFileSystemTimelineReaderImpl, method testGetEntityCustomFields.
@Test
public void testGetEntityCustomFields() throws Exception {
  // Specified fields, in addition to the default view, will be returned.
  TimelineEntity result = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", "id_1"),
      new TimelineDataToRetrieve(null, null,
          EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS), null));
  Assert.assertEquals(
      (new TimelineEntity.Identifier("app", "id_1")).toString(),
      result.getIdentifier().toString());
  Assert.assertEquals((Long) 1425016502000L, result.getCreatedTime());
  Assert.assertEquals(3, result.getConfigs().size());
  Assert.assertEquals(3, result.getMetrics().size());
  Assert.assertEquals(2, result.getInfo().size());
  // No events will be returned, since Field.EVENTS was not requested.
  Assert.assertEquals(0, result.getEvents().size());
}
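For comparison, retrieving every field would presumably be a matter of passing a single catch-all value in the same EnumSet slot. The following is a sketch under the assumption that the Field enum carries an ALL value; this excerpt does not show the enum's definition.

// Hypothetical: retrieve all fields (events included) for the same entity.
TimelineEntity full = reader.getEntity(
    new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
        "app", "id_1"),
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));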
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestFileSystemTimelineReaderImpl, method testGetEntitiesByTimeWindows.
@Test
public void testGetEntitiesByTimeWindows() throws Exception {
  // Get entities based on the created-time start and end range.
  Set<TimelineEntity> result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, 1425016502030L, 1425016502060L, null,
          null, null, null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(1, result.size());
  // Only one entity, with ID id_4, should be returned.
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_4")) {
      Assert.fail("Incorrect filtering based on created time range");
    }
  }
  // Get entities when only the created-time end is specified.
  result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, null, 1425016502010L, null, null,
          null, null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(3, result.size());
  for (TimelineEntity entity : result) {
    if (entity.getId().equals("id_4")) {
      Assert.fail("Incorrect filtering based on created time range");
    }
  }
  // Get entities when only the created-time start is specified.
  result = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
          "app", null),
      new TimelineEntityFilters(null, 1425016502010L, null, null, null,
          null, null, null, null),
      new TimelineDataToRetrieve());
  Assert.assertEquals(1, result.size());
  for (TimelineEntity entity : result) {
    if (!entity.getId().equals("id_4")) {
      Assert.fail("Incorrect filtering based on created time range");
    }
  }
}
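The isTimeInRange() helper these tests exercise through getEntities() is not shown in this excerpt. A plausible shape for it, purely as a guess, consistent with the three cases above and assuming a null bound means that side of the window is unbounded:

// Plausible sketch of the isTimeInRange() check used by getEntities();
// the real Hadoop implementation is not shown in this excerpt.
private static boolean isTimeInRange(Long time, Long begin, Long end) {
  if (time == null) {
    return false; // assumption: entities without a created time never match
  }
  return (begin == null || time >= begin) && (end == null || time <= end);
}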