Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
From the class DataGeneratorForTest, method loadEntities:
static void loadEntities(HBaseTestingUtility util) throws IOException {
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "hello";
  String type = "world";
  entity.setId(id);
  entity.setType(type);
  Long cTime = 1425016502000L;
  entity.setCreatedTime(cTime);
  // add the info map in Timeline Entity
  Map<String, Object> infoMap = new HashMap<>();
  infoMap.put("infoMapKey1", "infoMapValue2");
  infoMap.put("infoMapKey2", 20);
  infoMap.put("infoMapKey3", 71.4);
  entity.addInfo(infoMap);
  // add the isRelatedToEntity info
  Set<String> isRelatedToSet = new HashSet<>();
  isRelatedToSet.add("relatedto1");
  Map<String, Set<String>> isRelatedTo = new HashMap<>();
  isRelatedTo.put("task", isRelatedToSet);
  entity.setIsRelatedToEntities(isRelatedTo);
  // add the relatesTo info
  Set<String> relatesToSet = new HashSet<String>();
  relatesToSet.add("relatesto1");
  relatesToSet.add("relatesto3");
  Map<String, Set<String>> relatesTo = new HashMap<>();
  relatesTo.put("container", relatesToSet);
  Set<String> relatesToSet11 = new HashSet<>();
  relatesToSet11.add("relatesto4");
  relatesTo.put("container1", relatesToSet11);
  entity.setRelatesToEntities(relatesTo);
  // add some config entries
  Map<String, String> conf = new HashMap<>();
  conf.put("config_param1", "value1");
  conf.put("config_param2", "value2");
  conf.put("cfg_param1", "value3");
  entity.addConfigs(conf);
  // add metrics
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues = new HashMap<>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000);
  metricValues.put(ts - 100000, 200000000);
  metricValues.put(ts - 80000, 300000000);
  metricValues.put(ts - 60000, 400000000);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 70000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m12 = new TimelineMetric();
  m12.setId("MAP1_BYTES");
  m12.addValue(ts, 50);
  metrics.add(m12);
  entity.addMetrics(metrics);
  TimelineEvent event = new TimelineEvent();
  event.setId("start_event");
  event.setTimestamp(ts);
  entity.addEvent(event);
  te.addEntity(entity);
  TimelineEntity entity1 = new TimelineEntity();
  String id1 = "hello1";
  entity1.setId(id1);
  entity1.setType(type);
  entity1.setCreatedTime(cTime + 20L);
  // add the info map in Timeline Entity
  Map<String, Object> infoMap1 = new HashMap<>();
  infoMap1.put("infoMapKey1", "infoMapValue1");
  infoMap1.put("infoMapKey2", 10);
  entity1.addInfo(infoMap1);
  // add event.
  TimelineEvent event11 = new TimelineEvent();
  event11.setId("end_event");
  event11.setTimestamp(ts);
  entity1.addEvent(event11);
  TimelineEvent event12 = new TimelineEvent();
  event12.setId("update_event");
  event12.setTimestamp(ts - 10);
  entity1.addEvent(event12);
  // add the isRelatedToEntity info
  Set<String> isRelatedToSet1 = new HashSet<>();
  isRelatedToSet1.add("relatedto3");
  isRelatedToSet1.add("relatedto5");
  Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
  isRelatedTo1.put("task1", isRelatedToSet1);
  Set<String> isRelatedToSet11 = new HashSet<>();
  isRelatedToSet11.add("relatedto4");
  isRelatedTo1.put("task2", isRelatedToSet11);
  entity1.setIsRelatedToEntities(isRelatedTo1);
  // add the relatesTo info
  Set<String> relatesToSet1 = new HashSet<String>();
  relatesToSet1.add("relatesto1");
  relatesToSet1.add("relatesto2");
  Map<String, Set<String>> relatesTo1 = new HashMap<>();
  relatesTo1.put("container", relatesToSet1);
  entity1.setRelatesToEntities(relatesTo1);
  // add some config entries
  Map<String, String> conf1 = new HashMap<>();
  conf1.put("cfg_param1", "value1");
  conf1.put("cfg_param2", "value2");
  entity1.addConfigs(conf1);
  // add metrics
  Set<TimelineMetric> metrics1 = new HashSet<>();
  TimelineMetric m2 = new TimelineMetric();
  m2.setId("MAP1_SLOT_MILLIS");
  Map<Long, Number> metricValues1 = new HashMap<>();
  long ts1 = System.currentTimeMillis();
  metricValues1.put(ts1 - 120000, 100000000);
  metricValues1.put(ts1 - 100000, 200000000);
  metricValues1.put(ts1 - 80000, 300000000);
  metricValues1.put(ts1 - 60000, 400000000);
  metricValues1.put(ts1 - 40000, 50000000000L);
  metricValues1.put(ts1 - 20000, 60000000000L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues1);
  metrics1.add(m2);
  entity1.addMetrics(metrics1);
  te.addEntity(entity1);
  TimelineEntity entity2 = new TimelineEntity();
  String id2 = "hello2";
  entity2.setId(id2);
  entity2.setType(type);
  entity2.setCreatedTime(cTime + 40L);
  TimelineEvent event21 = new TimelineEvent();
  event21.setId("update_event");
  event21.setTimestamp(ts - 20);
  entity2.addEvent(event21);
  Set<String> isRelatedToSet2 = new HashSet<>();
  isRelatedToSet2.add("relatedto3");
  Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
  isRelatedTo2.put("task1", isRelatedToSet2);
  entity2.setIsRelatedToEntities(isRelatedTo2);
  Map<String, Set<String>> relatesTo3 = new HashMap<>();
  Set<String> relatesToSet14 = new HashSet<>();
  relatesToSet14.add("relatesto7");
  relatesTo3.put("container2", relatesToSet14);
  entity2.setRelatesToEntities(relatesTo3);
  te.addEntity(entity2);
  HBaseTimelineWriterImpl hbi = null;
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(util.getConfiguration());
    hbi.start();
    String cluster = "cluster1";
    String user = "user1";
    String flow = "some_flow_name";
    String flowVersion = "AB7822C10F1111";
    long runid = 1002345678919L;
    String appName = "application_1231111111_1111";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.stop();
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
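For context, a minimal sketch of how a helper like this is typically driven from a test class. This is not part of the snippet above; it assumes JUnit 4, a shared HBaseTestingUtility field named util, and that the timeline schema is created via TimelineSchemaCreator.createAllTables, as done in the Hadoop timeline service test setup:

private static HBaseTestingUtility util;

@BeforeClass
public static void setupBeforeClass() throws Exception {
  // Spin up an in-process HBase mini-cluster, create the timeline service
  // schema, then load the sample entities written by loadEntities().
  util = new HBaseTestingUtility();
  util.startMiniCluster();
  TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
  DataGeneratorForTest.loadEntities(util);
}

@AfterClass
public static void tearDownAfterClass() throws Exception {
  util.shutdownMiniCluster();
}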
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testWriteApplicationToHBase:
@Test
public void testWriteApplicationToHBase() throws Exception {
  TimelineEntities te = new TimelineEntities();
  ApplicationEntity entity = new ApplicationEntity();
  String appId = "application_1000178881110_2002";
  entity.setId(appId);
  Long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  // add the info map in Timeline Entity
  Map<String, Object> infoMap = new HashMap<String, Object>();
  infoMap.put("infoMapKey1", "infoMapValue1");
  infoMap.put("infoMapKey2", 10);
  entity.addInfo(infoMap);
  // add the isRelatedToEntity info
  String key = "task";
  String value = "is_related_to_entity_id_here";
  Set<String> isRelatedToSet = new HashSet<String>();
  isRelatedToSet.add(value);
  Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
  isRelatedTo.put(key, isRelatedToSet);
  entity.setIsRelatedToEntities(isRelatedTo);
  // add the relatesTo info
  key = "container";
  value = "relates_to_entity_id_here";
  Set<String> relatesToSet = new HashSet<String>();
  relatesToSet.add(value);
  value = "relates_to_entity_id_here_Second";
  relatesToSet.add(value);
  Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
  relatesTo.put(key, relatesToSet);
  entity.setRelatesToEntities(relatesTo);
  // add some config entries
  Map<String, String> conf = new HashMap<String, String>();
  conf.put("config_param1", "value1");
  conf.put("config_param2", "value2");
  entity.addConfigs(conf);
  // add metrics
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000);
  metricValues.put(ts - 100000, 200000000);
  metricValues.put(ts - 80000, 300000000);
  metricValues.put(ts - 60000, 400000000);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 60000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity.addMetrics(metrics);
  // add aggregated metrics
  TimelineEntity aggEntity = new TimelineEntity();
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  aggEntity.setId(appId);
  aggEntity.setType(type);
  long cTime2 = 1425016502000L;
  aggEntity.setCreatedTime(cTime2);
  TimelineMetric aggMetric = new TimelineMetric();
  aggMetric.setId("MEM_USAGE");
  Map<Long, Number> aggMetricValues = new HashMap<Long, Number>();
  long aggTs = ts;
  aggMetricValues.put(aggTs - 120000, 102400000L);
  aggMetric.setType(Type.SINGLE_VALUE);
  aggMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
  aggMetric.setValues(aggMetricValues);
  Set<TimelineMetric> aggMetrics = new HashSet<>();
  aggMetrics.add(aggMetric);
  entity.addMetrics(aggMetrics);
  te.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    String cluster = "cluster_test_write_app";
    String user = "user1";
    String flow = "s!ome_f\tlow _n am!e";
    String flowVersion = "AB7822C10F1111";
    long runid = 1002345678919L;
    hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
    // Write entity again, this time without created time.
    entity = new ApplicationEntity();
    appId = "application_1000178881110_2002";
    entity.setId(appId);
    // add the info map in Timeline Entity
    Map<String, Object> infoMap1 = new HashMap<>();
    infoMap1.put("infoMapKey3", "infoMapValue1");
    entity.addInfo(infoMap1);
    te = new TimelineEntities();
    te.addEntity(entity);
    hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
    hbi.stop();
    infoMap.putAll(infoMap1);
    // retrieve the row
    ApplicationRowKey applicationRowKey =
        new ApplicationRowKey(cluster, user, flow, runid, appId);
    byte[] rowKey = applicationRowKey.getRowKey();
    Get get = new Get(rowKey);
    get.setMaxVersions(Integer.MAX_VALUE);
    Connection conn = ConnectionFactory.createConnection(c1);
    Result result = new ApplicationTable().getResult(c1, conn, get);
    assertTrue(result != null);
    assertEquals(17, result.size());
    // check the row key
    byte[] row1 = result.getRow();
    assertTrue(isApplicationRowKeyCorrect(row1, cluster, user, flow, runid,
        appId));
    // check info column family
    String id1 = ApplicationColumn.ID.readResult(result).toString();
    assertEquals(appId, id1);
    Long cTime1 = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
    assertEquals(cTime, cTime1);
    Map<String, Object> infoColumns =
        ApplicationColumnPrefix.INFO.readResults(result,
            new StringKeyConverter());
    assertEquals(infoMap, infoColumns);
    // Remember isRelatedTo is of type Map<String, Set<String>>
    for (Map.Entry<String, Set<String>> isRelatedToEntry
        : isRelatedTo.entrySet()) {
      Object isRelatedToValue =
          ApplicationColumnPrefix.IS_RELATED_TO.readResult(result,
              isRelatedToEntry.getKey());
      String compoundValue = isRelatedToValue.toString();
      // id7?id9?id6
      Set<String> isRelatedToValues =
          new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
      assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(),
          isRelatedToValues.size());
      for (String v : isRelatedToEntry.getValue()) {
        assertTrue(isRelatedToValues.contains(v));
      }
    }
    // RelatesTo
    for (Map.Entry<String, Set<String>> relatesToEntry
        : relatesTo.entrySet()) {
      String compoundValue = ApplicationColumnPrefix.RELATES_TO
          .readResult(result, relatesToEntry.getKey()).toString();
      // id3?id4?id5
      Set<String> relatesToValues =
          new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
      assertEquals(relatesTo.get(relatesToEntry.getKey()).size(),
          relatesToValues.size());
      for (String v : relatesToEntry.getValue()) {
        assertTrue(relatesToValues.contains(v));
      }
    }
    KeyConverter<String> stringKeyConverter = new StringKeyConverter();
    // Configuration
    Map<String, Object> configColumns =
        ApplicationColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
    assertEquals(conf, configColumns);
    NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
        ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result,
            stringKeyConverter);
    NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
    matchMetrics(metricValues, metricMap);
    // read the timeline entity using the reader this time. In metrics limit
    // specify Integer MAX_VALUE. A TIME_SERIES will be returned (if more than
    // one value exists for a metric).
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appId,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null,
            EnumSet.of(TimelineReader.Field.ALL), Integer.MAX_VALUE));
    assertNotNull(e1);
    // verify attributes
    assertEquals(appId, e1.getId());
    assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
    assertEquals(cTime, e1.getCreatedTime());
    Map<String, Object> infoMap2 = e1.getInfo();
    assertEquals(infoMap, infoMap2);
    Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
    assertEquals(isRelatedTo, isRelatedTo2);
    Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
    assertEquals(relatesTo, relatesTo2);
    Map<String, String> conf2 = e1.getConfigs();
    assertEquals(conf, conf2);
    Set<TimelineMetric> metrics2 = e1.getMetrics();
    assertEquals(2, metrics2.size());
    for (TimelineMetric metric2 : metrics2) {
      Map<Long, Number> metricValues2 = metric2.getValues();
      assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS")
          || metric2.getId().equals("MEM_USAGE"));
      if (metric2.getId().equals("MAP_SLOT_MILLIS")) {
        assertEquals(6, metricValues2.size());
        matchMetrics(metricValues, metricValues2);
      }
      if (metric2.getId().equals("MEM_USAGE")) {
        assertEquals(1, metricValues2.size());
        matchMetrics(aggMetricValues, metricValues2);
      }
    }
    // In metrics limit specify a value of 3. No more than 3 values for a
    // metric will be returned.
    e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appId,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null,
            EnumSet.of(TimelineReader.Field.ALL), 3));
    assertNotNull(e1);
    assertEquals(appId, e1.getId());
    assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
    assertEquals(conf, e1.getConfigs());
    metrics2 = e1.getMetrics();
    assertEquals(2, metrics2.size());
    for (TimelineMetric metric2 : metrics2) {
      Map<Long, Number> metricValues2 = metric2.getValues();
      assertTrue(metricValues2.size() <= 3);
      assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS")
          || metric2.getId().equals("MEM_USAGE"));
    }
    // Check if single value (latest value) instead of time series is returned
    // if metricslimit is not set (null), irrespective of number of metric
    // values.
    e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appId,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null,
            EnumSet.of(TimelineReader.Field.ALL), null));
    assertNotNull(e1);
    assertEquals(appId, e1.getId());
    assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
    assertEquals(cTime, e1.getCreatedTime());
    assertEquals(infoMap, e1.getInfo());
    assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
    assertEquals(relatesTo, e1.getRelatesToEntities());
    assertEquals(conf, e1.getConfigs());
    assertEquals(2, e1.getMetrics().size());
    for (TimelineMetric metric : e1.getMetrics()) {
      assertEquals(1, metric.getValues().size());
      assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
      assertTrue(metric.getId().equals("MAP_SLOT_MILLIS")
          || metric.getId().equals("MEM_USAGE"));
      assertEquals(1, metric.getValues().size());
      if (metric.getId().equals("MAP_SLOT_MILLIS")) {
        assertTrue(metric.getValues().containsKey(ts - 20000));
        assertEquals(metricValues.get(ts - 20000),
            metric.getValues().get(ts - 20000));
      }
      if (metric.getId().equals("MEM_USAGE")) {
        assertTrue(metric.getValues().containsKey(aggTs - 120000));
        assertEquals(aggMetricValues.get(aggTs - 120000),
            metric.getValues().get(aggTs - 120000));
      }
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
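The assertions above lean on a matchMetrics helper defined elsewhere in the test class. A minimal sketch of what such a helper needs to do, reconstructed only from its call sites above (the name and signature are taken from those calls; the body is an assumption):

private static void matchMetrics(Map<Long, Number> expected,
    Map<Long, Number> actual) {
  // Every timestamped value that was written must be read back unchanged.
  assertEquals(expected.size(), actual.size());
  for (Map.Entry<Long, Number> entry : expected.entrySet()) {
    Number value = actual.get(entry.getKey());
    assertNotNull("no value found for timestamp " + entry.getKey(), value);
    assertEquals(entry.getValue().longValue(), value.longValue());
  }
}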
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsConfigFilterPrefix:
@Test
public void testReadAppsConfigFilterPrefix() throws Exception {
  TimelineFilterList confFilterList = new TimelineFilterList();
  confFilterList.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "cfg_param1", "value1"));
  TimelineFilterList list = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          confFilterList, null, null),
      new TimelineDataToRetrieve(list, null, null, null));
  assertEquals(1, entities.size());
  int cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_",
          confKey.startsWith("cfg_"));
    }
  }
  assertEquals(2, cfgCnt);
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "cfg_param1", "value1"));
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "cfg_param2", "value2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "cfg_param1", "value3"));
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "config_param2", "value2"));
  TimelineFilterList confsToRetrieve = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_"));
  TimelineFilterList confFilterList1 =
      new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          confFilterList1, null, null),
      new TimelineDataToRetrieve(confsToRetrieve, null, null, null));
  assertEquals(2, entities.size());
  cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with config_",
          confKey.startsWith("config_"));
    }
  }
  assertEquals(2, cfgCnt);
}
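The pattern worth noting in this test is how TimelineFilterList composes: filters in a plain list are AND'ed together, Operator.OR makes the list disjunctive, and lists nest inside other lists. A condensed illustration, using only the classes and constructors already shown above:

// (cfg_param1 == value1 AND cfg_param2 == value2) ...
TimelineFilterList andList = new TimelineFilterList(
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1", "value1"),
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param2", "value2"));
// ... OR (config_param2 == value2)
TimelineFilterList orList = new TimelineFilterList(Operator.OR, andList,
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_param2",
        "value2"));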
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsInfoFilters:
@Test
public void testReadAppsInfoFilters() throws Exception {
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "infoMapKey3", 85.85));
  list1.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "infoMapKey1", "infoMapValue2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "infoMapKey1", "infoMapValue1"));
  list2.addFilter(new TimelineKeyValueFilter(TimelineCompareOp.EQUAL,
      "infoMapKey2", 10));
  TimelineFilterList infoFilterList =
      new TimelineFilterList(Operator.OR, list1, list2);
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(2, entities.size());
  int infoCnt = 0;
  for (TimelineEntity entity : entities) {
    infoCnt += entity.getInfo().size();
  }
  assertEquals(5, infoCnt);
  TimelineFilterList infoFilterList1 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey1",
          "infoMapValue1"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList1,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(1, entities.size());
  infoCnt = 0;
  for (TimelineEntity entity : entities) {
    infoCnt += entity.getInfo().size();
  }
  assertEquals(3, infoCnt);
  TimelineFilterList infoFilterList2 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey1",
          "infoMapValue2"),
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "infoMapKey3",
          85.85));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList2,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  TimelineFilterList infoFilterList3 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info",
          "some_value"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList3,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  TimelineFilterList infoFilterList4 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_info",
          "some_value"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList4,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(0, entities.size());
  TimelineFilterList infoFilterList5 = new TimelineFilterList(
      new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "dummy_info",
          "some_value", false));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, infoFilterList5,
          null, null, null),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
  assertEquals(3, entities.size());
}
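The last two queries differ only in a trailing boolean passed to TimelineKeyValueFilter, and the observed behavior (0 matches versus 3) suggests it is a key-must-exist flag for NOT_EQUAL comparisons. A side-by-side sketch; the interpretation of the fourth constructor argument is an assumption inferred from the test results above:

// Strict NOT_EQUAL: only entities that actually carry a "dummy_info" key
// with a different value match, so entities lacking the key are excluded.
TimelineKeyValueFilter strict = new TimelineKeyValueFilter(
    TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value");
// Relaxed NOT_EQUAL (assumed keyMustExist = false): entities that are
// missing "dummy_info" entirely also count as a match.
TimelineKeyValueFilter relaxed = new TimelineKeyValueFilter(
    TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value", false);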
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
From the class TestHBaseTimelineStorageApps, method testReadAppsEventFilters:
@Test
public void testReadAppsEventFilters() throws Exception {
  TimelineFilterList ef = new TimelineFilterList();
  ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "update_event"));
  ef.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL,
      "end_event"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(1, entities.size());
  int eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity id should have been application_1111111111_4444");
    }
  }
  assertEquals(1, eventCnt);
  TimelineFilterList ef1 = new TimelineFilterList();
  ef1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "update_event"));
  ef1.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL,
      "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef1),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity id should have been application_1111111111_4444");
    }
  }
  assertEquals(0, eventCnt);
  TimelineFilterList ef2 = new TimelineFilterList();
  ef2.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL,
      "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef2),
      new TimelineDataToRetrieve());
  assertEquals(2, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")
        && !timelineEntity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entity ids should have been application_1111111111_2222"
          + " and application_1111111111_4444");
    }
  }
  assertEquals(0, eventCnt);
  TimelineFilterList ef3 = new TimelineFilterList();
  ef3.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "update_event"));
  ef3.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "dummy_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef3),
      new TimelineDataToRetrieve());
  assertEquals(0, entities.size());
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "update_event"));
  list1.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "dummy_event"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineExistsFilter(TimelineCompareOp.EQUAL,
      "start_event"));
  TimelineFilterList ef4 = new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef4),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")) {
      Assert.fail("Entity id should have been application_1111111111_2222");
    }
  }
  assertEquals(0, eventCnt);
  TimelineFilterList ef5 = new TimelineFilterList();
  ef5.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL,
      "update_event"));
  ef5.addFilter(new TimelineExistsFilter(TimelineCompareOp.NOT_EQUAL,
      "end_event"));
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null, null,
          null, ef5),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  eventCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    eventCnt += timelineEntity.getEvents().size();
    if (!timelineEntity.getId().equals("application_1111111111_2222")) {
      Assert.fail("Entity id should have been application_1111111111_2222");
    }
  }
  assertEquals(0, eventCnt);
}
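One detail the eventCnt assertions depend on: the first query requests Field.ALL and therefore gets event data back (eventCnt == 1), while the later queries use the no-argument TimelineDataToRetrieve(), which fetches only the default view. The event filters are still evaluated against the stored events (entities are still matched), but the returned entities carry empty event sets, hence eventCnt == 0. A sketch of the two retrieval modes used above:

// Fetch everything, including events, for each matched entity.
TimelineDataToRetrieve full =
    new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null);
// Default view: entities are matched by the filters but come back
// without their events, configs, or metrics.
TimelineDataToRetrieve slim = new TimelineDataToRetrieve();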