Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testReadEntities:
@Test
public void testReadEntities() throws Exception {
  // read a single entity, retrieving all fields
  TimelineEntity entity = reader.getEntity(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", "hello"),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertNotNull(entity);
  assertEquals(3, entity.getConfigs().size());
  assertEquals(1, entity.getIsRelatedToEntities().size());
  // read all entities of this type under the same application and
  // aggregate per-field counts
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, "application_1231111111_1111", "world", null),
      new TimelineEntityFilters(),
      new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
  assertEquals(3, entities.size());
  int cfgCnt = 0;
  int metricCnt = 0;
  int infoCnt = 0;
  int eventCnt = 0;
  int relatesToCnt = 0;
  int isRelatedToCnt = 0;
  for (TimelineEntity timelineEntity : entities) {
    cfgCnt += (timelineEntity.getConfigs() == null) ? 0
        : timelineEntity.getConfigs().size();
    metricCnt += (timelineEntity.getMetrics() == null) ? 0
        : timelineEntity.getMetrics().size();
    infoCnt += (timelineEntity.getInfo() == null) ? 0
        : timelineEntity.getInfo().size();
    eventCnt += (timelineEntity.getEvents() == null) ? 0
        : timelineEntity.getEvents().size();
    relatesToCnt += (timelineEntity.getRelatesToEntities() == null) ? 0
        : timelineEntity.getRelatesToEntities().size();
    isRelatedToCnt += (timelineEntity.getIsRelatedToEntities() == null) ? 0
        : timelineEntity.getIsRelatedToEntities().size();
  }
  assertEquals(5, cfgCnt);
  assertEquals(3, metricCnt);
  assertEquals(5, infoCnt);
  assertEquals(4, eventCnt);
  assertEquals(4, relatesToCnt);
  assertEquals(4, isRelatedToCnt);
}
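For reference, the TimelineReaderContext constructor used throughout these examples takes its scoping fields in a fixed order. A minimal sketch, with the parameter names inferred from the getter calls in the reader snippets further down (getClusterId(), getUserId(), getFlowName(), getFlowRunId()):

  TimelineReaderContext context = new TimelineReaderContext(
      "cluster1",                      // clusterId
      "user1",                         // userId
      "some_flow_name",                // flowName
      1002345678919L,                  // flowRunId
      "application_1231111111_1111",   // appId
      "world",                         // entityType
      "hello");                        // entityId; null matches all entities of the type

Passing null for the entityId, as the getEntities() call above does, widens the query from a single entity to every entity of the given type.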
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
From the class TestHBaseTimelineStorageEntities, method testWriteEntityToHBase:
@Test
public void testWriteEntityToHBase() throws Exception {
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "hello";
  String type = "world";
  entity.setId(id);
  entity.setType(type);
  Long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  // add the info map in Timeline Entity
  Map<String, Object> infoMap = new HashMap<String, Object>();
  infoMap.put("infoMapKey1", "infoMapValue1");
  infoMap.put("infoMapKey2", 10);
  entity.addInfo(infoMap);
  // add the isRelatedToEntity info
  String key = "task";
  String value = "is_related_to_entity_id_here";
  Set<String> isRelatedToSet = new HashSet<String>();
  isRelatedToSet.add(value);
  Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
  isRelatedTo.put(key, isRelatedToSet);
  entity.setIsRelatedToEntities(isRelatedTo);
  // add the relatesTo info
  key = "container";
  value = "relates_to_entity_id_here";
  Set<String> relatesToSet = new HashSet<String>();
  relatesToSet.add(value);
  value = "relates_to_entity_id_here_Second";
  relatesToSet.add(value);
  Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
  relatesTo.put(key, relatesToSet);
  entity.setRelatesToEntities(relatesTo);
  // add some config entries
  Map<String, String> conf = new HashMap<String, String>();
  conf.put("config_param1", "value1");
  conf.put("config_param2", "value2");
  entity.addConfigs(conf);
  // add metrics
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues = new HashMap<Long, Number>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000);
  metricValues.put(ts - 100000, 200000000);
  metricValues.put(ts - 80000, 300000000);
  metricValues.put(ts - 60000, 400000000);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 60000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity.addMetrics(metrics);
  te.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    String cluster = "cluster_test_write_entity";
    String user = "user1";
    String flow = "some_flow_name";
    String flowVersion = "AB7822C10F1111";
    long runid = 1002345678919L;
    String appName = ApplicationId.newInstance(
        System.currentTimeMillis() + 9000000L, 1).toString();
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.stop();
    // scan the table and see that entity exists
    Scan s = new Scan();
    byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid,
        appName).getRowKeyPrefix();
    s.setStartRow(startRow);
    s.setMaxVersions(Integer.MAX_VALUE);
    Connection conn = ConnectionFactory.createConnection(c1);
    ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
    int rowCount = 0;
    int colCount = 0;
    KeyConverter<String> stringKeyConverter = new StringKeyConverter();
    for (Result result : scanner) {
      if (result != null && !result.isEmpty()) {
        rowCount++;
        colCount += result.size();
        byte[] row1 = result.getRow();
        assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName,
            entity));
        // check info column family
        String id1 = EntityColumn.ID.readResult(result).toString();
        assertEquals(id, id1);
        String type1 = EntityColumn.TYPE.readResult(result).toString();
        assertEquals(type, type1);
        Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result);
        assertEquals(cTime1, cTime);
        Map<String, Object> infoColumns = EntityColumnPrefix.INFO
            .readResults(result, new StringKeyConverter());
        assertEquals(infoMap, infoColumns);
        // Remember isRelatedTo is of type Map<String, Set<String>>
        for (Map.Entry<String, Set<String>> isRelatedToEntry
            : isRelatedTo.entrySet()) {
          Object isRelatedToValue = EntityColumnPrefix.IS_RELATED_TO
              .readResult(result, isRelatedToEntry.getKey());
          String compoundValue = isRelatedToValue.toString();
          // id7?id9?id6
          Set<String> isRelatedToValues = new HashSet<String>(
              Separator.VALUES.splitEncoded(compoundValue));
          assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(),
              isRelatedToValues.size());
          for (String v : isRelatedToEntry.getValue()) {
            assertTrue(isRelatedToValues.contains(v));
          }
        }
        // RelatesTo
        for (Map.Entry<String, Set<String>> relatesToEntry
            : relatesTo.entrySet()) {
          String compoundValue = EntityColumnPrefix.RELATES_TO
              .readResult(result, relatesToEntry.getKey()).toString();
          // id3?id4?id5
          Set<String> relatesToValues = new HashSet<String>(
              Separator.VALUES.splitEncoded(compoundValue));
          assertEquals(relatesTo.get(relatesToEntry.getKey()).size(),
              relatesToValues.size());
          for (String v : relatesToEntry.getValue()) {
            assertTrue(relatesToValues.contains(v));
          }
        }
        // Configuration
        Map<String, Object> configColumns = EntityColumnPrefix.CONFIG
            .readResults(result, stringKeyConverter);
        assertEquals(conf, configColumns);
        NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
            EntityColumnPrefix.METRIC.readResultsWithTimestamps(result,
                stringKeyConverter);
        NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
        matchMetrics(metricValues, metricMap);
      }
    }
    assertEquals(1, rowCount);
    assertEquals(16, colCount);
    // read the timeline entity using the reader this time
    TimelineEntity e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL),
            Integer.MAX_VALUE));
    Set<TimelineEntity> es1 = reader.getEntities(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL),
            Integer.MAX_VALUE));
    assertNotNull(e1);
    assertEquals(1, es1.size());
    // verify attributes
    assertEquals(id, e1.getId());
    assertEquals(type, e1.getType());
    assertEquals(cTime, e1.getCreatedTime());
    Map<String, Object> infoMap2 = e1.getInfo();
    assertEquals(infoMap, infoMap2);
    Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
    assertEquals(isRelatedTo, isRelatedTo2);
    Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
    assertEquals(relatesTo, relatesTo2);
    Map<String, String> conf2 = e1.getConfigs();
    assertEquals(conf, conf2);
    Set<TimelineMetric> metrics2 = e1.getMetrics();
    assertEquals(metrics, metrics2);
    for (TimelineMetric metric2 : metrics2) {
      Map<Long, Number> metricValues2 = metric2.getValues();
      matchMetrics(metricValues, metricValues2);
    }
    // read again with the default metricsLimit (null): each metric is now
    // returned as a SINGLE_VALUE containing only its latest data point
    e1 = reader.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, appName,
            entity.getType(), entity.getId()),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
    assertNotNull(e1);
    assertEquals(id, e1.getId());
    assertEquals(type, e1.getType());
    assertEquals(cTime, e1.getCreatedTime());
    assertEquals(infoMap, e1.getInfo());
    assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
    assertEquals(relatesTo, e1.getRelatesToEntities());
    assertEquals(conf, e1.getConfigs());
    for (TimelineMetric metric : e1.getMetrics()) {
      assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
      assertEquals(1, metric.getValues().size());
      assertTrue(metric.getValues().containsKey(ts - 20000));
      assertEquals(metricValues.get(ts - 20000),
          metric.getValues().get(ts - 20000));
    }
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
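The assertions above lean on a matchMetrics helper that is defined elsewhere in the test class and not shown in this excerpt. A hypothetical reconstruction of what such a comparison could look like, assuming both maps key metric values by millisecond timestamp:

  // Hypothetical sketch only; the real helper lives in the test class
  // and may differ in detail.
  private static void matchMetrics(Map<Long, Number> expected,
      Map<Long, Number> actual) {
    assertEquals(expected.size(), actual.size());
    for (Map.Entry<Long, Number> entry : expected.entrySet()) {
      Number actualValue = actual.get(entry.getKey());
      assertNotNull("no value found for timestamp " + entry.getKey(),
          actualValue);
      // compare as longs so boxed Integer and Long values of the same
      // magnitude compare equal
      assertEquals(entry.getValue().longValue(), actualValue.longValue());
    }
  }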
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
From the class TestHBaseStorageFlowRun, method testWriteFlowRunMetricsPrefix:
@Test
public void testWriteFlowRunMetricsPrefix() throws Exception {
  String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
  String user = "testWriteFlowRunMetricsPrefix_user1";
  String flow = "testWriteFlowRunMetricsPrefix_flow_name";
  String flowVersion = "CF7022C10F1354";
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_11111111111111_1111";
    hbi.write(cluster, user, flow, flowVersion, 1002345678919L, appName, te);
    // write another application with same metric to this flow
    te = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    te.addEntity(entityApp2);
    appName = "application_11111111111111_2222";
    hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // use the timeline reader to verify data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // retrieve only metrics whose ids begin with METRIC1's prefix, up to
    // and including the first underscore
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
            METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
    TimelineEntity entity = hbr.getEntity(
        new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
    assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
    Set<TimelineMetric> metrics = entity.getMetrics();
    assertEquals(1, metrics.size());
    for (TimelineMetric metric : metrics) {
      String id = metric.getId();
      Map<Long, Number> values = metric.getValues();
      assertEquals(1, values.size());
      Number value = null;
      for (Number n : values.values()) {
        value = n;
      }
      switch (id) {
        case METRIC1:
          assertEquals(40L, value);
          break;
        default:
          fail("unrecognized metric: " + id);
      }
    }
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
    assertEquals(2, entities.size());
    int metricCnt = 0;
    for (TimelineEntity timelineEntity : entities) {
      metricCnt += timelineEntity.getMetrics().size();
    }
    assertEquals(2, metricCnt);
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
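The TimelinePrefixFilter above keeps only metrics whose ids share METRIC1's prefix up to and including the first underscore. METRIC1's value is not shown in this excerpt; assuming for illustration it were "MAP_SLOT_MILLIS" (the metric id used in testWriteEntityToHBase above), the prefix computation would work out to:

  String metric1 = "MAP_SLOT_MILLIS";  // illustrative value only, not from this excerpt
  String prefix = metric1.substring(0, metric1.indexOf("_") + 1);  // "MAP_"
  // Wrapped in an OR TimelineFilterList, the reader then returns every
  // metric whose id starts with "MAP_".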
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
From the class ApplicationEntityReader, method getResults:
@Override
protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
    FilterList filterList) throws IOException {
  Scan scan = new Scan();
  TimelineReaderContext context = getContext();
  // Whether or not flowRunID is null doesn't matter, the
  // ApplicationRowKeyPrefix will do the right thing.
  RowKeyPrefix<ApplicationRowKey> applicationRowKeyPrefix =
      new ApplicationRowKeyPrefix(context.getClusterId(), context.getUserId(),
          context.getFlowName(), context.getFlowRunId());
  scan.setRowPrefixFilter(applicationRowKeyPrefix.getRowKeyPrefix());
  FilterList newList = new FilterList();
  newList.addFilter(new PageFilter(getFilters().getLimit()));
  if (filterList != null && !filterList.getFilters().isEmpty()) {
    newList.addFilter(filterList);
  }
  scan.setFilter(newList);
  scan.setMaxVersions(getDataToRetrieve().getMetricsLimit());
  return getTable().getResultScanner(hbaseConf, conn, scan);
}
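Two details of this scan are worth noting. The PageFilter built from getFilters().getLimit() caps how many rows the scan returns, and any caller-supplied filterList is added to the same FilterList afterwards (HBase's FilterList defaults to Operator.MUST_PASS_ALL, so both must pass). setMaxVersions is driven by the metrics limit because, in this schema, each point of a metric time series is stored as a separate cell version of the same column.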
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext in project hadoop by apache.
From the class FlowRunEntityReader, method getResult:
@Override
protected Result getResult(Configuration hbaseConf, Connection conn,
    FilterList filterList) throws IOException {
  TimelineReaderContext context = getContext();
  FlowRunRowKey flowRunRowKey = new FlowRunRowKey(context.getClusterId(),
      context.getUserId(), context.getFlowName(), context.getFlowRunId());
  byte[] rowKey = flowRunRowKey.getRowKey();
  Get get = new Get(rowKey);
  get.setMaxVersions(Integer.MAX_VALUE);
  if (filterList != null && !filterList.getFilters().isEmpty()) {
    get.setFilter(filterList);
  }
  return getTable().getResult(hbaseConf, conn, get);
}
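In contrast to the prefix Scan in getResults above, this reader issues a point Get: clusterId, userId, flowName, and flowRunId identify exactly one flow-run row (the exact byte encoding of that row key is handled by FlowRunRowKey), so no prefix matching is needed, and setMaxVersions(Integer.MAX_VALUE) requests every stored cell version so the full metric history on that row is readable.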