Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testWriteFlowActivityOneFlow.
/**
 * Writes one application entity and checks the record for today in the flow
 * activity table.
 */
@Test
public void testWriteFlowActivityOneFlow() throws Exception {
  String cluster = "testWriteFlowActivityOneFlow_cluster1";
  String user = "testWriteFlowActivityOneFlow_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion = "A122110F135BC4";
  long runid = 1001111178919L;
  TimelineEntities te = new TimelineEntities();
  long appCreatedTime = 1425016501000L;
  TimelineEntity entityApp1 = TestFlowDataGenerator.getFlowApp1(appCreatedTime);
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_1111999999_1234";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // check the flow activity table directly
  checkFlowActivityTable(cluster, user, flow, flowVersion, runid, c1,
      appCreatedTime);
  // use the reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null, null,
            null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity entity = (FlowActivityEntity) e;
      NavigableSet<FlowRunEntity> flowRuns = entity.getFlowRuns();
      assertEquals(1, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        assertEquals(runid, flowRun.getRunId());
        assertEquals(flowVersion, flowRun.getVersion());
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
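All of the getEntities calls on this page use the nine-argument TimelineEntityFilters constructor positionally, which makes the long runs of nulls hard to read. Judging from the call sites here, the order appears to be (limit, createdTimeBegin, createdTimeEnd, relatesTo, isRelatedTo, infoFilters, configFilters, metricFilters, eventFilters); that ordering is inferred from usage, not confirmed against the class's source. A hypothetical helper that labels each slot:

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;

// Hypothetical convenience wrapper; the parameter roles are inferred from
// the call sites above, so treat the comments as assumptions.
static TimelineEntityFilters limitOnly(long limit) {
  return new TimelineEntityFilters(
      limit, // limit: cap on the number of entities returned
      null,  // createdTimeBegin: no lower bound on creation time
      null,  // createdTimeEnd: no upper bound on creation time
      null,  // relatesTo filters
      null,  // isRelatedTo filters
      null,  // info filters
      null,  // config filters
      null,  // metric filters
      null); // event filters
}

With this, the query above would read hbr.getEntities(context, limitOnly(10L), new TimelineDataToRetrieve()).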
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
The class TestHBaseStorageFlowActivity, method testFlowActivityTableOneFlowMultipleRunIds.
/**
 * Writes 3 applications, each with a different run id and version, for the
 * same {cluster, user, flow}.
 *
 * They should be inserted into a single record in the flow activity table,
 * with 3 columns, one per run id.
 */
@Test
public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
  String cluster = "testManyRunsFlowActivity_cluster1";
  String user = "testManyRunsFlowActivity_c_user1";
  String flow = "flow_activity_test_flow_name";
  String flowVersion1 = "A122110F135BC4";
  long runid1 = 11111111111L;
  String flowVersion2 = "A12222222222C4";
  long runid2 = 2222222222222L;
  String flowVersion3 = "A1333333333C4";
  long runid3 = 3333333333333L;
  TimelineEntities te = new TimelineEntities();
  long appCreatedTime = 1425016501000L;
  TimelineEntity entityApp1 = TestFlowDataGenerator.getFlowApp1(appCreatedTime);
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_11888888888_1111";
    hbi.write(cluster, user, flow, flowVersion1, runid1, appName, te);
    // write another application to this flow, but with a different
    // run id and version
    te = new TimelineEntities();
    te.addEntity(entityApp1);
    appName = "application_11888888888_2222";
    hbi.write(cluster, user, flow, flowVersion2, runid2, appName, te);
    // and a third application, again with its own run id and version
    te = new TimelineEntities();
    te.addEntity(entityApp1);
    appName = "application_11888888888_3333";
    hbi.write(cluster, user, flow, flowVersion3, runid3, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // check the flow activity table directly
  checkFlowActivityTableSeveralRuns(cluster, user, flow, c1, flowVersion1,
      runid1, flowVersion2, runid2, flowVersion3, runid3, appCreatedTime);
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, null, null, null, null,
            TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null),
        new TimelineEntityFilters(10L, null, null, null, null, null, null,
            null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity e : entities) {
      FlowActivityEntity flowActivity = (FlowActivityEntity) e;
      assertEquals(cluster, flowActivity.getCluster());
      assertEquals(user, flowActivity.getUser());
      assertEquals(flow, flowActivity.getFlowName());
      long dayTs =
          HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
      assertEquals(dayTs, flowActivity.getDate().getTime());
      Set<FlowRunEntity> flowRuns = flowActivity.getFlowRuns();
      assertEquals(3, flowRuns.size());
      for (FlowRunEntity flowRun : flowRuns) {
        long runId = flowRun.getRunId();
        String version = flowRun.getVersion();
        if (runId == runid1) {
          assertEquals(flowVersion1, version);
        } else if (runId == runid2) {
          assertEquals(flowVersion2, version);
        } else if (runId == runid3) {
          assertEquals(flowVersion3, version);
        } else {
          fail("unknown run id: " + runId);
        }
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
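The dayTs assertion works because the flow activity table buckets rows by day: getTopOfTheDayTimestamp truncates the app creation time to the start of its UTC day. A minimal sketch of that truncation, assuming plain modulo arithmetic over epoch milliseconds (the real implementation lives in HBaseTimelineStorageUtils and may differ):

// Hypothetical re-implementation for illustration only.
public final class DayBucket {
  private static final long MILLIS_ONE_DAY = 86400000L;

  // Drops the time-of-day component, leaving UTC midnight of the same day.
  static long topOfTheDay(long millisSinceEpoch) {
    return millisSinceEpoch - (millisSinceEpoch % MILLIS_ONE_DAY);
  }
}

For appCreatedTime = 1425016501000L this strips roughly 5.9 hours, yielding 1424995200000L, which is the value the getDate() assertion would compare against under this assumption.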
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testWriteFlowRunsMetricFields.
@Test
public void testWriteFlowRunsMetricFields() throws Exception {
  String cluster = "testWriteFlowRunsMetricFields_cluster1";
  String user = "testWriteFlowRunsMetricFields_user1";
  String flow = "testWriteFlowRunsMetricFields_flow_name";
  String flowVersion = "CF7022C10F1354";
  long runid = 1002345678919L;
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    String appName = "application_11111111111111_1111";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    // write another application with the same metrics to this flow
    te = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    te.addEntity(entityApp2);
    appName = "application_11111111111111_2222";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // check the flow run table directly
  checkFlowRunTable(cluster, user, flow, runid, c1);
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // without Field.METRICS, no metrics should come back
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, runid, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity timelineEntity : entities) {
      assertEquals(0, timelineEntity.getMetrics().size());
    }
    // with Field.METRICS, both metrics should come back
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, runid, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
            null));
    assertEquals(1, entities.size());
    for (TimelineEntity timelineEntity : entities) {
      Set<TimelineMetric> timelineMetrics = timelineEntity.getMetrics();
      assertEquals(2, timelineMetrics.size());
      for (TimelineMetric metric : timelineMetrics) {
        String id = metric.getId();
        Map<Long, Number> values = metric.getValues();
        assertEquals(1, values.size());
        Number value = null;
        for (Number n : values.values()) {
          value = n;
        }
        switch (id) {
          case METRIC1:
            assertEquals(141L, value);
            break;
          case METRIC2:
            assertEquals(57L, value);
            break;
          default:
            fail("unrecognized metric: " + id);
        }
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
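The contrast between the two reads is the point of this test: the same flow-run query returns no metrics with an empty TimelineDataToRetrieve, and both metrics once Field.METRICS is requested. A minimal sketch of the two retrieval specs, with parameter names inferred from the call sites on this page rather than taken from the documented signature:

import java.util.EnumSet;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;

// Default: identifiers only, so getMetrics() comes back empty.
TimelineDataToRetrieve idsOnly = new TimelineDataToRetrieve();

// Explicitly ask for metric columns.
TimelineDataToRetrieve withMetrics = new TimelineDataToRetrieve(
    null,                      // confs to retrieve: no config projection
    null,                      // metrics to retrieve: no metric-id projection
    EnumSet.of(Field.METRICS), // fields to retrieve: include metrics
    null);                     // metrics limit: default

Note also that the expected values (141 and 57) are read back per flow run, not per application: the flow run table aggregates the metric across the two applications written above, which is presumably where those totals come from.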
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testMetricFilters.
@Test
public void testMetricFilters() throws Exception {
  String cluster = "cluster1";
  String user = "user1";
  String flow = "flow_name1";
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678919L,
        "application_11111111111111_1111", te);
    // write another application with the same metrics to this flow
    te = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    te.addEntity(entityApp2);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678918L,
        "application_11111111111111_2222", te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // (METRIC1 >= 101) OR (METRIC1 < 43 AND METRIC2 == 57)
    TimelineFilterList list1 = new TimelineFilterList();
    list1.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
    TimelineFilterList list2 = new TimelineFilterList();
    list2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, METRIC1, 43));
    list2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.EQUAL, METRIC2, 57));
    TimelineFilterList metricFilterList =
        new TimelineFilterList(Operator.OR, list1, list2);
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
            metricFilterList, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
            null));
    assertEquals(2, entities.size());
    int metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(3, metricCnt);
    // METRIC1 <= 127 combined with METRIC2 != 30
    TimelineFilterList metricFilterList1 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, METRIC1,
            127),
        new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 30));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
            metricFilterList1, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
            null));
    assertEquals(1, entities.size());
    metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(2, metricCnt);
    // METRIC1 < 32 combined with METRIC2 != 57: matches no flow run
    TimelineFilterList metricFilterList2 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 32),
        new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 57));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
            metricFilterList2, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
            null));
    assertEquals(0, entities.size());
    // a filter on a metric id that was never written matches nothing
    TimelineFilterList metricFilterList3 = new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.EQUAL, "s_metric", 32));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
            metricFilterList3, null),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
            null));
    assertEquals(0, entities.size());
    // same OR filter as above, but only retrieve metrics whose id starts
    // with METRIC2's prefix
    TimelineFilterList list3 = new TimelineFilterList();
    list3.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
    TimelineFilterList list4 = new TimelineFilterList();
    list4.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, METRIC1, 43));
    list4.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.EQUAL, METRIC2, 57));
    TimelineFilterList metricFilterList4 =
        new TimelineFilterList(Operator.OR, list3, list4);
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
            METRIC2.substring(0, METRIC2.indexOf("_") + 1)));
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, null, null, null, null, null,
            metricFilterList4, null),
        new TimelineDataToRetrieve(null, metricsToRetrieve,
            EnumSet.of(Field.ALL), null));
    assertEquals(2, entities.size());
    metricCnt = 0;
    for (TimelineEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
    }
    assertEquals(1, metricCnt);
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
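A TimelineFilterList joins its child filters with an operator, and lists nest, so the OR of two inner lists above expresses an OR-of-ANDs over metric comparisons (the inner lists built without an explicit operator appear to default to AND, which is consistent with the asserted match counts). The addFilter calls can also be collapsed into the varargs constructor; a sketch, assuming METRIC1 and METRIC2 are the test's metric-id constants:

import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;

// (METRIC1 >= 101) OR (METRIC1 < 43 AND METRIC2 == 57)
TimelineFilterList either = new TimelineFilterList(Operator.OR,
    new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL,
            METRIC1, 101)),
    new TimelineFilterList(
        new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 43),
        new TimelineCompareFilter(TimelineCompareOp.EQUAL, METRIC2, 57)));

This builds the same tree as list1, list2, and metricFilterList in the test body.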
Use of org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters in project hadoop by apache.
The class TestHBaseTimelineStorageApps, method testReadAppsConfigFilterPrefix.
@Test
public void testReadAppsConfigFilterPrefix() throws Exception {
  TimelineFilterList confFilterList = new TimelineFilterList();
  confFilterList.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  TimelineFilterList list = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          confFilterList, null, null),
      new TimelineDataToRetrieve(list, null, null, null));
  assertEquals(1, entities.size());
  int cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with cfg_",
          confKey.startsWith("cfg_"));
    }
  }
  assertEquals(2, cfgCnt);
  TimelineFilterList list1 = new TimelineFilterList();
  list1.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value1"));
  list1.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param2", "value2"));
  TimelineFilterList list2 = new TimelineFilterList();
  list2.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "cfg_param1", "value3"));
  list2.addFilter(new TimelineKeyValueFilter(
      TimelineCompareOp.EQUAL, "config_param2", "value2"));
  TimelineFilterList confsToRetrieve = new TimelineFilterList(Operator.OR,
      new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_"));
  TimelineFilterList confFilterList1 =
      new TimelineFilterList(Operator.OR, list1, list2);
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
          null),
      new TimelineEntityFilters(null, null, null, null, null, null,
          confFilterList1, null, null),
      new TimelineDataToRetrieve(confsToRetrieve, null, null, null));
  assertEquals(2, entities.size());
  cfgCnt = 0;
  for (TimelineEntity entity : entities) {
    cfgCnt += entity.getConfigs().size();
    for (String confKey : entity.getConfigs().keySet()) {
      assertTrue("Config key returned should start with config_",
          confKey.startsWith("config_"));
    }
  }
  assertEquals(2, cfgCnt);
}
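The two filter lists in this test do different jobs: confFilterList is an entity-level predicate that decides which applications match, while the list passed into TimelineDataToRetrieve only projects which config keys come back for the matches. That is why the second query matches 2 entities through conditions on both cfg_- and config_-prefixed keys, yet returns only the 2 config_-prefixed ones. A minimal sketch separating predicate from projection, reusing the cfg_ prefix from the test data:

import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;

// Predicate: match only applications whose config has cfg_param1 == "value1".
TimelineFilterList predicate = new TimelineFilterList(
    new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "cfg_param1",
        "value1"));

// Projection: of the matches, return only config keys starting with "cfg_".
TimelineFilterList projection = new TimelineFilterList(Operator.OR,
    new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
TimelineDataToRetrieve retrieve =
    new TimelineDataToRetrieve(projection, null, null, null);

The predicate goes into the seventh (configFilters) slot of TimelineEntityFilters and the projection into the first slot of TimelineDataToRetrieve, mirroring the first query in this test.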