Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testFilterFlowRunsByCreatedTime.
@Test
public void testFilterFlowRunsByCreatedTime() throws Exception {
  String cluster = "cluster2";
  String user = "user2";
  String flow = "flow_name2";
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entityApp1 =
      TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
  entityApp1.setCreatedTime(1425016501000L);
  te.addEntity(entityApp1);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678919L,
        "application_11111111111111_1111", te);
    // write another application with the same metric to this flow
    te = new TimelineEntities();
    TimelineEntity entityApp2 =
        TestFlowDataGenerator.getEntityMetricsApp2(System.currentTimeMillis());
    entityApp2.setCreatedTime(1425016502000L);
    te.addEntity(entityApp2);
    hbi.write(cluster, user, flow, "CF7022C10F1354", 1002345678918L,
        "application_11111111111111_2222", te);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // both runs fall inside the created-time window
    Set<TimelineEntity> entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, 1425016501000L, 1425016502001L,
            null, null, null, null, null, null),
        new TimelineDataToRetrieve());
    assertEquals(2, entities.size());
    for (TimelineEntity entity : entities) {
      if (!entity.getId().equals("user2@flow_name2/1002345678918")
          && !entity.getId().equals("user2@flow_name2/1002345678919")) {
        fail("Entities with flow runs 1002345678918 and 1002345678919 "
            + "should be present.");
      }
    }
    // with only a lower bound, only the later-created run matches
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, 1425016501050L, null, null, null,
            null, null, null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity entity : entities) {
      if (!entity.getId().equals("user2@flow_name2/1002345678918")) {
        fail("Entity with flow run 1002345678918 should be present.");
      }
    }
    // with only an upper bound, only the earlier-created run matches
    entities = hbr.getEntities(
        new TimelineReaderContext(cluster, user, flow, null, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineEntityFilters(null, null, 1425016501050L, null, null,
            null, null, null, null),
        new TimelineDataToRetrieve());
    assertEquals(1, entities.size());
    for (TimelineEntity entity : entities) {
      if (!entity.getId().equals("user2@flow_name2/1002345678919")) {
        fail("Entity with flow run 1002345678919 should be present.");
      }
    }
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
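The nine-argument TimelineEntityFilters constructor above is easy to misread. Judging from how this test uses it, the positional parameters appear to be limit, createdTimeBegin, createdTimeEnd, relatesTo, isRelatedTo, infoFilters, configFilters, metricFilters and eventFilters; the helper below is a minimal sketch under that assumption (the helper name and parameter mapping are ours, not a documented contract). Note also that the asserted IDs suggest flow run entities are identified as user@flowName/runId, e.g. user2@flow_name2/1002345678919.

// Hypothetical helper: builds a TimelineEntityFilters that constrains only
// the created-time window. The positional-argument mapping is inferred from
// the test above, not from documented API.
private static TimelineEntityFilters createdTimeWindow(Long beginMs, Long endMs) {
  return new TimelineEntityFilters(
      null,     // limit: no cap on the number of entities returned
      beginMs,  // createdTimeBegin: null means no lower bound
      endMs,    // createdTimeEnd: null means no upper bound
      null, null, null, null, null, null);  // no relation/info/config/metric/event filters
}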
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testWriteFlowRunFlush.
@Test
public void testWriteFlowRunFlush() throws Exception {
  String cluster = "atestFlushFlowRun_cluster1";
  String user = "atestFlushFlowRun__user1";
  String flow = "atestFlushFlowRun_flow_name";
  String flowVersion = "AF1021C19F1351";
  long runid = 1449526652000L;
  int start = 10;
  int count = 20000;
  int appIdSuffix = 1;
  HBaseTimelineWriterImpl hbi = null;
  long insertTs = 1449796654827L - count;
  long minTS = insertTs + 1;
  long startTs = insertTs;
  Configuration c1 = util.getConfiguration();
  TimelineEntities te1 = null;
  TimelineEntity entityApp1 = null;
  TimelineEntity entityApp2 = null;
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    for (int i = start; i < count; i++) {
      String appName = "application_1060350000000_" + appIdSuffix;
      insertTs++;
      te1 = new TimelineEntities();
      entityApp1 = TestFlowDataGenerator.getMinFlushEntity(insertTs);
      te1.addEntity(entityApp1);
      entityApp2 = TestFlowDataGenerator.getMaxFlushEntity(insertTs);
      te1.addEntity(entityApp2);
      hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
      Thread.sleep(1);
      // write a second application in the same iteration
      appName = "application_1001199480000_7" + appIdSuffix;
      insertTs++;
      appIdSuffix++;
      te1 = new TimelineEntities();
      entityApp1 = TestFlowDataGenerator.getMinFlushEntity(insertTs);
      te1.addEntity(entityApp1);
      entityApp2 = TestFlowDataGenerator.getMaxFlushEntity(insertTs);
      te1.addEntity(entityApp2);
      hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
      // flush periodically and verify the partially written data
      if (i % 1000 == 0) {
        hbi.flush();
        checkMinMaxFlush(c1, minTS, startTs, count, cluster, user, flow,
            runid, false);
      }
    }
  } finally {
    if (hbi != null) {
      hbi.flush();
      hbi.close();
    }
    // verify once more after the final flush
    checkMinMaxFlush(c1, minTS, startTs, count, cluster, user, flow, runid,
        true);
  }
}
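Stripped of the load loop, the writer lifecycle this test exercises is short. A minimal sketch of the same init/write/flush/close sequence, assuming a Configuration and a populated TimelineEntities are already in hand (all variable names here are placeholders of ours):

HBaseTimelineWriterImpl writer = null;
try {
  writer = new HBaseTimelineWriterImpl();
  writer.init(conf);  // conf: an HBase-enabled Configuration
  writer.write(cluster, user, flow, flowVersion, runid, appName, entities);
  writer.flush();     // push buffered mutations out to HBase
} finally {
  if (writer != null) {
    writer.close();   // release the underlying HBase connection
  }
}

The test flushes every 1000 iterations precisely so that checkMinMaxFlush can observe the partially written state before the final flush in the finally block.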
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestHBaseStorageFlowRun, method testWriteFlowRunMinMax.
/**
 * Writes 4 timeline entities belonging to one flow run through the
 * {@link HBaseTimelineWriterImpl}, then checks the flow run table contents.
 *
 * The first entity has a created event, metrics, and a finish event.
 *
 * The second entity has a created event and the smallest start time; this
 * should become the start time of the flow run.
 *
 * The third entity has a finish event and the maximum end time; this should
 * become the end time of the flow run.
 *
 * The fourth entity has a created event whose start time is greater than
 * the minimum start time.
 */
@Test
public void testWriteFlowRunMinMax() throws Exception {
  TimelineEntities te = new TimelineEntities();
  te.addEntity(TestFlowDataGenerator.getEntity1());
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  String cluster = "testWriteFlowRunMinMaxToHBase_cluster1";
  String user = "testWriteFlowRunMinMaxToHBase_user1";
  String flow = "testing_flowRun_flow_name";
  String flowVersion = "CF7022C10F1354";
  long runid = 1002345678919L;
  String appName = "application_100000000000_1111";
  long minStartTs = 1425026900000L;
  long greaterStartTs = 30000000000000L;
  long endTs = 1439750690000L;
  TimelineEntity entityMinStartTime =
      TestFlowDataGenerator.getEntityMinStartTime(minStartTs);
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    // write another entity with the right min start time
    te = new TimelineEntities();
    te.addEntity(entityMinStartTime);
    appName = "application_100000000000_3333";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    // write another entity for the max end time
    TimelineEntity entityMaxEndTime =
        TestFlowDataGenerator.getEntityMaxEndTime(endTs);
    te = new TimelineEntities();
    te.addEntity(entityMaxEndTime);
    appName = "application_100000000000_4444";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    // write another entity with a greater start time
    TimelineEntity entityGreaterStartTime =
        TestFlowDataGenerator.getEntityGreaterStartTime(greaterStartTs);
    te = new TimelineEntities();
    te.addEntity(entityGreaterStartTime);
    appName = "application_1000000000000000_2222";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    // flush everything to HBase
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
  Connection conn = ConnectionFactory.createConnection(c1);
  // check the flow run table
  Table table1 =
      conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
  // read the row back and verify we get the right min and max timestamps
  byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
  Get g = new Get(startRow);
  g.addColumn(FlowRunColumnFamily.INFO.getBytes(),
      FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes());
  g.addColumn(FlowRunColumnFamily.INFO.getBytes(),
      FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes());
  Result r1 = table1.get(g);
  assertNotNull(r1);
  assertTrue(!r1.isEmpty());
  Map<byte[], byte[]> values =
      r1.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
  assertEquals(2, r1.size());
  long starttime = Bytes.toLong(
      values.get(FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes()));
  assertEquals(minStartTs, starttime);
  assertEquals(endTs, Bytes.toLong(
      values.get(FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes())));
  // use the timeline reader to verify the data
  HBaseTimelineReaderImpl hbr = null;
  try {
    hbr = new HBaseTimelineReaderImpl();
    hbr.init(c1);
    hbr.start();
    // get the flow run entity
    TimelineEntity entity = hbr.getEntity(
        new TimelineReaderContext(cluster, user, flow, runid, null,
            TimelineEntityType.YARN_FLOW_RUN.toString(), null),
        new TimelineDataToRetrieve());
    assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
    FlowRunEntity flowRun = (FlowRunEntity) entity;
    assertEquals(minStartTs, flowRun.getStartTime());
    assertEquals(endTs, flowRun.getMaxEndTime());
  } finally {
    if (hbr != null) {
      hbr.close();
    }
  }
}
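The raw-table verification above can be distilled into a reusable helper. The sketch below fetches the MIN_START_TIME and MAX_END_TIME cells for one flow run row; the helper name is ours, but every call in it appears verbatim in the test:

// Hypothetical helper: read the coprocessor-maintained start/end bounds
// for a flow run directly from the flow run table.
static long[] readFlowRunBounds(Connection conn, String cluster, String user,
    String flow, long runid) throws IOException {
  Table table = conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
  byte[] rowKey = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
  Get get = new Get(rowKey);
  get.addColumn(FlowRunColumnFamily.INFO.getBytes(),
      FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes());
  get.addColumn(FlowRunColumnFamily.INFO.getBytes(),
      FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes());
  Map<byte[], byte[]> values =
      table.get(get).getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
  return new long[] {
      Bytes.toLong(values.get(FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes())),
      Bytes.toLong(values.get(FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes()))
  };
}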
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestHBaseTimelineStorageApps, method testFilterAppsByCreatedTime.
@Test
public void testFilterAppsByCreatedTime() throws Exception {
  // both bounds set: three apps fall inside the window
  Set<TimelineEntity> entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null,
          null, null, null, null, null),
      new TimelineDataToRetrieve());
  assertEquals(3, entities.size());
  for (TimelineEntity entity : entities) {
    if (!entity.getId().equals("application_1111111111_2222")
        && !entity.getId().equals("application_1111111111_3333")
        && !entity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Entities with ids application_1111111111_2222, "
          + "application_1111111111_3333 and application_1111111111_4444"
          + " should be present");
    }
  }
  // lower bound only: two apps were created at or after it
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, 1425016502015L, null, null, null,
          null, null, null, null),
      new TimelineDataToRetrieve());
  assertEquals(2, entities.size());
  for (TimelineEntity entity : entities) {
    if (!entity.getId().equals("application_1111111111_3333")
        && !entity.getId().equals("application_1111111111_4444")) {
      Assert.fail("Apps with ids application_1111111111_3333 and"
          + " application_1111111111_4444 should be present");
    }
  }
  // upper bound only: one app was created before it
  entities = reader.getEntities(
      new TimelineReaderContext("cluster1", "user1", "some_flow_name",
          1002345678919L, null,
          TimelineEntityType.YARN_APPLICATION.toString(), null),
      new TimelineEntityFilters(null, null, 1425016502015L, null, null,
          null, null, null, null),
      new TimelineDataToRetrieve());
  assertEquals(1, entities.size());
  for (TimelineEntity entity : entities) {
    if (!entity.getId().equals("application_1111111111_2222")) {
      Assert.fail("App with id application_1111111111_2222 should"
          + " be present");
    }
  }
}
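The TimelineReaderContext constructor used throughout these tests also takes positional arguments. From the calls above, the mapping appears to be the one annotated below; treat the annotation as inferred from usage rather than documented:

// Inferred positional mapping for TimelineReaderContext, annotated on the
// first query of this test:
TimelineReaderContext context = new TimelineReaderContext(
    "cluster1",        // clusterId
    "user1",           // userId
    "some_flow_name",  // flowName
    1002345678919L,    // flowRunId
    null,              // appId: null when listing across applications
    TimelineEntityType.YARN_APPLICATION.toString(),  // entityType
    null);             // entityId: null when listing, set for point lookups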
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestHBaseTimelineStorageApps, method testNonIntegralMetricValues.
@Test
public void testNonIntegralMetricValues() throws IOException {
  TimelineEntities teApp = new TimelineEntities();
  ApplicationEntity entityApp = new ApplicationEntity();
  String appId = "application_1000178881110_2002";
  entityApp.setId(appId);
  entityApp.setCreatedTime(1425016501000L);
  // add metrics with floating point values
  Set<TimelineMetric> metricsApp = new HashSet<>();
  TimelineMetric mApp = new TimelineMetric();
  mApp.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricAppValues = new HashMap<Long, Number>();
  long ts = System.currentTimeMillis();
  metricAppValues.put(ts - 20, 10.5);
  metricAppValues.put(ts - 10, 20.5);
  mApp.setType(Type.TIME_SERIES);
  mApp.setValues(metricAppValues);
  metricsApp.add(mApp);
  entityApp.addMetrics(metricsApp);
  teApp.addEntity(entityApp);
  TimelineEntities teEntity = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  entity.setId("hello");
  entity.setType("world");
  entity.setCreatedTime(1425016501000L);
  // add a metric with a floating point value
  Set<TimelineMetric> metricsEntity = new HashSet<>();
  TimelineMetric mEntity = new TimelineMetric();
  mEntity.setId("MAP_SLOT_MILLIS");
  mEntity.addValue(ts - 20, 10.5);
  metricsEntity.add(mEntity);
  entity.addMetrics(metricsEntity);
  teEntity.addEntity(entity);
  HBaseTimelineWriterImpl hbi = null;
  try {
    Configuration c1 = util.getConfiguration();
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.start();
    // writing the application entity should fail
    try {
      hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teApp);
      Assert.fail("Expected an exception as metric values are non-integral");
    } catch (IOException e) {
      // expected
    }
    // writing the generic entity should fail as well
    try {
      hbi.write("c1", "u1", "f1", "v1", 1002345678919L, appId, teEntity);
      Assert.fail("Expected an exception as metric values are non-integral");
    } catch (IOException e) {
      // expected
    }
    hbi.stop();
  } finally {
    if (hbi != null) {
      hbi.stop();
      hbi.close();
    }
  }
}
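By contrast, the test's premise is that integral metric values are the accepted shape. A minimal sketch of a metric the writer should accept, using only the TimelineMetric calls the test itself uses (the metric id mirrors the test; the values are ours):

TimelineMetric metric = new TimelineMetric();
metric.setId("MAP_SLOT_MILLIS");
metric.setType(Type.TIME_SERIES);
Map<Long, Number> values = new HashMap<Long, Number>();
long now = System.currentTimeMillis();
values.put(now - 20, 10L);  // long values, not 10.5
values.put(now - 10, 20L);
metric.setValues(values);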