Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestTimelineWebServices, method testPostIncompleteEntities:
@Test
public void testPostIncompleteEntities() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  TimelineEntity entity1 = new TimelineEntity();
  entity1.setEntityId("test id 1");
  entity1.setEntityType("test type 1");
  entity1.setStartTime(System.currentTimeMillis());
  entity1.setDomainId("domain_id_1");
  entities.addEntity(entity1);
  // Add an entity with no id or type.
  entities.addEntity(new TimelineEntity());
  WebResource r = resource();
  // One of the entities has no id or type, so HTTP 400 will be returned.
  ClientResponse response = r.path("ws").path("v1").path("timeline")
      .queryParam("user.name", "tester")
      .accept(MediaType.APPLICATION_JSON)
      .type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  assertResponseStatusCode(Status.BAD_REQUEST, response.getStatusInfo());
}
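The test above posts through the raw Jersey REST client; application code would normally go through the YARN v1 TimelineClient instead. Below is a minimal sketch of an equivalent put, assuming a reachable timeline server: the client lifecycle and the putEntities call are the real client API, while the standalone wrapper class is illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustrative wrapper class; assumes a running timeline server.
public class TimelinePostSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(conf);
    client.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      // Both id and type must be set, or the server rejects the put.
      entity.setEntityId("test id 1");
      entity.setEntityType("test type 1");
      entity.setStartTime(System.currentTimeMillis());
      entity.setDomainId("domain_id_1");
      TimelinePutResponse response = client.putEntities(entity);
      System.out.println("put errors: " + response.getErrors().size());
    } finally {
      client.stop();
    }
  }
}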
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestTimelineWebServices, method testPostEntities:
@Test
public void testPostEntities() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("test id 1");
  entity.setEntityType("test type 1");
  entity.setStartTime(System.currentTimeMillis());
  entity.setDomainId("domain_id_1");
  entities.addEntity(entity);
  WebResource r = resource();
  // Without an owner, the put will be rejected with FORBIDDEN.
  ClientResponse response = r.path("ws").path("v1").path("timeline")
      .accept(MediaType.APPLICATION_JSON)
      .type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  assertResponseStatusCode(Status.FORBIDDEN, response.getStatusInfo());
  response = r.path("ws").path("v1").path("timeline")
      .queryParam("user.name", "tester")
      .accept(MediaType.APPLICATION_JSON)
      .type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
  Assert.assertNotNull(putResponse);
  Assert.assertEquals(0, putResponse.getErrors().size());
  // Verify the entity exists in the store.
  response = r.path("ws").path("v1").path("timeline")
      .path("test type 1").path("test id 1")
      .accept(MediaType.APPLICATION_JSON)
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  entity = response.getEntity(TimelineEntity.class);
  Assert.assertNotNull(entity);
  Assert.assertEquals("test id 1", entity.getEntityId());
  Assert.assertEquals("test type 1", entity.getEntityType());
}
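Note that a well-formed request can still fail per entity: the server answers 200 OK with a TimelinePutResponse listing any entities it could not store, which is why the test checks getErrors() rather than only the status code. A short hedged sketch of inspecting those errors follows; the TimelinePutError accessors and error-code constants are the real v1 API, while the logging helper itself is illustrative.

import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;

// Illustrative helper: report each per-entity failure in a put response.
static void logPutErrors(TimelinePutResponse putResponse) {
  for (TimelinePutError error : putResponse.getErrors()) {
    // Each error identifies the offending entity and a numeric code,
    // e.g. TimelinePutError.NO_START_TIME or TimelinePutError.IO_EXCEPTION.
    System.err.println("Failed to put " + error.getEntityType()
        + "/" + error.getEntityId() + ", code " + error.getErrorCode());
  }
}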
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestLogInfo, method setup:
@Before
public void setup() throws Exception {
  config.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR.toString());
  HdfsConfiguration hdfsConfig = new HdfsConfiguration();
  hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig).numDataNodes(1).build();
  fs = hdfsCluster.getFileSystem();
  fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
  // Create the application attempt directory and populate it with test data.
  Path testAppDirPath = getTestRootPath(TEST_ATTEMPT_DIR_NAME);
  fs.mkdirs(testAppDirPath, new FsPermission(FILE_LOG_DIR_PERMISSIONS));
  objMapper = PluginStoreTestUtils.createObjectMapper();
  TimelineEntities testEntities = PluginStoreTestUtils.generateTestEntities();
  writeEntitiesLeaveOpen(testEntities,
      new Path(testAppDirPath, TEST_ENTITY_FILE_NAME));
  testDomain = new TimelineDomain();
  testDomain.setId("domain_1");
  testDomain.setReaders(UserGroupInformation.getLoginUser().getUserName());
  testDomain.setOwner(UserGroupInformation.getLoginUser().getUserName());
  testDomain.setDescription("description");
  writeDomainLeaveOpen(testDomain,
      new Path(testAppDirPath, TEST_DOMAIN_FILE_NAME));
  // Also write a deliberately malformed file to exercise error handling.
  writeBrokenFile(new Path(testAppDirPath, TEST_BROKEN_FILE_NAME));
}
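The bodies of writeEntitiesLeaveOpen and writeDomainLeaveOpen are not shown here. Judging by the names, the entity helper serializes each entity as a separate JSON document, flushes to HDFS, and deliberately leaves the stream open so the file resembles a log that is still being written. A plausible reconstruction under those assumptions follows; the Jackson and HDFS calls exist as written, but the helper's actual body may differ.

import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;

// Hypothetical reconstruction: append one JSON document per entity,
// flush to HDFS, and intentionally keep the stream open so the file
// looks like a log that is still being appended to.
private void writeEntitiesLeaveOpen(TimelineEntities entities, Path path)
    throws IOException {
  FSDataOutputStream outStream = fs.create(path);
  JsonGenerator jsonGenerator =
      objMapper.getFactory().createGenerator(outStream);
  jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
  for (TimelineEntity entity : entities.getEntities()) {
    objMapper.writeValue(jsonGenerator, entity);
  }
  jsonGenerator.flush();
  outStream.hflush();
  // No close() here: the "leave open" part of the name is the point.
}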
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class PluginStoreTestUtils, method generateTestEntities:
/**
 * Create sample entities for testing.
 * @return two timeline entities in a {@link TimelineEntities} object
 */
static TimelineEntities generateTestEntities() {
  TimelineEntities entities = new TimelineEntities();
  Map<String, Set<Object>> primaryFilters =
      new HashMap<String, Set<Object>>();
  Set<Object> l1 = new HashSet<Object>();
  l1.add("username");
  Set<Object> l2 = new HashSet<Object>();
  l2.add(Integer.MAX_VALUE);
  Set<Object> l3 = new HashSet<Object>();
  l3.add("123abc");
  Set<Object> l4 = new HashSet<Object>();
  l4.add((long) Integer.MAX_VALUE + 1L);
  primaryFilters.put("user", l1);
  primaryFilters.put("appname", l2);
  primaryFilters.put("other", l3);
  primaryFilters.put("long", l4);
  Map<String, Object> secondaryFilters = new HashMap<String, Object>();
  secondaryFilters.put("startTime", 123456);
  secondaryFilters.put("status", "RUNNING");
  Map<String, Object> otherInfo1 = new HashMap<String, Object>();
  otherInfo1.put("info1", "val1");
  otherInfo1.putAll(secondaryFilters);
  String entityId1 = "id_1";
  String entityType1 = "type_1";
  String entityId2 = "id_2";
  String entityType2 = "type_2";
  Map<String, Set<String>> relatedEntities =
      new HashMap<String, Set<String>>();
  relatedEntities.put(entityType2, Collections.singleton(entityId2));
  TimelineEvent ev3 = createEvent(789L, "launch_event", null);
  TimelineEvent ev4 = createEvent(0L, "init_event", null);
  List<TimelineEvent> events = new ArrayList<TimelineEvent>();
  events.add(ev3);
  events.add(ev4);
  entities.addEntity(createEntity(entityId2, entityType2, 456L, events,
      null, null, null, "domain_id_1"));
  TimelineEvent ev1 = createEvent(123L, "start_event", null);
  entities.addEntity(createEntity(entityId1, entityType1, 123L,
      Collections.singletonList(ev1), relatedEntities, primaryFilters,
      otherInfo1, "domain_id_1"));
  return entities;
}
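The createEvent and createEntity helpers are used above but not shown. Their behavior can be inferred from the call sites: each one simply transfers its arguments onto a fresh record object. A hedged reconstruction follows; the setters are the real TimelineEntity and TimelineEvent API, while the helper signatures are inferred from how they are called.

import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;

// Inferred helper: wrap a timestamp, event type, and optional info map
// in a TimelineEvent.
static TimelineEvent createEvent(long timestamp, String type,
    Map<String, Object> info) {
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(timestamp);
  event.setEventType(type);
  event.setEventInfo(info);
  return event;
}

// Inferred helper: populate a TimelineEntity from the given fields;
// null arguments leave the corresponding field unset.
static TimelineEntity createEntity(String id, String type, Long startTime,
    List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
    Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
    String domainId) {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(id);
  entity.setEntityType(type);
  entity.setStartTime(startTime);
  entity.setEvents(events);
  entity.setRelatedEntities(relatedEntities);
  entity.setPrimaryFilters(primaryFilters);
  entity.setOtherInfo(otherInfo);
  entity.setDomainId(domainId);
  return entity;
}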
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
From the class TestEntityGroupFSTimelineStore, method testSummaryRead:
@Test
public void testSummaryRead() throws Exception {
  // Load data.
  EntityGroupFSTimelineStore.AppLogs appLogs =
      store.new AppLogs(mainTestAppId, mainTestAppDirPath, AppState.COMPLETED);
  MutableCounterLong summaryLogEntityRead =
      store.metrics.getGetEntityToSummaryOps();
  long numEntityReadBefore = summaryLogEntityRead.value();
  TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithStore(config, store);
  appLogs.scanForLogs();
  appLogs.parseSummaryLogs(tdm);
  // Verify single entity read.
  PluginStoreTestUtils.verifyTestEntities(tdm);
  // Verify multiple entities read.
  TimelineEntities entities = tdm.getEntities("type_1", null, null, null,
      null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertEquals(1, entities.getEntities().size());
  for (TimelineEntity entity : entities.getEntities()) {
    assertEquals((Long) 123L, entity.getStartTime());
  }
  // Verify metrics.
  assertEquals(numEntityReadBefore + 5L, summaryLogEntityRead.value());
}
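PluginStoreTestUtils.verifyTestEntities presumably reads the generated entities back one at a time, mirroring the bulk getEntities call above. A hedged sketch of such a single-entity read follows; TimelineDataManager.getEntity with this shape exists in the v1 timeline service, but the assertion values merely echo the test data generated earlier.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import java.util.EnumSet;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.timeline.TimelineDataManager;
import org.apache.hadoop.yarn.server.timeline.TimelineReader;

// Illustrative check: fetch one entity by type and id with all fields.
static void verifySingleEntity(TimelineDataManager tdm) throws Exception {
  TimelineEntity entity = tdm.getEntity("type_1", "id_1",
      EnumSet.allOf(TimelineReader.Field.class),
      UserGroupInformation.getLoginUser());
  assertNotNull(entity);
  // generateTestEntities created the type_1 entity with start time 123.
  assertEquals((Long) 123L, entity.getStartTime());
}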