Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TimelineStoreTestUtils, method loadTestEntityData:
/**
* Load test entity data into the given store
*/
protected void loadTestEntityData() throws IOException {
beforeTs = System.currentTimeMillis() - 1;
TimelineEntities entities = new TimelineEntities();
Map<String, Set<Object>> primaryFilters = new HashMap<String, Set<Object>>();
Set<Object> l1 = new HashSet<Object>();
l1.add("username");
Set<Object> l2 = new HashSet<Object>();
l2.add(Integer.MAX_VALUE);
Set<Object> l3 = new HashSet<Object>();
l3.add("123abc");
Set<Object> l4 = new HashSet<Object>();
l4.add((long) Integer.MAX_VALUE + 1L);
primaryFilters.put("user", l1);
primaryFilters.put("appname", l2);
primaryFilters.put("other", l3);
primaryFilters.put("long", l4);
Map<String, Object> secondaryFilters = new HashMap<String, Object>();
secondaryFilters.put("startTime", 123456);
secondaryFilters.put("status", "RUNNING");
Map<String, Object> otherInfo1 = new HashMap<String, Object>();
otherInfo1.put("info1", "val1");
otherInfo1.putAll(secondaryFilters);
String entityId1 = "id_1";
String entityType1 = "type_1";
String entityId1b = "id_2";
String entityId2 = "id_2";
String entityType2 = "type_2";
String entityId4 = "id_4";
String entityType4 = "type_4";
String entityId5 = "id_5";
String entityType5 = "type_5";
String entityId6 = "id_6";
String entityId7 = "id_7";
String entityType7 = "type_7";
Map<String, Set<String>> relatedEntities = new HashMap<String, Set<String>>();
relatedEntities.put(entityType2, Collections.singleton(entityId2));
TimelineEvent ev3 = createEvent(789L, "launch_event", null);
TimelineEvent ev4 = createEvent(0L, "init_event", null);
List<TimelineEvent> events = new ArrayList<TimelineEvent>();
events.add(ev3);
events.add(ev4);
entities.setEntities(Collections.singletonList(createEntity(entityId2,
    entityType2, null, events, null, null, null, "domain_id_1")));
TimelinePutResponse response = store.put(entities);
assertEquals(0, response.getErrors().size());
TimelineEvent ev1 = createEvent(123L, "start_event", null);
entities.setEntities(Collections.singletonList(createEntity(entityId1,
    entityType1, 123L, Collections.singletonList(ev1), relatedEntities,
    primaryFilters, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
entities.setEntities(Collections.singletonList(createEntity(entityId1b,
    entityType1, null, Collections.singletonList(ev1), relatedEntities,
    primaryFilters, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
Map<String, Object> eventInfo = new HashMap<String, Object>();
eventInfo.put("event info 1", "val1");
TimelineEvent ev2 = createEvent(456L, "end_event", eventInfo);
Map<String, Object> otherInfo2 = new HashMap<String, Object>();
otherInfo2.put("info2", "val2");
entities.setEntities(Collections.singletonList(createEntity(entityId1,
    entityType1, null, Collections.singletonList(ev2), null, primaryFilters,
    otherInfo2, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
entities.setEntities(Collections.singletonList(createEntity(entityId1b,
    entityType1, 789L, Collections.singletonList(ev2), null, primaryFilters,
    otherInfo2, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
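// An entity with neither a start time nor any events cannot be assigned a
// start time; the put below should report a NO_START_TIME error.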
entities.setEntities(Collections.singletonList(createEntity("badentityid", "badentity", null, null, null, null, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(1, response.getErrors().size());
TimelinePutError error = response.getErrors().get(0);
assertEquals("badentityid", error.getEntityId());
assertEquals("badentity", error.getEntityType());
assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
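// Entity id_4 relates to id_5, which has not been stored yet; the store is
// expected to create id_5 implicitly in the same domain.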
relatedEntities.clear();
relatedEntities.put(entityType5, Collections.singleton(entityId5));
entities.setEntities(Collections.singletonList(createEntity(entityId4,
    entityType4, 42L, null, relatedEntities, null, null, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
relatedEntities.clear();
otherInfo1.put("info2", "val2");
entities.setEntities(Collections.singletonList(createEntity(entityId6,
    entityType1, 61L, null, relatedEntities, primaryFilters, otherInfo1,
    "domain_id_2")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
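// Entity id_7 lives in domain_id_2 but relates to id_1 in domain_id_1;
// cross-domain relations are rejected with FORBIDDEN_RELATION.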
relatedEntities.clear();
relatedEntities.put(entityType1, Collections.singleton(entityId1));
entities.setEntities(Collections.singletonList(createEntity(entityId7,
    entityType7, 62L, null, relatedEntities, null, null, "domain_id_2")));
response = store.put(entities);
assertEquals(1, response.getErrors().size());
assertEquals(entityType7, response.getErrors().get(0).getEntityType());
assertEquals(entityId7, response.getErrors().get(0).getEntityId());
assertEquals(TimelinePutError.FORBIDDEN_RELATION, response.getErrors().get(0).getErrorCode());
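// For the leveldb store, also load two legacy entities written without a
// domain ID, simulating data stored before timeline domains existed.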
if (store instanceof LeveldbTimelineStore) {
  LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
  entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_1",
      "OLD_ENTITY_TYPE_1", 63L, null, null, null, null, null)));
  leveldb.putWithNoDomainId(entities);
  entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_2",
      "OLD_ENTITY_TYPE_1", 64L, null, null, null, null, null)));
  leveldb.putWithNoDomainId(entities);
}
}
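The createEvent and createEntity helpers called throughout this method are defined elsewhere in TimelineStoreTestUtils and are not shown on this page. A minimal sketch of what such builders look like, assuming only the public TimelineEntity and TimelineEvent setters (the names and parameter order are inferred from the calls above, not copied from the Hadoop source):

// Sketch of the builder helpers assumed by loadTestEntityData(); imports of
// java.util.* and the timeline record classes are omitted, as in the snippets above.
private static TimelineEvent createEvent(long timestamp, String type,
    Map<String, Object> info) {
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(timestamp);
  event.setEventType(type);
  if (info != null) {
    event.setEventInfo(info);
  }
  return event;
}

private static TimelineEntity createEntity(String id, String type,
    Long startTime, List<TimelineEvent> events,
    Map<String, Set<String>> relatedEntities,
    Map<String, Set<Object>> primaryFilters,
    Map<String, Object> otherInfo, String domainId) {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(id);
  entity.setEntityType(type);
  entity.setStartTime(startTime);
  if (events != null) {
    entity.setEvents(events);
  }
  if (relatedEntities != null) {
    // addRelatedEntity copies into the entity's own map, so the caller may
    // reuse and clear the passed-in map afterwards (as the test does).
    for (Map.Entry<String, Set<String>> rel : relatedEntities.entrySet()) {
      for (String relatedId : rel.getValue()) {
        entity.addRelatedEntity(rel.getKey(), relatedId);
      }
    }
  }
  if (primaryFilters != null) {
    entity.setPrimaryFilters(primaryFilters);
  }
  if (otherInfo != null) {
    entity.setOtherInfo(otherInfo);
  }
  entity.setDomainId(domainId);
  return entity;
}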
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TestTimelineWebServices, method testPostEntitiesWithPrimaryFilter:
@Test
public void testPostEntitiesWithPrimaryFilter() throws Exception {
TimelineEntities entities = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
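// ENTITY_OWNER is a reserved system filter name; the put below is still
// expected to succeed (zero errors in the response).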
Map<String, Set<Object>> filters = new HashMap<String, Set<Object>>();
filters.put(TimelineStore.SystemFilter.ENTITY_OWNER.toString(), new HashSet<Object>());
entity.setPrimaryFilters(filters);
entity.setEntityId("test id 6");
entity.setEntityType("test type 6");
entity.setStartTime(System.currentTimeMillis());
entities.addEntity(entity);
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("timeline")
    .queryParam("user.name", "tester").accept(MediaType.APPLICATION_JSON)
    .type(MediaType.APPLICATION_JSON).post(ClientResponse.class, entities);
TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
Assert.assertEquals(0, putResponse.getErrors().size());
}
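Outside of web-service tests, the same POST is normally issued through the YARN TimelineClient rather than a hand-built Jersey request. A minimal sketch, assuming a reachable timeline server and a default Configuration (the entity identifiers are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;

public class TimelinePostExample {
  public static void main(String[] args) throws Exception {
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(new Configuration());
    client.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("example id");      // illustrative
      entity.setEntityType("example type");  // illustrative
      entity.setStartTime(System.currentTimeMillis());
      // putEntities reports per-entity problems in the response rather than throwing.
      TimelinePutResponse response = client.putEntities(entity);
      System.out.println("put errors: " + response.getErrors().size());
    } finally {
      client.stop();
    }
  }
}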
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TestLeveldbTimelineStore, method testDeleteEntitiesPrimaryFilters:
@Test
public void testDeleteEntitiesPrimaryFilters() throws IOException, InterruptedException {
Map<String, Set<Object>> primaryFilter = Collections.singletonMap("user", Collections.singleton((Object) "otheruser"));
TimelineEntities atsEntities = new TimelineEntities();
atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
    entityType1, 789L, Collections.singletonList(ev2), null, primaryFilter,
    null, domainId1)));
TimelinePutResponse response = store.put(atsEntities);
assertEquals(0, response.getErrors().size());
NameValuePair pfPair = new NameValuePair("user", "otheruser");
List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1", pfPair);
assertEquals(1, entities.size());
verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
    EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0), domainId1);
entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
assertEquals(3, entities.size());
verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, entities.get(0), domainId1);
verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, entities.get(1), domainId1);
verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES, primaryFilters, otherInfo, entities.get(2), domainId2);
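// A retention threshold earlier than every start time should discard nothing.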
((LeveldbTimelineStore) store).discardOldEntities(-123L);
assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
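// Raising the threshold past the entities' start times purges the entities
// along with their entity types and primary filter indexes.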
((LeveldbTimelineStore) store).discardOldEntities(123L);
assertEquals(0, getEntities("type_1").size());
assertEquals(0, getEntities("type_2").size());
assertEquals(0, ((LeveldbTimelineStore) store).getEntityTypes().size());
assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
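The getEntities and getEntitiesWithPrimaryFilter helpers used above are thin wrappers over the store's reader interface. A plausible reconstruction, assuming the test's store field and the TimelineReader.getEntities signature from org.apache.hadoop.yarn.server.timeline; treat it as a sketch rather than the verbatim test helper:

// Reconstruction of the test helpers; null means "no limit/window/filter".
private List<TimelineEntity> getEntities(String entityType) throws IOException {
  return store.getEntities(entityType, null, null, null, null, null, null,
      null, EnumSet.allOf(TimelineReader.Field.class), null).getEntities();
}

private List<TimelineEntity> getEntitiesWithPrimaryFilter(String entityType,
    NameValuePair primaryFilter) throws IOException {
  return store.getEntities(entityType, null, null, null, null, null,
      primaryFilter, null, EnumSet.allOf(TimelineReader.Field.class), null)
      .getEntities();
}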
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TestRollingLevelDBTimelineStore, method testRelatingToNonExistingEntity:
@Test
public void testRelatingToNonExistingEntity() throws IOException {
TimelineEntity entityToStore = new TimelineEntity();
entityToStore.setEntityType("TEST_ENTITY_TYPE_1");
entityToStore.setEntityId("TEST_ENTITY_ID_1");
entityToStore.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
entityToStore.addRelatedEntity("TEST_ENTITY_TYPE_2", "TEST_ENTITY_ID_2");
TimelineEntities entities = new TimelineEntities();
entities.addEntity(entityToStore);
store.put(entities);
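// Putting an entity that relates to a non-existent one should implicitly
// create the target in the DEFAULT domain, with the relation inverted.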
TimelineEntity entityToGet = store.getEntity("TEST_ENTITY_ID_2", "TEST_ENTITY_TYPE_2", null);
Assert.assertNotNull(entityToGet);
Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
Assert.assertEquals("TEST_ENTITY_TYPE_1", entityToGet.getRelatedEntities().keySet().iterator().next());
Assert.assertEquals("TEST_ENTITY_ID_1", entityToGet.getRelatedEntities().values().iterator().next().iterator().next());
}
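Passing null as the third argument to getEntity retrieves every field of the entity. A narrower read can select individual fields through the TimelineReader.Field enum; a small sketch against the same store (assumes the EnumSet and TimelineReader imports):

// Fetch only the related-entities field of the implicitly created entity.
TimelineEntity related = store.getEntity("TEST_ENTITY_ID_2",
    "TEST_ENTITY_TYPE_2", EnumSet.of(TimelineReader.Field.RELATED_ENTITIES));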
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TestTimelineWebServices, method testPostEntitiesToDefaultDomain:
@Test
public void testPostEntitiesToDefaultDomain() throws Exception {
AdminACLsManager oldAdminACLsManager = timelineACLsManager.setAdminACLsManager(adminACLsManager);
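// Swap in the test's admin ACLs manager for the duration of the test; the
// original manager is restored in the finally block.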
try {
TimelineEntities entities = new TimelineEntities();
TimelineEntity entity = new TimelineEntity();
entity.setEntityId("test id 7");
entity.setEntityType("test type 7");
entity.setStartTime(System.currentTimeMillis());
entities.addEntity(entity);
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("timeline")
    .queryParam("user.name", "anybody_1").accept(MediaType.APPLICATION_JSON)
    .type(MediaType.APPLICATION_JSON).post(ClientResponse.class, entities);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    response.getType().toString());
TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
Assert.assertNotNull(putResponse);
Assert.assertEquals(0, putResponse.getErrors().size());
// verify the entity exists in the store
response = r.path("ws").path("v1").path("timeline").path("test type 7")
    .path("test id 7").queryParam("user.name", "any_body_2")
    .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    response.getType().toString());
entity = response.getEntity(TimelineEntity.class);
Assert.assertNotNull(entity);
Assert.assertEquals("test id 7", entity.getEntityId());
Assert.assertEquals("test type 7", entity.getEntityType());
Assert.assertEquals(TimelineDataManager.DEFAULT_DOMAIN_ID, entity.getDomainId());
} finally {
timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
}
}
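Entities posted without a domain ID land in the DEFAULT domain, which is what this test verifies. To place entities in an access-controlled domain instead, the domain is registered first and each entity is tagged with its ID. A minimal sketch using TimelineClient (the domain ID and ACL strings are illustrative, and client is assumed to be an initialized, started TimelineClient as in the earlier sketch):

// Register a domain, then attach entities to it via setDomainId.
TimelineDomain domain = new TimelineDomain();
domain.setId("example_domain");
domain.setReaders("tester");  // users/groups allowed to read entities in this domain
domain.setWriters("tester");  // users/groups allowed to write into it
client.putDomain(domain);

TimelineEntity entity = new TimelineEntity();
entity.setEntityId("example id");
entity.setEntityType("example type");
entity.setStartTime(System.currentTimeMillis());
entity.setDomainId("example_domain");
client.putEntities(entity);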