Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by Apache.
From the class RollingLevelDBTimelineStore, method put:
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Starting put");
  }
  TimelinePutResponse response = new TimelinePutResponse();
  TreeMap<Long, RollingWriteBatch> entityUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  TreeMap<Long, RollingWriteBatch> indexUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  long entityCount = 0;
  long indexCount = 0;
  try {
    // serialize each entity into the write batch of its rolling period
    for (TimelineEntity entity : entities.getEntities()) {
      entityCount += putEntities(entityUpdates, indexUpdates, entity, response);
    }
    // flush the entity write batches to leveldb
    for (RollingWriteBatch entityUpdate : entityUpdates.values()) {
      entityUpdate.write();
    }
    // flush the index write batches to leveldb
    for (RollingWriteBatch indexUpdate : indexUpdates.values()) {
      indexUpdate.write();
    }
  } finally {
    // release the resources held by every write batch
    for (RollingWriteBatch entityRollingWriteBatch : entityUpdates.values()) {
      entityRollingWriteBatch.close();
    }
    for (RollingWriteBatch indexRollingWriteBatch : indexUpdates.values()) {
      indexRollingWriteBatch.close();
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Put " + entityCount + " new leveldb entity entries and "
        + indexCount + " new leveldb index entries from "
        + entities.getEntities().size() + " timeline entities");
  }
  return response;
}
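For context, a caller of this store builds a TimelineEntities batch and then checks the returned TimelinePutResponse for per-entity errors. The following is a minimal sketch, not taken from the Hadoop sources: it assumes store is an already started RollingLevelDBTimelineStore, LOG is the class logger, and the entity type and id are made up for illustration.

// Minimal illustrative sketch (not from the Hadoop sources).
// Assumes "store" is an initialized RollingLevelDBTimelineStore and "LOG" is the class logger.
TimelineEntity entity = new TimelineEntity();
entity.setEntityType("MY_APP_TYPE");   // hypothetical type
entity.setEntityId("app_0001");        // hypothetical id
entity.setStartTime(System.currentTimeMillis());
entity.setDomainId("domain_id_1");
TimelineEntities batch = new TimelineEntities();
batch.addEntity(entity);
TimelinePutResponse resp = store.put(batch);
for (TimelinePutResponse.TimelinePutError err : resp.getErrors()) {
  LOG.warn("Failed to store " + err.getEntityId() + "/" + err.getEntityType()
      + ", error code " + err.getErrorCode());
}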
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by Apache.
From the class TestTimelineDataManager, method testUpdatingOldEntityWithoutDomainId:
@Test
public void testUpdatingOldEntityWithoutDomainId() throws Exception {
  // Set the domain to the default domain when updating
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityType("OLD_ENTITY_TYPE_1");
  entity.setEntityId("OLD_ENTITY_ID_1");
  entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
  entity.addOtherInfo("NEW_OTHER_INFO_KEY", "NEW_OTHER_INFO_VALUE");
  TimelineEntities entities = new TimelineEntities();
  entities.addEntity(entity);
  TimelinePutResponse response =
      dataManaer.postEntities(entities, UserGroupInformation.getCurrentUser());
  Assert.assertEquals(0, response.getErrors().size());
  entity = store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
  Assert.assertNotNull(entity);
  // Even in leveldb, the domain is updated to the default domain Id
  Assert.assertEquals(TimelineDataManager.DEFAULT_DOMAIN_ID, entity.getDomainId());
  Assert.assertEquals(1, entity.getOtherInfo().size());
  Assert.assertEquals("NEW_OTHER_INFO_KEY",
      entity.getOtherInfo().keySet().iterator().next());
  Assert.assertEquals("NEW_OTHER_INFO_VALUE",
      entity.getOtherInfo().values().iterator().next());
  // Set the domain to the non-default domain when updating
  entity = new TimelineEntity();
  entity.setEntityType("OLD_ENTITY_TYPE_1");
  entity.setEntityId("OLD_ENTITY_ID_2");
  entity.setDomainId("NON_DEFAULT");
  entity.addOtherInfo("NEW_OTHER_INFO_KEY", "NEW_OTHER_INFO_VALUE");
  entities = new TimelineEntities();
  entities.addEntity(entity);
  response =
      dataManaer.postEntities(entities, UserGroupInformation.getCurrentUser());
  Assert.assertEquals(1, response.getErrors().size());
  Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENIED,
      response.getErrors().get(0).getErrorCode());
  entity = store.getEntity("OLD_ENTITY_ID_2", "OLD_ENTITY_TYPE_1", null);
  Assert.assertNotNull(entity);
  // In leveldb, the domain Id is still null
  Assert.assertNull(entity.getDomainId());
  // Updating is not executed
  Assert.assertEquals(0, entity.getOtherInfo().size());
}
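The ACCESS_DENIED code asserted above is one of several integer constants on TimelinePutResponse.TimelinePutError; NO_START_TIME and FORBIDDEN_RELATION appear later on this page. Below is a hedged sketch of how a caller might branch on these codes, assuming response comes from TimelineDataManager.postEntities.

// Sketch only: branch on the per-entity error codes exercised by these tests.
for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
  switch (error.getErrorCode()) {
    case TimelinePutResponse.TimelinePutError.ACCESS_DENIED:
      // the caller may not write into the entity's existing domain
      break;
    case TimelinePutResponse.TimelinePutError.NO_START_TIME:
      // no explicit start time and no event to derive one from
      break;
    case TimelinePutResponse.TimelinePutError.FORBIDDEN_RELATION:
      // the entity relates itself to an entity in a different domain
      break;
    default:
      break;
  }
}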
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by Apache.
From the class TimelineStoreTestUtils, method loadTestEntityData:
/**
 * Load test entity data into the given store.
 */
protected void loadTestEntityData() throws IOException {
  beforeTs = System.currentTimeMillis() - 1;
  TimelineEntities entities = new TimelineEntities();
  Map<String, Set<Object>> primaryFilters = new HashMap<String, Set<Object>>();
  Set<Object> l1 = new HashSet<Object>();
  l1.add("username");
  Set<Object> l2 = new HashSet<Object>();
  l2.add(Integer.MAX_VALUE);
  Set<Object> l3 = new HashSet<Object>();
  l3.add("123abc");
  Set<Object> l4 = new HashSet<Object>();
  l4.add((long) Integer.MAX_VALUE + 1l);
  primaryFilters.put("user", l1);
  primaryFilters.put("appname", l2);
  primaryFilters.put("other", l3);
  primaryFilters.put("long", l4);
  Map<String, Object> secondaryFilters = new HashMap<String, Object>();
  secondaryFilters.put("startTime", 123456);
  secondaryFilters.put("status", "RUNNING");
  Map<String, Object> otherInfo1 = new HashMap<String, Object>();
  otherInfo1.put("info1", "val1");
  otherInfo1.putAll(secondaryFilters);
  String entityId1 = "id_1";
  String entityType1 = "type_1";
  String entityId1b = "id_2";
  String entityId2 = "id_2";
  String entityType2 = "type_2";
  String entityId4 = "id_4";
  String entityType4 = "type_4";
  String entityId5 = "id_5";
  String entityType5 = "type_5";
  String entityId6 = "id_6";
  String entityId7 = "id_7";
  String entityType7 = "type_7";
  Map<String, Set<String>> relatedEntities = new HashMap<String, Set<String>>();
  relatedEntities.put(entityType2, Collections.singleton(entityId2));
  TimelineEvent ev3 = createEvent(789l, "launch_event", null);
  TimelineEvent ev4 = createEvent(0l, "init_event", null);
  List<TimelineEvent> events = new ArrayList<TimelineEvent>();
  events.add(ev3);
  events.add(ev4);
  entities.setEntities(Collections.singletonList(createEntity(entityId2,
      entityType2, null, events, null, null, null, "domain_id_1")));
  TimelinePutResponse response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  TimelineEvent ev1 = createEvent(123l, "start_event", null);
  entities.setEntities(Collections.singletonList(createEntity(entityId1,
      entityType1, 123l, Collections.singletonList(ev1), relatedEntities,
      primaryFilters, otherInfo1, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, null, Collections.singletonList(ev1), relatedEntities,
      primaryFilters, otherInfo1, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put("event info 1", "val1");
  TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
  Map<String, Object> otherInfo2 = new HashMap<String, Object>();
  otherInfo2.put("info2", "val2");
  entities.setEntities(Collections.singletonList(createEntity(entityId1,
      entityType1, null, Collections.singletonList(ev2), null, primaryFilters,
      otherInfo2, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789l, Collections.singletonList(ev2), null, primaryFilters,
      otherInfo2, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity("badentityid",
      "badentity", null, null, null, null, otherInfo1, "domain_id_1")));
  response = store.put(entities);
  assertEquals(1, response.getErrors().size());
  TimelinePutError error = response.getErrors().get(0);
  assertEquals("badentityid", error.getEntityId());
  assertEquals("badentity", error.getEntityType());
  assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
  relatedEntities.clear();
  relatedEntities.put(entityType5, Collections.singleton(entityId5));
  entities.setEntities(Collections.singletonList(createEntity(entityId4,
      entityType4, 42l, null, relatedEntities, null, null, "domain_id_1")));
  response = store.put(entities);
  relatedEntities.clear();
  otherInfo1.put("info2", "val2");
  entities.setEntities(Collections.singletonList(createEntity(entityId6,
      entityType1, 61l, null, relatedEntities, primaryFilters, otherInfo1,
      "domain_id_2")));
  response = store.put(entities);
  relatedEntities.clear();
  relatedEntities.put(entityType1, Collections.singleton(entityId1));
  entities.setEntities(Collections.singletonList(createEntity(entityId7,
      entityType7, 62l, null, relatedEntities, null, null, "domain_id_2")));
  response = store.put(entities);
  assertEquals(1, response.getErrors().size());
  assertEquals(entityType7, response.getErrors().get(0).getEntityType());
  assertEquals(entityId7, response.getErrors().get(0).getEntityId());
  assertEquals(TimelinePutError.FORBIDDEN_RELATION,
      response.getErrors().get(0).getErrorCode());
  if (store instanceof LeveldbTimelineStore) {
    LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
    entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_1",
        "OLD_ENTITY_TYPE_1", 63l, null, null, null, null, null)));
    leveldb.putWithNoDomainId(entities);
    entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_2",
        "OLD_ENTITY_TYPE_1", 64l, null, null, null, null, null)));
    leveldb.putWithNoDomainId(entities);
  }
}
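The createEvent and createEntity helpers used throughout this method belong to TimelineStoreTestUtils and are not shown here. The following is a hypothetical reconstruction inferred only from the call sites above; the real implementations may differ.

// Hypothetical reconstruction of the helpers, inferred from the call sites above.
private static TimelineEvent createEvent(long timestamp, String type,
    Map<String, Object> info) {
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(timestamp);
  event.setEventType(type);
  if (info != null) {
    event.setEventInfo(info);
  }
  return event;
}

private static TimelineEntity createEntity(String entityId, String entityType,
    Long startTime, List<TimelineEvent> events,
    Map<String, Set<String>> relatedEntities,
    Map<String, Set<Object>> primaryFilters,
    Map<String, Object> otherInfo, String domainId) {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(entityId);
  entity.setEntityType(entityType);
  entity.setStartTime(startTime);
  entity.setEvents(events);
  if (relatedEntities != null) {
    // defensive copy, since the caller reuses and clears the map between puts
    entity.setRelatedEntities(new HashMap<String, Set<String>>(relatedEntities));
  }
  entity.setPrimaryFilters(primaryFilters);
  entity.setOtherInfo(otherInfo);
  entity.setDomainId(domainId);
  return entity;
}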
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by Apache.
From the class TestTimelineWebServices, method testPostEntitiesWithPrimaryFilter:
@Test
public void testPostEntitiesWithPrimaryFilter() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  Map<String, Set<Object>> filters = new HashMap<String, Set<Object>>();
  filters.put(TimelineStore.SystemFilter.ENTITY_OWNER.toString(),
      new HashSet<Object>());
  entity.setPrimaryFilters(filters);
  entity.setEntityId("test id 6");
  entity.setEntityType("test type 6");
  entity.setStartTime(System.currentTimeMillis());
  entities.addEntity(entity);
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("timeline")
      .queryParam("user.name", "tester")
      .accept(MediaType.APPLICATION_JSON)
      .type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
  Assert.assertEquals(0, putResponse.getErrors().size());
}
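This web-service test posts JSON directly through a Jersey WebResource against the ws/v1/timeline endpoint. Outside of such tests, the same put is usually issued through org.apache.hadoop.yarn.client.api.TimelineClient. A rough sketch follows, assuming the timeline service address is configured and the surrounding method declares the checked exceptions thrown by putEntities.

// Rough sketch: posting the entity built above through TimelineClient
// instead of a raw Jersey request (assumes a configured timeline service).
YarnConfiguration conf = new YarnConfiguration();
TimelineClient client = TimelineClient.createTimelineClient();
client.init(conf);
client.start();
try {
  TimelinePutResponse putResponse = client.putEntities(entity);
  Assert.assertEquals(0, putResponse.getErrors().size());
} finally {
  client.stop();
}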
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by Apache.
From the class TestLeveldbTimelineStore, method testDeleteEntitiesPrimaryFilters:
@Test
public void testDeleteEntitiesPrimaryFilters() throws IOException, InterruptedException {
  Map<String, Set<Object>> primaryFilter = Collections.singletonMap("user",
      Collections.singleton((Object) "otheruser"));
  TimelineEntities atsEntities = new TimelineEntities();
  atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
      null, domainId1)));
  TimelinePutResponse response = store.put(atsEntities);
  assertEquals(0, response.getErrors().size());
  NameValuePair pfPair = new NameValuePair("user", "otheruser");
  List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1", pfPair);
  assertEquals(1, entities.size());
  verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
      EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0), domainId1);
  entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
  assertEquals(3, entities.size());
  verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(0), domainId1);
  verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(1), domainId1);
  verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
      primaryFilters, otherInfo, entities.get(2), domainId2);
  ((LeveldbTimelineStore) store).discardOldEntities(-123L);
  assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(3, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
  ((LeveldbTimelineStore) store).discardOldEntities(123L);
  assertEquals(0, getEntities("type_1").size());
  assertEquals(0, getEntities("type_2").size());
  assertEquals(0, ((LeveldbTimelineStore) store).getEntityTypes().size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
  assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
}
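The discardOldEntities calls above are a test hook; in a deployed LeveldbTimelineStore, old entities are normally aged out by the store's TTL-based eviction instead. A hedged configuration sketch follows; the YarnConfiguration constant names and the store path are assumptions and should be checked against the Hadoop release in use.

// Hedged sketch: enabling TTL-based eviction on a standalone LeveldbTimelineStore.
// Constant names are assumed; verify them against YarnConfiguration.
YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH, "/tmp/timeline");  // example path
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, true);
conf.setLong(YarnConfiguration.TIMELINE_SERVICE_TTL_MS, 7L * 24 * 60 * 60 * 1000);  // one week
LeveldbTimelineStore ttlStore = new LeveldbTimelineStore();
ttlStore.init(conf);
ttlStore.start();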