use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class RollingLevelDBTimelineStore method getAndSetStartTime.
/**
 * Get the unique start time for a given entity. In the store the start time
 * is kept as a byte array that sorts the timestamps in reverse order (see
 * {@link GenericObjectMapper#writeReverseOrderedLong(long)}). If the start
 * time doesn't exist yet, set it based on the information provided.
 *
 * @param entityId
 *          The id of the entity
 * @param entityType
 *          The type of the entity
 * @param startTime
 *          The start time of the entity, or null
 * @param events
 *          A list of events for the entity, or null
 * @return the start time of the entity, or null if none could be determined
 * @throws IOException if the start time cannot be checked against the store
 */
private Long getAndSetStartTime(String entityId, String entityType,
    Long startTime, List<TimelineEvent> events) throws IOException {
  EntityIdentifier entity = new EntityIdentifier(entityId, entityType);
  Long time = startTimeWriteCache.get(entity);
  if (time != null) {
    // return the value in the cache
    return time;
  }
  if (startTime == null && events != null) {
    // calculate best guess start time based on lowest event time
    startTime = Long.MAX_VALUE;
    for (TimelineEvent e : events) {
      if (e.getTimestamp() < startTime) {
        startTime = e.getTimestamp();
      }
    }
  }
  // check the provided start time matches the db
  return checkStartTimeInDb(entity, startTime);
}
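The reverse-ordered encoding referenced in the Javadoc is what makes a plain lexicographic key scan in leveldb yield the most recent timestamps first. Below is a minimal sketch of the idea, assuming a simple Long.MAX_VALUE - ts big-endian encoding rather than GenericObjectMapper's actual bit-level implementation:

import java.nio.ByteBuffer;

public class ReverseOrderSketch {

  // Encode a non-negative timestamp so that unsigned lexicographic byte
  // order is the reverse of numeric order. A sketch of the idea only;
  // GenericObjectMapper's real encoding works at the bit level and may differ.
  static byte[] writeReverseOrdered(long ts) {
    return ByteBuffer.allocate(8).putLong(Long.MAX_VALUE - ts).array();
  }

  // Compare byte arrays as unsigned values, the way leveldb's default
  // comparator orders keys.
  static int compareUnsigned(byte[] a, byte[] b) {
    for (int i = 0; i < Math.min(a.length, b.length); i++) {
      int cmp = (a[i] & 0xff) - (b[i] & 0xff);
      if (cmp != 0) {
        return cmp;
      }
    }
    return a.length - b.length;
  }

  public static void main(String[] args) {
    byte[] newer = writeReverseOrdered(456L);
    byte[] older = writeReverseOrdered(123L);
    System.out.println(compareUnsigned(newer, older) < 0); // true: newer sorts first
  }
}

Any encoding with this property would do; the important part is that writeReverseOrderedLong makes newer start times compare smaller as raw bytes.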
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class RollingLevelDBTimelineStore method getEntityEvent.
/**
 * Creates an event object from the given key, offset, and value. If the event
 * type is not contained in the specified set of event types, returns null.
 */
private static TimelineEvent getEntityEvent(Set<String> eventTypes,
    byte[] key, int offset, byte[] value) throws IOException {
  KeyParser kp = new KeyParser(key, offset);
  long ts = kp.getNextLong();
  String tstype = kp.getNextString();
  if (eventTypes == null || eventTypes.contains(tstype)) {
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(ts);
    event.setEventType(tstype);
    Object o = fstConf.asObject(value);
    if (o == null) {
      event.setEventInfo(null);
    } else if (o instanceof Map) {
      @SuppressWarnings("unchecked")
      Map<String, Object> m = (Map<String, Object>) o;
      event.setEventInfo(m);
    } else {
      throw new IOException("Couldn't deserialize event info map");
    }
    return event;
  }
  return null;
}
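The method pairs a cursor-style KeyParser with FST deserialization of the value bytes. For readers unfamiliar with that pattern, here is a rough, hypothetical sketch of such a parser; the real KeyParser in the leveldb timeline store utilities may encode the long differently (e.g. reverse-ordered) and terminate strings differently:

import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Hypothetical cursor-style key parser, for illustration only. It assumes
// an 8-byte big-endian long followed by a zero-terminated UTF-8 string,
// which may not match the store's real key layout.
class SketchKeyParser {
  private final byte[] key;
  private int offset;

  SketchKeyParser(byte[] key, int offset) {
    this.key = key;
    this.offset = offset;
  }

  long getNextLong() throws IOException {
    if (offset + 8 > key.length) {
      throw new IOException("ran off the end of the key");
    }
    long v = 0;
    for (int i = 0; i < 8; i++) {
      v = (v << 8) | (key[offset++] & 0xff);
    }
    return v;
  }

  String getNextString() throws IOException {
    int start = offset;
    while (offset < key.length && key[offset] != 0) {
      offset++;
    }
    String s = new String(key, start, offset - start, StandardCharsets.UTF_8);
    offset++; // skip the terminator
    return s;
  }
}

The point is only the shape of the API: a byte array plus a moving offset, with getNextLong and getNextString consuming successive fields.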
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class TimelineStoreTestUtils method loadTestEntityData.
/**
 * Load test entity data into the given store.
 */
protected void loadTestEntityData() throws IOException {
  beforeTs = System.currentTimeMillis() - 1;
  TimelineEntities entities = new TimelineEntities();
  Map<String, Set<Object>> primaryFilters =
      new HashMap<String, Set<Object>>();
  Set<Object> l1 = new HashSet<Object>();
  l1.add("username");
  Set<Object> l2 = new HashSet<Object>();
  l2.add(Integer.MAX_VALUE);
  Set<Object> l3 = new HashSet<Object>();
  l3.add("123abc");
  Set<Object> l4 = new HashSet<Object>();
  l4.add((long) Integer.MAX_VALUE + 1L);
  primaryFilters.put("user", l1);
  primaryFilters.put("appname", l2);
  primaryFilters.put("other", l3);
  primaryFilters.put("long", l4);
  Map<String, Object> secondaryFilters = new HashMap<String, Object>();
  secondaryFilters.put("startTime", 123456);
  secondaryFilters.put("status", "RUNNING");
  Map<String, Object> otherInfo1 = new HashMap<String, Object>();
  otherInfo1.put("info1", "val1");
  otherInfo1.putAll(secondaryFilters);
  String entityId1 = "id_1";
  String entityType1 = "type_1";
  String entityId1b = "id_2";
  String entityId2 = "id_2";
  String entityType2 = "type_2";
  String entityId4 = "id_4";
  String entityType4 = "type_4";
  String entityId5 = "id_5";
  String entityType5 = "type_5";
  String entityId6 = "id_6";
  String entityId7 = "id_7";
  String entityType7 = "type_7";
  Map<String, Set<String>> relatedEntities =
      new HashMap<String, Set<String>>();
  relatedEntities.put(entityType2, Collections.singleton(entityId2));
  TimelineEvent ev3 = createEvent(789L, "launch_event", null);
  TimelineEvent ev4 = createEvent(0L, "init_event", null);
  List<TimelineEvent> events = new ArrayList<TimelineEvent>();
  events.add(ev3);
  events.add(ev4);
  entities.setEntities(Collections.singletonList(createEntity(entityId2,
      entityType2, null, events, null, null, null, "domain_id_1")));
  TimelinePutResponse response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  TimelineEvent ev1 = createEvent(123L, "start_event", null);
  entities.setEntities(Collections.singletonList(createEntity(entityId1,
      entityType1, 123L, Collections.singletonList(ev1), relatedEntities,
      primaryFilters, otherInfo1, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, null, Collections.singletonList(ev1), relatedEntities,
      primaryFilters, otherInfo1, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  Map<String, Object> eventInfo = new HashMap<String, Object>();
  eventInfo.put("event info 1", "val1");
  TimelineEvent ev2 = createEvent(456L, "end_event", eventInfo);
  Map<String, Object> otherInfo2 = new HashMap<String, Object>();
  otherInfo2.put("info2", "val2");
  entities.setEntities(Collections.singletonList(createEntity(entityId1,
      entityType1, null, Collections.singletonList(ev2), null,
      primaryFilters, otherInfo2, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity(entityId1b,
      entityType1, 789L, Collections.singletonList(ev2), null,
      primaryFilters, otherInfo2, "domain_id_1")));
  response = store.put(entities);
  assertEquals(0, response.getErrors().size());
  entities.setEntities(Collections.singletonList(createEntity(
      "badentityid", "badentity", null, null, null, null, otherInfo1,
      "domain_id_1")));
  response = store.put(entities);
  assertEquals(1, response.getErrors().size());
  TimelinePutError error = response.getErrors().get(0);
  assertEquals("badentityid", error.getEntityId());
  assertEquals("badentity", error.getEntityType());
  assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
  relatedEntities.clear();
  relatedEntities.put(entityType5, Collections.singleton(entityId5));
  entities.setEntities(Collections.singletonList(createEntity(entityId4,
      entityType4, 42L, null, relatedEntities, null, null, "domain_id_1")));
  response = store.put(entities);
  relatedEntities.clear();
  otherInfo1.put("info2", "val2");
  entities.setEntities(Collections.singletonList(createEntity(entityId6,
      entityType1, 61L, null, relatedEntities, primaryFilters, otherInfo1,
      "domain_id_2")));
  response = store.put(entities);
  relatedEntities.clear();
  relatedEntities.put(entityType1, Collections.singleton(entityId1));
  entities.setEntities(Collections.singletonList(createEntity(entityId7,
      entityType7, 62L, null, relatedEntities, null, null, "domain_id_2")));
  response = store.put(entities);
  assertEquals(1, response.getErrors().size());
  assertEquals(entityType7, response.getErrors().get(0).getEntityType());
  assertEquals(entityId7, response.getErrors().get(0).getEntityId());
  assertEquals(TimelinePutError.FORBIDDEN_RELATION,
      response.getErrors().get(0).getErrorCode());
  if (store instanceof LeveldbTimelineStore) {
    LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
    entities.setEntities(Collections.singletonList(createEntity(
        "OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", 63L, null, null, null,
        null, null)));
    leveldb.putWithNoDomainId(entities);
    entities.setEntities(Collections.singletonList(createEntity(
        "OLD_ENTITY_ID_2", "OLD_ENTITY_TYPE_1", 64L, null, null, null,
        null, null)));
    leveldb.putWithNoDomainId(entities);
  }
}
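Two write-time validations surface in the assertions above: an entity with neither a start time nor any events is rejected with NO_START_TIME, and relating to an entity in another domain is rejected with FORBIDDEN_RELATION. Outside a test, the same response can be inspected with a small loop; this sketch uses only the accessors already exercised above:

// Log every rejected entity in a put response. getEntityId,
// getEntityType, and getErrorCode are the same accessors used in the
// assertions above; TimelinePutError is nested in TimelinePutResponse.
TimelinePutResponse response = store.put(entities);
for (TimelinePutResponse.TimelinePutError err : response.getErrors()) {
  System.err.println("put rejected " + err.getEntityType() + "/"
      + err.getEntityId() + " with error code " + err.getErrorCode());
}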
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class TimelineStoreTestUtils method createEvent.
/**
 * Create a test event.
 */
private static TimelineEvent createEvent(long timestamp, String type,
    Map<String, Object> info) {
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(timestamp);
  event.setEventType(type);
  event.setEventInfo(info);
  return event;
}
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class TestTimelineWebServices method testGetEvents.
@Test
public void testGetEvents() throws Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("timeline")
      .path("type_1").path("events").queryParam("entityId", "id_1")
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  TimelineEvents events = response.getEntity(TimelineEvents.class);
  Assert.assertNotNull(events);
  Assert.assertEquals(1, events.getAllEvents().size());
  TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0);
  Assert.assertEquals(2, partEvents.getEvents().size());
  TimelineEvent event1 = partEvents.getEvents().get(0);
  Assert.assertEquals(456L, event1.getTimestamp());
  Assert.assertEquals("end_event", event1.getEventType());
  Assert.assertEquals(1, event1.getEventInfo().size());
  TimelineEvent event2 = partEvents.getEvents().get(1);
  Assert.assertEquals(123L, event2.getTimestamp());
  Assert.assertEquals("start_event", event2.getEventType());
  Assert.assertEquals(0, event2.getEventInfo().size());
}
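Note that the events come back newest-first (the 456L end_event before the 123L start_event), consistent with the reverse-ordered timestamp keys from the first snippet. The test drives the endpoint through the in-process Jersey harness; as a hedged sketch, the same query against a standalone timeline server (assuming the default webapp address localhost:8188) is a plain GET:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class TimelineEventsQuery {
  public static void main(String[] args) throws Exception {
    // Same path and query parameter as the test:
    // /ws/v1/timeline/{entityType}/events?entityId=...
    URL url = new URL(
        "http://localhost:8188/ws/v1/timeline/type_1/events?entityId=id_1");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}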