Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by apache.
The class KeyValueBasedTimelineStore, method put:
@Override
public synchronized TimelinePutResponse put(TimelineEntities data) {
  TimelinePutResponse response = new TimelinePutResponse();
  if (getServiceStopped()) {
LOG.info("Service stopped, return null for the storage");
    TimelinePutError error = new TimelinePutError();
    error.setErrorCode(TimelinePutError.IO_EXCEPTION);
    response.addError(error);
    return response;
  }
  for (TimelineEntity entity : data.getEntities()) {
    EntityIdentifier entityId =
        new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
    // store entity info in memory
    TimelineEntity existingEntity = entities.get(entityId);
    boolean needsPut = false;
    if (existingEntity == null) {
      existingEntity = new TimelineEntity();
      existingEntity.setEntityId(entity.getEntityId());
      existingEntity.setEntityType(entity.getEntityType());
      existingEntity.setStartTime(entity.getStartTime());
      if (entity.getDomainId() == null ||
          entity.getDomainId().length() == 0) {
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entityId.getId());
        error.setEntityType(entityId.getType());
        error.setErrorCode(TimelinePutError.NO_DOMAIN);
        response.addError(error);
        continue;
      }
      existingEntity.setDomainId(entity.getDomainId());
      // insert a new entity into the storage and update the insert-time map
      entityInsertTimes.put(entityId, System.currentTimeMillis());
      needsPut = true;
    }
    if (entity.getEvents() != null) {
      if (existingEntity.getEvents() == null) {
        existingEntity.setEvents(entity.getEvents());
      } else {
        existingEntity.addEvents(entity.getEvents());
      }
      Collections.sort(existingEntity.getEvents());
      needsPut = true;
    }
    // check startTime
    if (existingEntity.getStartTime() == null) {
      if (existingEntity.getEvents() == null ||
          existingEntity.getEvents().isEmpty()) {
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entityId.getId());
        error.setEntityType(entityId.getType());
        error.setErrorCode(TimelinePutError.NO_START_TIME);
        response.addError(error);
        entities.remove(entityId);
        entityInsertTimes.remove(entityId);
        continue;
      } else {
        Long min = Long.MAX_VALUE;
        for (TimelineEvent e : entity.getEvents()) {
          if (min > e.getTimestamp()) {
            min = e.getTimestamp();
          }
        }
        existingEntity.setStartTime(min);
        needsPut = true;
      }
    }
    if (entity.getPrimaryFilters() != null) {
      if (existingEntity.getPrimaryFilters() == null) {
        existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
      }
      for (Entry<String, Set<Object>> pf :
          entity.getPrimaryFilters().entrySet()) {
        for (Object pfo : pf.getValue()) {
          existingEntity.addPrimaryFilter(pf.getKey(),
              KeyValueBasedTimelineStoreUtils.compactNumber(pfo));
          needsPut = true;
        }
      }
    }
    if (entity.getOtherInfo() != null) {
      if (existingEntity.getOtherInfo() == null) {
        existingEntity.setOtherInfo(new HashMap<String, Object>());
      }
      for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
        existingEntity.addOtherInfo(info.getKey(),
            KeyValueBasedTimelineStoreUtils.compactNumber(info.getValue()));
        needsPut = true;
      }
    }
    if (needsPut) {
      entities.put(entityId, existingEntity);
    }
    // relate it to other entities
    if (entity.getRelatedEntities() == null) {
      continue;
    }
    for (Entry<String, Set<String>> partRelatedEntities :
        entity.getRelatedEntities().entrySet()) {
      if (partRelatedEntities == null) {
        continue;
      }
      for (String idStr : partRelatedEntities.getValue()) {
        EntityIdentifier relatedEntityId =
            new EntityIdentifier(idStr, partRelatedEntities.getKey());
        TimelineEntity relatedEntity = entities.get(relatedEntityId);
        if (relatedEntity != null) {
          if (relatedEntity.getDomainId().equals(
              existingEntity.getDomainId())) {
            relatedEntity.addRelatedEntity(
                existingEntity.getEntityType(), existingEntity.getEntityId());
            entities.put(relatedEntityId, relatedEntity);
          } else {
            // in this case the entity will be put, but the relation will be
            // ignored
            TimelinePutError error = new TimelinePutError();
            error.setEntityType(existingEntity.getEntityType());
            error.setEntityId(existingEntity.getEntityId());
            error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
            response.addError(error);
          }
        } else {
          relatedEntity = new TimelineEntity();
          relatedEntity.setEntityId(relatedEntityId.getId());
          relatedEntity.setEntityType(relatedEntityId.getType());
          relatedEntity.setStartTime(existingEntity.getStartTime());
          relatedEntity.addRelatedEntity(
              existingEntity.getEntityType(), existingEntity.getEntityId());
          relatedEntity.setDomainId(existingEntity.getDomainId());
          entities.put(relatedEntityId, relatedEntity);
          entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
        }
      }
    }
  }
  return response;
}
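Note that this put method reports validation failures through the returned TimelinePutResponse rather than by throwing. A minimal caller-side sketch of that contract, assuming an already initialized and started in-memory store (such as MemoryTimelineStore, a KeyValueBasedTimelineStore subclass) bound to a variable named store; the entity IDs and types are illustrative:

// Hypothetical caller exercising the success and NO_DOMAIN paths of put().
TimelineEntity valid = new TimelineEntity();
valid.setEntityId("entity_1");
valid.setEntityType("TEST_TYPE");
valid.setDomainId("domain_1");
valid.setStartTime(System.currentTimeMillis());
TimelineEntity noDomain = new TimelineEntity();
noDomain.setEntityId("entity_2");
noDomain.setEntityType("TEST_TYPE");
// Domain ID deliberately left unset, so this entity should be rejected.
TimelineEntities batch = new TimelineEntities();
batch.addEntity(valid);
batch.addEntity(noDomain);
TimelinePutResponse response = store.put(batch);
for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
  // Expect a single error for entity_2 with code TimelinePutError.NO_DOMAIN.
  System.out.println(error.getEntityId() + " rejected, code " + error.getErrorCode());
}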
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by apache.
The class FileSystemTimelineWriter, method putEntities:
@Override
public TimelinePutResponse putEntities(ApplicationAttemptId appAttemptId,
    TimelineEntityGroupId groupId, TimelineEntity... entities)
    throws IOException, YarnException {
  if (appAttemptId == null) {
    return putEntities(entities);
  }
  List<TimelineEntity> entitiesToDBStore = new ArrayList<TimelineEntity>();
  List<TimelineEntity> entitiesToSummaryCache = new ArrayList<TimelineEntity>();
  List<TimelineEntity> entitiesToEntityCache = new ArrayList<TimelineEntity>();
  Path attemptDir = attemptDirCache.getAppAttemptDir(appAttemptId);
  for (TimelineEntity entity : entities) {
    if (summaryEntityTypes.contains(entity.getEntityType())) {
      entitiesToSummaryCache.add(entity);
    } else {
      if (groupId != null) {
        entitiesToEntityCache.add(entity);
      } else {
        entitiesToDBStore.add(entity);
      }
    }
  }
  if (!entitiesToSummaryCache.isEmpty()) {
    Path summaryLogPath =
        new Path(attemptDir, SUMMARY_LOG_PREFIX + appAttemptId.toString());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Writing summary log for " + appAttemptId.toString() + " to "
          + summaryLogPath);
    }
    this.logFDsCache.writeSummaryEntityLogs(fs, summaryLogPath, objMapper,
        appAttemptId, entitiesToSummaryCache, isAppendSupported);
  }
  if (!entitiesToEntityCache.isEmpty()) {
    Path entityLogPath =
        new Path(attemptDir, ENTITY_LOG_PREFIX + groupId.toString());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Writing entity log for " + groupId.toString() + " to "
          + entityLogPath);
    }
    this.logFDsCache.writeEntityLogs(fs, entityLogPath, objMapper,
        appAttemptId, groupId, entitiesToEntityCache, isAppendSupported);
  }
  if (!entitiesToDBStore.isEmpty()) {
    putEntities(entitiesToDBStore.toArray(
        new TimelineEntity[entitiesToDBStore.size()]));
  }
  return new TimelinePutResponse();
}
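This overload splits one batch three ways: summary entity types go to a per-attempt summary log, other entities with a group ID go to a per-group entity log, and the remainder falls through to the plain putEntities(...) overload. A hedged usage sketch, assuming an already configured FileSystemTimelineWriter bound to a variable named writer; the IDs below are illustrative:

// Hypothetical caller of the putEntities overload above.
ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance(appId, "group_1");
TimelineEntity entity = new TimelineEntity();
entity.setEntityId("entity_1");
entity.setEntityType("NON_SUMMARY_TYPE");
entity.setStartTime(System.currentTimeMillis());
// With a non-null group ID and a non-summary type, the entity is routed
// to the entity log for group_1 rather than the summary log or the DB store.
TimelinePutResponse response = writer.putEntities(attemptId, groupId, entity);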
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by apache.
The class TimelineClientImpl, method putTimelineDataInJSONFile:
/**
 * Put timeline data in a JSON file via command line.
 *
 * @param path the path to the timeline data JSON file
 * @param type the type of the timeline data in the JSON file
 */
private static void putTimelineDataInJSONFile(String path, String type) {
  File jsonFile = new File(path);
  if (!jsonFile.exists()) {
    LOG.error("File [" + jsonFile.getAbsolutePath() + "] doesn't exist");
    return;
  }
  YarnJacksonJaxbJsonProvider.configObjectMapper(MAPPER);
  TimelineEntities entities = null;
  TimelineDomains domains = null;
  try {
    if (type.equals(ENTITY_DATA_TYPE)) {
      entities = MAPPER.readValue(jsonFile, TimelineEntities.class);
    } else if (type.equals(DOMAIN_DATA_TYPE)) {
      domains = MAPPER.readValue(jsonFile, TimelineDomains.class);
    }
  } catch (Exception e) {
LOG.error("Error when reading " + e.getMessage());
    e.printStackTrace(System.err);
    return;
  }
  Configuration conf = new YarnConfiguration();
  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(conf);
  client.start();
  try {
    if (UserGroupInformation.isSecurityEnabled() && conf.getBoolean(
        YarnConfiguration.TIMELINE_SERVICE_ENABLED, false)) {
      Token<TimelineDelegationTokenIdentifier> token = client
          .getDelegationToken(
              UserGroupInformation.getCurrentUser().getUserName());
      UserGroupInformation.getCurrentUser().addToken(token);
    }
    if (type.equals(ENTITY_DATA_TYPE)) {
      TimelinePutResponse response = client.putEntities(
          entities.getEntities().toArray(
              new TimelineEntity[entities.getEntities().size()]));
      if (response.getErrors().size() == 0) {
        LOG.info("Timeline entities are successfully put");
      } else {
        for (TimelinePutResponse.TimelinePutError error :
            response.getErrors()) {
          LOG.error("TimelineEntity [" + error.getEntityType() + ":"
              + error.getEntityId() + "] is not successfully put. Error code: "
              + error.getErrorCode());
        }
      }
    } else if (type.equals(DOMAIN_DATA_TYPE) && domains != null) {
      boolean hasError = false;
      for (TimelineDomain domain : domains.getDomains()) {
        try {
          client.putDomain(domain);
        } catch (Exception e) {
          LOG.error("Error when putting domain " + domain.getId(), e);
          hasError = true;
        }
      }
      if (!hasError) {
        LOG.info("Timeline domains are successfully put");
      }
    }
  } catch (Exception e) {
    LOG.error("Error when putting the timeline data", e);
  } finally {
    client.stop();
  }
}
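The input file read above is simply the Jackson serialization of TimelineEntities (or TimelineDomains). A hedged sketch of producing a compatible entity file; the mapper setup mirrors the configObjectMapper call in the method, and the output path is illustrative:

// Hypothetical producer of the JSON file this command-line path consumes.
ObjectMapper mapper = new ObjectMapper();
YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
TimelineEntity entity = new TimelineEntity();
entity.setEntityId("entity_1");
entity.setEntityType("TEST_TYPE");
entity.setDomainId("domain_1");
entity.setStartTime(System.currentTimeMillis());
TimelineEntities entities = new TimelineEntities();
entities.addEntity(entity);
// Written in the same shape that MAPPER.readValue(jsonFile,
// TimelineEntities.class) expects on the way back in.
mapper.writeValue(new File("/tmp/timeline-entities.json"), entities);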
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by apache.
The class TestTimelineWebServicesWithSSL, method testPutEntities:
@Test
public void testPutEntities() throws Exception {
  TestTimelineClient client = new TestTimelineClient();
  try {
    client.init(conf);
    client.start();
    TimelineEntity expectedEntity = new TimelineEntity();
    expectedEntity.setEntityType("test entity type");
    expectedEntity.setEntityId("test entity id");
    expectedEntity.setDomainId("test domain id");
    TimelineEvent event = new TimelineEvent();
    event.setEventType("test event type");
    event.setTimestamp(0L);
    expectedEntity.addEvent(event);
    TimelinePutResponse response = client.putEntities(expectedEntity);
    Assert.assertEquals(0, response.getErrors().size());
    Assert.assertTrue(client.resp.toString().contains("https"));
    TimelineEntity actualEntity = store.getEntity(
        expectedEntity.getEntityId(), expectedEntity.getEntityType(),
        EnumSet.allOf(Field.class));
    Assert.assertNotNull(actualEntity);
    Assert.assertEquals(
        expectedEntity.getEntityId(), actualEntity.getEntityId());
    Assert.assertEquals(
        expectedEntity.getEntityType(), actualEntity.getEntityType());
  } finally {
    client.stop();
    client.close();
  }
}
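The assertion that the response URL contains "https" only holds if the client was initialized against an SSL-enabled configuration. A hedged sketch of the relevant setup; the configuration keys come from the Hadoop/YARN API, while the ssl-client resource name is illustrative:

// Assumed SSL-related configuration for the test client above.
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
// Force HTTPS so the timeline client targets the SSL endpoint.
conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
// Point the SSL client settings at a keystore/truststore definition.
conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml");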
Use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse in project hadoop by apache.
The class TestTimelineWebServices, method testPostEntities:
@Test
public void testPostEntities() throws Exception {
  TimelineEntities entities = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("test id 1");
  entity.setEntityType("test type 1");
  entity.setStartTime(System.currentTimeMillis());
  entity.setDomainId("domain_id_1");
  entities.addEntity(entity);
  WebResource r = resource();
  // No owner, will be rejected
  ClientResponse response = r.path("ws").path("v1").path("timeline")
      .accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  assertResponseStatusCode(Status.FORBIDDEN, response.getStatusInfo());
  response = r.path("ws").path("v1").path("timeline")
      .queryParam("user.name", "tester")
      .accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON)
      .post(ClientResponse.class, entities);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
  Assert.assertNotNull(putResponse);
  Assert.assertEquals(0, putResponse.getErrors().size());
  // verify the entity exists in the store
  response = r.path("ws").path("v1").path("timeline")
      .path("test type 1").path("test id 1")
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  entity = response.getEntity(TimelineEntity.class);
  Assert.assertNotNull(entity);
  Assert.assertEquals("test id 1", entity.getEntityId());
  Assert.assertEquals("test type 1", entity.getEntityType());
}