Use of com.dtstack.taier.develop.utils.develop.hive.service.LogPluginDownload in the Taier project by DTStack:
the typeLogDownloader method of the HadoopDataDownloadService class.
/**
 * Builds a typed-log downloader for a finished job by locating its YARN application
 * and wiring a {@link LogPluginDownload} against the tenant's HDFS/YARN configuration.
 *
 * @param tenantId tenant whose Hadoop/YARN configuration is used
 * @param jobId    job whose application logs are requested
 * @param limitNum maximum number of log entries/lines to fetch
 * @param logType  requested log type (currently unused in this implementation)
 * @return a configured {@link IDownload} over the job's YARN logs
 * @throws RdosDefineException if the job has no application id yet, or if the
 *                             downloader cannot be created after retries
 */
@Override
public IDownload typeLogDownloader(Long tenantId, String jobId, Integer limitNum, String logType) {
    String applicationId = batchJobService.getApplicationId(jobId);
    if (StringUtils.isBlank(applicationId)) {
        throw new RdosDefineException("任务尚未执行完成或提交失败,请稍后再试");
    }
    try {
        // Retry up to 3 times with a 1s pause; config lookups may transiently fail.
        return RetryUtil.executeWithRetry(() -> {
            Map<String, Object> hdfsConfig = Engine2DTOService.getHdfs(tenantId);
            JSONObject yarnConfig = Engine2DTOService.getComponentConfig(tenantId, EComponentType.YARN);
            String runAsUser = getSubmitUserNameByJobId(jobId);
            LogPluginDownload logDownload =
                    new LogPluginDownload(applicationId, yarnConfig, hdfsConfig, runAsUser, limitNum);
            logDownload.configure();
            return logDownload;
        }, 3, 1000L, false);
    } catch (Exception e) {
        throw new RdosDefineException(String.format("typeLogDownloader 失败,原因是:%s", e.getMessage()), e);
    }
}
Use of com.dtstack.taier.develop.utils.develop.hive.service.LogPluginDownload in the Taier project by DTStack:
the buildIDownLoad method of the HadoopDataDownloadService class.
/**
 * Builds a log downloader for the given job.
 *
 * <p>For SYNC tasks, assembles an in-memory {@link SyncDownload} from the job's engine
 * log and log-info JSON (no YARN access). For all other task types, resolves the job's
 * YARN application id and builds a {@link LogPluginDownload} with retries, returning
 * {@code null} when logs are unavailable rather than throwing.
 *
 * @param jobId    job identifier; must not be blank
 * @param taskType schedule task type code (see {@code EScheduleJobType})
 * @param tenantId tenant whose Hadoop/YARN configuration is used
 * @param limitNum maximum number of log entries/lines to fetch
 * @return an {@link IDownload}, or {@code null} if logs cannot be provided
 * @throws RdosDefineException if {@code jobId} is blank
 */
@Override
public IDownload buildIDownLoad(String jobId, Integer taskType, Long tenantId, Integer limitNum) {
    if (StringUtils.isBlank(jobId)) {
        throw new RdosDefineException("jobId 不能为空");
    }
    if (EScheduleJobType.SYNC.getVal().equals(taskType)) {
        // Standalone mode does not support log download; return null directly.
        // Boolean.TRUE.equals guards against a null Boolean (unboxing NPE).
        Boolean isStandalone = clusterService.hasStandalone(tenantId, EComponentType.FLINK.getTypeCode());
        if (Boolean.TRUE.equals(isStandalone)) {
            return null;
        }
        // Sync task: stitch the engine log and log-info message into one text blob.
        StringBuilder syncLog = new StringBuilder();
        SyncDownload syncDownload = new SyncDownload();
        ActionLogVO log = actionService.log(jobId);
        if (Objects.nonNull(log)) {
            String engineLogStr = log.getEngineLog();
            String logInfoStr = log.getLogInfo();
            if (StringUtils.isNotBlank(engineLogStr)) {
                try {
                    // Strip the (potentially huge) incremental config before rendering.
                    JSONObject engineLogJson = JSON.parseObject(engineLogStr);
                    engineLogJson.remove("increConf");
                    engineLogStr = engineLogJson.toJSONString();
                } catch (Exception e) {
                    // Engine log is not JSON; keep the raw string as-is.
                    LOGGER.info("engineLog非json", e);
                }
                syncLog.append("engineLog:\n").append(engineLogStr).append("\n");
            }
            try {
                JSONObject logInfo = JSON.parseObject(logInfoStr);
                // parseObject returns null for blank/"null" input; check instead of
                // relying on an NPE caught below (exceptions are not control flow).
                if (logInfo != null) {
                    syncLog.append("logInfo:\n").append(logInfo.getString("msg_info"));
                }
            } catch (Exception e) {
                LOGGER.error("同步任务日志下载失败", e);
            }
        }
        syncDownload.setLogInfo(syncLog.toString());
        return syncDownload;
    }
    String applicationId = batchJobService.getApplicationId(jobId);
    if (StringUtils.isBlank(applicationId)) {
        return null;
    }
    try {
        // Retry up to 3 times with a 1s pause; config lookups may transiently fail.
        return RetryUtil.executeWithRetry(() -> {
            final Map<String, Object> hadoopConf = Engine2DTOService.getHdfs(tenantId);
            final JSONObject yarnConf = Engine2DTOService.getComponentConfig(tenantId, EComponentType.YARN);
            String submitUserName = getSubmitUserNameByJobId(jobId);
            // NOTE(review): unlike typeLogDownloader, configure() is not invoked on the
            // downloader here — confirm whether that is intentional.
            return new LogPluginDownload(applicationId, yarnConf, hadoopConf, submitUserName, limitNum);
        }, 3, 1000L, false);
    } catch (Exception e) {
        // Pass the Throwable as the trailing argument WITHOUT a placeholder so SLF4J
        // logs the full stack trace (a "{}" for it would swallow the trace).
        LOGGER.error("downloadJobLog {} 失败", jobId, e);
        return null;
    }
}
Aggregations