Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.
The class SparkYarnClient, method buildYarnClient.
/**
 * Create the YarnClient, wrapping creation in the KerberosUtils login logic.
 * @return an initialized and started YarnClient
 */
private YarnClient buildYarnClient() {
    try {
        return KerberosUtils.login(sparkYarnConfig, () -> {
            logger.info("buildYarnClient, init YarnClient!");
            YarnClient yarnClient1 = YarnClient.createYarnClient();
            yarnClient1.init(yarnConf);
            yarnClient1.start();
            return yarnClient1;
        }, yarnConf, true);
    } catch (Exception e) {
        logger.error("buildYarnClient initSecurity happens error", e);
        throw new PluginDefineException(e);
    }
}
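The getJobStatus method further down calls a getYarnClient() accessor that this page does not show. A minimal sketch of how such an accessor could cache the client produced by buildYarnClient() — the field name and the double-checked locking are assumptions for illustration, not the Taier source:

// Hypothetical caching accessor, not taken from Taier: build the client
// once and reuse it; buildYarnClient() already performs the Kerberos login.
private volatile YarnClient yarnClient;

private YarnClient getYarnClient() {
    if (yarnClient == null) {
        synchronized (this) {
            if (yarnClient == null) {
                yarnClient = buildYarnClient();
            }
        }
    }
    return yarnClient;
}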
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.
The class SparkYarnClient, method submitSparkSqlJobForBatch.
/**
 * Submit a Spark batch SQL job.
 * @param jobClient the job description and configuration
 * @return the submission result, carrying the YARN application id on success
 */
private JobResult submitSparkSqlJobForBatch(JobClient jobClient) {
    Properties confProp = jobClient.getConfProperties();
    setHadoopUserName(sparkYarnConfig);
    Map<String, Object> paramsMap = new HashMap<>();
    String zipSql = DtStringUtil.zip(jobClient.getSql());
    paramsMap.put("sql", zipSql);
    paramsMap.put("appName", jobClient.getJobName());
    paramsMap.put("sparkSessionConf", getSparkSessionConf(confProp));
    String logLevel = MathUtil.getString(confProp.get(LOG_LEVEL_KEY));
    if (StringUtils.isNotEmpty(logLevel)) {
        paramsMap.put("logLevel", logLevel);
    }
    String sqlExeJson = null;
    try {
        sqlExeJson = PublicUtil.objToString(paramsMap);
        sqlExeJson = URLEncoder.encode(sqlExeJson, Charsets.UTF_8.name());
    } catch (Exception e) {
        logger.error("", e);
        throw new PluginDefineException("get unexpected exception:" + e.getMessage());
    }
    String sqlProxyClass = sparkYarnConfig.getSparkSqlProxyMainClass();
    List<String> argList = new ArrayList<>();
    argList.add("--jar");
    argList.add(sparkYarnConfig.getSparkSqlProxyPath());
    argList.add("--class");
    argList.add(sqlProxyClass);
    argList.add("--arg");
    argList.add(sqlExeJson);
    ClientArguments clientArguments = new ClientArguments(argList.toArray(new String[argList.size()]));
    SparkConf sparkConf = buildBasicSparkConf(jobClient);
    sparkConf.setAppName(jobClient.getJobName());
    setSparkLog4jLocalFilePath(sparkConf, jobClient);
    fillExtSparkConf(sparkConf, confProp);
    setSparkLog4jConfiguration(sparkConf);
    ApplicationId appId = null;
    try {
        ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
        clientExt.setSparkYarnConfig(sparkYarnConfig);
        String proxyUserName = sparkYarnConfig.getDtProxyUserName();
        if (StringUtils.isNotBlank(proxyUserName)) {
            logger.info("ugi proxyUser is {}", proxyUserName);
            appId = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser())
                    .doAs((PrivilegedExceptionAction<ApplicationId>) () -> clientExt.submitApplication(jobClient.getApplicationPriority()));
        } else {
            appId = clientExt.submitApplication(jobClient.getApplicationPriority());
        }
        return JobResult.createSuccessResult(appId.toString());
    } catch (Exception ex) {
        return JobResult.createErrorResult("submit job get unknown error\n" + ExceptionUtil.getErrorMessage(ex));
    }
}
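The payload handed to the SQL proxy is built in two steps: the SQL is compressed with DtStringUtil.zip, wrapped in a JSON map, and the whole JSON is URL-encoded so it survives being passed as a single command-line --arg. DtStringUtil's compression format is not shown on this page; the self-contained sketch below illustrates the same pipeline, assuming a GZIP-plus-Base64 scheme purely for illustration:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.zip.GZIPOutputStream;

// Illustrative stand-in for the zip + URL-encode steps; GZIP over Base64
// is an assumption, not necessarily what DtStringUtil.zip actually does.
static String zipAndUrlEncode(String sql) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (GZIPOutputStream gzip = new GZIPOutputStream(bos)) {
        gzip.write(sql.getBytes(StandardCharsets.UTF_8));
    }
    String zipped = Base64.getEncoder().encodeToString(bos.toByteArray());
    return URLEncoder.encode(zipped, StandardCharsets.UTF_8.name());
}

The SQL proxy main class on the cluster side is expected to reverse the same steps before executing the SQL.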
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.
The class SparkYarnClient, method getJobStatus.
@Override
public TaskStatus getJobStatus(JobIdentifier jobIdentifier) throws IOException {
    try {
        return KerberosUtils.login(sparkYarnConfig, () -> {
            String jobId = jobIdentifier.getApplicationId();
            if (StringUtils.isEmpty(jobId)) {
                return null;
            }
            ApplicationId appId = ConverterUtils.toApplicationId(jobId);
            try {
                ApplicationReport report = getYarnClient().getApplicationReport(appId);
                YarnApplicationState applicationState = report.getYarnApplicationState();
                switch (applicationState) {
                    case KILLED:
                        return TaskStatus.KILLED;
                    case NEW:
                    case NEW_SAVING:
                        return TaskStatus.CREATED;
                    case SUBMITTED:
                        // FIXME special case: a job already handed to the compute engine is treated as waiting for resources
                        return TaskStatus.WAITCOMPUTE;
                    case ACCEPTED:
                        return TaskStatus.SCHEDULED;
                    case RUNNING:
                        return TaskStatus.RUNNING;
                    case FINISHED:
                        // When the state is FINISHED, the finalStatus must be checked as well.
                        FinalApplicationStatus finalApplicationStatus = report.getFinalApplicationStatus();
                        if (finalApplicationStatus == FinalApplicationStatus.FAILED) {
                            return TaskStatus.FAILED;
                        } else if (finalApplicationStatus == FinalApplicationStatus.SUCCEEDED) {
                            return TaskStatus.FINISHED;
                        } else if (finalApplicationStatus == FinalApplicationStatus.KILLED) {
                            return TaskStatus.KILLED;
                        } else {
                            return TaskStatus.RUNNING;
                        }
                    case FAILED:
                        return TaskStatus.FAILED;
                    default:
                        throw new PluginDefineException("Unsupported application state");
                }
            } catch (Exception e) {
                logger.error("", e);
                return TaskStatus.NOTFOUND;
            }
        }, yarnConf, false);
    } catch (Exception e) {
        logger.error("", e);
        return TaskStatus.NOTFOUND;
    }
}
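A caller typically polls this mapping until the job reaches a terminal state. A minimal sketch, assuming TaskStatus is an enum whose terminal values are FINISHED, FAILED, and KILLED (as the mapping above suggests); client and jobIdentifier are hypothetical variables, and exception handling is omitted:

// Hypothetical polling loop over getJobStatus; the 5s interval and the
// terminal set are assumptions for illustration.
EnumSet<TaskStatus> terminal = EnumSet.of(TaskStatus.FINISHED, TaskStatus.FAILED, TaskStatus.KILLED);
TaskStatus status = client.getJobStatus(jobIdentifier);
while (!terminal.contains(status)) {
    Thread.sleep(5_000L);
    status = client.getJobStatus(jobIdentifier);
}

Note that NOTFOUND is returned on any lookup failure, so a production poller would also bound its retries rather than loop on that status indefinitely.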
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.
The class SparkYarnClient, method submitJobWithJar.
private JobResult submitJobWithJar(JobClient jobClient) {
    setHadoopUserName(sparkYarnConfig);
    JobParam jobParam = new JobParam(jobClient);
    String mainClass = jobParam.getMainClass();
    // only HDFS paths are supported
    String jarPath = jobParam.getJarPath();
    String appName = jobParam.getJobName();
    String exeArgsStr = jobParam.getClassArgs();
    if (!jarPath.startsWith(HDFS_PREFIX)) {
        throw new PluginDefineException("spark jar path protocol must be " + HDFS_PREFIX);
    }
    if (Strings.isNullOrEmpty(appName)) {
        throw new PluginDefineException("spark jar must set app name!");
    }
    String[] appArgs = new String[]{};
    if (StringUtils.isNotBlank(exeArgsStr)) {
        appArgs = exeArgsStr.split("\\s+");
    }
    List<String> argList = new ArrayList<>();
    argList.add("--jar");
    argList.add(jarPath);
    argList.add("--class");
    argList.add(mainClass);
    for (String appArg : appArgs) {
        if (StringUtils.isBlank(appArg)) {
            continue;
        }
        argList.add("--arg");
        argList.add(appArg);
    }
    ClientArguments clientArguments = new ClientArguments(argList.toArray(new String[argList.size()]));
    SparkConf sparkConf = buildBasicSparkConf(jobClient);
    sparkConf.setAppName(appName);
    setSparkLog4jLocalFilePath(sparkConf, jobClient);
    fillExtSparkConf(sparkConf, jobClient.getConfProperties());
    setSparkLog4jConfiguration(sparkConf);
    ApplicationId appId = null;
    try {
        ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
        clientExt.setSparkYarnConfig(sparkYarnConfig);
        String proxyUserName = sparkYarnConfig.getDtProxyUserName();
        if (StringUtils.isNotBlank(proxyUserName)) {
            logger.info("jobId {} ugi proxyUser is {}", jobClient.getJobId(), proxyUserName);
            appId = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser())
                    .doAs((PrivilegedExceptionAction<ApplicationId>) () -> clientExt.submitApplication(jobClient.getApplicationPriority()));
        } else {
            appId = clientExt.submitApplication(jobClient.getApplicationPriority());
        }
        return JobResult.createSuccessResult(appId.toString());
    } catch (Exception ex) {
        logger.error("", ex);
        return JobResult.createErrorResult("submit job get unknown error\n" + ExceptionUtil.getErrorMessage(ex));
    }
}
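Because exeArgsStr is split on whitespace, every token becomes its own --arg entry, and quoted arguments containing spaces are not preserved. For a hypothetical invocation:

// Hypothetical inputs, for illustration only.
String jarPath = "hdfs://ns1/dtInsight/jars/wordcount.jar";
String mainClass = "com.example.WordCount";
String exeArgsStr = "--input /tmp/in --output /tmp/out";
// the argList handed to ClientArguments becomes:
// [--jar, hdfs://ns1/dtInsight/jars/wordcount.jar, --class, com.example.WordCount,
//  --arg, --input, --arg, /tmp/in, --arg, --output, --arg, /tmp/out]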
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.
The class AddJarOperator, method parseSql.
public static JarFileInfo parseSql(String sql) {
    Matcher matcher = pattern.matcher(sql);
    if (!matcher.find()) {
        throw new PluginDefineException("not a addJar operator:" + sql);
    }
    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(matcher.group(1));
    if (matcher.groupCount() == 3) {
        jarFileInfo.setMainClass(matcher.group(3));
    }
    return jarFileInfo;
}
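The pattern field is not shown on this page. A plausible definition, matching statements of the form ADD JAR WITH <path> [AS <mainClass>] — the actual regex in Taier may differ:

// Hypothetical regex: group(1) is the jar path, group(3) the optional main class.
private static final Pattern pattern = Pattern.compile(
        "(?i)^add\\s+jar\\s+with\\s+(\\S+)(\\s+as\\s+(\\S+))?\\s*$");

One caveat about the guard above: Matcher.groupCount() returns the number of capturing groups defined in the pattern, not the number that matched, so with a three-group pattern the groupCount() == 3 check always passes; group(3) is simply null when no AS clause is present.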