Search in sources :

Example 1 with PluginDefineException

use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SparkYarnClient, method buildYarnClient.

/**
 * Creates a new {@link YarnClient}, performing the creation inside a
 * Kerberos login via {@link KerberosUtils} so the client is initialized
 * under the authenticated user when security is enabled.
 *
 * @return a started {@code YarnClient} bound to {@code yarnConf}
 * @throws PluginDefineException if the login or the client initialization fails
 */
private YarnClient buildYarnClient() {
    try {
        return KerberosUtils.login(sparkYarnConfig, () -> {
            logger.info("buildYarnClient, init YarnClient!");
            // Build, configure and start the client inside the login context.
            YarnClient client = YarnClient.createYarnClient();
            client.init(yarnConf);
            client.start();
            return client;
        }, yarnConf, true);
    } catch (Exception e) {
        logger.error("buildYarnClient initSecurity happens error", e);
        throw new PluginDefineException(e);
    }
}
Also used : PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) IOException(java.io.IOException)

Example 2 with PluginDefineException

use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SparkYarnClient, method submitSparkSqlJobForBatch.

/**
 * Submits a Spark batch SQL job to YARN.
 *
 * <p>The SQL is zipped, packed together with the app name / session conf /
 * log level into a JSON payload, URL-encoded, and handed to the SQL proxy
 * main class as a program argument.
 *
 * @param jobClient the job to submit (SQL text, job name, conf properties)
 * @return a success result carrying the YARN application id, or an error
 *         result describing the submission failure
 * @throws PluginDefineException if the parameter payload cannot be serialized/encoded
 */
private JobResult submitSparkSqlJobForBatch(JobClient jobClient) {
    Properties confProp = jobClient.getConfProperties();
    setHadoopUserName(sparkYarnConfig);
    Map<String, Object> paramsMap = new HashMap<>();
    // Compress the SQL text so it fits into a single program argument.
    String zipSql = DtStringUtil.zip(jobClient.getSql());
    paramsMap.put("sql", zipSql);
    paramsMap.put("appName", jobClient.getJobName());
    paramsMap.put("sparkSessionConf", getSparkSessionConf(confProp));
    String logLevel = MathUtil.getString(confProp.get(LOG_LEVEL_KEY));
    if (StringUtils.isNotEmpty(logLevel)) {
        paramsMap.put("logLevel", logLevel);
    }
    String sqlExeJson = null;
    try {
        sqlExeJson = PublicUtil.objToString(paramsMap);
        // URL-encode so the JSON survives being passed as a command-line arg.
        sqlExeJson = URLEncoder.encode(sqlExeJson, Charsets.UTF_8.name());
    } catch (Exception e) {
        logger.error("", e);
        // Wrap with the cause so the original stack trace is preserved
        // (previously only e.getMessage() was kept).
        throw new PluginDefineException(e);
    }
    String sqlProxyClass = sparkYarnConfig.getSparkSqlProxyMainClass();
    List<String> argList = new ArrayList<>();
    argList.add("--jar");
    argList.add(sparkYarnConfig.getSparkSqlProxyPath());
    argList.add("--class");
    argList.add(sqlProxyClass);
    argList.add("--arg");
    argList.add(sqlExeJson);
    ClientArguments clientArguments = new ClientArguments(argList.toArray(new String[argList.size()]));
    SparkConf sparkConf = buildBasicSparkConf(jobClient);
    sparkConf.setAppName(jobClient.getJobName());
    setSparkLog4jLocalFilePath(sparkConf, jobClient);
    fillExtSparkConf(sparkConf, confProp);
    setSparkLog4jConfiguration(sparkConf);
    ApplicationId appId = null;
    try {
        ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
        clientExt.setSparkYarnConfig(sparkYarnConfig);
        String proxyUserName = sparkYarnConfig.getDtProxyUserName();
        if (StringUtils.isNotBlank(proxyUserName)) {
            // Submit as the configured proxy user when one is present.
            logger.info("ugi proxyUser is {}", proxyUserName);
            appId = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser()).doAs((PrivilegedExceptionAction<ApplicationId>) () -> clientExt.submitApplication(jobClient.getApplicationPriority()));
        } else {
            appId = clientExt.submitApplication(jobClient.getApplicationPriority());
        }
        return JobResult.createSuccessResult(appId.toString());
    } catch (Exception ex) {
        // Log the failure (was silently swallowed before), consistent with submitJobWithJar.
        logger.error("", ex);
        return JobResult.createErrorResult("submit job get unknown error\n" + ExceptionUtil.getErrorMessage(ex));
    }
}
Also used : ClientExt(com.dtstack.taier.sparkyarn.sparkext.ClientExt) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) IOException(java.io.IOException) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClientArguments(org.apache.spark.deploy.yarn.ClientArguments) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) SparkConf(org.apache.spark.SparkConf)

Example 3 with PluginDefineException

use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SparkYarnClient, method getJobStatus.

/**
 * Fetches the YARN application report for the given job and maps its
 * {@link YarnApplicationState} (plus, for FINISHED apps, the final status)
 * to a Taier {@code TaskStatus}.
 *
 * <p>Best-effort: any failure while talking to YARN is logged and reported
 * as {@code TaskStatus.NOTFOUND} rather than propagated.
 *
 * @param jobIdentifier carries the YARN application id of the job
 * @return the mapped task status; {@code null} when no application id is set,
 *         {@code NOTFOUND} on lookup failure
 */
@Override
public TaskStatus getJobStatus(JobIdentifier jobIdentifier) throws IOException {
    try {
        return KerberosUtils.login(sparkYarnConfig, () -> {
            String jobId = jobIdentifier.getApplicationId();
            if (StringUtils.isEmpty(jobId)) {
                return null;
            }
            ApplicationId appId = ConverterUtils.toApplicationId(jobId);
            try {
                ApplicationReport report = getYarnClient().getApplicationReport(appId);
                YarnApplicationState applicationState = report.getYarnApplicationState();
                switch(applicationState) {
                    case KILLED:
                        return TaskStatus.KILLED;
                    case NEW:
                    case NEW_SAVING:
                        return TaskStatus.CREATED;
                    case SUBMITTED:
                        // FIXME special-case logic: a job already submitted to the compute
                        // engine is treated as waiting for compute resources.
                        return TaskStatus.WAITCOMPUTE;
                    case ACCEPTED:
                        return TaskStatus.SCHEDULED;
                    case RUNNING:
                        return TaskStatus.RUNNING;
                    case FINISHED:
                        // When the state is FINISHED the finalStatus must also be
                        // checked to tell success from failure/kill.
                        FinalApplicationStatus finalApplicationStatus = report.getFinalApplicationStatus();
                        if (finalApplicationStatus == FinalApplicationStatus.FAILED) {
                            return TaskStatus.FAILED;
                        } else if (finalApplicationStatus == FinalApplicationStatus.SUCCEEDED) {
                            return TaskStatus.FINISHED;
                        } else if (finalApplicationStatus == FinalApplicationStatus.KILLED) {
                            return TaskStatus.KILLED;
                        } else {
                            // e.g. UNDEFINED: the app is still winding down, keep RUNNING.
                            return TaskStatus.RUNNING;
                        }
                    case FAILED:
                        return TaskStatus.FAILED;
                    default:
                        throw new PluginDefineException("Unsupported application state");
                }
            } catch (Exception e) {
                // Deliberate swallow: status polling must not fail hard.
                logger.error("", e);
                return TaskStatus.NOTFOUND;
            }
        }, yarnConf, false);
    } catch (Exception e) {
        logger.error("", e);
        return TaskStatus.NOTFOUND;
    }
}
Also used : ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) FinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) IOException(java.io.IOException)

Example 4 with PluginDefineException

use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SparkYarnClient, method submitJobWithJar.

/**
 * Submits a Spark jar job to YARN.
 *
 * <p>Validates the jar path (must be on HDFS) and the app name, splits the
 * class arguments on whitespace, builds the Spark client arguments and conf,
 * then submits — optionally as the configured proxy user.
 *
 * @param jobClient the job to submit (jar path, main class, args, conf)
 * @return a success result carrying the YARN application id, or an error
 *         result describing the submission failure
 * @throws PluginDefineException if the jar path is not on HDFS or the app name is empty
 */
private JobResult submitJobWithJar(JobClient jobClient) {
    setHadoopUserName(sparkYarnConfig);
    JobParam jobParam = new JobParam(jobClient);
    String mainClass = jobParam.getMainClass();
    // Only HDFS jar paths are supported.
    String jarPath = jobParam.getJarPath();
    String appName = jobParam.getJobName();
    String exeArgsStr = jobParam.getClassArgs();
    if (!jarPath.startsWith(HDFS_PREFIX)) {
        throw new PluginDefineException("spark jar path protocol must be " + HDFS_PREFIX);
    }
    if (Strings.isNullOrEmpty(appName)) {
        throw new PluginDefineException("spark jar must set app name!");
    }
    String[] appArgs = new String[] {};
    if (StringUtils.isNotBlank(exeArgsStr)) {
        appArgs = exeArgsStr.split("\\s+");
    }
    List<String> argList = new ArrayList<>();
    argList.add("--jar");
    argList.add(jarPath);
    argList.add("--class");
    argList.add(mainClass);
    for (String appArg : appArgs) {
        if (StringUtils.isBlank(appArg)) {
            continue;
        }
        argList.add("--arg");
        argList.add(appArg);
    }
    ClientArguments clientArguments = new ClientArguments(argList.toArray(new String[argList.size()]));
    SparkConf sparkConf = buildBasicSparkConf(jobClient);
    sparkConf.setAppName(appName);
    setSparkLog4jLocalFilePath(sparkConf, jobClient);
    fillExtSparkConf(sparkConf, jobClient.getConfProperties());
    setSparkLog4jConfiguration(sparkConf);
    ApplicationId appId = null;
    try {
        ClientExt clientExt = ClientExtFactory.getClientExt(filesystemManager, clientArguments, yarnConf, sparkConf);
        clientExt.setSparkYarnConfig(sparkYarnConfig);
        String proxyUserName = sparkYarnConfig.getDtProxyUserName();
        if (StringUtils.isNotBlank(proxyUserName)) {
            // Submit as the configured proxy user when one is present.
            logger.info("jobId {} ugi proxyUser is {}", jobClient.getJobId(), proxyUserName);
            appId = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser()).doAs((PrivilegedExceptionAction<ApplicationId>) () -> clientExt.submitApplication(jobClient.getApplicationPriority()));
        } else {
            appId = clientExt.submitApplication(jobClient.getApplicationPriority());
        }
        return JobResult.createSuccessResult(appId.toString());
    } catch (Exception ex) {
        // Was logger.info: a failed submission is an error, log it as one.
        logger.error("", ex);
        return JobResult.createErrorResult("submit job get unknown error\n" + ExceptionUtil.getErrorMessage(ex));
    }
}
Also used : ClientExt(com.dtstack.taier.sparkyarn.sparkext.ClientExt) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClientArguments(org.apache.spark.deploy.yarn.ClientArguments) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) SparkConf(org.apache.spark.SparkConf) IOException(java.io.IOException)

Example 5 with PluginDefineException

use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class AddJarOperator, method parseSql.

/**
 * Parses an "add jar" SQL statement into a {@link JarFileInfo}.
 *
 * <p>Group 1 of the class-level {@code pattern} is the jar path; when the
 * pattern defines three groups, group 3 carries the main class.
 *
 * @param sql the statement to parse
 * @return the extracted jar path and optional main class
 * @throws PluginDefineException if the statement does not match the addJar pattern
 */
public static JarFileInfo parseSql(String sql) {
    Matcher matcher = pattern.matcher(sql);
    if (matcher.find()) {
        JarFileInfo info = new JarFileInfo();
        info.setJarPath(matcher.group(1));
        if (matcher.groupCount() == 3) {
            info.setMainClass(matcher.group(3));
        }
        return info;
    }
    throw new PluginDefineException("not a addJar operator:" + sql);
}
Also used : JarFileInfo(com.dtstack.taier.pluginapi.JarFileInfo) Matcher(java.util.regex.Matcher) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException)

Aggregations

PluginDefineException (com.dtstack.taier.pluginapi.exception.PluginDefineException)58 IOException (java.io.IOException)30 File (java.io.File)13 MalformedURLException (java.net.MalformedURLException)13 YarnClient (org.apache.hadoop.yarn.client.api.YarnClient)11 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)10 JarFileInfo (com.dtstack.taier.pluginapi.JarFileInfo)8 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)8 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)8 ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport)7 YarnApplicationState (org.apache.hadoop.yarn.api.records.YarnApplicationState)7 ClusterClient (org.apache.flink.client.program.ClusterClient)6 JSONObject (com.alibaba.fastjson.JSONObject)5 Configuration (org.apache.flink.configuration.Configuration)5 Path (org.apache.hadoop.fs.Path)5 KerberosUtils (com.dtstack.taier.base.util.KerberosUtils)4 FlinkConfig (com.dtstack.taier.flink.FlinkConfig)4 ConfigConstant (com.dtstack.taier.pluginapi.constrant.ConfigConstant)4 URL (java.net.URL)4 Matcher (java.util.regex.Matcher)4