
Example 6 with TenantComponent

Use of com.dtstack.taier.dao.domain.TenantComponent in project Taier by DTStack.

In the class BatchSparkHiveSqlExeService, the method executeSql:

/**
 * Execute the SQL.
 *
 * @param executeContent  execution context carrying the SQL text, tenant, user, task and parse result
 * @param scheduleJobType schedule job type of the task (e.g. SPARK_SQL)
 * @return execution result
 */
protected ExecuteResultVO executeSql(ExecuteContent executeContent, EScheduleJobType scheduleJobType) {
    // Check the lineage parse result to guard against a NullPointerException
    if (null == executeContent.getParseResult()) {
        throw new RdosDefineException("SQL解析异常,结果为空");
    }
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    String preJobId = executeContent.getPreJobId();
    String currDb = executeContent.getParseResult().getCurrentDb();
    ParseResult parseResult = executeContent.getParseResult();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, scheduleJobType.getType());
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    if (Objects.nonNull(parseResult) && Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
        result = simpleQuery(tenantId, parseResult, currDb, userId, scheduleJobType);
        if (!result.getContinue()) {
            return result;
        }
    }
    DataSourceType dataSourceType = scheduleJobType == EScheduleJobType.SPARK_SQL ? DataSourceType.SPARKTHRIFT2_1 : null;
    if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
        String jobId = batchHadoopSelectSqlService.runSqlByTask(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, scheduleJobType.getType(), preJobId);
        result.setJobId(jobId);
        result.setContinue(false);
        return result;
    } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
        String jobId = batchHadoopSelectSqlService.runSqlByTask(tenantId, parseResult, userId, currDb.toLowerCase(), taskId, scheduleJobType.getType(), preJobId);
        result.setJobId(jobId);
    } else {
        if (!executeContent.isExecuteSqlLater()) {
            TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
            Preconditions.checkNotNull(tenantEngine, "引擎不能为空");
            if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), scheduleJobType);
            } else {
                this.exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, dataSourceType);
            }
        }
    }
    result.setContinue(true);
    return result;
}
Also used: ParseResult(com.dtstack.taier.develop.sql.ParseResult), RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException), DataSourceType(com.dtstack.taier.common.enums.DataSourceType), ExecuteResultVO(com.dtstack.taier.develop.dto.devlop.ExecuteResultVO), List(java.util.List), TenantComponent(com.dtstack.taier.dao.domain.TenantComponent)
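
As a small illustration of the dispatch above, the following hedged sketch isolates the branch condition that decides whether a parsed statement is handed to the engine via runSqlByTask or handled directly. The helper method runsOnEngine is hypothetical and not part of Taier; the SqlType constants, the getSqlType() accessor and the useSelfFunction flag are taken from the example.

// Hypothetical helper, sketching the branch condition used in executeSql above.
// Statements matching this predicate are submitted to the engine (runSqlByTask);
// everything else (e.g. CREATE / CREATE LIKE) is handled directly.
private boolean runsOnEngine(ParseResult parseResult, boolean useSelfFunction) {
    SqlType sqlType = parseResult.getSqlType();
    return SqlType.CREATE_AS.equals(sqlType)
            || SqlType.INSERT.equals(sqlType)
            || SqlType.INSERT_OVERWRITE.equals(sqlType)
            || SqlType.QUERY.equals(sqlType)
            || useSelfFunction;
}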

Example 7 with TenantComponent

Use of com.dtstack.taier.dao.domain.TenantComponent in project Taier by DTStack.

In the class BatchSparkSqlExeService, the method batchExecuteSql:

@Override
public ExecuteSqlParseVO batchExecuteSql(ExecuteContent executeContent) {
    String preJobId = executeContent.getPreJobId();
    Integer taskType = executeContent.getTaskType();
    String currDb = executeContent.getParseResult().getCurrentDb();
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    List<ParseResult> parseResultList = executeContent.getParseResultList();
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, executeContent.getTaskType());
    ExecuteSqlParseVO executeSqlParseVO = new ExecuteSqlParseVO();
    List<SqlResultVO> sqlIdList = Lists.newArrayList();
    List<String> sqlList = Lists.newArrayList();
    BuildSqlVO buildSqlVO = new BuildSqlVO();
    for (ParseResult parseResult : parseResultList) {
        // Simple query
        if (Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
            result = simpleQuery(tenantId, parseResult, currDb, userId, EScheduleJobType.SPARK_SQL);
            if (!result.getContinue()) {
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlId(result.getJobId());
                sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
                sqlIdList.add(sqlResultVO);
                continue;
            }
        }
        if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, taskType);
            SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), false, taskId, taskType);
            // INSERT and INSERT OVERWRITE produce no result set
            SqlResultVO sqlResultVO = new SqlResultVO();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else {
            if (!executeContent.isExecuteSqlLater()) {
                TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
                Preconditions.checkNotNull(tenantEngine, "引擎不能为空");
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlText(parseResult.getStandardSql());
                sqlResultVO.setType(SqlTypeEnums.NO_SELECT_DATA.getType());
                if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                    executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), EScheduleJobType.SPARK_SQL);
                    sqlIdList.add(sqlResultVO);
                } else {
                    exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, DataSourceType.Spark);
                    sqlResultVO.setResult(result.getResult());
                    sqlIdList.add(sqlResultVO);
                }
            }
        }
    }
    String sqlToEngine = StringUtils.join(sqlList, ";");
    // Apart from simple queries, all other SQL is sent to the engine for execution
    String jobId = batchHadoopSelectSqlService.sendSqlTask(tenantId, sqlToEngine, buildSqlVO.getTaskParam(), preJobId, taskId, executeContent.getTaskType());
    // Record the id of the SQL sent to the engine
    selectSqlService.addSelectSql(jobId, StringUtils.EMPTY, 0, tenantId, sqlToEngine, userId, StringUtils.EMPTY, taskType);
    sqlIdList.sort(Comparator.comparingInt(SqlResultVO::getType));
    executeSqlParseVO.setJobId(jobId);
    executeSqlParseVO.setSqlIdList(sqlIdList);
    return executeSqlParseVO;
}
Also used: ExecuteSqlParseVO(com.dtstack.taier.develop.dto.devlop.ExecuteSqlParseVO), SqlResultVO(com.dtstack.taier.develop.dto.devlop.SqlResultVO), ParseResult(com.dtstack.taier.develop.sql.ParseResult), BuildSqlVO(com.dtstack.taier.develop.dto.devlop.BuildSqlVO), ExecuteResultVO(com.dtstack.taier.develop.dto.devlop.ExecuteResultVO), List(java.util.List), TenantComponent(com.dtstack.taier.dao.domain.TenantComponent)
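
A minimal sketch of how a caller might consume the ExecuteSqlParseVO returned above. The getters getJobId(), getSqlIdList() and getSqlId() are assumptions that mirror the setters used in the example (setJobId, setSqlIdList, setSqlId); getType() is taken from the sort comparator in the example.

// Minimal consumption sketch, assuming getters matching the setters used above.
ExecuteSqlParseVO parseVO = batchSparkSqlExeService.batchExecuteSql(executeContent);
// one engine job id covers every statement that was joined and sent to the engine
String engineJobId = parseVO.getJobId();                       // assumed getter
for (SqlResultVO sqlResult : parseVO.getSqlIdList()) {         // assumed getter
    if (Objects.equals(SqlTypeEnums.SELECT_DATA.getType(), sqlResult.getType())) {
        // statements with result sets: fetch rows later using the per-statement sqlId
        String sqlId = sqlResult.getSqlId();                   // assumed getter
    }
}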

Example 8 with TenantComponent

Use of com.dtstack.taier.dao.domain.TenantComponent in project Taier by DTStack.

In the class BatchSqlExeService, the method parseSql:

/**
 * Parse the SQL.
 *
 * @param executeContent execution context carrying the SQL text, tenant and task type
 * @return the parse result; on failure, a ParseResult carrying the failure message and the formatted standard SQL
 */
private ParseResult parseSql(final ExecuteContent executeContent) {
    String dbName = this.getDbName(executeContent);
    executeContent.setDatabase(dbName);
    TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
    Preconditions.checkNotNull(tenantEngine, String.format("tenantEngine %d not support hadoop engine.", executeContent.getTenantId()));
    SqlParserImpl sqlParser = parserFactory.getSqlParser(ETableType.HIVE);
    ParseResult parseResult = null;
    try {
        parseResult = sqlParser.parseSql(executeContent.getSql(), tenantEngine.getComponentIdentity(), new HashMap<>());
    } catch (final Exception e) {
        LOGGER.error("解析sql异常,sql:{}", executeContent.getSql(), e);
        parseResult = new ParseResult();
        // Even if parsing fails (e.g. for libra), the SQL is still submitted for execution
        if (MultiEngineType.HADOOP.getType() == executeContent.getTaskType()) {
            parseResult.setParseSuccess(false);
        }
        parseResult.setFailedMsg(ExceptionUtils.getStackTrace(e));
        parseResult.setStandardSql(SqlFormatUtil.getStandardSql(executeContent.getSql()));
    }
    return parseResult;
}
Also used: ParseResult(com.dtstack.taier.develop.sql.ParseResult), HashMap(java.util.HashMap), TenantComponent(com.dtstack.taier.dao.domain.TenantComponent), SqlParserImpl(com.dtstack.taier.develop.sql.SqlParserImpl), RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException)
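
A short sketch of how the fallback ParseResult built in the catch block might be handled by a caller. isParseSuccess() and getFailedMsg() are assumed accessors that mirror the setters used above (setParseSuccess, setFailedMsg); getStandardSql() appears in the other examples on this page.

// Sketch of handling the parse-failure fallback, inside BatchSqlExeService
// (parseSql is private, so only this class can call it).
ParseResult parseResult = parseSql(executeContent);
if (!parseResult.isParseSuccess()) {                            // assumed getter for setParseSuccess
    LOGGER.warn("SQL parse failed, submitting the formatted SQL anyway: {}", parseResult.getFailedMsg()); // assumed getter
}
String sqlToSubmit = parseResult.getStandardSql();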

Aggregations

TenantComponent (com.dtstack.taier.dao.domain.TenantComponent): 8
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException): 4
ParseResult (com.dtstack.taier.develop.sql.ParseResult): 3
ExecuteResultVO (com.dtstack.taier.develop.dto.devlop.ExecuteResultVO): 2
List (java.util.List): 2
JSONObject (com.alibaba.fastjson.JSONObject): 1
DataSourceType (com.dtstack.taier.common.enums.DataSourceType): 1
EComponentType (com.dtstack.taier.common.enums.EComponentType): 1
Component (com.dtstack.taier.dao.domain.Component): 1
BuildSqlVO (com.dtstack.taier.develop.dto.devlop.BuildSqlVO): 1
CheckSyntaxResult (com.dtstack.taier.develop.dto.devlop.CheckSyntaxResult): 1
ComponentBindDBDTO (com.dtstack.taier.develop.dto.devlop.ComponentBindDBDTO): 1
ExecuteSqlParseVO (com.dtstack.taier.develop.dto.devlop.ExecuteSqlParseVO): 1
SqlResultVO (com.dtstack.taier.develop.dto.devlop.SqlResultVO): 1
IComponentService (com.dtstack.taier.develop.service.develop.IComponentService): 1
ISqlExeService (com.dtstack.taier.develop.service.develop.ISqlExeService): 1
SqlParserImpl (com.dtstack.taier.develop.sql.SqlParserImpl): 1
ComponentVO (com.dtstack.taier.scheduler.vo.ComponentVO): 1
HashMap (java.util.HashMap): 1
Matcher (java.util.regex.Matcher): 1