Usage of com.dtstack.taier.develop.dto.devlop.ExecuteResultVO in the Taier project by DTStack.
From class BatchSparkHiveSqlExeService, method executeSql:
/**
 * Executes a parsed SQL statement on a Spark/Hive engine.
 *
 * <p>Simple queries are answered directly; CREATE-AS / INSERT / QUERY statements (or SQL that
 * uses user-defined functions) are submitted to the engine as an async job whose id is returned
 * in the result; DDL such as CREATE / CREATE LIKE is executed synchronously.
 *
 * @param executeContent  execution context (tenant, user, task, SQL and its parse result)
 * @param scheduleJobType engine job type (e.g. SPARK_SQL)
 * @return execution result; {@code getContinue()} tells the caller whether to keep processing
 * @throws RdosDefineException if the SQL parse result is missing
 */
protected ExecuteResultVO executeSql(ExecuteContent executeContent, EScheduleJobType scheduleJobType) {
    // Guard against a failed lineage/SQL parse to avoid NPEs downstream.
    if (null == executeContent.getParseResult()) {
        throw new RdosDefineException("SQL解析异常,结果为空");
    }
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    String preJobId = executeContent.getPreJobId();
    String currDb = executeContent.getParseResult().getCurrentDb();
    ParseResult parseResult = executeContent.getParseResult();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, scheduleJobType.getType());
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    // parseResult is guaranteed non-null by the guard above, so only the standard SQL
    // needs a null check here (the original Objects.nonNull(parseResult) was dead code).
    if (Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
        result = simpleQuery(tenantId, parseResult, currDb, userId, scheduleJobType);
        if (!result.getContinue()) {
            // The simple-query path produced a final answer; nothing more to do.
            return result;
        }
    }
    // NOTE(review): dataSourceType is null for non-SPARK_SQL job types and is later passed
    // to exeSqlDirect — presumably handled there; confirm against that implementation.
    DataSourceType dataSourceType = scheduleJobType == EScheduleJobType.SPARK_SQL ? DataSourceType.SPARKTHRIFT2_1 : null;
    if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
        // CREATE TABLE ... AS: submit as an async engine job and stop further processing.
        String jobId = batchHadoopSelectSqlService.runSqlByTask(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, scheduleJobType.getType(), preJobId);
        result.setJobId(jobId);
        result.setContinue(false);
        return result;
    } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
        // DML / non-trivial queries also run asynchronously on the engine.
        String jobId = batchHadoopSelectSqlService.runSqlByTask(tenantId, parseResult, userId, currDb.toLowerCase(), taskId, scheduleJobType.getType(), preJobId);
        result.setJobId(jobId);
    } else {
        // Remaining statement types (DDL etc.) execute synchronously unless deferred.
        if (!executeContent.isExecuteSqlLater()) {
            TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
            Preconditions.checkNotNull(tenantEngine, "引擎不能为空");
            if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), scheduleJobType);
            } else {
                this.exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, dataSourceType);
            }
        }
    }
    result.setContinue(true);
    return result;
}
Usage of com.dtstack.taier.develop.dto.devlop.ExecuteResultVO in the Taier project by DTStack.
From class BatchSparkSqlExeService, method batchExecuteSql:
/**
 * Executes a list of parsed SQL statements as one batch submission.
 *
 * <p>For each parse result: simple queries are answered immediately; CREATE-AS / INSERT /
 * INSERT OVERWRITE / QUERY statements (or SQL using user-defined functions) are collected
 * into a single semicolon-joined script that is sent to the engine once at the end; other
 * statements (DDL etc.) are executed synchronously unless execution is deferred.
 *
 * @param executeContent execution context (tenant, user, task, SQL and parse results)
 * @return a VO carrying the engine job id plus per-statement sub-results, sorted by type
 */
@Override
public ExecuteSqlParseVO batchExecuteSql(ExecuteContent executeContent) {
    String preJobId = executeContent.getPreJobId();
    Integer taskType = executeContent.getTaskType();
    String currDb = executeContent.getParseResult().getCurrentDb();
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    List<ParseResult> parseResultList = executeContent.getParseResultList();
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, executeContent.getTaskType());
    ExecuteSqlParseVO executeSqlParseVO = new ExecuteSqlParseVO();
    // Per-statement results and the SQL fragments to be joined and sent to the engine.
    List<SqlResultVO> sqlIdList = Lists.newArrayList();
    List<String> sqlList = Lists.newArrayList();
    // NOTE(review): buildSqlVO keeps the value of the LAST loop iteration that assigned it;
    // its taskParam is used for the whole batch below — confirm this is intentional.
    BuildSqlVO buildSqlVO = new BuildSqlVO();
    for (ParseResult parseResult : parseResultList) {
        // Simple query: answered directly without going through the engine.
        if (Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
            result = simpleQuery(tenantId, parseResult, currDb, userId, EScheduleJobType.SPARK_SQL);
            if (!result.getContinue()) {
                // Finished locally — record the sub-result and move to the next statement.
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlId(result.getJobId());
                sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
                sqlIdList.add(sqlResultVO);
                continue;
            }
        }
        if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
            // CREATE TABLE ... AS: build the engine SQL and queue it for batch submission.
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, taskType);
            SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), false, taskId, taskType);
            // INSERT and INSERT OVERWRITE return no result rows.
            SqlResultVO sqlResultVO = new SqlResultVO();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else {
            // Other statement types (e.g. DDL) run synchronously unless deferred.
            if (!executeContent.isExecuteSqlLater()) {
                TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
                Preconditions.checkNotNull(tenantEngine, "引擎不能为空");
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlText(parseResult.getStandardSql());
                sqlResultVO.setType(SqlTypeEnums.NO_SELECT_DATA.getType());
                if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                    executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), EScheduleJobType.SPARK_SQL);
                    sqlIdList.add(sqlResultVO);
                } else {
                    exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, DataSourceType.Spark);
                    sqlResultVO.setResult(result.getResult());
                    sqlIdList.add(sqlResultVO);
                }
            }
        }
    }
    String sqlToEngine = StringUtils.join(sqlList, ";");
    // Everything except simple queries is sent to the engine as a single script.
    String jobId = batchHadoopSelectSqlService.sendSqlTask(tenantId, sqlToEngine, buildSqlVO.getTaskParam(), preJobId, taskId, executeContent.getTaskType());
    // Record the job id that was sent to the engine.
    selectSqlService.addSelectSql(jobId, StringUtils.EMPTY, 0, tenantId, sqlToEngine, userId, StringUtils.EMPTY, taskType);
    sqlIdList.sort(Comparator.comparingInt(SqlResultVO::getType));
    executeSqlParseVO.setJobId(jobId);
    executeSqlParseVO.setSqlIdList(sqlIdList);
    return executeSqlParseVO;
}
Usage of com.dtstack.taier.develop.dto.devlop.ExecuteResultVO in the Taier project by DTStack.
From class BatchSqlExeService, method executeSql:
/**
 * Executes SQL by preparing the execution context and delegating to the
 * engine-specific {@link ISqlExeService} resolved from the task type.
 *
 * @param executeContent execution context carrying the SQL and task type
 * @return the execution result; returned as-is when the engine signals no continuation,
 *         otherwise merged (non-null properties) into a result that carries the SQL text
 * @throws Exception propagated from preparation or engine execution
 */
@Forbidden
public ExecuteResultVO executeSql(final ExecuteContent executeContent) throws Exception {
    // Pre-processing step on the content before its SQL text is read.
    this.prepareExecuteContent(executeContent);
    final ExecuteResultVO result = new ExecuteResultVO();
    result.setSqlText(executeContent.getSql());
    // Resolve the engine-specific executor for this task type and run the SQL.
    final ISqlExeService exeService = this.multiEngineServiceFactory.getSqlExeService(executeContent.getTaskType());
    final ExecuteResultVO engineResult = exeService.executeSql(executeContent);
    if (!engineResult.getContinue()) {
        return engineResult;
    }
    // Overlay the engine's non-null properties onto the result holding the SQL text.
    PublicUtil.copyPropertiesIgnoreNull(engineResult, result);
    return result;
}
Aggregations