use of com.dtstack.taier.develop.dto.devlop.BuildSqlVO in project Taier by DTStack.
the class BatchHadoopSelectSqlService method getSqlIdAndSql.
/**
 * Get the job id for a query sql together with the sql after parameters are applied
 *
 * @param tenantId    tenant id
 * @param parseResult parsed sql
 * @param userId      user id
 * @param database    target database
 * @param isCreateAs  whether the statement is CREATE ... AS
 * @param taskId      task id
 * @param taskType    task type
 * @return the built sql wrapper, with the generated job id set
 */
public BuildSqlVO getSqlIdAndSql(Long tenantId, ParseResult parseResult, Long userId, String database, Boolean isCreateAs, Long taskId, Integer taskType) {
    BuildSqlVO buildSqlVO = buildSql(parseResult, tenantId, userId, database, isCreateAs, taskId);
    String jobId = UUID.randomUUID().toString();
    // record the job
    batchSelectSqlService.addSelectSql(jobId, buildSqlVO.getTempTable(), buildSqlVO.getIsSelectSql(), tenantId, buildSqlVO.getOriginSql(), userId, buildSqlVO.getParsedColumns(), taskType);
    buildSqlVO.setJobId(jobId);
    return buildSqlVO;
}
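For orientation, a hedged usage sketch, not Taier source: it assumes a Spring-injected BatchHadoopSelectSqlService, a parseResult and taskType produced by the surrounding parse step, and placeholder literals for the remaining arguments.

// Hedged sketch: wiring and placeholder values are assumptions, not Taier code.
Long tenantId = 1L, userId = 1L, taskId = 100L; // placeholder ids
BuildSqlVO vo = batchHadoopSelectSqlService.getSqlIdAndSql(
        tenantId, parseResult, userId, "dev", false, taskId, taskType);
String jobId = vo.getJobId();   // the UUID recorded through addSelectSql above
String engineSql = vo.getSql(); // the rewritten sql later sent to the engine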
use of com.dtstack.taier.develop.dto.devlop.BuildSqlVO in project Taier by DTStack.
the class BatchHadoopSelectSqlService method buildSql.
/**
 * Parse the sql and build the statement to execute
 *
 * @param parseResult parsed sql
 * @param tenantId    tenant id
 * @param userId      user id
 * @param database    target database
 * @param isCreateAs  whether the statement is CREATE ... AS
 * @param taskId      task id
 * @return the built sql wrapper, or null when the original sql is empty
 */
public BuildSqlVO buildSql(ParseResult parseResult, Long tenantId, Long userId, String database, Boolean isCreateAs, Long taskId) {
    BatchTask batchTask = batchTaskService.getBatchTaskById(taskId);
    String originSql = parseResult.getStandardSql();
    if (StringUtils.isEmpty(originSql)) {
        return null;
    }
    // generate the temporary table name
    String tempTable = TEMP_TABLE_PREFIX + System.nanoTime();
    String parsedColumns = "";
    int isSelectSql;
    Matcher withMatcher = WITH_SQL_PATTERN.matcher(originSql);
    String sql = null;
    if (SqlType.CREATE.equals(parseResult.getSqlType())) {
        isSelectSql = TempJobType.CREATE.getType();
        sql = buildCustomFunctionAndDbSql(originSql, tenantId, database, true, batchTask.getTaskType());
    } else if (isCreateAs) {
        isSelectSql = TempJobType.CREATE_AS.getType();
        sql = buildCustomFunctionAndDbSql(originSql, tenantId, database, true, batchTask.getTaskType());
    } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType())) {
        isSelectSql = TempJobType.INSERT.getType();
        sql = buildCustomFunctionAndDbSql(originSql, tenantId, database, true, batchTask.getTaskType());
    } else if (withMatcher.find()) {
        TempJobType jobType = getTempJobType(withMatcher.group("option"));
        isSelectSql = jobType.getType();
        sql = formatSql(jobType, database, tempTable, originSql);
    } else {
        isSelectSql = TempJobType.SELECT.getType();
        sql = buildSelectSqlCustomFunction(originSql, tenantId, database, tempTable, batchTask.getTaskType());
    }
    // set the required environment parameters
    String taskParam = batchTask.getTaskParams();
    BuildSqlVO buildSqlVO = new BuildSqlVO();
    buildSqlVO.setSql(sql);
    buildSqlVO.setTaskParam(taskParam);
    buildSqlVO.setIsSelectSql(isSelectSql);
    buildSqlVO.setOriginSql(originSql);
    buildSqlVO.setParsedColumns(parsedColumns);
    buildSqlVO.setTenantId(tenantId);
    buildSqlVO.setTempTable(tempTable);
    buildSqlVO.setUserId(userId);
    return buildSqlVO;
}
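The WITH branch above relies on WITH_SQL_PATTERN exposing a named capture group called "option", which getTempJobType() dispatches on. The pattern itself is not shown on this page, so the one below is a hypothetical stand-in; the sketch only demonstrates how a named-group match of that shape works and is runnable as-is.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class WithSqlPatternSketch {
    // Hypothetical stand-in for WITH_SQL_PATTERN: a CTE followed by the statement
    // keyword, captured into the named group "option".
    private static final Pattern WITH_SQL_PATTERN =
            Pattern.compile("(?i)^with[\\s\\S]+?\\)\\s*(?<option>select|insert|create)\\b");

    public static void main(String[] args) {
        Matcher m = WITH_SQL_PATTERN.matcher("WITH t AS (SELECT 1) SELECT * FROM t");
        if (m.find()) {
            System.out.println(m.group("option")); // prints: SELECT
        }
    }
}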
use of com.dtstack.taier.develop.dto.devlop.BuildSqlVO in project Taier by DTStack.
the class BatchHadoopSelectSqlService method runSqlByTask.
/**
 * Run the sql as a task
 */
@Override
public String runSqlByTask(Long tenantId, ParseResult parseResult, Long userId, String database, Boolean isCreateAs, Long taskId, Integer taskType, String preJobId) {
    try {
        BuildSqlVO buildSqlVO = buildSql(parseResult, tenantId, userId, database, isCreateAs, taskId);
        // buildSql returns null when the original sql is empty; guard before dereferencing
        if (buildSqlVO == null) {
            throw new RdosDefineException("sql can not be empty");
        }
        // send the sql task
        sendSqlTask(tenantId, buildSqlVO.getSql(), buildSqlVO.getTaskParam(), preJobId, taskId, taskType);
        // record the job
        batchSelectSqlService.addSelectSql(preJobId, buildSqlVO.getTempTable(), buildSqlVO.getIsSelectSql(), tenantId, parseResult.getOriginSql(), userId, buildSqlVO.getParsedColumns(), taskType);
        return preJobId;
    } catch (Exception e) {
        throw new RdosDefineException("failed to execute sql as a task", e);
    }
}
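Unlike getSqlIdAndSql, runSqlByTask takes a caller-supplied preJobId and submits the statement immediately. A hedged calling sketch, with the same placeholder wiring as the earlier fragment; the pre-generated UUID mirrors the id scheme used in getSqlIdAndSql.

// Hedged sketch: pre-generate the job id so the job can be tracked even
// before submission returns; runSqlByTask echoes the same id back on success.
String preJobId = UUID.randomUUID().toString();
String jobId = batchHadoopSelectSqlService.runSqlByTask(
        tenantId, parseResult, userId, "dev", false, taskId, taskType, preJobId);
// jobId.equals(preJobId) holds, per the return statement above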
use of com.dtstack.taier.develop.dto.devlop.BuildSqlVO in project Taier by DTStack.
the class BatchSparkSqlExeService method batchExecuteSql.
@Override
public ExecuteSqlParseVO batchExecuteSql(ExecuteContent executeContent) {
    String preJobId = executeContent.getPreJobId();
    Integer taskType = executeContent.getTaskType();
    String currDb = executeContent.getParseResult().getCurrentDb();
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    List<ParseResult> parseResultList = executeContent.getParseResultList();
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, executeContent.getTaskType());
    ExecuteSqlParseVO executeSqlParseVO = new ExecuteSqlParseVO();
    List<SqlResultVO> sqlIdList = Lists.newArrayList();
    List<String> sqlList = Lists.newArrayList();
    BuildSqlVO buildSqlVO = new BuildSqlVO();
    for (ParseResult parseResult : parseResultList) {
        // simple query
        if (Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
            result = simpleQuery(tenantId, parseResult, currDb, userId, EScheduleJobType.SPARK_SQL);
            if (!result.getContinue()) {
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlId(result.getJobId());
                sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
                sqlIdList.add(sqlResultVO);
                continue;
            }
        }
        if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, taskType);
            SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), false, taskId, taskType);
            // neither insert nor insert overwrite returns a result set
            SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else {
            if (!executeContent.isExecuteSqlLater()) {
                TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
                Preconditions.checkNotNull(tenantEngine, "tenant engine can not be null");
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlText(parseResult.getStandardSql());
                sqlResultVO.setType(SqlTypeEnums.NO_SELECT_DATA.getType());
                if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                    executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), EScheduleJobType.SPARK_SQL);
                    sqlIdList.add(sqlResultVO);
                } else {
                    exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, DataSourceType.Spark);
                    sqlResultVO.setResult(result.getResult());
                    sqlIdList.add(sqlResultVO);
                }
            }
        }
    }
    String sqlToEngine = StringUtils.join(sqlList, ";");
    // apart from simple queries, all other sql is sent to the engine for execution
    String jobId = batchHadoopSelectSqlService.sendSqlTask(tenantId, sqlToEngine, buildSqlVO.getTaskParam(), preJobId, taskId, executeContent.getTaskType());
    // record the id sent to the engine
    selectSqlService.addSelectSql(jobId, StringUtils.EMPTY, 0, tenantId, sqlToEngine, userId, StringUtils.EMPTY, taskType);
    sqlIdList.sort(Comparator.comparingInt(SqlResultVO::getType));
    executeSqlParseVO.setJobId(jobId);
    executeSqlParseVO.setSqlIdList(sqlIdList);
    return executeSqlParseVO;
}
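Two details in batchExecuteSql are easy to miss: the accumulated statements are joined with ";" into a single engine submission, and the result list is sorted by its type code so entries of the same kind are grouped. A self-contained sketch of that join-and-sort step (a plain record stands in for SqlResultVO; requires Java 16+):

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class BatchJoinAndSortSketch {
    // Minimal stand-in for SqlResultVO: only the fields the join and sort touch.
    record Result(String sqlId, int type) {}

    public static void main(String[] args) {
        // Statements collected across the loop go to the engine as one ';'-joined job.
        String sqlToEngine = String.join(";",
                List.of("CREATE TABLE t AS SELECT 1", "INSERT INTO t VALUES (2)"));
        System.out.println(sqlToEngine);

        // Mirrors sqlIdList.sort(Comparator.comparingInt(SqlResultVO::getType)) above.
        List<Result> sorted = List.of(new Result("b", 2), new Result("a", 1)).stream()
                .sorted(Comparator.comparingInt(Result::type))
                .collect(Collectors.toList());
        sorted.forEach(r -> System.out.println(r.sqlId() + " -> " + r.type()));
    }
}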