use of com.dtstack.taier.develop.sql.ParseResult in project Taier by DTStack.
the class BatchSparkSqlExeService method batchExecuteSql.
@Override
public ExecuteSqlParseVO batchExecuteSql(ExecuteContent executeContent) {
    String preJobId = executeContent.getPreJobId();
    Integer taskType = executeContent.getTaskType();
    String currDb = executeContent.getParseResult().getCurrentDb();
    Long tenantId = executeContent.getTenantId();
    Long userId = executeContent.getUserId();
    Long taskId = executeContent.getTaskId();
    List<ParseResult> parseResultList = executeContent.getParseResultList();
    ExecuteResultVO<List<Object>> result = new ExecuteResultVO<>();
    boolean useSelfFunction = batchFunctionService.validContainSelfFunction(executeContent.getSql(), tenantId, null, executeContent.getTaskType());
    ExecuteSqlParseVO executeSqlParseVO = new ExecuteSqlParseVO();
    List<SqlResultVO> sqlIdList = Lists.newArrayList();
    List<String> sqlList = Lists.newArrayList();
    BuildSqlVO buildSqlVO = new BuildSqlVO();
    for (ParseResult parseResult : parseResultList) {
        // simple query
        if (Objects.nonNull(parseResult.getStandardSql()) && isSimpleQuery(parseResult.getStandardSql()) && !useSelfFunction) {
            result = simpleQuery(tenantId, parseResult, currDb, userId, EScheduleJobType.SPARK_SQL);
            if (!result.getContinue()) {
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlId(result.getJobId());
                sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
                sqlIdList.add(sqlResultVO);
                continue;
            }
        }
        if (SqlType.CREATE_AS.equals(parseResult.getSqlType())) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), true, taskId, taskType);
            SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else if (SqlType.INSERT.equals(parseResult.getSqlType()) || SqlType.INSERT_OVERWRITE.equals(parseResult.getSqlType()) || SqlType.QUERY.equals(parseResult.getSqlType()) || useSelfFunction) {
            buildSqlVO = batchHadoopSelectSqlService.getSqlIdAndSql(tenantId, parseResult, userId, currDb.toLowerCase(), false, taskId, taskType);
            // neither insert nor insert overwrite returns a result set
            SqlResultVO sqlResultVO = new SqlResultVO();
            sqlResultVO.setSqlId(buildSqlVO.getJobId());
            sqlResultVO.setType(SqlTypeEnums.SELECT_DATA.getType());
            sqlIdList.add(sqlResultVO);
            sqlList.add(buildSqlVO.getSql());
        } else {
            if (!executeContent.isExecuteSqlLater()) {
                TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
                Preconditions.checkNotNull(tenantEngine, "引擎不能为空");
                SqlResultVO<List<Object>> sqlResultVO = new SqlResultVO<>();
                sqlResultVO.setSqlText(parseResult.getStandardSql());
                sqlResultVO.setType(SqlTypeEnums.NO_SELECT_DATA.getType());
                if (SqlType.CREATE.equals(parseResult.getSqlType()) || SqlType.CREATE_LIKE.equals(parseResult.getSqlType())) {
                    executeCreateTableSql(parseResult, tenantId, tenantEngine.getComponentIdentity().toLowerCase(), EScheduleJobType.SPARK_SQL);
                    sqlIdList.add(sqlResultVO);
                } else {
                    exeSqlDirect(executeContent, tenantId, parseResult, result, tenantEngine, DataSourceType.Spark);
                    sqlResultVO.setResult(result.getResult());
                    sqlIdList.add(sqlResultVO);
                }
            }
        }
    }
    String sqlToEngine = StringUtils.join(sqlList, ";");
    // except for simple queries, all other sql is sent to the engine for execution
    String jobId = batchHadoopSelectSqlService.sendSqlTask(tenantId, sqlToEngine, buildSqlVO.getTaskParam(), preJobId, taskId, executeContent.getTaskType());
    // record the id of the job sent to the engine
    selectSqlService.addSelectSql(jobId, StringUtils.EMPTY, 0, tenantId, sqlToEngine, userId, StringUtils.EMPTY, taskType);
    sqlIdList.sort(Comparator.comparingInt(SqlResultVO::getType));
    executeSqlParseVO.setJobId(jobId);
    executeSqlParseVO.setSqlIdList(sqlIdList);
    return executeSqlParseVO;
}
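For context, a minimal caller sketch (not from the Taier sources) showing how batchExecuteSql might be driven. The setters setTenantId, setUserId, setTaskId, setTaskType and setSqlList, plus EScheduleJobType#getType and ExecuteSqlParseVO#getJobId, are assumed counterparts of the getters and setters visible in these snippets, not confirmed API.

// Hypothetical caller; setter names are assumed from the getters read above.
ExecuteContent content = new ExecuteContent();
content.setTenantId(1L);
content.setUserId(1L);
content.setTaskId(100L);
content.setTaskType(EScheduleJobType.SPARK_SQL.getType());
content.setSqlList(Lists.newArrayList("select id from demo_table limit 10"));
// a preparation step such as BatchSqlExeService#prepareExecuteContent (shown
// below) must run first so that getParseResult()/getParseResultList() are populated
ExecuteSqlParseVO vo = batchSparkSqlExeService.batchExecuteSql(content);
// the returned VO carries the engine job id plus one SqlResultVO per statement
String jobId = vo.getJobId();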
use of com.dtstack.taier.develop.sql.ParseResult in project Taier by DTStack.
the class BatchSqlExeService method prepareExecuteContent.
/**
 * Performs SQL parsing.
 *
 * @param executeContent
 */
private void prepareExecuteContent(final ExecuteContent executeContent) {
    BatchTask one = batchTaskService.getOneWithError(executeContent.getTaskId());
    String taskParam = one.getTaskParams();
    ISqlExeService sqlExeService = this.multiEngineServiceFactory.getSqlExeService(executeContent.getTaskType());
    String sql = executeContent.getSql();
    // TODO cache lazy table: lineage is not parsed for now; it is unclear how this type of sql should be handled
    if (StringUtils.isNotBlank(sql) && (sql.toLowerCase().trim().startsWith("set") || CACHE_LAZY_SQL_PATTEN.matcher(sql).matches())) {
        // set statements are not parsed
        ParseResult parseResult = new ParseResult();
        parseResult.setParseSuccess(true);
        parseResult.setOriginSql(executeContent.getSql());
        parseResult.setStandardSql(executeContent.getSql());
        executeContent.setParseResult(parseResult);
        return;
    }
    // parse a single sql statement
    if (StringUtils.isNotBlank(executeContent.getSql())) {
        ParseResult parseResult = this.parseSql(executeContent);
        executeContent.setParseResult(parseResult);
        // validate syntax
        if (executeContent.isCheckSyntax()) {
            sqlExeService.checkSingleSqlSyntax(executeContent.getTenantId(), executeContent.getSql(), executeContent.getDatabase(), taskParam);
        }
    }
    // parse sql statements in batch
    List<ParseResult> parseResultList = Lists.newLinkedList();
    if (CollectionUtils.isNotEmpty(executeContent.getSqlList())) {
        String finalTaskParam = taskParam;
        executeContent.getSqlList().forEach(x -> {
            if (!x.trim().startsWith("set")) {
                if (executeContent.isCheckSyntax()) {
                    executeContent.setSql(x);
                    ParseResult batchParseResult = this.parseSql(executeContent);
                    sqlExeService.checkSingleSqlSyntax(executeContent.getTenantId(), x, executeContent.getDatabase(), finalTaskParam);
                    parseResultList.add(batchParseResult);
                }
            } else {
                // set statements are not parsed
                ParseResult batchParseResult = new ParseResult();
                batchParseResult.setParseSuccess(true);
                batchParseResult.setOriginSql(x);
                batchParseResult.setStandardSql(x);
                parseResultList.add(batchParseResult);
            }
        });
        executeContent.setParseResultList(parseResultList);
    }
}
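The set fast path above can be exercised on its own; this sketch uses only the ParseResult setters that appear in the snippet.

// "set" statements bypass the parser and are wrapped in a pre-marked ParseResult
String sql = "set hive.exec.dynamic.partition=true";
ParseResult parseResult = new ParseResult();
parseResult.setParseSuccess(true);   // flagged as parsed even though the parser never ran
parseResult.setOriginSql(sql);       // original text kept verbatim
parseResult.setStandardSql(sql);     // no normalization applied
// downstream code can now treat the statement like any other parsed result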
use of com.dtstack.taier.develop.sql.ParseResult in project Taier by DTStack.
the class BatchSqlExeService method parseSql.
/**
 * Parses sql.
 *
 * @param executeContent
 * @return
 */
private ParseResult parseSql(final ExecuteContent executeContent) {
    String dbName = this.getDbName(executeContent);
    executeContent.setDatabase(dbName);
    TenantComponent tenantEngine = developTenantComponentService.getByTenantAndEngineType(executeContent.getTenantId(), executeContent.getTaskType());
    Preconditions.checkNotNull(tenantEngine, String.format("tenantEngine %d not support hadoop engine.", executeContent.getTenantId()));
    SqlParserImpl sqlParser = parserFactory.getSqlParser(ETableType.HIVE);
    ParseResult parseResult = null;
    try {
        parseResult = sqlParser.parseSql(executeContent.getSql(), tenantEngine.getComponentIdentity(), new HashMap<>());
    } catch (final Exception e) {
        LOGGER.error("解析sql异常,sql:{}", executeContent.getSql(), e);
        parseResult = new ParseResult();
        // even when libra parsing fails, the sql is still submitted for execution
        if (MultiEngineType.HADOOP.getType() == executeContent.getTaskType()) {
            parseResult.setParseSuccess(false);
        }
        parseResult.setFailedMsg(ExceptionUtils.getStackTrace(e));
        parseResult.setStandardSql(SqlFormatUtil.getStandardSql(executeContent.getSql()));
    }
    return parseResult;
}
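On failure the method still returns a usable ParseResult rather than throwing, so callers can branch on the parse flag. A sketch of that contract follows; isParseSuccess() and getFailedMsg() are assumed getter counterparts of the setters used in the catch block, while getStandardSql() does appear in batchExecuteSql above.

ParseResult parseResult = parseSql(executeContent);
if (!parseResult.isParseSuccess()) {
    // HADOOP tasks are marked as failed, but standardSql is still populated,
    // so the statement can still be submitted to the engine
    LOGGER.warn("sql parse failed: {}", parseResult.getFailedMsg());
}
String standardSql = parseResult.getStandardSql();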