Use of com.dtstack.taier.pluginapi.JarFileInfo in project Taier by DTStack.
In the class AddJarOperator, method parseSql:
public static JarFileInfo parseSql(String sql) {
    Matcher matcher = pattern.matcher(sql);
    if (!matcher.find()) {
        throw new PluginDefineException("not a addJar operator:" + sql);
    }

    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(matcher.group(1));

    if (matcher.groupCount() == 3) {
        jarFileInfo.setMainClass(matcher.group(3));
    }

    return jarFileInfo;
}
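For orientation, a minimal call sketch follows. The ADD JAR statement text is copied from YarnMockUtil further down; the variable names, the getter calls, and the expectation that group(1) is the jar path and group(3) the optional main class are assumptions drawn from the method body, not verified against the project's regex.
// Hypothetical usage sketch; the statement text mirrors the YarnMockUtil example below,
// and the getters are assumed to match the setters used in parseSql.
String addJarSql = "ADD JAR WITH /data/sftp/21_window_WindowJoin.jar AS dtstack.WindowJoin";
JarFileInfo info = AddJarOperator.parseSql(addJarSql);
System.out.println(info.getJarPath());   // expected: /data/sftp/21_window_WindowJoin.jar
System.out.println(info.getMainClass()); // expected: dtstack.WindowJoin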
Use of com.dtstack.taier.pluginapi.JarFileInfo in project Taier by DTStack.
In the class FlinkClient, method submitSyncJob:
private JobResult submitSyncJob(JobClient jobClient) {
    // Using Flink for data synchronization actually amounts to submitting an MR-style job
    JarFileInfo coreJar = syncPluginInfo.createAddJarInfo();
    jobClient.setCoreJarInfo(coreJar);

    List<String> programArgList = syncPluginInfo.createSyncPluginArgs(jobClient, this);
    List<URL> classPaths = syncPluginInfo.getClassPaths(programArgList);

    return submitJobWithJar(jobClient, classPaths, programArgList);
}
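The core-jar handoff above boils down to building a JarFileInfo and handing it to the JobClient. A minimal sketch follows; the plugin jar path is a placeholder, not the one actually resolved by syncPluginInfo.createAddJarInfo().
// Sketch of the core-jar handoff only; the path below is hypothetical.
JarFileInfo coreJar = new JarFileInfo();
coreJar.setJarPath("/opt/taier/plugins/flinkx/flinkx-core.jar"); // placeholder path
jobClient.setCoreJarInfo(coreJar);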
Use of com.dtstack.taier.pluginapi.JarFileInfo in project Taier by DTStack.
In the class FlinkClient, method beforeSubmitFunc:
@Override
public void beforeSubmitFunc(JobClient jobClient) {
    logger.info("Job[{}] submit before", jobClient.getJobId());

    String sql = jobClient.getSql();
    List<String> sqlArr = DtStringUtil.splitIgnoreQuota(sql, ';');
    if (sqlArr.size() == 0) {
        return;
    }

    List<String> sqlList = Lists.newArrayList(sqlArr);
    Iterator<String> sqlItera = sqlList.iterator();
    List<String> fileList = Lists.newArrayList();
    String taskWorkspace = FlinkUtil.getTaskWorkspace(jobClient.getJobId());

    while (sqlItera.hasNext()) {
        String tmpSql = sqlItera.next();
        // handle add jar statements and comment statements on the same line
        tmpSql = PrepareOperator.handleSql(tmpSql);
        if (PrepareOperator.verificResource(tmpSql)) {
            sqlItera.remove();
            String localResourceDir = taskWorkspace + ConfigConstrant.SP + "resource";

            if (!new File(localResourceDir).exists()) {
                new File(localResourceDir).mkdirs();
            }

            File resourceFile = PrepareOperator.getResourceFile(tmpSql);
            String resourceFileName = PrepareOperator.getResourceFileName(tmpSql);
            String remoteFile = resourceFile.getAbsolutePath();
            String localFile = localResourceDir + ConfigConstrant.SP + resourceFileName;
            // download file and close
            File downloadFile = filesystemManager.downloadFile(remoteFile, localFile);
            logger.info("Download Resource File :" + downloadFile.getAbsolutePath());
        } else if (PrepareOperator.verificJar(tmpSql)) {
            sqlItera.remove();
            JarFileInfo jarFileInfo = PrepareOperator.parseJarFile(tmpSql);
            String addFilePath = jarFileInfo.getJarPath();

            String tmpJarDir = taskWorkspace + ConfigConstrant.SP + "jar";
            if (!new File(tmpJarDir).exists()) {
                new File(tmpJarDir).mkdirs();
            }

            File jarFile = null;
            try {
                jarFile = FlinkUtil.downloadJar(addFilePath, tmpJarDir, filesystemManager, false);
            } catch (Exception e) {
                throw new PluginDefineException(e);
            }

            fileList.add(jarFile.getAbsolutePath());

            // switch the jar path to the local copy
            jarFileInfo.setJarPath(jarFile.getAbsolutePath());
            if (jobClient.getJobType() == EJobType.SQL) {
                jobClient.addAttachJarInfo(jarFileInfo);
            } else {
                // non-SQL jobs allow only a single attached jar, which becomes the core jar
                jobClient.setCoreJarInfo(jarFileInfo);
                break;
            }
        }
    }

    cacheFile.put(taskWorkspace, fileList);
    String newSql = String.join(";", sqlList);
    jobClient.setSql(newSql);
}
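To make the flow concrete, here is a rough sketch of a SQL job passing through beforeSubmitFunc. The SQL text, jar path and main class are hypothetical, the ADD JAR syntax follows the YarnMockUtil example below, and flinkClient stands for an already configured FlinkClient instance.
// Hypothetical input: one ADD JAR statement followed by the actual query.
String sql = "ADD JAR WITH /data/sftp/udf.jar AS com.example.Udf;"
        + "SELECT id, name FROM source_table";
jobClient.setSql(sql);
flinkClient.beforeSubmitFunc(jobClient);
// Afterwards the ADD JAR statement is stripped from jobClient.getSql(), the jar is
// downloaded into <taskWorkspace>/jar, and a JarFileInfo pointing at the local copy
// is attached via addAttachJarInfo (SQL jobs) or setCoreJarInfo (other job types).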
Use of com.dtstack.taier.pluginapi.JarFileInfo in project Taier by DTStack.
In the class FlinkClient, method submitSqlJobForStream:
/**
 * 1: no longer restricts the order of operations
 * 2: no longer limits the number of input sources
 * 3: no longer limits the number of output sources
 * @param jobClient
 * @return
 * @throws IOException
 * @throws ClassNotFoundException
 */
private JobResult submitSqlJobForStream(JobClient jobClient) {
    try {
        String taskWorkspace = String.format("%s/%s_%s", ConfigConstrant.TMP_DIR, jobClient.getJobId(), Thread.currentThread().getId());
        // build the program arguments
        List<String> args = sqlPluginInfo.buildExeArgs(jobClient);
        List<String> attachJarLists = cacheFile.get(taskWorkspace);

        List<URL> attachJarUrls = Lists.newArrayList();
        if (!CollectionUtils.isEmpty(attachJarLists)) {
            args.add("-addjar");
            String attachJarStr = PublicUtil.objToString(attachJarLists);
            args.add(URLEncoder.encode(attachJarStr, Charsets.UTF_8.name()));

            attachJarUrls = attachJarLists.stream().map(k -> {
                try {
                    return new File(k).toURL();
                } catch (MalformedURLException e) {
                    throw new PluginDefineException(e);
                }
            }).collect(Collectors.toList());
        }

        JarFileInfo coreJarInfo = sqlPluginInfo.createCoreJarInfo();
        jobClient.setCoreJarInfo(coreJarInfo);

        return submitJobWithJar(jobClient, attachJarUrls, args);
    } catch (Exception e) {
        return JobResult.createErrorResult(e);
    }
}
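The -addjar argument assembled above is simply the serialized list of local jar paths cached by beforeSubmitFunc, URL-encoded. A small sketch of just that encoding step follows; the path is hypothetical and it is assumed that PublicUtil.objToString produces a JSON string for the list.
// Sketch of the -addjar argument construction; the jar path is hypothetical.
List<String> attachJarLists = Arrays.asList("/tmp/taier/9999_1/jar/udf.jar");
String attachJarStr = PublicUtil.objToString(attachJarLists);        // assumed JSON form
String encoded = URLEncoder.encode(attachJarStr, Charsets.UTF_8.name());
// the program arguments then contain: ... "-addjar", encoded ...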
Use of com.dtstack.taier.pluginapi.JarFileInfo in project Taier by DTStack.
In the class YarnMockUtil, method mockJobClient:
public static JobClient mockJobClient(String jobType, String sqlNewText, String jarPath) throws Exception {
    String taskId = "9999";
    String sqlText = "ADD JAR WITH /data/sftp/21_window_WindowJoin.jar AS dtstack.WindowJoin";
    if (StringUtils.isNotEmpty(sqlNewText)) {
        sqlText = sqlNewText;
    }

    ParamAction paramAction = new ParamAction();
    if ("perJob".equalsIgnoreCase(jobType)) {
        paramAction.setTaskType(0);
        paramAction.setComputeType(0);
    } else {
        paramAction.setTaskType(1);
        paramAction.setComputeType(1);
    }
    paramAction.setJobId(taskId);
    paramAction.setSqlText(sqlText);
    paramAction.setTenantId(0L);
    paramAction.setTaskParams("{\"test\":\"test\"}");
    paramAction.setExternalPath("/tmp/savepoint");

    Map<String, Object> map = new HashMap<>();
    map.put("yarnConf", new HashMap<>());
    paramAction.setPluginInfo(map);

    JobClient jobClient = new JobClient(paramAction);

    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(jarPath);
    jarFileInfo.setMainClass("dtstack.WindowJoin");
    jobClient.setCoreJarInfo(jarFileInfo);
    return jobClient;
}
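A test might use the mock roughly as follows. The jar path is a placeholder, and reading the core jar back assumes JobClient exposes a getCoreJarInfo() getter matching the setter used above.
// Hypothetical test usage; the jar path is a placeholder.
JobClient perJobClient = YarnMockUtil.mockJobClient("perJob", null, "/tmp/WindowJoin.jar");
// Assumes a getCoreJarInfo() getter corresponding to setCoreJarInfo above.
Assert.assertEquals("/tmp/WindowJoin.jar", perJobClient.getCoreJarInfo().getJarPath());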