Usage of com.dtstack.taier.pluginapi.JarFileInfo in the Taier project (by DTStack).
From class AddJarOperator, method parseSql:
/**
 * Parses an "ADD JAR" SQL statement into a {@link JarFileInfo}.
 *
 * <p>Group 1 of the class-level {@code pattern} captures the jar path; group 3
 * (optional in the pattern — TODO confirm against the pattern definition)
 * captures the main class, if one was specified.
 *
 * @param sql the SQL statement to parse
 * @return a JarFileInfo holding the jar path and, when present, the main class
 * @throws PluginDefineException if {@code sql} does not match the ADD JAR pattern
 */
public static JarFileInfo parseSql(String sql) {
    Matcher matcher = pattern.matcher(sql);
    if (!matcher.find()) {
        throw new PluginDefineException("not a addJar operator:" + sql);
    }
    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(matcher.group(1));
    // FIX: Matcher.groupCount() describes the pattern, not the match — it returns
    // the same value for every input, so on its own it cannot tell whether the
    // optional main-class group actually participated in the match. Guard against
    // a null group before recording the main class.
    if (matcher.groupCount() == 3 && matcher.group(3) != null) {
        jarFileInfo.setMainClass(matcher.group(3));
    }
    return jarFileInfo;
}
Usage of com.dtstack.taier.pluginapi.JarFileInfo in the Taier project (by DTStack).
From class PerJobClientFactory, method createPerJobClusterDescriptor:
/**
 * Builds a per-job YARN cluster descriptor for the given job: verifies the local
 * flink dist jar, derives a job-specific configuration, and wires in ship files,
 * provided user jars (dist jar plus attached jars), and the job type.
 *
 * @param jobClient the job whose cluster descriptor is being created
 * @return a fully configured YarnClusterDescriptor for this job
 * @throws Exception if configuration, file access, or descriptor creation fails
 */
public YarnClusterDescriptor createPerJobClusterDescriptor(JobClient jobClient) throws Exception {
    // The local flink dist jar must exist before anything else is set up.
    String flinkJarPath = flinkConfig.getFlinkJarPath();
    FileUtil.checkFileExist(flinkJarPath);

    // Layer the job-specific settings on top of the shared flink configuration.
    Configuration perJobConf = new Configuration(flinkConfiguration);
    perJobConf = appendJobConfigAndInitFs(jobClient, perJobConf);
    perJobConf = setHdfsFlinkJarPath(flinkConfig, perJobConf);

    List<File> shipFiles = getResourceFilesAndSetSecurityConfig(jobClient, perJobConf);
    YarnClusterDescriptor descriptor = getClusterDescriptor(perJobConf, yarnConf);

    // Provided user jars: the flink dist jar plus any jars attached to the job.
    List<URL> providedJars = getFlinkJarFile(flinkJarPath, descriptor);
    List<JarFileInfo> attachJars = jobClient.getAttachJarInfos();
    if (CollectionUtils.isNotEmpty(attachJars)) {
        for (JarFileInfo attachJar : attachJars) {
            providedJars.add(new File(attachJar.getJarPath()).toURI().toURL());
        }
    }

    if (CollectionUtils.isNotEmpty(shipFiles)) {
        descriptor.addShipFiles(shipFiles);
    }
    descriptor.setProvidedUserJarFiles(providedJars);
    // judge job kind via JobType
    descriptor.setJobType(jobClient.getJobType());
    return descriptor;
}
Usage of com.dtstack.taier.pluginapi.JarFileInfo in the Taier project (by DTStack).
From class HadoopClient, method beforeSubmitFunc:
@Override
public void beforeSubmitFunc(JobClient jobClient) {
    // Split the submitted SQL into individual statements (quote-aware split on ';').
    List<String> statements = DtStringUtil.splitIgnoreQuota(jobClient.getSql(), ';');
    if (statements.size() == 0) {
        return;
    }
    // Local paths of jars downloaded for this job; cached for later cleanup/use.
    List<String> downloadedJars = Lists.newArrayList();
    for (String statement : Lists.newArrayList(statements)) {
        // Strip same-line comments so an ADD JAR statement is recognized correctly.
        String cleaned = AddJarOperator.handleSql(statement);
        if (!AddJarOperator.verific(cleaned)) {
            continue;
        }
        JarFileInfo jarFileInfo = AddJarOperator.parseSql(cleaned);
        String remotePath = jarFileInfo.getJarPath();
        // Only the hdfs protocol is supported for jar paths.
        if (!remotePath.startsWith(HDFS_PREFIX)) {
            throw new PluginDefineException("only support hdfs protocol for jar path");
        }
        // Download the remote jar to a unique local temp path.
        String localJarPath = TMP_PATH + File.separator + UUID.randomUUID().toString() + ".jar";
        try {
            downloadHdfsFile(remotePath, localJarPath);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        jarFileInfo.setJarPath(localJarPath);
        // NOTE(review): when multiple ADD JAR statements are present, the last one
        // wins as the core jar info — same as the original behavior.
        jobClient.setCoreJarInfo(jarFileInfo);
        downloadedJars.add(localJarPath);
    }
    cacheFile.put(jobClient.getJobId(), downloadedJars);
}
Usage of com.dtstack.taier.pluginapi.JarFileInfo in the Taier project (by DTStack).
From class SqlPluginInfo, method createCoreJarInfo:
/**
 * Builds a {@link JarFileInfo} pointing at the core sql plugin jar inside the
 * local plugin directory.
 *
 * @return jar info with its path set to {@code localSqlPluginDir/<coreJarFileName>}
 */
public JarFileInfo createCoreJarInfo() {
    JarFileInfo coreJar = new JarFileInfo();
    coreJar.setJarPath(localSqlPluginDir + File.separator + getCoreJarFileName());
    return coreJar;
}
Usage of com.dtstack.taier.pluginapi.JarFileInfo in the Taier project (by DTStack).
From class SyncPluginInfo, method createAddJarInfo:
/**
 * Builds a {@link JarFileInfo} pointing at the core sync plugin jar inside the
 * local sync plugin directory.
 *
 * @return jar info with its path set to {@code localSyncPluginDir + SP + <coreJarFileName>}
 */
public JarFileInfo createAddJarInfo() {
    JarFileInfo addJar = new JarFileInfo();
    addJar.setJarPath(localSyncPluginDir + ConfigConstrant.SP + getCoreJarFileName());
    return addJar;
}
Aggregations