Use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
The class PerJobClientFactory, method getResourceFilesAndSetSecurityConfig.
private List<File> getResourceFilesAndSetSecurityConfig(JobClient jobClient, Configuration config) throws IOException {
    Map<String, File> resources = new HashMap<>();
    String remoteDir = flinkConfig.getRemoteDir();

    // resource files
    String taskWorkspace = FlinkUtil.getTaskWorkspace(jobClient.getJobId());
    String taskResourceDirPath = taskWorkspace + ConfigConstrant.SP + "resource";
    File taskResourceDir = new File(taskResourceDirPath);
    File[] taskResourceDirFiles = taskResourceDir.listFiles();
    if (taskResourceDirFiles != null && taskResourceDirFiles.length > 0) {
        for (File file : taskResourceDirFiles) {
            String fileName = file.getName();
            resources.put(fileName, file);
        }
    }

    // keytab files for the job submission
    String clusterKeytabDirPath = ConfigConstant.LOCAL_KEYTAB_DIR_PARENT + remoteDir;
    File clusterKeytabDir = new File(clusterKeytabDirPath);
    File[] clusterKeytabFiles = clusterKeytabDir.listFiles();
    if (clusterKeytabFiles != null && clusterKeytabFiles.length > 0) {
        for (File file : clusterKeytabFiles) {
            String fileName = file.getName();
            String keytabPath = file.getAbsolutePath();
            String keytabFileName = flinkConfig.getPrincipalFile();

            // a keytab that clashes with a task resource name is copied under a randomized name
            if (resources.containsKey(fileName) && StringUtils.endsWith(fileName, "keytab")) {
                String newFileName = String.format("%s-%s", RandomStringUtils.randomAlphanumeric(4), fileName);
                keytabPath = String.format("%s/%s", taskResourceDirPath, newFileName);
                FileUtils.copyFile(file, new File(keytabPath));
            }

            // the principal keytab configures Flink's Kerberos login instead of being shipped as a resource
            if (StringUtils.equals(fileName, keytabFileName)) {
                String principal = flinkConfig.getPrincipal();
                if (StringUtils.isEmpty(principal)) {
                    principal = KerberosUtils.getPrincipal(keytabPath);
                }
                config.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, keytabPath);
                config.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, principal);
                continue;
            }

            File newKeytabFile = new File(keytabPath);
            resources.put(newKeytabFile.getName(), newKeytabFile);
        }
    }

    return resources.entrySet().stream().map(entry -> entry.getValue()).collect(Collectors.toList());
}
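A natural consumer of the returned list is the ship-file set of the per-job YARN descriptor. The sketch below is hypothetical wiring rather than the project's code: shipJobResources is an assumed helper inside the same class, and it assumes Flink's YarnClusterDescriptor#addShipFiles(List<File>) signature, which varies across Flink versions (newer releases take a list of Path instead).

// Hypothetical helper in the same class: ship the resolved resource and keytab
// files with the per-job YARN descriptor so they are localized into the
// application's containers. addShipFiles(List<File>) is assumed here; newer
// Flink versions expect List<org.apache.flink.core.fs.Path> instead.
private void shipJobResources(JobClient jobClient, Configuration config,
                              YarnClusterDescriptor clusterDescriptor) throws IOException {
    List<File> resourceFiles = getResourceFilesAndSetSecurityConfig(jobClient, config);
    if (!resourceFiles.isEmpty()) {
        clusterDescriptor.addShipFiles(resourceFiles);
    }
}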
Use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
The class PerJobClientFactory, method deleteTaskIfExist.
/**
 * Kill YARN applications whose name and queue match this jobClient
 * when running in stream-computing mode.
 */
public void deleteTaskIfExist(JobClient jobClient) {
    if (ComputeType.BATCH.equals(jobClient.getComputeType())) {
        return;
    }
    try {
        String taskName = jobClient.getJobName();
        String queueName = flinkConfig.getQueue();
        YarnClient yarnClient = flinkClientBuilder.getYarnClient();

        // only applications that are still ACCEPTED or RUNNING can be killed
        EnumSet<YarnApplicationState> enumSet = EnumSet.noneOf(YarnApplicationState.class);
        enumSet.add(YarnApplicationState.ACCEPTED);
        enumSet.add(YarnApplicationState.RUNNING);

        List<ApplicationReport> existApps = yarnClient.getApplications(enumSet).stream()
                .filter(report -> report.getQueue().endsWith(queueName))
                .filter(report -> report.getName().equals(taskName))
                .collect(Collectors.toList());

        for (ApplicationReport report : existApps) {
            ApplicationId appId = report.getApplicationId();
            LOG.info("try to kill application " + appId.toString() + " which name is " + report.getName());
            yarnClient.killApplication(appId);
        }
    } catch (Exception e) {
        LOG.error("Delete task error ", e);
        throw new PluginDefineException("Delete task error");
    }
}
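A hypothetical caller, shown below rather than taken from the project, would invoke this check right before submitting a stream-computing job so that a stale application with the same name and queue is killed first.

// Hypothetical submission hook: deduplicate a streaming job by name and queue
// before launching a new per-job application. perJobClientFactory is assumed
// to be an available PerJobClientFactory instance.
public void beforeSubmit(JobClient jobClient) {
    // no-op for BATCH jobs; only stream-computing jobs are deduplicated
    perJobClientFactory.deleteTaskIfExist(jobClient);
    // continue with the normal per-job submission afterwards
}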
Use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
The class PerJobClientFactory, method getClusterClient.
@Override
public ClusterClient getClusterClient(JobIdentifier jobIdentifier) {
    String applicationId = jobIdentifier.getApplicationId();
    String jobId = jobIdentifier.getJobId();

    ClusterClient clusterClient = null;
    try {
        clusterClient = KerberosUtils.login(flinkConfig, () -> {
            try {
                return perJobClientCache.get(applicationId, () -> {
                    ParamAction action = new ParamAction();
                    action.setJobId(jobId);
                    action.setName("jobId-" + jobId);
                    action.setTaskType(EJobType.SQL.getType());
                    action.setComputeType(ComputeType.STREAM.getType());
                    action.setTenantId(-1L);
                    String taskParams = "flinkTaskRunMode=per_job";
                    action.setTaskParams(taskParams);

                    JobClient jobClient = new JobClient(action);
                    try (YarnClusterDescriptor perJobYarnClusterDescriptor = this.createPerJobClusterDescriptor(jobClient)) {
                        return perJobYarnClusterDescriptor
                                .retrieve(ConverterUtils.toApplicationId(applicationId))
                                .getClusterClient();
                    }
                });
            } catch (ExecutionException e) {
                throw new PluginDefineException(e);
            }
        }, flinkClientBuilder.getYarnConf());
    } catch (Exception e) {
        LOG.error("job[{}] get perJobClient exception:{}", jobId, e);
        throw new PluginDefineException(e);
    }
    return clusterClient;
}
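As a usage illustration (not taken from the project), the retrieved ClusterClient can be used to poll the status of the Flink job. This assumes the Flink-side JobID is already known, and that the ClusterClient API exposes getJobStatus(JobID) returning a CompletableFuture, as it does in recent Flink releases.

// Hypothetical usage: poll the status of the Flink job running inside the
// cached per-job application. flinkJobId is assumed to be the Flink JobID,
// which is distinct from the Taier job id used above.
public JobStatus queryJobStatus(JobIdentifier jobIdentifier, JobID flinkJobId) throws Exception {
    ClusterClient<?> clusterClient = getClusterClient(jobIdentifier);
    return clusterClient.getJobStatus(flinkJobId).get();
}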