Use of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project (DTStack) — class AbstractRdbsClientTest, method testProcessSubmitJobWithType:
@Test
public void testProcessSubmitJobWithType() throws Exception {
    JobClient jobClient = new JobClient();
    jobClient.setJobType(EJobType.MR);
    AbstractRdbsClient abstractRdbsClient = PowerMockito.mock(AbstractRdbsClient.class, Mockito.CALLS_REAL_METHODS);

    // BUG FIX: the flag was initialized to true AND set to true in the catch,
    // so the assertTrue below could never fail. It must start as false and be
    // flipped only when the MR path throws (the real submitJobWithJar of the
    // abstract client is expected to reject jar jobs for RDBS plugins).
    boolean mrPathThrew = false;
    try {
        JobResult jobResult = abstractRdbsClient.processSubmitJobWithType(jobClient);
        Assert.assertNotNull(jobResult);
    } catch (Exception e) {
        mrPathThrew = true;
    }
    Assert.assertTrue(mrPathThrew);

    // SQL path: the queue is mocked, so submission must succeed and return a result.
    jobClient.setJobType(EJobType.SQL);
    RdbsExeQueue rdbsExeQueue = PowerMockito.mock(RdbsExeQueue.class);
    when(rdbsExeQueue.submit(any(JobClient.class))).thenReturn("test");
    MemberModifier.field(AbstractRdbsClient.class, "exeQueue").set(abstractRdbsClient, rdbsExeQueue);
    JobResult jobResult = abstractRdbsClient.processSubmitJobWithType(jobClient);
    Assert.assertNotNull(jobResult);
}
Use of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project (DTStack) — class Launcher, method main:
/**
 * Local entry point: reads a job description from JSON, builds a plugin
 * client under its own classloader, and submits the job through it.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("HADOOP_USER_NAME", "admin");

    // Build the JobClient from the job-description JSON on disk.
    String jobJsonPath = USER_DIR + SP + "local-test/src/main/json/dtscript-agent.json";
    String jobJson = getJobContent(jobJsonPath);
    Map jsonMap = PublicUtil.jsonStrToObject(jobJson, Map.class);
    ParamAction action = PublicUtil.mapToObject(jsonMap, ParamAction.class);
    final JobClient jobClient = new JobClient(action);

    // Identifier for the job under test (not consumed further in this snippet).
    JobIdentifier jobIdentifier = JobIdentifier.createInstance("jobId", "appId", "taskId");

    // Plugin configuration, tagged with an md5 checksum of the raw plugin info.
    String pluginInfo = jobClient.getPluginInfo();
    final Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
    properties.setProperty("md5sum", MD5Util.getMd5String(pluginInfo));

    // Build the plugin client and initialise it under the plugin's classloader.
    String pluginParentPath = USER_DIR + SP + "pluginLibs";
    final IClient client = ClientFactory.buildPluginClient(pluginInfo, pluginParentPath);
    ClassLoaderCallBackMethod.callbackAndReset(new CallBack<String>() {
        @Override
        public String execute() throws Exception {
            client.init(properties);
            return null;
        }
    }, client.getClass().getClassLoader(), true);

    // Exercise the target method: submit the job through the same classloader.
    ClassLoaderCallBackMethod.callbackAndReset(new CallBack<Object>() {
        @Override
        public Object execute() throws Exception {
            return client.submitJob(jobClient);
        }
    }, client.getClass().getClassLoader(), true);

    LOG.info("Launcher Success!");
    System.exit(0);
}
Use of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project (DTStack) — class AbstractRdbsClient, method processSubmitJobWithType:
/**
 * Dispatches the submission according to the job type.
 * MR jobs go through the jar path, SQL jobs through the SQL path;
 * any other (or null) job type yields {@code null}, as before.
 */
@Override
protected JobResult processSubmitJobWithType(JobClient jobClient) {
    final EJobType type = jobClient.getJobType();
    if (EJobType.MR.equals(type)) {
        return submitJobWithJar(jobClient);
    }
    if (EJobType.SQL.equals(type)) {
        return submitSqlJob(jobClient);
    }
    return null;
}
Use of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project (DTStack) — class FlinkClient, method submitJobWithJar:
/**
 * Submits a jar-based Flink job either in per-job mode (a dedicated cluster
 * per job) or in session mode (an existing session cluster), and packages the
 * outcome as a {@link JobResult} carrying the job graph and checkpoint interval.
 *
 * @param jobClient      job description (jar path, main class, args, savepoint settings)
 * @param classPaths     extra classpath URLs shipped with the job
 * @param programArgList mutable list of program arguments; class args from the
 *                       job are appended to it
 * @return a success result with job/app ids, or an error result on any failure
 */
private JobResult submitJobWithJar(JobClient jobClient, List<URL> classPaths, List<String> programArgList) {
    JobParam jobParam = new JobParam(jobClient);
    String jarPath = jobParam.getJarPath();
    if (jarPath == null) {
        logger.error("can not submit a job without jar path, please check it");
        return JobResult.createErrorResult("can not submit a job without jar path, please check it");
    }
    // Split the class args on spaces, keeping quoted segments intact.
    String args = jobParam.getClassArgs();
    if (StringUtils.isNotBlank(args)) {
        programArgList.addAll(DtStringUtil.splitIgnoreQuota(args, ' '));
    }
    // If the jar's manifest does not specify a main class, this parameter must be set.
    String entryPointClass = jobParam.getMainClass();
    SavepointRestoreSettings spSettings = buildSavepointSetting(jobClient);
    String[] programArgs = programArgList.toArray(new String[programArgList.size()]);
    PackagedProgram packagedProgram = null;
    JobGraph jobGraph = null;
    Pair<String, String> runResult;
    try {
        ClusterMode clusterMode = ClusterMode.getClusteMode(flinkConfig.getClusterMode());
        if (ClusterMode.isPerjob(clusterMode)) {
            // In per-job mode, creation of the PackagedProgram is deferred
            // (createProgramDelay=true): the cluster deployer builds it, and the
            // graph/program are read back from the spec after the run.
            ClusterSpecification clusterSpecification = FlinkConfUtil.createClusterSpecification(flinkClientBuilder.getFlinkConfiguration(), jobClient.getApplicationPriority(), jobClient.getConfProperties());
            clusterSpecification.setClasspaths(classPaths);
            clusterSpecification.setEntryPointClass(entryPointClass);
            clusterSpecification.setJarFile(new File(jarPath));
            clusterSpecification.setSpSetting(spSettings);
            clusterSpecification.setProgramArgs(programArgs);
            clusterSpecification.setCreateProgramDelay(true);
            clusterSpecification.setYarnConfiguration(hadoopConf.getYarnConfiguration());
            logger.info("--------taskId: {} run by PerJob mode-----", jobClient.getJobId());
            runResult = runJobByPerJob(clusterSpecification, jobClient);
            jobGraph = clusterSpecification.getJobGraph();
            packagedProgram = clusterSpecification.getProgram();
        } else {
            // Session mode: build the program and job graph locally, then submit
            // to the already-running session cluster.
            Integer runParallelism = FlinkUtil.getJobParallelism(jobClient.getConfProperties());
            packagedProgram = FlinkUtil.buildProgram(jarPath, classPaths, jobClient.getJobType(), entryPointClass, programArgs, spSettings, flinkClientBuilder.getFlinkConfiguration(), filesystemManager);
            jobGraph = PackagedProgramUtils.createJobGraph(packagedProgram, flinkClientBuilder.getFlinkConfiguration(), runParallelism, false);
            // This parallelism only takes effect when the program itself does not specify one.
            clearClassPathShipfileLoadMode(packagedProgram);
            logger.info("--------taskId: {} run by Session mode-----", jobClient.getJobId());
            runResult = runJobBySession(jobGraph);
        }
        // runResult: first = engine job id, second = application id (per createSuccessResult usage).
        JobResult jobResult = JobResult.createSuccessResult(runResult.getSecond(), runResult.getFirst());
        jobResult.setExtraData(JobResultConstant.JOB_GRAPH, JobGraphBuildUtil.buildLatencyMarker(jobGraph));
        long checkpointInterval = jobGraph.getCheckpointingSettings().getCheckpointCoordinatorConfiguration().getCheckpointInterval();
        // Long.MAX_VALUE is Flink's sentinel for "checkpointing disabled"; report 0 instead.
        if (checkpointInterval >= Long.MAX_VALUE) {
            checkpointInterval = 0;
        }
        jobResult.setExtraData(JobResultConstant.FLINK_CHECKPOINT, String.valueOf(checkpointInterval));
        return jobResult;
    } catch (Throwable e) {
        return JobResult.createErrorResult(e);
    } finally {
        // Always clean up the jars the PackagedProgram extracted to a temp dir.
        if (packagedProgram != null) {
            packagedProgram.deleteExtractedLibraries();
        }
    }
}
Use of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project (DTStack) — class FlinkClient, method processSubmitJobWithType:
/**
 * Submits a job under a Kerberos login context, dispatching on the job type
 * (MR jar job, SQL job, or data-sync job). Unknown or null job types produce
 * a {@code null} result, as before; any failure is wrapped in an error result.
 */
@Override
protected JobResult processSubmitJobWithType(JobClient jobClient) {
    try {
        return KerberosUtils.login(flinkConfig, () -> {
            // equals() on the constant keeps a null job type safe (falls through to null).
            final EJobType type = jobClient.getJobType();
            final JobResult submitResult;
            if (EJobType.MR.equals(type)) {
                submitResult = submitJobWithJar(jobClient);
            } else if (EJobType.SQL.equals(type)) {
                submitResult = submitSqlJob(jobClient);
            } else if (EJobType.SYNC.equals(type)) {
                submitResult = submitSyncJob(jobClient);
            } else {
                submitResult = null;
            }
            if (submitResult != null) {
                logger.info("taskId: {}, job submit success, result: {}", jobClient.getJobId(), submitResult.toString());
            }
            return submitResult;
        }, hadoopConf.getYarnConfiguration());
    } catch (Exception e) {
        logger.error("taskId: {}, can not submit a job process SubmitJobWithType error: ", jobClient.getJobId(), e);
        return JobResult.createErrorResult(e);
    }
}
Aggregations