Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache.
The class FlinkSqlInterpreter, method open.
@Override
public void open() throws InterpreterException {
  this.sqlCommandParser = new SqlCommandParser(flinkInterpreter.getFlinkShims(), tbenv);
  this.sqlSplitter = new SqlSplitter();
  JobListener jobListener = new JobListener() {

    @Override
    public void onJobSubmitted(@Nullable JobClient jobClient, @Nullable Throwable throwable) {
      if (lock.isHeldByCurrentThread()) {
        lock.unlock();
        LOGGER.info("UnLock JobSubmitLock");
      }
    }

    @Override
    public void onJobExecuted(@Nullable JobExecutionResult jobExecutionResult, @Nullable Throwable throwable) {
    }
  };

  flinkInterpreter.getExecutionEnvironment().getJavaEnv().registerJobListener(jobListener);
  flinkInterpreter.getStreamExecutionEnvironment().getJavaEnv().registerJobListener(jobListener);
  this.defaultSqlParallelism = flinkInterpreter.getDefaultSqlParallelism();
  this.tableConfigOptions = flinkInterpreter.getFlinkShims().extractTableConfigOptions();
}
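For comparison, the same JobListener hook can be registered on a plain StreamExecutionEnvironment outside Zeppelin. A minimal, self-contained sketch; the class name, job name, and printed messages are illustrative only:

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.JobListener;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import javax.annotation.Nullable;

public class JobListenerSketch {
  public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // onJobSubmitted fires once the job has been handed to the cluster;
    // jobClient is null if the submission itself failed.
    env.registerJobListener(new JobListener() {
      @Override
      public void onJobSubmitted(@Nullable JobClient jobClient, @Nullable Throwable throwable) {
        if (jobClient != null) {
          System.out.println("Submitted job " + jobClient.getJobID());
        }
      }

      @Override
      public void onJobExecuted(@Nullable JobExecutionResult result, @Nullable Throwable throwable) {
        if (result != null) {
          System.out.println("Job finished in " + result.getNetRuntime() + " ms");
        }
      }
    });
    env.fromElements(1, 2, 3).print();
    env.execute("job-listener-sketch");
  }
}

In the interpreter above, the same onJobSubmitted callback is used to release the submit lock once Flink has accepted the job.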
Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache.
The class JobManager, method addJob.
public void addJob(InterpreterContext context, JobClient jobClient) {
  String paragraphId = context.getParagraphId();
  JobClient previousJobClient = this.jobs.put(paragraphId, jobClient);
  if (previousJobClient != null) {
    LOGGER.warn("There's another Job {} that is associated with paragraph {}",
            jobClient.getJobID(), paragraphId);
    return;
  }
  long checkInterval = Long.parseLong(properties.getProperty("zeppelin.flink.job.check_interval", "1000"));
  FlinkJobProgressPoller thread = new FlinkJobProgressPoller(flinkWebUrl, jobClient.getJobID(), context, checkInterval);
  thread.setName("JobProgressPoller-Thread-" + paragraphId);
  thread.start();
  this.jobProgressPollerMap.put(jobClient.getJobID(), thread);
}
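The JobClient passed to addJob originates from an asynchronous submission rather than a blocking execute(). A minimal sketch, assuming a plain StreamExecutionEnvironment and an illustrative job name, of how executeAsync() yields such a handle:

import org.apache.flink.core.execution.JobClient;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ExecuteAsyncSketch {
  public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.fromElements(1, 2, 3).print();
    // executeAsync() returns a JobClient handle immediately instead of blocking like execute().
    JobClient jobClient = env.executeAsync("execute-async-sketch");
    System.out.println("Tracking job " + jobClient.getJobID());
  }
}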
Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache.
The class JobManager, method cancelJob.
public void cancelJob(InterpreterContext context) throws InterpreterException {
  LOGGER.info("Canceling job associated of paragraph: {}", context.getParagraphId());
  JobClient jobClient = this.jobs.get(context.getParagraphId());
  if (jobClient == null) {
    LOGGER.warn("Unable to remove Job from paragraph {} as no job associated to this paragraph",
            context.getParagraphId());
    return;
  }
  boolean cancelled = false;
  try {
    String savePointDir = context.getLocalProperties().get(SAVEPOINT_DIR);
    if (StringUtils.isBlank(savePointDir)) {
      // No savepoint directory configured: cancel the job outright.
      LOGGER.info("Trying to cancel job of paragraph {}", context.getParagraphId());
      jobClient.cancel();
    } else {
      // Savepoint directory configured: stop with a savepoint, record the savepoint path
      // in the paragraph config, and fall back to a plain cancel if the savepoint fails.
      LOGGER.info("Trying to stop job of paragraph {} with save point dir: {}",
              context.getParagraphId(), savePointDir);
      try {
        String savePointPath = jobClient.stopWithSavepoint(true, savePointDir).get();
        Map<String, String> config = new HashMap<>();
        config.put(SAVEPOINT_PATH, savePointPath);
        context.getIntpEventClient().updateParagraphConfig(
                context.getNoteId(), context.getParagraphId(), config);
        LOGGER.info("Job {} of paragraph {} is stopped with save point path: {}",
                jobClient.getJobID(), context.getParagraphId(), savePointPath);
      } catch (Exception e) {
        LOGGER.warn("Fail to cancel job of paragraph {} with savepoint, try to cancel it without savepoint",
                context.getParagraphId(), e);
        jobClient.cancel();
      }
    }
    cancelled = true;
  } catch (Exception e) {
    String errorMessage = String.format("Fail to cancel job %s that is associated with paragraph %s",
            jobClient.getJobID(), context.getParagraphId());
    LOGGER.warn(errorMessage, e);
    throw new InterpreterException(errorMessage, e);
  } finally {
    if (cancelled) {
      LOGGER.info("Cancelling is successful, remove the associated FlinkJobProgressPoller of paragraph: "
              + context.getParagraphId());
      FlinkJobProgressPoller jobProgressPoller = jobProgressPollerMap.remove(jobClient.getJobID());
      if (jobProgressPoller != null) {
        jobProgressPoller.cancel();
        jobProgressPoller.interrupt();
      }
      this.jobs.remove(context.getParagraphId());
    }
  }
}
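The stop-with-savepoint-then-cancel fallback above can be distilled into a small helper. A minimal sketch, assuming the caller already holds a JobClient and a writable savepoint directory; the class and method names are illustrative:

import org.apache.flink.core.execution.JobClient;

public class StopWithSavepointSketch {

  // Prefer stopping with a savepoint; fall back to a plain cancel if the savepoint fails.
  static void stopOrCancel(JobClient jobClient, String savepointDir) throws Exception {
    try {
      // stopWithSavepoint(advanceToEndOfEventTime, savepointDirectory) completes with the savepoint path.
      String savepointPath = jobClient.stopWithSavepoint(true, savepointDir).get();
      System.out.println("Stopped with savepoint at " + savepointPath);
    } catch (Exception e) {
      jobClient.cancel().get();
    }
  }
}

Zeppelin additionally writes the returned savepoint path back into the paragraph config so that a later run can resume from it.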
Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache.
The class JobManager, method getJobProgress.
public int getJobProgress(String paragraphId) {
  JobClient jobClient = this.jobs.get(paragraphId);
  if (jobClient == null) {
    LOGGER.warn("Unable to get job progress for paragraph: " + paragraphId
            + ", because no job is associated with this paragraph");
    return 0;
  }
  FlinkJobProgressPoller jobProgressPoller = this.jobProgressPollerMap.get(jobClient.getJobID());
  if (jobProgressPoller == null) {
    LOGGER.warn("Unable to get job progress for paragraph: " + paragraphId
            + ", because no job progress is associated with this jobId: " + jobClient.getJobID());
    return 0;
  }
  return jobProgressPoller.getProgress();
}
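FlinkJobProgressPoller reads fine-grained progress from the Flink web UI; the JobClient itself only exposes the coarse job status. A minimal sketch of that direct status check, with illustrative class and method names:

import org.apache.flink.api.common.JobStatus;
import org.apache.flink.core.execution.JobClient;

public class JobStatusSketch {

  // True while the job has not yet reached a terminal state (e.g. FINISHED, CANCELED or FAILED).
  static boolean isStillRunning(JobClient jobClient) throws Exception {
    JobStatus status = jobClient.getJobStatus().get();
    return !status.isTerminalState();
  }
}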
Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache.
The class Flink113Shims, method executeMultipleInsertInto.
@Override
public boolean executeMultipleInsertInto(String jobName, Object tblEnv, InterpreterContext context) throws Exception {
  JobClient jobClient = statementSetMap.get(context.getParagraphId()).execute().getJobClient().get();
  // Poll every 5 seconds until the job reaches a terminal state.
  while (!jobClient.getJobStatus().get().isTerminalState()) {
    LOGGER.debug("Wait for job to finish");
    Thread.sleep(1000 * 5);
  }
  if (jobClient.getJobStatus().get() == JobStatus.CANCELED) {
    context.out.write("Job is cancelled.\n");
    return false;
  }
  return true;
}
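A minimal standalone sketch of the same pattern outside the shim, using the bundled datagen and blackhole connectors; it obtains the JobClient from the StatementSet's TableResult and blocks on getJobExecutionResult() instead of sleeping in a polling loop. Table names and the class name are illustrative:

import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class StatementSetSketch {
  public static void main(String[] args) throws Exception {
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
    tEnv.executeSql("CREATE TABLE source1 (x INT) WITH ('connector' = 'datagen', 'number-of-rows' = '10')");
    tEnv.executeSql("CREATE TABLE sink1 (x INT) WITH ('connector' = 'blackhole')");
    tEnv.executeSql("CREATE TABLE sink2 (x INT) WITH ('connector' = 'blackhole')");
    // Group several INSERT INTO statements into one Flink job.
    StatementSet statementSet = tEnv.createStatementSet();
    statementSet.addInsertSql("INSERT INTO sink1 SELECT * FROM source1");
    statementSet.addInsertSql("INSERT INTO sink2 SELECT * FROM source1");
    TableResult result = statementSet.execute();
    JobClient jobClient = result.getJobClient().orElseThrow(IllegalStateException::new);
    // Block until the job reaches a terminal state instead of polling getJobStatus() in a loop.
    jobClient.getJobExecutionResult().get();
  }
}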