Use of org.apache.flink.core.execution.JobClient in the Apache Zeppelin project: class Flink112Shims, method executeMultipleInsertInto.
@Override
public boolean executeMultipleInsertInto(String jobName, Object tblEnv, InterpreterContext context) throws Exception {
  // Submit the statement set registered for this paragraph and block until the Flink job
  // reaches a terminal state. Returns false when the job ended because it was cancelled.
  // Fail fast with a descriptive error instead of an unchecked Optional.get().
  JobClient jobClient = statementSetMap.get(context.getParagraphId()).execute().getJobClient()
          .orElseThrow(() -> new IllegalStateException(
                  "No JobClient available for paragraph: " + context.getParagraphId()));
  // Fetch the status once per iteration and reuse the snapshot, so the terminal-state
  // check and the CANCELED check below observe the same status (the original re-queried
  // the cluster for each check).
  JobStatus jobStatus = jobClient.getJobStatus().get();
  while (!jobStatus.isTerminalState()) {
    LOGGER.debug("Wait for job to finish");
    Thread.sleep(1000 * 5);
    jobStatus = jobClient.getJobStatus().get();
  }
  if (jobStatus == JobStatus.CANCELED) {
    context.out.write("Job is cancelled.\n");
    return false;
  }
  return true;
}
Use of org.apache.flink.core.execution.JobClient in the Apache Zeppelin project: class JobManager, method removeJob.
public void removeJob(String paragraphId) {
  // Detach the Flink job associated with the given paragraph and stop its progress poller.
  // Use SLF4J parameterized logging instead of string concatenation (avoids building the
  // message when the level is disabled and matches SLF4J conventions).
  LOGGER.info("Remove job in paragraph: {}", paragraphId);
  JobClient jobClient = this.jobs.remove(paragraphId);
  if (jobClient == null) {
    LOGGER.warn("Unable to remove job, because no job is associated with paragraph: {}", paragraphId);
    return;
  }
  FlinkJobProgressPoller jobProgressPoller = this.jobProgressPollerMap.remove(jobClient.getJobID());
  if (jobProgressPoller == null) {
    LOGGER.warn("Unable to remove poller, because no poller is associated with paragraph: {}", paragraphId);
    return;
  }
  // Signal the poller thread to stop, then interrupt it in case it is sleeping/blocked.
  jobProgressPoller.cancel();
  jobProgressPoller.interrupt();
}
Use of org.apache.flink.core.execution.JobClient in the Apache Zeppelin project: class JobManager, method sendFlinkJobUrl.
public void sendFlinkJobUrl(InterpreterContext context) {
  // Publish the Flink web UI URL of the job running in this paragraph back to the
  // Zeppelin frontend, so the notebook can render a "FLINK JOB" link.
  JobClient jobClient = jobs.get(context.getParagraphId());
  if (jobClient != null) {
    String jobUrl = displayedFlinkWebUrl + "#/job/" + jobClient.getJobID();
    Map<String, String> infos = new HashMap<>();
    infos.put("jobUrl", jobUrl);
    infos.put("label", "FLINK JOB");
    infos.put("tooltip", "View in Flink web UI");
    infos.put("noteId", context.getNoteId());
    infos.put("paraId", context.getParagraphId());
    context.getIntpEventClient().onParaInfosReceived(infos);
  } else {
    // Parameterized SLF4J logging instead of string concatenation.
    LOGGER.warn("No job is associated with paragraph: {}", context.getParagraphId());
  }
}
Use of org.apache.flink.core.execution.JobClient in the Apache Flink project: class KafkaTableITCase, method testStartFromGroupOffsetsWithNoneResetStrategy.
private void testStartFromGroupOffsetsWithNoneResetStrategy() throws ExecutionException, InterruptedException {
  // we always use a different topic name for each parameterized topic,
  // in order to make sure the topic can be created.
  final String resetStrategy = "none";
  final String tableName = resetStrategy + "Table";
  final String topic = "groupOffset_" + format;
  // Use ThreadLocalRandom for consistency with testStartFromGroupOffsets (avoids
  // allocating a fresh java.util.Random on every invocation).
  String groupId = resetStrategy + ThreadLocalRandom.current().nextInt();
  TableResult tableResult = null;
  try {
    tableResult = startFromGroupOffset(tableName, topic, groupId, resetStrategy, "MySink");
    // With reset strategy "none" the job is expected to fail/finish; await surfaces it.
    tableResult.await();
  } finally {
    // ------------- cleanup -------------------
    if (tableResult != null) {
      tableResult.getJobClient().ifPresent(JobClient::cancel);
    }
    deleteTestTopic(topic);
  }
}
Use of org.apache.flink.core.execution.JobClient in the Apache Flink project: class KafkaTableITCase, method testStartFromGroupOffsets.
private void testStartFromGroupOffsets(String resetStrategy) throws Exception {
  // we always use a different topic name for each parameterized topic,
  // in order to make sure the topic can be created.
  final String suffix = format + resetStrategy;
  final String tableName = "Table" + suffix;
  final String topic = "groupOffset_" + suffix + ThreadLocalRandom.current().nextLong();
  String groupId = suffix;
  String sinkName = "mySink" + suffix;
  List<String> expected = Arrays.asList("+I[0, 0]", "+I[0, 1]", "+I[0, 2]", "+I[1, 3]", "+I[1, 4]", "+I[1, 5]");
  TableResult result = null;
  try {
    result = startFromGroupOffset(tableName, topic, groupId, resetStrategy, sinkName);
    // For "latest" the pre-existing rows are skipped, so freshly appended rows
    // become the expected output instead.
    if ("latest".equals(resetStrategy)) {
      expected = appendNewData(topic, tableName, groupId, expected.size());
    }
    KafkaTableTestUtils.waitingExpectedResults(sinkName, expected, Duration.ofSeconds(15));
  } finally {
    // ------------- cleanup -------------------
    if (result != null) {
      result.getJobClient().ifPresent(JobClient::cancel);
    }
    deleteTestTopic(topic);
  }
}
Aggregations