
Example 31 with JobClient

Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache: class Flink112Shims, method executeMultipleInsertInto.

@Override
public boolean executeMultipleInsertInto(String jobName, Object tblEnv, InterpreterContext context) throws Exception {
    // Execute the buffered statement set, then poll the job status every five
    // seconds until the job reaches a terminal state.
    JobClient jobClient = statementSetMap.get(context.getParagraphId()).execute().getJobClient().get();
    while (!jobClient.getJobStatus().get().isTerminalState()) {
        LOGGER.debug("Wait for job to finish");
        Thread.sleep(1000 * 5);
    }
    if (jobClient.getJobStatus().get() == JobStatus.CANCELED) {
        context.out.write("Job is cancelled.\n");
        return false;
    }
    return true;
}
Also used : JobClient(org.apache.flink.core.execution.JobClient)
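
For comparison, here is a minimal, self-contained sketch of the same polling pattern outside Zeppelin. The JobClient comes from StreamExecutionEnvironment.executeAsync() instead of a StatementSet; the pipeline, job name, and sleep interval are placeholders, and with purely local execution the mini-cluster may already be shutting down by the time the final poll runs, so the pattern is most useful against a long-running cluster.

import org.apache.flink.api.common.JobStatus;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;

public class JobClientPollingSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Placeholder pipeline; any job would do.
        env.fromElements(1, 2, 3).addSink(new DiscardingSink<>());

        // executeAsync() submits the job and returns immediately with a JobClient.
        JobClient jobClient = env.executeAsync("polling-sketch");

        // getJobStatus() returns a CompletableFuture<JobStatus>, so get() blocks per poll,
        // mirroring the busy-wait in executeMultipleInsertInto above.
        while (!jobClient.getJobStatus().get().isTerminalState()) {
            Thread.sleep(5000);
        }
        if (jobClient.getJobStatus().get() == JobStatus.CANCELED) {
            System.out.println("Job was cancelled.");
        }
    }
}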

Example 32 with JobClient

Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache: class JobManager, method removeJob.

public void removeJob(String paragraphId) {
    LOGGER.info("Remove job in paragraph: " + paragraphId);
    JobClient jobClient = this.jobs.remove(paragraphId);
    if (jobClient == null) {
        LOGGER.warn("Unable to remove job, because no job is associated with paragraph: " + paragraphId);
        return;
    }
    FlinkJobProgressPoller jobProgressPoller = this.jobProgressPollerMap.remove(jobClient.getJobID());
    if (jobProgressPoller == null) {
        LOGGER.warn("Unable to remove poller, because no poller is associated with paragraph: " + paragraphId);
        return;
    }
    // Stop the progress poller thread that was tracking this job.
    jobProgressPoller.cancel();
    jobProgressPoller.interrupt();
}
Also used : JobClient(org.apache.flink.core.execution.JobClient)

Example 33 with JobClient

Use of org.apache.flink.core.execution.JobClient in project zeppelin by apache: class JobManager, method sendFlinkJobUrl.

public void sendFlinkJobUrl(InterpreterContext context) {
    JobClient jobClient = jobs.get(context.getParagraphId());
    if (jobClient != null) {
        // Build a deep link into the Flink web UI for this job and push it to the
        // Zeppelin frontend as paragraph info.
        String jobUrl = displayedFlinkWebUrl + "#/job/" + jobClient.getJobID();
        Map<String, String> infos = new HashMap<>();
        infos.put("jobUrl", jobUrl);
        infos.put("label", "FLINK JOB");
        infos.put("tooltip", "View in Flink web UI");
        infos.put("noteId", context.getNoteId());
        infos.put("paraId", context.getParagraphId());
        context.getIntpEventClient().onParaInfosReceived(infos);
    } else {
        LOGGER.warn("No job is associated with paragraph: " + context.getParagraphId());
    }
}
Also used : ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) JobClient(org.apache.flink.core.execution.JobClient)

Example 34 with JobClient

Use of org.apache.flink.core.execution.JobClient in project flink by apache: class KafkaTableITCase, method testStartFromGroupOffsetsWithNoneResetStrategy.

private void testStartFromGroupOffsetsWithNoneResetStrategy() throws ExecutionException, InterruptedException {
    // we always use a different topic name for each parameterized topic,
    // in order to make sure the topic can be created.
    final String resetStrategy = "none";
    final String tableName = resetStrategy + "Table";
    final String topic = "groupOffset_" + format;
    String groupId = resetStrategy + (new Random()).nextInt();
    TableResult tableResult = null;
    try {
        tableResult = startFromGroupOffset(tableName, topic, groupId, resetStrategy, "MySink");
        tableResult.await();
    } finally {
        // ------------- cleanup -------------------
        if (tableResult != null) {
            tableResult.getJobClient().ifPresent(JobClient::cancel);
        }
        deleteTestTopic(topic);
    }
}
Also used : TableResult(org.apache.flink.table.api.TableResult) Random(java.util.Random) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) JobClient(org.apache.flink.core.execution.JobClient)

Example 35 with JobClient

Use of org.apache.flink.core.execution.JobClient in project flink by apache: class KafkaTableITCase, method testStartFromGroupOffsets.

private void testStartFromGroupOffsets(String resetStrategy) throws Exception {
    // we always use a different topic name for each parameterized topic,
    // in order to make sure the topic can be created.
    final String tableName = "Table" + format + resetStrategy;
    final String topic = "groupOffset_" + format + resetStrategy + ThreadLocalRandom.current().nextLong();
    String groupId = format + resetStrategy;
    String sinkName = "mySink" + format + resetStrategy;
    List<String> expected = Arrays.asList("+I[0, 0]", "+I[0, 1]", "+I[0, 2]", "+I[1, 3]", "+I[1, 4]", "+I[1, 5]");
    TableResult tableResult = null;
    try {
        tableResult = startFromGroupOffset(tableName, topic, groupId, resetStrategy, sinkName);
        if ("latest".equals(resetStrategy)) {
            expected = appendNewData(topic, tableName, groupId, expected.size());
        }
        KafkaTableTestUtils.waitingExpectedResults(sinkName, expected, Duration.ofSeconds(15));
    } finally {
        // ------------- cleanup -------------------
        if (tableResult != null) {
            tableResult.getJobClient().ifPresent(JobClient::cancel);
        }
        deleteTestTopic(topic);
    }
}
Also used : TableResult(org.apache.flink.table.api.TableResult) JobClient(org.apache.flink.core.execution.JobClient)
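
The cleanup in both Kafka tests (Examples 34 and 35) relies on TableResult.getJobClient() being present only when the statement actually launched a Flink job. A minimal sketch of that pattern in isolation, assuming the built-in datagen and blackhole connectors as stand-ins for the Kafka tables:

import org.apache.flink.core.execution.JobClient;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class TableResultCancelSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inStreamingMode().build());

        // Placeholder tables standing in for the Kafka source and test sink.
        tEnv.executeSql("CREATE TABLE src (v INT) WITH ('connector' = 'datagen')");
        tEnv.executeSql("CREATE TABLE snk (v INT) WITH ('connector' = 'blackhole')");

        // executeSql on an INSERT submits the job and returns a TableResult immediately.
        TableResult result = tEnv.executeSql("INSERT INTO snk SELECT v FROM src");

        // ... wait for or assert on results here ...

        // Same cleanup as in the tests above: cancel the streaming job if one was started.
        result.getJobClient().ifPresent(JobClient::cancel);
    }
}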

Aggregations

Classes most frequently used together with JobClient across the indexed examples (usage counts):

JobClient (org.apache.flink.core.execution.JobClient): 70
StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment): 36
Test (org.junit.Test): 32
JobExecutionResult (org.apache.flink.api.common.JobExecutionResult): 16
Configuration (org.apache.flink.configuration.Configuration): 16
JobListener (org.apache.flink.core.execution.JobListener): 14
ArrayList (java.util.ArrayList): 12
List (java.util.List): 10
JobID (org.apache.flink.api.common.JobID): 10
ExecutionException (java.util.concurrent.ExecutionException): 9
AtomicReference (java.util.concurrent.atomic.AtomicReference): 8
DEFAULT_COLLECT_DATA_TIMEOUT (org.apache.flink.connector.testframe.utils.ConnectorTestConstants.DEFAULT_COLLECT_DATA_TIMEOUT): 8
DEFAULT_JOB_STATUS_CHANGE_TIMEOUT (org.apache.flink.connector.testframe.utils.ConnectorTestConstants.DEFAULT_JOB_STATUS_CHANGE_TIMEOUT): 8
IOException (java.io.IOException): 7
DisplayName (org.junit.jupiter.api.DisplayName): 7
TestTemplate (org.junit.jupiter.api.TestTemplate): 7
Iterator (java.util.Iterator): 6
CompletableFuture (java.util.concurrent.CompletableFuture): 6
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 6
Preconditions.checkNotNull (org.apache.flink.util.Preconditions.checkNotNull): 6
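
As the aggregation suggests, JobClient most often appears alongside StreamExecutionEnvironment, JobExecutionResult, and JobListener. A minimal sketch of that combination, assuming a placeholder pipeline and job name, wires a listener that receives the JobClient on submission and the JobExecutionResult on completion:

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.JobListener;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;

public class JobListenerSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.registerJobListener(new JobListener() {
            @Override
            public void onJobSubmitted(JobClient jobClient, Throwable throwable) {
                // Called right after submission; jobClient is null if submission failed.
                if (jobClient != null) {
                    System.out.println("Submitted job " + jobClient.getJobID());
                }
            }

            @Override
            public void onJobExecuted(JobExecutionResult result, Throwable throwable) {
                // Called when execution finishes; result is null if the job failed.
                if (result != null) {
                    System.out.println("Finished in " + result.getNetRuntime() + " ms");
                }
            }
        });

        // Placeholder pipeline; execute() blocks until the job finishes and
        // triggers onJobExecuted via the registered listener.
        env.fromElements("a", "b", "c").addSink(new DiscardingSink<>());
        env.execute("job-listener-sketch");
    }
}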