Example 31 with PackagedProgram

Use of org.apache.flink.client.program.PackagedProgram in project flink by apache.

From the class KubernetesApplicationClusterEntrypoint, method main.

public static void main(final String[] args) {
    // startup checks and logging
    EnvironmentInformation.logEnvironmentInfo(LOG, KubernetesApplicationClusterEntrypoint.class.getSimpleName(), args);
    SignalHandler.register(LOG);
    JvmShutdownSafeguard.installAsShutdownHook(LOG);
    final Configuration dynamicParameters = ClusterEntrypointUtils.parseParametersOrExit(args, new DynamicParametersConfigurationParserFactory(), KubernetesApplicationClusterEntrypoint.class);
    final Configuration configuration = KubernetesEntrypointUtils.loadConfiguration(dynamicParameters);
    PackagedProgram program = null;
    try {
        program = getPackagedProgram(configuration);
    } catch (Exception e) {
        LOG.error("Could not create application program.", e);
        System.exit(1);
    }
    try {
        configureExecution(configuration, program);
    } catch (Exception e) {
        LOG.error("Could not apply application configuration.", e);
        System.exit(1);
    }
    final KubernetesApplicationClusterEntrypoint kubernetesApplicationClusterEntrypoint = new KubernetesApplicationClusterEntrypoint(configuration, program);
    ClusterEntrypoint.runClusterEntrypoint(kubernetesApplicationClusterEntrypoint);
}
Also used : DynamicParametersConfigurationParserFactory(org.apache.flink.runtime.entrypoint.DynamicParametersConfigurationParserFactory) PackagedProgram(org.apache.flink.client.program.PackagedProgram) Configuration(org.apache.flink.configuration.Configuration) ApplicationConfiguration(org.apache.flink.client.deployment.application.ApplicationConfiguration) FlinkException(org.apache.flink.util.FlinkException)
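
The getPackagedProgram(configuration) helper called above is not part of this excerpt. The following is a minimal sketch of one plausible shape for it, assuming the user jar is listed under PipelineOptions.JARS and that the entry class and arguments come from ApplicationConfiguration; the actual Kubernetes entrypoint may resolve the program differently.

private static PackagedProgram getPackagedProgram(final Configuration configuration) throws FlinkException {
    // Sketch only: assumes the user jar is the first entry of PipelineOptions.JARS.
    final ApplicationConfiguration appConfig = ApplicationConfiguration.fromConfiguration(configuration);
    final String jarPath = configuration.get(PipelineOptions.JARS).get(0);
    try {
        return PackagedProgram.newBuilder()
                .setJarFile(new File(jarPath))
                .setEntryPointClassName(appConfig.getApplicationClassName())
                .setArguments(appConfig.getProgramArguments())
                .build();
    } catch (ProgramInvocationException e) {
        throw new FlinkException("Could not create the application program.", e);
    }
}

Besides the classes listed above, this sketch needs PipelineOptions (org.apache.flink.configuration.PipelineOptions), File (java.io.File) and ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException).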

Example 32 with PackagedProgram

Use of org.apache.flink.client.program.PackagedProgram in project flink by apache.

From the class CliFrontend, method run.

/**
 * Executes the run action.
 *
 * @param args Command line arguments for the run action.
 */
protected void run(String[] args) throws Exception {
    LOG.info("Running 'run' command.");
    final Options commandOptions = CliFrontendParser.getRunCommandOptions();
    final CommandLine commandLine = getCommandLine(commandOptions, args, true);
    // evaluate help flag
    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForRun(customCommandLines);
        return;
    }
    final CustomCommandLine activeCommandLine = validateAndGetActiveCommandLine(checkNotNull(commandLine));
    final ProgramOptions programOptions = ProgramOptions.create(commandLine);
    final List<URL> jobJars = getJobJarAndDependencies(programOptions);
    final Configuration effectiveConfiguration = getEffectiveConfiguration(activeCommandLine, commandLine, programOptions, jobJars);
    LOG.debug("Effective executor configuration: {}", effectiveConfiguration);
    try (PackagedProgram program = getPackagedProgram(programOptions, effectiveConfiguration)) {
        executeProgram(effectiveConfiguration, program);
    }
}
Also used : JobManagerOptions(org.apache.flink.configuration.JobManagerOptions) Options(org.apache.commons.cli.Options) RestOptions(org.apache.flink.configuration.RestOptions) CoreOptions(org.apache.flink.configuration.CoreOptions) PackagedProgram(org.apache.flink.client.program.PackagedProgram) CommandLine(org.apache.commons.cli.CommandLine) ApplicationConfiguration(org.apache.flink.client.deployment.application.ApplicationConfiguration) SecurityConfiguration(org.apache.flink.runtime.security.SecurityConfiguration) Configuration(org.apache.flink.configuration.Configuration) GlobalConfiguration(org.apache.flink.configuration.GlobalConfiguration) URL(java.net.URL)
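
The getPackagedProgram(programOptions, effectiveConfiguration) helper is likewise outside the excerpt. A minimal sketch, assuming the standard ProgramOptions accessors (getJarFilePath, getEntryPointClassName, getProgramArgs) and the builder methods already shown on this page; the real CliFrontend helper may differ in detail:

private PackagedProgram getPackagedProgram(ProgramOptions programOptions, Configuration effectiveConfiguration) throws ProgramInvocationException {
    // Sketch only: build the program directly from the parsed CLI options.
    return PackagedProgram.newBuilder()
            .setJarFile(new File(programOptions.getJarFilePath()))
            .setEntryPointClassName(programOptions.getEntryPointClassName())
            .setConfiguration(effectiveConfiguration)
            .setArguments(programOptions.getProgramArgs())
            .build();
}

Besides the classes listed above, this sketch needs File (java.io.File) and ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException).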

Example 33 with PackagedProgram

Use of org.apache.flink.client.program.PackagedProgram in project flink by apache.

From the class CliFrontend, method info.

/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected void info(String[] args) throws Exception {
    LOG.info("Running 'info' command.");
    final Options commandOptions = CliFrontendParser.getInfoCommandOptions();
    final CommandLine commandLine = CliFrontendParser.parse(commandOptions, args, true);
    final ProgramOptions programOptions = ProgramOptions.create(commandLine);
    // evaluate help flag
    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForInfo();
        return;
    }
    // -------- build the packaged program -------------
    LOG.info("Building program from JAR file");
    PackagedProgram program = null;
    try {
        int parallelism = programOptions.getParallelism();
        if (ExecutionConfig.PARALLELISM_DEFAULT == parallelism) {
            parallelism = defaultParallelism;
        }
        LOG.info("Creating program plan dump");
        final CustomCommandLine activeCommandLine = validateAndGetActiveCommandLine(checkNotNull(commandLine));
        final Configuration effectiveConfiguration = getEffectiveConfiguration(activeCommandLine, commandLine, programOptions, getJobJarAndDependencies(programOptions));
        program = buildProgram(programOptions, effectiveConfiguration);
        Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(program, effectiveConfiguration, parallelism, true);
        String jsonPlan = FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline);
        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }
        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
    } finally {
        if (program != null) {
            program.close();
        }
    }
}
Also used : JobManagerOptions(org.apache.flink.configuration.JobManagerOptions) Options(org.apache.commons.cli.Options) RestOptions(org.apache.flink.configuration.RestOptions) CoreOptions(org.apache.flink.configuration.CoreOptions) PackagedProgram(org.apache.flink.client.program.PackagedProgram) CommandLine(org.apache.commons.cli.CommandLine) ApplicationConfiguration(org.apache.flink.client.deployment.application.ApplicationConfiguration) SecurityConfiguration(org.apache.flink.runtime.security.SecurityConfiguration) Configuration(org.apache.flink.configuration.Configuration) GlobalConfiguration(org.apache.flink.configuration.GlobalConfiguration) Pipeline(org.apache.flink.api.dag.Pipeline)
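
The same plan dump can be produced outside CliFrontend. The sketch below reuses only the calls shown in info() above (PackagedProgram.newBuilder, PackagedProgramUtils.getPipelineFromProgram, FlinkPipelineTranslationUtil.translateToJSONExecutionPlan); the jar path and parallelism are placeholders supplied by the caller.

public static void printExecutionPlan(String jarPath, Configuration configuration, int parallelism) throws Exception {
    // Sketch only: compile the jar's pipeline without executing it and print the JSON plan.
    try (PackagedProgram program = PackagedProgram.newBuilder().setJarFile(new File(jarPath)).build()) {
        Pipeline pipeline = PackagedProgramUtils.getPipelineFromProgram(program, configuration, parallelism, true);
        System.out.println(FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline));
    }
}

Besides the classes listed above, this sketch needs File (java.io.File) and PackagedProgramUtils (org.apache.flink.client.program.PackagedProgramUtils).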

Example 34 with PackagedProgram

Use of org.apache.flink.client.program.PackagedProgram in project flink by apache.

From the class ClassLoaderITCase, method testDisposeSavepointWithCustomKvState.

/**
 * Tests disposal of a savepoint, which contains custom user code KvState.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
    ClusterClient<?> clusterClient = new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());
    Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();
    File checkpointDir = FOLDER.newFolder();
    File outputDir = FOLDER.newFolder();
    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setJarFile(new File(CUSTOM_KV_STATE_JAR_PATH))
                    .setArguments(
                            new String[] {
                                String.valueOf(parallelism),
                                checkpointDir.toURI().toString(),
                                "5000",
                                outputDir.toURI().toString(),
                                // Disable unaligned checkpoints as this test is triggering
                                // concurrent savepoints/checkpoints
                                "false"
                            })
                    .build();
    TestStreamEnvironment.setAsContext(miniClusterResource.getMiniCluster(), parallelism, Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)), Collections.emptyList());
    // Execute detached
    Thread invokeThread = new Thread(() -> {
        try {
            program.invokeInteractiveModeForExecution();
        } catch (ProgramInvocationException ex) {
            if (ex.getCause() == null || !(ex.getCause() instanceof JobCancellationException)) {
                ex.printStackTrace();
            }
        }
    });
    LOG.info("Starting program invoke thread");
    invokeThread.start();
    // The job ID
    JobID jobId = null;
    LOG.info("Waiting for job status running.");
    // Wait for running job
    while (jobId == null && deadline.hasTimeLeft()) {
        Collection<JobStatusMessage> jobs = clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
        for (JobStatusMessage job : jobs) {
            if (job.getJobState() == JobStatus.RUNNING) {
                jobId = job.getJobId();
                LOG.info("Job running. ID: " + jobId);
                break;
            }
        }
        // Retry if job is not available yet
        if (jobId == null) {
            Thread.sleep(100L);
        }
    }
    // Trigger savepoint
    String savepointPath = null;
    for (int i = 0; i < 20; i++) {
        LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
        try {
            savepointPath = clusterClient.triggerSavepoint(jobId, null, SavepointFormatType.CANONICAL).get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
        } catch (Exception cause) {
            LOG.info("Failed to trigger savepoint. Retrying...", cause);
            // This can fail if the operators are not opened yet
            Thread.sleep(500);
        }
    }
    assertNotNull("Failed to trigger savepoint", savepointPath);
    clusterClient.disposeSavepoint(savepointPath).get();
    clusterClient.cancel(jobId).get();
    // make sure, the execution is finished to not influence other test methods
    invokeThread.join(deadline.timeLeft().toMillis());
    assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
Also used : Path(org.apache.flink.core.fs.Path) MiniClusterResourceConfiguration(org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration) Configuration(org.apache.flink.configuration.Configuration) Deadline(scala.concurrent.duration.Deadline) FiniteDuration(scala.concurrent.duration.FiniteDuration) MiniClusterClient(org.apache.flink.client.program.MiniClusterClient) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) SuccessException(org.apache.flink.test.util.SuccessException) JobCancellationException(org.apache.flink.runtime.client.JobCancellationException) IOException(java.io.IOException) JobCancellationException(org.apache.flink.runtime.client.JobCancellationException) PackagedProgram(org.apache.flink.client.program.PackagedProgram) JobStatusMessage(org.apache.flink.runtime.client.JobStatusMessage) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) File(java.io.File) JobID(org.apache.flink.api.common.JobID) Test(org.junit.Test)

Example 35 with PackagedProgram

Use of org.apache.flink.client.program.PackagedProgram in project flink by apache.

From the class ClassLoaderITCase, method testCheckpointingCustomKvStateJobWithCustomClassLoader.

@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
    File checkpointDir = FOLDER.newFolder();
    File outputDir = FOLDER.newFolder();
    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setJarFile(new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH))
                    .setArguments(new String[] { checkpointDir.toURI().toString(), outputDir.toURI().toString() })
                    .build();
    TestStreamEnvironment.setAsContext(miniClusterResource.getMiniCluster(), parallelism, Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)), Collections.emptyList());
    try {
        program.invokeInteractiveModeForExecution();
        fail("exception should happen");
    } catch (ProgramInvocationException e) {
        assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());
    }
}
Also used : Path(org.apache.flink.core.fs.Path) PackagedProgram(org.apache.flink.client.program.PackagedProgram) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) SuccessException(org.apache.flink.test.util.SuccessException) File(java.io.File) Test(org.junit.Test)

Aggregations

PackagedProgram (org.apache.flink.client.program.PackagedProgram): 42 usages
Test (org.junit.Test): 25 usages
File (java.io.File): 20 usages
Configuration (org.apache.flink.configuration.Configuration): 19 usages
URL (java.net.URL): 13 usages
ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException): 12 usages
Path (org.apache.flink.core.fs.Path): 12 usages
FileNotFoundException (java.io.FileNotFoundException): 7 usages
IOException (java.io.IOException): 7 usages
CommandLine (org.apache.commons.cli.CommandLine): 6 usages
RunOptions (org.apache.flink.client.cli.RunOptions): 5 usages
CompilerException (org.apache.flink.optimizer.CompilerException): 5 usages
FlinkException (org.apache.flink.util.FlinkException): 5 usages
Pipeline (org.apache.flink.api.dag.Pipeline): 4 usages
DataStatistics (org.apache.flink.optimizer.DataStatistics): 4 usages
Optimizer (org.apache.flink.optimizer.Optimizer): 4 usages
DefaultCostEstimator (org.apache.flink.optimizer.costs.DefaultCostEstimator): 4 usages
JobID (org.apache.flink.api.common.JobID): 3 usages
ApplicationConfiguration (org.apache.flink.client.deployment.application.ApplicationConfiguration): 3 usages
MiniClusterResourceConfiguration (org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration): 3 usages