Use of org.apache.flink.client.program.PackagedProgram in the Apache Flink project:
class KubernetesApplicationClusterEntrypoint, method main.
/**
 * Bootstraps a Flink application cluster on Kubernetes: runs startup checks,
 * loads the effective configuration, builds the user application program, and
 * hands control to the cluster entrypoint runtime. Exits the JVM with status 1
 * if the program cannot be created or the configuration cannot be applied.
 *
 * @param args dynamic configuration parameters passed on the command line
 */
public static void main(final String[] args) {
    // Startup checks and logging.
    EnvironmentInformation.logEnvironmentInfo(
            LOG, KubernetesApplicationClusterEntrypoint.class.getSimpleName(), args);
    SignalHandler.register(LOG);
    JvmShutdownSafeguard.installAsShutdownHook(LOG);

    final Configuration dynamicParameters =
            ClusterEntrypointUtils.parseParametersOrExit(
                    args,
                    new DynamicParametersConfigurationParserFactory(),
                    KubernetesApplicationClusterEntrypoint.class);
    final Configuration configuration =
            KubernetesEntrypointUtils.loadConfiguration(dynamicParameters);

    PackagedProgram program = null;
    try {
        program = getPackagedProgram(configuration);
    } catch (Exception e) {
        LOG.error("Could not create application program.", e);
        System.exit(1);
    }

    try {
        configureExecution(configuration, program);
    } catch (Exception e) {
        LOG.error("Could not apply application configuration.", e);
        System.exit(1);
    }

    final KubernetesApplicationClusterEntrypoint entrypoint =
            new KubernetesApplicationClusterEntrypoint(configuration, program);
    ClusterEntrypoint.runClusterEntrypoint(entrypoint);
}
Use of org.apache.flink.client.program.PackagedProgram in the Apache Flink project:
class CliFrontend, method run.
/**
 * Executes the run action: parses the command line, resolves the effective
 * configuration, and executes the packaged user program.
 *
 * @param args Command line arguments for the run action.
 * @throws Exception if command-line parsing or program execution fails
 */
protected void run(String[] args) throws Exception {
    LOG.info("Running 'run' command.");

    final Options runOptions = CliFrontendParser.getRunCommandOptions();
    final CommandLine commandLine = getCommandLine(runOptions, args, true);

    // Print usage and bail out early if the user asked for help.
    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForRun(customCommandLines);
        return;
    }

    final CustomCommandLine activeCommandLine =
            validateAndGetActiveCommandLine(checkNotNull(commandLine));
    final ProgramOptions programOptions = ProgramOptions.create(commandLine);
    final List<URL> jobJars = getJobJarAndDependencies(programOptions);
    final Configuration effectiveConfiguration =
            getEffectiveConfiguration(activeCommandLine, commandLine, programOptions, jobJars);

    LOG.debug("Effective executor configuration: {}", effectiveConfiguration);

    // The packaged program owns class loaders and extracted temp files;
    // try-with-resources guarantees they are released after execution.
    try (PackagedProgram program = getPackagedProgram(programOptions, effectiveConfiguration)) {
        executeProgram(effectiveConfiguration, program);
    }
}
Use of org.apache.flink.client.program.PackagedProgram in the Apache Flink project:
class CliFrontend, method info.
/**
 * Executes the info action: builds the packaged program from the user JAR and
 * prints its JSON execution plan and description to stdout.
 *
 * @param args Command line arguments for the info action.
 * @throws Exception if the command line cannot be parsed or the plan cannot be built
 */
protected void info(String[] args) throws Exception {
    LOG.info("Running 'info' command.");

    final Options commandOptions = CliFrontendParser.getInfoCommandOptions();
    final CommandLine commandLine = CliFrontendParser.parse(commandOptions, args, true);

    // Evaluate the help flag BEFORE creating the program options, consistent
    // with run(): "-h" must print usage even when other options are malformed
    // (ProgramOptions.create can throw on invalid arguments).
    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForInfo();
        return;
    }

    final ProgramOptions programOptions = ProgramOptions.create(commandLine);

    // -------- build the packaged program -------------
    LOG.info("Building program from JAR file");

    PackagedProgram program = null;
    try {
        // Fall back to the configured default when no explicit parallelism was given.
        int parallelism = programOptions.getParallelism();
        if (ExecutionConfig.PARALLELISM_DEFAULT == parallelism) {
            parallelism = defaultParallelism;
        }

        LOG.info("Creating program plan dump");
        final CustomCommandLine activeCommandLine =
                validateAndGetActiveCommandLine(checkNotNull(commandLine));
        final Configuration effectiveConfiguration =
                getEffectiveConfiguration(
                        activeCommandLine,
                        commandLine,
                        programOptions,
                        getJobJarAndDependencies(programOptions));

        program = buildProgram(programOptions, effectiveConfiguration);

        Pipeline pipeline =
                PackagedProgramUtils.getPipelineFromProgram(
                        program, effectiveConfiguration, parallelism, true);
        String jsonPlan = FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline);

        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }

        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
    } finally {
        // buildProgram extracts temp files and creates class loaders; always release them.
        if (program != null) {
            program.close();
        }
    }
}
Use of org.apache.flink.client.program.PackagedProgram in the Apache Flink project:
class ClassLoaderITCase, method testDisposeSavepointWithCustomKvState.
/**
 * Tests disposal of a savepoint, which contains custom user code KvState.
 *
 * <p>Flow: start the user program detached on a background thread, wait until
 * the job is RUNNING, trigger a savepoint (with retries while operators are
 * still opening), dispose the savepoint, then cancel the job and join the
 * invoke thread so later tests are not affected.
 */
@Test
public void testDisposeSavepointWithCustomKvState() throws Exception {
    ClusterClient<?> clusterClient =
            new MiniClusterClient(new Configuration(), miniClusterResource.getMiniCluster());
    Deadline deadline = new FiniteDuration(100, TimeUnit.SECONDS).fromNow();
    File checkpointDir = FOLDER.newFolder();
    File outputDir = FOLDER.newFolder();

    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setJarFile(new File(CUSTOM_KV_STATE_JAR_PATH))
                    .setArguments(
                            new String[] {
                                String.valueOf(parallelism),
                                checkpointDir.toURI().toString(),
                                "5000",
                                outputDir.toURI().toString(),
                                // Disable unaligned checkpoints as this test is
                                // triggering concurrent savepoints/checkpoints
                                "false"
                            })
                    .build();
    TestStreamEnvironment.setAsContext(
            miniClusterResource.getMiniCluster(),
            parallelism,
            Collections.singleton(new Path(CUSTOM_KV_STATE_JAR_PATH)),
            Collections.emptyList());

    // Execute detached; cancellation at the end of the test is expected,
    // so a JobCancellationException cause is deliberately ignored.
    Thread invokeThread = new Thread(() -> {
        try {
            program.invokeInteractiveModeForExecution();
        } catch (ProgramInvocationException ex) {
            if (ex.getCause() == null || !(ex.getCause() instanceof JobCancellationException)) {
                ex.printStackTrace();
            }
        }
    });
    LOG.info("Starting program invoke thread");
    invokeThread.start();

    // The job ID
    JobID jobId = null;
    LOG.info("Waiting for job status running.");

    // Wait for running job
    while (jobId == null && deadline.hasTimeLeft()) {
        Collection<JobStatusMessage> jobs =
                clusterClient.listJobs().get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
        for (JobStatusMessage job : jobs) {
            if (job.getJobState() == JobStatus.RUNNING) {
                jobId = job.getJobId();
                LOG.info("Job running. ID: " + jobId);
                break;
            }
        }
        // Retry if job is not available yet
        if (jobId == null) {
            Thread.sleep(100L);
        }
    }

    // Trigger savepoint
    String savepointPath = null;
    for (int i = 0; i < 20; i++) {
        LOG.info("Triggering savepoint (" + (i + 1) + "/20).");
        try {
            savepointPath =
                    clusterClient
                            .triggerSavepoint(jobId, null, SavepointFormatType.CANONICAL)
                            .get(deadline.timeLeft().toMillis(), TimeUnit.MILLISECONDS);
            // Stop retrying once a savepoint has been taken; without this break
            // a successful trigger was followed by up to 19 redundant savepoints.
            break;
        } catch (Exception cause) {
            LOG.info("Failed to trigger savepoint. Retrying...", cause);
            // This can fail if the operators are not opened yet
            Thread.sleep(500);
        }
    }
    assertNotNull("Failed to trigger savepoint", savepointPath);

    clusterClient.disposeSavepoint(savepointPath).get();
    clusterClient.cancel(jobId).get();

    // make sure, the execution is finished to not influence other test methods
    invokeThread.join(deadline.timeLeft().toMillis());
    assertFalse("Program invoke thread still running", invokeThread.isAlive());
}
Use of org.apache.flink.client.program.PackagedProgram in the Apache Flink project:
class ClassLoaderITCase, method testCheckpointingCustomKvStateJobWithCustomClassLoader.
/**
 * Verifies that a checkpointing job using custom user-code KvState runs under
 * a custom class loader: the program is expected to terminate by throwing a
 * {@code ProgramInvocationException} whose cause chain contains a
 * {@code SuccessException}.
 */
@Test
public void testCheckpointingCustomKvStateJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
    final File checkpointDir = FOLDER.newFolder();
    final File outputDir = FOLDER.newFolder();

    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setJarFile(new File(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH))
                    .setArguments(
                            new String[] {
                                checkpointDir.toURI().toString(), outputDir.toURI().toString()
                            })
                    .build();
    TestStreamEnvironment.setAsContext(
            miniClusterResource.getMiniCluster(),
            parallelism,
            Collections.singleton(new Path(CHECKPOINTING_CUSTOM_KV_STATE_JAR_PATH)),
            Collections.emptyList());

    try {
        program.invokeInteractiveModeForExecution();
        fail("exception should happen");
    } catch (ProgramInvocationException e) {
        // The job signals success by failing with a SuccessException.
        assertTrue(ExceptionUtils.findThrowable(e, SuccessException.class).isPresent());
    }
}
Aggregations