Use of org.apache.flink.client.cli.CliArgsException in project flink by apache.
The CliFrontend class, method info().
/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected int info(String[] args) {
    LOG.info("Running 'info' command.");

    // Parse command line options
    InfoOptions options;
    try {
        options = CliFrontendParser.parseInfoCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForInfo();
        return 0;
    }

    if (options.getJarFilePath() == null) {
        return handleArgException(new CliArgsException("The program JAR file was not specified."));
    }

    // -------- build the packaged program -------------
    PackagedProgram program;
    try {
        LOG.info("Building program from JAR file");
        program = buildProgram(options);
    } catch (Throwable t) {
        return handleError(t);
    }

    try {
        int parallelism = options.getParallelism();

        LOG.info("Creating program plan dump");
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
        FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);

        String jsonPlan = null;
        if (flinkPlan instanceof OptimizedPlan) {
            jsonPlan = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan);
        } else if (flinkPlan instanceof StreamingPlan) {
            jsonPlan = ((StreamingPlan) flinkPlan).getStreamingPlanAsJSON();
        }

        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }

        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
        return 0;
    } catch (Throwable t) {
        return handleError(t);
    } finally {
        program.deleteExtractedLibraries();
    }
}
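For context, the batch-plan JSON produced above can also be generated outside the CLI by compiling a program plan and feeding it to the same PlanJSONDumpGenerator. The following is a minimal sketch, assuming a trivial DataSet job assembled on an ExecutionEnvironment; the PlanDumpSketch class and the throwaway job are made up for illustration, while Optimizer, DataStatistics, DefaultCostEstimator and PlanJSONDumpGenerator are the classes info() itself uses.

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.optimizer.DataStatistics;
import org.apache.flink.optimizer.Optimizer;
import org.apache.flink.optimizer.costs.DefaultCostEstimator;
import org.apache.flink.optimizer.plan.OptimizedPlan;
import org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator;

public class PlanDumpSketch {

    public static void main(String[] args) throws Exception {
        // Assemble a trivial DataSet job; any batch program would do here.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.fromElements(1, 2, 3)
           .output(new DiscardingOutputFormat<Integer>());

        // Extract the program plan instead of executing the job.
        Plan plan = env.createProgramPlan("plan-dump-sketch");

        // Same optimizer setup as in CliFrontend#info, here with an empty Configuration.
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
        OptimizedPlan optimizedPlan = compiler.compile(plan);

        // Render the optimized plan as the same JSON printed by "flink info".
        System.out.println(new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optimizedPlan));
    }
}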
Use of org.apache.flink.client.cli.CliArgsException in project flink by apache.
The CliFrontend class, method cancel().
/**
 * Executes the CANCEL action.
 *
 * @param args Command line arguments for the cancel action.
 */
protected int cancel(String[] args) {
    LOG.info("Running 'cancel' command.");

    CancelOptions options;
    try {
        options = CliFrontendParser.parseCancelCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForCancel();
        return 0;
    }

    String[] cleanedArgs = options.getArgs();
    boolean withSavepoint = options.isWithSavepoint();
    String targetDirectory = options.getSavepointTargetDirectory();

    JobID jobId;
    // - cancel -s <targetDir> <jobID> => custom target dir (parsed correctly)
    if (cleanedArgs.length > 0) {
        String jobIdString = cleanedArgs[0];
        try {
            jobId = new JobID(StringUtils.hexStringToByte(jobIdString));
        } catch (Exception e) {
            LOG.error("Error: The value for the Job ID is not a valid ID.");
            System.out.println("Error: The value for the Job ID is not a valid ID.");
            return 1;
        }
    } else if (targetDirectory != null) {
        // Try this for case: cancel -s <jobID> (default savepoint target dir)
        String jobIdString = targetDirectory;
        try {
            jobId = new JobID(StringUtils.hexStringToByte(jobIdString));
            targetDirectory = null;
        } catch (Exception e) {
            LOG.error("Missing JobID in the command line arguments.");
            System.out.println("Error: Specify a Job ID to cancel a job.");
            return 1;
        }
    } else {
        LOG.error("Missing JobID in the command line arguments.");
        System.out.println("Error: Specify a Job ID to cancel a job.");
        return 1;
    }

    try {
        ActorGateway jobManager = getJobManagerGateway(options);
        Object cancelMsg;
        if (withSavepoint) {
            if (targetDirectory == null) {
                logAndSysout("Cancelling job " + jobId + " with savepoint to default savepoint directory.");
            } else {
                logAndSysout("Cancelling job " + jobId + " with savepoint to " + targetDirectory + ".");
            }
            cancelMsg = new CancelJobWithSavepoint(jobId, targetDirectory);
        } else {
            logAndSysout("Cancelling job " + jobId + ".");
            cancelMsg = new CancelJob(jobId);
        }

        Future<Object> response = jobManager.ask(cancelMsg, clientTimeout);
        final Object rc = Await.result(response, clientTimeout);
        if (rc instanceof CancellationSuccess) {
            if (withSavepoint) {
                CancellationSuccess success = (CancellationSuccess) rc;
                String savepointPath = success.savepointPath();
                logAndSysout("Cancelled job " + jobId + ". Savepoint stored in " + savepointPath + ".");
            } else {
                logAndSysout("Cancelled job " + jobId + ".");
            }
        } else if (rc instanceof CancellationFailure) {
            throw new Exception("Canceling the job with ID " + jobId + " failed.", ((CancellationFailure) rc).cause());
        } else {
            throw new IllegalStateException("Unexpected response: " + rc);
        }
        return 0;
    } catch (Throwable t) {
        return handleError(t);
    }
}
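Both JobID branches above rely on the same conversion: a 32-character hex string is decoded with StringUtils.hexStringToByte and wrapped in a JobID. A standalone sketch of that conversion follows; the parseJobId helper, the JobIdParsingSketch class and the example ID are made up for illustration, while JobID and StringUtils are the classes used above.

import org.apache.flink.api.common.JobID;
import org.apache.flink.util.StringUtils;

public class JobIdParsingSketch {

    // Hypothetical helper mirroring the parsing done in CliFrontend#cancel.
    static JobID parseJobId(String jobIdString) {
        try {
            return new JobID(StringUtils.hexStringToByte(jobIdString));
        } catch (Exception e) {
            throw new IllegalArgumentException(
                    "The value for the Job ID is not a valid ID: " + jobIdString, e);
        }
    }

    public static void main(String[] args) {
        // A JobID's textual form is 32 hex characters (16 bytes).
        JobID id = parseJobId("fd72014d4c864993a2e5a9287b4a9c5d");
        System.out.println("Parsed JobID: " + id);
    }
}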
Use of org.apache.flink.client.cli.CliArgsException in project flink by apache.
The CliFrontend class, method list().
/**
 * Executes the list action.
 *
 * @param args Command line arguments for the list action.
 */
protected int list(String[] args) {
    LOG.info("Running 'list' command.");

    ListOptions options;
    try {
        options = CliFrontendParser.parseListCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForList();
        return 0;
    }

    boolean running = options.getRunning();
    boolean scheduled = options.getScheduled();

    // print both running and scheduled jobs if no option is supplied
    if (!running && !scheduled) {
        running = true;
        scheduled = true;
    }

    try {
        ActorGateway jobManagerGateway = getJobManagerGateway(options);

        LOG.info("Connecting to JobManager to retrieve list of jobs");
        Future<Object> response = jobManagerGateway.ask(JobManagerMessages.getRequestRunningJobsStatus(), clientTimeout);

        Object result;
        try {
            result = Await.result(response, clientTimeout);
        } catch (Exception e) {
            throw new Exception("Could not retrieve running jobs from the JobManager.", e);
        }

        if (result instanceof RunningJobsStatus) {
            LOG.info("Successfully retrieved list of jobs");
            List<JobStatusMessage> jobs = ((RunningJobsStatus) result).getStatusMessages();

            ArrayList<JobStatusMessage> runningJobs = null;
            ArrayList<JobStatusMessage> scheduledJobs = null;
            if (running) {
                runningJobs = new ArrayList<JobStatusMessage>();
            }
            if (scheduled) {
                scheduledJobs = new ArrayList<JobStatusMessage>();
            }

            for (JobStatusMessage rj : jobs) {
                if (running && (rj.getJobState().equals(JobStatus.RUNNING) || rj.getJobState().equals(JobStatus.RESTARTING))) {
                    runningJobs.add(rj);
                }
                if (scheduled && rj.getJobState().equals(JobStatus.CREATED)) {
                    scheduledJobs.add(rj);
                }
            }

            SimpleDateFormat df = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
            Comparator<JobStatusMessage> njec = new Comparator<JobStatusMessage>() {
                @Override
                public int compare(JobStatusMessage o1, JobStatusMessage o2) {
                    return (int) (o1.getStartTime() - o2.getStartTime());
                }
            };

            if (running) {
                if (runningJobs.size() == 0) {
                    System.out.println("No running jobs.");
                } else {
                    Collections.sort(runningJobs, njec);
                    System.out.println("------------------ Running/Restarting Jobs -------------------");
                    for (JobStatusMessage rj : runningJobs) {
                        System.out.println(df.format(new Date(rj.getStartTime())) + " : " + rj.getJobId() + " : " + rj.getJobName() + " (" + rj.getJobState() + ")");
                    }
                    System.out.println("--------------------------------------------------------------");
                }
            }

            if (scheduled) {
                if (scheduledJobs.size() == 0) {
                    System.out.println("No scheduled jobs.");
                } else {
                    Collections.sort(scheduledJobs, njec);
                    System.out.println("----------------------- Scheduled Jobs -----------------------");
                    for (JobStatusMessage rj : scheduledJobs) {
                        System.out.println(df.format(new Date(rj.getStartTime())) + " : " + rj.getJobId() + " : " + rj.getJobName());
                    }
                    System.out.println("--------------------------------------------------------------");
                }
            }
            return 0;
        } else {
            throw new Exception("RequestRunningJobs requires a response of type " + "RunningJobs. Instead the response is of type " + result.getClass() + ".");
        }
    } catch (Throwable t) {
        return handleError(t);
    }
}
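One caveat in the snippet above: the comparator subtracts two long start times and casts the difference to int, which can overflow and mis-order jobs whose start times are far apart. A small sketch of an overflow-safe alternative follows, assuming only the JobStatusMessage class already used above; the SortJobsSketch name and helper are made up for illustration. Collections.sort is kept to match the Java 7 style of the surrounding code.

import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.flink.runtime.client.JobStatusMessage;

public class SortJobsSketch {

    // Overflow-safe ordering by start time; equivalent to the comparator above
    // for nearby timestamps, but correct for arbitrarily distant ones.
    static final Comparator<JobStatusMessage> BY_START_TIME = new Comparator<JobStatusMessage>() {
        @Override
        public int compare(JobStatusMessage o1, JobStatusMessage o2) {
            return Long.compare(o1.getStartTime(), o2.getStartTime());
        }
    };

    static void sortByStartTime(List<JobStatusMessage> jobs) {
        Collections.sort(jobs, BY_START_TIME);
    }
}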
Use of org.apache.flink.client.cli.CliArgsException in project flink by apache.
The CliFrontend class, method savepoint().
/**
 * Executes the SAVEPOINT action.
 *
 * @param args Command line arguments for the savepoint action.
 */
protected int savepoint(String[] args) {
    LOG.info("Running 'savepoint' command.");

    SavepointOptions options;
    try {
        options = CliFrontendParser.parseSavepointCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForSavepoint();
        return 0;
    }

    if (options.isDispose()) {
        // Discard
        return disposeSavepoint(options);
    } else {
        // Trigger
        String[] cleanedArgs = options.getArgs();
        JobID jobId;
        if (cleanedArgs.length >= 1) {
            String jobIdString = cleanedArgs[0];
            try {
                jobId = new JobID(StringUtils.hexStringToByte(jobIdString));
            } catch (Exception e) {
                return handleArgException(new IllegalArgumentException("Error: The value for the Job ID is not a valid ID."));
            }
        } else {
            return handleArgException(new IllegalArgumentException("Error: The value for the Job ID is not a valid ID. " + "Specify a Job ID to trigger a savepoint."));
        }

        String savepointDirectory = null;
        if (cleanedArgs.length >= 2) {
            savepointDirectory = cleanedArgs[1];
        }

        // Print superfluous arguments
        if (cleanedArgs.length >= 3) {
            logAndSysout("Provided more arguments than required. Ignoring not needed arguments.");
        }

        return triggerSavepoint(options, jobId, savepointDirectory);
    }
}
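The positional arguments handled above come from CliFrontendParser.parseSavepointCommand. A minimal sketch of inspecting those options directly is shown below, assuming the parser and the SavepointOptions accessors already used above; the SavepointArgsSketch class, the job ID and the target directory are placeholders.

import java.util.Arrays;

import org.apache.flink.client.cli.CliFrontendParser;
import org.apache.flink.client.cli.SavepointOptions;

public class SavepointArgsSketch {

    public static void main(String[] args) throws Exception {
        // "<jobID> [<targetDirectory>]" -- the leading action word "savepoint"
        // has already been stripped before CliFrontend#savepoint gets the arguments.
        SavepointOptions options = CliFrontendParser.parseSavepointCommand(
                new String[] {"fd72014d4c864993a2e5a9287b4a9c5d", "hdfs:///flink/savepoints"});

        System.out.println("dispose requested: " + options.isDispose());
        System.out.println("positional args:   " + Arrays.toString(options.getArgs()));
    }
}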
Use of org.apache.flink.client.cli.CliArgsException in project flink by apache.
The CliFrontend class, method run().
// --------------------------------------------------------------------------------------------
// Execute Actions
// --------------------------------------------------------------------------------------------
/**
 * Executes the run action.
 *
 * @param args Command line arguments for the run action.
 */
protected int run(String[] args) {
    LOG.info("Running 'run' command.");

    RunOptions options;
    try {
        options = CliFrontendParser.parseRunCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForRun();
        return 0;
    }

    if (options.getJarFilePath() == null) {
        return handleArgException(new CliArgsException("The program JAR file was not specified."));
    }

    PackagedProgram program;
    try {
        LOG.info("Building program from JAR file");
        program = buildProgram(options);
    } catch (FileNotFoundException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    ClusterClient client = null;
    try {
        client = createClient(options, program);
        client.setPrintStatusDuringExecution(options.getStdoutLogging());
        client.setDetached(options.getDetachedMode());
        LOG.debug("Client slots is set to {}", client.getMaxSlots());

        LOG.debug(options.getSavepointRestoreSettings().toString());

        int userParallelism = options.getParallelism();
        LOG.debug("User parallelism is set to {}", userParallelism);
        if (client.getMaxSlots() != -1 && userParallelism == -1) {
            logAndSysout("Using the parallelism provided by the remote cluster (" + client.getMaxSlots() + "). " + "To use another parallelism, set it at the ./bin/flink client.");
            userParallelism = client.getMaxSlots();
        }

        return executeProgram(program, client, userParallelism);
    } catch (Throwable t) {
        return handleError(t);
    } finally {
        if (client != null) {
            client.shutdown();
        }
        if (program != null) {
            program.deleteExtractedLibraries();
        }
    }
}
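The buildProgram(options) call above wraps the user JAR in a PackagedProgram. A minimal sketch of doing that directly is shown below, assuming the PackagedProgram(File, String...) constructor from flink-clients; the BuildProgramSketch class, the JAR path and the program arguments are placeholders.

import java.io.File;

import org.apache.flink.client.program.PackagedProgram;

public class BuildProgramSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical JAR path and program arguments; buildProgram(options) above
        // derives the same inputs from the parsed RunOptions.
        File jarFile = new File("/path/to/user-job.jar");
        String[] programArgs = {"--input", "hdfs:///data/in", "--output", "hdfs:///data/out"};

        PackagedProgram program = new PackagedProgram(jarFile, programArgs);
        try {
            System.out.println("Entry point: " + program.getMainClassName());
            System.out.println("Description: " + program.getDescription());
        } finally {
            // Same cleanup as the finally block above.
            program.deleteExtractedLibraries();
        }
    }
}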