Use of org.apache.flink.optimizer.Optimizer in project flink by apache.
The class CliFrontend, method info.
/**
* Executes the info action.
*
* @param args Command line arguments for the info action.
*/
protected int info(String[] args) {
    LOG.info("Running 'info' command.");

    // Parse command line options
    InfoOptions options;
    try {
        options = CliFrontendParser.parseInfoCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }

    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForInfo();
        return 0;
    }

    if (options.getJarFilePath() == null) {
        return handleArgException(new CliArgsException("The program JAR file was not specified."));
    }

    // -------- build the packaged program -------------
    PackagedProgram program;
    try {
        LOG.info("Building program from JAR file");
        program = buildProgram(options);
    } catch (Throwable t) {
        return handleError(t);
    }

    try {
        int parallelism = options.getParallelism();

        LOG.info("Creating program plan dump");
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
        FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);

        String jsonPlan = null;
        if (flinkPlan instanceof OptimizedPlan) {
            jsonPlan = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan);
        } else if (flinkPlan instanceof StreamingPlan) {
            jsonPlan = ((StreamingPlan) flinkPlan).getStreamingPlanAsJSON();
        }

        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }

        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
        return 0;
    } catch (Throwable t) {
        return handleError(t);
    } finally {
        program.deleteExtractedLibraries();
    }
}
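The same plan dump can be produced without going through the CLI by combining the classes used in the method above. A minimal sketch, assuming the same classes and plan types shown above; the helper name and JAR path are hypothetical and not part of the Flink sources:

// Hypothetical helper: compile the plan of a packaged program and print it as JSON,
// mirroring what the 'info' action does above. The JAR path is a placeholder.
static void dumpPlanAsJson(String jarPath, int parallelism) throws Exception {
    PackagedProgram program = new PackagedProgram(new File(jarPath));
    try {
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
        FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);
        if (flinkPlan instanceof OptimizedPlan) {
            System.out.println(new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan));
        } else if (flinkPlan instanceof StreamingPlan) {
            System.out.println(((StreamingPlan) flinkPlan).getStreamingPlanAsJSON());
        }
    } finally {
        // clean up any libraries extracted from the JAR, as the 'info' action does
        program.deleteExtractedLibraries();
    }
}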
Use of org.apache.flink.optimizer.Optimizer in project flink by apache.
The class CliFrontendPackageProgramTest, method testPlanWithExternalClass.
/**
* Ensure that we will never have the following error.
*
* <pre>
* org.apache.flink.client.program.ProgramInvocationException: The main method caused an error.
* at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:398)
* at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:301)
* at org.apache.flink.client.program.Client.getOptimizedPlan(Client.java:140)
* at org.apache.flink.client.program.Client.getOptimizedPlanAsJson(Client.java:125)
* at org.apache.flink.client.CliFrontend.info(CliFrontend.java:439)
* at org.apache.flink.client.CliFrontend.parseParameters(CliFrontend.java:931)
* at org.apache.flink.client.CliFrontend.main(CliFrontend.java:951)
* Caused by: java.io.IOException: java.lang.RuntimeException: java.lang.ClassNotFoundException: org.apache.hadoop.hive.ql.io.RCFileInputFormat
* at org.apache.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:102)
* at org.apache.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:54)
* at tlabs.CDR_In_Report.createHCatInputFormat(CDR_In_Report.java:322)
* at tlabs.CDR_Out_Report.main(CDR_Out_Report.java:380)
* at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
* at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
* at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
* at java.lang.reflect.Method.invoke(Method.java:622)
* at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:383)
* </pre>
*
* The test works as follows:
*
* <ul>
* <li> Use the CliFrontend to invoke a jar file that loads a class which is only available
* in the jar file itself (via a custom classloader)
* <li> Change the user-code classloader of the PackagedProgram to a special classloader for this test
* <li> The classloader will accept the special class (and return String.class)
* </ul>
*/
@Test
public void testPlanWithExternalClass() throws CompilerException, ProgramInvocationException {
    // create a final object reference, to be able to change its value later
    final boolean[] callme = { false };
    try {
        String[] arguments = { "--classpath", "file:///tmp/foo", "--classpath", "file:///tmp/bar", "-c", TEST_JAR_CLASSLOADERTEST_CLASS, getTestJarPath(), "true", "arg1", "arg2" };
        URL[] classpath = new URL[] { new URL("file:///tmp/foo"), new URL("file:///tmp/bar") };
        String[] reducedArguments = { "true", "arg1", "arg2" };

        RunOptions options = CliFrontendParser.parseRunCommand(arguments);
        assertEquals(getTestJarPath(), options.getJarFilePath());
        assertArrayEquals(classpath, options.getClasspaths().toArray());
        assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, options.getEntryPointClassName());
        assertArrayEquals(reducedArguments, options.getProgramArgs());

        CliFrontend frontend = new CliFrontend(CliFrontendTestUtils.getConfigDir());
        PackagedProgram prog = spy(frontend.buildProgram(options));

        ClassLoader testClassLoader = new ClassLoader(prog.getUserCodeClassLoader()) {
            @Override
            public Class<?> loadClass(String name) throws ClassNotFoundException {
                if ("org.apache.hadoop.hive.ql.io.RCFileInputFormat".equals(name)) {
                    callme[0] = true;
                    // Intentionally return the wrong class.
                    return String.class;
                } else {
                    return super.loadClass(name);
                }
            }
        };
        when(prog.getUserCodeClassLoader()).thenReturn(testClassLoader);

        assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, prog.getMainClassName());
        assertArrayEquals(reducedArguments, prog.getArguments());

        Configuration c = new Configuration();
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), c);

        // we expect this to fail with a "ClassNotFoundException"
        ClusterClient.getOptimizedPlanAsJson(compiler, prog, 666);
        fail("Should have failed with a ClassNotFoundException");
    } catch (ProgramInvocationException e) {
        if (!(e.getCause() instanceof ClassNotFoundException)) {
            e.printStackTrace();
            fail("Program didn't throw ClassNotFoundException");
        }
        assertTrue("Classloader was not called", callme[0]);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Program failed with the wrong exception: " + e.getClass().getName());
    }
}
Use of org.apache.flink.optimizer.Optimizer in project flink by apache.
The class LocalExecutor, method executePlan.
/**
* Executes the given program on a local runtime and waits for the job to finish.
*
* <p>If the executor has not been started before, this starts the executor and shuts it down
* after the job has finished. If the job runs in session mode, the executor is kept alive until
* no more references to the executor exist.</p>
*
* @param plan The plan of the program to execute.
* @return The net runtime of the program, in milliseconds.
*
* @throws Exception Thrown if either the startup of the local execution context or the execution
* of the program caused an exception.
*/
@Override
public JobExecutionResult executePlan(Plan plan) throws Exception {
    if (plan == null) {
        throw new IllegalArgumentException("The plan may not be null.");
    }

    synchronized (this.lock) {
        // check if we start a session dedicated for this execution
        final boolean shutDownAtEnd;

        if (flink == null) {
            shutDownAtEnd = true;

            // configure the number of local slots equal to the parallelism of the local plan
            if (this.taskManagerNumSlots == DEFAULT_TASK_MANAGER_NUM_SLOTS) {
                int maxParallelism = plan.getMaximumParallelism();
                if (maxParallelism > 0) {
                    this.taskManagerNumSlots = maxParallelism;
                }
            }

            // start the cluster for us
            start();
        } else {
            // we use the existing session
            shutDownAtEnd = false;
        }

        try {
            Configuration configuration = this.flink.configuration();

            Optimizer pc = new Optimizer(new DataStatistics(), configuration);
            OptimizedPlan op = pc.compile(plan);

            JobGraphGenerator jgg = new JobGraphGenerator(configuration);
            JobGraph jobGraph = jgg.compileJobGraph(op, plan.getJobId());

            boolean sysoutPrint = isPrintingStatusDuringExecution();
            return flink.submitJobAndWait(jobGraph, sysoutPrint);
        } finally {
            if (shutDownAtEnd) {
                stop();
            }
        }
    }
}
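A minimal usage sketch; the program below is hypothetical and only illustrates the call pattern. As the javadoc above states, executePlan() starts an embedded cluster itself and shuts it down again when no session is active:

// Hypothetical example program: build a small DataSet plan and run it on the LocalExecutor.
static void runLocally() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.fromElements(1, 2, 3)
        .map(new MapFunction<Integer, Integer>() {
            @Override
            public Integer map(Integer value) {
                return value * 2;
            }
        })
        .output(new DiscardingOutputFormat<Integer>());

    // createProgramPlan() turns the environment's sinks into a Plan the executor can run
    Plan plan = env.createProgramPlan("local-executor-example");

    JobExecutionResult result = new LocalExecutor().executePlan(plan);
    System.out.println("Net runtime: " + result.getNetRuntime() + " ms");
}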
Use of org.apache.flink.optimizer.Optimizer in project flink by apache.
The class RemoteExecutor, method getOptimizerPlanAsJSON.
@Override
public String getOptimizerPlanAsJSON(Plan plan) throws Exception {
    Optimizer opt = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
    OptimizedPlan optPlan = opt.compile(plan);
    return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optPlan);
}
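Note that, as the method body shows, the plan is compiled locally with a fresh Configuration and the default cost estimator; no connection to the remote cluster is needed for the dump. A short usage sketch, assuming a RemoteExecutor created for a hypothetical JobManager address and a Plan built elsewhere:

// Hypothetical host, port, and plan; only the call pattern is of interest here.
RemoteExecutor executor = new RemoteExecutor("jobmanager-host", 6123);
String jsonPlan = executor.getOptimizerPlanAsJSON(plan);
System.out.println(jsonPlan);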
Use of org.apache.flink.optimizer.Optimizer in project flink by apache.
The class ClientTest, method testGetExecutionPlan.
@Test
public void testGetExecutionPlan() {
    try {
        jobManagerSystem.actorOf(Props.create(FailureReturningActor.class), JobManager.JOB_MANAGER_NAME());

        PackagedProgram prg = new PackagedProgram(TestOptimizerPlan.class, "/dev/random", "/tmp");
        assertNotNull(prg.getPreviewPlan());

        Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
        OptimizedPlan op = (OptimizedPlan) ClusterClient.getOptimizedPlan(optimizer, prg, 1);
        assertNotNull(op);

        PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
        assertNotNull(dumper.getOptimizerPlanAsJSON(op));

        // test HTML escaping
        PlanJSONDumpGenerator dumper2 = new PlanJSONDumpGenerator();
        dumper2.setEncodeForHTML(true);
        String htmlEscaped = dumper2.getOptimizerPlanAsJSON(op);
        assertEquals(-1, htmlEscaped.indexOf('\\'));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}