Example 1 with PlanJSONDumpGenerator

Use of org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator in the apache/flink project.

From class PreviewPlanDumpTest, method dump:

private void dump(Plan p) {
    try {
        // Pre-optimize the plan and render it as JSON.
        List<DataSinkNode> sinks = Optimizer.createPreOptimizedPlan(p);
        PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
        String json = dumper.getPactPlanAsJSON(sinks);
        // Parse the full output; malformed JSON surfaces as a JsonParseException below.
        JsonParser parser = new JsonFactory().createJsonParser(json);
        while (parser.nextToken() != null) {
            // consume all tokens; this loop only validates the output
        }
    } catch (JsonParseException e) {
        e.printStackTrace();
        Assert.fail("JSON Generator produced malformatted output: " + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("An error occurred in the test: " + e.getMessage());
    }
}
Also used: PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator), DataSinkNode (org.apache.flink.optimizer.dag.DataSinkNode), JsonFactory (org.codehaus.jackson.JsonFactory), JsonParseException (org.codehaus.jackson.JsonParseException), JsonParser (org.codehaus.jackson.JsonParser)
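
For cases where the JSON should be streamed rather than materialized as a String, the PrintWriter and StringWriter entries under Aggregations below suggest a writer-based variant of the dump. A minimal sketch of that usage, assuming a dumpPactPlanAsJSON(List<DataSinkNode>, PrintWriter) overload exists alongside getPactPlanAsJSON (not taken from the indexed sources):

private static String dumpPreOptimizedPlanAsJSON(Plan plan) {
    // Pre-optimize the plan, as in the test above.
    List<DataSinkNode> sinks = Optimizer.createPreOptimizedPlan(plan);
    // Stream the JSON through a writer instead of building the String directly.
    // The writer-based overload is an assumption, not confirmed by the sources shown here.
    StringWriter out = new StringWriter();
    PrintWriter writer = new PrintWriter(out);
    new PlanJSONDumpGenerator().dumpPactPlanAsJSON(sinks, writer);
    writer.flush();
    return out.toString();
}

Assumed imports beyond those listed above: PrintWriter (java.io.PrintWriter), StringWriter (java.io.StringWriter), Plan (org.apache.flink.api.common.Plan), Optimizer (org.apache.flink.optimizer.Optimizer).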

Example 2 with PlanJSONDumpGenerator

Use of org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator in the apache/flink project.

From class ConnectedComponentsCoGroupTest, method testWorksetConnectedComponents:

@Test
public void testWorksetConnectedComponents() {
    Plan plan = getConnectedComponentsCoGroupPlan();
    plan.setExecutionConfig(new ExecutionConfig());
    OptimizedPlan optPlan = compileNoStats(plan);
    OptimizerPlanNodeResolver or = getOptimizerPlanNodeResolver(optPlan);
    if (PRINT_PLAN) {
        PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
        String json = dumper.getOptimizerPlanAsJSON(optPlan);
        System.out.println(json);
    }
    SourcePlanNode vertexSource = or.getNode(VERTEX_SOURCE);
    SourcePlanNode edgesSource = or.getNode(EDGES_SOURCE);
    SinkPlanNode sink = or.getNode(SINK);
    WorksetIterationPlanNode iter = or.getNode(ITERATION_NAME);
    DualInputPlanNode neighborsJoin = or.getNode(JOIN_NEIGHBORS_MATCH);
    DualInputPlanNode cogroup = or.getNode(MIN_ID_AND_UPDATE);
    // --------------------------------------------------------------------
    // Plan validation:
    //
    // We expect the plan to go with a sort-merge join, because the CoGroup
    // sorts and the join in the successive iteration can re-exploit the sorting.
    // --------------------------------------------------------------------
    // test all drivers
    Assert.assertEquals(DriverStrategy.NONE, sink.getDriverStrategy());
    Assert.assertEquals(DriverStrategy.NONE, vertexSource.getDriverStrategy());
    Assert.assertEquals(DriverStrategy.NONE, edgesSource.getDriverStrategy());
    Assert.assertEquals(DriverStrategy.INNER_MERGE, neighborsJoin.getDriverStrategy());
    Assert.assertEquals(set0, neighborsJoin.getKeysForInput1());
    Assert.assertEquals(set0, neighborsJoin.getKeysForInput2());
    Assert.assertEquals(DriverStrategy.CO_GROUP, cogroup.getDriverStrategy());
    Assert.assertEquals(set0, cogroup.getKeysForInput1());
    Assert.assertEquals(set0, cogroup.getKeysForInput2());
    // test all the shipping strategies
    Assert.assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
    Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialSolutionSetInput().getShipStrategy());
    Assert.assertEquals(set0, iter.getInitialSolutionSetInput().getShipStrategyKeys());
    Assert.assertEquals(ShipStrategyType.PARTITION_HASH, iter.getInitialWorksetInput().getShipStrategy());
    Assert.assertEquals(set0, iter.getInitialWorksetInput().getShipStrategyKeys());
    // workset
    Assert.assertEquals(ShipStrategyType.FORWARD, neighborsJoin.getInput1().getShipStrategy());
    // edges
    Assert.assertEquals(ShipStrategyType.PARTITION_HASH, neighborsJoin.getInput2().getShipStrategy());
    Assert.assertEquals(set0, neighborsJoin.getInput2().getShipStrategyKeys());
    Assert.assertTrue(neighborsJoin.getInput2().getTempMode().isCached());
    // min id
    Assert.assertEquals(ShipStrategyType.PARTITION_HASH, cogroup.getInput1().getShipStrategy());
    // solution set
    Assert.assertEquals(ShipStrategyType.FORWARD, cogroup.getInput2().getShipStrategy());
    // test all the local strategies
    Assert.assertEquals(LocalStrategy.NONE, sink.getInput().getLocalStrategy());
    Assert.assertEquals(LocalStrategy.NONE, iter.getInitialSolutionSetInput().getLocalStrategy());
    // the sort for the neighbor join in the first iteration is pushed out of the loop
    Assert.assertEquals(LocalStrategy.SORT, iter.getInitialWorksetInput().getLocalStrategy());
    // workset
    Assert.assertEquals(LocalStrategy.NONE, neighborsJoin.getInput1().getLocalStrategy());
    // edges
    Assert.assertEquals(LocalStrategy.SORT, neighborsJoin.getInput2().getLocalStrategy());
    Assert.assertEquals(LocalStrategy.SORT, cogroup.getInput1().getLocalStrategy());
    // solution set
    Assert.assertEquals(LocalStrategy.NONE, cogroup.getInput2().getLocalStrategy());
    // check the caches
    Assert.assertTrue(TempMode.CACHED == neighborsJoin.getInput2().getTempMode());
    JobGraphGenerator jgg = new JobGraphGenerator();
    jgg.compileJobGraph(optPlan);
}
Also used: DualInputPlanNode (org.apache.flink.optimizer.plan.DualInputPlanNode), PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator), WorksetIterationPlanNode (org.apache.flink.optimizer.plan.WorksetIterationPlanNode), JobGraphGenerator (org.apache.flink.optimizer.plantranslate.JobGraphGenerator), SourcePlanNode (org.apache.flink.optimizer.plan.SourcePlanNode), SinkPlanNode (org.apache.flink.optimizer.plan.SinkPlanNode), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), Plan (org.apache.flink.api.common.Plan), OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan), Test (org.junit.Test)

Example 3 with PlanJSONDumpGenerator

Use of org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator in the apache/flink project.

From class TestEnvironment, method getExecutionPlan:

@Override
public String getExecutionPlan() throws Exception {
    OptimizedPlan op = compileProgram("unused");
    PlanJSONDumpGenerator jsonGen = new PlanJSONDumpGenerator();
    return jsonGen.getOptimizerPlanAsJSON(op);
}
Also used: PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator), OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan)
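
TestEnvironment overrides ExecutionEnvironment.getExecutionPlan() here, so in an ordinary DataSet program the same JSON can be requested directly from the environment. A minimal sketch, not taken from the indexed sources; the file paths are placeholders:

public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    // A trivial pipeline so the plan has a sink; both paths are placeholders.
    env.readTextFile("/path/to/input.txt").writeAsText("/path/to/output.txt");
    // Prints the plan of the program as JSON, analogous to the override above.
    System.out.println(env.getExecutionPlan());
}

Assumed import: ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment), which also appears in the Aggregations below.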

Example 4 with PlanJSONDumpGenerator

Use of org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator in the apache/flink project.

From class CliFrontend, method info:

/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected int info(String[] args) {
    LOG.info("Running 'info' command.");
    // Parse command line options
    InfoOptions options;
    try {
        options = CliFrontendParser.parseInfoCommand(args);
    } catch (CliArgsException e) {
        return handleArgException(e);
    } catch (Throwable t) {
        return handleError(t);
    }
    // evaluate help flag
    if (options.isPrintHelp()) {
        CliFrontendParser.printHelpForInfo();
        return 0;
    }
    if (options.getJarFilePath() == null) {
        return handleArgException(new CliArgsException("The program JAR file was not specified."));
    }
    // -------- build the packaged program -------------
    PackagedProgram program;
    try {
        LOG.info("Building program from JAR file");
        program = buildProgram(options);
    } catch (Throwable t) {
        return handleError(t);
    }
    try {
        int parallelism = options.getParallelism();
        LOG.info("Creating program plan dump");
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), config);
        FlinkPlan flinkPlan = ClusterClient.getOptimizedPlan(compiler, program, parallelism);
        String jsonPlan = null;
        if (flinkPlan instanceof OptimizedPlan) {
            jsonPlan = new PlanJSONDumpGenerator().getOptimizerPlanAsJSON((OptimizedPlan) flinkPlan);
        } else if (flinkPlan instanceof StreamingPlan) {
            jsonPlan = ((StreamingPlan) flinkPlan).getStreamingPlanAsJSON();
        }
        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }
        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
        return 0;
    } catch (Throwable t) {
        return handleError(t);
    } finally {
        program.deleteExtractedLibraries();
    }
}
Also used: PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator), StreamingPlan (org.apache.flink.optimizer.plan.StreamingPlan), Optimizer (org.apache.flink.optimizer.Optimizer), DataStatistics (org.apache.flink.optimizer.DataStatistics), CliArgsException (org.apache.flink.client.cli.CliArgsException), FlinkPlan (org.apache.flink.optimizer.plan.FlinkPlan), TriggerSavepoint (org.apache.flink.runtime.messages.JobManagerMessages.TriggerSavepoint), DisposeSavepoint (org.apache.flink.runtime.messages.JobManagerMessages.DisposeSavepoint), CancelJobWithSavepoint (org.apache.flink.runtime.messages.JobManagerMessages.CancelJobWithSavepoint), OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan), PackagedProgram (org.apache.flink.client.program.PackagedProgram), DefaultCostEstimator (org.apache.flink.optimizer.costs.DefaultCostEstimator), InfoOptions (org.apache.flink.client.cli.InfoOptions)
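
For context, this info action is the code path behind the command-line invocation of the form bin/flink info <jar-file> [program arguments]: it compiles the packaged program and prints its optimized execution plan (or the streaming plan) as JSON without executing the job. The exact command syntax may differ between Flink versions.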

Example 5 with PlanJSONDumpGenerator

Use of org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator in the apache/flink project.

From class RemoteExecutor, method getOptimizerPlanAsJSON:

@Override
public String getOptimizerPlanAsJSON(Plan plan) throws Exception {
    Optimizer opt = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
    OptimizedPlan optPlan = opt.compile(plan);
    return new PlanJSONDumpGenerator().getOptimizerPlanAsJSON(optPlan);
}
Also used: PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator), Configuration (org.apache.flink.configuration.Configuration), Optimizer (org.apache.flink.optimizer.Optimizer), DataStatistics (org.apache.flink.optimizer.DataStatistics), DefaultCostEstimator (org.apache.flink.optimizer.costs.DefaultCostEstimator), OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan)

Aggregations (classes used together with PlanJSONDumpGenerator, with occurrence counts)

PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator): 13
OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan): 11
DataStatistics (org.apache.flink.optimizer.DataStatistics): 6
Optimizer (org.apache.flink.optimizer.Optimizer): 6
DefaultCostEstimator (org.apache.flink.optimizer.costs.DefaultCostEstimator): 4
Test (org.junit.Test): 4
Configuration (org.apache.flink.configuration.Configuration): 3
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 2
Plan (org.apache.flink.api.common.Plan): 2
DataSinkNode (org.apache.flink.optimizer.dag.DataSinkNode): 2
JobGraphGenerator (org.apache.flink.optimizer.plantranslate.JobGraphGenerator): 2
JsonFactory (org.codehaus.jackson.JsonFactory): 2
JsonParseException (org.codehaus.jackson.JsonParseException): 2
JsonParser (org.codehaus.jackson.JsonParser): 2
PrintWriter (java.io.PrintWriter): 1
StringWriter (java.io.StringWriter): 1
InetAddress (java.net.InetAddress): 1
InetSocketAddress (java.net.InetSocketAddress): 1
InvalidProgramException (org.apache.flink.api.common.InvalidProgramException): 1
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 1