
Example 1 with Optimizer

Use of org.apache.flink.optimizer.Optimizer in project flink by apache.

From class CompilerTestBase, method setup():

// ------------------------------------------------------------------------
@Before
public void setup() {
    Configuration flinkConf = new Configuration();
    this.dataStats = new DataStatistics();
    this.withStatsCompiler = new Optimizer(this.dataStats, new DefaultCostEstimator(), flinkConf);
    this.withStatsCompiler.setDefaultParallelism(DEFAULT_PARALLELISM);
    this.noStatsCompiler = new Optimizer(null, new DefaultCostEstimator(), flinkConf);
    this.noStatsCompiler.setDefaultParallelism(DEFAULT_PARALLELISM);
}
Also used : Configuration(org.apache.flink.configuration.Configuration) Optimizer(org.apache.flink.optimizer.Optimizer) DataStatistics(org.apache.flink.optimizer.DataStatistics) DefaultCostEstimator(org.apache.flink.optimizer.costs.DefaultCostEstimator) Before(org.junit.Before)
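In concrete compiler tests, the two Optimizer instances created in setup() are typically wrapped in small compile helpers so a test can translate a Plan once with and once without data statistics. A minimal sketch of such helpers, reusing the fields from the setup above (the method names compileWithStats and compileNoStats are illustrative, not taken from this snippet; Plan and OptimizedPlan imports are assumed):

// Sketch only: compile helpers a test base like this one commonly exposes.
protected OptimizedPlan compileWithStats(Plan p) {
    // uses the statistics-aware compiler built in setup()
    return this.withStatsCompiler.compile(p);
}

protected OptimizedPlan compileNoStats(Plan p) {
    // uses the compiler created without DataStatistics
    return this.noStatsCompiler.compile(p);
}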

Example 2 with Optimizer

Use of org.apache.flink.optimizer.Optimizer in project flink by apache.

From class JobGraphGeneratorTest, method testResourcesForChainedOperators():

/**
 * Verifies that the resources are merged correctly for chained operators
 * when generating the job graph.
 */
@Test
public void testResourcesForChainedOperators() throws Exception {
    ResourceSpec resource1 = new ResourceSpec(0.1, 100);
    ResourceSpec resource2 = new ResourceSpec(0.2, 200);
    ResourceSpec resource3 = new ResourceSpec(0.3, 300);
    ResourceSpec resource4 = new ResourceSpec(0.4, 400);
    ResourceSpec resource5 = new ResourceSpec(0.5, 500);
    ResourceSpec resource6 = new ResourceSpec(0.6, 600);
    ResourceSpec resource7 = new ResourceSpec(0.7, 700);
    Method opMethod = Operator.class.getDeclaredMethod("setResources", ResourceSpec.class);
    opMethod.setAccessible(true);
    Method sinkMethod = DataSink.class.getDeclaredMethod("setResources", ResourceSpec.class);
    sinkMethod.setAccessible(true);
    MapFunction<Long, Long> mapFunction = new MapFunction<Long, Long>() {
        @Override
        public Long map(Long value) throws Exception {
            return value;
        }
    };
    FilterFunction<Long> filterFunction = new FilterFunction<Long>() {
        @Override
        public boolean filter(Long value) throws Exception {
            return false;
        }
    };
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Long> input = env.fromElements(1L, 2L, 3L);
    opMethod.invoke(input, resource1);
    DataSet<Long> map1 = input.map(mapFunction);
    opMethod.invoke(map1, resource2);
    // CHAIN(Source -> Map -> Filter)
    DataSet<Long> filter1 = map1.filter(filterFunction);
    opMethod.invoke(filter1, resource3);
    IterativeDataSet<Long> startOfIteration = filter1.iterate(10);
    opMethod.invoke(startOfIteration, resource4);
    DataSet<Long> map2 = startOfIteration.map(mapFunction);
    opMethod.invoke(map2, resource5);
    // CHAIN(Map -> Filter)
    DataSet<Long> feedback = map2.filter(filterFunction);
    opMethod.invoke(feedback, resource6);
    DataSink<Long> sink = startOfIteration.closeWith(feedback).output(new DiscardingOutputFormat<Long>());
    sinkMethod.invoke(sink, resource7);
    Plan plan = env.createProgramPlan();
    Optimizer pc = new Optimizer(new Configuration());
    OptimizedPlan op = pc.compile(plan);
    JobGraphGenerator jgg = new JobGraphGenerator();
    JobGraph jobGraph = jgg.compileJobGraph(op);
    JobVertex sourceMapFilterVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(0);
    JobVertex iterationHeadVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1);
    JobVertex feedbackVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(2);
    JobVertex sinkVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(3);
    JobVertex iterationSyncVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(4);
    assertTrue(sourceMapFilterVertex.getMinResources().equals(resource1.merge(resource2).merge(resource3)));
    assertTrue(iterationHeadVertex.getPreferredResources().equals(resource4));
    assertTrue(feedbackVertex.getMinResources().equals(resource5.merge(resource6)));
    assertTrue(sinkVertex.getPreferredResources().equals(resource7));
    assertTrue(iterationSyncVertex.getMinResources().equals(resource4));
}
Also used : FilterFunction(org.apache.flink.api.common.functions.FilterFunction) ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Configuration(org.apache.flink.configuration.Configuration) Optimizer(org.apache.flink.optimizer.Optimizer) ResourceSpec(org.apache.flink.api.common.operators.ResourceSpec) Method(java.lang.reflect.Method) MapFunction(org.apache.flink.api.common.functions.MapFunction) Plan(org.apache.flink.api.common.Plan) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) Test(org.junit.Test)
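The first assertion relies on ResourceSpec.merge(...) combining the specs of every operator in the chained source -> map -> filter vertex. A minimal sketch of that expectation, reusing the variables from the test above and assuming that merge() adds up CPU cores and heap memory:

// Sketch only (reuses the variables from the test above): the chained
// source -> map -> filter vertex is expected to get the merged spec.
ResourceSpec chained = resource1.merge(resource2).merge(resource3);
// assuming merge() sums the resources, chained describes
// (0.1 + 0.2 + 0.3) CPU cores and (100 + 200 + 300) MB of heap memory,
// which is what sourceMapFilterVertex.getMinResources() is compared against.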

Example 3 with Optimizer

Use of org.apache.flink.optimizer.Optimizer in project flink by apache.

From class TempInIterationsTest, method testTempInIterationTest():

/*
 * Tests whether temp barriers are correctly set within iterations.
 */
@Test
public void testTempInIterationTest() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple2<Long, Long>> input = env.readCsvFile("file:///does/not/exist").types(Long.class, Long.class);
    DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration = input.iterateDelta(input, 1, 0);
    DataSet<Tuple2<Long, Long>> update = iteration.getWorkset().join(iteration.getSolutionSet()).where(0).equalTo(0).with(new DummyFlatJoinFunction<Tuple2<Long, Long>>());
    iteration.closeWith(update, update).output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
    Plan plan = env.createProgramPlan();
    OptimizedPlan oPlan = (new Optimizer(new Configuration())).compile(plan);
    JobGraphGenerator jgg = new JobGraphGenerator();
    JobGraph jg = jgg.compileJobGraph(oPlan);
    boolean solutionSetUpdateChecked = false;
    for (JobVertex v : jg.getVertices()) {
        if (v.getName().equals("SolutionSet Delta")) {
            // check if input of solution set delta is temped
            TaskConfig tc = new TaskConfig(v.getConfiguration());
            assertTrue(tc.isInputAsynchronouslyMaterialized(0));
            solutionSetUpdateChecked = true;
        }
    }
    assertTrue(solutionSetUpdateChecked);
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Configuration(org.apache.flink.configuration.Configuration) Optimizer(org.apache.flink.optimizer.Optimizer) TaskConfig(org.apache.flink.runtime.operators.util.TaskConfig) Plan(org.apache.flink.api.common.Plan) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Test(org.junit.Test)
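The loop over the job vertices can be extracted into a small reusable check. A minimal sketch using only classes already imported above; the helper name hasTempedInput is made up for illustration:

// Sketch only: returns true if the given input of the named vertex is
// asynchronously materialized, i.e. the optimizer inserted a temp barrier there.
private static boolean hasTempedInput(JobGraph jg, String vertexName, int inputIndex) {
    for (JobVertex v : jg.getVertices()) {
        if (v.getName().equals(vertexName)) {
            TaskConfig tc = new TaskConfig(v.getConfiguration());
            return tc.isInputAsynchronouslyMaterialized(inputIndex);
        }
    }
    return false;
}

With such a helper, the check above reduces to assertTrue(hasTempedInput(jg, "SolutionSet Delta", 0)).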

Example 4 with Optimizer

Use of org.apache.flink.optimizer.Optimizer in project flink by apache.

From class CancelingTestBase, method getJobGraph():

private JobGraph getJobGraph(final Plan plan) throws Exception {
    final Optimizer pc = new Optimizer(new DataStatistics(), this.executor.configuration());
    final OptimizedPlan op = pc.compile(plan);
    final JobGraphGenerator jgg = new JobGraphGenerator();
    return jgg.compileJobGraph(op);
}
Also used : Optimizer(org.apache.flink.optimizer.Optimizer) JobGraphGenerator(org.apache.flink.optimizer.plantranslate.JobGraphGenerator) DataStatistics(org.apache.flink.optimizer.DataStatistics) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan)
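The helper is typically called on the Plan produced by the test program. A minimal usage sketch, assuming an ExecutionEnvironment named env on which the program was built (env is not part of the original snippet):

// Sketch only: translate the test program and hand the JobGraph to the test harness.
Plan plan = env.createProgramPlan();
JobGraph jobGraph = getJobGraph(plan);
// the cancelling test then submits jobGraph and cancels it while it is running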

Example 5 with Optimizer

Use of org.apache.flink.optimizer.Optimizer in project flink by apache.

From class AccumulatorLiveITCase, method getOptimizedPlan():

/**
 * Helper to generate the JobGraph from a Plan.
 */
private static JobGraph getOptimizedPlan(Plan plan) {
    Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
    JobGraphGenerator jgg = new JobGraphGenerator();
    OptimizedPlan op = pc.compile(plan);
    return jgg.compileJobGraph(op);
}
Also used : Configuration(org.apache.flink.configuration.Configuration) Optimizer(org.apache.flink.optimizer.Optimizer) JobGraphGenerator(org.apache.flink.optimizer.plantranslate.JobGraphGenerator) DataStatistics(org.apache.flink.optimizer.DataStatistics) OptimizedPlan(org.apache.flink.optimizer.plan.OptimizedPlan)
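If a test needs a specific parallelism for the generated job, Example 1 shows that the Optimizer accepts a default before compiling. A minimal variation of the helper above, offered as a sketch (the parallelism parameter is an addition for illustration):

// Sketch only: same helper, but with an explicit default parallelism on the compiler.
private static JobGraph getOptimizedPlan(Plan plan, int parallelism) {
    Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
    // mirrors CompilerTestBase.setup() in Example 1
    pc.setDefaultParallelism(parallelism);
    OptimizedPlan op = pc.compile(plan);
    return new JobGraphGenerator().compileJobGraph(op);
}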

Aggregations

Optimizer (org.apache.flink.optimizer.Optimizer): 17
OptimizedPlan (org.apache.flink.optimizer.plan.OptimizedPlan): 15
DataStatistics (org.apache.flink.optimizer.DataStatistics): 13
Configuration (org.apache.flink.configuration.Configuration): 12
DefaultCostEstimator (org.apache.flink.optimizer.costs.DefaultCostEstimator): 8
PlanJSONDumpGenerator (org.apache.flink.optimizer.plandump.PlanJSONDumpGenerator): 6
Test (org.junit.Test): 6
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph): 5
Plan (org.apache.flink.api.common.Plan): 4
JobGraphGenerator (org.apache.flink.optimizer.plantranslate.JobGraphGenerator): 4
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 3
PackagedProgram (org.apache.flink.client.program.PackagedProgram): 3
JobVertex (org.apache.flink.runtime.jobgraph.JobVertex): 3
Method (java.lang.reflect.Method): 2
URL (java.net.URL): 2
FilterFunction (org.apache.flink.api.common.functions.FilterFunction): 2
MapFunction (org.apache.flink.api.common.functions.MapFunction): 2
ResourceSpec (org.apache.flink.api.common.operators.ResourceSpec): 2
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 2
ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException): 2