Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
The class CancelingTestBase, method getJobGraph.
private JobGraph getJobGraph(final Plan plan) throws Exception {
    // Optimize the plan, then translate the optimized plan into a JobGraph.
    final Optimizer pc = new Optimizer(new DataStatistics(), this.executor.configuration());
    final OptimizedPlan op = pc.compile(plan);
    final JobGraphGenerator jgg = new JobGraphGenerator();
    return jgg.compileJobGraph(op);
}
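The same three-step pipeline recurs in every usage below: build a Plan, optimize it into an OptimizedPlan, then translate it with JobGraphGenerator into a JobGraph. A minimal self-contained sketch of that pipeline, assuming a default Configuration and an empty DataStatistics (the class name JobGraphGeneratorSketch and the trivial program are illustrative only):

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.optimizer.DataStatistics;
import org.apache.flink.optimizer.Optimizer;
import org.apache.flink.optimizer.plan.OptimizedPlan;
import org.apache.flink.optimizer.plantranslate.JobGraphGenerator;
import org.apache.flink.runtime.jobgraph.JobGraph;

public class JobGraphGeneratorSketch {

    public static void main(String[] args) throws Exception {
        // Build a trivial batch program and extract its Plan.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.generateSequence(1, 10).output(new DiscardingOutputFormat<Long>());
        Plan plan = env.createProgramPlan("sketch");

        // Step 1: optimize the Plan. An empty DataStatistics and a default
        // Configuration are assumptions for this sketch; the tests above pass
        // in their own cluster configuration instead.
        Optimizer optimizer = new Optimizer(new DataStatistics(), new Configuration());
        OptimizedPlan optimizedPlan = optimizer.compile(plan);

        // Step 2: translate the optimized plan into a runtime JobGraph.
        JobGraph jobGraph = new JobGraphGenerator().compileJobGraph(optimizedPlan);

        System.out.println("Compiled JobGraph with " + jobGraph.getNumberOfVertices() + " vertices");
    }
}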
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
The class AccumulatorLiveITCase, method getOptimizedPlan.
/**
 * Helper to generate the JobGraph.
 */
private static JobGraph getOptimizedPlan(Plan plan) {
    Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
    JobGraphGenerator jgg = new JobGraphGenerator();
    OptimizedPlan op = pc.compile(plan);
    return jgg.compileJobGraph(op);
}
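Despite its name, this helper returns a fully compiled JobGraph, not just the optimized plan. JobGraphGenerator also offers a two-argument compileJobGraph overload that accepts an explicit JobID, which is handy when a test must know the job's ID before submission; a hedged sketch (the method name getJobGraphWithFixedId is hypothetical):

// Hypothetical variant of the helper above that pins the JobID up front
// (assumes the two-argument compileJobGraph(OptimizedPlan, JobID) overload).
private static JobGraph getJobGraphWithFixedId(Plan plan, JobID jobId) {
    Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
    OptimizedPlan op = pc.compile(plan);
    return new JobGraphGenerator().compileJobGraph(op, jobId);
}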
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
The class TestEnvironment, method execute.
@Override
public JobExecutionResult execute(String jobName) throws Exception {
    // Compile the program, translate the optimized plan into a JobGraph,
    // and submit it to the test cluster, waiting for the job to finish.
    OptimizedPlan op = compileProgram(jobName);
    JobGraphGenerator jgg = new JobGraphGenerator();
    JobGraph jobGraph = jgg.compileJobGraph(op);
    this.lastJobExecutionResult = executor.submitJobAndWait(jobGraph, false);
    return this.lastJobExecutionResult;
}
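The compileProgram helper is not shown in this snippet. A plausible sketch of what it does, assuming it builds the Plan from the environment itself (createProgramPlan is a real ExecutionEnvironment method; the Optimizer arguments here are assumptions):

// Plausible sketch of the compileProgram helper used above (assumption:
// it creates the Plan from this environment and runs the batch optimizer).
private OptimizedPlan compileProgram(String jobName) {
    Plan plan = createProgramPlan(jobName);
    Optimizer optimizer = new Optimizer(new DataStatistics(), new Configuration());
    return optimizer.compile(plan);
}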
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
The class IterationsCompilerTest, method testBulkIterationWithPartialSolutionProperties.
/**
 * Tests that interesting properties can be pushed out of the bulk iteration. This requires
 * that a NoOp node is appended to the step function which re-establishes the properties of
 * the initial input. If this does not work, then Flink won't find a plan, because the
 * optimizer will not consider plans where the partitioning is done after the partial
 * solution node in this case (because of pruning).
 */
@Test
public void testBulkIterationWithPartialSolutionProperties() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple1<Long>> input1 = env.generateSequence(1, 10).map(new MapFunction<Long, Tuple1<Long>>() {
        @Override
        public Tuple1<Long> map(Long value) throws Exception {
            return new Tuple1<>(value);
        }
    });

    DataSet<Tuple1<Long>> input2 = env.generateSequence(1, 10).map(new MapFunction<Long, Tuple1<Long>>() {
        @Override
        public Tuple1<Long> map(Long value) throws Exception {
            return new Tuple1<>(value);
        }
    });

    // distinct() establishes the interesting properties on the iteration's initial input.
    DataSet<Tuple1<Long>> distinctInput = input1.distinct();

    IterativeDataSet<Tuple1<Long>> iteration = distinctInput.iterate(10);

    DataSet<Tuple1<Long>> iterationStep = iteration.coGroup(input2)
            .where(0)
            .equalTo(0)
            .with(new CoGroupFunction<Tuple1<Long>, Tuple1<Long>, Tuple1<Long>>() {
                @Override
                public void coGroup(Iterable<Tuple1<Long>> first, Iterable<Tuple1<Long>> second, Collector<Tuple1<Long>> out) throws Exception {
                    Iterator<Tuple1<Long>> it = first.iterator();
                    if (it.hasNext()) {
                        out.collect(it.next());
                    }
                }
            });

    DataSet<Tuple1<Long>> iterationResult = iteration.closeWith(iterationStep);

    iterationResult.output(new DiscardingOutputFormat<Tuple1<Long>>());

    Plan p = env.createProgramPlan();
    OptimizedPlan op = compileNoStats(p);

    // The actual check: JobGraph generation must succeed, i.e. the optimizer found a plan.
    new JobGraphGenerator().compileJobGraph(op);
}
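Both IterationsCompilerTest methods call a compileNoStats helper inherited from their compiler test base class. A minimal stand-in, assuming that compiling with an empty DataStatistics is effectively compiling without statistics:

// Hypothetical stand-in for the inherited compileNoStats helper:
// optimize the plan with no recorded data statistics.
private static OptimizedPlan compileNoStats(Plan plan) {
    Optimizer compiler = new Optimizer(new DataStatistics(), new Configuration());
    return compiler.compile(plan);
}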
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
The class IterationsCompilerTest, method testResetPartialSolution.
@Test
public void testResetPartialSolution() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Long> width = env.generateSequence(1, 10);
        DataSet<Long> update = env.generateSequence(1, 10);
        DataSet<Long> lastGradient = env.generateSequence(1, 10);

        DataSet<Long> init = width.union(update).union(lastGradient);

        IterativeDataSet<Long> iteration = init.iterate(10);

        // Split the unioned partial solution back into its three parts.
        width = iteration.filter(new IdFilter<Long>());
        update = iteration.filter(new IdFilter<Long>());
        lastGradient = iteration.filter(new IdFilter<Long>());

        DataSet<Long> gradient = width.map(new IdentityMapper<Long>());

        DataSet<Long> term = gradient.join(lastGradient)
                .where(new IdentityKeyExtractor<Long>())
                .equalTo(new IdentityKeyExtractor<Long>())
                .with(new JoinFunction<Long, Long, Long>() {
                    @Override
                    public Long join(Long first, Long second) {
                        return null;
                    }
                });

        update = update.map(new RichMapFunction<Long, Long>() {
            @Override
            public Long map(Long value) {
                return null;
            }
        }).withBroadcastSet(term, "some-name");

        DataSet<Long> result = iteration.closeWith(width.union(update).union(lastGradient));

        result.output(new DiscardingOutputFormat<Long>());

        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);

        // The test passes if the optimized plan can be translated into a JobGraph.
        new JobGraphGenerator().compileJobGraph(op);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
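In both iteration tests the assertion is simply that compileJobGraph completes without throwing. If one wanted to inspect the result, the JobGraph exposes its vertices; a short sketch (the printed fields are examples only):

// Sketch: walk the vertices of a compiled JobGraph.
JobGraph jobGraph = new JobGraphGenerator().compileJobGraph(op);
for (JobVertex vertex : jobGraph.getVertices()) {
    System.out.println(vertex.getName() + " (parallelism " + vertex.getParallelism() + ")");
}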