Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
In the class IterationsCompilerTest, the method testTwoIterationsWithMapperInbetween:
@Test
public void testTwoIterationsWithMapperInbetween() throws Exception {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(8);
        DataSet<Tuple2<Long, Long>> verticesWithInitialId = env.fromElements(new Tuple2<Long, Long>(1L, 2L));
        DataSet<Tuple2<Long, Long>> edges = env.fromElements(new Tuple2<Long, Long>(1L, 2L));
        // bulk iteration, then a plain mapper, then a delta iteration
        DataSet<Tuple2<Long, Long>> bulkResult = doBulkIteration(verticesWithInitialId, edges);
        DataSet<Tuple2<Long, Long>> mappedBulk = bulkResult.map(new DummyMap());
        DataSet<Tuple2<Long, Long>> depResult = doDeltaIteration(mappedBulk, edges);
        depResult.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);
        // the single sink must be fed directly by the workset (delta) iteration
        assertEquals(1, op.getDataSinks().size());
        assertTrue(op.getDataSinks().iterator().next().getInput().getSource() instanceof WorksetIterationPlanNode);
        WorksetIterationPlanNode wipn = (WorksetIterationPlanNode) op.getDataSinks().iterator().next().getInput().getSource();
        // both iteration inputs are hash-partitioned, never materialized, and exchanged in batch mode
        assertEquals(ShipStrategyType.PARTITION_HASH, wipn.getInput1().getShipStrategy());
        assertEquals(TempMode.NONE, wipn.getInput1().getTempMode());
        assertEquals(TempMode.NONE, wipn.getInput2().getTempMode());
        assertEquals(DataExchangeMode.BATCH, wipn.getInput1().getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, wipn.getInput2().getDataExchangeMode());
        new JobGraphGenerator().compileJobGraph(op);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
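The helpers doBulkIteration and doDeltaIteration are defined elsewhere in IterationsCompilerTest and are not part of this snippet. The following is a minimal sketch of the plan shape they set up, assuming a join-based step function; the Propagate class, the iteration limits, and the key positions are illustrative assumptions, not the test's actual logic.

// Illustrative sketch only -- the real helpers in IterationsCompilerTest use
// different step functions; this shows the bulk-then-delta plan shape.
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.java.operators.IterativeDataSet;
import org.apache.flink.api.java.tuple.Tuple2;

public class IterationHelperSketch {

    // assumed step function: emit (edge target, vertex value) for each match
    public static class Propagate
            implements JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
        @Override
        public Tuple2<Long, Long> join(Tuple2<Long, Long> vertex, Tuple2<Long, Long> edge) {
            return new Tuple2<Long, Long>(edge.f1, vertex.f1);
        }
    }

    public static DataSet<Tuple2<Long, Long>> doBulkIteration(
            DataSet<Tuple2<Long, Long>> vertices, DataSet<Tuple2<Long, Long>> edges) {
        // open a bulk iteration over the vertices, at most 20 supersteps (assumed)
        IterativeDataSet<Tuple2<Long, Long>> iteration = vertices.iterate(20);
        // step: join the current solution with the static edge set on the vertex id
        DataSet<Tuple2<Long, Long>> step =
                iteration.join(edges).where(0).equalTo(0).with(new Propagate());
        return iteration.closeWith(step);
    }

    public static DataSet<Tuple2<Long, Long>> doDeltaIteration(
            DataSet<Tuple2<Long, Long>> initial, DataSet<Tuple2<Long, Long>> edges) {
        // delta iteration: the initial set serves as solution set and workset, keyed on field 0
        DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
                initial.iterateDelta(initial, 100, 0);
        // step: join the workset with the edges; the result serves as both the
        // solution-set delta and the next workset
        DataSet<Tuple2<Long, Long>> changes =
                iteration.getWorkset().join(edges).where(0).equalTo(0).with(new Propagate());
        return iteration.closeWith(changes, changes);
    }
}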
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
In the class ReduceAllTest, the method testReduce:
@Test
public void testReduce() {
    // construct the plan
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    DataSet<Long> set1 = env.generateSequence(0, 1);
    set1.reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce1")
            .output(new DiscardingOutputFormat<Long>()).name("Sink");
    Plan plan = env.createProgramPlan();
    try {
        OptimizedPlan oPlan = compileNoStats(plan);
        JobGraphGenerator jobGen = new JobGraphGenerator();
        jobGen.compileJobGraph(oPlan);
    } catch (CompilerException ce) {
        ce.printStackTrace();
        fail("The pact compiler is unable to compile this plan correctly");
    }
}
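IdentityGroupReducer is one of the optimizer test functions (org.apache.flink.optimizer.testfunctions). A minimal sketch consistent with its name, assuming it forwards each group element unchanged:

import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.util.Collector;

// Sketch: an identity group reducer that forwards every record of a group unchanged.
public class IdentityGroupReducer<T> implements GroupReduceFunction<T, T> {
    @Override
    public void reduce(Iterable<T> values, Collector<T> out) {
        for (T value : values) {
            out.collect(value);
        }
    }
}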
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
In the class CachedMatchStrategyCompilerTest, the method testRightSide:
/**
 * This tests whether a HYBRIDHASH_BUILD_SECOND is correctly transformed to a
 * HYBRIDHASH_BUILD_SECOND_CACHED when it is inside of an iteration and on the static path.
 */
@Test
public void testRightSide() {
    try {
        Plan plan = getTestPlanRightStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);
        OptimizedPlan oPlan = compileNoStats(plan);
        OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
        DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");
        // verify correct join strategy
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED, innerJoin.getDriverStrategy());
        assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
        assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());
        new JobGraphGenerator().compileJobGraph(oPlan);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test errored: " + e.getMessage());
    }
}
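getTestPlanRightStatic is a helper in CachedMatchStrategyCompilerTest. A hedged sketch of the kind of plan it builds: a bulk iteration whose step joins the loop-varying set with a loop-invariant (static) second input, passing the local-strategy hint through withParameters. The concrete inputs, iteration count, and the DummyJoiner stand-in below are illustrative assumptions, not the test's actual values.

// Hedged sketch of a plan like the one getTestPlanRightStatic builds. Assumed
// imports: org.apache.flink.api.common.functions.JoinFunction,
// org.apache.flink.api.java.tuple.Tuple3,
// org.apache.flink.configuration.Configuration, org.apache.flink.optimizer.Optimizer.
private static Plan buildRightStaticPlanSketch(String localStrategyHint) {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Long, Long, Long>> loopInput =
            env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Big");
    DataSet<Tuple3<Long, Long, Long>> staticInput =
            env.fromElements(new Tuple3<Long, Long, Long>(1L, 2L, 3L)).name("Small");
    IterativeDataSet<Tuple3<Long, Long, Long>> iteration = loopInput.iterate(10);
    // pass the local-strategy hint (e.g. HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND)
    Configuration joinParams = new Configuration();
    joinParams.setString(Optimizer.HINT_LOCAL_STRATEGY, localStrategyHint);
    // the right (second) input never changes between supersteps: the static path
    DataSet<Tuple3<Long, Long, Long>> joined = iteration
            .join(staticInput).where(0).equalTo(0)
            .with(new DummyJoiner()).name("DummyJoiner")
            .withParameters(joinParams);
    iteration.closeWith(joined).output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    return env.createProgramPlan();
}

// trivial stand-in for the test's DummyJoiner: returns the left record
public static class DummyJoiner implements
        JoinFunction<Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>, Tuple3<Long, Long, Long>> {
    @Override
    public Tuple3<Long, Long, Long> join(Tuple3<Long, Long, Long> left, Tuple3<Long, Long, Long> right) {
        return left;
    }
}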
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
In the class DisjointDataFlowsTest, the method testDisjointFlows:
@Test
public void testDisjointFlows() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // generate two different flows
        env.generateSequence(1, 10).output(new DiscardingOutputFormat<Long>());
        env.generateSequence(1, 10).output(new DiscardingOutputFormat<Long>());
        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);
        new JobGraphGenerator().compileJobGraph(op);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
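compileJobGraph returns the assembled JobGraph, so it is easy to confirm that both disjoint flows end up in one graph. A minimal sketch of such a check (the printed vertex names depend on the plan):

// Sketch (assumed imports: org.apache.flink.runtime.jobgraph.JobGraph,
// org.apache.flink.runtime.jobgraph.JobVertex): inspect the generated JobGraph
// to confirm both disjoint flows were compiled into a single graph.
JobGraph jobGraph = new JobGraphGenerator().compileJobGraph(op);
System.out.println("number of vertices: " + jobGraph.getNumberOfVertices());
for (JobVertex vertex : jobGraph.getVertices()) {
    System.out.println(vertex.getName());
}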
Use of org.apache.flink.optimizer.plantranslate.JobGraphGenerator in project flink by apache.
In the class HardPlansCompilationTest, the method testTicket158:
/**
 * Source -> Map -> Reduce -> Cross -> Reduce -> Cross -> Reduce ->
 * |--------------------------/                  /
 * |--------------------------------------------/
 *
 * <p>First cross has SameKeyFirst output contract
 */
@Test
public void testTicket158() {
    // construct the plan
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    DataSet<Long> set1 = env.generateSequence(0, 1);
    set1.map(new IdentityMapper<Long>()).name("Map1")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce1")
            .cross(set1).with(new IdentityCrosser<Long>()).withForwardedFieldsFirst("*").name("Cross1")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce2")
            .cross(set1).with(new IdentityCrosser<Long>()).name("Cross2")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce3")
            .output(new DiscardingOutputFormat<Long>()).name("Sink");
    Plan plan = env.createProgramPlan();
    OptimizedPlan oPlan = compileNoStats(plan);
    JobGraphGenerator jobGen = new JobGraphGenerator();
    jobGen.compileJobGraph(oPlan);
}
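IdentityMapper and IdentityCrosser are optimizer test functions as well. Minimal sketches matching their names, assuming the mapper forwards its input and the crosser returns its first input:

import org.apache.flink.api.common.functions.CrossFunction;
import org.apache.flink.api.common.functions.MapFunction;

// Sketch: identity-style test functions matching the names used above.
public class IdentityFunctionsSketch {

    // forwards its input unchanged
    public static class IdentityMapper<T> implements MapFunction<T, T> {
        @Override
        public T map(T value) {
            return value;
        }
    }

    // returns the first input of each pair (assumed; either side works for an identity test)
    public static class IdentityCrosser<T> implements CrossFunction<T, T, T> {
        @Override
        public T cross(T first, T second) {
            return first;
        }
    }
}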