Example usage of org.apache.flink.optimizer.testfunctions.IdentityMapper in the Apache Flink project.
Defined in class PipelineBreakerTest, method testPipelineBreakerWithBroadcastVariable.
@Test
public void testPipelineBreakerWithBroadcastVariable() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.getConfig().setExecutionMode(ExecutionMode.PIPELINED);
        env.setParallelism(64);

        // The broadcast set branches off the same source that feeds the mapper's
        // regular input, so the optimizer must break the pipeline on one path.
        DataSet<Long> source = env.generateSequence(1, 10).map(new IdentityMapper<Long>());

        DataSet<Long> result = source
                .map(new IdentityMapper<Long>())
                .map(new IdentityMapper<Long>())
                .withBroadcastSet(source, "bc");
        result.output(new DiscardingOutputFormat<Long>());

        Plan plan = env.createProgramPlan();
        OptimizedPlan optimizedPlan = compileNoStats(plan);

        SinkPlanNode sinkNode = optimizedPlan.getDataSinks().iterator().next();
        SingleInputPlanNode mapperNode = (SingleInputPlanNode) sinkNode.getInput().getSource();
        SingleInputPlanNode mapperInputNode = (SingleInputPlanNode) mapperNode.getInput().getSource();

        // no temp barrier is materialized on either input ...
        assertEquals(TempMode.NONE, mapperNode.getInput().getTempMode());
        assertEquals(TempMode.NONE, mapperNode.getBroadcastInputs().get(0).getTempMode());

        // ... because the batch data exchange mode already breaks the pipeline
        assertEquals(DataExchangeMode.BATCH, mapperInputNode.getInput().getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, mapperNode.getBroadcastInputs().get(0).getDataExchangeMode());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example usage of org.apache.flink.optimizer.testfunctions.IdentityMapper in the Apache Flink project.
Defined in class PipelineBreakerTest, method testPipelineBreakerBroadcastedAllReduce.
@Test
public void testPipelineBreakerBroadcastedAllReduce() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.getConfig().setExecutionMode(ExecutionMode.PIPELINED);
        env.setParallelism(64);

        // One broadcast input is derived from the mapper's own source (via an
        // all-reduce), the other is an independent sequence source.
        DataSet<Long> sourceWithMapper = env.generateSequence(1, 10).map(new IdentityMapper<Long>());

        DataSet<Long> bcInputDependent = sourceWithMapper
                .map(new IdentityMapper<Long>())
                .reduce(new SelectOneReducer<Long>());
        DataSet<Long> bcInputIndependent = env.generateSequence(1, 10);

        DataSet<Long> result = sourceWithMapper
                .map(new IdentityMapper<Long>())
                .withBroadcastSet(bcInputDependent, "bc1")
                .withBroadcastSet(bcInputIndependent, "bc2");
        result.output(new DiscardingOutputFormat<Long>());

        Plan plan = env.createProgramPlan();
        OptimizedPlan optimizedPlan = compileNoStats(plan);

        SinkPlanNode sinkNode = optimizedPlan.getDataSinks().iterator().next();
        SingleInputPlanNode mapperNode = (SingleInputPlanNode) sinkNode.getInput().getSource();

        // the pipeline break is realized as a BATCH exchange, not a temp barrier
        assertEquals(TempMode.NONE, mapperNode.getInput().getTempMode());
        assertEquals(DataExchangeMode.BATCH, mapperNode.getInput().getDataExchangeMode());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example usage of org.apache.flink.optimizer.testfunctions.IdentityMapper in the Apache Flink project.
Defined in class BroadcastVariablePipelinebreakerTest, method testBreakerForDependentVariable.
@Test
public void testBreakerForDependentVariable() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // The broadcast variable depends on the same source as the data path,
        // so a pipeline breaker must be inserted before the second mapper.
        DataSet<String> source1 = env.fromElements("test");
        source1.map(new IdentityMapper<String>())
                .map(new IdentityMapper<String>())
                .withBroadcastSet(source1, "some name")
                .output(new DiscardingOutputFormat<String>());

        Plan plan = env.createProgramPlan();
        OptimizedPlan optimizedPlan = compileNoStats(plan);

        SinkPlanNode sinkNode = optimizedPlan.getDataSinks().iterator().next();
        SingleInputPlanNode secondMapper = (SingleInputPlanNode) sinkNode.getInput().getSource();
        SingleInputPlanNode firstMapper = (SingleInputPlanNode) secondMapper.getInput().getSource();

        // no temp barriers anywhere ...
        assertEquals(TempMode.NONE, secondMapper.getInput().getTempMode());
        assertEquals(TempMode.NONE, firstMapper.getInput().getTempMode());
        assertEquals(TempMode.NONE, secondMapper.getBroadcastInputs().get(0).getTempMode());

        // ... the break happens via BATCH exchanges upstream and on the BC input,
        // while the final mapper-to-mapper connection stays pipelined
        assertEquals(DataExchangeMode.PIPELINED, secondMapper.getInput().getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, firstMapper.getInput().getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, secondMapper.getBroadcastInputs().get(0).getDataExchangeMode());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example usage of org.apache.flink.optimizer.testfunctions.IdentityMapper in the Apache Flink project.
Defined in class HardPlansCompilationTest, method testTicket158.
/**
 * Compiles the plan from FLINK ticket 158: a map/reduce chain in which the
 * original source is crossed back in twice.
 *
 * <pre>
 * Source -> Map -> Reduce -> Cross -> Reduce -> Cross -> Reduce -> Sink
 *       \------------------/                  /
 *        \-----------------------------------/
 * </pre>
 *
 * NOTE(review): the original ASCII diagram was garbled by extraction; the
 * branch layout above was reconstructed from the code (set1 feeds both
 * crosses) — confirm against the upstream source.
 *
 * <p>First cross has SameKeyFirst output contract
 */
@Test
public void testTicket158() {
    // construct the plan
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    DataSet<Long> set1 = env.generateSequence(0, 1);

    set1.map(new IdentityMapper<Long>()).name("Map1")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce1")
            .cross(set1).with(new IdentityCrosser<Long>()).withForwardedFieldsFirst("*").name("Cross1")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce2")
            .cross(set1).with(new IdentityCrosser<Long>()).name("Cross2")
            .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce3")
            .output(new DiscardingOutputFormat<Long>()).name("Sink");

    Plan plan = env.createProgramPlan();
    OptimizedPlan optimizedPlan = compileNoStats(plan);

    // translating the optimized plan into a job graph must not throw
    new JobGraphGenerator().compileJobGraph(optimizedPlan);
}
Example usage of org.apache.flink.optimizer.testfunctions.IdentityMapper in the Apache Flink project.
Defined in class BranchingPlansCompilerTest, method testBranchesOnlyInBCVariables2.
/**
 * Verifies that a plan compiles when the only branching occurs inside
 * broadcast-variable inputs: two map operators share the same proper input
 * and the same two broadcast sources, then feed a join whose result is
 * unioned with a third map over the shared input.
 */
@Test
public void testBranchesOnlyInBCVariables2() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(100);

        DataSet<Tuple2<Long, Long>> input =
                env.generateSequence(1, 10).map(new Duplicator<Long>()).name("proper input");

        DataSet<Long> bc_input1 = env.generateSequence(1, 10).name("BC input 1");
        // BUGFIX: this source was mislabeled "BC input 1" (copy-paste error);
        // it is the second, distinct broadcast input.
        DataSet<Long> bc_input2 = env.generateSequence(1, 10).name("BC input 2");

        // both join inputs branch off the same proper input and broadcast sources;
        // joinInput1 additionally maps bc_input1 before broadcasting it
        DataSet<Tuple2<Long, Long>> joinInput1 =
                input.map(new IdentityMapper<Tuple2<Long, Long>>())
                        .withBroadcastSet(bc_input1.map(new IdentityMapper<Long>()), "bc1")
                        .withBroadcastSet(bc_input2, "bc2");
        DataSet<Tuple2<Long, Long>> joinInput2 =
                input.map(new IdentityMapper<Tuple2<Long, Long>>())
                        .withBroadcastSet(bc_input1, "bc1")
                        .withBroadcastSet(bc_input2, "bc2");

        DataSet<Tuple2<Long, Long>> joinResult =
                joinInput1.join(joinInput2, JoinHint.REPARTITION_HASH_FIRST)
                        .where(0).equalTo(1)
                        .with(new DummyFlatJoinFunction<Tuple2<Long, Long>>());

        input.map(new IdentityMapper<Tuple2<Long, Long>>())
                .withBroadcastSet(bc_input1, "bc1")
                .union(joinResult)
                .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());

        Plan plan = env.createProgramPlan();
        // compiling must succeed; the assertion is that no exception is thrown
        compileNoStats(plan);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Aggregations