Example usage of org.apache.flink.optimizer.CompilerException in the Apache Flink project,
taken from the class DeltaIterationDependenciesTest, method testExceptionWhenNewWorksetNotDependentOnWorkset.
/**
 * Verifies that the optimizer rejects a delta iteration whose next workset is
 * not derived from the current workset: here the next workset joins the
 * solution set with the original input instead, which must trigger a
 * {@link CompilerException} during compilation.
 */
@Test
public void testExceptionWhenNewWorksetNotDependentOnWorkset() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Long, Long>> input = env.fromElements(new Tuple2<Long, Long>(0L, 0L));

        DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> deltaIteration =
                input.iterateDelta(input, 10, 0);

        DataSet<Tuple2<Long, Long>> delta = deltaIteration.getSolutionSet()
                .join(deltaIteration.getWorkset())
                .where(0).equalTo(0)
                .projectFirst(1).projectSecond(1);

        // Deliberately broken: the next workset is built from the solution set
        // and the original input, so it does not depend on the iteration's workset.
        DataSet<Tuple2<Long, Long>> nextWorkset = deltaIteration.getSolutionSet()
                .join(input)
                .where(0).equalTo(0)
                .projectFirst(1).projectSecond(1);

        DataSet<Tuple2<Long, Long>> result = deltaIteration.closeWith(delta, nextWorkset);
        result.output(new DiscardingOutputFormat<Tuple2<Long, Long>>());

        Plan p = env.createProgramPlan();
        try {
            compileNoStats(p);
            fail("Should not be able to compile, since the next workset does not depend on the workset");
        } catch (CompilerException e) {
            // expected: the optimizer must reject this plan
        } catch (Exception e) {
            // Fix: include the actual exception so an unexpected failure is
            // diagnosable instead of being swallowed behind a generic message.
            fail("wrong exception type: " + e);
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example usage of org.apache.flink.optimizer.CompilerException in the Apache Flink project,
taken from the class JarActionHandler, method getJobGraphAndClassLoader.
/**
 * Builds the {@link JobGraph} for the configured jar together with the
 * user-code class loader.
 *
 * @param config handler configuration carrying the jar file name, entry class,
 *     program arguments, parallelism, and savepoint restore settings
 * @return the compiled job graph paired with the program's user-code class loader
 * @throws CompilerException if neither a streaming nor an optimized batch plan
 *     could be turned into a job graph
 * @throws ProgramInvocationException if a user-library URL cannot be converted
 *     to a valid path
 * @throws Exception if loading or optimizing the packaged program fails
 */
protected Tuple2<JobGraph, ClassLoader> getJobGraphAndClassLoader(JarActionHandlerConfig config) throws Exception {
    // generate the graph
    JobGraph graph = null;

    PackagedProgram program = new PackagedProgram(
            new File(jarDir, config.getJarFile()),
            config.getEntryClass(),
            config.getProgramArgs());
    ClassLoader classLoader = program.getUserCodeClassLoader();

    Optimizer optimizer = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), new Configuration());
    FlinkPlan plan = ClusterClient.getOptimizedPlan(optimizer, program, config.getParallelism());

    // A streaming plan carries its own job graph; a batch plan must be compiled.
    if (plan instanceof StreamingPlan) {
        graph = ((StreamingPlan) plan).getJobGraph();
    } else if (plan instanceof OptimizedPlan) {
        graph = new JobGraphGenerator().compileJobGraph((OptimizedPlan) plan);
    }
    if (graph == null) {
        throw new CompilerException("A valid job graph couldn't be generated for the jar.");
    }

    // Set the savepoint settings
    graph.setSavepointRestoreSettings(config.getSavepointRestoreSettings());

    // Attach all user libraries so they are shipped with the job.
    for (URL jar : program.getAllLibraries()) {
        try {
            graph.addJar(new Path(jar.toURI()));
        } catch (URISyntaxException e) {
            // Fix: preserve the original cause and name the offending URL
            // instead of dropping both behind a generic message.
            throw new ProgramInvocationException("Invalid jar path: " + jar, e);
        }
    }
    return Tuple2.of(graph, classLoader);
}
Example usage of org.apache.flink.optimizer.CompilerException in the Apache Flink project,
taken from the class JoinOnConflictingPartitioningsTest, method testRejectJoinOnHashAndRangePartitioning.
/**
 * The optimizer must refuse a join whose ship-strategy hints conflict:
 * hash partitioning on the first input, range partitioning on the second.
 */
@Test
public void testRejectJoinOnHashAndRangePartitioning() {
    try {
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        final DataSet<Tuple2<Long, Long>> data = env.fromElements(new Tuple2<Long, Long>(0L, 0L));

        // Contradictory hints: one side hash-partitioned, the other range-partitioned.
        final Configuration hints = new Configuration();
        hints.setString(Optimizer.HINT_SHIP_STRATEGY_FIRST_INPUT, Optimizer.HINT_SHIP_STRATEGY_REPARTITION_HASH);
        hints.setString(Optimizer.HINT_SHIP_STRATEGY_SECOND_INPUT, Optimizer.HINT_SHIP_STRATEGY_REPARTITION_RANGE);

        data.join(data)
                .where(0).equalTo(0)
                .withParameters(hints)
                .output(new DiscardingOutputFormat<Tuple2<Tuple2<Long, Long>, Tuple2<Long, Long>>>());

        final Plan plan = env.createProgramPlan();
        try {
            compileNoStats(plan);
            fail("This should fail with an exception");
        } catch (CompilerException e) {
            // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Aggregations