Search in sources :

Example 6 with ProgramInvocationException

use of org.apache.flink.client.program.ProgramInvocationException in project flink by apache.

the class MiscellaneousIssuesITCase method testNullValues.

@Test
public void testNullValues() {
    try {
        // Run against the shared mini-cluster; a single parallel instance is enough
        // to surface the null-record failure deterministically.
        final ExecutionEnvironment env =
                ExecutionEnvironment.createRemoteEnvironment("localhost", cluster.getLeaderRPCPort());
        env.setParallelism(1);
        env.getConfig().disableSysoutLogging();

        // Mapper deliberately emits null for every record — the runtime must reject this.
        final DataSet<String> nullEmitting = env.fromElements("hallo")
                .map(new MapFunction<String, String>() {

                    @Override
                    public String map(String in) throws Exception {
                        return null;
                    }
                });
        nullEmitting.writeAsText("/tmp/myTest", FileSystem.WriteMode.OVERWRITE);

        try {
            env.execute();
            fail("this should fail due to null values.");
        } catch (ProgramInvocationException expected) {
            // Expect: ProgramInvocationException -> job failure cause -> NullPointerException.
            assertNotNull(expected.getCause());
            assertNotNull(expected.getCause().getCause());
            assertTrue(expected.getCause().getCause() instanceof NullPointerException);
        }
    } catch (Exception e) {
        // Any other exception means the test setup itself broke.
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) Test(org.junit.Test)

Example 7 with ProgramInvocationException

use of org.apache.flink.client.program.ProgramInvocationException in project flink by apache.

the class SuccessAfterNetworkBuffersFailureITCase method testSuccessfulProgramAfterFailure.

@Test
public void testSuccessfulProgramAfterFailure() {
    LocalFlinkMiniCluster miniCluster = null;
    try {
        // Deliberately small network-buffer pool: ConnectedComponents fits within it,
        // while K-Means (below) is expected to exhaust it.
        final Configuration clusterConfig = new Configuration();
        clusterConfig.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 2);
        clusterConfig.setInteger(ConfigConstants.TASK_MANAGER_MEMORY_SIZE_KEY, 80);
        clusterConfig.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 8);
        clusterConfig.setInteger(ConfigConstants.TASK_MANAGER_NETWORK_NUM_BUFFERS_KEY, 840);

        miniCluster = new LocalFlinkMiniCluster(clusterConfig, false);
        miniCluster.start();

        // Step 1: a program within the buffer budget must succeed.
        try {
            runConnectedComponents(miniCluster.getLeaderRPCPort());
        } catch (Exception e) {
            e.printStackTrace();
            fail("Program Execution should have succeeded.");
        }

        // Step 2: a program that needs more buffers than available must fail
        // with the dedicated "insufficient buffers" error.
        try {
            runKMeans(miniCluster.getLeaderRPCPort());
            fail("This program execution should have failed.");
        } catch (ProgramInvocationException e) {
            assertTrue(e.getCause().getCause().getMessage().contains("Insufficient number of network buffers"));
        }

        // Step 3: after that failure, the buffers must be released again so the
        // first program can run once more.
        try {
            runConnectedComponents(miniCluster.getLeaderRPCPort());
        } catch (Exception e) {
            e.printStackTrace();
            fail("Program Execution should have succeeded.");
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        if (miniCluster != null) {
            miniCluster.shutdown();
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) LocalFlinkMiniCluster(org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster) Test(org.junit.Test)

Example 8 with ProgramInvocationException

use of org.apache.flink.client.program.ProgramInvocationException in project flink by apache.

the class AccumulatorErrorITCase method testInvalidTypeAccumulator.

@Test
public void testInvalidTypeAccumulator() throws Exception {
    final ExecutionEnvironment env =
            ExecutionEnvironment.createRemoteEnvironment("localhost", cluster.getLeaderRPCPort());
    env.getConfig().disableSysoutLogging();

    // Chain two mappers whose accumulators clash in type; the job must fail
    // and the UnsupportedOperationException must be forwarded to the client.
    env.generateSequence(0, 10000)
            .map(new IncompatibleAccumulatorTypesMapper())
            .map(new IncompatibleAccumulatorTypesMapper2())
            .output(new DiscardingOutputFormat<Long>());

    try {
        env.execute();
        fail("Should have failed.");
    } catch (ProgramInvocationException pie) {
        // Cause chain: ProgramInvocationException -> JobExecutionException
        //   -> Exception -> UnsupportedOperationException.
        Assert.assertTrue("Exception should be passed:", pie.getCause() instanceof JobExecutionException);
        Assert.assertTrue("Root cause should be:", pie.getCause().getCause() instanceof Exception);
        Assert.assertTrue("Root cause should be:", pie.getCause().getCause().getCause() instanceof UnsupportedOperationException);
    }
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) JobExecutionException(org.apache.flink.runtime.client.JobExecutionException) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) Test(org.junit.Test)

Example 9 with ProgramInvocationException

use of org.apache.flink.client.program.ProgramInvocationException in project flink by apache.

the class AccumulatorErrorITCase method testFaultyAccumulator.

@Test
public void testFaultyAccumulator() throws Exception {
    final ExecutionEnvironment env =
            ExecutionEnvironment.createRemoteEnvironment("localhost", cluster.getLeaderRPCPort());
    env.getConfig().disableSysoutLogging();

    // A mapper whose accumulator throws CustomException; that exception must
    // travel back to the submitting client inside the failure cause chain.
    env.generateSequence(0, 10000)
            .map(new FaultyAccumulatorUsingMapper())
            .output(new DiscardingOutputFormat<Long>());

    try {
        env.execute();
        fail("Should have failed.");
    } catch (ProgramInvocationException pie) {
        Assert.assertTrue("Exception should be passed:", pie.getCause() instanceof JobExecutionException);
        Assert.assertTrue("Root cause should be:", pie.getCause().getCause() instanceof CustomException);
    }
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) JobExecutionException(org.apache.flink.runtime.client.JobExecutionException) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) Test(org.junit.Test)

Example 10 with ProgramInvocationException

use of org.apache.flink.client.program.ProgramInvocationException in project flink by apache.

the class CliFrontendPackageProgramTest method testPlanWithExternalClass.

/**
	 * Ensure that we will never have the following error.
	 *
	 * <pre>
	 * 	org.apache.flink.client.program.ProgramInvocationException: The main method caused an error.
	 *		at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:398)
	 *		at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:301)
	 *		at org.apache.flink.client.program.Client.getOptimizedPlan(Client.java:140)
	 *		at org.apache.flink.client.program.Client.getOptimizedPlanAsJson(Client.java:125)
	 *		at org.apache.flink.client.CliFrontend.info(CliFrontend.java:439)
	 *		at org.apache.flink.client.CliFrontend.parseParameters(CliFrontend.java:931)
	 *		at org.apache.flink.client.CliFrontend.main(CliFrontend.java:951)
	 *	Caused by: java.io.IOException: java.lang.RuntimeException: java.lang.ClassNotFoundException: org.apache.hadoop.hive.ql.io.RCFileInputFormat
	 *		at org.apache.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:102)
	 *		at org.apache.hcatalog.mapreduce.HCatInputFormat.setInput(HCatInputFormat.java:54)
	 *		at tlabs.CDR_In_Report.createHCatInputFormat(CDR_In_Report.java:322)
	 *		at tlabs.CDR_Out_Report.main(CDR_Out_Report.java:380)
	 *		at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	 *		at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	 *		at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	 *		at java.lang.reflect.Method.invoke(Method.java:622)
	 *		at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:383)
	 * </pre>
	 *
	 * The test works as follows:
	 *
	 * <ul>
	 *   <li> Use the CliFrontend to invoke a jar file that loads a class which is only available
	 * 	      in the jarfile itself (via a custom classloader)
	 *   <li> Change the Usercode classloader of the PackagedProgram to a special classloader for this test
	 *   <li> the classloader will accept the special class (and return a String.class)
	 * </ul>
	 */
@Test
public void testPlanWithExternalClass() throws CompilerException, ProgramInvocationException {
    // Final one-element array works as a mutable flag the anonymous classloader
    // below can set from inside its loadClass override.
    final boolean[] callme = { false };
    try {
        // Command line: two extra classpath URLs, an explicit entry class (-c),
        // the test jar, and three program arguments.
        String[] arguments = { "--classpath", "file:///tmp/foo", "--classpath", "file:///tmp/bar", "-c", TEST_JAR_CLASSLOADERTEST_CLASS, getTestJarPath(), "true", "arg1", "arg2" };
        URL[] classpath = new URL[] { new URL("file:///tmp/foo"), new URL("file:///tmp/bar") };
        String[] reducedArguments = { "true", "arg1", "arg2" };
        // Verify the parser split jar path / classpaths / entry class / program args correctly.
        RunOptions options = CliFrontendParser.parseRunCommand(arguments);
        assertEquals(getTestJarPath(), options.getJarFilePath());
        assertArrayEquals(classpath, options.getClasspaths().toArray());
        assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, options.getEntryPointClassName());
        assertArrayEquals(reducedArguments, options.getProgramArgs());
        CliFrontend frontend = new CliFrontend(CliFrontendTestUtils.getConfigDir());
        // Spy the program so its user-code classloader can be replaced below.
        PackagedProgram prog = spy(frontend.buildProgram(options));
        // Delegating classloader that intercepts exactly the Hive class from the
        // stack trace above; it records the lookup and deliberately returns a
        // wrong class (String) so the program's main method fails.
        ClassLoader testClassLoader = new ClassLoader(prog.getUserCodeClassLoader()) {

            @Override
            public Class<?> loadClass(String name) throws ClassNotFoundException {
                if ("org.apache.hadoop.hive.ql.io.RCFileInputFormat".equals(name)) {
                    callme[0] = true;
                    // Intentionally return the wrong class.
                    return String.class;
                } else {
                    return super.loadClass(name);
                }
            }
        };
        // From here on, the program hands out the test classloader instead of its own.
        when(prog.getUserCodeClassLoader()).thenReturn(testClassLoader);
        assertEquals(TEST_JAR_CLASSLOADERTEST_CLASS, prog.getMainClassName());
        assertArrayEquals(reducedArguments, prog.getArguments());
        Configuration c = new Configuration();
        Optimizer compiler = new Optimizer(new DataStatistics(), new DefaultCostEstimator(), c);
        // we expect this to fail with a "ClassNotFoundException"
        ClusterClient.getOptimizedPlanAsJson(compiler, prog, 666);
        fail("Should have failed with a ClassNotFoundException");
    } catch (ProgramInvocationException e) {
        // The expected failure path: cause must be the ClassNotFoundException ...
        if (!(e.getCause() instanceof ClassNotFoundException)) {
            e.printStackTrace();
            fail("Program didn't throw ClassNotFoundException");
        }
        // ... and it must have been produced via our interception, not elsewhere.
        assertTrue("Classloader was not called", callme[0]);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Program failed with the wrong exception: " + e.getClass().getName());
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) Optimizer(org.apache.flink.optimizer.Optimizer) DataStatistics(org.apache.flink.optimizer.DataStatistics) URL(java.net.URL) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) FileNotFoundException(java.io.FileNotFoundException) CompilerException(org.apache.flink.optimizer.CompilerException) PackagedProgram(org.apache.flink.client.program.PackagedProgram) DefaultCostEstimator(org.apache.flink.optimizer.costs.DefaultCostEstimator) RunOptions(org.apache.flink.client.cli.RunOptions) Test(org.junit.Test)

Aggregations

ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException)23 Test (org.junit.Test)13 IOException (java.io.IOException)8 ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment)8 Configuration (org.apache.flink.configuration.Configuration)7 JobExecutionException (org.apache.flink.runtime.client.JobExecutionException)5 File (java.io.File)4 URL (java.net.URL)4 Properties (java.util.Properties)3 PackagedProgram (org.apache.flink.client.program.PackagedProgram)3 Path (org.apache.flink.core.fs.Path)3 CompilerException (org.apache.flink.optimizer.CompilerException)3 JobGraph (org.apache.flink.runtime.jobgraph.JobGraph)3 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)3 FileNotFoundException (java.io.FileNotFoundException)2 StringWriter (java.io.StringWriter)2 MalformedURLException (java.net.MalformedURLException)2 ArrayList (java.util.ArrayList)2 JobSubmissionResult (org.apache.flink.api.common.JobSubmissionResult)2 ClusterClient (org.apache.flink.client.program.ClusterClient)2