
Example 1 with ExecutionContext

Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.

From class FlinkIntegrationTest, method testFlinkCmd.

@Test
public void testFlinkCmd() throws InterpreterException {
    // point the flink-cmd interpreter setting at the local Flink installation
    InterpreterSetting flinkCmdInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("flink-cmd");
    flinkCmdInterpreterSetting.setProperty("FLINK_HOME", flinkHome);
    // resolve the flink-cmd interpreter for user1 / note1 through an ExecutionContext
    Interpreter flinkCmdInterpreter = interpreterFactory.getInterpreter("flink-cmd", new ExecutionContext("user1", "note1", "flink"));
    InterpreterContext context = new InterpreterContext.Builder().setNoteId("note1").setParagraphId("paragraph_1").build();
    // run "flink info" against the bundled streaming WordCount example
    InterpreterResult interpreterResult = flinkCmdInterpreter.interpret("info -c org.apache.flink.streaming.examples.wordcount.WordCount " + flinkHome + "/examples/streaming/WordCount.jar", context);
    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
}
Also used: Interpreter (org.apache.zeppelin.interpreter.Interpreter), ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext), InterpreterSetting (org.apache.zeppelin.interpreter.InterpreterSetting), InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext), Test (org.junit.Test)
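
The three positional strings passed to the ExecutionContext constructor above are easy to misread. Below is a minimal sketch of the same call, with local variables named here only for illustration; the argument meanings are inferred from the values the tests pass, not from the constructor's documented parameter names.

// assumed argument order: (user, noteId, defaultInterpreterGroup), matching "user1", "note1", "flink" above
String user = "user1";                       // user the interpreter is resolved for
String noteId = "note1";                     // note the interpreter is bound to
String defaultInterpreterGroup = "flink";    // interpreter group to fall back to when the paragraph does not name one
ExecutionContext executionContext = new ExecutionContext(user, noteId, defaultInterpreterGroup);
Interpreter flinkCmdInterpreter = interpreterFactory.getInterpreter("flink-cmd", executionContext);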

Example 2 with ExecutionContext

Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.

From class SparkIntegrationTest, method testScopedMode.

@Test
public void testScopedMode() throws InterpreterException {
    assumeTrue("Hadoop version mismatch, skip test", isHadoopVersionMatch());
    InterpreterSetting sparkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
    try {
        sparkInterpreterSetting.setProperty("spark.master", "local[*]");
        sparkInterpreterSetting.setProperty("spark.submit.deployMode", "client");
        sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome);
        sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath());
        sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false");
        sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false");
        sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false");
        sparkInterpreterSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
        // scoped per-note mode: notes share one interpreter process but each note gets its own interpreter instance
        sparkInterpreterSetting.getOption().setPerNote(InterpreterOption.SCOPED);
        Interpreter sparkInterpreter1 = interpreterFactory.getInterpreter("spark.spark", new ExecutionContext("user1", "note1", "test"));
        InterpreterContext context = new InterpreterContext.Builder().setNoteId("note1").setParagraphId("paragraph_1").build();
        // sc.range(1,10).map(e=>e+1).sum() evaluates to 54 (the sum of 2..10)
        InterpreterResult interpreterResult = sparkInterpreter1.interpret("sc.range(1,10).map(e=>e+1).sum()", context);
        assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
        assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("54"));
        // under scoped mode a different note id must yield a different interpreter instance
        Interpreter sparkInterpreter2 = interpreterFactory.getInterpreter("spark.spark", new ExecutionContext("user1", "note2", "test"));
        assertNotEquals(sparkInterpreter1, sparkInterpreter2);
        context = new InterpreterContext.Builder().setNoteId("note2").setParagraphId("paragraph_1").build();
        interpreterResult = sparkInterpreter2.interpret("sc.range(1,10).map(e=>e+1).sum()", context);
        assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
        assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("54"));
    } finally {
        interpreterSettingManager.close();
        if (sparkInterpreterSetting != null) {
            // reset InterpreterOption so that it won't affect other tests.
            sparkInterpreterSetting.getOption().setPerNote(InterpreterOption.SHARED);
        }
    }
}
Also used: Interpreter (org.apache.zeppelin.interpreter.Interpreter), ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext), InterpreterSetting (org.apache.zeppelin.interpreter.InterpreterSetting), InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext), Test (org.junit.Test)

Example 3 with ExecutionContext

Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.

From class SparkIntegrationTest, method testSparkSubmit.

@Test
public void testSparkSubmit() throws InterpreterException {
    assumeTrue("Hadoop version mismatch, skip test", isHadoopVersionMatch());
    try {
        InterpreterSetting sparkSubmitInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark-submit");
        sparkSubmitInterpreterSetting.setProperty("SPARK_HOME", sparkHome);
        // test SparkSubmitInterpreter
        InterpreterContext context = new InterpreterContext.Builder().setNoteId("note1").setParagraphId("paragraph_1").build();
        Interpreter sparkSubmitInterpreter = interpreterFactory.getInterpreter("spark-submit", new ExecutionContext("user1", "note1", "test"));
        // submit the SparkPi example through spark-submit and expect it to finish successfully
        InterpreterResult interpreterResult = sparkSubmitInterpreter.interpret("--class org.apache.spark.examples.SparkPi " + sparkHome + "/examples/jars/spark-examples*.jar ", context);
        assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
    } finally {
        interpreterSettingManager.close();
    }
}
Also used: Interpreter (org.apache.zeppelin.interpreter.Interpreter), ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext), InterpreterSetting (org.apache.zeppelin.interpreter.InterpreterSetting), InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext), Test (org.junit.Test)

Example 4 with ExecutionContext

Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.

From class NotebookTest, method testCronWithReleaseResourceClosesOnlySpecificInterpreters.

// @Test
public void testCronWithReleaseResourceClosesOnlySpecificInterpreters() throws IOException, InterruptedException, InterpreterNotFoundException {
    // create a cron scheduled note.
    String cronNoteId = notebook.createNote("note1", anonymous);
    // use write lock, because we overwrite the note configuration
    notebook.processNote(cronNoteId, cronNote -> {
        Map<String, Object> config = new HashMap<>();
        config.put("cron", "1/5 * * * * ?");
        config.put("cronExecutingUser", anonymous.getUser());
        config.put("releaseresource", true);
        cronNote.setConfig(config);
        return null;
    });
    RemoteInterpreter cronNoteInterpreter = (RemoteInterpreter) interpreterFactory.getInterpreter("mock1", new ExecutionContext(anonymous.getUser(), cronNoteId, "test"));
    // create a paragraph of the cron scheduled note.
    notebook.processNote(cronNoteId, cronNote -> {
        Paragraph cronNoteParagraph = cronNote.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        Map<String, Object> config = new HashMap<>();
        config.put("enabled", true);
        cronNoteParagraph.setConfig(config);
        cronNoteParagraph.setText("%mock1 sleep 1000");
        return null;
    });
    // create another note
    String anotherNoteId = notebook.createNote("note1", anonymous);
    RemoteInterpreter anotherNoteInterpreter = (RemoteInterpreter) interpreterFactory.getInterpreter("mock2", new ExecutionContext(anonymous.getUser(), anotherNoteId, "test"));
    // create a paragraph of another note
    notebook.processNote(anotherNoteId, anotherNote -> {
        Paragraph anotherNoteParagraph = anotherNote.addNewParagraph(AuthenticationInfo.ANONYMOUS);
        Map<String, Object> config = new HashMap<>();
        config.put("enabled", true);
        anotherNoteParagraph.setConfig(config);
        anotherNoteParagraph.setText("%mock2 echo 1");
        // run the paragraph of another note
        anotherNote.run(anotherNoteParagraph.getId());
        return null;
    });
    // wait until anotherNoteInterpreter is opened
    while (!anotherNoteInterpreter.isOpened()) {
        Thread.yield();
    }
    // refresh the cron schedule
    schedulerService.refreshCron(cronNoteId);
    // wait until cronNoteInterpreter is opened
    while (!cronNoteInterpreter.isOpened()) {
        Thread.yield();
    }
    // wait until cronNoteInterpreter is closed
    while (cronNoteInterpreter.isOpened()) {
        Thread.yield();
    }
    // wait for a few seconds
    Thread.sleep(5 * 1000);
    // test that anotherNoteInterpreter is still opened
    assertTrue(anotherNoteInterpreter.isOpened());
    // remove cron scheduler
    // use write lock because config is overwritten
    notebook.processNote(cronNoteId, cronNote -> {
        Map<String, Object> config = new HashMap<>();
        config.put("cron", null);
        config.put("cronExecutingUser", null);
        config.put("releaseresource", null);
        cronNote.setConfig(config);
        return null;
    });
    schedulerService.refreshCron(cronNoteId);
    // remove notebooks
    notebook.removeNote(cronNoteId, anonymous);
    notebook.removeNote(anotherNoteId, anonymous);
}
Also used: ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext), HashMap (java.util.HashMap), RemoteInterpreter (org.apache.zeppelin.interpreter.remote.RemoteInterpreter)

Example 5 with ExecutionContext

Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.

From class TimeoutLifecycleManagerTest, method testTimeout_1.

@Test
public void testTimeout_1() throws InterpreterException, InterruptedException, IOException {
    assertTrue(interpreterFactory.getInterpreter("test.echo", new ExecutionContext("user1", "note1", "test")) instanceof RemoteInterpreter);
    RemoteInterpreter remoteInterpreter = (RemoteInterpreter) interpreterFactory.getInterpreter("test.echo", new ExecutionContext("user1", "note1", "test"));
    assertFalse(remoteInterpreter.isOpened());
    InterpreterSetting interpreterSetting = interpreterSettingManager.getInterpreterSettingByName("test");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    Thread.sleep(15 * 1000);
    // the InterpreterGroup is not removed after 15 seconds, because TimeoutLifecycleManager only manages it after it has started
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").build();
    remoteInterpreter.interpret("hello world", context);
    assertTrue(remoteInterpreter.isOpened());
    Thread.sleep(15 * 1000);
    // the InterpreterGroup has timed out, so it is removed
    assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
}
Also used: ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext), InterpreterSetting (org.apache.zeppelin.interpreter.InterpreterSetting), InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext), RemoteInterpreter (org.apache.zeppelin.interpreter.remote.RemoteInterpreter), AbstractInterpreterTest (org.apache.zeppelin.interpreter.AbstractInterpreterTest), Test (org.junit.Test)

Aggregations

ExecutionContext (org.apache.zeppelin.interpreter.ExecutionContext): 18
InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext): 13
Interpreter (org.apache.zeppelin.interpreter.Interpreter): 11
InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult): 11
InterpreterSetting (org.apache.zeppelin.interpreter.InterpreterSetting): 11
Test (org.junit.Test): 11
GetApplicationsRequest (org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest): 5
GetApplicationsResponse (org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse): 5
RemoteInterpreter (org.apache.zeppelin.interpreter.remote.RemoteInterpreter): 4
HashMap (java.util.HashMap): 2
Waiter (net.jodah.concurrentunit.Waiter): 2
Dependency (org.apache.zeppelin.dep.Dependency): 2
AbstractInterpreterTest (org.apache.zeppelin.interpreter.AbstractInterpreterTest): 2
InterpreterException (org.apache.zeppelin.interpreter.InterpreterException): 2
File (java.io.File): 1
FileReader (java.io.FileReader): 1
IOException (java.io.IOException): 1
EnumSet (java.util.EnumSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
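
The aggregation mirrors the pattern every example above follows: an ExecutionContext says who is running which note, the interpreter factory resolves an Interpreter from it, an InterpreterContext scopes a single interpret() call, and the returned InterpreterResult is inspected. Below is a condensed sketch of that flow, assuming the three-argument ExecutionContext constructor and the getInterpreter signature used in the tests above, and assuming InterpreterFactory lives in org.apache.zeppelin.interpreter like the other classes; the setting name, ids, and snippet are placeholders.

import org.apache.zeppelin.interpreter.ExecutionContext;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterResult;

public class ExecutionContextUsageSketch {

    // interpreterFactory would come from the surrounding test fixture, e.g. AbstractInterpreterTest
    static InterpreterResult runSnippet(InterpreterFactory interpreterFactory, String snippet)
            throws InterpreterException {
        // identify the user, the note, and the default interpreter group for this run
        ExecutionContext executionContext = new ExecutionContext("user1", "note1", "test");
        // resolve the interpreter instance bound to that execution context
        Interpreter interpreter = interpreterFactory.getInterpreter("test.echo", executionContext);
        // scope the single paragraph execution
        InterpreterContext context = InterpreterContext.builder()
                .setNoteId("note1")
                .setParagraphId("paragraph_1")
                .build();
        // run the snippet; callers check the result code against InterpreterResult.Code.SUCCESS
        return interpreter.interpret(snippet, context);
    }
}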