Search in sources :

Example 11 with LazyOpenInterpreter

use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.

From the class LivyInterpreterIT, method testSharedInterpreter:

@Test
public void testSharedInterpreter() throws InterpreterException {
    if (!checkPreCondition()) {
        return;
    }
    // Build a single group/session so all Livy interpreters share state.
    InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
    interpreterGroup.put("session_1", new ArrayList<Interpreter>());
    LazyOpenInterpreter sparkInterpreter = new LazyOpenInterpreter(new LivySparkInterpreter(properties));
    sparkInterpreter.setInterpreterGroup(interpreterGroup);
    interpreterGroup.get("session_1").add(sparkInterpreter);
    LazyOpenInterpreter sqlInterpreter = new LazyOpenInterpreter(new LivySparkSQLInterpreter(properties));
    interpreterGroup.get("session_1").add(sqlInterpreter);
    sqlInterpreter.setInterpreterGroup(interpreterGroup);
    LazyOpenInterpreter pysparkInterpreter = new LazyOpenInterpreter(new LivyPySparkInterpreter(properties));
    interpreterGroup.get("session_1").add(pysparkInterpreter);
    pysparkInterpreter.setInterpreterGroup(interpreterGroup);
    LazyOpenInterpreter sparkRInterpreter = new LazyOpenInterpreter(new LivySparkRInterpreter(properties));
    interpreterGroup.get("session_1").add(sparkRInterpreter);
    sparkRInterpreter.setInterpreterGroup(interpreterGroup);
    LazyOpenInterpreter sharedInterpreter = new LazyOpenInterpreter(new LivySharedInterpreter(properties));
    interpreterGroup.get("session_1").add(sharedInterpreter);
    sharedInterpreter.setInterpreterGroup(interpreterGroup);
    sparkInterpreter.open();
    sqlInterpreter.open();
    pysparkInterpreter.open();
    sparkRInterpreter.open();
    try {
        AuthenticationInfo authInfo = new AuthenticationInfo("user1");
        MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
        InterpreterOutput output = new InterpreterOutput(outputListener);
        InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").setAuthenticationInfo(authInfo).setInterpreterOut(output).build();
        // detect spark version
        InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        boolean isSpark2 = isSpark2((BaseLivyInterpreter) sparkInterpreter.getInnerInterpreter(), context);
        if (!isSpark2) {
            // Spark 1.x: sqlContext is the entry point for DataFrame creation.
            result = sparkInterpreter.interpret("val df=sqlContext.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n" + "df.collect()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
            sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
            // access table from pyspark
            result = pysparkInterpreter.interpret("sqlContext.sql(\"select * from df\").show()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("+-----+-----+\n" + "|col_1|col_2|\n" + "+-----+-----+\n" + "|hello|   20|\n" + "+-----+-----+"));
            // access table from sparkr
            result = sparkRInterpreter.interpret("head(sql(sqlContext, \"select * from df\"))", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("col_1 col_2\n1 hello    20"));
        } else {
            // Spark 2.x: the unified `spark` session replaces sqlContext.
            result = sparkInterpreter.interpret("val df=spark.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n" + "df.collect()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
            sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
            // access table from pyspark
            result = pysparkInterpreter.interpret("spark.sql(\"select * from df\").show()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("+-----+-----+\n" + "|col_1|col_2|\n" + "+-----+-----+\n" + "|hello|   20|\n" + "+-----+-----+"));
            // access table from sparkr
            result = sparkRInterpreter.interpret("head(sql(\"select * from df\"))", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("col_1 col_2\n1 hello    20"));
        }
        // test plotting of python
        result = pysparkInterpreter.interpret("import matplotlib.pyplot as plt\n" + "plt.switch_backend('agg')\n" + "data=[1,2,3,4]\n" + "plt.figure()\n" + "plt.plot(data)\n" + "%matplot plt", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertEquals(InterpreterResult.Type.IMG, result.message().get(0).getType());
        // test plotting of R
        result = sparkRInterpreter.interpret("hist(mtcars$mpg)", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertEquals(InterpreterResult.Type.IMG, result.message().get(0).getType());
        // test code completion
        List<InterpreterCompletion> completionResult = sparkInterpreter.completion("df.sho", 6, context);
        assertEquals(1, completionResult.size());
        assertEquals("show", completionResult.get(0).name);
    } finally {
        // Close every interpreter that was opened above. The original version
        // closed only spark and sql, leaking the pyspark and sparkR sessions.
        sparkInterpreter.close();
        sqlInterpreter.close();
        pysparkInterpreter.close();
        sparkRInterpreter.close();
    }
}
Also used : Interpreter(org.apache.zeppelin.interpreter.Interpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) InterpreterCompletion(org.apache.zeppelin.interpreter.thrift.InterpreterCompletion) InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo) InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup) InterpreterOutput(org.apache.zeppelin.interpreter.InterpreterOutput) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) Test(org.junit.Test)

Example 12 with LazyOpenInterpreter

use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.

From the class IRKernelTest, method setUp:

@Before
public void setUp() throws InterpreterException {
    // Register a fresh interpreter context before creating the interpreter,
    // then wire it into a group under "session_1" and open it.
    InterpreterContext.set(getInterpreterContext());
    Properties props = new Properties();
    interpreter = createInterpreter(props);
    InterpreterGroup group = new InterpreterGroup();
    group.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1");
    interpreter.setInterpreterGroup(group);
    interpreter.open();
}
Also used : LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup) Properties(java.util.Properties) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) Before(org.junit.Before)

Example 13 with LazyOpenInterpreter

use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.

From the class SparkSqlInterpreter, method getSparkInterpreter:

/**
 * Locates the SparkInterpreter in the same session, unwrapping any
 * WrappedInterpreter layers. If a LazyOpenInterpreter wrapper is found
 * along the way, it is opened so the returned interpreter is ready to use.
 */
private SparkInterpreter getSparkInterpreter() {
    Interpreter current = getInterpreterInTheSameSessionByClassName(SparkInterpreter.class.getName());
    LazyOpenInterpreter lazyWrapper = null;
    // Peel off wrapper layers, remembering the lazy wrapper (if any).
    while (current instanceof WrappedInterpreter) {
        if (current instanceof LazyOpenInterpreter) {
            lazyWrapper = (LazyOpenInterpreter) current;
        }
        current = ((WrappedInterpreter) current).getInnerInterpreter();
    }
    // Cast before opening, preserving the original failure order if the
    // innermost interpreter is not a SparkInterpreter.
    SparkInterpreter spark = (SparkInterpreter) current;
    if (lazyWrapper != null) {
        lazyWrapper.open();
    }
    return spark;
}
Also used : WrappedInterpreter(org.apache.zeppelin.interpreter.WrappedInterpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) Interpreter(org.apache.zeppelin.interpreter.Interpreter) WrappedInterpreter(org.apache.zeppelin.interpreter.WrappedInterpreter)

Example 14 with LazyOpenInterpreter

use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.

From the class NoteInterpreterLoaderTest, method testNoteInterpreterCloseForAll:

@Test
public void testNoteInterpreterCloseForAll() throws IOException {
    // "FitstNote" runs per-note SCOPED; "yourFirstNote" runs per-note ISOLATED.
    interpreterSettingManager.setInterpreters("user", "FitstNote", interpreterSettingManager.getDefaultInterpreterSettingList());
    interpreterSettingManager.getInterpreterSettings("FitstNote").get(0).getOption().setPerNote(InterpreterOption.SCOPED);
    interpreterSettingManager.setInterpreters("user", "yourFirstNote", interpreterSettingManager.getDefaultInterpreterSettingList());
    interpreterSettingManager.getInterpreterSettings("yourFirstNote").get(0).getOption().setPerNote(InterpreterOption.ISOLATED);
    // interpreters are not created before accessing it
    assertNull(interpreterSettingManager.getInterpreterSettings("FitstNote").get(0).getInterpreterGroup("user", "FitstNote").get("FitstNote"));
    assertNull(interpreterSettingManager.getInterpreterSettings("yourFirstNote").get(0).getInterpreterGroup("user", "yourFirstNote").get("yourFirstNote"));
    Interpreter scopedIntp = factory.getInterpreter("user", "FitstNote", "group1.mock1");
    Interpreter isolatedIntp = factory.getInterpreter("user", "yourFirstNote", "group1.mock1");
    scopedIntp.open();
    isolatedIntp.open();
    assertTrue(isOpen(scopedIntp));
    assertTrue(isOpen(isolatedIntp));
    // Closing one note must not touch the other note's interpreter.
    interpreterSettingManager.closeNote("user", "FitstNote");
    assertFalse(isOpen(scopedIntp));
    assertTrue(isOpen(isolatedIntp));
    // A closed interpreter can be reopened.
    scopedIntp.open();
    assertTrue(isOpen(scopedIntp));
    assertTrue(isOpen(isolatedIntp));
    // Closing an unknown user/note pair is a no-op.
    interpreterSettingManager.closeNote("invalid", "Note");
    assertTrue(isOpen(scopedIntp));
    assertTrue(isOpen(isolatedIntp));
    // A user id that is only a substring of a real one must not match either.
    interpreterSettingManager.closeNote("u", "Note");
    assertTrue(isOpen(scopedIntp));
    assertTrue(isOpen(isolatedIntp));
}

// Returns whether the (lazy-wrapped) interpreter is currently open.
private static boolean isOpen(Interpreter intp) {
    return ((LazyOpenInterpreter) intp).isOpen();
}
Also used : Interpreter(org.apache.zeppelin.interpreter.Interpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) Test(org.junit.Test)

Example 15 with LazyOpenInterpreter

use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.

From the class PigQueryInterpreterTest, method setUp:

@Before
public void setUp() throws InterpreterException {
    // Configure pig to run locally with a bounded result size.
    Properties properties = new Properties();
    properties.put("zeppelin.pig.execType", "local");
    properties.put("zeppelin.pig.maxResult", "20");
    pigInterpreter = new LazyOpenInterpreter(new PigInterpreter(properties));
    pigQueryInterpreter = new LazyOpenInterpreter(new PigQueryInterpreter(properties));
    // Use the diamond operator instead of a raw ArrayList (unchecked warning).
    List<Interpreter> interpreters = new ArrayList<>();
    interpreters.add(pigInterpreter);
    interpreters.add(pigQueryInterpreter);
    // Both interpreters share one group under the same note id.
    InterpreterGroup group = new InterpreterGroup();
    group.put("note_id", interpreters);
    pigInterpreter.setInterpreterGroup(group);
    pigQueryInterpreter.setInterpreterGroup(group);
    pigInterpreter.open();
    pigQueryInterpreter.open();
    context = InterpreterContext.builder().setParagraphId("paragraphId").build();
}
Also used : LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter) Interpreter(org.apache.zeppelin.interpreter.Interpreter) InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup) ArrayList(java.util.ArrayList) Properties(java.util.Properties) Before(org.junit.Before)

Aggregations

LazyOpenInterpreter (org.apache.zeppelin.interpreter.LazyOpenInterpreter)25 InterpreterGroup (org.apache.zeppelin.interpreter.InterpreterGroup)21 Interpreter (org.apache.zeppelin.interpreter.Interpreter)17 Properties (java.util.Properties)16 InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext)14 Before (org.junit.Before)8 Test (org.junit.Test)8 InterpreterException (org.apache.zeppelin.interpreter.InterpreterException)5 AngularObjectRegistry (org.apache.zeppelin.display.AngularObjectRegistry)3 InterpreterOutput (org.apache.zeppelin.interpreter.InterpreterOutput)3 IOException (java.io.IOException)2 InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult)2 WrappedInterpreter (org.apache.zeppelin.interpreter.WrappedInterpreter)2 AuthenticationInfo (org.apache.zeppelin.user.AuthenticationInfo)2 ArrayList (java.util.ArrayList)1 TException (org.apache.thrift.TException)1 TTransportException (org.apache.thrift.transport.TTransportException)1 DependencyResolver (org.apache.zeppelin.dep.DependencyResolver)1 AngularObject (org.apache.zeppelin.display.AngularObject)1 ApplicationException (org.apache.zeppelin.helium.ApplicationException)1