
Example 96 with AuthenticationInfo

Use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

Class NotebookServer, method broadcastReloadedNoteList.

public void broadcastReloadedNoteList(AuthenticationInfo subject, HashSet<String> userAndRoles) {
    if (subject == null) {
        subject = new AuthenticationInfo(StringUtils.EMPTY);
    }
    // reload and reply to the requesting user first
    List<Map<String, String>> notesInfo = generateNotesInfo(true, subject, userAndRoles);
    multicastToUser(subject.getUser(), new Message(OP.NOTES_INFO).put("notes", notesInfo));
    // then broadcast to everyone else
    broadcastNoteListExcept(notesInfo, subject);
}
Also used: InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage), Message(org.apache.zeppelin.notebook.socket.Message), WatcherMessage(org.apache.zeppelin.notebook.socket.WatcherMessage), Map(java.util.Map), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), HashMap(java.util.HashMap), AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo)
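
For orientation, a minimal caller sketch. It is hypothetical: the ReloadBroadcastSketch class, the "user1"/"admin" values, and the package of NotebookServer are assumptions; only broadcastReloadedNoteList and AuthenticationInfo come from the example above.

import java.util.Arrays;
import java.util.HashSet;

import org.apache.zeppelin.socket.NotebookServer; // package assumed
import org.apache.zeppelin.user.AuthenticationInfo;

public class ReloadBroadcastSketch {
    static void reloadFor(NotebookServer server) {
        // An authenticated subject; passing null would make the method fall back
        // to an anonymous AuthenticationInfo, as shown above.
        AuthenticationInfo subject = new AuthenticationInfo("user1");
        HashSet<String> userAndRoles = new HashSet<>(Arrays.asList("user1", "admin"));
        // The requesting user receives the refreshed note list first; others follow.
        server.broadcastReloadedNoteList(subject, userAndRoles);
    }
}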

Example 97 with AuthenticationInfo

Use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

Class LivyInterpreterIT, method testSparkInterpreter.

@Test
public void testSparkInterpreter() throws InterpreterException {
    if (!checkPreCondition()) {
        return;
    }
    InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
    interpreterGroup.put("session_1", new ArrayList<Interpreter>());
    LivySparkInterpreter sparkInterpreter = new LivySparkInterpreter(properties);
    sparkInterpreter.setInterpreterGroup(interpreterGroup);
    interpreterGroup.get("session_1").add(sparkInterpreter);
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").setAuthenticationInfo(authInfo).setInterpreterOut(output).build();
    sparkInterpreter.open();
    LivySparkSQLInterpreter sqlInterpreter = new LivySparkSQLInterpreter(properties);
    interpreterGroup.get("session_1").add(sqlInterpreter);
    sqlInterpreter.setInterpreterGroup(interpreterGroup);
    sqlInterpreter.open();
    try {
        // detect spark version
        InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        boolean isSpark2 = isSpark2(sparkInterpreter, context);
        testRDD(sparkInterpreter, isSpark2);
        testDataFrame(sparkInterpreter, sqlInterpreter, isSpark2);
    } finally {
        sparkInterpreter.close();
        sqlInterpreter.close();
    }
}
Also used: Interpreter(org.apache.zeppelin.interpreter.Interpreter), LazyOpenInterpreter(org.apache.zeppelin.interpreter.LazyOpenInterpreter), InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup), InterpreterOutput(org.apache.zeppelin.interpreter.InterpreterOutput), InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext), AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo), Test(org.junit.Test)
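
The InterpreterContext construction in this test is repeated verbatim in every Livy example below. As a sketch, a small helper could factor it out; the TestContexts class is hypothetical (not part of LivyInterpreterIT), while the builder calls are exactly those used above.

import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.user.AuthenticationInfo;

// Hypothetical helper; mirrors the builder chain used throughout these tests.
final class TestContexts {
    static InterpreterContext forUser(String user, InterpreterOutput output) {
        return InterpreterContext.builder()
                .setNoteId("noteId")
                .setParagraphId("paragraphId")
                .setAuthenticationInfo(new AuthenticationInfo(user))
                .setInterpreterOut(output)
                .build();
    }
}

With it, each test body would reduce to InterpreterContext context = TestContexts.forUser("user1", output);.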

Example 98 with AuthenticationInfo

Use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

Class LivyInterpreterIT, method testPySparkInterpreter.

@Test
public void testPySparkInterpreter() throws InterpreterException {
    if (!checkPreCondition()) {
        return;
    }
    final LivyPySparkInterpreter pysparkInterpreter = new LivyPySparkInterpreter(properties);
    pysparkInterpreter.setInterpreterGroup(mock(InterpreterGroup.class));
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    final InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").setAuthenticationInfo(authInfo).setInterpreterOut(output).build();
    pysparkInterpreter.open();
    // test traceback msg
    try {
        pysparkInterpreter.getLivyVersion();
        // For Livy >= 0.3, submit some erroneous Spark code and check that the reported
        // result is more than one line
        InterpreterResult result = pysparkInterpreter.interpret("sc.parallelize(wrongSyntax(1, 2)).count()", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertTrue(result.message().get(0).getData().split("\n").length > 1);
        assertTrue(result.message().get(0).getData().contains("Traceback"));
    } catch (APINotFoundException e) {
        // Only Livy 0.2 throws this exception, since it has no /version endpoint.
        // In Livy 0.2 most error messages are encapsulated in the evalue field; only
        // print(a) in PySpark returns a non-empty traceback.
        InterpreterResult result = pysparkInterpreter.interpret("print(a)", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertTrue(result.message().get(0).getData().split("\n").length > 1);
        assertTrue(result.message().get(0).getData().contains("Traceback"));
    }
    // test UTF-8 encoding
    String utf8Str = "你好";
    InterpreterResult utf8Result = pysparkInterpreter.interpret("print(\"" + utf8Str + "\")", context);
    assertEquals(InterpreterResult.Code.SUCCESS, utf8Result.code());
    assertTrue(utf8Result.message().get(0).getData().contains(utf8Str));
    // test special characters
    String charStr = "açñiñíûÑoç";
    InterpreterResult res = pysparkInterpreter.interpret("print(\"" + charStr + "\")", context);
    assertEquals(InterpreterResult.Code.SUCCESS, res.code());
    assertTrue(res.message().get(0).getData().contains(charStr));
    try {
        InterpreterResult result = pysparkInterpreter.interpret("sc.version", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        boolean isSpark2 = isSpark2(pysparkInterpreter, context);
        // test RDD api
        result = pysparkInterpreter.interpret("sc.range(1, 10).sum()", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertTrue(result.message().get(0).getData().contains("45"));
        // test DataFrame api
        if (!isSpark2) {
            pysparkInterpreter.interpret("from pyspark.sql import SQLContext\n" + "sqlContext = SQLContext(sc)", context);
            result = pysparkInterpreter.interpret("df=sqlContext.createDataFrame([(\"hello\",20)])\n" + "df.collect()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            // Python 2 prints unicode strings with a u prefix; Python 3 does not
            assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]") || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]"));
        } else {
            result = pysparkInterpreter.interpret("df=spark.createDataFrame([(\"hello\",20)])\n" + "df.collect()", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            // Python 2 prints unicode strings with a u prefix; Python 3 does not
            assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]") || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]"));
        }
        // test magic api
        pysparkInterpreter.interpret("t = [{\"name\":\"userA\", \"role\":\"roleA\"}," + "{\"name\":\"userB\", \"role\":\"roleB\"}]", context);
        result = pysparkInterpreter.interpret("%table t", context);
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
        assertTrue(result.message().get(0).getData().contains("userA"));
        // error
        result = pysparkInterpreter.interpret("print(a)", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
        assertTrue(result.message().get(0).getData().contains("name 'a' is not defined"));
        // cancel
        if (pysparkInterpreter.livyVersion.newerThanEquals(LivyVersion.LIVY_0_3_0)) {
            Thread cancelThread = new Thread() {

                @Override
                public void run() {
                    // invoke cancel after 1 millisecond to give the job time to start
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    pysparkInterpreter.cancel(context);
                }
            };
            cancelThread.start();
            result = pysparkInterpreter.interpret("import time\n" + "sc.range(1, 10).foreach(lambda a: time.sleep(10))", context);
            assertEquals(InterpreterResult.Code.ERROR, result.code());
            String message = result.message().get(0).getData();
            // two possible messages, since Livy sometimes doesn't return the real cancel exception
            assertTrue(message.contains("cancelled part of cancelled job group") || message.contains("Job is cancelled"));
        }
    } finally {
        pysparkInterpreter.close();
    }
}
Also used: InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup), InterpreterOutput(org.apache.zeppelin.interpreter.InterpreterOutput), InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext), AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo), Test(org.junit.Test)
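
The anonymous cancel thread above reappears in testRDD and testSparkRInterpreter below. A sketch of how that shared shape could be extracted; the CancelAfter class is hypothetical, and the catch-all around cancel() hedges against Zeppelin versions where cancel declares a checked exception.

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;

// Hypothetical utility; not part of LivyInterpreterIT.
final class CancelAfter {
    static Thread start(final Interpreter interpreter, final InterpreterContext context,
                        final long delayMs) {
        Thread t = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    // give the submitted job time to start before cancelling
                    Thread.sleep(delayMs);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
                try {
                    interpreter.cancel(context);
                } catch (Exception e) {
                    // cancel() declares a checked exception in some Zeppelin versions
                    throw new RuntimeException(e);
                }
            }
        });
        t.start();
        return t;
    }
}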

Example 99 with AuthenticationInfo

Use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

Class LivyInterpreterIT, method testRDD.

private void testRDD(final LivySparkInterpreter sparkInterpreter, boolean isSpark2) {
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    final InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").setAuthenticationInfo(authInfo).setInterpreterOut(output).build();
    InterpreterResult result = sparkInterpreter.interpret("sc.parallelize(1 to 10).sum()", context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertTrue(result.message().get(0).getData().contains("Double = 55.0"));
    // single line comment
    String singleLineComment = "println(1)// my comment";
    result = sparkInterpreter.interpret(singleLineComment, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    // multiple line comment
    String multipleLineComment = "println(1)/* multiple \n" + "line \n" + "comment */";
    result = sparkInterpreter.interpret(multipleLineComment, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    // multi-line string
    String multiLineString = "val str = \"\"\"multiple\n" + "line\"\"\"\n" + "println(str)";
    result = sparkInterpreter.interpret(multiLineString, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertTrue(result.message().get(0).getData().contains("multiple\nline"));
    // case class
    String caseClassCode = "case class Person(id:Int, \n" + "name:String)\n" + "val p=Person(1, \"name_a\")";
    result = sparkInterpreter.interpret(caseClassCode, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertTrue(result.message().get(0).getData().contains("p: Person = Person(1,name_a)"));
    // object class
    String objectClassCode = "object Person {}";
    result = sparkInterpreter.interpret(objectClassCode, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    if (!isSpark2) {
        assertTrue(result.message().get(0).getData().contains("defined module Person"));
    } else {
        assertTrue(result.message().get(0).getData().contains("defined object Person"));
    }
    // html output
    String htmlCode = "println(\"%html <h1> hello </h1>\")";
    result = sparkInterpreter.interpret(htmlCode, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertEquals(InterpreterResult.Type.HTML, result.message().get(0).getType());
    // error
    result = sparkInterpreter.interpret("println(a)", context);
    assertEquals(InterpreterResult.Code.ERROR, result.code());
    assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
    assertTrue(result.message().get(0).getData().contains("error: not found: value a"));
    // incomplete code
    result = sparkInterpreter.interpret("if(true){", context);
    assertEquals(InterpreterResult.Code.ERROR, result.code());
    assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
    assertTrue(result.message().get(0).getData().contains("incomplete statement"));
    // cancel
    if (sparkInterpreter.livyVersion.newerThanEquals(LivyVersion.LIVY_0_3_0)) {
        Thread cancelThread = new Thread() {

            @Override
            public void run() {
                // invoke cancel after 1 millisecond to give the job time to start
                try {
                    Thread.sleep(1);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                sparkInterpreter.cancel(context);
            }
        };
        cancelThread.start();
        result = sparkInterpreter.interpret("sc.parallelize(1 to 10).foreach(e=>Thread.sleep(10*1000))", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        String message = result.message().get(0).getData();
        // two possible messages, since Livy sometimes doesn't return the real cancel exception
        assertTrue(message.contains("cancelled part of cancelled job group") || message.contains("Job is cancelled"));
    }
}
Also used: InterpreterOutput(org.apache.zeppelin.interpreter.InterpreterOutput), InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext), AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo)
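
The three-line success check recurs a dozen times in testRDD. A sketch of an assertion helper; the LivyAsserts class is hypothetical, while the assertions are verbatim from the test (the first argument to assertEquals is JUnit 4's failure message, so a failing check prints the full interpreter output).

import static org.junit.Assert.assertEquals;

import org.apache.zeppelin.interpreter.InterpreterResult;

// Hypothetical helper; not part of LivyInterpreterIT.
final class LivyAsserts {
    static void assertSingleSuccessMessage(InterpreterResult result) {
        // result.toString() serves as the failure message for easier debugging
        assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
    }
}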

Example 100 with AuthenticationInfo

Use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

Class LivyInterpreterIT, method testSparkRInterpreter.

@Test
public void testSparkRInterpreter() throws InterpreterException {
    if (!checkPreCondition()) {
        return;
    }
    final LivySparkRInterpreter sparkRInterpreter = new LivySparkRInterpreter(properties);
    sparkRInterpreter.setInterpreterGroup(mock(InterpreterGroup.class));
    try {
        sparkRInterpreter.getLivyVersion();
    } catch (APINotFoundException e) {
        // skip the SparkR test on Livy 0.2, which has known issues
        return;
    }
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    final InterpreterContext context = InterpreterContext.builder().setNoteId("noteId").setParagraphId("paragraphId").setAuthenticationInfo(authInfo).setInterpreterOut(output).build();
    sparkRInterpreter.open();
    try {
        // only run against Livy newer than 0.2.0
        boolean isSpark2 = isSpark2(sparkRInterpreter, context);
        InterpreterResult result = null;
        // test DataFrame api
        if (isSpark2) {
            result = sparkRInterpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("eruptions waiting"));
            // cancel
            Thread cancelThread = new Thread() {

                @Override
                public void run() {
                    // invoke cancel after 1 millisecond to give the job time to start
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    sparkRInterpreter.cancel(context);
                }
            };
            cancelThread.start();
            result = sparkRInterpreter.interpret("df <- as.DataFrame(faithful)\n" + "df1 <- dapplyCollect(df, function(x) " + "{ Sys.sleep(10); x <- cbind(x, x$waiting * 60) })", context);
            assertEquals(InterpreterResult.Code.ERROR, result.code());
            String message = result.message().get(0).getData();
            // two possible messages, since Livy sometimes doesn't return the real cancel exception
            assertTrue(message.contains("cancelled part of cancelled job group") || message.contains("Job is cancelled"));
        } else {
            result = sparkRInterpreter.interpret("df <- createDataFrame(sqlContext, faithful)" + "\nhead(df)", context);
            assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("eruptions waiting"));
        }
        // error
        result = sparkRInterpreter.interpret("cat(a)", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
        assertTrue(result.message().get(0).getData().contains("object 'a' not found"));
    } finally {
        sparkRInterpreter.close();
    }
}
Also used: InterpreterGroup(org.apache.zeppelin.interpreter.InterpreterGroup), InterpreterOutput(org.apache.zeppelin.interpreter.InterpreterOutput), InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult), InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext), AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo), Test(org.junit.Test)
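
The Spark 1 vs Spark 2 branch above differs mainly in which SparkR snippet it submits. A condensed sketch of that version gate; the method is hypothetical, while the two code strings are taken verbatim from the test.

// Hypothetical extraction of the version-dependent SparkR snippet above.
static String dataFrameHeadCode(boolean isSpark2) {
    return isSpark2
            ? "df <- as.DataFrame(faithful)\nhead(df)"
            : "df <- createDataFrame(sqlContext, faithful)\nhead(df)";
}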

Aggregations

AuthenticationInfo (org.apache.zeppelin.user.AuthenticationInfo): 128
Test (org.junit.Test): 44
HashMap (java.util.HashMap): 40
AngularObjectRegistry (org.apache.zeppelin.display.AngularObjectRegistry): 29
Properties (java.util.Properties): 28
Note (org.apache.zeppelin.notebook.Note): 27
LocalResourcePool (org.apache.zeppelin.resource.LocalResourcePool): 23
LinkedList (java.util.LinkedList): 22
GUI (org.apache.zeppelin.display.GUI): 22
Map (java.util.Map): 21
ZeppelinApi (org.apache.zeppelin.annotation.ZeppelinApi): 20
AngularObject (org.apache.zeppelin.display.AngularObject): 19
InterpreterResultMessage (org.apache.zeppelin.interpreter.InterpreterResultMessage): 19
IOException (java.io.IOException): 18
InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext): 18
InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult): 18
Paragraph (org.apache.zeppelin.notebook.Paragraph): 18
InterpreterOutput (org.apache.zeppelin.interpreter.InterpreterOutput): 16
Path (javax.ws.rs.Path): 15
HashSet (java.util.HashSet): 13