Example 81 with AuthenticationInfo

use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

the class NotebookRestApi method getUpdatedJobListforNote.

/**
   * Get updated note jobs for the job manager.
   *
   * Returns the `Note` job change information updated since the given unix timestamp.
   *
   * @return JSON with status.OK
   * @throws IOException
   * @throws IllegalArgumentException
   */
@GET
@Path("jobmanager/{lastUpdateUnixtime}/")
@ZeppelinApi
public Response getUpdatedJobListforNote(@PathParam("lastUpdateUnixtime") long lastUpdateUnixTime) throws IOException, IllegalArgumentException {
    LOG.info("Get updated note jobs lastUpdateTime {}", lastUpdateUnixTime);
    List<Map<String, Object>> noteJobs;
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    noteJobs = notebook.getJobListByUnixTime(false, lastUpdateUnixTime, subject);
    Map<String, Object> response = new HashMap<>();
    response.put("lastResponseUnixTime", System.currentTimeMillis());
    response.put("jobs", noteJobs);
    return new JsonResponse<>(Status.OK, response).build();
}
Also used : HashMap(java.util.HashMap) Map(java.util.Map) AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo) Path(javax.ws.rs.Path) ZeppelinApi(org.apache.zeppelin.annotation.ZeppelinApi) GET(javax.ws.rs.GET)
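A caller typically polls this endpoint with the timestamp of its previous poll, feeding the returned `lastResponseUnixTime` back in. A minimal client-side sketch, assuming a default deployment (Zeppelin on localhost:8080 with the REST API mounted under /api/notebook) and a Java 11+ HttpClient:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class JobManagerPoll {
    public static void main(String[] args) throws Exception {
        // Ask for note jobs updated within the last minute.
        long since = System.currentTimeMillis() - 60_000L;
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/notebook/jobmanager/" + since + "/"))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // Expected envelope: {"status":"OK","body":{"lastResponseUnixTime":...,"jobs":[...]}}
        System.out.println(response.body());
    }
}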

Example 82 with AuthenticationInfo

use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

the class NotebookServerTest method setUp.

@Before
public void setUp() {
    mockRequest = mock(HttpServletRequest.class);
    anonymous = new AuthenticationInfo("anonymous");
}
Also used : HttpServletRequest(javax.servlet.http.HttpServletRequest) AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo) Before(org.junit.Before)
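The anonymous subject is an ordinary AuthenticationInfo whose principal string is "anonymous"; the single-argument constructor sets nothing else. A minimal sketch of that behavior, assuming only the getUser() accessor:

import static org.junit.Assert.assertEquals;

import org.apache.zeppelin.user.AuthenticationInfo;
import org.junit.Test;

public class AuthenticationInfoSketchTest {

    @Test
    public void anonymousPrincipalIsJustAString() {
        // Hypothetical test: the constructor stores the principal verbatim.
        AuthenticationInfo anonymous = new AuthenticationInfo("anonymous");
        assertEquals("anonymous", anonymous.getUser());
    }
}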

Example 83 with AuthenticationInfo

use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

the class LivyInterpreterIT method testSparkInterpreterRDD.

@Test
public void testSparkInterpreterRDD() {
    if (!checkPreCondition()) {
        return;
    }
    InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
    interpreterGroup.put("session_1", new ArrayList<Interpreter>());
    final LivySparkInterpreter sparkInterpreter = new LivySparkInterpreter(properties);
    sparkInterpreter.setInterpreterGroup(interpreterGroup);
    interpreterGroup.get("session_1").add(sparkInterpreter);
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    final InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "livy.spark", "title", "text", authInfo, null, null, null, null, null, output);
    sparkInterpreter.open();
    try {
        // detect spark version
        InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        boolean isSpark2 = isSpark2(sparkInterpreter, context);
        // test RDD api
        result = sparkInterpreter.interpret("sc.parallelize(1 to 10).sum()", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertTrue(result.message().get(0).getData().contains("Double = 55.0"));
        // single line comment
        String singleLineComment = "println(1)// my comment";
        result = sparkInterpreter.interpret(singleLineComment, context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        // multiple line comment
        String multipleLineComment = "println(1)/* multiple \n" + "line \n" + "comment */";
        result = sparkInterpreter.interpret(multipleLineComment, context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        // multi-line string
        String multiLineString = "val str = \"\"\"multiple\n" + "line\"\"\"\n" + "println(str)";
        result = sparkInterpreter.interpret(multiLineString, context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertTrue(result.message().get(0).getData().contains("multiple\nline"));
        // case class
        String caseClassCode = "case class Person(id:Int, \n" + "name:String)\n" + "val p=Person(1, \"name_a\")";
        result = sparkInterpreter.interpret(caseClassCode, context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        assertTrue(result.message().get(0).getData().contains("p: Person = Person(1,name_a)"));
        // object class
        String objectClassCode = "object Person {}";
        result = sparkInterpreter.interpret(objectClassCode, context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        if (!isSpark2) {
            assertTrue(result.message().get(0).getData().contains("defined module Person"));
        } else {
            assertTrue(result.message().get(0).getData().contains("defined object Person"));
        }
        // error
        result = sparkInterpreter.interpret("println(a)", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
        assertTrue(result.message().get(0).getData().contains("error: not found: value a"));
        // incomplete code
        result = sparkInterpreter.interpret("if(true){", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
        assertTrue(result.message().get(0).getData().contains("incomplete statement"));
        // cancel
        if (sparkInterpreter.livyVersion.newerThanEquals(LivyVersion.LIVY_0_3_0)) {
            Thread cancelThread = new Thread() {

                @Override
                public void run() {
// invoke cancel after 3 seconds to give the job time to start
                    try {
                        Thread.sleep(3000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    sparkInterpreter.cancel(context);
                }
            };
            cancelThread.start();
            result = sparkInterpreter.interpret("sc.parallelize(1 to 10).foreach(e=>Thread.sleep(10*1000))", context);
            assertEquals(InterpreterResult.Code.ERROR, result.code());
            String message = result.message().get(0).getData();
// two possibilities: sometimes Livy doesn't return the real cancel exception
            assertTrue(message.contains("cancelled part of cancelled job group") || message.contains("Job is cancelled"));
        }
    } finally {
        sparkInterpreter.close();
    }
}
Also used : AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo)
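MyInterpreterOutputListener is referenced above but not shown. A plausible no-op implementation, assuming the three-callback InterpreterOutputListener interface of this Zeppelin version (the test only inspects the InterpreterResult returned by interpret(), so streamed output can be discarded):

import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterOutputListener;
import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;

public class MyInterpreterOutputListener implements InterpreterOutputListener {

    @Override
    public void onUpdateAll(InterpreterOutput out) {
        // ignore: the test asserts on interpret() results, not streamed output
    }

    @Override
    public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
        // ignore
    }

    @Override
    public void onUpdate(int index, InterpreterResultMessageOutput out) {
        // ignore
    }
}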

Example 84 with AuthenticationInfo

use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

the class LivyInterpreterIT method testSparkInterpreterDataFrame.

@Test
public void testSparkInterpreterDataFrame() {
    if (!checkPreCondition()) {
        return;
    }
    InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
    interpreterGroup.put("session_1", new ArrayList<Interpreter>());
    LivySparkInterpreter sparkInterpreter = new LivySparkInterpreter(properties);
    sparkInterpreter.setInterpreterGroup(interpreterGroup);
    interpreterGroup.get("session_1").add(sparkInterpreter);
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "livy.spark", "title", "text", authInfo, null, null, null, null, null, output);
    sparkInterpreter.open();
    LivySparkSQLInterpreter sqlInterpreter = new LivySparkSQLInterpreter(properties);
    interpreterGroup.get("session_1").add(sqlInterpreter);
    sqlInterpreter.setInterpreterGroup(interpreterGroup);
    sqlInterpreter.open();
    try {
        // detect spark version
        InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, result.message().size());
        boolean isSpark2 = isSpark2(sparkInterpreter, context);
        // test DataFrame api
        if (!isSpark2) {
            result = sparkInterpreter.interpret("val df=sqlContext.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n" + "df.collect()", context);
            assertEquals(InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
        } else {
            result = sparkInterpreter.interpret("val df=spark.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n" + "df.collect()", context);
            assertEquals(InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(1, result.message().size());
            assertTrue(result.message().get(0).getData().contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
        }
        sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
// test LivySparkSQLInterpreter, which shares the same SparkContext with LivySparkInterpreter
        result = sqlInterpreter.interpret("select * from df where col_1='hello'", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
        assertEquals("col_1\tcol_2\nhello\t20", result.message().get(0).getData());
        // double quotes
        result = sqlInterpreter.interpret("select * from df where col_1=\"hello\"", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
        assertEquals("col_1\tcol_2\nhello\t20", result.message().get(0).getData());
// only run this check on Spark 2, as Spark 1 doesn't handle this case
        if (isSpark2) {
            result = sqlInterpreter.interpret("select * from df where col_1=\"he\\\"llo\" ", context);
            assertEquals(InterpreterResult.Code.SUCCESS, result.code());
            assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
        }
        // single quotes inside attribute value
        result = sqlInterpreter.interpret("select * from df where col_1=\"he'llo\"", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
        // test sql with syntax error
        result = sqlInterpreter.interpret("select * from df2", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType());
        if (!isSpark2) {
            assertTrue(result.message().get(0).getData().contains("Table not found"));
        } else {
            assertTrue(result.message().get(0).getData().contains("Table or view not found"));
        }
    } finally {
        sparkInterpreter.close();
        sqlInterpreter.close();
    }
}
Also used : AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo)
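Both Livy tests branch on isSpark2, a helper not shown here. A hedged sketch of how such a probe can work, assuming the output of sc.version echoes the Spark version string (e.g. "res0: String = 2.1.0" on Spark 2.x):

import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterResult;

// Hypothetical version probe: evaluates sc.version through the interpreter
// and checks whether the echoed value is a 2.x version.
private boolean isSpark2(LivySparkInterpreter interpreter, InterpreterContext context) {
    InterpreterResult result = interpreter.interpret("sc.version", context);
    return result.message().get(0).getData().contains("= 2.");
}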

Example 85 with AuthenticationInfo

use of org.apache.zeppelin.user.AuthenticationInfo in project zeppelin by apache.

the class JDBCInterpreterTest method testMultiTenant.

@Test
public void testMultiTenant() throws SQLException, IOException {
    /*
     * Assume the database user is 'dbuser' and the password is 'dbpassword'.
     * The 'jdbc1' interpreter has user('dbuser')/password('dbpassword') properties;
     * the 'jdbc2' interpreter has no user/password property.
     * 'user1' has no credential information.
     * 'user2' has 'jdbc2' credential information matching the database account.
     */
    JDBCInterpreter jdbc1 = new JDBCInterpreter(getDBProperty("dbuser", "dbpassword"));
    JDBCInterpreter jdbc2 = new JDBCInterpreter(getDBProperty(null, null));
    AuthenticationInfo user1Credential = getUserAuth("user1", null, null, null);
    AuthenticationInfo user2Credential = getUserAuth("user2", "jdbc.jdbc2", "dbuser", "dbpassword");
    // user1 runs jdbc1
    jdbc1.open();
    InterpreterContext ctx1 = new InterpreterContext("", "1", "jdbc.jdbc1", "", "", user1Credential, null, null, null, null, null, null);
    jdbc1.interpret("", ctx1);
    JDBCUserConfigurations user1JDBC1Conf = jdbc1.getJDBCConfiguration("user1");
    assertEquals("dbuser", user1JDBC1Conf.getPropertyMap("default").get("user"));
    assertEquals("dbpassword", user1JDBC1Conf.getPropertyMap("default").get("password"));
    jdbc1.close();
    // user1 runs jdbc2
    jdbc2.open();
    InterpreterContext ctx2 = new InterpreterContext("", "1", "jdbc.jdbc2", "", "", user1Credential, null, null, null, null, null, null);
    jdbc2.interpret("", ctx2);
    JDBCUserConfigurations user1JDBC2Conf = jdbc2.getJDBCConfiguration("user1");
    assertNull(user1JDBC2Conf.getPropertyMap("default").get("user"));
    assertNull(user1JDBC2Conf.getPropertyMap("default").get("password"));
    jdbc2.close();
    // user2 runs jdbc1
    jdbc1.open();
    InterpreterContext ctx3 = new InterpreterContext("", "1", "jdbc.jdbc1", "", "", user2Credential, null, null, null, null, null, null);
    jdbc1.interpret("", ctx3);
    JDBCUserConfigurations user2JDBC1Conf = jdbc1.getJDBCConfiguration("user2");
    assertEquals("dbuser", user2JDBC1Conf.getPropertyMap("default").get("user"));
    assertEquals("dbpassword", user2JDBC1Conf.getPropertyMap("default").get("password"));
    jdbc1.close();
    // user2 runs jdbc2
    jdbc2.open();
    InterpreterContext ctx4 = new InterpreterContext("", "1", "jdbc.jdbc2", "", "", user2Credential, null, null, null, null, null, null);
    jdbc2.interpret("", ctx4);
    JDBCUserConfigurations user2JDBC2Conf = jdbc2.getJDBCConfiguration("user2");
    assertNull(user2JDBC2Conf.getPropertyMap("default").get("user"));
    assertNull(user2JDBC2Conf.getPropertyMap("default").get("password"));
    jdbc2.close();
}
Also used : InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo) Test(org.junit.Test)
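getUserAuth is a test helper not shown above. A plausible sketch, assuming Zeppelin's UserCredentials and UsernamePassword classes carry the per-entity database account:

import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.user.UserCredentials;
import org.apache.zeppelin.user.UsernamePassword;

// Builds the subject and, when an entity key is given (e.g. "jdbc.jdbc2"),
// attaches the database account under that key so the interpreter can find it.
private AuthenticationInfo getUserAuth(String user, String entityName, String dbUser, String dbPassword) {
    AuthenticationInfo authInfo = new AuthenticationInfo(user);
    if (entityName != null && dbUser != null && dbPassword != null) {
        UserCredentials userCredentials = new UserCredentials();
        userCredentials.putUsernamePassword(entityName, new UsernamePassword(dbUser, dbPassword));
        authInfo.setUserCredentials(userCredentials);
    }
    return authInfo;
}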

Aggregations

AuthenticationInfo (org.apache.zeppelin.user.AuthenticationInfo): 128 usages
Test (org.junit.Test): 44
HashMap (java.util.HashMap): 40
AngularObjectRegistry (org.apache.zeppelin.display.AngularObjectRegistry): 29
Properties (java.util.Properties): 28
Note (org.apache.zeppelin.notebook.Note): 27
LocalResourcePool (org.apache.zeppelin.resource.LocalResourcePool): 23
LinkedList (java.util.LinkedList): 22
GUI (org.apache.zeppelin.display.GUI): 22
Map (java.util.Map): 21
ZeppelinApi (org.apache.zeppelin.annotation.ZeppelinApi): 20
AngularObject (org.apache.zeppelin.display.AngularObject): 19
InterpreterResultMessage (org.apache.zeppelin.interpreter.InterpreterResultMessage): 19
IOException (java.io.IOException): 18
InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext): 18
InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult): 18
Paragraph (org.apache.zeppelin.notebook.Paragraph): 18
InterpreterOutput (org.apache.zeppelin.interpreter.InterpreterOutput): 16
Path (javax.ws.rs.Path): 15
HashSet (java.util.HashSet): 13