
Example 6 with InterpreterResultMessage

Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.

Class FlinkStreamSqlInterpreterTest, method testAppendStreamTableApi.

@Test
public void testAppendStreamTableApi() throws IOException, InterpreterException {
    String initStreamScalaScript = getInitStreamScript(100);
    InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript, getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    InterpreterContext context = getInterpreterContext();
    String code = "val table = stenv.sqlQuery(\"select TUMBLE_START(rowtime, INTERVAL '5' SECOND) as " +
        "start_time, url, count(1) as pv from log group by " +
        "TUMBLE(rowtime, INTERVAL '5' SECOND), url\")\n" +
        "z.show(table, streamType=\"append\")";
    result = flinkInterpreter.interpret(code, context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("url\tpv\n"));
}
Also used : InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage) Test(org.junit.Test)
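The final two assertions recur throughout these tests, so they could be wrapped in a small helper. Below is a sketch of a hypothetical helper (not part of the Zeppelin test suite) that uses only the InterpreterResultMessage accessors shown above.

// Hypothetical helper (illustrative, not in the Zeppelin code base):
// asserts that the first result message is a TABLE whose data contains
// the expected fragment, e.g. the header row "url\tpv\n".
private static void assertFirstMessageIsTableContaining(
        List<InterpreterResultMessage> messages, String expectedFragment) {
    assertEquals(InterpreterResult.Type.TABLE, messages.get(0).getType());
    assertTrue(messages.toString(), messages.get(0).getData().contains(expectedFragment));
}

With it, the tail of the test above would read assertFirstMessageIsTableContaining(resultMessages, "url\tpv\n").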

Example 7 with InterpreterResultMessage

Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.

Class FlinkStreamSqlInterpreterTest, method testInsertInto.

@Test
public void testInsertInto() throws InterpreterException, IOException {
    hiveShell.execute("create table source_table (id int, name string)");
    File destDir = Files.createTempDirectory("flink_test").toFile();
    FileUtils.deleteDirectory(destDir);
    InterpreterResult result = sqlInterpreter.interpret("CREATE TABLE dest_table (\n" + "id int,\n" + "name string" + ") WITH (\n" + "'format.field-delimiter'=',',\n" + "'connector.type'='filesystem',\n" + "'format.derive-schema'='true',\n" + "'connector.path'='" + destDir.getAbsolutePath() + "',\n" + "'format.type'='csv'\n" + ");", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = sqlInterpreter.interpret("insert into dest_table select * from source_table", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    // After the insert, `show tables` should list only the explicitly created tables
    // (dest_table and source_table); temporary tables generated by the queries should not appear.
    InterpreterContext context = getInterpreterContext();
    result = sqlInterpreter.interpret("show tables", context);
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, resultMessages.size());
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertEquals(resultMessages.get(0).toString(), "table\ndest_table\nsource_table\n", resultMessages.get(0).getData());
}
Also used : InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) File(java.io.File) InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage) Test(org.junit.Test)
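The last assertion also documents the shape of a TABLE message: the data is newline-separated, with the first line as the header and one row per subsequent line. A minimal sketch of reading it back (illustrative; the literal values come from the assertion above):

// Illustrative only: parse the TABLE data asserted above.
String data = resultMessages.get(0).getData();   // "table\ndest_table\nsource_table\n"
String[] rows = data.split("\n");
assertEquals("table", rows[0]);        // header row
assertEquals("dest_table", rows[1]);   // first table name
assertEquals("source_table", rows[2]); // second table name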

Example 8 with InterpreterResultMessage

Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.

Class FlinkStreamSqlInterpreterTest, method testResumeStreamSqlFromExistSavePointPath.

// TODO(zjffdu) flaky test
// @Test
public void testResumeStreamSqlFromExistSavePointPath() throws IOException, InterpreterException, InterruptedException, TimeoutException {
    String initStreamScalaScript = getInitStreamScript(2000);
    InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript, getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    File savePointDir = FileUtils.getTempDirectory();
    final Waiter waiter = new Waiter();
    Thread thread = new Thread(() -> {
        try {
            InterpreterContext context = getInterpreterContext();
            context.getLocalProperties().put("savePointDir", savePointDir.getAbsolutePath());
            context.getLocalProperties().put("parallelism", "1");
            context.getLocalProperties().put("maxParallelism", "10");
            InterpreterResult result2 = sqlInterpreter.interpret("select url, count(1) as pv from " + "log group by url", context);
            waiter.assertTrue(context.out.toString().contains("url\tpv\n"));
            waiter.assertEquals(InterpreterResult.Code.SUCCESS, result2.code());
        } catch (Exception e) {
            e.printStackTrace();
            waiter.fail("Should not fail here");
        }
        waiter.resume();
    });
    thread.start();
    // the streaming job will run for 20 seconds. check init_stream.scala
    // sleep 10 seconds to make sure the job is started but not finished
    Thread.sleep(10 * 1000);
    InterpreterContext context = getInterpreterContext();
    context.getLocalProperties().put("savePointDir", savePointDir.getAbsolutePath());
    context.getLocalProperties().put("parallelism", "2");
    context.getLocalProperties().put("maxParallelism", "10");
    sqlInterpreter.cancel(context);
    waiter.await(10 * 1000);
    // Get an existing savepoint path from the temp directory;
    // if more than one savepoint directory exists, take the first.
    String[] allSavepointPath = savePointDir.list((dir, name) -> name.startsWith("savepoint"));
    assertTrue(allSavepointPath.length > 0);
    String savepointPath = savePointDir.getAbsolutePath().concat(File.separator).concat(allSavepointPath[0]);
    // Resume the job from the existing savepoint path.
    context.getConfig().put(JobManager.SAVEPOINT_PATH, savepointPath);
    result = sqlInterpreter.interpret("select url, count(1) as pv from " + "log group by url", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("url\tpv\n"));
}
Also used : InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) Waiter(net.jodah.concurrentunit.Waiter) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) File(java.io.File) InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage) TimeoutException(java.util.concurrent.TimeoutException) IOException(java.io.IOException) InterpreterException(org.apache.zeppelin.interpreter.InterpreterException)
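The coordination between the two threads relies on net.jodah.concurrentunit.Waiter: assertions made on the background thread are recorded by the Waiter, resume() signals completion, and await() on the main thread rethrows any recorded failure or times out. A minimal sketch of that pattern in isolation (illustrative, using only the Waiter calls that appear above; the enclosing test method must declare the same checked exceptions as the test does):

// Illustrative only: the Waiter pattern used above, reduced to its essentials.
Waiter waiter = new Waiter();
new Thread(() -> {
    try {
        // ... run the long-lived interpret() call and assert on its output ...
        waiter.assertTrue(true);
    } catch (Exception e) {
        waiter.fail("Should not fail here");
    }
    waiter.resume();           // signal the main thread
}).start();
waiter.await(10 * 1000);       // rethrows any failure recorded by the background thread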

Example 9 with InterpreterResultMessage

Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.

Class FlinkStreamSqlInterpreterTest, method testSingleStreamSql.

@Test
public void testSingleStreamSql() throws IOException, InterpreterException {
    String initStreamScalaScript = getInitStreamScript(100);
    InterpreterContext context = getInterpreterContext();
    InterpreterResult result = flinkInterpreter.interpret(initStreamScalaScript, context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    context = getInterpreterContext();
    context.getLocalProperties().put("type", "single");
    context.getLocalProperties().put("template", "Total Count: {1} <br/> {0}");
    result = sqlInterpreter.interpret("select max(rowtime), count(1) " + "from log", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Type.ANGULAR, resultMessages.get(0).getType());
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("Total Count"));
}
Also used : InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage) Test(org.junit.Test)

Example 10 with InterpreterResultMessage

Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.

Class SqlInterpreterTest, method testCatalog.

@Test
public void testCatalog() throws IOException, InterpreterException {
    InterpreterContext context = getInterpreterContext();
    // CREATE CATALOG
    InterpreterResult result = sqlInterpreter.interpret("CREATE CATALOG test_catalog \n" + "WITH( \n" + "'type'='generic_in_memory' \n" + ");", context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("Catalog has been created."));
    // USE CATALOG & SHOW DATABASES;
    context = getInterpreterContext();
    result = sqlInterpreter.interpret("USE CATALOG test_catalog ;\n" + "SHOW DATABASES;", context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    resultMessages = context.out.toInterpreterResultMessage();
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("default"));
    // DROP CATALOG
    context = getInterpreterContext();
    result = sqlInterpreter.interpret("DROP CATALOG test_catalog ;\n", context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    resultMessages = context.out.toInterpreterResultMessage();
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("Catalog has been dropped."));
    // SHOW CATALOGS. Since test_catalog was dropped above, it should no longer be listed.
    context = getInterpreterContext();
    result = sqlInterpreter.interpret("SHOW CATALOGS ;\n", context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    resultMessages = context.out.toInterpreterResultMessage();
    assertTrue(resultMessages.toString(), resultMessages.get(0).getData().contains("default_catalog"));
    assertFalse(resultMessages.toString(), resultMessages.get(0).getData().contains("test_catalog"));
}
Also used : InterpreterResult(org.apache.zeppelin.interpreter.InterpreterResult) InterpreterContext(org.apache.zeppelin.interpreter.InterpreterContext) InterpreterResultMessage(org.apache.zeppelin.interpreter.InterpreterResultMessage) Test(org.junit.Test)
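Each block of testCatalog repeats the same four steps: get a fresh context, interpret one statement, assert SUCCESS, and inspect the first message. A hypothetical helper (illustrative, not in the Zeppelin code base) that captures that pattern:

// Hypothetical helper: runs one statement in a fresh context and returns the
// data of the first result message, which is what each assertion above checks.
private String interpretAndGetFirstMessage(String statement)
        throws IOException, InterpreterException {
    InterpreterContext context = getInterpreterContext();
    InterpreterResult result = sqlInterpreter.interpret(statement, context);
    assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
    return context.out.toInterpreterResultMessage().get(0).getData();
}

With it, the DROP CATALOG step would reduce to assertTrue(interpretAndGetFirstMessage("DROP CATALOG test_catalog ;\n").contains("Catalog has been dropped.")).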

Aggregations

InterpreterResultMessage (org.apache.zeppelin.interpreter.InterpreterResultMessage) 80
InterpreterResult (org.apache.zeppelin.interpreter.InterpreterResult) 78
Test (org.junit.Test) 60
InterpreterContext (org.apache.zeppelin.interpreter.InterpreterContext) 55
Properties (java.util.Properties) 17
InterpreterException (org.apache.zeppelin.interpreter.InterpreterException) 15
IOException (java.io.IOException) 13
File (java.io.File) 8
TimeoutException (java.util.concurrent.TimeoutException) 7
Waiter (net.jodah.concurrentunit.Waiter) 7
AuthenticationInfo (org.apache.zeppelin.user.AuthenticationInfo) 6
Test (org.junit.jupiter.api.Test) 6
UnirestException (com.mashape.unirest.http.exceptions.UnirestException) 3
HashMap (java.util.HashMap) 3
Matcher (java.util.regex.Matcher) 3
Pattern (java.util.regex.Pattern) 3
Map (java.util.Map) 2
AngularObjectRegistry (org.apache.zeppelin.display.AngularObjectRegistry) 2
CheckBox (org.apache.zeppelin.display.ui.CheckBox) 2
Select (org.apache.zeppelin.display.ui.Select) 2