Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.
From class JDBCInterpreterTest, method testStatementPrecode.
@Test
public void testStatementPrecode() throws IOException, InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("default.driver", "org.h2.Driver");
  properties.setProperty("default.url", getJdbcConnection());
  properties.setProperty("default.user", "");
  properties.setProperty("default.password", "");
  properties.setProperty(DEFAULT_STATEMENT_PRECODE, "set @v='statement'");
  JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
  jdbcInterpreter.open();
  String sqlQuery = "select @v";
  InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);
  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
  assertEquals("@V\nstatement\n", resultMessages.get(0).getData());
}
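The precode configured via DEFAULT_STATEMENT_PRECODE runs on the same connection as the paragraph's query, which is why the session variable @v is still visible to "select @v". Below is a minimal sketch of that two-step pattern against plain JDBC, assuming an in-memory H2 database on the classpath; the class name PrecodeSketch and the URL are illustrative, not taken from the test.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class PrecodeSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test", "", "");
         Statement stmt = conn.createStatement()) {
      // the precode statement: sets an H2 session variable
      stmt.execute("set @v='statement'");
      // the paragraph query: reads the variable back on the same connection
      try (ResultSet rs = stmt.executeQuery("select @v")) {
        while (rs.next()) {
          System.out.println(rs.getString(1)); // prints "statement"
        }
      }
    }
  }
}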
Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.
From class JDBCInterpreterTest, method testSelectMultipleQueries.
@Test
public void testSelectMultipleQueries() throws IOException, InterpreterException {
  Properties properties = new Properties();
  properties.setProperty("common.max_count", "1000");
  properties.setProperty("common.max_retry", "3");
  properties.setProperty("default.driver", "org.h2.Driver");
  properties.setProperty("default.url", getJdbcConnection());
  properties.setProperty("default.user", "");
  properties.setProperty("default.password", "");
  properties.setProperty("default.splitQueries", "true");
  JDBCInterpreter t = new JDBCInterpreter(properties);
  t.open();
  String sqlQuery = "select * from test_table;" +
      "select * from test_table WHERE ID = ';';";
  InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(2, resultMessages.size());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
  assertEquals("ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n", resultMessages.get(0).getData());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(1).getType());
  assertEquals("ID\tNAME\n", resultMessages.get(1).getData());
}
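With default.splitQueries enabled, each statement yields its own TABLE message whose payload is tab-separated text with a header row, as the assertions above show. Here is a self-contained sketch of pulling such a payload apart, reusing the first expected string from the test; TableMessageSketch is a hypothetical name.

import java.util.Arrays;

public class TableMessageSketch {
  public static void main(String[] args) {
    String data = "ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n";
    String[] rows = data.split("\n");
    // first row is the header, remaining rows are data
    System.out.println("columns: " + Arrays.toString(rows[0].split("\t")));
    for (int i = 1; i < rows.length; i++) {
      System.out.println("row " + i + ": " + Arrays.toString(rows[i].split("\t")));
    }
  }
}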
Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.
From class JDBCInterpreterTest, method testQueryWithEscapedCharacters.
@Test
public void testQueryWithEscapedCharacters() throws IOException, InterpreterException {
  String sqlQuery = "select '\\n', ';';" +
      "select replace('A\\;B', '\\', 'text');" +
      "select '\\', ';';" +
      "select '''', ';'";
  Properties properties = new Properties();
  properties.setProperty("common.max_count", "1000");
  properties.setProperty("common.max_retry", "3");
  properties.setProperty("default.driver", "org.h2.Driver");
  properties.setProperty("default.url", getJdbcConnection());
  properties.setProperty("default.user", "");
  properties.setProperty("default.password", "");
  properties.setProperty("default.splitQueries", "true");
  JDBCInterpreter t = new JDBCInterpreter(properties);
  t.open();
  InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
  List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(1).getType());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(2).getType());
  assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(3).getType());
  assertEquals("'\\n'\t';'\n\\n\t;\n", resultMessages.get(0).getData());
  assertEquals("'Atext;B'\nAtext;B\n", resultMessages.get(1).getData());
  assertEquals("'\\'\t';'\n\\\t;\n", resultMessages.get(2).getData());
  assertEquals("''''\t';'\n'\t;\n", resultMessages.get(3).getData());
}
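The point of this test is that the query splitter must not break on semicolons inside single-quoted literals, including doubled quotes such as ''''. Below is a simplified, self-contained illustration of that rule; it is not Zeppelin's actual splitter, which also has to handle comments and backslash escapes.

import java.util.ArrayList;
import java.util.List;

public class SplitSketch {
  // Split a script on semicolons, but never inside a single-quoted literal.
  static List<String> split(String script) {
    List<String> statements = new ArrayList<>();
    StringBuilder current = new StringBuilder();
    boolean inQuote = false;
    for (char c : script.toCharArray()) {
      if (c == '\'') {
        inQuote = !inQuote; // a doubled '' simply toggles twice
      }
      if (c == ';' && !inQuote) {
        statements.add(current.toString());
        current.setLength(0);
      } else {
        current.append(c);
      }
    }
    if (current.length() > 0) {
      statements.add(current.toString());
    }
    return statements;
  }

  public static void main(String[] args) {
    // prints [select * from test_table, select * from test_table WHERE ID = ';']
    System.out.println(split("select * from test_table;select * from test_table WHERE ID = ';';"));
  }
}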
Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.
From class IPySparkInterpreterTest, method testPySpark.
public static void testPySpark(final Interpreter interpreter,
    RemoteInterpreterEventClient mockIntpEventClient)
    throws InterpreterException, IOException, InterruptedException {
  reset(mockIntpEventClient);
  // rdd
  InterpreterContext context = createInterpreterContext(mockIntpEventClient);
  InterpreterResult result = interpreter.interpret("sc.version", context);
  Thread.sleep(100);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  String sparkVersion = context.out.toInterpreterResultMessage().get(0).getData();
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("sc.range(1,10).sum()", context);
  Thread.sleep(100);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  List<InterpreterResultMessage> interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("45", interpreterResultMessages.get(0).getData().trim());
  // spark job url is sent
  verify(mockIntpEventClient).onParaInfosReceived(any(Map.class));
  // spark sql
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.show()", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("+---+---+\n" +
      "| _1| _2|\n" +
      "+---+---+\n" +
      "|  1|  a|\n" +
      "|  2|  b|\n" +
      "+---+---+", interpreterResultMessages.get(0).getData().trim());
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("z.show(df)", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("_1\t_2\n" +
      "1\ta\n" +
      "2\tb", interpreterResultMessages.get(0).getData().trim());
  // spark sql python API bindings
  result = interpreter.interpret("df.explain()", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  // cancel
  if (interpreter instanceof IPySparkInterpreter) {
    final InterpreterContext context2 = createInterpreterContext(mockIntpEventClient);
    Thread thread = new Thread() {
      @Override
      public void run() {
        InterpreterResult result = null;
        try {
          result = interpreter.interpret(
              "import time\nsc.range(1,10).foreach(lambda x: time.sleep(1))", context2);
        } catch (InterpreterException e) {
          e.printStackTrace();
        }
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        List<InterpreterResultMessage> interpreterResultMessages = null;
        try {
          interpreterResultMessages = context2.out.toInterpreterResultMessage();
          assertTrue(interpreterResultMessages.get(0).getData().contains("KeyboardInterrupt"));
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
    };
    thread.start();
    // sleep 1 second to wait for the spark job to start
    Thread.sleep(1000);
    interpreter.cancel(context);
    thread.join();
  }
  // completions
  List<InterpreterCompletion> completions =
      interpreter.completion("sc.ran", 6, createInterpreterContext(mockIntpEventClient));
  assertEquals(1, completions.size());
  assertEquals("range", completions.get(0).getValue());
  completions = interpreter.completion("sc.", 3, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("range", "range", "")));
  completions = interpreter.completion("1+1\nsc.", 7, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("range", "range", "")));
  completions = interpreter.completion("s", 1, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("sc", "sc", "")));
  // python call java via py4j
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("sc._jvm.java.lang.System.out.println(\"hello world\")", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals(1, interpreterResultMessages.size());
  assertEquals("hello world\n", interpreterResultMessages.get(0).getData());
  // pyspark streaming TODO(zjffdu) disable pyspark streaming test temporarily
  context = createInterpreterContext(mockIntpEventClient);
  // result = interpreter.interpret(
  //     "from pyspark.streaming import StreamingContext\n" +
  //     "import time\n" +
  //     "ssc = StreamingContext(sc, 1)\n" +
  //     "rddQueue = []\n" +
  //     "for i in range(5):\n" +
  //     "  rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]\n" +
  //     "inputStream = ssc.queueStream(rddQueue)\n" +
  //     "mappedStream = inputStream.map(lambda x: (x % 10, 1))\n" +
  //     "reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)\n" +
  //     "reducedStream.pprint()\n" +
  //     "ssc.start()\n" +
  //     "time.sleep(6)\n" +
  //     "ssc.stop(stopSparkContext=False, stopGraceFully=True)", context);
  // Thread.sleep(1000);
  // assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  // interpreterResultMessages = context.out.toInterpreterResultMessage();
  // assertEquals(1, interpreterResultMessages.size());
  // assertTrue(interpreterResultMessages.get(0).getData().contains("(0, 100)"));
}
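The cancel block above follows a common test pattern: run the blocking interpret() call on a worker thread, give the job a moment to start, cancel from the main thread, then join. Here is a self-contained sketch of just that thread choreography, with the interpreter simulated by a latch; CancelSketch and the latch are stand-ins, not Zeppelin API.

import java.util.concurrent.CountDownLatch;

public class CancelSketch {
  public static void main(String[] args) throws InterruptedException {
    CountDownLatch cancelled = new CountDownLatch(1);
    Thread worker = new Thread(() -> {
      try {
        // stands in for the blocking interpreter.interpret(...) call
        cancelled.await();
        System.out.println("call returned after cancellation; the test then asserts Code.ERROR");
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    });
    worker.start();
    Thread.sleep(1000);    // as in the test: give the job time to start
    cancelled.countDown(); // stands in for interpreter.cancel(context)
    worker.join();
  }
}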
Use of org.apache.zeppelin.interpreter.InterpreterResultMessage in project zeppelin by apache.
From class SparkIRInterpreterTest, method testSparkRInterpreter.
@Test
public void testSparkRInterpreter() throws InterpreterException, InterruptedException, IOException {
  InterpreterContext context = getInterpreterContext();
  InterpreterResult result = interpreter.interpret("1+1", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  List<InterpreterResultMessage> interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertTrue(interpreterResultMessages.get(0).getData().contains("2"));
  context = getInterpreterContext();
  result = interpreter.interpret("sparkR.version()", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  if (interpreterResultMessages.get(0).getData().contains("2.2")) {
    ENABLE_GOOGLEVIS_TEST = false;
  }
  context = getInterpreterContext();
  result = interpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", context);
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
  assertTrue(interpreterResultMessages.get(0).getData().contains(">eruptions</th>"));
  // spark job url is sent
  verify(mockRemoteIntpEventClient, atLeastOnce()).onParaInfosReceived(any(Map.class));
  // cancel
  final InterpreterContext context2 = getInterpreterContext();
  Thread thread = new Thread() {
    @Override
    public void run() {
      try {
        InterpreterResult result = interpreter.interpret("ldf <- dapplyCollect(\n" +
            " df,\n" +
            " function(x) {\n" +
            " Sys.sleep(3)\n" +
            " x <- cbind(x, \"waiting_secs\" = x$waiting * 60)\n" +
            " })\n" +
            "head(ldf, 3)", context2);
        assertTrue(result.message().get(0).getData().contains("cancelled"));
      } catch (InterpreterException e) {
        fail("Should not throw InterpreterException");
      }
    }
  };
  thread.setName("Cancel-Thread");
  thread.start();
  Thread.sleep(1000);
  interpreter.cancel(context2);
}
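Both Spark tests confirm that a job URL reached the event client using Mockito's verify with atLeastOnce(). Here is a self-contained sketch of that verification style; EventClient is a stand-in interface, not Zeppelin's RemoteInterpreterEventClient.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.Collections;
import java.util.Map;

public class VerifySketch {
  interface EventClient {
    void onParaInfosReceived(Map<String, String> infos);
  }

  public static void main(String[] args) {
    EventClient client = mock(EventClient.class);
    // production code would call this when a Spark job URL becomes known
    client.onParaInfosReceived(Collections.singletonMap("jobUrl", "http://example/jobs/0"));
    // the assertion style used by the tests above
    verify(client, atLeastOnce()).onParaInfosReceived(any(Map.class));
  }
}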