Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
The class MockHDFSFileInterpreter, method test.
@Test
public void test() {
  HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
  t.open();

  // We have info for /, /user, /tmp, /mr-history/done
  // Ensure that:
  // 1. ls -l works
  // 2. paths (. and ..) are correctly handled
  // 3. flags and arguments to commands are correctly handled
  InterpreterResult result1 = t.interpret("ls -l /", null);
  assertEquals(InterpreterResult.Type.TEXT, result1.message().get(0).getType());
  InterpreterResult result2 = t.interpret("ls -l /./user/..", null);
  assertEquals(InterpreterResult.Type.TEXT, result2.message().get(0).getType());
  assertEquals(result1.message().get(0).getData(), result2.message().get(0).getData());

  // Ensure cd works and a subsequent ls uses the current directory
  InterpreterResult result3 = t.interpret("cd user", null);
  assertEquals(InterpreterResult.Type.TEXT, result3.message().get(0).getType());
  assertEquals("OK", result3.message().get(0).getData());
  InterpreterResult result4 = t.interpret("ls", null);
  assertEquals(InterpreterResult.Type.TEXT, result4.message().get(0).getType());
  InterpreterResult result5 = t.interpret("ls /user", null);
  assertEquals(InterpreterResult.Type.TEXT, result5.message().get(0).getType());
  assertEquals(result4.message().get(0).getData(), result5.message().get(0).getData());

  // Ensure pwd works correctly
  InterpreterResult result6 = t.interpret("pwd", null);
  assertEquals(InterpreterResult.Type.TEXT, result6.message().get(0).getType());
  assertEquals("/user", result6.message().get(0).getData());

  // Move a couple of levels and check we're in the right place
  InterpreterResult result7 = t.interpret("cd ../mr-history/done", null);
  assertEquals(InterpreterResult.Type.TEXT, result7.message().get(0).getType());
  assertEquals("OK", result7.message().get(0).getData());
  InterpreterResult result8 = t.interpret("ls -l ", null);
  assertEquals(InterpreterResult.Type.TEXT, result8.message().get(0).getType());
  InterpreterResult result9 = t.interpret("ls -l /mr-history/done", null);
  assertEquals(InterpreterResult.Type.TEXT, result9.message().get(0).getType());
  assertEquals(result8.message().get(0).getData(), result9.message().get(0).getData());

  InterpreterResult result10 = t.interpret("cd ../..", null);
  assertEquals(InterpreterResult.Type.TEXT, result10.message().get(0).getType());
  assertEquals("OK", result10.message().get(0).getData());
  InterpreterResult result11 = t.interpret("ls -l ", null);
  assertEquals(InterpreterResult.Type.TEXT, result11.message().get(0).getType());
  // We should be back to the first listing after all this navigation
  assertEquals(result1.message().get(0).getData(), result11.message().get(0).getData());

  // Auto-completion test
  List<InterpreterCompletion> expectedResultOne = Arrays.asList(
      new InterpreterCompletion("ls", "ls", CompletionType.command.name()));
  List<InterpreterCompletion> expectedResultTwo = Arrays.asList(
      new InterpreterCompletion("pwd", "pwd", CompletionType.command.name()));
  List<InterpreterCompletion> resultOne = t.completion("l", 0, null);
  List<InterpreterCompletion> resultTwo = t.completion("p", 0, null);
  assertEquals(expectedResultOne, resultOne);
  assertEquals(expectedResultTwo, resultTwo);
  t.close();
}
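The auto-completion assertions at the end expect simple prefix matching over the interpreter's command set. Below is a minimal sketch of that kind of matching, assuming a hypothetical command table; it is illustrative only, not the actual HDFSFileInterpreter implementation:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.zeppelin.completer.CompletionType;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;

public class CommandCompletionSketch {
  // Hypothetical command table mirroring the commands the test exercises
  private static final List<String> COMMANDS = Arrays.asList("ls", "cd", "pwd");

  // Return one InterpreterCompletion per command starting with the typed prefix,
  // tagged with the "command" completion type, matching the test's expected values
  public static List<InterpreterCompletion> completeCommand(String prefix) {
    List<InterpreterCompletion> matches = new ArrayList<>();
    for (String cmd : COMMANDS) {
      if (cmd.startsWith(prefix)) {
        matches.add(new InterpreterCompletion(cmd, cmd, CompletionType.command.name()));
      }
    }
    return matches;
  }
}

With this table, completeCommand("l") yields only the "ls" completion and completeCommand("p") only "pwd", which is exactly what expectedResultOne and expectedResultTwo assert above.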
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
The class KotlinCompleter, method completion.
public List<InterpreterCompletion> completion(String buf, int cursor,
    InterpreterContext interpreterContext) {
  // With no REPL context available, fall back to plain keyword completions
  if (ctx == null) {
    return new ArrayList<>(keywords);
  }
  List<InterpreterCompletion> result = new ArrayList<>();
  // Offer every variable currently bound in the REPL, with its shortened type as meta
  for (KotlinVariableInfo var : ctx.getVars()) {
    result.add(new InterpreterCompletion(var.getName(), var.getName(), shorten(var.getType())));
  }
  // Offer every defined function, with its signature as meta
  for (KotlinFunctionInfo fun : ctx.getFunctions()) {
    result.add(new InterpreterCompletion(fun.getName(), fun.getName(), fun.toString(true)));
  }
  result.addAll(keywords);
  return result;
}
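Here keywords is the completer's precomputed list of language-keyword completions, always appended (or returned alone when no REPL context exists). A hedged sketch of how such a list could be built, assuming a CompletionType.keyword enum constant and an illustrative keyword subset:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.zeppelin.completer.CompletionType;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;

public class KeywordListSketch {
  // Illustrative subset; the real completer presumably covers the full Kotlin keyword set
  static List<InterpreterCompletion> buildKeywords() {
    List<InterpreterCompletion> keywords = new ArrayList<>();
    for (String kw : Arrays.asList("val", "var", "fun", "class", "object", "when")) {
      keywords.add(new InterpreterCompletion(kw, kw, CompletionType.keyword.name()));
    }
    return keywords;
  }
}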
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
The class IPySparkInterpreterTest, method testPySpark.
public static void testPySpark(final Interpreter interpreter,
    RemoteInterpreterEventClient mockIntpEventClient)
    throws InterpreterException, IOException, InterruptedException {
  reset(mockIntpEventClient);

  // rdd
  InterpreterContext context = createInterpreterContext(mockIntpEventClient);
  InterpreterResult result = interpreter.interpret("sc.version", context);
  Thread.sleep(100);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  String sparkVersion = context.out.toInterpreterResultMessage().get(0).getData();

  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("sc.range(1,10).sum()", context);
  Thread.sleep(100);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  List<InterpreterResultMessage> interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("45", interpreterResultMessages.get(0).getData().trim());
  // The Spark job url should have been sent to the event client
  verify(mockIntpEventClient).onParaInfosReceived(any(Map.class));

  // spark sql
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.show()", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("+---+---+\n" +
      "| _1| _2|\n" +
      "+---+---+\n" +
      "|  1|  a|\n" +
      "|  2|  b|\n" +
      "+---+---+", interpreterResultMessages.get(0).getData().trim());

  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("z.show(df)", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals("_1\t_2\n" + "1\ta\n" + "2\tb", interpreterResultMessages.get(0).getData().trim());
  // spark sql python API bindings
  result = interpreter.interpret("df.explain()", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());

  // cancel
  if (interpreter instanceof IPySparkInterpreter) {
    final InterpreterContext context2 = createInterpreterContext(mockIntpEventClient);
    Thread thread = new Thread() {
      @Override
      public void run() {
        InterpreterResult result = null;
        try {
          result = interpreter.interpret(
              "import time\nsc.range(1,10).foreach(lambda x: time.sleep(1))", context2);
        } catch (InterpreterException e) {
          e.printStackTrace();
        }
        // The cancelled job should surface as an error mentioning KeyboardInterrupt
        assertNotNull(result);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        List<InterpreterResultMessage> interpreterResultMessages = null;
        try {
          interpreterResultMessages = context2.out.toInterpreterResultMessage();
          assertTrue(interpreterResultMessages.get(0).getData().contains("KeyboardInterrupt"));
        } catch (IOException e) {
          e.printStackTrace();
        }
      }
    };
    thread.start();
    // Sleep one second to give the Spark job time to start before cancelling
    Thread.sleep(1000);
    interpreter.cancel(context);
    thread.join();
  }
  // completions
  List<InterpreterCompletion> completions =
      interpreter.completion("sc.ran", 6, createInterpreterContext(mockIntpEventClient));
  assertEquals(1, completions.size());
  assertEquals("range", completions.get(0).getValue());

  completions = interpreter.completion("sc.", 3, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("range", "range", "")));

  completions = interpreter.completion("1+1\nsc.", 7, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("range", "range", "")));

  completions = interpreter.completion("s", 1, createInterpreterContext(mockIntpEventClient));
  assertTrue(completions.size() > 0);
  assertTrue(completions.contains(new InterpreterCompletion("sc", "sc", "")));

  // python call java via py4j
  context = createInterpreterContext(mockIntpEventClient);
  result = interpreter.interpret("sc._jvm.java.lang.System.out.println(\"hello world\")", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  interpreterResultMessages = context.out.toInterpreterResultMessage();
  assertEquals(1, interpreterResultMessages.size());
  assertEquals("hello world\n", interpreterResultMessages.get(0).getData());

  // pyspark streaming - TODO(zjffdu): temporarily disabled
  context = createInterpreterContext(mockIntpEventClient);
  // result = interpreter.interpret(
  //     "from pyspark.streaming import StreamingContext\n" +
  //     "import time\n" +
  //     "ssc = StreamingContext(sc, 1)\n" +
  //     "rddQueue = []\n" +
  //     "for i in range(5):\n" +
  //     "    rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]\n" +
  //     "inputStream = ssc.queueStream(rddQueue)\n" +
  //     "mappedStream = inputStream.map(lambda x: (x % 10, 1))\n" +
  //     "reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)\n" +
  //     "reducedStream.pprint()\n" +
  //     "ssc.start()\n" +
  //     "time.sleep(6)\n" +
  //     "ssc.stop(stopSparkContext=False, stopGraceFully=True)", context);
  // Thread.sleep(1000);
  // assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  // interpreterResultMessages = context.out.toInterpreterResultMessage();
  // assertEquals(1, interpreterResultMessages.size());
  // assertTrue(interpreterResultMessages.get(0).getData().contains("(0, 100)"));
}
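createInterpreterContext is a test helper not shown here. A hedged sketch of what it plausibly does, using Zeppelin's InterpreterContext builder; the note/paragraph ids and the exact fields set are assumptions:

import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient;

// Assumed shape of the helper: a fresh context wired to the mocked event client
static InterpreterContext createInterpreterContext(RemoteInterpreterEventClient mockIntpEventClient) {
  return InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setInterpreterOut(new InterpreterOutput(null))
      .setIntpEventClient(mockIntpEventClient)
      .build();
}

Each interpret call above gets a fresh context so that context.out only carries that paragraph's output.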
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
The class UniverseCompleter, method addCompletions.
private void addCompletions(List<InterpreterCompletion> interpreterCompletions,
    List<CharSequence> candidates, String meta) {
  for (CharSequence candidate : candidates) {
    String value;
    if (meta.equalsIgnoreCase(CompletionType.universe.name())) {
      // Universe candidates get END_NAME and a trailing semicolon appended
      // to the inserted value; the displayed name stays the raw candidate
      value = String.format("%s%s;\n", candidate.toString(), END_NAME);
    } else {
      value = candidate.toString();
    }
    interpreterCompletions.add(new InterpreterCompletion(candidate.toString(), value, meta));
  }
}
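A short usage sketch: pushing two candidate names through the same logic shows how universe-typed candidates get the suffix appended while the name stays raw. The candidate strings and the END_NAME value used here are assumptions for illustration:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.zeppelin.completer.CompletionType;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;

public class UniverseCompletionDemo {
  // Stand-in for UniverseCompleter.END_NAME; the real constant's value is assumed
  private static final String END_NAME = "]";

  public static void main(String[] args) {
    List<InterpreterCompletion> completions = new ArrayList<>();
    List<CharSequence> candidates = Arrays.asList("Sales Universe", "HR Universe");
    String meta = CompletionType.universe.name();
    for (CharSequence candidate : candidates) {
      // Same branch the completer takes for universe candidates
      String value = String.format("%s%s;\n", candidate, END_NAME);
      completions.add(new InterpreterCompletion(candidate.toString(), value, meta));
    }
    // e.g. name "Sales Universe" -> inserted value "Sales Universe];\n"
    completions.forEach(c -> System.out.println(c.getName() + " -> " + c.getValue()));
  }
}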
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
The class FlinkInterpreterTest, method testCompletion.
@Test
public void testCompletion() throws InterpreterException {
  InterpreterContext context = getInterpreterContext();
  InterpreterResult result = interpreter.interpret("val a=\"hello world\"", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());

  // Expect completions both for a user-defined value and for the built-in batch environment
  List<InterpreterCompletion> completions = interpreter.completion("a.", 2, getInterpreterContext());
  assertTrue(completions.size() > 0);

  completions = interpreter.completion("benv.", 5, getInterpreterContext());
  assertTrue(completions.size() > 0);
}
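If the test needed to be stricter, it could pin down a specific member; the member name below is an assumption about what the Scala completer returns for a String value, so this is a hypothetical follow-up rather than part of the actual test:

// Hypothetical stronger check: a String value should offer toString among its members
List<InterpreterCompletion> stringCompletions = interpreter.completion("a.", 2, getInterpreterContext());
assertTrue(stringCompletions.stream().anyMatch(c -> "toString".equals(c.getName())));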