Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
From the class NotebookServer, method completion:
private void completion(NotebookSocket conn, ServiceContext context, Message fromMessage)
    throws IOException {
  String noteId = connectionManager.getAssociatedNoteId(conn);
  String paragraphId = (String) fromMessage.get("id");
  String buffer = (String) fromMessage.get("buf");
  // The cursor arrives as a JSON number (deserialized as a Double), hence the parse-then-cast.
  int cursor = (int) Double.parseDouble(fromMessage.get("cursor").toString());
  getNotebookService().completion(noteId, paragraphId, buffer, cursor, context,
      new WebSocketServiceCallback<List<InterpreterCompletion>>(conn) {

        @Override
        public void onSuccess(List<InterpreterCompletion> completions, ServiceContext context)
            throws IOException {
          super.onSuccess(completions, context);
          Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
          resp.put("completions", completions);
          conn.send(serializeMessage(resp));
        }

        @Override
        public void onFailure(Exception ex, ServiceContext context) throws IOException {
          super.onFailure(ex, context);
          // On failure, reply with an empty completion list rather than an error message.
          Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
          resp.put("completions", new ArrayList<>());
          conn.send(serializeMessage(resp));
        }
      });
}
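The handler above expects the incoming message to carry "id", "buf", and "cursor" fields. A minimal sketch of building the matching request, assuming the chaining put() builder on Message as used above (OP.COMPLETION is the opcode checked in NotebookService further down; all field values here are hypothetical):

// Sketch: the request message completion() expects to receive.
Message request = new Message(OP.COMPLETION)
    .put("id", "paragraph_1")   // paragraph id (hypothetical value)
    .put("buf", "sc.")          // current editor buffer
    .put("cursor", 3);          // caret offset within the buffer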
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
From the class ZeppelinSparkClusterTest, method scalaOutputTest:
@Test
public void scalaOutputTest() throws IOException, InterruptedException {
  assumeTrue("Hadoop version mismatch, skip test", isHadoopVersionMatch());
  String noteId = null;
  try {
    // create new note
    noteId = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous);
    TestUtils.getInstance(Notebook.class).processNote(noteId, note -> {
      Paragraph p = note.addNewParagraph(anonymous);
      p.setText("%spark import java.util.Date\n"
          + "import java.net.URL\n"
          + "println(\"hello\")\n");
      note.run(p.getId(), true);
      assertEquals(Status.FINISHED, p.getStatus());
      assertEquals("hello\nimport java.util.Date\nimport java.net.URL\n",
          p.getReturn().message().get(0).getData());
      // check spark weburl in zeppelin-server side
      InterpreterSettingManager interpreterSettingManager =
          TestUtils.getInstance(InterpreterSettingManager.class);
      InterpreterSetting sparkInterpreterSetting = interpreterSettingManager.getByName("spark");
      assertEquals(1, sparkInterpreterSetting.getAllInterpreterGroups().size());
      assertNotNull(sparkInterpreterSetting.getAllInterpreterGroups().get(0).getWebUrl());
      p.setText("%spark invalid_code");
      note.run(p.getId(), true);
      assertEquals(Status.ERROR, p.getStatus());
      assertTrue(p.getReturn().message().get(0).getData().contains("error: "));
      // test local properties
      p.setText("%spark(p1=v1,p2=v2) print(z.getInterpreterContext().getLocalProperties().size())");
      note.run(p.getId(), true);
      assertEquals(Status.FINISHED, p.getStatus());
      assertEquals("2", p.getReturn().message().get(0).getData());
      // test code completion
      List<InterpreterCompletion> completions =
          note.completion(p.getId(), "sc.", 2, AuthenticationInfo.ANONYMOUS);
      assertTrue(completions.size() > 0);
      // test cancel
      p.setText("%spark sc.range(1,10).map(e=>{Thread.sleep(1000); e}).collect()");
      note.run(p.getId(), false);
      waitForRunning(p);
      p.abort();
      waitForFinish(p);
      assertEquals(Status.ABORT, p.getStatus());
      return null;
    });
  } finally {
    if (null != noteId) {
      TestUtils.getInstance(Notebook.class).removeNote(noteId, anonymous);
    }
  }
}
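The test above only asserts that the completion list is non-empty. To inspect individual candidates, a fragment like the following could follow that assertion; it assumes the Thrift-generated getName()/getValue() accessors on InterpreterCompletion (the two-argument constructor used in DepInterpreter below pairs a display name with the inserted value):

// Sketch: dump the candidates returned for the "sc." buffer.
for (InterpreterCompletion completion : completions) {
  // getName()/getValue() are assumed Thrift-generated accessors
  System.out.println(completion.getName() + " -> " + completion.getValue());
}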
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
From the class ZeppelinSparkClusterTest, method pySparkTest:
@Test
public void pySparkTest() throws IOException {
  assumeTrue("Hadoop version mismatch, skip test", isHadoopVersionMatch());
  // create new note
  String noteId = null;
  try {
    noteId = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous);
    TestUtils.getInstance(Notebook.class).processNote(noteId, note -> {
      // run a pyspark paragraph
      Paragraph p = note.addNewParagraph(anonymous);
      p.setText("%spark.pyspark sc.parallelize(range(1, 11)).reduce(lambda a, b: a + b)");
      note.run(p.getId(), true);
      assertEquals(Status.FINISHED, p.getStatus());
      assertEquals("55\n", p.getReturn().message().get(0).getData());
      // simple form via local properties
      p = note.addNewParagraph(anonymous);
      p.setText("%spark.pyspark(form=simple) print('name_' + '${name=abc}')");
      note.run(p.getId(), true);
      assertEquals(Status.FINISHED, p.getStatus());
      assertEquals("name_abc\n", p.getReturn().message().get(0).getData());
      // test code completion
      String code = "%spark.pyspark spark.";
      List<InterpreterCompletion> completions =
          note.completion(p.getId(), code, code.length(), AuthenticationInfo.ANONYMOUS);
      assertTrue(completions.size() > 0);
      if (isSpark2()) {
        // run SparkSession test
        p = note.addNewParagraph(anonymous);
        p.setText("%pyspark from pyspark.sql import Row\n"
            + "df=sqlContext.createDataFrame([Row(id=1, age=20)])\n"
            + "df.collect()");
        note.run(p.getId(), true);
        assertEquals(Status.FINISHED, p.getStatus());
        assertEquals("[Row(age=20, id=1)]\n", p.getReturn().message().get(0).getData());
        // test udf: register through SQLContext, use it through SparkSession
        p = note.addNewParagraph(anonymous);
        p.setText("%pyspark sqlContext.udf.register(\"f1\", lambda x: len(x))\n"
            + "spark.sql(\"select f1(\\\"abc\\\") as len\").collect()");
        note.run(p.getId(), true);
        assertEquals(Status.FINISHED, p.getStatus());
        assertTrue("[Row(len=u'3')]\n".equals(p.getReturn().message().get(0).getData())
            || "[Row(len='3')]\n".equals(p.getReturn().message().get(0).getData()));
      } else {
        // run SparkSession test
        p = note.addNewParagraph(anonymous);
        p.setText("%pyspark from pyspark.sql import Row\n"
            + "df=sqlContext.createDataFrame([Row(id=1, age=20)])\n"
            + "df.collect()");
        note.run(p.getId(), true);
        assertEquals(Status.FINISHED, p.getStatus());
        assertEquals("[Row(id=1, age=20)]\n", p.getReturn().message().get(0).getData());
        // test udf: register through SQLContext, use it through SparkSession
        p = note.addNewParagraph(anonymous);
        p.setText("%pyspark sqlContext.udf.register(\"f1\", lambda x: len(x))\n"
            + "spark.sql(\"select f1(\\\"abc\\\") as len\").collect()");
        note.run(p.getId(), true);
        assertEquals(Status.FINISHED, p.getStatus());
        assertTrue("[Row(len=u'3')]\n".equals(p.getReturn().message().get(0).getData())
            || "[Row(len='3')]\n".equals(p.getReturn().message().get(0).getData()));
      }
      return null;
    });
  } finally {
    if (null != noteId) {
      TestUtils.getInstance(Notebook.class).removeNote(noteId, anonymous);
    }
  }
}
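Both tests stop at assertTrue(completions.size() > 0). For a stronger assertion, a small hypothetical helper could check that a specific candidate is present, again assuming a Thrift-generated getName() accessor:

// Hypothetical test helper: is a given candidate name among the completions?
private static boolean containsCompletion(List<InterpreterCompletion> completions, String name) {
  for (InterpreterCompletion completion : completions) {
    if (name.equals(completion.getName())) { // getName() assumed from the Thrift struct
      return true;
    }
  }
  return false;
}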
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
From the class NotebookService, method completion:
public List<InterpreterCompletion> completion(String noteId, String paragraphId, String buffer,
    int cursor, ServiceContext context, ServiceCallback<List<InterpreterCompletion>> callback)
    throws IOException {
  return notebook.processNote(noteId, note -> {
    if (note == null) {
      callback.onFailure(new NoteNotFoundException(noteId), context);
      return null;
    }
    if (!checkPermission(noteId, Permission.WRITER, Message.OP.COMPLETION, context, callback)) {
      return null;
    }
    try {
      List<InterpreterCompletion> completions =
          note.completion(paragraphId, buffer, cursor, context.getAutheInfo());
      callback.onSuccess(completions, context);
      return completions;
    } catch (RuntimeException e) {
      callback.onFailure(new IOException("Fail to get completion", e), context);
      return null;
    }
  });
}
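For callers outside the websocket layer, a direct invocation might look like the sketch below. The exact ServiceCallback interface is an assumption here: onSuccess/onFailure match the overrides in NotebookServer above, while onStart is assumed; noteId, paragraphId, and LOGGER are likewise placeholders:

// Sketch of a direct service call with an inline callback (interface assumed).
List<InterpreterCompletion> completions = notebookService.completion(
    noteId, paragraphId, "sc.", 3, context,
    new ServiceCallback<List<InterpreterCompletion>>() {
      @Override
      public void onStart(String message, ServiceContext context) {
        // no-op for a synchronous caller (method assumed to exist)
      }
      @Override
      public void onSuccess(List<InterpreterCompletion> result, ServiceContext context) {
        LOGGER.info("got {} completions", result.size()); // LOGGER is a placeholder
      }
      @Override
      public void onFailure(Exception ex, ServiceContext context) {
        LOGGER.warn("completion failed", ex);
      }
    });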
Use of org.apache.zeppelin.interpreter.thrift.InterpreterCompletion in project zeppelin by apache.
From the class DepInterpreter, method completion:
@Override
public List<InterpreterCompletion> completion(String buf, int cursor) {
  // Completion is only available when delegating to the Scala 2.10 REPL completer.
  if (Utils.isScala2_10()) {
    ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
    Candidates ret = c.complete(buf, cursor);
    List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
    List<InterpreterCompletion> completions = new LinkedList<>();
    for (String candidate : candidates) {
      completions.add(new InterpreterCompletion(candidate, candidate));
    }
    return completions;
  } else {
    return new LinkedList<>();
  }
}
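For an interpreter with no REPL completer to delegate to, the same contract can be met with a plain prefix match. A sketch under the two-argument InterpreterCompletion constructor used above; the keyword list is hypothetical and the java.util imports are assumed:

// Hypothetical interpreter: complete against a fixed keyword list by
// matching the identifier that ends at the cursor position.
@Override
public List<InterpreterCompletion> completion(String buf, int cursor) {
  List<String> keywords = Arrays.asList("select", "show", "set"); // hypothetical
  int start = cursor;
  while (start > 0 && Character.isLetterOrDigit(buf.charAt(start - 1))) {
    start--;
  }
  String prefix = buf.substring(start, cursor);
  List<InterpreterCompletion> completions = new LinkedList<>();
  for (String keyword : keywords) {
    if (keyword.startsWith(prefix)) {
      completions.add(new InterpreterCompletion(keyword, keyword));
    }
  }
  return completions;
}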