use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
the class NotebookRestApi method runParagraphSynchronously.
/**
 * REST API to run a paragraph synchronously.
 *
 * @param noteId - noteId
 * @param paragraphId - paragraphId
 * @param message - JSON with params if the user wants to update the dynamic form's values;
 *                  null, an empty string, or empty JSON if the user doesn't want to update them
 *
 * @return JSON with status.OK
 * @throws IOException
 * @throws IllegalArgumentException
 */
@POST
@Path("run/{noteId}/{paragraphId}")
@ZeppelinApi
public Response runParagraphSynchronously(@PathParam("noteId") String noteId,
                                          @PathParam("paragraphId") String paragraphId,
                                          String message) throws IOException, IllegalArgumentException {
  LOG.info("run paragraph synchronously {} {} {}", noteId, paragraphId, message);
  Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot run paragraph");
  Paragraph paragraph = note.getParagraph(paragraphId);
  checkIfParagraphIsNotNull(paragraph);

  // handle params if present
  handleParagraphParams(message, note, paragraph);

  if (paragraph.getListener() == null) {
    note.initializeJobListenerForParagraph(paragraph);
  }
  paragraph.run();

  final InterpreterResult result = paragraph.getResult();
  if (result.code() == InterpreterResult.Code.SUCCESS) {
    return new JsonResponse<>(Status.OK, result).build();
  } else {
    return new JsonResponse<>(Status.INTERNAL_SERVER_ERROR, result).build();
  }
}
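For context, below is a minimal client-side sketch of invoking this endpoint. It is an illustration, not code from Zeppelin: the server URL, port, note id, paragraph id, and the shape of the dynamic-form body are placeholders/assumptions, and only the /api/notebook base path is assumed from Zeppelin's default REST routing.

import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class RunParagraphClient {
  public static void main(String[] args) throws IOException {
    // Placeholder ids; replace with real ones from your notebook.
    String noteId = "NOTE_ID";
    String paragraphId = "PARAGRAPH_ID";
    URL url = new URL("http://localhost:8080/api/notebook/run/" + noteId + "/" + paragraphId);

    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/json");

    // Optional dynamic-form values (assumed body shape); an empty body is also accepted.
    String body = "{\"params\": {\"someFormName\": \"someValue\"}}";
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }

    // Per the method above: 200 corresponds to Status.OK (InterpreterResult code SUCCESS),
    // 500 to Status.INTERNAL_SERVER_ERROR (any other result code).
    System.out.println("HTTP status: " + conn.getResponseCode());
  }
}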
use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
the class NotebookRestApiTest method testClearAllParagraphOutput.
@Test
public void testClearAllParagraphOutput() throws IOException {
  // Create note and set result explicitly
  Note note = ZeppelinServer.notebook.createNote(anonymous);
  Paragraph p1 = note.addParagraph(AuthenticationInfo.ANONYMOUS);
  InterpreterResult result = new InterpreterResult(InterpreterResult.Code.SUCCESS,
      InterpreterResult.Type.TEXT, "result");
  p1.setResult(result);
  Paragraph p2 = note.addParagraph(AuthenticationInfo.ANONYMOUS);
  p2.setReturn(result, new Throwable());

  // clear paragraph results
  PutMethod put = httpPut("/notebook/" + note.getId() + "/clear", "");
  LOG.info("test clear paragraph output response\n" + put.getResponseBodyAsString());
  assertThat(put, isAllowed());
  put.releaseConnection();

  // check if paragraph results are cleared
  GetMethod get = httpGet("/notebook/" + note.getId() + "/paragraph/" + p1.getId());
  assertThat(get, isAllowed());
  Map<String, Object> resp1 = gson.fromJson(get.getResponseBodyAsString(),
      new TypeToken<Map<String, Object>>() {}.getType());
  Map<String, Object> resp1Body = (Map<String, Object>) resp1.get("body");
  assertNull(resp1Body.get("result"));

  get = httpGet("/notebook/" + note.getId() + "/paragraph/" + p2.getId());
  assertThat(get, isAllowed());
  Map<String, Object> resp2 = gson.fromJson(get.getResponseBodyAsString(),
      new TypeToken<Map<String, Object>>() {}.getType());
  Map<String, Object> resp2Body = (Map<String, Object>) resp2.get("body");
  assertNull(resp2Body.get("result"));
  get.releaseConnection();

  // cleanup
  ZeppelinServer.notebook.removeNote(note.getId(), anonymous);
}
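As a small standalone sketch (not part of the test above), the three-argument constructor used in the setup can be combined with the accessors seen elsewhere on this page to inspect a result before it is attached to a paragraph. This assumes the constructor populates the message list the same way an interpreter run does.

import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;

public class InterpreterResultSketch {
  public static void main(String[] args) {
    // Build a result with an explicit code, display type, and payload, as the test setup does.
    InterpreterResult result = new InterpreterResult(
        InterpreterResult.Code.SUCCESS, InterpreterResult.Type.TEXT, "result");

    // code() reports whether the run succeeded; message() exposes the typed output blocks.
    System.out.println(result.code()); // SUCCESS
    for (InterpreterResultMessage m : result.message()) {
      System.out.println(m.getType() + ": " + m.getData());
    }
  }
}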
use of org.apache.zeppelin.interpreter.InterpreterResult in project metron by apache.
the class StellarInterpreterTest method testExecuteWithStellarList.
/**
* Ensure that Stellar lists are displayed correctly in Zeppelin.
*/
@Test
public void testExecuteWithStellarList() {
  final String expected = "[1, 2, 3, 4, 5]";
  InterpreterResult result = interpreter.interpret("[1,2,3,4,5]", context);

  // validate the result
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  assertEquals(1, result.message().size());

  // validate the message
  InterpreterResultMessage message = result.message().get(0);
  assertEquals(expected, message.getData());
  assertEquals(InterpreterResult.Type.TEXT, message.getType());
}
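The tests above only exercise the success path. As a hypothetical illustration (the helper below is not part of Metron or Zeppelin), an interpreter typically signals failure with the two-argument constructor used in the Spark snippets further down, and the caller distinguishes the two cases via code().

// Hypothetical helper, for illustration only: the usual success/error pattern.
public class EvaluateSketch {
  public static InterpreterResult evaluate(String expression) {
    if (expression == null || expression.trim().isEmpty()) {
      // error path: code plus a plain message
      return new InterpreterResult(InterpreterResult.Code.ERROR, "no expression to evaluate");
    }
    // success path: code, display type, and payload
    return new InterpreterResult(InterpreterResult.Code.SUCCESS,
        InterpreterResult.Type.TEXT, expression);
  }

  public static void main(String[] args) {
    System.out.println(evaluate("[1,2,3]").code()); // SUCCESS
    System.out.println(evaluate("").code());        // ERROR
  }
}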
use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
the class SparkInterpreter method interpretInput.
public InterpreterResult interpretInput(String[] lines, InterpreterContext context) {
  SparkEnv.set(env);
  String[] linesToRun = new String[lines.length];
  for (int i = 0; i < lines.length; i++) {
    linesToRun[i] = lines[i];
  }
  Console.setOut(context.out);
  out.setInterpreterOutput(context.out);
  context.out.clear();
  Code r = null;
  String incomplete = "";
  boolean inComment = false;

  for (int l = 0; l < linesToRun.length; l++) {
    String s = linesToRun[l];
    // If the next line starts with "." (but not ".." or "./"), it is treated as a method
    // invocation on the current line, so the two lines are interpreted together.
    if (l + 1 < linesToRun.length) {
      String nextLine = linesToRun[l + 1].trim();
      boolean continuation = false;
      if (nextLine.isEmpty()
          || nextLine.startsWith("//")        // skip empty lines and comments
          || nextLine.startsWith("}")
          || nextLine.startsWith("object")) { // include "} object" for Scala companion objects
        continuation = true;
      } else if (!inComment && nextLine.startsWith("/*")) {
        inComment = true;
        continuation = true;
      } else if (inComment && nextLine.lastIndexOf("*/") >= 0) {
        inComment = false;
        continuation = true;
      } else if (nextLine.length() > 1
          && nextLine.charAt(0) == '.'
          && nextLine.charAt(1) != '.'        // not ".."
          && nextLine.charAt(1) != '/') {     // not "./"
        continuation = true;
      } else if (inComment) {
        continuation = true;
      }
      if (continuation) {
        incomplete += s + "\n";
        continue;
      }
    }

    scala.tools.nsc.interpreter.Results.Result res = null;
    try {
      res = interpret(incomplete + s);
    } catch (Exception e) {
      sc.clearJobGroup();
      out.setInterpreterOutput(null);
      logger.info("Interpreter exception", e);
      return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
    }

    r = getResultCode(res);
    if (r == Code.ERROR) {
      sc.clearJobGroup();
      out.setInterpreterOutput(null);
      return new InterpreterResult(r, "");
    } else if (r == Code.INCOMPLETE) {
      incomplete += s + "\n";
    } else {
      incomplete = "";
    }
  }

  // make sure the code does not end with a comment
  if (r == Code.INCOMPLETE) {
    scala.tools.nsc.interpreter.Results.Result res = null;
    res = interpret(incomplete + "\nprint(\"\")");
    r = getResultCode(res);
  }

  if (r == Code.INCOMPLETE) {
    sc.clearJobGroup();
    out.setInterpreterOutput(null);
    return new InterpreterResult(r, "Incomplete expression");
  } else {
    sc.clearJobGroup();
    putLatestVarInResourcePool(context);
    out.setInterpreterOutput(null);
    return new InterpreterResult(Code.SUCCESS);
  }
}
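interpretInput delegates the translation from the Scala REPL's outcome to an InterpreterResult code to getResultCode, which is not shown on this page. A plausible sketch of that mapping, assuming the standard scala.tools.nsc.interpreter.Results values (the real SparkInterpreter implementation may differ), is:

// Plausible sketch of getResultCode: map the Scala REPL outcome onto InterpreterResult.Code.
private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
  if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
    return Code.SUCCESS;
  } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
    return Code.INCOMPLETE;
  } else {
    return Code.ERROR;
  }
}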
use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
the class SparkSqlInterpreter method interpret.
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
  SQLContext sqlc = null;
  SparkInterpreter sparkInterpreter = getSparkInterpreter();
  if (sparkInterpreter.getSparkVersion().isUnsupportedVersion()) {
    return new InterpreterResult(Code.ERROR,
        "Spark " + sparkInterpreter.getSparkVersion().toString() + " is not supported");
  }
  sparkInterpreter.populateSparkWebUrl(context);
  sqlc = getSparkInterpreter().getSQLContext();
  SparkContext sc = sqlc.sparkContext();
  if (concurrentSQL()) {
    sc.setLocalProperty("spark.scheduler.pool", "fair");
  } else {
    sc.setLocalProperty("spark.scheduler.pool", null);
  }
  sc.setJobGroup(Utils.buildJobGroupId(context), "Zeppelin", false);

  Object rdd = null;
  try {
    // The signature of sqlc.sql() changed from
    //   def sql(sqlText: String): SchemaRDD   (Spark 1.2 and earlier)
    // to
    //   def sql(sqlText: String): DataFrame   (Spark 1.3 and later),
    // so reflection is used to keep binary compatibility across Spark versions.
    Method sqlMethod = sqlc.getClass().getMethod("sql", String.class);
    rdd = sqlMethod.invoke(sqlc, st);
  } catch (InvocationTargetException ite) {
    if (Boolean.parseBoolean(getProperty("zeppelin.spark.sql.stacktrace"))) {
      throw new InterpreterException(ite);
    }
    logger.error("Invocation target exception", ite);
    String msg = ite.getTargetException().getMessage()
        + "\nset zeppelin.spark.sql.stacktrace = true to see full stacktrace";
    return new InterpreterResult(Code.ERROR, msg);
  } catch (NoSuchMethodException | SecurityException | IllegalAccessException
      | IllegalArgumentException e) {
    throw new InterpreterException(e);
  }

  String msg = ZeppelinContext.showDF(sc, context, rdd, maxResult);
  sc.clearJobGroup();
  return new InterpreterResult(Code.SUCCESS, msg);
}
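Tying the snippets together: whichever interpreter produced it, a caller only needs InterpreterResult.code() to decide how to report the outcome, as the REST method at the top of this page does. A hypothetical helper (not part of Zeppelin) that mirrors that decision:

// Hypothetical helper, for illustration only: map a result code to the HTTP status
// the REST layer above would return.
public static int toHttpStatus(InterpreterResult result) {
  switch (result.code()) {
    case SUCCESS:
      return 200; // Status.OK
    default:
      return 500; // Status.INTERNAL_SERVER_ERROR for ERROR, INCOMPLETE, etc.
  }
}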