Usage example of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project:
method setUp of class PythonInterpreterTest.
@Override
public void setUp() throws InterpreterException {
  // Fresh group per test so interpreter state cannot leak between runs.
  intpGroup = new InterpreterGroup();

  Properties properties = new Properties();
  // Cap results at 3 rows so the tests can exercise truncation behavior.
  properties.setProperty("zeppelin.python.maxResult", "3");
  // Force the plain PythonInterpreter path; never fall back to IPython.
  properties.setProperty("zeppelin.python.useIPython", "false");
  properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");

  // Wrap in LazyOpenInterpreter so open() is deferred until needed.
  interpreter = new LazyOpenInterpreter(new PythonInterpreter(properties));
  // ArrayList instead of LinkedList: it is the idiomatic default list and
  // matches the sibling IPythonInterpreterTest setup.
  intpGroup.put("note", new ArrayList<Interpreter>());
  intpGroup.get("note").add(interpreter);
  interpreter.setInterpreterGroup(intpGroup);

  InterpreterContext.set(getInterpreterContext());
  interpreter.open();
}
Usage example of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project:
method setUp of class ShinyInterpreterTest.
@Before
public void setUp() throws InterpreterException {
  // Publish a context before the interpreter is opened.
  InterpreterContext ctx = getInterpreterContext();
  InterpreterContext.set(ctx);

  // Shiny interpreter with default (empty) configuration.
  interpreter = new ShinyInterpreter(new Properties());

  // Register it in a group under one session; the lazy wrapper defers the
  // actual open of the wrapped instance.
  InterpreterGroup group = new InterpreterGroup();
  group.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1");
  interpreter.setInterpreterGroup(group);

  interpreter.open();
}
Usage example of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project:
method testInvalidR of class RInterpreterTest.
@Test
public void testInvalidR() throws InterpreterException {
  // Dispose of any interpreter started by a previous test first.
  tearDown();

  Properties properties = new Properties();
  // Point at a binary that does not exist so startup must fail.
  properties.setProperty("zeppelin.R.cmd", "invalid_r");
  properties.setProperty("spark.master", "local");
  properties.setProperty("spark.app.name", "test");

  InterpreterGroup interpreterGroup = new InterpreterGroup();
  Interpreter rInterpreter = new LazyOpenInterpreter(new RInterpreter(properties));
  interpreterGroup.addInterpreterToSession(rInterpreter, "session_1");
  rInterpreter.setInterpreterGroup(interpreterGroup);

  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);
  try {
    // Reuse the context created above instead of building a second one
    // (the original called getInterpreterContext() again here).
    rInterpreter.interpret("1+1", context);
    fail("Should fail to open SparkRInterpreter");
  } catch (InterpreterException e) {
    // The failure must come from launching the bogus R executable.
    String stacktrace = ExceptionUtils.getStackTrace(e);
    assertTrue(stacktrace, stacktrace.contains("No such file or directory"));
  }
}
Usage example of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project:
method startInterpreter of class IPythonInterpreterTest.
protected void startInterpreter(Properties properties) throws InterpreterException {
  // Wrap so the IPython kernel process is only started on first use.
  interpreter = new LazyOpenInterpreter(new IPythonInterpreter(properties));

  // Build the session list up front instead of put-then-get on the group.
  ArrayList<Interpreter> session = new ArrayList<>();
  session.add(interpreter);

  intpGroup = new InterpreterGroup();
  intpGroup.put("session_1", session);
  interpreter.setInterpreterGroup(intpGroup);

  interpreter.open();
}
Usage example of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project:
method testLivyTutorialNote of class LivyInterpreterIT.
@Test
public void testLivyTutorialNote() throws IOException, InterpreterException {
  // Skip entirely when the Livy cluster is not reachable.
  if (!checkPreCondition()) {
    return;
  }

  InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
  interpreterGroup.put("session_1", new ArrayList<Interpreter>());

  LazyOpenInterpreter sparkInterpreter =
      new LazyOpenInterpreter(new LivySparkInterpreter(properties));
  sparkInterpreter.setInterpreterGroup(interpreterGroup);
  interpreterGroup.get("session_1").add(sparkInterpreter);

  LazyOpenInterpreter sqlInterpreter =
      new LazyOpenInterpreter(new LivySparkSQLInterpreter(properties));
  interpreterGroup.get("session_1").add(sqlInterpreter);
  sqlInterpreter.setInterpreterGroup(interpreterGroup);
  // NOTE(review): only the SQL interpreter is opened explicitly; the Spark
  // one presumably opens lazily on its first interpret() call.
  sqlInterpreter.open();

  try {
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    InterpreterContext context = InterpreterContext.builder()
        .setNoteId("noteId")
        .setParagraphId("paragraphId")
        .setAuthenticationInfo(authInfo)
        .setInterpreterOut(output)
        .build();

    // Fix: pass an explicit charset — the single-argument IOUtils.toString
    // overload is deprecated and uses the platform default encoding.
    String p1 = IOUtils.toString(
        getClass().getResourceAsStream("/livy_tutorial_1.scala"),
        java.nio.charset.StandardCharsets.UTF_8);
    InterpreterResult result = sparkInterpreter.interpret(p1, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());

    String p2 = IOUtils.toString(
        getClass().getResourceAsStream("/livy_tutorial_2.sql"),
        java.nio.charset.StandardCharsets.UTF_8);
    result = sqlInterpreter.interpret(p2, context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
  } finally {
    // Always release the Livy sessions, even if an assertion fails.
    sparkInterpreter.close();
    sqlInterpreter.close();
  }
}
Aggregations