Use of org.apache.zeppelin.interpreter.Interpreter in project zeppelin by apache.
The class PySparkInterpreterMatplotlibTest, method setUp.
@BeforeClass
public static void setUp() throws Exception {
  intpGroup = new InterpreterGroup();
  intpGroup.put("note", new LinkedList<Interpreter>());
  context = InterpreterContext.builder()
      .setNoteId("note")
      .setInterpreterOut(new InterpreterOutput())
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .build();
  InterpreterContext.set(context);

  sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
  intpGroup.get("note").add(sparkInterpreter);
  sparkInterpreter.setInterpreterGroup(intpGroup);
  sparkInterpreter.open();

  pyspark = new AltPySparkInterpreter(getPySparkTestProperties());
  intpGroup.get("note").add(pyspark);
  pyspark.setInterpreterGroup(intpGroup);
  pyspark.open();
}
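This setUp wires a SparkInterpreter and an AltPySparkInterpreter into the same InterpreterGroup session ("note") so the Python side can reuse the Spark context. A test method built on it would drive the interpreter through interpret(); the sketch below is a hypothetical example (the matplotlib payload and the assertions are assumptions, not code from the original test class):

@Test
public void testMatplotlibSketch() throws InterpreterException {
  // Hypothetical test body: render a plot through the interpreter wired in
  // setUp and assert the paragraph succeeds.
  InterpreterResult ret = pyspark.interpret(
      "import matplotlib\n"
          + "matplotlib.use('Agg')\n"
          + "import matplotlib.pyplot as plt\n"
          + "plt.plot([1, 2, 3])",
      context);
  assertEquals(ret.message().toString(), InterpreterResult.Code.SUCCESS, ret.code());
}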
Use of org.apache.zeppelin.interpreter.Interpreter in project zeppelin by apache.
The class SparkRInterpreterTest, method testInvalidR.
@Test
public void testInvalidR() throws InterpreterException {
  tearDown();

  Properties properties = new Properties();
  properties.setProperty("zeppelin.R.cmd", "invalid_r");
  properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");

  InterpreterGroup interpreterGroup = new InterpreterGroup();
  Interpreter sparkRInterpreter = new LazyOpenInterpreter(new SparkRInterpreter(properties));
  Interpreter sparkInterpreter = new LazyOpenInterpreter(new SparkInterpreter(properties));
  interpreterGroup.addInterpreterToSession(sparkRInterpreter, "session_1");
  interpreterGroup.addInterpreterToSession(sparkInterpreter, "session_1");
  sparkRInterpreter.setInterpreterGroup(interpreterGroup);
  sparkInterpreter.setInterpreterGroup(interpreterGroup);

  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);
  try {
    sparkRInterpreter.interpret("1+1", getInterpreterContext());
    fail("Should fail to open SparkRInterpreter");
  } catch (InterpreterException e) {
    String stacktrace = ExceptionUtils.getStackTrace(e);
    assertTrue(stacktrace, stacktrace.contains("No such file or directory"));
  }
}
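The failure surfaces in interpret() rather than open() because both interpreters are wrapped in LazyOpenInterpreter, which defers opening its delegate until first use. A minimal sketch of that pattern, assuming only the Interpreter methods already used above (an illustration, not Zeppelin's actual implementation):

// Simplified illustration of the lazy-open pattern; not Zeppelin's real
// LazyOpenInterpreter, which manages more state and delegates every method.
class LazyOpenSketch {
  private final Interpreter delegate;
  private volatile boolean opened = false;

  LazyOpenSketch(Interpreter delegate) {
    this.delegate = delegate;
  }

  private synchronized void openIfNeeded() throws InterpreterException {
    if (!opened) {
      delegate.open();   // a broken zeppelin.R.cmd fails here, at first use
      opened = true;
    }
  }

  InterpreterResult interpret(String st, InterpreterContext context)
      throws InterpreterException {
    openIfNeeded();      // which is why the test catches the error from interpret()
    return delegate.interpret(st, context);
  }
}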
Use of org.apache.zeppelin.interpreter.Interpreter in project zeppelin by apache.
The class KotlinSparkInterpreterTest, method setUp.
@BeforeClass
public static void setUp() throws Exception {
  intpGroup = new InterpreterGroup();
  context = InterpreterContext.builder()
      .setNoteId("noteId")
      .setParagraphId("paragraphId")
      .setParagraphTitle("title")
      .setAngularObjectRegistry(new AngularObjectRegistry(intpGroup.getId(), null))
      .setResourcePool(new LocalResourcePool("id"))
      .setInterpreterOut(new InterpreterOutput())
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .build();
  context.out = new InterpreterOutput(new InterpreterOutputListener() {
    @Override
    public void onUpdateAll(InterpreterOutput out) {
    }

    @Override
    public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
      try {
        output = out.toInterpreterResultMessage().getData();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }

    @Override
    public void onUpdate(int index, InterpreterResultMessageOutput out) {
    }
  });
  InterpreterContext.set(context);

  intpGroup.put("note", new LinkedList<Interpreter>());
  Properties properties = getSparkTestProperties(tmpDir);

  repl = new SparkInterpreter(properties);
  repl.setInterpreterGroup(intpGroup);
  intpGroup.get("note").add(repl);
  repl.open();
  repl.interpret("sc", context);

  interpreter = new KotlinSparkInterpreter(properties);
  interpreter.setInterpreterGroup(intpGroup);
  intpGroup.get("note").add(interpreter);
  try {
    interpreter.open();
    sparkSupported = true;
  } catch (UnsupportedClassVersionError e) {
    sparkSupported = false;
  }
}
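The anonymous listener copies every appended message into the static output field, so later tests can assert on what the Kotlin interpreter printed, and the sparkSupported flag lets them skip cleanly on JVMs that Spark cannot load. A hypothetical test using both (the expression and the assertions are assumptions, not code from the original test class):

@Test
public void testPrintln() throws InterpreterException {
  // Hypothetical test: skip when open() hit UnsupportedClassVersionError,
  // then check the output captured by the listener installed in setUp.
  org.junit.Assume.assumeTrue(sparkSupported);
  InterpreterResult result = interpreter.interpret("println(1 + 1)", context);
  assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  assertTrue(output.contains("2"));
}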
Use of org.apache.zeppelin.interpreter.Interpreter in project zeppelin by apache.
The class PySparkInterpreterTest, method testFailtoLaunchPythonProcess.
@Override
@Test
public void testFailtoLaunchPythonProcess() throws InterpreterException {
  tearDown();
  intpGroup = new InterpreterGroup();

  Properties properties = new Properties();
  properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "Zeppelin Test");
  properties.setProperty("spark.pyspark.python", "invalid_python");
  properties.setProperty("zeppelin.python.useIPython", "false");
  properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
  properties.setProperty("zeppelin.spark.maxResult", "3");

  interpreter = new LazyOpenInterpreter(new PySparkInterpreter(properties));
  interpreter.setInterpreterGroup(intpGroup);
  Interpreter sparkInterpreter = new LazyOpenInterpreter(new SparkInterpreter(properties));
  sparkInterpreter.setInterpreterGroup(intpGroup);
  LazyOpenInterpreter iPySparkInterpreter = new LazyOpenInterpreter(new IPySparkInterpreter(properties));
  iPySparkInterpreter.setInterpreterGroup(intpGroup);
  intpGroup.put("note", new LinkedList<Interpreter>());
  intpGroup.get("note").add(interpreter);
  intpGroup.get("note").add(sparkInterpreter);
  intpGroup.get("note").add(iPySparkInterpreter);

  InterpreterContext.set(getInterpreterContext());
  try {
    interpreter.interpret("1+1", getInterpreterContext());
    fail("Should fail to open PySparkInterpreter");
  } catch (InterpreterException e) {
    String stacktrace = ExceptionUtils.getStackTrace(e);
    assertTrue(stacktrace, stacktrace.contains("No such file or directory"));
  }
}
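getInterpreterContext() is a helper defined elsewhere in the test class and not shown here. A plausible minimal reconstruction, using only builder calls that appear in the snippets above (the field values are assumptions, and the real helper may set additional fields):

// Hypothetical reconstruction of the helper used above, for illustration only.
private static InterpreterContext getInterpreterContext() {
  return InterpreterContext.builder()
      .setNoteId("note")
      .setParagraphId("paragraph_1")
      .setInterpreterOut(new InterpreterOutput())
      .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
      .build();
}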
Use of org.apache.zeppelin.interpreter.Interpreter in project zeppelin by apache.
The class SparkSqlInterpreterTest, method setUp.
@BeforeClass
public static void setUp() throws Exception {
  Properties p = new Properties();
  p.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local[4]");
  p.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  p.setProperty("zeppelin.spark.maxResult", "10");
  p.setProperty("zeppelin.spark.concurrentSQL", "true");
  p.setProperty("zeppelin.spark.sql.stacktrace", "true");
  p.setProperty("zeppelin.spark.useHiveContext", "true");
  p.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  intpGroup = new InterpreterGroup();
  sparkInterpreter = new SparkInterpreter(p);
  sparkInterpreter.setInterpreterGroup(intpGroup);
  sqlInterpreter = new SparkSqlInterpreter(p);
  sqlInterpreter.setInterpreterGroup(intpGroup);
  intpGroup.put("session_1", new LinkedList<Interpreter>());
  intpGroup.get("session_1").add(sparkInterpreter);
  intpGroup.get("session_1").add(sqlInterpreter);

  context = getInterpreterContext();
  InterpreterContext.set(context);
  sparkInterpreter.open();
  sqlInterpreter.open();
}
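With both interpreters opened against the same group, a typical test registers a temp view through the Scala side and queries it through the SQL side. A minimal sketch (the view name and the assertions are assumptions, not the original test code):

@Test
public void testSelectSketch() throws InterpreterException {
  // Hypothetical test body: create a temp view via the Spark interpreter,
  // then query it via the SQL interpreter opened in setUp.
  sparkInterpreter.interpret("spark.range(3).createOrReplaceTempView(\"t\")", context);
  InterpreterResult ret = sqlInterpreter.interpret("select * from t", context);
  assertEquals(ret.message().toString(), InterpreterResult.Code.SUCCESS, ret.code());
}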