Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project.
From the class PythonInterpreterPandasSql, method getPythonInterpreter:
/**
 * Resolves the {@link PythonInterpreter} registered in the same session.
 *
 * <p>The session may hand back the interpreter wrapped in one or more
 * {@link WrappedInterpreter} layers; this peels them off while remembering the
 * outermost {@link LazyOpenInterpreter} so the interpreter can be opened before
 * it is returned.
 *
 * @return the unwrapped, opened {@link PythonInterpreter}
 */
PythonInterpreter getPythonInterpreter() {
  Interpreter current =
      getInterpreterInTheSameSessionByClassName(PythonInterpreter.class.getName());
  LazyOpenInterpreter lazyWrapper = null;
  // Walk down through wrapper layers to the concrete interpreter, keeping a
  // handle on the lazy wrapper (if any) so open() goes through it.
  while (current instanceof WrappedInterpreter) {
    if (current instanceof LazyOpenInterpreter) {
      lazyWrapper = (LazyOpenInterpreter) current;
    }
    current = ((WrappedInterpreter) current).getInnerInterpreter();
  }
  // Opening via the lazy wrapper ensures initialization happens exactly once.
  if (lazyWrapper != null) {
    lazyWrapper.open();
  }
  return (PythonInterpreter) current;
}
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project.
From the class PySparkInterpreterTest, method setUp:
@Override
public void setUp() throws InterpreterException {
  // Run Spark locally with IPython disabled so the plain PySparkInterpreter
  // code path (not the IPython-backed one) is exercised by the tests.
  Properties props = new Properties();
  props.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  props.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "Zeppelin Test");
  props.setProperty("zeppelin.spark.useHiveContext", "false");
  props.setProperty("zeppelin.spark.maxResult", "3");
  props.setProperty("zeppelin.spark.importImplicit", "true");
  props.setProperty("zeppelin.pyspark.python", "python");
  try {
    // Isolate dependency downloads in a throwaway local repository.
    props.setProperty(
        "zeppelin.dep.localrepo",
        Files.createTempDirectory("localrepo").toAbsolutePath().toString());
  } catch (IOException e) {
    fail(ExceptionUtils.getStackTrace(e));
  }
  props.setProperty("zeppelin.pyspark.useIPython", "false");
  props.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
  props.setProperty("zeppelin.spark.deprecatedMsg.show", "false");

  // Assemble the interpreter group: Spark, IPySpark, and the PySpark
  // interpreter under test, all lazily opened and sharing one session.
  intpGroup = new InterpreterGroup();
  intpGroup.put("note", new LinkedList<Interpreter>());

  InterpreterContext context =
      InterpreterContext.builder()
          .setInterpreterOut(new InterpreterOutput())
          .setIntpEventClient(mockRemoteEventClient)
          .build();
  InterpreterContext.set(context);

  LazyOpenInterpreter sparkInterpreter =
      new LazyOpenInterpreter(new SparkInterpreter(props));
  intpGroup.get("note").add(sparkInterpreter);
  sparkInterpreter.setInterpreterGroup(intpGroup);

  LazyOpenInterpreter iPySparkInterpreter =
      new LazyOpenInterpreter(new IPySparkInterpreter(props));
  intpGroup.get("note").add(iPySparkInterpreter);
  iPySparkInterpreter.setInterpreterGroup(intpGroup);

  interpreter = new LazyOpenInterpreter(new PySparkInterpreter(props));
  intpGroup.get("note").add(interpreter);
  interpreter.setInterpreterGroup(intpGroup);

  // Opening the interpreter under test forces its lazy initialization now.
  interpreter.open();
}
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project.
From the class SparkIRInterpreterTest, method setUp:
@Override
@Before
public void setUp() throws InterpreterException {
  // Local-mode Spark configuration with Arrow for SparkR disabled and a short
  // R backend connection timeout so failures surface quickly in tests.
  Properties props = new Properties();
  props.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  props.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  props.setProperty("zeppelin.spark.maxResult", "100");
  props.setProperty("spark.r.backendConnectionTimeout", "10");
  props.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
  props.setProperty("spark.sql.execution.arrow.sparkr.enabled", "false");

  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);

  // Register the IR interpreter under test and a Spark interpreter in the
  // same session so they can find each other; both are lazily opened.
  interpreter = createInterpreter(props);
  InterpreterGroup group = new InterpreterGroup();
  group.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1");
  interpreter.setInterpreterGroup(group);

  SparkInterpreter sparkInterpreter = new SparkInterpreter(props);
  group.addInterpreterToSession(new LazyOpenInterpreter(sparkInterpreter), "session_1");
  sparkInterpreter.setInterpreterGroup(group);

  // Trigger initialization of the interpreter under test.
  interpreter.open();
}
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project.
From the class SparkRInterpreterTest, method setUp:
@Before
public void setUp() throws InterpreterException {
  // Local-mode Spark with knitr rendering enabled and SparkR Arrow support
  // disabled; a short backend timeout keeps failing tests fast.
  Properties props = new Properties();
  props.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local");
  props.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test");
  props.setProperty("zeppelin.spark.maxResult", "100");
  props.setProperty("zeppelin.R.knitr", "true");
  props.setProperty("spark.r.backendConnectionTimeout", "10");
  props.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
  props.setProperty("spark.sql.execution.arrow.sparkr.enabled", "false");

  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);

  // Put SparkR and Spark interpreters in the same session so SparkR can
  // locate the Spark interpreter it depends on; both are lazily opened.
  sparkRInterpreter = new SparkRInterpreter(props);
  sparkInterpreter = new SparkInterpreter(props);

  InterpreterGroup group = new InterpreterGroup();
  group.addInterpreterToSession(new LazyOpenInterpreter(sparkRInterpreter), "session_1");
  group.addInterpreterToSession(new LazyOpenInterpreter(sparkInterpreter), "session_1");
  sparkRInterpreter.setInterpreterGroup(group);
  sparkInterpreter.setInterpreterGroup(group);

  // Opening the SparkR interpreter initializes it (and, transitively, Spark).
  sparkRInterpreter.open();
}
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in the Apache Zeppelin project.
From the class PyFlinkInterpreterTest, method setUp:
@Override
public void setUp() throws InterpreterException {
  // Flink test configuration: IPython disabled so the plain PyFlink path is
  // exercised, 4 local task managers, and results capped at 3 rows.
  Properties props = new Properties();
  props.setProperty("zeppelin.pyflink.python", "python");
  props.setProperty("zeppelin.flink.maxResult", "3");
  // NOTE(review): Guava's Files.createTempDir() is deprecated; kept here to
  // preserve behavior — consider java.nio.file.Files.createTempDirectory.
  props.setProperty("zeppelin.dep.localrepo", Files.createTempDir().getAbsolutePath());
  props.setProperty("zeppelin.pyflink.useIPython", "false");
  props.setProperty("zeppelin.flink.test", "true");
  props.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
  props.setProperty("local.number-taskmanager", "4");

  // Build the interpreter group hosting every Flink-family interpreter the
  // PyFlink tests touch, all sharing "session_1" and all lazily opened.
  intpGroup = new InterpreterGroup();
  intpGroup.put("session_1", new LinkedList<>());

  IPyFlinkInterpreterTest.angularObjectRegistry = new AngularObjectRegistry("flink", null);
  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);

  this.flinkInnerInterpreter = new FlinkInterpreter(props);
  flinkScalaInterpreter = new LazyOpenInterpreter(flinkInnerInterpreter);
  intpGroup.get("session_1").add(flinkScalaInterpreter);
  flinkScalaInterpreter.setInterpreterGroup(intpGroup);

  LazyOpenInterpreter iPyFlinkInterpreter =
      new LazyOpenInterpreter(new IPyFlinkInterpreter(props));
  intpGroup.get("session_1").add(iPyFlinkInterpreter);
  iPyFlinkInterpreter.setInterpreterGroup(intpGroup);

  interpreter = new LazyOpenInterpreter(new PyFlinkInterpreter(props));
  intpGroup.get("session_1").add(interpreter);
  interpreter.setInterpreterGroup(intpGroup);

  streamSqlInterpreter = new LazyOpenInterpreter(new FlinkStreamSqlInterpreter(props));
  batchSqlInterpreter = new LazyOpenInterpreter(new FlinkBatchSqlInterpreter(props));
  intpGroup.get("session_1").add(streamSqlInterpreter);
  intpGroup.get("session_1").add(batchSqlInterpreter);
  streamSqlInterpreter.setInterpreterGroup(intpGroup);
  batchSqlInterpreter.setInterpreterGroup(intpGroup);

  // Opening the PyFlink interpreter under test forces its initialization.
  interpreter.open();
}
Aggregations