Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.
From the class LivyInterpreterIT, method testSharedInterpreter:
@Test
public void testSharedInterpreter() throws InterpreterException {
  if (!checkPreCondition()) {
    return;
  }
  InterpreterGroup interpreterGroup = new InterpreterGroup("group_1");
  interpreterGroup.put("session_1", new ArrayList<Interpreter>());

  LazyOpenInterpreter sparkInterpreter =
      new LazyOpenInterpreter(new LivySparkInterpreter(properties));
  sparkInterpreter.setInterpreterGroup(interpreterGroup);
  interpreterGroup.get("session_1").add(sparkInterpreter);

  LazyOpenInterpreter sqlInterpreter =
      new LazyOpenInterpreter(new LivySparkSQLInterpreter(properties));
  interpreterGroup.get("session_1").add(sqlInterpreter);
  sqlInterpreter.setInterpreterGroup(interpreterGroup);

  LazyOpenInterpreter pysparkInterpreter =
      new LazyOpenInterpreter(new LivyPySparkInterpreter(properties));
  interpreterGroup.get("session_1").add(pysparkInterpreter);
  pysparkInterpreter.setInterpreterGroup(interpreterGroup);

  LazyOpenInterpreter sparkRInterpreter =
      new LazyOpenInterpreter(new LivySparkRInterpreter(properties));
  interpreterGroup.get("session_1").add(sparkRInterpreter);
  sparkRInterpreter.setInterpreterGroup(interpreterGroup);

  LazyOpenInterpreter sharedInterpreter =
      new LazyOpenInterpreter(new LivySharedInterpreter(properties));
  interpreterGroup.get("session_1").add(sharedInterpreter);
  sharedInterpreter.setInterpreterGroup(interpreterGroup);

  sparkInterpreter.open();
  sqlInterpreter.open();
  pysparkInterpreter.open();
  sparkRInterpreter.open();

  try {
    AuthenticationInfo authInfo = new AuthenticationInfo("user1");
    MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
    InterpreterOutput output = new InterpreterOutput(outputListener);
    InterpreterContext context = InterpreterContext.builder()
        .setNoteId("noteId")
        .setParagraphId("paragraphId")
        .setAuthenticationInfo(authInfo)
        .setInterpreterOut(output)
        .build();

    // detect spark version
    InterpreterResult result = sparkInterpreter.interpret("sc.version", context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());

    boolean isSpark2 =
        isSpark2((BaseLivyInterpreter) sparkInterpreter.getInnerInterpreter(), context);
    if (!isSpark2) {
      result = sparkInterpreter.interpret(
          "val df=sqlContext.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n"
              + "df.collect()", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData()
          .contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
      sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);

      // access table from pyspark
      result = pysparkInterpreter.interpret("sqlContext.sql(\"select * from df\").show()", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData().contains(
          "+-----+-----+\n" + "|col_1|col_2|\n" + "+-----+-----+\n"
              + "|hello|   20|\n" + "+-----+-----+"));

      // access table from sparkr
      result = sparkRInterpreter.interpret("head(sql(sqlContext, \"select * from df\"))", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData().contains("col_1 col_2\n1 hello    20"));
    } else {
      result = sparkInterpreter.interpret(
          "val df=spark.createDataFrame(Seq((\"hello\",20))).toDF(\"col_1\", \"col_2\")\n"
              + "df.collect()", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData()
          .contains("Array[org.apache.spark.sql.Row] = Array([hello,20])"));
      sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);

      // access table from pyspark
      result = pysparkInterpreter.interpret("spark.sql(\"select * from df\").show()", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData().contains(
          "+-----+-----+\n" + "|col_1|col_2|\n" + "+-----+-----+\n"
              + "|hello|   20|\n" + "+-----+-----+"));

      // access table from sparkr
      result = sparkRInterpreter.interpret("head(sql(\"select * from df\"))", context);
      assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
      assertEquals(1, result.message().size());
      assertTrue(result.message().get(0).getData().contains("col_1 col_2\n1 hello    20"));
    }

    // test plotting of python
    result = pysparkInterpreter.interpret(
        "import matplotlib.pyplot as plt\n"
            + "plt.switch_backend('agg')\n"
            + "data=[1,2,3,4]\n"
            + "plt.figure()\n"
            + "plt.plot(data)\n"
            + "%matplot plt", context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertEquals(InterpreterResult.Type.IMG, result.message().get(0).getType());

    // test plotting of R
    result = sparkRInterpreter.interpret("hist(mtcars$mpg)", context);
    assertEquals(result.toString(), InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, result.message().size());
    assertEquals(InterpreterResult.Type.IMG, result.message().get(0).getType());

    // test code completion
    List<InterpreterCompletion> completionResult = sparkInterpreter.completion("df.sho", 6, context);
    assertEquals(1, completionResult.size());
    assertEquals("show", completionResult.get(0).name);
  } finally {
    sparkInterpreter.close();
    sqlInterpreter.close();
  }
}
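Each Livy interpreter above is wrapped in a LazyOpenInterpreter before being registered, so the remote Livy session is only created when open() is first called rather than at construction time. The sketch below shows the core idea of such a wrapper; it is illustrative only. The real LazyOpenInterpreter extends Interpreter, implements WrappedInterpreter, and forwards the whole Interpreter API.

import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterResult;

// Illustrative sketch only, not Zeppelin's implementation.
public class LazyOpenSketch {
  private final Interpreter inner;
  private volatile boolean opened = false;

  public LazyOpenSketch(Interpreter inner) {
    this.inner = inner;
  }

  // Idempotent: the wrapped interpreter is opened at most once.
  public synchronized void open() throws InterpreterException {
    if (!opened) {
      inner.open();
      opened = true;
    }
  }

  // Open on demand, so constructing the wrapper stays cheap and the
  // first interpret() call pays the session-creation cost.
  public InterpreterResult interpret(String st, InterpreterContext context)
      throws InterpreterException {
    open();
    return inner.interpret(st, context);
  }

  public boolean isOpen() {
    return opened;
  }

  public Interpreter getInnerInterpreter() {
    return inner;
  }
}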
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.
From the class IRKernelTest, method setUp:
@Before
public void setUp() throws InterpreterException {
  Properties properties = new Properties();
  InterpreterContext context = getInterpreterContext();
  InterpreterContext.set(context);
  interpreter = createInterpreter(properties);

  InterpreterGroup interpreterGroup = new InterpreterGroup();
  interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1");
  interpreter.setInterpreterGroup(interpreterGroup);
  interpreter.open();
}
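Note the registration helper: where LivyInterpreterIT seeds the session list with put() and adds to it, IRKernelTest uses addInterpreterToSession(), which accomplishes the same thing in one call. A minimal side-by-side comparison, with interpreter standing in for any concrete Interpreter instance:

// "interpreter" is a placeholder for any concrete Interpreter.
InterpreterGroup group = new InterpreterGroup("group_1");

// Explicit session list, as in LivyInterpreterIT:
group.put("session_1", new ArrayList<Interpreter>());
group.get("session_1").add(new LazyOpenInterpreter(interpreter));

// Convenience helper, as in IRKernelTest:
group.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1");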
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.
From the class SparkSqlInterpreter, method getSparkInterpreter:
private SparkInterpreter getSparkInterpreter() {
  LazyOpenInterpreter lazy = null;
  SparkInterpreter spark = null;
  Interpreter p = getInterpreterInTheSameSessionByClassName(SparkInterpreter.class.getName());
  while (p instanceof WrappedInterpreter) {
    if (p instanceof LazyOpenInterpreter) {
      lazy = (LazyOpenInterpreter) p;
    }
    p = ((WrappedInterpreter) p).getInnerInterpreter();
  }
  spark = (SparkInterpreter) p;
  if (lazy != null) {
    lazy.open();
  }
  return spark;
}
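This is the standard unwrap-and-open idiom: walk down through WrappedInterpreter layers to reach the concrete interpreter, remembering the LazyOpenInterpreter on the way so the target can be opened before use. A hypothetical generic version of the same pattern (unwrapAndOpen is not a Zeppelin API, just a sketch):

// Hypothetical helper, not part of Zeppelin: generalizes the
// unwrap-and-open pattern from getSparkInterpreter() above.
private static <T extends Interpreter> T unwrapAndOpen(Interpreter p, Class<T> target) {
  LazyOpenInterpreter lazy = null;
  while (p instanceof WrappedInterpreter) {
    if (p instanceof LazyOpenInterpreter) {
      lazy = (LazyOpenInterpreter) p;
    }
    p = ((WrappedInterpreter) p).getInnerInterpreter();
  }
  if (lazy != null) {
    lazy.open();  // opening the wrapper lazily opens the inner interpreter
  }
  return target.cast(p);
}

// Usage, mirroring the original method:
// SparkInterpreter spark = unwrapAndOpen(
//     getInterpreterInTheSameSessionByClassName(SparkInterpreter.class.getName()),
//     SparkInterpreter.class);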
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.
From the class NoteInterpreterLoaderTest, method testNoteInterpreterCloseForAll:
@Test
public void testNoteInterpreterCloseForAll() throws IOException {
  interpreterSettingManager.setInterpreters("user", "FitstNote",
      interpreterSettingManager.getDefaultInterpreterSettingList());
  interpreterSettingManager.getInterpreterSettings("FitstNote").get(0).getOption()
      .setPerNote(InterpreterOption.SCOPED);
  interpreterSettingManager.setInterpreters("user", "yourFirstNote",
      interpreterSettingManager.getDefaultInterpreterSettingList());
  interpreterSettingManager.getInterpreterSettings("yourFirstNote").get(0).getOption()
      .setPerNote(InterpreterOption.ISOLATED);

  // interpreters are not created before they are accessed
  assertNull(interpreterSettingManager.getInterpreterSettings("FitstNote").get(0)
      .getInterpreterGroup("user", "FitstNote").get("FitstNote"));
  assertNull(interpreterSettingManager.getInterpreterSettings("yourFirstNote").get(0)
      .getInterpreterGroup("user", "yourFirstNote").get("yourFirstNote"));

  Interpreter firstNoteIntp = factory.getInterpreter("user", "FitstNote", "group1.mock1");
  Interpreter yourFirstNoteIntp = factory.getInterpreter("user", "yourFirstNote", "group1.mock1");
  firstNoteIntp.open();
  yourFirstNoteIntp.open();
  assertTrue(((LazyOpenInterpreter) firstNoteIntp).isOpen());
  assertTrue(((LazyOpenInterpreter) yourFirstNoteIntp).isOpen());

  // closing one note leaves the other note's interpreter open
  interpreterSettingManager.closeNote("user", "FitstNote");
  assertFalse(((LazyOpenInterpreter) firstNoteIntp).isOpen());
  assertTrue(((LazyOpenInterpreter) yourFirstNoteIntp).isOpen());

  // reopen
  firstNoteIntp.open();
  assertTrue(((LazyOpenInterpreter) firstNoteIntp).isOpen());
  assertTrue(((LazyOpenInterpreter) yourFirstNoteIntp).isOpen());

  // closing a non-existent user/note is a no-op
  interpreterSettingManager.closeNote("invalid", "Note");
  assertTrue(((LazyOpenInterpreter) firstNoteIntp).isOpen());
  assertTrue(((LazyOpenInterpreter) yourFirstNoteIntp).isOpen());

  // a user id that is merely a substring of "user" must not match either
  interpreterSettingManager.closeNote("u", "Note");
  assertTrue(((LazyOpenInterpreter) firstNoteIntp).isOpen());
  assertTrue(((LazyOpenInterpreter) yourFirstNoteIntp).isOpen());
}
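The close/reopen behavior above follows from the per-note binding modes set at the top of the test. As a rough guide to their documented semantics (setting stands in for one of the InterpreterSetting objects retrieved above):

// "setting" is a stand-in for interpreterSettingManager.getInterpreterSettings(...).get(0).
setting.getOption().setPerNote(InterpreterOption.SCOPED);   // one interpreter process, a separate session per note
setting.getOption().setPerNote(InterpreterOption.ISOLATED); // a separate interpreter process per note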
Use of org.apache.zeppelin.interpreter.LazyOpenInterpreter in project zeppelin by apache.
From the class PigQueryInterpreterTest, method setUp:
@Before
public void setUp() throws InterpreterException {
  Properties properties = new Properties();
  properties.put("zeppelin.pig.execType", "local");
  properties.put("zeppelin.pig.maxResult", "20");

  pigInterpreter = new LazyOpenInterpreter(new PigInterpreter(properties));
  pigQueryInterpreter = new LazyOpenInterpreter(new PigQueryInterpreter(properties));

  List<Interpreter> interpreters = new ArrayList<>();
  interpreters.add(pigInterpreter);
  interpreters.add(pigQueryInterpreter);

  InterpreterGroup group = new InterpreterGroup();
  group.put("note_id", interpreters);
  pigInterpreter.setInterpreterGroup(group);
  pigQueryInterpreter.setInterpreterGroup(group);
  pigInterpreter.open();
  pigQueryInterpreter.open();

  context = InterpreterContext.builder().setParagraphId("paragraphId").build();
}
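A hypothetical continuation of this setup; the Pig script and query below are illustrative, not taken from the original test class. Sharing one InterpreterGroup is what lets PigQueryInterpreter locate the PigInterpreter in the same session (via getInterpreterInTheSameSessionByClassName, as in the SparkSqlInterpreter example above) and reuse its engine state:

// Illustrative only: register an alias through the Pig interpreter...
InterpreterResult result = pigInterpreter.interpret(
    "a = load 'test.csv' using PigStorage(',');", context);
assertEquals(InterpreterResult.Code.SUCCESS, result.code());

// ...then query that alias through the query interpreter, which works
// because both interpreters share the same session state.
result = pigQueryInterpreter.interpret("foreach a generate $0;", context);
assertEquals(InterpreterResult.Code.SUCCESS, result.code());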