Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.
The class Paragraph, method getBindedInterpreter:
public Interpreter getBindedInterpreter() throws InterpreterNotFoundException {
  // Start from the note-level execution context, then attach the
  // paragraph-level user and interpreter group id before the lookup.
  ExecutionContext executionContext = note.getExecutionContext();
  executionContext.setUser(user);
  executionContext.setInterpreterGroupId(interpreterGroupId);
  return this.note.getInterpreterFactory().getInterpreter(intpText, executionContext);
}
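For context, a minimal sketch of how a caller might use this method. The paragraph instance and LOGGER below are illustrative assumptions, not code from the Zeppelin repository:

try {
  // Resolve the interpreter bound to this paragraph's interpreter directive.
  Interpreter interpreter = paragraph.getBindedInterpreter();
  // ... run code via interpreter.interpret(...)
} catch (InterpreterNotFoundException e) {
  // intpText did not match any configured interpreter
  LOGGER.error("No interpreter found for paragraph", e);
}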
Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.
The class Note, method getExecutionContext:
public ExecutionContext getExecutionContext() {
  ExecutionContext executionContext = new ExecutionContext();
  executionContext.setNoteId(id);
  executionContext.setDefaultInterpreterGroup(defaultInterpreterGroup);
  executionContext.setInIsolatedMode(isIsolatedMode());
  executionContext.setStartTime(getStartTime());
  return executionContext;
}
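This page shows two ways of constructing an ExecutionContext: the setter style above, and the (user, noteId, interpreterGroupId) convenience constructor used in the integration tests below. A minimal sketch with placeholder values:

// Setter style, as in Note.getExecutionContext()
ExecutionContext ctx = new ExecutionContext();
ctx.setUser("user1");
ctx.setNoteId("note1");
ctx.setDefaultInterpreterGroup("spark");

// Constructor style, as in the integration tests: (user, noteId, interpreterGroupId)
ExecutionContext ctx2 = new ExecutionContext("user1", "note1", "spark");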
Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.
The class FlinkIntegrationTest, method testInterpreterBasics:
private void testInterpreterBasics() throws IOException, InterpreterException {
  // test FlinkInterpreter
  Interpreter flinkInterpreter = interpreterFactory.getInterpreter("flink",
      new ExecutionContext("user1", "note1", "flink"));
  InterpreterContext context = new InterpreterContext.Builder()
      .setNoteId("note1")
      .setParagraphId("paragraph_1")
      .build();
  InterpreterResult interpreterResult = flinkInterpreter.interpret("1+1", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertTrue(interpreterResult.message().get(0).getData().contains("2"));

  // batch environment (benv)
  interpreterResult = flinkInterpreter.interpret("val data = benv.fromElements(1, 2, 3)\ndata.collect()", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertTrue(interpreterResult.message().get(0).getData().contains("1, 2, 3"));

  // streaming environment (senv)
  interpreterResult = flinkInterpreter.interpret("val data = senv.fromElements(1, 2, 3)\ndata.print()", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());

  // check the flink web UI url on the zeppelin-server side
  InterpreterSetting flinkInterpreterSetting = interpreterSettingManager.getByName("flink");
  assertEquals(1, flinkInterpreterSetting.getAllInterpreterGroups().size());
  assertNotNull(flinkInterpreterSetting.getAllInterpreterGroups().get(0).getWebUrl());
}
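The resolve-then-interpret pattern above recurs throughout these tests; a hypothetical helper that distills it (the method name and the fixed user/note/paragraph ids are assumptions for illustration):

private InterpreterResult run(String interpreterName, String code) throws InterpreterException {
  // Look up (or lazily create) the interpreter for this execution context.
  Interpreter interpreter = interpreterFactory.getInterpreter(interpreterName,
      new ExecutionContext("user1", "note1", "flink"));
  InterpreterContext context = new InterpreterContext.Builder()
      .setNoteId("note1")
      .setParagraphId("paragraph_1")
      .build();
  return interpreter.interpret(code, context);
}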
Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.
The class JdbcIntegrationTest, method testMySql:
@Test
public void testMySql() throws InterpreterException, InterruptedException {
  // point the jdbc interpreter at a local MySQL instance
  InterpreterSetting interpreterSetting = interpreterSettingManager.getInterpreterSettingByName("jdbc");
  interpreterSetting.setProperty("default.driver", "com.mysql.jdbc.Driver");
  interpreterSetting.setProperty("default.url", "jdbc:mysql://localhost:3306/");
  interpreterSetting.setProperty("default.user", "root");
  interpreterSetting.setProperty("default.password", "root");

  // pull in the MySQL driver as a dependency, then restart the interpreter
  Dependency dependency = new Dependency("mysql:mysql-connector-java:5.1.46");
  interpreterSetting.setDependencies(Arrays.asList(dependency));
  interpreterSettingManager.restart(interpreterSetting.getId());
  interpreterSetting.waitForReady(60 * 1000);

  Interpreter jdbcInterpreter = interpreterFactory.getInterpreter("jdbc",
      new ExecutionContext("user1", "note1", "test"));
  assertNotNull("JdbcInterpreter is null", jdbcInterpreter);

  InterpreterContext context = new InterpreterContext.Builder()
      .setNoteId("note1")
      .setParagraphId("paragraph_1")
      .setAuthenticationInfo(AuthenticationInfo.ANONYMOUS)
      .build();
  InterpreterResult interpreterResult = jdbcInterpreter.interpret("show databases;", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());

  // save the query result as a shared resource named table_1
  context.getLocalProperties().put("saveAs", "table_1");
  interpreterResult = jdbcInterpreter.interpret("SELECT 1 as c1, 2 as c2;", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertEquals(1, interpreterResult.message().size());
  assertEquals(InterpreterResult.Type.TABLE, interpreterResult.message().get(0).getType());
  assertEquals("c1\tc2\n1\t2\n", interpreterResult.message().get(0).getData());

  // read table_1 from the python interpreter
  InterpreterSetting pythonInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("python");
  pythonInterpreterSetting.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");
  Interpreter pythonInterpreter = interpreterFactory.getInterpreter("python",
      new ExecutionContext("user1", "note1", "test"));
  assertNotNull("PythonInterpreter is null", pythonInterpreter);

  context = new InterpreterContext.Builder()
      .setNoteId("note1")
      .setParagraphId("paragraph_1")
      .setAuthenticationInfo(AuthenticationInfo.ANONYMOUS)
      .build();
  interpreterResult = pythonInterpreter.interpret("df=z.getAsDataFrame('table_1')\nz.show(df)", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertEquals(1, interpreterResult.message().size());
  assertEquals(InterpreterResult.Type.TABLE, interpreterResult.message().get(0).getType());
  assertEquals("c1\tc2\n1\t2\n", interpreterResult.message().get(0).getData());
}
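The same configure-restart-wait sequence would apply to any other JDBC backend. A hedged sketch for PostgreSQL; the driver class, URL, and Maven coordinates are standard PostgreSQL values, not taken from the Zeppelin test:

InterpreterSetting setting = interpreterSettingManager.getInterpreterSettingByName("jdbc");
setting.setProperty("default.driver", "org.postgresql.Driver");
setting.setProperty("default.url", "jdbc:postgresql://localhost:5432/");
setting.setDependencies(Arrays.asList(new Dependency("org.postgresql:postgresql:42.2.5")));
// Restart so the new driver dependency is downloaded and loaded.
interpreterSettingManager.restart(setting.getId());
setting.waitForReady(60 * 1000);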
Use of org.apache.zeppelin.interpreter.ExecutionContext in project zeppelin by apache.
The class SparkIntegrationTest, method testInterpreterBasics:
private void testInterpreterBasics() throws IOException, InterpreterException, XmlPullParserException {
  // add jars & packages for testing
  InterpreterSetting sparkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark");
  sparkInterpreterSetting.setProperty("spark.jars.packages", "com.maxmind.geoip2:geoip2:2.5.0");
  sparkInterpreterSetting.setProperty("SPARK_PRINT_LAUNCH_COMMAND", "true");
  sparkInterpreterSetting.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1");

  // read the project version from pom.xml to locate the test jar
  MavenXpp3Reader reader = new MavenXpp3Reader();
  Model model = reader.read(new FileReader("pom.xml"));
  sparkInterpreterSetting.setProperty("spark.jars",
      new File("target/zeppelin-interpreter-integration-" + model.getVersion() + ".jar").getAbsolutePath());

  // test SparkInterpreter
  Interpreter sparkInterpreter = interpreterFactory.getInterpreter("spark.spark",
      new ExecutionContext("user1", "note1", "test"));
  InterpreterContext context = new InterpreterContext.Builder()
      .setNoteId("note1")
      .setParagraphId("paragraph_1")
      .build();
  InterpreterResult interpreterResult = sparkInterpreter.interpret("sc.version", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  String detectedSparkVersion = interpreterResult.message().get(0).getData();
  assertTrue(detectedSparkVersion + " doesn't contain " + this.sparkVersion,
      detectedSparkVersion.contains(this.sparkVersion));

  interpreterResult = sparkInterpreter.interpret("sc.range(1,10).sum()", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("45"));

  interpreterResult = sparkInterpreter.interpret("sc.getConf.get(\"spark.user.name\")", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("user1"));

  // test jars & packages can be loaded correctly
  interpreterResult = sparkInterpreter.interpret(
      "import org.apache.zeppelin.interpreter.integration.DummyClass\n" +
      "import com.maxmind.geoip2._", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());

  // test PySparkInterpreter
  Interpreter pySparkInterpreter = interpreterFactory.getInterpreter("spark.pyspark",
      new ExecutionContext("user1", "note1", "test"));
  interpreterResult = pySparkInterpreter.interpret(
      "sqlContext.createDataFrame([(1,'a'),(2,'b')], ['id','name']).registerTempTable('test')", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());

  // test IPySparkInterpreter
  Interpreter ipySparkInterpreter = interpreterFactory.getInterpreter("spark.ipyspark",
      new ExecutionContext("user1", "note1", "test"));
  interpreterResult = ipySparkInterpreter.interpret("sqlContext.table('test').show()", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());

  // test SparkSQLInterpreter
  Interpreter sqlInterpreter = interpreterFactory.getInterpreter("spark.sql",
      new ExecutionContext("user1", "note1", "test"));
  interpreterResult = sqlInterpreter.interpret("select count(1) as c from test", context);
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertEquals(interpreterResult.toString(), InterpreterResult.Type.TABLE, interpreterResult.message().get(0).getType());
  assertEquals(interpreterResult.toString(), "c\n2\n", interpreterResult.message().get(0).getData());

  // test SparkRInterpreter
  Interpreter sparkrInterpreter = interpreterFactory.getInterpreter("spark.r",
      new ExecutionContext("user1", "note1", "test"));
  if (isSpark2() || isSpark3()) {
    interpreterResult = sparkrInterpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", context);
  } else {
    interpreterResult = sparkrInterpreter.interpret("df <- createDataFrame(sqlContext, faithful)\nhead(df)", context);
  }
  assertEquals(interpreterResult.toString(), InterpreterResult.Code.SUCCESS, interpreterResult.code());
  assertEquals(interpreterResult.toString(), InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType());
  assertTrue(interpreterResult.toString(), interpreterResult.message().get(0).getData().contains("eruptions waiting"));
}
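Because every sub-interpreter above is resolved with the same ExecutionContext ("user1", "note1", "test"), they are routed into the same interpreter group, which is why the temp table created under spark.pyspark is visible to spark.sql. A sketch of how that grouping could be asserted, mirroring the flink group check earlier on this page (this assertion is an illustration, not part of the actual test):

InterpreterSetting sparkSetting = interpreterSettingManager.getByName("spark");
// spark.spark, spark.pyspark, spark.ipyspark, spark.sql and spark.r
// should all have landed in a single interpreter group.
assertEquals(1, sparkSetting.getAllInterpreterGroups().size());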