Use of scala.tools.nsc.Settings in project zeppelin by apache.
Class IgniteInterpreter, method open():
@Override
public void open() {
  Settings settings = new Settings();
  URL[] urls = getClassloaderUrls();

  // set classpath: join the current classpath entries and any extra classloader URLs
  PathSetting pathSettings = settings.classpath();
  StringBuilder sb = new StringBuilder();
  for (File f : currentClassPath()) {
    if (sb.length() > 0) {
      sb.append(File.pathSeparator);
    }
    sb.append(f.getAbsolutePath());
  }
  if (urls != null) {
    for (URL u : urls) {
      if (sb.length() > 0) {
        sb.append(File.pathSeparator);
      }
      sb.append(u.getFile());
    }
  }
  pathSettings.v_$eq(sb.toString());
  settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

  // use the current thread's context classloader as the compiler's parent classloader
  settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));

  // -usejavacp: make the JVM classpath visible to the embedded compiler
  BooleanSetting b = (BooleanSetting) settings.usejavacp();
  b.v_$eq(true);
  settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

  out = new ByteArrayOutputStream();
  imain = new IMain(settings, new PrintWriter(out));

  initIgnite();
}
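The same classpath-joining loop recurs in this and the following Zeppelin snippets. As a minimal sketch (the helper class ClasspathJoiner is hypothetical, not part of Zeppelin), the loop could be pulled out into a small utility:

import java.io.File;
import java.net.URL;
import java.util.List;

final class ClasspathJoiner {

  // Joins the absolute paths of the given files and the file parts of the given URLs
  // with the platform path separator, mirroring the loops in open() above.
  static String join(List<File> files, URL[] urls) {
    StringBuilder sb = new StringBuilder();
    for (File f : files) {
      if (sb.length() > 0) {
        sb.append(File.pathSeparator);
      }
      sb.append(f.getAbsolutePath());
    }
    if (urls != null) {
      for (URL u : urls) {
        if (sb.length() > 0) {
          sb.append(File.pathSeparator);
        }
        sb.append(u.getFile());
      }
    }
    return sb.toString();
  }

  private ClasspathJoiner() {
  }
}

With such a helper, each snippet would reduce to pathSettings.v_$eq(ClasspathJoiner.join(currentClassPath(), urls)).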
Use of scala.tools.nsc.Settings in project zeppelin by apache.
Class DepInterpreter, method createIMain():
private void createIMain() {
  Settings settings = new Settings();
  URL[] urls = getClassloaderUrls();

  // set classpath for scala compiler
  PathSetting pathSettings = settings.classpath();
  String classpath = "";
  List<File> paths = currentClassPath();
  for (File f : paths) {
    if (classpath.length() > 0) {
      classpath += File.pathSeparator;
    }
    classpath += f.getAbsolutePath();
  }
  if (urls != null) {
    for (URL u : urls) {
      if (classpath.length() > 0) {
        classpath += File.pathSeparator;
      }
      classpath += u.getFile();
    }
  }
  pathSettings.v_$eq(classpath);
  settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

  // set classloader for scala compiler
  settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));

  // -usejavacp: make the JVM classpath visible to the embedded compiler
  BooleanSetting b = (BooleanSetting) settings.usejavacp();
  b.v_$eq(true);
  settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

  interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
  interpreter.settings_$eq(settings);
  interpreter.createInterpreter();
  intp = Utils.invokeMethod(interpreter, "intp");

  if (Utils.isScala2_10()) {
    Utils.invokeMethod(intp, "setContextClassLoader");
    Utils.invokeMethod(intp, "initializeSynchronous");
  }

  depc = new SparkDependencyContext(getProperty("zeppelin.dep.localrepo"),
      getProperty("zeppelin.dep.additionalRemoteRepository"));

  if (Utils.isScala2_10()) {
    completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
        new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") },
        new Object[] { intp });
  }

  // expose a Java map to the interpreter so objects can be shared between Java and Scala
  interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
  Map<String, Object> binder;
  if (Utils.isScala2_10()) {
    binder = (Map<String, Object>) getValue("_binder");
  } else {
    binder = (Map<String, Object>) getLastObject();
  }
  binder.put("depc", depc);
  interpret("@transient val z = "
      + "_binder.get(\"depc\")"
      + ".asInstanceOf[org.apache.zeppelin.spark.dep.SparkDependencyContext]");
}
Use of scala.tools.nsc.Settings in project zeppelin by apache.
Class FlinkInterpreter, method createSettings():
private Settings createSettings() {
  URL[] urls = getClassloaderUrls();
  Settings settings = new Settings();

  // set classpath
  PathSetting pathSettings = settings.classpath();
  String classpath = "";
  List<File> paths = currentClassPath();
  for (File f : paths) {
    if (classpath.length() > 0) {
      classpath += File.pathSeparator;
    }
    classpath += f.getAbsolutePath();
  }
  if (urls != null) {
    for (URL u : urls) {
      if (classpath.length() > 0) {
        classpath += File.pathSeparator;
      }
      classpath += u.getFile();
    }
  }
  pathSettings.v_$eq(classpath);
  settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

  settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));

  BooleanSetting b = (BooleanSetting) settings.usejavacp();
  b.v_$eq(true);
  settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

  // To prevent 'File name too long' errors on some file systems.
  MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
  numClassFileSetting.v_$eq(128);
  settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

  return settings;
}
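As a side note, the same flags can also be set by letting the compiler parse a command line, which avoids the generated setter names. This is only a sketch under assumptions not taken from the Zeppelin sources: it presumes Scala 2.11/2.12, where -usejavacp and -Xmax-classfile-name are accepted, and the class name SettingsByArguments is made up for illustration.

import scala.Tuple2;
import scala.collection.immutable.List;
import scala.tools.nsc.Settings;

final class SettingsByArguments {

  // Builds a Settings instance by letting the compiler parse a command line,
  // then sets the classpath explicitly (it may contain spaces).
  static Settings create(String classpath) {
    Settings settings = new Settings();
    // processArgumentString returns (success, unprocessed arguments)
    Tuple2<Object, List<String>> result =
        settings.processArgumentString("-usejavacp -Xmax-classfile-name 128");
    if (!(Boolean) result._1()) {
      throw new IllegalArgumentException("Unrecognized compiler options: " + result._2());
    }
    settings.classpath().v_$eq(classpath);
    return settings;
  }

  private SettingsByArguments() {
  }
}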
Use of scala.tools.nsc.Settings in project flink by apache.
Class FlinkILoopTest, method testConfigurationForwarding():
@Test
public void testConfigurationForwarding() throws Exception {
  Configuration configuration = new Configuration();
  configuration.setString("foobar", "foobar");
  FlinkILoop flinkILoop = new FlinkILoop("localhost", 6123, configuration, Option.<String[]>empty());

  final TestPlanExecutor testPlanExecutor = new TestPlanExecutor();
  PowerMockito.mockStatic(PlanExecutor.class);
  BDDMockito.given(PlanExecutor.createRemoteExecutor(
      Matchers.anyString(),
      Matchers.anyInt(),
      Matchers.any(Configuration.class),
      Matchers.any(java.util.List.class),
      Matchers.any(java.util.List.class)))
    .willAnswer(new Answer<PlanExecutor>() {
      @Override
      public PlanExecutor answer(InvocationOnMock invocation) throws Throwable {
        testPlanExecutor.setHost((String) invocation.getArguments()[0]);
        testPlanExecutor.setPort((Integer) invocation.getArguments()[1]);
        testPlanExecutor.setConfiguration((Configuration) invocation.getArguments()[2]);
        testPlanExecutor.setJars((List<String>) invocation.getArguments()[3]);
        testPlanExecutor.setGlobalClasspaths((List<String>) invocation.getArguments()[4]);
        return testPlanExecutor;
      }
    });

  Settings settings = new Settings();
  ((MutableSettings.BooleanSetting) settings.usejavacp()).value_$eq(true);
  flinkILoop.settings_$eq(settings);
  flinkILoop.createInterpreter();

  ExecutionEnvironment env = flinkILoop.scalaBenv().getJavaEnv();
  env.fromElements(1).output(new DiscardingOutputFormat<Integer>());
  env.execute("Test job");

  Configuration forwardedConfiguration = testPlanExecutor.getConfiguration();
  assertEquals(configuration, forwardedConfiguration);
}
Use of scala.tools.nsc.Settings in project kylo by Teradata.
Class SparkScriptEngine, method getInterpreter():
/**
 * Gets the Spark REPL interface to be used.
 *
 * @return the interpreter
 */
@Nonnull
private IMain getInterpreter() {
  if (this.interpreter == null) {
    // Determine engine settings
    final Settings settings = getSettings();

    // Initialize engine
    final ClassLoader parentClassLoader = getClass().getClassLoader();
    final SparkInterpreterBuilder b = this.builder
        .withSettings(settings)
        .withPrintWriter(getPrintWriter())
        .withClassLoader(parentClassLoader);
    final IMain interpreter = b.newInstance();
    interpreter.setContextClassLoader();
    interpreter.initializeSynchronous();

    // Setup environment
    final scala.collection.immutable.List<String> empty = JavaConversions.asScalaBuffer(new ArrayList<String>()).toList();
    final Results.Result result = interpreter.bind("engine", SparkScriptEngine.class.getName(), this, empty);
    if (result instanceof Results.Error$) {
      throw new IllegalStateException("Failed to initialize interpreter");
    }
    this.interpreter = interpreter;
  }
  return this.interpreter;
}
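For context, a minimal standalone sketch of how an IMain built from a Settings instance can be exercised. It is not taken from any of the projects above and assumes Scala 2.11/2.12 on the classpath; the class name ImainSmokeTest is made up for illustration.

import java.io.PrintWriter;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
import scala.tools.nsc.interpreter.Results;
import scala.tools.nsc.settings.MutableSettings;

public class ImainSmokeTest {

  public static void main(String[] args) {
    Settings settings = new Settings();
    // Reuse the JVM classpath for the embedded compiler, as in the snippets above.
    ((MutableSettings.BooleanSetting) settings.usejavacp()).v_$eq(true);

    IMain imain = new IMain(settings, new PrintWriter(System.out, true));
    // interpret() compiles and runs one line of Scala and reports the outcome (Success/Error/Incomplete).
    Results.Result result = imain.interpret("val answer = 21 * 2");
    System.out.println("interpret returned: " + result);
  }
}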