Use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
From the class CassandraInterpreterTest, method should_describe_keyspaces:
@Test
public void should_describe_keyspaces() throws Exception {
  // Given
  String query = "DESCRIBE KEYSPACES;";
  final String expected = reformatHtml(readTestResource("/scalate/DescribeKeyspaces.html"));

  // When
  final InterpreterResult actual = interpreter.interpret(query, intrContext);

  // Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(reformatHtml(actual.message().get(0).getData())).isEqualTo(expected);
}
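The assertions above rely on two helpers defined elsewhere in CassandraInterpreterTest, reformatHtml and readTestResource, whose bodies are not shown on this page. A minimal sketch of what they could look like, assuming reformatHtml only normalizes whitespace between tags and readTestResource loads a classpath resource as UTF-8 text (the implementations below are illustrative, not the project's actual code):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

final class CassandraTestHelpersSketch {
  // Collapse whitespace between tags so rendered HTML can be compared
  // independently of indentation and line breaks.
  static String reformatHtml(String html) {
    return html == null ? null : html.replaceAll(">\\s+<", "><").trim();
  }

  // Load a test resource from the classpath as a UTF-8 string.
  static String readTestResource(String path) throws IOException {
    try (InputStream in = CassandraTestHelpersSketch.class.getResourceAsStream(path)) {
      if (in == null) {
        throw new IOException("Test resource not found: " + path);
      }
      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      byte[] chunk = new byte[8192];
      int n;
      while ((n = in.read(chunk)) != -1) {
        buffer.write(chunk, 0, n);
      }
      return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
    }
  }
}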
Use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
From the class CassandraInterpreterTest, method should_execute_statement_with_consistency_option:
@Test
public void should_execute_statement_with_consistency_option() throws Exception {
  // Given
  String statement = "@consistency=THREE\n"
      + "SELECT * FROM zeppelin.artists LIMIT 1;";

  // When
  final InterpreterResult actual = interpreter.interpret(statement, intrContext);

  // Then
  assertThat(actual.code()).isEqualTo(Code.ERROR);
  assertThat(actual.message().get(0).getData())
      .contains("Not enough replicas available for query at consistency THREE (3 required but only 1 alive)");
}
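The ERROR outcome is expected here because the embedded test cluster runs a single node, so consistency THREE can never be satisfied. For contrast, a hypothetical companion test (not part of the project's suite) using an achievable consistency level would expect SUCCESS:

@Test
public void should_execute_statement_with_achievable_consistency_option() throws Exception {
  // Given: ONE is satisfiable on a single-node test cluster.
  String statement = "@consistency=ONE\n"
      + "SELECT * FROM zeppelin.artists LIMIT 1;";

  // When
  final InterpreterResult actual = interpreter.interpret(statement, intrContext);

  // Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
}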
Use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
From the class CassandraInterpreterTest, method should_describe_udt:
@Test
public void should_describe_udt() throws Exception {
  // Given
  String query = "DESCRIBE TYPE live_data.address;";
  final String expected = reformatHtml(readTestResource("/scalate/DescribeType_live_data_address.html"));

  // When
  final InterpreterResult actual = interpreter.interpret(query, intrContext);

  // Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(reformatHtml(actual.message().get(0).getData())).isEqualTo(expected);
}
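Both DESCRIBE tests on this page follow the same Given/When/Then shape. A possible refactoring, sketched here only and not taken from the project, would extract the shared steps into a helper that takes the DESCRIBE statement and the expected Scalate resource:

private void assertDescribeRendersAs(String describeStatement, String expectedResource) throws Exception {
  // Given
  final String expected = reformatHtml(readTestResource(expectedResource));
  // When
  final InterpreterResult actual = interpreter.interpret(describeStatement, intrContext);
  // Then
  assertThat(actual.code()).isEqualTo(Code.SUCCESS);
  assertThat(reformatHtml(actual.message().get(0).getData())).isEqualTo(expected);
}

// Usage inside the tests above:
// assertDescribeRendersAs("DESCRIBE KEYSPACES;", "/scalate/DescribeKeyspaces.html");
// assertDescribeRendersAs("DESCRIBE TYPE live_data.address;", "/scalate/DescribeType_live_data_address.html");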
Use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
From the class AlluxioInterpreterTest, method copyFromLocalLargeTest:
@Test
public void copyFromLocalLargeTest() throws IOException, AlluxioException {
  // Write an increasing byte pattern of SIZE_BYTES to a local file.
  File testFile = new File(mLocalAlluxioCluster.getAlluxioHome() + "/testFile");
  testFile.createNewFile();
  FileOutputStream fos = new FileOutputStream(testFile);
  byte[] toWrite = BufferUtils.getIncreasingByteArray(SIZE_BYTES);
  fos.write(toWrite);
  fos.close();

  // Copy the file into Alluxio through the interpreter and check the command output.
  InterpreterResult output = alluxioInterpreter.interpret(
      "copyFromLocal " + testFile.getAbsolutePath() + " /testFile", null);
  Assert.assertEquals("Copied " + testFile.getAbsolutePath() + " to /testFile\n\n",
      output.message().get(0).getData());

  // Verify the copied file's length and contents.
  long fileLength = fs.getStatus(new AlluxioURI("/testFile")).getLength();
  Assert.assertEquals(SIZE_BYTES, fileLength);

  FileInStream fStream = fs.openFile(new AlluxioURI("/testFile"));
  byte[] read = new byte[SIZE_BYTES];
  fStream.read(read);
  Assert.assertTrue(BufferUtils.equalIncreasingByteArray(SIZE_BYTES, read));
}
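Note that the test reads the copied file back with a single fStream.read(read) call; InputStream.read(byte[]) is allowed to return fewer bytes than requested, so the test relies on Alluxio filling the buffer in one call. A more defensive variant of that final check, shown only as an illustration within the same test context, would loop until the buffer is full or the stream ends:

try (FileInStream in = fs.openFile(new AlluxioURI("/testFile"))) {
  byte[] read = new byte[SIZE_BYTES];
  int offset = 0;
  while (offset < SIZE_BYTES) {
    int n = in.read(read, offset, SIZE_BYTES - offset);
    if (n < 0) {
      break; // end of stream before the buffer was filled
    }
    offset += n;
  }
  Assert.assertEquals(SIZE_BYTES, offset);
  Assert.assertTrue(BufferUtils.equalIncreasingByteArray(SIZE_BYTES, read));
}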
Use of org.apache.zeppelin.interpreter.InterpreterResult in project zeppelin by apache.
From the class ScaldingInterpreter, method interpretInput:
public InterpreterResult interpretInput(String[] lines) {
  // Append print("") so the snippet never ends with a comment.
  // See https://github.com/NFLabs/zeppelin/issues/151
  String[] linesToRun = new String[lines.length + 1];
  for (int i = 0; i < lines.length; i++) {
    linesToRun[i] = lines[i];
  }
  linesToRun[lines.length] = "print(\"\")";

  out.reset();
  // These two lines were moved from open() to this method:
  // if they stay in open(), the captured output is incomplete.
  PrintStream printStream = new PrintStream(out, true);
  Console.setOut(printStream);

  Code r = null;
  String incomplete = "";
  boolean inComment = false;

  for (int l = 0; l < linesToRun.length; l++) {
    String s = linesToRun[l];
    // If the next line starts with "." (but not ".." or "./"), it is treated as a
    // method invocation on the current expression, so the current line is buffered
    // and interpreted together with it.
    if (l + 1 < linesToRun.length) {
      String nextLine = linesToRun[l + 1].trim();
      boolean continuation = false;
      if (nextLine.isEmpty()                    // skip empty lines
          || nextLine.startsWith("//")          // and comments
          || nextLine.startsWith("}")
          || nextLine.startsWith("object")) {   // "}" and "object" cover Scala companion objects
        continuation = true;
      } else if (!inComment && nextLine.startsWith("/*")) {
        inComment = true;
        continuation = true;
      } else if (inComment && nextLine.lastIndexOf("*/") >= 0) {
        inComment = false;
        continuation = true;
      } else if (nextLine.length() > 1
          && nextLine.charAt(0) == '.'
          && nextLine.charAt(1) != '.'          // not ".."
          && nextLine.charAt(1) != '/') {       // not "./"
        continuation = true;
      } else if (inComment) {
        continuation = true;
      }
      if (continuation) {
        incomplete += s + "\n";
        continue;
      }
    }

    scala.tools.nsc.interpreter.Results.Result res = null;
    try {
      res = interpreter.intp().interpret(incomplete + s);
    } catch (Exception e) {
      logger.error("Interpreter exception: ", e);
      return new InterpreterResult(Code.ERROR, e.getMessage());
    }

    r = getResultCode(res);
    if (r == Code.ERROR) {
      Console.flush();
      return new InterpreterResult(r, out.toString());
    } else if (r == Code.INCOMPLETE) {
      incomplete += s + "\n";
    } else {
      incomplete = "";
    }
  }

  if (r == Code.INCOMPLETE) {
    return new InterpreterResult(r, "Incomplete expression");
  } else {
    Console.flush();
    return new InterpreterResult(r, out.toString());
  }
}
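The getResultCode helper referenced above is not shown on this page. In Zeppelin's Scala-based interpreters it maps the Scala REPL's result to an InterpreterResult.Code; a sketch of such a mapping, assumed rather than copied from ScaldingInterpreter:

private Code getResultCode(scala.tools.nsc.interpreter.Results.Result r) {
  if (r instanceof scala.tools.nsc.interpreter.Results.Success$) {
    return Code.SUCCESS;      // the snippet compiled and ran
  } else if (r instanceof scala.tools.nsc.interpreter.Results.Incomplete$) {
    return Code.INCOMPLETE;   // the REPL needs more input to form a complete expression
  } else {
    return Code.ERROR;        // compilation or runtime error
  }
}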