Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata: class SparkShellIT, method executeScript.
/**
 * Executes the specified Scala script against the Spark Shell transform endpoint.
 *
 * @param script             the Scala script to execute
 * @param async              {@code true} to execute asynchronously
 * @param expectedStatusCode the expected HTTP status code
 * @return the transform result
 */
private TransformResponse executeScript(@Nonnull final String script, final boolean async, final int expectedStatusCode) {
    // Build the request body
    final TransformRequest transformRequest = new TransformRequest();
    transformRequest.setAsync(async);
    transformRequest.setScript(script);

    // POST the script to the transform endpoint
    final Response httpResponse = given(SparkShellProxyController.BASE)
        .body(transformRequest)
        .when()
        .post(SparkShellProxyController.TRANSFORM);

    // Check the HTTP status, then deserialize the body
    httpResponse.then().statusCode(expectedStatusCode);
    return httpResponse.as(TransformResponse.class);
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata: class SparkShellIT, method testRuntimeError.
/**
 * Verify the error message produced when a script fails at runtime.
 */
@Test
public void testRuntimeError() {
    // Querying a table whose name is not a valid identifier triggers an AnalysisException at runtime
    final String script = "sqlContext.sql(\"SELECT * FROM invalid-table-name\")";
    final TransformResponse result = executeScript(script, HTTP_INTERNAL_ERROR);

    Assert.assertEquals(TransformResponse.Status.ERROR, result.getStatus());
    Assert.assertEquals("AnalysisException: cannot recognize input near 'invalid' '-' 'table' in from source; line 1 pos 21",
                        result.getMessage());
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata: class SparkShellIT, method testScriptDownload.
/**
 * Verify downloading the result of a script as a ZIP archive.
 *
 * <p>Executes the script asynchronously, saves the result as JSON, downloads the saved
 * result, and checks that the ZIP contains exactly a {@code _SUCCESS} marker followed by
 * the expected JSON records.</p>
 */
@Test
public void testScriptDownload() throws Exception {
    // Execute the script asynchronously and save the result as JSON
    final TransformResponse transform = executeScript(SCRIPT, true);
    final SaveResponse save = saveScript(transform.getTable(), "json", null, null);
    Assert.assertEquals(SaveResponse.Status.SUCCESS, save.getStatus());
    Assert.assertEquals("./zip", save.getLocation());

    // Download the saved result as a raw stream
    final InputStream stream = given(SparkShellProxyController.BASE)
        .accept(ContentType.ANY)
        .get(SparkShellProxyController.TRANSFORM_DOWNLOAD, transform.getTable(), save.getId())
        .then().statusCode(HTTP_OK)
        .extract().asInputStream();

    // try-with-resources ensures the ZIP (and underlying HTTP stream) is closed even if an assertion fails
    try (final ZipInputStream zip = new ZipInputStream(stream)) {
        // First entry is the empty _SUCCESS marker
        final ZipEntry success = zip.getNextEntry();
        Assert.assertNotNull("Expected a _SUCCESS entry", success);
        Assert.assertEquals("_SUCCESS", success.getName());
        Assert.assertEquals(-1, zip.read());
        zip.closeEntry();

        // Second entry contains the JSON records
        zip.getNextEntry();
        Assert.assertEquals("{\"id\":1,\"value\":\"a\"}\n{\"id\":2,\"value\":\"b\"}\n{\"id\":3,\"value\":\"c\"}\n",
                            CharStreams.toString(new InputStreamReader(zip)));
        zip.closeEntry();

        Assert.assertNull("Expected no more zip entries", zip.getNextEntry());
    }
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata: class SparkShellIT, method testCompileError.
/**
 * Verify the error message produced when a script fails to compile.
 */
@Test
public void testCompileError() {
    // Passing an Int where a Column is expected triggers a scalac type-mismatch error
    final String script = "import org.apache.spark.sql._\nfunctions.abs(-1)";
    final TransformResponse result = executeScript(script, HTTP_INTERNAL_ERROR);

    Assert.assertEquals(TransformResponse.Status.ERROR, result.getStatus());
    Assert.assertEquals("error: type mismatch; in <console> at line number 9", result.getMessage());
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata: class SparkFileSchemaParserService, method doParse.
/**
 * Delegates to the Spark Shell service to load the file into a temporary table and derive its schema.
 *
 * <p>The input stream is copied to a temp file, submitted as a transform request, and the
 * transform result is polled every 100ms until it succeeds or reports an error. The temp
 * file is always deleted on exit.</p>
 *
 * @param inputStream     the file contents to parse; fully consumed by this method
 * @param fileType        the format of the file (used to build the transform request)
 * @param tableSchemaType the kind of schema to produce
 * @return the schema derived from the file
 * @throws IOException if the Spark Shell reports an error or any unexpected failure occurs
 */
public Schema doParse(InputStream inputStream, SparkFileType fileType, TableSchemaType tableSchemaType) throws IOException {
    File tempFile = toFile(inputStream);
    try {
        SparkShellProcess shellProcess = shellProcessManager.getSystemProcess();
        TransformResponse response = restClient.transform(shellProcess, createTransformRequest(tempFile, fileType));
        // Poll until the transform finishes. NOTE(review): there is no timeout, so a transform
        // stuck in PENDING would loop forever — confirm whether an upper bound is desired.
        while (response.getStatus() != TransformResponse.Status.SUCCESS) {
            if (response.getStatus() == TransformResponse.Status.ERROR) {
                throw new IOException("Failed to process data [" + response.getMessage() + "]");
            } else {
                Uninterruptibles.sleepUninterruptibly(100L, TimeUnit.MILLISECONDS);
            }
            final Optional<TransformResponse> optionalResponse = restClient.getTransformResult(shellProcess, response.getTable());
            if (optionalResponse.isPresent()) {
                response = optionalResponse.get();
            }
        }
        return toSchema(response.getResults(), fileType, tableSchemaType);
    } catch (IOException e) {
        // Re-throw as-is so the specific Spark error message above is not masked by the
        // generic "Unexpected exception" wrapper.
        throw e;
    } catch (Exception e) {
        // Pass the throwable as the final argument so SLF4J logs the full stack trace
        log.error("Error parsing file {}", fileType, e);
        throw new IOException("Unexpected exception. Verify file is the proper format", e);
    } finally {
        if (!tempFile.delete()) {
            log.error("The temp file was not deleted successfully: {}", tempFile.getName());
        }
    }
}
Aggregations