Search in sources:

Example 6 with TransformResponse

use of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata.

From the class SparkShellIT, method executeScript:

/**
 * Submits the specified Scala script to the Spark Shell transform endpoint and checks the response code.
 *
 * @param script             the Scala script to run
 * @param async              {@code true} to execute asynchronously
 * @param expectedStatusCode the HTTP status code the response is required to have
 * @return the deserialized transform result
 */
private TransformResponse executeScript(@Nonnull final String script, final boolean async, final int expectedStatusCode) {
    // Build the transform request payload
    final TransformRequest transformRequest = new TransformRequest();
    transformRequest.setScript(script);
    transformRequest.setAsync(async);
    // Submit the request to the transform endpoint
    final Response httpResponse = given(SparkShellProxyController.BASE).body(transformRequest).when().post(SparkShellProxyController.TRANSFORM);
    // Assert the expected status code before deserializing the body
    httpResponse.then().statusCode(expectedStatusCode);
    return httpResponse.as(TransformResponse.class);
}
Also used : Response(com.jayway.restassured.response.Response) TransformResponse(com.thinkbiganalytics.spark.rest.model.TransformResponse) SaveResponse(com.thinkbiganalytics.spark.rest.model.SaveResponse) TransformRequest(com.thinkbiganalytics.spark.rest.model.TransformRequest)

Example 7 with TransformResponse

use of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata.

From the class SparkShellIT, method testRuntimeError:

/**
 * Verifies the error message produced when a script fails at runtime.
 */
@Test
public void testRuntimeError() {
    // Querying a table whose name is not valid SQL triggers an AnalysisException on the server
    final String script = "sqlContext.sql(\"SELECT * FROM invalid-table-name\")";
    final TransformResponse transformResponse = executeScript(script, HTTP_INTERNAL_ERROR);
    Assert.assertEquals(TransformResponse.Status.ERROR, transformResponse.getStatus());
    Assert.assertEquals("AnalysisException: cannot recognize input near 'invalid' '-' 'table' in from source; line 1 pos 21", transformResponse.getMessage());
}
Also used : TransformResponse(com.thinkbiganalytics.spark.rest.model.TransformResponse) Test(org.junit.Test)

Example 8 with TransformResponse

use of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata.

From the class SparkShellIT, method testScriptDownload:

/**
 * Verifies that the result of a script can be downloaded as a ZIP archive.
 *
 * <p>Runs the script asynchronously, saves the result as JSON, downloads the archive, and checks
 * that it contains an empty {@code _SUCCESS} marker entry followed by the expected JSON records.</p>
 */
@Test
public void testScriptDownload() throws Exception {
    // Execute the script asynchronously and save the result as JSON
    final TransformResponse transform = executeScript(SCRIPT, true);
    final SaveResponse save = saveScript(transform.getTable(), "json", null, null);
    Assert.assertEquals(SaveResponse.Status.SUCCESS, save.getStatus());
    Assert.assertEquals("./zip", save.getLocation());
    // Download the saved result as a raw stream
    final InputStream stream = given(SparkShellProxyController.BASE).accept(ContentType.ANY).get(SparkShellProxyController.TRANSFORM_DOWNLOAD, transform.getTable(), save.getId()).then().statusCode(HTTP_OK).extract().asInputStream();
    // try-with-resources: the original never closed the zip/underlying stream, leaking it on assertion failure
    try (final ZipInputStream zip = new ZipInputStream(stream)) {
        // First entry must be the empty "_SUCCESS" marker written by Spark
        final ZipEntry success = zip.getNextEntry();
        Assert.assertEquals("_SUCCESS", success.getName());
        Assert.assertEquals(-1, zip.read());
        zip.closeEntry();
        // Second entry holds the JSON-serialized rows, one record per line
        zip.getNextEntry();
        Assert.assertEquals("{\"id\":1,\"value\":\"a\"}\n{\"id\":2,\"value\":\"b\"}\n{\"id\":3,\"value\":\"c\"}\n", CharStreams.toString(new InputStreamReader(zip)));
        zip.closeEntry();
        Assert.assertNull("Expected no more zip entries", zip.getNextEntry());
    }
}
Also used : ZipInputStream(java.util.zip.ZipInputStream) InputStreamReader(java.io.InputStreamReader) SaveResponse(com.thinkbiganalytics.spark.rest.model.SaveResponse) ZipInputStream(java.util.zip.ZipInputStream) InputStream(java.io.InputStream) ZipEntry(java.util.zip.ZipEntry) TransformResponse(com.thinkbiganalytics.spark.rest.model.TransformResponse) Test(org.junit.Test)

Example 9 with TransformResponse

use of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata.

From the class SparkShellIT, method testCompileError:

/**
 * Verifies the error message produced when a script fails to compile.
 */
@Test
public void testCompileError() {
    // abs(-1) returns an Int where a Column is expected, producing a Scala type-mismatch error
    final String script = "import org.apache.spark.sql._\nfunctions.abs(-1)";
    final TransformResponse transformResponse = executeScript(script, HTTP_INTERNAL_ERROR);
    Assert.assertEquals(TransformResponse.Status.ERROR, transformResponse.getStatus());
    Assert.assertEquals("error: type mismatch; in <console> at line number 9", transformResponse.getMessage());
}
Also used : TransformResponse(com.thinkbiganalytics.spark.rest.model.TransformResponse) Test(org.junit.Test)

Example 10 with TransformResponse

use of com.thinkbiganalytics.spark.rest.model.TransformResponse in project kylo by Teradata.

From the class SparkFileSchemaParserService, method doParse:

/**
 * Delegates to the Spark Shell service to load the file into a temporary table and derive its schema.
 *
 * @param inputStream     the file contents to parse; spilled to a temporary file for the transform request
 * @param fileType        the format of the file
 * @param tableSchemaType the type of schema to produce
 * @return the parsed schema
 * @throws IOException if the transform reports an error or any unexpected failure occurs
 */
public Schema doParse(InputStream inputStream, SparkFileType fileType, TableSchemaType tableSchemaType) throws IOException {
    File tempFile = toFile(inputStream);
    try {
        SparkShellProcess shellProcess = shellProcessManager.getSystemProcess();
        TransformResponse response = restClient.transform(shellProcess, createTransformRequest(tempFile, fileType));
        // Poll until the transform completes; non-terminal statuses are re-fetched after a short sleep.
        // NOTE(review): there is no upper bound on this loop — it spins forever if the shell never
        // reaches a terminal status; consider adding a timeout.
        while (response.getStatus() != TransformResponse.Status.SUCCESS) {
            if (response.getStatus() == TransformResponse.Status.ERROR) {
                throw new IOException("Failed to process data [" + response.getMessage() + "]");
            } else {
                Uninterruptibles.sleepUninterruptibly(100L, TimeUnit.MILLISECONDS);
            }
            final Optional<TransformResponse> optionalResponse = restClient.getTransformResult(shellProcess, response.getTable());
            if (optionalResponse.isPresent()) {
                response = optionalResponse.get();
            }
        }
        return toSchema(response.getResults(), fileType, tableSchemaType);
    } catch (IOException e) {
        // Rethrow as-is: the broad catch below used to swallow the informative
        // "Failed to process data [...]" message and replace it with a generic one.
        log.error("Error parsing file {}: {}", fileType, e.getMessage());
        throw e;
    } catch (Exception e) {
        log.error("Error parsing file {}: {}", fileType, e.getMessage());
        throw new IOException("Unexpected exception. Verify file is the proper format", e);
    } finally {
        // Best-effort cleanup of the temp file; log (parameterized, per SLF4J convention) if it fails
        if (!tempFile.delete()) {
            log.error("The temp file was not deleted successfully: {}", tempFile.getName());
        }
    }
}
Also used : SparkShellProcess(com.thinkbiganalytics.spark.shell.SparkShellProcess) TransformResponse(com.thinkbiganalytics.spark.rest.model.TransformResponse) IOException(java.io.IOException) File(java.io.File) IOException(java.io.IOException)

Aggregations

TransformResponse (com.thinkbiganalytics.spark.rest.model.TransformResponse)23 Test (org.junit.Test)12 TransformRequest (com.thinkbiganalytics.spark.rest.model.TransformRequest)8 Nonnull (javax.annotation.Nonnull)7 Response (javax.ws.rs.core.Response)7 SaveResponse (com.thinkbiganalytics.spark.rest.model.SaveResponse)4 DataSet (com.thinkbiganalytics.spark.DataSet)3 TransformJob (com.thinkbiganalytics.spark.metadata.TransformJob)3 TransformQueryResult (com.thinkbiganalytics.spark.rest.model.TransformQueryResult)3 TransformService (com.thinkbiganalytics.spark.service.TransformService)3 InputStream (java.io.InputStream)3 List (java.util.List)3 StructType (org.apache.spark.sql.types.StructType)3 SparkContextService (com.thinkbiganalytics.spark.SparkContextService)2 SparkScriptEngine (com.thinkbiganalytics.spark.repl.SparkScriptEngine)2 MissingResourceException (java.util.MissingResourceException)2 ScriptException (javax.script.ScriptException)2 SparkContext (org.apache.spark.SparkContext)2 StorageLevel (org.apache.spark.storage.StorageLevel)2 NamedParam (scala.tools.nsc.interpreter.NamedParam)2