Use of com.thinkbiganalytics.spark.rest.model.SaveResponse in project kylo by Teradata: the saveError method of the class SparkShellProxyController.
/**
 * Builds a {@link WebApplicationException} carrying a {@link SaveResponse} entity that describes a failed save.
 *
 * <p>Example:
 * <code>
 * throw saveError(Response.Status.BAD_REQUEST, "save.error", e);
 * </code></p>
 *
 * @param status HTTP response status
 * @param key resource key or error message
 * @param cause the cause
 * @return the error response
 */
@Nonnull
private WebApplicationException saveError(@Nonnull final Response.Status status, @Nonnull final String key, @Nullable final Throwable cause) {
    // Build the error entity; carry over the save id when the cause knows it
    final SaveResponse entity = new SaveResponse();
    entity.setStatus(SaveResponse.Status.ERROR);
    entity.setId((cause instanceof SparkShellSaveException) ? ((SparkShellSaveException) cause).getId() : null);

    // Resolve the message from the resource bundle, falling back to the raw key
    String message;
    try {
        message = STRINGS.getString(key);
    } catch (final MissingResourceException e) {
        log.warn("Missing resource message: {}", key, e);
        message = key;
    }
    entity.setMessage(message);

    // Wrap the entity in a JSON response with the requested status
    final Response response = Response.status(status)
        .entity(entity)
        .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON)
        .build();
    return (cause != null) ? new WebApplicationException(cause, response) : new WebApplicationException(response);
}
Use of com.thinkbiganalytics.spark.rest.model.SaveResponse in project kylo by Teradata: the getSave method of the class AbstractTransformController.
/**
 * Requests the status of a save.
 *
 * @param id the save id
 * @return the save status
 */
@GET
@Path("{table}/save/{save}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Fetches the status of a save")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the status of the save.", response = SaveResponse.class), @ApiResponse(code = 404, message = "The transformation or save does not exist.", response = SaveResponse.class), @ApiResponse(code = 500, message = "There was a problem accessing the data.", response = SaveResponse.class) })
@Nonnull
public Response getSave(@Nonnull @PathParam("save") final String id) {
    try {
        final SaveJob job = transformService.getSaveJob(id, false);
        final SaveResponse response = new SaveResponse();
        response.setId(job.getGroupId());

        if (!job.isDone()) {
            // Still running: report progress
            response.setStatus(SaveResponse.Status.PENDING);
            response.setProgress(job.progress());
        } else {
            response.setStatus(SaveResponse.Status.SUCCESS);
            final SaveResult result = job.get();
            if (result.getPath() != null) {
                // A downloadable result exists at the relative zip endpoint
                response.setLocation("./zip");
            } else {
                // No downloadable result; fetch with remove=true — presumably to discard the finished job. TODO confirm
                transformService.getSaveJob(id, true);
            }
        }
        return Response.ok(response).build();
    } catch (final IllegalArgumentException e) {
        // Unknown save id
        return error(Response.Status.NOT_FOUND, "getSave.notFound");
    } catch (final Exception e) {
        // Report the failure with the best available message
        final SaveResponse failure = new SaveResponse();
        failure.setId(id);
        failure.setStatus(SaveResponse.Status.ERROR);
        failure.setMessage((e.getMessage() != null) ? e.getMessage() : e.getClass().getSimpleName());
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(failure).build();
    }
}
Use of com.thinkbiganalytics.spark.rest.model.SaveResponse in project kylo by Teradata: the submitSaveJob method of the class TransformService.
/**
 * Submits the specified task for saving a transformation and returns the result.
 *
 * @param task supplies the save result when the job executes
 * @return a pending save response identifying the submitted job
 */
@Nonnull
private SaveResponse submitSaveJob(@Nonnull final Supplier<SaveResult> task) {
    log.entry(task);

    // Schedule the save job under a freshly generated table name
    final String destination = newTableName();
    tracker.submitJob(new SaveJob(destination, task, engine.getSparkContext()));

    // The job was only just submitted, so report it as pending with zero progress
    final SaveResponse pending = new SaveResponse();
    pending.setId(destination);
    pending.setProgress(0.0);
    pending.setStatus(SaveResponse.Status.PENDING);
    return log.exit(pending);
}
Use of com.thinkbiganalytics.spark.rest.model.SaveResponse in project kylo by Teradata: the testScriptDownload method of the class SparkShellIT.
/**
 * Verify downloading the result of a script.
 *
 * <p>Executes a script, saves it as JSON, then downloads the save as a ZIP archive and
 * verifies the archive contains exactly a Hadoop-style {@code _SUCCESS} marker followed
 * by one JSON data entry.</p>
 */
@Test
public void testScriptDownload() throws Exception {
    // Execute the script and save the result in JSON format
    final TransformResponse transform = executeScript(SCRIPT, true);
    final SaveResponse save = saveScript(transform.getTable(), "json", null, null);
    Assert.assertEquals(SaveResponse.Status.SUCCESS, save.getStatus());
    Assert.assertEquals("./zip", save.getLocation());

    // Download the saved result
    final InputStream stream = given(SparkShellProxyController.BASE)
        .accept(ContentType.ANY)
        .get(SparkShellProxyController.TRANSFORM_DOWNLOAD, transform.getTable(), save.getId())
        .then().statusCode(HTTP_OK)
        .extract().asInputStream();

    // Close the streams even when an assertion fails (original leaked them)
    try (final ZipInputStream zip = new ZipInputStream(stream)) {
        // First entry must be the empty _SUCCESS marker; guard against NPE on a missing entry
        final ZipEntry success = zip.getNextEntry();
        Assert.assertNotNull("Expected a _SUCCESS entry", success);
        Assert.assertEquals("_SUCCESS", success.getName());
        Assert.assertEquals(-1, zip.read());
        zip.closeEntry();

        // Second entry holds the JSON rows
        Assert.assertNotNull("Expected a data entry", zip.getNextEntry());
        Assert.assertEquals("{\"id\":1,\"value\":\"a\"}\n{\"id\":2,\"value\":\"b\"}\n{\"id\":3,\"value\":\"c\"}\n", CharStreams.toString(new InputStreamReader(zip)));
        zip.closeEntry();

        Assert.assertNull("Expected no more zip entries", zip.getNextEntry());
    }
}
Use of com.thinkbiganalytics.spark.rest.model.SaveResponse in project kylo by Teradata: the getSaveResponse method of the class SparkShellProxyController.
/**
 * Gets the save response from the specified supplier.
 *
 * @param supplier provides the save response, if one is available
 * @return an HTTP 200 response wrapping the save response
 * @throws WebApplicationException if the save failed or no save response exists
 */
@Nonnull
private Response getSaveResponse(@Nonnull final Supplier<Optional<SaveResponse>> supplier) {
    // Invoke the supplier, translating failures into save error responses
    final Optional<SaveResponse> result;
    try {
        result = supplier.get();
    } catch (final SparkShellSaveException e) {
        // Use the exception message (with text matched by EXCEPTION stripped) when one exists
        final String key = (e.getMessage() != null) ? EXCEPTION.matcher(e.getMessage()).replaceAll("") : SparkShellProxyResources.SAVE_ERROR;
        throw saveError(Response.Status.INTERNAL_SERVER_ERROR, key, e);
    } catch (final Exception e) {
        throw saveError(Response.Status.INTERNAL_SERVER_ERROR, SparkShellProxyResources.SAVE_ERROR, e);
    }

    // Wrap the response, or signal not-found when the supplier produced nothing
    return result
        .map(saveResponse -> Response.ok(saveResponse).build())
        .orElseThrow(() -> transformError(Response.Status.NOT_FOUND, SparkShellProxyResources.SAVE_NOT_FOUND, null));
}
Aggregations