Example use of javax.ws.rs.core.StreamingOutput from project ice (JBEI): class FileResource, method downloadSequence.
@GET
@Path("{partId}/sequence/{type}")
public Response downloadSequence(@PathParam("partId") final String partId,
                                 @PathParam("type") final String downloadType,
                                 @DefaultValue("-1") @QueryParam("remoteId") long remoteId,
                                 @QueryParam("sid") String sid) {
    // Fall back to the "sid" query parameter when no session id is already available.
    if (StringUtils.isEmpty(sessionId))
        sessionId = sid;
    final String userId = getUserId(sessionId);

    // Resolve the sequence source: -1 (the default) means a local part,
    // anything else refers to an entry on a remote partner instance.
    final InputStreamWrapper wrapper;
    if (remoteId == -1) {
        wrapper = new PartSequence(userId, partId).toFile(SequenceFormat.fromString(downloadType), true);
    } else {
        wrapper = new RemoteSequence(remoteId, Long.decode(partId)).get(downloadType);
    }

    // Stream the sequence bytes to the client with the file name header attached.
    final StreamingOutput stream = output -> IOUtils.copy(wrapper.getInputStream(), output);
    return addHeaders(Response.ok(stream), wrapper.getName());
}
Example use of javax.ws.rs.core.StreamingOutput from project ice (JBEI): class FileResource, method downloadExportedFile.
@GET
@Path("/exports/{fileId}")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
// Downloads a previously generated export archive for the current user.
// Returns 404 when no archive matching this user/fileId pair exists.
public Response downloadExportedFile(@PathParam("fileId") String fileId) {
    String userId = requireUserId();
    final java.nio.file.Path tmpDir = Paths.get(Utils.getConfigValue(ConfigurationKey.TEMPORARY_DIRECTORY));
    // Archive name is keyed by user and export id so users only reach their own exports.
    String fileName = userId + "_" + fileId + "_export-data.zip";
    final java.nio.file.Path file = Paths.get(tmpDir.toString(), "export", fileName);
    if (!Files.exists(file))
        return super.respond(Response.Status.NOT_FOUND);
    // Stream the file directly to the client. The previous implementation read the
    // entire archive into a byte[] (FileUtils.readFileToByteArray) before copying,
    // holding the whole export on the heap; Files.copy streams with constant memory.
    StreamingOutput stream = output -> Files.copy(file, output);
    return addHeaders(Response.ok(stream), "ice-export-data.zip");
}
Example use of javax.ws.rs.core.StreamingOutput from project indy (Commonjava): class PathMappedResource, method get.
@ApiOperation("Get specified path.")
@ApiResponse(code = 200, message = "Operation finished.")
@GET
@Path(CONCRETE_CONTENT_PATH)
// Streams the content stored at the given path-mapped location.
// Returns 404 when the underlying storage reports the path does not exist;
// all other failures are converted to an error response by responseHelper.
public Response get(@PathParam("packageType") final String packageType,
                    @ApiParam(allowableValues = "hosted,group,remote", required = true) @PathParam("type") final String type,
                    @ApiParam(required = true) @PathParam("name") final String name,
                    @PathParam("path") final String path,
                    @Context final HttpServletRequest request,
                    @Context final SecurityContext securityContext) {
    try {
        final InputStream inputStream = controller.get(packageType, type, name, path);
        // Close the source stream when copying finishes (or fails). The previous
        // implementation never closed it, leaking the underlying storage handle
        // on every request.
        Response.ResponseBuilder builder = Response.ok((StreamingOutput) outputStream -> {
            try (InputStream in = inputStream) {
                IOUtils.copy(in, outputStream);
            }
        });
        return builder.header(ApplicationHeader.content_type.key(), mimeTyper.getContentType(path)).build();
    } catch (Exception e) {
        logger.warn("Get pathmap content failed, message: " + e.getMessage(), e);
        if (e.getMessage() != null && e.getMessage().contains("not exist")) {
            return Response.status(Response.Status.NOT_FOUND).build();
        }
        responseHelper.throwError(e);
    }
    // Unreachable in practice: responseHelper.throwError always throws,
    // but the compiler requires a return on this path.
    return null;
}
Example use of javax.ws.rs.core.StreamingOutput from project druid (druid-io): class SqlResource, method doPost.
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
// Executes a SQL query and streams result rows back to the client.
// Lifecycle ordering is deliberate and must be preserved: the lifecycle is
// registered with sqlLifecycleManager only AFTER authorization succeeds, and
// endLifecycle must run exactly once on every path (inside the streaming lambda
// on success, in each catch clause on failure).
public Response doPost(final SqlQuery sqlQuery, @Context final HttpServletRequest req) throws IOException {
final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), sqlQuery.getContext());
// Captured up front because they are referenced from the streaming lambda,
// which runs on a different thread after this method has returned.
final String remoteAddr = req.getRemoteAddr();
final String currThreadName = Thread.currentThread().getName();
try {
// Tag the thread with the query id for log correlation while planning.
Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId));
lifecycle.setParameters(sqlQuery.getParameterList());
lifecycle.validateAndAuthorize(req);
// must add after lifecycle is authorized
sqlLifecycleManager.add(sqlQueryId, lifecycle);
lifecycle.plan();
final SqlRowTransformer rowTransformer = lifecycle.createRowTransformer();
final Sequence<Object[]> sequence = lifecycle.execute();
final Yielder<Object[]> yielder0 = Yielders.each(sequence);
try {
// From here the response body is produced lazily: the lambda below runs when
// the container serializes the entity, so errors inside it arrive after the
// 200 status line has already been sent.
final Response.ResponseBuilder responseBuilder = Response.ok((StreamingOutput) outputStream -> {
Exception e = null;
// Counting wrapper so endLifecycle can report how many bytes were emitted.
CountingOutputStream os = new CountingOutputStream(outputStream);
Yielder<Object[]> yielder = yielder0;
try (final ResultFormat.Writer writer = sqlQuery.getResultFormat().createFormatter(os, jsonMapper)) {
writer.writeResponseStart();
if (sqlQuery.includeHeader()) {
writer.writeHeader(rowTransformer.getRowType(), sqlQuery.includeTypesHeader(), sqlQuery.includeSqlTypesHeader());
}
// Drain the yielder row by row, transforming each field before writing.
while (!yielder.isDone()) {
final Object[] row = yielder.get();
writer.writeRowStart();
for (int i = 0; i < rowTransformer.getFieldList().size(); i++) {
final Object value = rowTransformer.transform(row, i);
writer.writeRowField(rowTransformer.getFieldList().get(i), value);
}
writer.writeRowEnd();
yielder = yielder.next(null);
}
writer.writeResponseEnd();
} catch (Exception ex) {
// Remember the failure so endLifecycle records it; rethrow so the
// container aborts the (already started) response.
e = ex;
log.error(ex, "Unable to send SQL response [%s]", sqlQueryId);
throw new RuntimeException(ex);
} finally {
// Success path cleanup: close the (possibly advanced) yielder and end
// the lifecycle with the byte count, whether or not writing failed.
yielder.close();
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, os.getCount());
}
}).header(SQL_QUERY_ID_RESPONSE_HEADER, sqlQueryId);
if (sqlQuery.includeHeader()) {
responseBuilder.header(SQL_HEADER_RESPONSE_HEADER, SQL_HEADER_VALUE);
}
return responseBuilder.build();
} catch (Throwable e) {
// make sure to close yielder if anything happened before starting to serialize the response.
yielder0.close();
throw new RuntimeException(e);
}
} catch (QueryCapacityExceededException cap) {
endLifecycle(sqlQueryId, lifecycle, cap, remoteAddr, -1);
return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, cap, sqlQueryId);
} catch (QueryUnsupportedException unsupported) {
endLifecycle(sqlQueryId, lifecycle, unsupported, remoteAddr, -1);
return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, unsupported, sqlQueryId);
} catch (QueryTimeoutException timeout) {
endLifecycle(sqlQueryId, lifecycle, timeout, remoteAddr, -1);
return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, timeout, sqlQueryId);
} catch (SqlPlanningException | ResourceLimitExceededException e) {
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
return buildNonOkResponse(BadQueryException.STATUS_CODE, e, sqlQueryId);
} catch (ForbiddenException e) {
// Auth failures skip metric emission; the exception mapper builds the response.
endLifecycleWithoutEmittingMetrics(sqlQueryId, lifecycle);
throw (ForbiddenException) serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(// let ForbiddenExceptionMapper handle this
e);
} catch (RelOptPlanner.CannotPlanException e) {
// Calcite could not produce a plan: surface as an "unsupported SQL" bad-query error.
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
SqlPlanningException spe = new SqlPlanningException(SqlPlanningException.PlanningError.UNSUPPORTED_SQL_ERROR, e.getMessage());
return buildNonOkResponse(BadQueryException.STATUS_CODE, spe, sqlQueryId);
}// calcite throws a java.lang.AssertionError which is type error not exception. using throwable will catch all
catch (Throwable e) {
log.warn(e, "Failed to handle query: %s", sqlQuery);
endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
return buildNonOkResponse(Status.INTERNAL_SERVER_ERROR.getStatusCode(), QueryInterruptedException.wrapIfNeeded(e), sqlQueryId);
} finally {
// Always restore the original thread name set at entry.
Thread.currentThread().setName(currThreadName);
}
}
Example use of javax.ws.rs.core.StreamingOutput from project druid (druid-io): class SqlResourceTest, method doPostRaw.
// Returns either an error or a result.
private Pair<QueryException, String> doPostRaw(final SqlQuery query, final HttpServletRequest req) throws Exception {
    final Response response = resource.doPost(query, req);

    if (response.getStatus() != 200) {
        // Non-OK: the entity is a serialized QueryException.
        final QueryException error = JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryException.class);
        return Pair.of(error, null);
    }

    // OK: drain the streaming entity into a buffer and return it as a string.
    final StreamingOutput entity = (StreamingOutput) response.getEntity();
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try {
        entity.write(buffer);
    } catch (Exception ignored) {
        // Suppress errors and return the response so far. Similar to what the real web server would do, if it
        // started writing a 200 OK and then threw an exception in the middle.
    }
    return Pair.of(null, new String(buffer.toByteArray(), StandardCharsets.UTF_8));
}
Aggregations