Example 91 with StreamingOutput

use of javax.ws.rs.core.StreamingOutput in project pentaho-platform by pentaho.

the class FileService method doGetFileOrDir.

/**
 * Takes a pathId and returns a wrapper object with an output stream for the file located at that pathId.
 *
 * @param pathId pathId of the file
 * @return RepositoryFileToStreamWrapper containing the file stream, the repository file (for the file name) and the mime type
 * @throws FileNotFoundException    if no file exists at the resolved path (or it is not readable)
 * @throws IllegalArgumentException if the path is invalid, or if the file is not whitelisted and the caller lacks PublishAction
 */
public RepositoryFileToStreamWrapper doGetFileOrDir(String pathId) throws FileNotFoundException {
    String path = idToPath(pathId);
    if (!isPathValid(path)) {
        throw new IllegalArgumentException();
    }
    RepositoryFile repoFile = getRepository().getFile(path);
    if (repoFile == null) {
        // file does not exist or is not readable but we can't tell at this point
        throw new FileNotFoundException();
    }
    // check whitelist acceptance of file (based on extension)
    if (!getWhitelist().accept(repoFile.getName())) {
        // if the whitelist check fails, the file can still be served inline when the caller has PublishAction; otherwise the request is FORBIDDEN
        if (!getPolicy().isAllowed(PublishAction.NAME)) {
            throw new IllegalArgumentException();
        }
    }
    final RepositoryFileInputStream is = getRepositoryFileInputStream(repoFile);
    StreamingOutput streamingOutput = getStreamingOutput(is);
    RepositoryFileToStreamWrapper wrapper = new RepositoryFileToStreamWrapper();
    wrapper.setOutputStream(streamingOutput);
    wrapper.setRepositoryFile(repoFile);
    wrapper.setMimetype(is.getMimeType());
    return wrapper;
}
Also used : FileNotFoundException(java.io.FileNotFoundException), RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile), StreamingOutput(javax.ws.rs.core.StreamingOutput), RepositoryFileInputStream(org.pentaho.platform.repository2.unified.fileio.RepositoryFileInputStream)
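
For context, a caller of doGetFileOrDir still has to turn the wrapper into a JAX-RS response. A minimal, hypothetical sketch of that caller-side step (not taken from the Pentaho sources; the helper name and the Content-Disposition choice are assumptions):

import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public final class FileResponses {

    // Hypothetical caller-side helper, not Pentaho code: wraps the stream,
    // mime type and file name carried by RepositoryFileToStreamWrapper in a
    // JAX-RS response with a Content-Disposition header.
    public static Response inlineFile(StreamingOutput stream, String mimeType, String fileName) {
        return Response.ok(stream, mimeType)
                .header("Content-Disposition", "inline; filename=\"" + fileName + "\"")
                .build();
    }
}

Because the body is a StreamingOutput, the repository file is copied to the socket only when the container writes the entity, so large files never have to be buffered in memory first.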

Example 92 with StreamingOutput

use of javax.ws.rs.core.StreamingOutput in project druid by druid-io.

the class QueryResource method doPost.

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(InputStream in, @QueryParam("pretty") String pretty, // used to get request content-type, remote address and AuthorizationInfo
@Context final HttpServletRequest req) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    QueryToolChest toolChest = null;
    String queryId = null;
    final ResponseContext context = createContext(req.getContentType(), pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        query = context.getObjectMapper().readValue(in, Query.class);
        queryId = query.getId();
        if (queryId == null) {
            queryId = UUID.randomUUID().toString();
            query = query.withId(queryId);
        }
        if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
            query = query.withOverriddenContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis()));
        }
        toolChest = warehouse.getToolChest(query);
        Thread.currentThread().setName(String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId));
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        if (authConfig.isEnabled()) {
            // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
            AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
            if (authorizationInfo != null) {
                for (String dataSource : query.getDataSource().getNames()) {
                    Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
                    if (!authResult.isAllowed()) {
                        return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
                    }
                }
            } else {
                throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
            }
        }
        String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
        if (prevEtag != null) {
            query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
        }
        final Map<String, Object> responseContext = new MapMaker().makeMap();
        final Sequence res = query.run(texasRanger, responseContext);
        if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
            return Response.notModified().build();
        }
        final Sequence results;
        if (res == null) {
            results = Sequences.empty();
        } else {
            results = res;
        }
        final Yielder yielder = Yielders.each(results);
        try {
            final Query theQuery = query;
            final QueryToolChest theToolChest = toolChest;
            final ObjectWriter jsonWriter = context.newOutputWriter();
            Response.ResponseBuilder builder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws IOException, WebApplicationException {
                    try {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                        successfulQueryCount.incrementAndGet();
                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).setDimension("success", "true").build("query/time", queryTime));
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).build("query/bytes", os.getCount()));
                        requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), theQuery, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "query/bytes", os.getCount(), "success", true))));
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                    }
                }
            }, context.getContentType()).header("X-Druid-Query-Id", queryId);
            if (responseContext.get(HDR_ETAG) != null) {
                builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
                responseContext.remove(HDR_ETAG);
            }
            //Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
            //Note that Response.ResponseBuilder.header(String key,Object value).build() calls value.toString()
            //and encodes the string using ASCII, so 1 char is = 1 byte
            String responseCtxString = jsonMapper.writeValueAsString(responseContext);
            if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
                log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
                responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
            }
            return builder.header("X-Druid-Response-Context", responseCtxString).build();
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw Throwables.propagate(e);
        } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        try {
            log.warn(e, "Exception while processing queryId [%s]", queryId);
            interruptedQueryCount.incrementAndGet();
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", query);
        }
        return context.gotError(e);
    } catch (Exception e) {
        // Input stream has already been consumed by the json object mapper if query == null
        final String queryString = query == null ? "unparsable query" : query.toString();
        log.warn(e, "Exception occurred on request [%s]", queryString);
        failedQueryCount.incrementAndGet();
        try {
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "exception", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", queryString);
        }
        log.makeAlert(e, "Exception handling request").addData("exception", e.toString()).addData("query", queryString).addData("peer", req.getRemoteAddr()).emit();
        return context.gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : Query(io.druid.query.Query), CountingOutputStream(com.google.common.io.CountingOutputStream), OutputStream(java.io.OutputStream), Access(io.druid.server.security.Access), StreamingOutput(javax.ws.rs.core.StreamingOutput), QueryToolChest(io.druid.query.QueryToolChest), DateTime(org.joda.time.DateTime), ISE(io.druid.java.util.common.ISE), QueryInterruptedException(io.druid.query.QueryInterruptedException), Yielder(io.druid.java.util.common.guava.Yielder), Resource(io.druid.server.security.Resource), MapMaker(com.google.common.collect.MapMaker), ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter), Sequence(io.druid.java.util.common.guava.Sequence), AuthorizationInfo(io.druid.server.security.AuthorizationInfo), WebApplicationException(javax.ws.rs.WebApplicationException), IOException(java.io.IOException), Response(javax.ws.rs.core.Response), POST(javax.ws.rs.POST), Produces(javax.ws.rs.Produces), Consumes(javax.ws.rs.Consumes)
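
Stripped of Druid-specific plumbing (authorization, ETags, metrics, request logging), the core of the example is an anonymous StreamingOutput that hands the response stream to a Jackson ObjectWriter and is passed to Response.ok(...). A minimal sketch of that shape, with hypothetical names (streamAsJson and its parameters are assumptions, not Druid's API):

import java.io.IOException;
import java.io.OutputStream;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public final class StreamingJsonResponses {

    // Hypothetical helper, not Druid's code: streams an arbitrary result
    // object as JSON and tags the response with a query id header, mirroring
    // the shape of the anonymous StreamingOutput in the example above.
    public static Response streamAsJson(final Object results, final String queryId) {
        final ObjectWriter jsonWriter = new ObjectMapper().writer();
        StreamingOutput body = new StreamingOutput() {
            @Override
            public void write(OutputStream outputStream) throws IOException, WebApplicationException {
                // Serialization happens only when the container consumes the
                // entity, so results go straight to the socket instead of
                // being buffered in memory.
                jsonWriter.writeValue(outputStream, results);
            }
        };
        return Response.ok(body, MediaType.APPLICATION_JSON)
                .header("X-Druid-Query-Id", queryId)
                .build();
    }
}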

Example 93 with StreamingOutput

use of javax.ws.rs.core.StreamingOutput in project hadoop by apache.

the class AHSWebServices method sendStreamOutputResponse.

private Response sendStreamOutputResponse(ApplicationId appId, String appOwner, String nodeId, String containerIdStr, String fileName, String format, long bytes, boolean printEmptyLocalContainerLog) {
    String contentType = WebAppUtils.getDefaultLogContentType();
    if (format != null && !format.isEmpty()) {
        contentType = WebAppUtils.getSupportedLogContentType(format);
        if (contentType == null) {
            String errorMessage = "The valid values for the parameter : format " + "are " + WebAppUtils.listSupportedLogContentType();
            return Response.status(Status.BAD_REQUEST).entity(errorMessage).build();
        }
    }
    StreamingOutput stream = null;
    try {
        stream = getStreamingOutput(appId, appOwner, nodeId, containerIdStr, fileName, bytes, printEmptyLocalContainerLog);
    } catch (Exception ex) {
        return createBadResponse(Status.INTERNAL_SERVER_ERROR, ex.getMessage());
    }
    ResponseBuilder response = Response.ok(stream);
    response.header("Content-Type", contentType);
    // Sending the X-Content-Type-Options response header with the value
    // nosniff will prevent Internet Explorer from MIME-sniffing a response
    // away from the declared content-type.
    response.header("X-Content-Type-Options", "nosniff");
    return response.build();
}
Also used : StreamingOutput(javax.ws.rs.core.StreamingOutput), ResponseBuilder(javax.ws.rs.core.Response.ResponseBuilder), WebApplicationException(javax.ws.rs.WebApplicationException), UniformInterfaceException(com.sun.jersey.api.client.UniformInterfaceException), NotFoundException(org.apache.hadoop.yarn.webapp.NotFoundException), IOException(java.io.IOException), JSONException(org.codehaus.jettison.json.JSONException), ClientHandlerException(com.sun.jersey.api.client.ClientHandlerException), BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException)
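
The getStreamingOutput(...) helper is not shown in the snippet. A minimal sketch of one plausible shape, assuming the helper simply copies a log stream to the response while honoring a byte cap (the method and parameter names are hypothetical; the real YARN code, which reads the aggregated log format and treats byte counts specially, is considerably more involved):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.ws.rs.core.StreamingOutput;

public final class LimitedLogStreaming {

    // Hypothetical sketch: copy at most maxBytes from the log stream to the
    // HTTP response and close the source when done.
    public static StreamingOutput limitedCopy(final InputStream logStream, final long maxBytes) {
        return new StreamingOutput() {
            @Override
            public void write(OutputStream out) throws IOException {
                byte[] buf = new byte[64 * 1024];
                long remaining = maxBytes;
                int read;
                try {
                    while (remaining > 0
                            && (read = logStream.read(buf, 0, (int) Math.min(buf.length, remaining))) != -1) {
                        out.write(buf, 0, read);
                        remaining -= read;
                    }
                    out.flush();
                } finally {
                    logStream.close();
                }
            }
        };
    }
}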

Example 94 with StreamingOutput

use of javax.ws.rs.core.StreamingOutput in project neo4j by neo4j.

the class BatchOperationService method batchProcessAndStream.

private Response batchProcessAndStream(final UriInfo uriInfo, final HttpHeaders httpHeaders, final HttpServletRequest req, final InputStream body) {
    try {
        final StreamingOutput stream = new StreamingOutput() {

            @Override
            public void write(final OutputStream output) throws IOException, WebApplicationException {
                try {
                    final ServletOutputStream servletOutputStream = new ServletOutputStream() {

                        @Override
                        public void write(int i) throws IOException {
                            output.write(i);
                        }

                        @Override
                        public boolean isReady() {
                            return true;
                        }

                        @Override
                        public void setWriteListener(WriteListener writeListener) {
                            try {
                                writeListener.onWritePossible();
                            } catch (IOException e) {
                            // Ignore
                            }
                        }
                    };
                    new StreamingBatchOperations(webServer).readAndExecuteOperations(uriInfo, httpHeaders, req, body, servletOutputStream);
                    representationWriteHandler.onRepresentationWritten();
                } catch (Exception e) {
                    LOGGER.warn("Error executing batch request ", e);
                } finally {
                    representationWriteHandler.onRepresentationFinal();
                }
            }
        };
        return Response.ok(stream).type(HttpHeaderUtils.mediaTypeWithCharsetUtf8(MediaType.APPLICATION_JSON_TYPE)).build();
    } catch (Exception e) {
        return output.serverError(e);
    }
}
Also used : ServletOutputStream(javax.servlet.ServletOutputStream), OutputStream(java.io.OutputStream), StreamingOutput(javax.ws.rs.core.StreamingOutput), IOException(java.io.IOException), NonStreamingBatchOperations(org.neo4j.server.rest.batch.NonStreamingBatchOperations), WriteListener(javax.servlet.WriteListener), WebApplicationException(javax.ws.rs.WebApplicationException)
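
The interesting part of the example is the anonymous adapter that exposes the JAX-RS OutputStream as a ServletOutputStream. A named variant of the same idea, for illustration only (not the Neo4j code), that also delegates the bulk write so batch responses are not copied one byte at a time:

import java.io.IOException;
import java.io.OutputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;

final class DelegatingServletOutputStream extends ServletOutputStream {

    private final OutputStream delegate;

    DelegatingServletOutputStream(OutputStream delegate) {
        this.delegate = delegate;
    }

    @Override
    public void write(int b) throws IOException {
        delegate.write(b);
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        delegate.write(b, off, len);
    }

    @Override
    public void flush() throws IOException {
        delegate.flush();
    }

    @Override
    public boolean isReady() {
        return true;
    }

    @Override
    public void setWriteListener(WriteListener writeListener) {
        try {
            // Signal readiness immediately, as the example above does.
            writeListener.onWritePossible();
        } catch (IOException e) {
            // Ignore, mirroring the original adapter.
        }
    }
}

Functionally this matches the inline adapter above; the extra overrides only avoid per-byte calls when the batch writer emits larger buffers.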

Example 95 with StreamingOutput

use of javax.ws.rs.core.StreamingOutput in project neo4j by neo4j.

the class TransactionalService method commitNewTransaction.

@POST
@Path("/commit")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
public Response commitNewTransaction(final InputStream input, @Context final UriInfo uriInfo, @Context final HttpServletRequest request) {
    final TransactionHandle transactionHandle;
    try {
        SecurityContext securityContext = AuthorizedRequestWrapper.getSecurityContextFromHttpServletRequest(request);
        long customTransactionTimeout = HttpHeaderUtils.getTransactionTimeout(request, log);
        transactionHandle = facade.newTransactionHandle(uriScheme, true, securityContext, customTransactionTimeout);
    } catch (TransactionLifecycleException e) {
        return invalidTransaction(e, uriInfo.getBaseUri());
    }
    final StreamingOutput streamingResults = executeStatementsAndCommit(input, transactionHandle, uriInfo.getBaseUri(), request);
    return okResponse(streamingResults);
}
Also used : TransactionHandle(org.neo4j.server.rest.transactional.TransactionHandle), SecurityContext(org.neo4j.kernel.api.security.SecurityContext), StreamingOutput(javax.ws.rs.core.StreamingOutput), TransactionLifecycleException(org.neo4j.server.rest.transactional.error.TransactionLifecycleException), Path(javax.ws.rs.Path), POST(javax.ws.rs.POST), Consumes(javax.ws.rs.Consumes), Produces(javax.ws.rs.Produces)
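
Neither executeStatementsAndCommit(...) nor okResponse(...) is shown in the snippet. A hypothetical sketch of what an okResponse-style helper presumably looks like, assuming it does no more than wrap the StreamingOutput in a 200 response with a JSON content type:

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public final class TransactionalResponses {

    // Hypothetical helper, not the actual Neo4j method: wraps the streaming
    // transaction results in an HTTP 200 response declared as JSON.
    public static Response okResponse(StreamingOutput streamingResults) {
        return Response.ok(streamingResults)
                .type(MediaType.APPLICATION_JSON_TYPE)
                .build();
    }
}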

Aggregations

StreamingOutput (javax.ws.rs.core.StreamingOutput): 190
OutputStream (java.io.OutputStream): 84
Response (javax.ws.rs.core.Response): 76
Path (javax.ws.rs.Path): 53
Produces (javax.ws.rs.Produces): 52
IOException (java.io.IOException): 50
GET (javax.ws.rs.GET): 50
File (java.io.File): 45
InputStream (java.io.InputStream): 45
Test (org.junit.Test): 44
WebApplicationException (javax.ws.rs.WebApplicationException): 33
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 32
List (java.util.List): 26
MediaType (javax.ws.rs.core.MediaType): 24
ByteArrayInputStream (java.io.ByteArrayInputStream): 20
ArrayList (java.util.ArrayList): 20
Consumes (javax.ws.rs.Consumes): 20
HashMap (java.util.HashMap): 19
POST (javax.ws.rs.POST): 19
FileOutputStream (java.io.FileOutputStream): 17
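
The co-occurrence counts above reflect one recurring shape: a resource method that returns Response.ok(...) around an anonymous StreamingOutput writing directly to the container-supplied OutputStream. A minimal, generic sketch of that pattern (a hypothetical resource, not taken from any of the projects above):

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

@Path("/export")
public class ExportResource {

    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public Response export() {
        StreamingOutput body = new StreamingOutput() {
            @Override
            public void write(OutputStream output) throws IOException {
                // The body is produced lazily, line by line, as the JAX-RS
                // runtime consumes the entity.
                for (int i = 0; i < 10; i++) {
                    output.write(("line " + i + "\n").getBytes(StandardCharsets.UTF_8));
                }
                output.flush();
            }
        };
        return Response.ok(body).build();
    }
}

The entity is written only when the runtime consumes it, which is why the pattern shows up wherever large files, logs, or query results have to be returned without buffering them in memory.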