Example 1 with StreamingOutput

Use of javax.ws.rs.core.StreamingOutput in project hadoop by apache.

The class NMWebServices, method getLogs:

/**
   * Returns the contents of a container's log file in plain text. 
   *
   * Only works for containers that are still in the NodeManager's memory, so
   * logs are no longer available once the corresponding application has
   * finished running.
   * 
   * @param containerIdStr
   *    The container ID
   * @param filename
   *    The name of the log file
   * @param format
   *    The content type
   * @param size
   *    The number of bytes of the log to return (a negative value retrieves
   *    bytes from the end of the log)
   * @return
   *    The contents of the container's log file
   */
@GET
@Path("/containerlogs/{containerid}/{filename}")
@Produces({ MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8 })
@Public
@Unstable
public Response getLogs(@PathParam(YarnWebServiceParams.CONTAINER_ID) final String containerIdStr, @PathParam(YarnWebServiceParams.CONTAINER_LOG_FILE_NAME) String filename, @QueryParam(YarnWebServiceParams.RESPONSE_CONTENT_FORMAT) String format, @QueryParam(YarnWebServiceParams.RESPONSE_CONTENT_SIZE) String size) {
    ContainerId tempContainerId;
    try {
        tempContainerId = ContainerId.fromString(containerIdStr);
    } catch (IllegalArgumentException ex) {
        return Response.status(Status.BAD_REQUEST).build();
    }
    final ContainerId containerId = tempContainerId;
    boolean tempIsRunning = false;
    // Check the current state of the container.
    try {
        Container container = nmContext.getContainers().get(containerId);
        tempIsRunning = (container.getContainerState() == ContainerState.RUNNING);
    } catch (Exception ex) {
        // assume the container has already finished.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Can not find the container:" + containerId + " in this node.");
        }
    }
    final boolean isRunning = tempIsRunning;
    File logFile = null;
    try {
        logFile = ContainerLogsUtils.getContainerLogFile(containerId, filename, request.getRemoteUser(), nmContext);
    } catch (NotFoundException ex) {
        if (redirectWSUrl == null || redirectWSUrl.isEmpty()) {
            return Response.status(Status.NOT_FOUND).entity(ex.getMessage()).build();
        }
        // redirect the request to the configured log server
        String redirectURI = "/containers/" + containerIdStr + "/logs/" + filename;
        return createRedirectResponse(request, redirectWSUrl, redirectURI);
    } catch (YarnException ex) {
        return Response.serverError().entity(ex.getMessage()).build();
    }
    final long bytes = parseLongParam(size);
    final String lastModifiedTime = Times.format(logFile.lastModified());
    final String outputFileName = filename;
    String contentType = WebAppUtils.getDefaultLogContentType();
    if (format != null && !format.isEmpty()) {
        contentType = WebAppUtils.getSupportedLogContentType(format);
        if (contentType == null) {
            String errorMessage = "The valid values for the parameter : format " + "are " + WebAppUtils.listSupportedLogContentType();
            return Response.status(Status.BAD_REQUEST).entity(errorMessage).build();
        }
    }
    try {
        final FileInputStream fis = ContainerLogsUtils.openLogFileForRead(containerIdStr, logFile, nmContext);
        final long fileLength = logFile.length();
        StreamingOutput stream = new StreamingOutput() {

            @Override
            public void write(OutputStream os) throws IOException, WebApplicationException {
                try {
                    int bufferSize = 65536;
                    byte[] buf = new byte[bufferSize];
                    LogToolUtils.outputContainerLog(containerId.toString(), nmContext.getNodeId().toString(), outputFileName, fileLength, bytes, lastModifiedTime, fis, os, buf, ContainerLogAggregationType.LOCAL);
                    StringBuilder sb = new StringBuilder();
                    String endOfFile = "End of LogType:" + outputFileName;
                    sb.append(endOfFile + ".");
                    if (isRunning) {
                        sb.append("This log file belongs to a running container (" + containerIdStr + ") and so may not be complete." + "\n");
                    } else {
                        sb.append("\n");
                    }
                    sb.append(StringUtils.repeat("*", endOfFile.length() + 50) + "\n\n");
                    os.write(sb.toString().getBytes(Charset.forName("UTF-8")));
                    // If we have aggregated logs for this container,
                    // output the aggregated logs as well.
                    ApplicationId appId = containerId.getApplicationAttemptId().getApplicationId();
                    Application app = nmContext.getApplications().get(appId);
                    String appOwner = app == null ? null : app.getUser();
                    try {
                        LogToolUtils.outputAggregatedContainerLog(nmContext.getConf(), appId, appOwner, containerId.toString(), nmContext.getNodeId().toString(), outputFileName, bytes, os, buf);
                    } catch (Exception ex) {
                        // Something went wrong while trying to access the aggregated log.
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Can not access the aggregated log for " + "the container:" + containerId);
                            LOG.debug(ex.getMessage());
                        }
                    }
                } finally {
                    IOUtils.closeQuietly(fis);
                }
            }
        };
        ResponseBuilder resp = Response.ok(stream);
        resp.header("Content-Type", contentType + "; " + JettyUtils.UTF_8);
        // Sending the X-Content-Type-Options response header with the value
        // nosniff will prevent Internet Explorer from MIME-sniffing a response
        // away from the declared content-type.
        resp.header("X-Content-Type-Options", "nosniff");
        return resp.build();
    } catch (IOException ex) {
        return Response.serverError().entity(ex.getMessage()).build();
    }
}
Also used: OutputStream(java.io.OutputStream) NotFoundException(org.apache.hadoop.yarn.webapp.NotFoundException) StreamingOutput(javax.ws.rs.core.StreamingOutput) IOException(java.io.IOException) WebApplicationException(javax.ws.rs.WebApplicationException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) FileInputStream(java.io.FileInputStream) Container(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) ResponseBuilder(javax.ws.rs.core.Response.ResponseBuilder) File(java.io.File) Application(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) Unstable(org.apache.hadoop.classification.InterfaceStability.Unstable) Public(org.apache.hadoop.classification.InterfaceAudience.Public)
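
The essence of the example above is the StreamingOutput contract: nothing is written when the resource method returns; the JAX-RS container invokes write() later, when it serializes the entity onto the response stream. A minimal, self-contained sketch of the same pattern with all YARN-specific pieces stripped out (the resource class, path, and log directory below are hypothetical):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

@Path("/files")
public class FileStreamResource {

    @GET
    @Path("/{name}")
    @Produces(MediaType.TEXT_PLAIN)
    public Response getFile(@PathParam("name") final String name) {
        final StreamingOutput stream = new StreamingOutput() {
            @Override
            public void write(OutputStream os) throws IOException, WebApplicationException {
                // The file is read only when the container serializes the entity,
                // not when the resource method returns.
                try (InputStream in = new FileInputStream("/var/log/demo/" + name)) {
                    byte[] buf = new byte[65536];
                    int len;
                    while ((len = in.read(buf)) != -1) {
                        os.write(buf, 0, len);
                    }
                    os.flush();
                }
            }
        };
        return Response.ok(stream)
                .header("X-Content-Type-Options", "nosniff")
                .build();
    }
}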

Example 2 with StreamingOutput

Use of javax.ws.rs.core.StreamingOutput in project hadoop by apache.

The class AHSWebServices, method getStreamingOutput:

private StreamingOutput getStreamingOutput(final ApplicationId appId, final String appOwner, final String nodeId, final String containerIdStr, final String logFile, final long bytes, final boolean printEmptyLocalContainerLog) throws IOException {
    StreamingOutput stream = new StreamingOutput() {

        @Override
        public void write(OutputStream os) throws IOException, WebApplicationException {
            byte[] buf = new byte[65535];
            boolean findLogs = LogToolUtils.outputAggregatedContainerLog(conf, appId, appOwner, containerIdStr, nodeId, logFile, bytes, os, buf);
            if (!findLogs) {
                os.write(("Can not find logs for container:" + containerIdStr).getBytes(Charset.forName("UTF-8")));
            } else {
                if (printEmptyLocalContainerLog) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(containerIdStr + "\n");
                    sb.append("LogAggregationType: " + ContainerLogAggregationType.LOCAL + "\n");
                    sb.append("LogContents:\n");
                    sb.append(getNoRedirectWarning() + "\n");
                    os.write(sb.toString().getBytes(Charset.forName("UTF-8")));
                }
            }
        }
    };
    return stream;
}
Also used: OutputStream(java.io.OutputStream) StreamingOutput(javax.ws.rs.core.StreamingOutput)
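
A factory like this is normally consumed by handing its result straight to a Response builder; no aggregated-log bytes are read until the container calls write(). A hedged sketch of that usage, with a hypothetical LogSource interface standing in for LogToolUtils.outputAggregatedContainerLog:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public class AggregatedLogStreamSketch {

    // Hypothetical stand-in for the aggregated-log copier: writes whatever
    // aggregated bytes exist and reports whether any were found.
    interface LogSource {
        boolean copyTo(OutputStream os, byte[] buf) throws IOException;
    }

    static Response streamAggregatedLog(final LogSource source, final String containerIdStr) {
        final StreamingOutput stream = new StreamingOutput() {
            @Override
            public void write(OutputStream os) throws IOException, WebApplicationException {
                final byte[] buf = new byte[65536];
                if (!source.copyTo(os, buf)) {
                    // Mirror the fallback message above when no aggregated logs exist.
                    os.write(("Can not find logs for container:" + containerIdStr)
                            .getBytes(StandardCharsets.UTF_8));
                }
            }
        };
        // The entity is written lazily, when the JAX-RS runtime serializes the response.
        return Response.ok(stream).build();
    }
}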

Example 3 with StreamingOutput

Use of javax.ws.rs.core.StreamingOutput in project killbill by killbill.

The class AdminResource, method getQueueEntries:

@GET
@Path("/queues")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Get queues entries", response = Response.class)
@ApiResponses(value = {})
public Response getQueueEntries(@QueryParam("accountId") final String accountIdStr, @QueryParam("queueName") final String queueName, @QueryParam("serviceName") final String serviceName, @QueryParam("withHistory") @DefaultValue("true") final Boolean withHistory, @QueryParam("minDate") final String minDateOrNull, @QueryParam("maxDate") final String maxDateOrNull, @QueryParam("withInProcessing") @DefaultValue("true") final Boolean withInProcessing, @QueryParam("withBusEvents") @DefaultValue("true") final Boolean withBusEvents, @QueryParam("withNotifications") @DefaultValue("true") final Boolean withNotifications, @javax.ws.rs.core.Context final HttpServletRequest request) {
    final TenantContext tenantContext = context.createContext(request);
    final Long tenantRecordId = recordIdApi.getRecordId(tenantContext.getTenantId(), ObjectType.TENANT, tenantContext);
    final Long accountRecordId = Strings.isNullOrEmpty(accountIdStr) ? null : recordIdApi.getRecordId(UUID.fromString(accountIdStr), ObjectType.ACCOUNT, tenantContext);
    // Limit search results by default
    final DateTime minDate = Strings.isNullOrEmpty(minDateOrNull) ? clock.getUTCNow().minusDays(2) : DATE_TIME_FORMATTER.parseDateTime(minDateOrNull).toDateTime(DateTimeZone.UTC);
    final DateTime maxDate = Strings.isNullOrEmpty(maxDateOrNull) ? clock.getUTCNow().plusDays(2) : DATE_TIME_FORMATTER.parseDateTime(maxDateOrNull).toDateTime(DateTimeZone.UTC);
    final StreamingOutput json = new StreamingOutput() {

        @Override
        public void write(final OutputStream output) throws IOException, WebApplicationException {
            Iterator<BusEventWithMetadata<BusEvent>> busEventsIterator = null;
            Iterator<NotificationEventWithMetadata<NotificationEvent>> notificationsIterator = null;
            try {
                final JsonGenerator generator = mapper.getFactory().createGenerator(output);
                generator.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
                generator.writeStartObject();
                if (withBusEvents) {
                    generator.writeFieldName("busEvents");
                    generator.writeStartArray();
                    busEventsIterator = getBusEvents(withInProcessing, withHistory, minDate, maxDate, accountRecordId, tenantRecordId).iterator();
                    while (busEventsIterator.hasNext()) {
                        final BusEventWithMetadata<BusEvent> busEvent = busEventsIterator.next();
                        generator.writeObject(new BusEventWithRichMetadata(busEvent));
                    }
                    generator.writeEndArray();
                }
                if (withNotifications) {
                    generator.writeFieldName("notifications");
                    generator.writeStartArray();
                    notificationsIterator = getNotifications(queueName, serviceName, withInProcessing, withHistory, minDate, maxDate, accountRecordId, tenantRecordId).iterator();
                    while (notificationsIterator.hasNext()) {
                        final NotificationEventWithMetadata<NotificationEvent> notification = notificationsIterator.next();
                        generator.writeObject(notification);
                    }
                    generator.writeEndArray();
                }
                generator.writeEndObject();
                generator.close();
            } finally {
                // In case the client goes away (IOException), make sure to close the underlying DB connection
                if (busEventsIterator != null) {
                    while (busEventsIterator.hasNext()) {
                        busEventsIterator.next();
                    }
                }
                if (notificationsIterator != null) {
                    while (notificationsIterator.hasNext()) {
                        notificationsIterator.next();
                    }
                }
            }
        }
    };
    return Response.status(Status.OK).entity(json).build();
}
Also used: BusEventWithMetadata(org.killbill.bus.api.BusEventWithMetadata) OutputStream(java.io.OutputStream) TenantContext(org.killbill.billing.util.callcontext.TenantContext) StreamingOutput(javax.ws.rs.core.StreamingOutput) NotificationEvent(org.killbill.notificationq.api.NotificationEvent) DateTime(org.joda.time.DateTime) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) NotificationEventWithMetadata(org.killbill.notificationq.api.NotificationEventWithMetadata) BusEvent(org.killbill.bus.api.BusEvent) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) ApiOperation(io.swagger.annotations.ApiOperation) ApiResponses(io.swagger.annotations.ApiResponses)
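
The technique worth copying here is streaming JSON with Jackson's JsonGenerator directly onto the response OutputStream, and disabling AUTO_CLOSE_TARGET so that closing the generator does not close the container's stream. A minimal sketch of just that skeleton (hypothetical class name; a plain java.util.Iterator stands in for Kill Bill's queue iterators):

import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

public class QueueStreamSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    static Response streamEntries(final Iterator<?> entries) {
        final StreamingOutput json = new StreamingOutput() {
            @Override
            public void write(final OutputStream output) throws IOException, WebApplicationException {
                final JsonGenerator generator = MAPPER.getFactory().createGenerator(output);
                // Keep the container's stream open when the generator is closed.
                generator.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
                generator.writeStartObject();
                generator.writeFieldName("entries");
                generator.writeStartArray();
                while (entries.hasNext()) {
                    // Each entry is serialized as it is consumed, keeping memory bounded.
                    generator.writeObject(entries.next());
                }
                generator.writeEndArray();
                generator.writeEndObject();
                generator.close();
            }
        };
        return Response.ok(json).build();
    }
}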

Example 4 with StreamingOutput

Use of javax.ws.rs.core.StreamingOutput in project druid by druid-io.

The class SqlResource, method doPost:

@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery) throws SQLException, IOException {
    // This is not integrated with the experimental authorization framework.
    // (Non-trivial since we don't know the dataSources up-front)
    final PlannerResult plannerResult;
    final DateTimeZone timeZone;
    try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) {
        plannerResult = planner.plan(sqlQuery.getQuery());
        timeZone = planner.getPlannerContext().getTimeZone();
        // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.
        final List<RelDataTypeField> fieldList = plannerResult.rowType().getFieldList();
        final boolean[] timeColumns = new boolean[fieldList.size()];
        final boolean[] dateColumns = new boolean[fieldList.size()];
        for (int i = 0; i < fieldList.size(); i++) {
            final SqlTypeName sqlTypeName = fieldList.get(i).getType().getSqlTypeName();
            timeColumns[i] = sqlTypeName == SqlTypeName.TIMESTAMP;
            dateColumns[i] = sqlTypeName == SqlTypeName.DATE;
        }
        final Yielder<Object[]> yielder0 = Yielders.each(plannerResult.run());
        try {
            return Response.ok(new StreamingOutput() {

                @Override
                public void write(final OutputStream outputStream) throws IOException, WebApplicationException {
                    Yielder<Object[]> yielder = yielder0;
                    try (final JsonGenerator jsonGenerator = jsonMapper.getFactory().createGenerator(outputStream)) {
                        jsonGenerator.writeStartArray();
                        while (!yielder.isDone()) {
                            final Object[] row = yielder.get();
                            jsonGenerator.writeStartObject();
                            for (int i = 0; i < fieldList.size(); i++) {
                                final Object value;
                                if (timeColumns[i]) {
                                    value = ISODateTimeFormat.dateTime().print(Calcites.calciteTimestampToJoda((long) row[i], timeZone));
                                } else if (dateColumns[i]) {
                                    value = ISODateTimeFormat.dateTime().print(Calcites.calciteDateToJoda((int) row[i], timeZone));
                                } else {
                                    value = row[i];
                                }
                                jsonGenerator.writeObjectField(fieldList.get(i).getName(), value);
                            }
                            jsonGenerator.writeEndObject();
                            yielder = yielder.next(null);
                        }
                        jsonGenerator.writeEndArray();
                        jsonGenerator.flush();
                        // End with CRLF
                        outputStream.write('\r');
                        outputStream.write('\n');
                    } finally {
                        yielder.close();
                    }
                }
            }).build();
        } catch (Throwable e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder0.close();
            throw Throwables.propagate(e);
        }
    } catch (Exception e) {
        log.warn(e, "Failed to handle query: %s", sqlQuery);
        final Exception exceptionToReport;
        if (e instanceof RelOptPlanner.CannotPlanException) {
            exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery());
        } else {
            exceptionToReport = e;
        }
        return Response.serverError().type(MediaType.APPLICATION_JSON_TYPE).entity(jsonMapper.writeValueAsBytes(QueryInterruptedException.wrapIfNeeded(exceptionToReport))).build();
    }
}
Also used: SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) OutputStream(java.io.OutputStream) StreamingOutput(javax.ws.rs.core.StreamingOutput) RelOptPlanner(org.apache.calcite.plan.RelOptPlanner) DateTimeZone(org.joda.time.DateTimeZone) QueryInterruptedException(io.druid.query.QueryInterruptedException) SQLException(java.sql.SQLException) IOException(java.io.IOException) WebApplicationException(javax.ws.rs.WebApplicationException) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) DruidPlanner(io.druid.sql.calcite.planner.DruidPlanner) JsonGenerator(com.fasterxml.jackson.core.JsonGenerator) ISE(io.druid.java.util.common.ISE) PlannerResult(io.druid.sql.calcite.planner.PlannerResult) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
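
A detail worth noting in this example is ownership of the yielder: the writer's finally block closes it during normal streaming, while the catch before the method returns closes it if anything fails before serialization ever starts. A hedged sketch of that ownership pattern, with a hypothetical RowCursor interface standing in for Druid's Yielder:

import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public class CursorStreamSketch {

    // Hypothetical stand-in for a lazily evaluated, closeable sequence of result rows.
    interface RowCursor extends Iterator<String>, Closeable {
    }

    static Response streamRows(final RowCursor cursor) throws IOException {
        try {
            return Response.ok(new StreamingOutput() {
                @Override
                public void write(final OutputStream out) throws IOException, WebApplicationException {
                    try {
                        while (cursor.hasNext()) {
                            // Rows are written one at a time; nothing is buffered beyond one row.
                            out.write(cursor.next().getBytes(StandardCharsets.UTF_8));
                            out.write('\n');
                        }
                    } finally {
                        // Once write() starts, the writer owns the cursor and must close it.
                        cursor.close();
                    }
                }
            }).build();
        } catch (RuntimeException e) {
            // If anything fails before streaming begins, close the cursor here instead.
            cursor.close();
            throw e;
        }
    }
}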

Example 5 with StreamingOutput

Use of javax.ws.rs.core.StreamingOutput in project druid by druid-io.

The class SqlResourceTest, method doPost:

// Returns either an error or a result.
private Pair<QueryInterruptedException, List<Map<String, Object>>> doPost(final SqlQuery query) throws Exception {
    final Response response = resource.doPost(query);
    if (response.getStatus() == 200) {
        final StreamingOutput output = (StreamingOutput) response.getEntity();
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        output.write(baos);
        return Pair.of(null, JSON_MAPPER.<List<Map<String, Object>>>readValue(baos.toByteArray(), new TypeReference<List<Map<String, Object>>>() {
        }));
    } else {
        return Pair.of(JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryInterruptedException.class), null);
    }
}
Also used: Response(javax.ws.rs.core.Response) StreamingOutput(javax.ws.rs.core.StreamingOutput) ByteArrayOutputStream(java.io.ByteArrayOutputStream) TypeReference(com.fasterxml.jackson.core.type.TypeReference) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) QueryInterruptedException(io.druid.query.QueryInterruptedException)
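
The same trick generalizes to any resource method that returns a StreamingOutput entity: a unit test can drive the entity into a ByteArrayOutputStream and deserialize the captured bytes, with no servlet container or HTTP transport involved. A small reusable sketch (hypothetical helper class; assumes Jackson is on the test classpath):

import java.io.ByteArrayOutputStream;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class StreamingOutputTestSupport {

    private static final ObjectMapper JSON = new ObjectMapper();

    static List<Map<String, Object>> readJsonRows(final Response response) throws Exception {
        final StreamingOutput output = (StreamingOutput) response.getEntity();
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Drive the streaming entity directly; the bytes land in the in-memory buffer.
        output.write(baos);
        return JSON.readValue(baos.toByteArray(), new TypeReference<List<Map<String, Object>>>() {
        });
    }
}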

Aggregations

StreamingOutput (javax.ws.rs.core.StreamingOutput): 186 usages
OutputStream (java.io.OutputStream): 82 usages
Response (javax.ws.rs.core.Response): 76 usages
Path (javax.ws.rs.Path): 51 usages
Produces (javax.ws.rs.Produces): 50 usages
IOException (java.io.IOException): 49 usages
GET (javax.ws.rs.GET): 48 usages
File (java.io.File): 44 usages
InputStream (java.io.InputStream): 43 usages
Test (org.junit.Test): 43 usages
WebApplicationException (javax.ws.rs.WebApplicationException): 33 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 32 usages
List (java.util.List): 26 usages
MediaType (javax.ws.rs.core.MediaType): 24 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 20 usages
Consumes (javax.ws.rs.Consumes): 20 usages
ArrayList (java.util.ArrayList): 19 usages
HashMap (java.util.HashMap): 19 usages
POST (javax.ws.rs.POST): 19 usages
FileOutputStream (java.io.FileOutputStream): 17 usages