Search in sources :

Example 66 with ByteBufInputStream

Use of io.netty.buffer.ByteBufInputStream in project netty by netty.

Source: class LzmaFrameEncoder, method encode.

@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) throws Exception {
    // Number of uncompressed payload bytes; recorded in the LZMA frame header below.
    final int length = in.readableBytes();
    // try-with-resources guarantees BOTH streams are closed even if one close()
    // throws; the original finally block leaked bbOut if bbIn.close() threw.
    try (InputStream bbIn = new ByteBufInputStream(in);
            ByteBufOutputStream bbOut = new ByteBufOutputStream(out)) {
        // LZMA frame header: 1 properties byte, 4-byte dictionary size
        // (pre-swapped to little-endian), then the uncompressed length.
        bbOut.writeByte(properties);
        bbOut.writeInt(littleEndianDictionarySize);
        // writeLong emits big-endian; reverseBytes produces the little-endian
        // 8-byte length the LZMA stream format requires.
        bbOut.writeLong(Long.reverseBytes(length));
        // -1/-1: input/output sizes unknown up front; the encoder reads bbIn
        // until exhausted.
        encoder.code(bbIn, bbOut, -1, -1, null);
    }
}
Also used : ByteBufOutputStream(io.netty.buffer.ByteBufOutputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) InputStream(java.io.InputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream)

Example 67 with ByteBufInputStream

Use of io.netty.buffer.ByteBufInputStream in project netty by netty.

Source: class Lz4FrameEncoderTest, method decompress.

@Override
protected ByteBuf decompress(ByteBuf compressed, int originalLength) throws Exception {
    // Wrap the compressed buffer; 'true' releases the underlying ByteBuf on close.
    InputStream is = new ByteBufInputStream(compressed, true);
    LZ4BlockInputStream lz4Is = null;
    byte[] decompressed = new byte[originalLength];
    try {
        lz4Is = new LZ4BlockInputStream(is);
        // Fill the output array, tolerating short reads; stop early on EOF.
        for (int offset = 0; offset < originalLength; ) {
            int read = lz4Is.read(decompressed, offset, originalLength - offset);
            if (read <= 0) {
                break;
            }
            offset += read;
        }
        // The compressed stream must be fully drained once originalLength
        // bytes have been produced.
        assertEquals(-1, lz4Is.read());
    } finally {
        // Closing the LZ4 stream also closes 'is'; close 'is' directly only
        // when LZ4BlockInputStream construction failed.
        if (lz4Is != null) {
            lz4Is.close();
        } else {
            is.close();
        }
    }
    return Unpooled.wrappedBuffer(decompressed);
}
Also used : LZ4BlockInputStream(net.jpountz.lz4.LZ4BlockInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) InputStream(java.io.InputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) LZ4BlockInputStream(net.jpountz.lz4.LZ4BlockInputStream)

Example 68 with ByteBufInputStream

Use of io.netty.buffer.ByteBufInputStream in project netty by netty.

Source: class LzmaFrameEncoderTest, method decompress.

@Override
protected ByteBuf decompress(ByteBuf compressed, int originalLength) throws Exception {
    // Wrap the compressed buffer; 'true' releases the underlying ByteBuf on close.
    InputStream is = new ByteBufInputStream(compressed, true);
    LzmaInputStream lzmaIs = null;
    byte[] decompressed = new byte[originalLength];
    try {
        lzmaIs = new LzmaInputStream(is, new Decoder());
        // Fill the output array, tolerating short reads; stop early on EOF.
        for (int offset = 0; offset < originalLength; ) {
            int read = lzmaIs.read(decompressed, offset, originalLength - offset);
            if (read <= 0) {
                break;
            }
            offset += read;
        }
        // The compressed stream must be fully drained once originalLength
        // bytes have been produced.
        assertEquals(-1, lzmaIs.read());
    } finally {
        if (lzmaIs != null) {
            lzmaIs.close();
        }
        // LzmaInputStream does not close its underlying stream, so 'is' must
        // be closed explicitly as well.
        // https://github.com/jponge/lzma-java/issues/14
        if (is != null) {
            is.close();
        }
    }
    return Unpooled.wrappedBuffer(decompressed);
}
Also used : LzmaInputStream(lzma.streams.LzmaInputStream) LzmaInputStream(lzma.streams.LzmaInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) InputStream(java.io.InputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) Decoder(lzma.sdk.lzma.Decoder)

Example 69 with ByteBufInputStream

Use of io.netty.buffer.ByteBufInputStream in project netty by netty.

Source: class Bzip2EncoderTest, method decompress.

@Override
protected ByteBuf decompress(ByteBuf compressed, int originalLength) throws Exception {
    // Wrap the compressed buffer; 'true' releases the underlying ByteBuf on close.
    InputStream is = new ByteBufInputStream(compressed, true);
    BZip2CompressorInputStream bzip2Is = null;
    byte[] decompressed = new byte[originalLength];
    try {
        bzip2Is = new BZip2CompressorInputStream(is);
        // Fill the output array, tolerating short reads; stop early on EOF.
        for (int offset = 0; offset < originalLength; ) {
            int read = bzip2Is.read(decompressed, offset, originalLength - offset);
            if (read <= 0) {
                break;
            }
            offset += read;
        }
        // The compressed stream must be fully drained once originalLength
        // bytes have been produced.
        assertEquals(-1, bzip2Is.read());
    } finally {
        // Closing the bzip2 stream also closes 'is'; close 'is' directly only
        // when BZip2CompressorInputStream construction failed.
        if (bzip2Is != null) {
            bzip2Is.close();
        } else {
            is.close();
        }
    }
    return Unpooled.wrappedBuffer(decompressed);
}
Also used : BZip2CompressorInputStream(org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream) BZip2CompressorInputStream(org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) InputStream(java.io.InputStream) ByteBufInputStream(io.netty.buffer.ByteBufInputStream)

Example 70 with ByteBufInputStream

use of org.apache.flink.shaded.netty4.io.netty.buffer.ByteBufInputStream in project flink by apache.

Source: class AbstractHandler, method respondAsLeader.

@Override
protected void respondAsLeader(ChannelHandlerContext ctx, RoutedRequest routedRequest, T gateway) {
    HttpRequest httpRequest = routedRequest.getRequest();
    if (log.isTraceEnabled()) {
        log.trace("Received request " + httpRequest.uri() + '.');
    }
    FileUploads uploadedFiles = null;
    try {
        // Refuse requests that race with handler shutdown.
        if (!inFlightRequestTracker.registerRequest()) {
            log.debug("The handler instance for {} had already been closed.", untypedResponseMessageHeaders.getTargetRestEndpointURL());
            ctx.channel().close();
            return;
        }
        if (!(httpRequest instanceof FullHttpRequest)) {
            // The RestServerEndpoint defines a HttpObjectAggregator in the pipeline that always
            // returns
            // FullHttpRequests.
            log.error("Implementation error: Received a request that wasn't a FullHttpRequest.");
            throw new RestHandlerException("Bad request received.", HttpResponseStatus.BAD_REQUEST);
        }
        final ByteBuf msgContent = ((FullHttpRequest) httpRequest).content();
        uploadedFiles = FileUploadHandler.getMultipartFileUploads(ctx);
        if (!untypedResponseMessageHeaders.acceptsFileUploads() && !uploadedFiles.getUploadedFiles().isEmpty()) {
            throw new RestHandlerException("File uploads not allowed.", HttpResponseStatus.BAD_REQUEST);
        }
        R request;
        if (msgContent.capacity() == 0) {
            // Empty body: materialize the request type from an empty JSON object.
            try {
                request = MAPPER.readValue("{}", untypedResponseMessageHeaders.getRequestClass());
            } catch (JsonParseException | JsonMappingException je) {
                throw new RestHandlerException("Bad request received. Request did not conform to expected format.", HttpResponseStatus.BAD_REQUEST, je);
            }
        } else {
            // try-with-resources closes the stream wrapping the request body;
            // the original leaked it (harmless only because ByteBufInputStream
            // without releaseOnClose has a no-op close, but this is the safe idiom).
            try (InputStream in = new ByteBufInputStream(msgContent)) {
                request = MAPPER.readValue(in, untypedResponseMessageHeaders.getRequestClass());
            } catch (JsonParseException | JsonMappingException je) {
                throw new RestHandlerException(String.format("Request did not match expected format %s.", untypedResponseMessageHeaders.getRequestClass().getSimpleName()), HttpResponseStatus.BAD_REQUEST, je);
            }
        }
        // Bind path/query parameters and uploaded files to the typed request.
        final HandlerRequest<R> handlerRequest;
        try {
            handlerRequest = HandlerRequest.resolveParametersAndCreate(request, untypedResponseMessageHeaders.getUnresolvedMessageParameters(), routedRequest.getRouteResult().pathParams(), routedRequest.getRouteResult().queryParams(), uploadedFiles.getUploadedFiles());
        } catch (HandlerRequestException hre) {
            log.error("Could not create the handler request.", hre);
            throw new RestHandlerException(String.format("Bad request, could not parse parameters: %s", hre.getMessage()), HttpResponseStatus.BAD_REQUEST, hre);
        }
        log.trace("Starting request processing.");
        CompletableFuture<Void> requestProcessingFuture = respondToRequest(ctx, httpRequest, handlerRequest, gateway);
        // Effectively-final copy for capture by the lambdas below.
        final FileUploads finalUploadedFiles = uploadedFiles;
        requestProcessingFuture.handle((Void ignored, Throwable throwable) -> {
            if (throwable != null) {
                return handleException(ExceptionUtils.stripCompletionException(throwable), ctx, httpRequest);
            }
            return CompletableFuture.<Void>completedFuture(null);
        }).thenCompose(Function.identity()).whenComplete((Void ignored, Throwable throwable) -> {
            if (throwable != null) {
                log.warn("An exception occurred while handling another exception.", throwable);
            }
            // Always clean up uploaded files and deregister the in-flight request.
            finalizeRequestProcessing(finalUploadedFiles);
        });
    } catch (Throwable e) {
        final FileUploads finalUploadedFiles = uploadedFiles;
        handleException(e, ctx, httpRequest).whenComplete((Void ignored, Throwable throwable) -> finalizeRequestProcessing(finalUploadedFiles));
    }
}
Also used : HttpRequest(org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpRequest) FullHttpRequest(org.apache.flink.shaded.netty4.io.netty.handler.codec.http.FullHttpRequest) FullHttpRequest(org.apache.flink.shaded.netty4.io.netty.handler.codec.http.FullHttpRequest) ByteBufInputStream(org.apache.flink.shaded.netty4.io.netty.buffer.ByteBufInputStream) InputStream(java.io.InputStream) ByteBufInputStream(org.apache.flink.shaded.netty4.io.netty.buffer.ByteBufInputStream) ByteBuf(org.apache.flink.shaded.netty4.io.netty.buffer.ByteBuf) JsonParseException(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParseException) JsonMappingException(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonMappingException)

Aggregations

ByteBufInputStream (io.netty.buffer.ByteBufInputStream)69 ByteBuf (io.netty.buffer.ByteBuf)22 IOException (java.io.IOException)22 InputStreamReader (java.io.InputStreamReader)18 BadRequestException (co.cask.cdap.common.BadRequestException)16 Reader (java.io.Reader)16 JsonSyntaxException (com.google.gson.JsonSyntaxException)11 InputStream (java.io.InputStream)10 Path (javax.ws.rs.Path)9 ObjectInputStream (java.io.ObjectInputStream)8 NamespaceId (co.cask.cdap.proto.id.NamespaceId)6 POST (javax.ws.rs.POST)6 Test (org.junit.jupiter.api.Test)5 AuditPolicy (co.cask.cdap.common.security.AuditPolicy)4 InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException)4 ByteBufOutputStream (io.netty.buffer.ByteBufOutputStream)4 RpcException (org.apache.drill.exec.rpc.RpcException)4 UnsupportedTypeException (co.cask.cdap.api.data.schema.UnsupportedTypeException)3 DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException)3 ObjectOutputStream (java.io.ObjectOutputStream)3