Example 6 with Lz4Compressor

use of net.jpountz.lz4.LZ4Compressor in project chunkstories by Hugobros3.

the class ChunkHolderImplementation method compressChunkData.

/**
 * This method is called assuming the chunk is properly locked
 */
private CompressedData compressChunkData(final CubicChunk chunk) {
    final int changesTakenIntoAccount = chunk.compr_uncomittedBlockModifications.get();
    // Stage 1: Compress the actual voxel data
    byte[] voxelCompressedData;
    if (!chunk.isAirChunk()) {
        // Heuristic value for the size of the buffer: fixed voxel size + factor of components & entities
        // + chunk.voxelComponents.size() * 1024 + chunk.localEntities.size() * 2048;
        int uncompressedStuffBufferSize = 32 * 32 * 32 * 4;
        ByteBuffer uncompressedStuff = MemoryUtil.memAlloc(uncompressedStuffBufferSize);
        uncompressedStuff.asIntBuffer().put(chunk.chunkVoxelData);
        // uncompressedStuff.flip();
        ByteBuffer compressedStuff = MemoryUtil.memAlloc(uncompressedStuffBufferSize + 2048);
        LZ4Compressor compressor = factory.fastCompressor();
        compressor.compress(uncompressedStuff, compressedStuff);
        // No longer need that buffer
        MemoryUtil.memFree(uncompressedStuff);
        // Make a Java byte[] array to put the final stuff in
        voxelCompressedData = new byte[compressedStuff.position()];
        compressedStuff.flip();
        compressedStuff.get(voxelCompressedData);
        // No longer need that buffer either
        MemoryUtil.memFree(compressedStuff);
    } else {
        // Just use a symbolic null here
        voxelCompressedData = null;
    }
    // Stage 2: Take care of the voxel components
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream daos = new DataOutputStream(baos);
    // ByteBuffer smallBuffer = MemoryUtil.memAlloc(4096);
    // byte[] smallArray = new byte[4096];
    // ByteBufferOutputStream bbos = new ByteBufferOutputStream(smallBuffer);
    ByteArrayOutputStream bbos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bbos);
    try {
        // For all cells that have components
        for (CellComponentsHolder voxelComponents : chunk.allCellComponents.values()) {
            // Write a 0x01 marker, then the cell's in-chunk index
            daos.writeByte((byte) 0x01);
            daos.writeInt(voxelComponents.getIndex());
            // For all components in this cell
            for (Entry<String, VoxelComponent> entry : voxelComponents.all()) {
                // Write component name
                daos.writeUTF(entry.getKey());
                // Push the component in the temporary buffer
                entry.getValue().push(region.handler, dos);
                // smallBuffer.flip();
                byte[] bytesPushed = bbos.toByteArray();
                bbos.reset();
                // Write how many bytes the temporary buffer now contains
                // int bytesPushed = smallBuffer.limit();
                daos.writeShort(bytesPushed.length);
                // Get those bytes as an array then write it in the compressed stuff
                // smallBuffer.get(smallArray);
                daos.write(bytesPushed, 0, bytesPushed.length);
            // Reset the temporary buffer
            // smallBuffer.clear();
            }
            daos.writeUTF("\n");
        }
        // Write a final 0x00 marker to signal the end of the voxel components
        daos.writeByte((byte) 0x00);
    // Since we output to a local buffer, any failure is viewed as catastrophic
    } catch (IOException e) {
        assert false;
    }
    // Extract the byte array from the baos
    byte[] voxelComponentsData = baos.toByteArray();
    // MemoryUtil.memFree(smallBuffer);
    // Stage 3: Serialize the entities
    baos.reset();
    for (Entity entity : chunk.localEntities) {
        // Don't save entities that opted out of being persisted into the region
        if (entity.exists() && !(entity instanceof EntityUnsaveable && !((EntityUnsaveable) entity).shouldSaveIntoRegion())) {
            EntitySerializer.writeEntityToStream(daos, region.handler, entity);
        }
    }
    EntitySerializer.writeEntityToStream(daos, region.handler, null);
    byte[] entityData = baos.toByteArray();
    // Subtract only the modifications that existed when this method started, so changes made concurrently are not lost
    chunk.compr_uncomittedBlockModifications.addAndGet(-changesTakenIntoAccount);
    return new CompressedData(voxelCompressedData, voxelComponentsData, entityData);
}
Also used : EntityUnsaveable(io.xol.chunkstories.api.entity.interfaces.EntityUnsaveable) Entity(io.xol.chunkstories.api.entity.Entity) DataOutputStream(java.io.DataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) CellComponentsHolder(io.xol.chunkstories.voxel.components.CellComponentsHolder) ByteBuffer(java.nio.ByteBuffer) LZ4Compressor(net.jpountz.lz4.LZ4Compressor) VoxelComponent(io.xol.chunkstories.api.voxel.components.VoxelComponent) PacketChunkCompressedData(io.xol.chunkstories.net.packets.PacketChunkCompressedData)
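
Reading such a chunk back only needs the inverse call. Below is a minimal sketch (not chunkstories code) of a hypothetical decompressChunkVoxelData helper that inflates the voxelCompressedData array produced above into the fixed 32×32×32 int layout, assuming the LWJGL memAlloc buffer above used native byte order.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;

// Hypothetical helper, not part of chunkstories: inverse of the Stage 1 compression above
static int[] decompressChunkVoxelData(byte[] voxelCompressedData) {
    // Air chunks were stored as a symbolic null, so hand back an all-zero voxel array
    if (voxelCompressedData == null)
        return new int[32 * 32 * 32];
    // The uncompressed size is fixed by the format: 32^3 voxels, 4 bytes each
    byte[] restored = new byte[32 * 32 * 32 * 4];
    LZ4FastDecompressor decompressor = LZ4Factory.fastestInstance().fastDecompressor();
    decompressor.decompress(voxelCompressedData, restored);
    // memAlloc buffers are native-ordered, so read the ints back with the same order (assumption)
    int[] voxelData = new int[32 * 32 * 32];
    ByteBuffer.wrap(restored).order(ByteOrder.nativeOrder()).asIntBuffer().get(voxelData);
    return voxelData;
}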

Example 7 with Lz4Compressor

use of net.jpountz.lz4.LZ4Compressor in project vespa by vespa-engine.

the class BasicPacket method encodeAndCompressBody.

protected void encodeAndCompressBody(ByteBuffer buffer, int startPosition) {
    int startOfBody = buffer.position();
    encodeBody(buffer);
    setEncodedBody(buffer, startOfBody, buffer.position() - startOfBody);
    length = buffer.position() - startPosition;
    if (compressionLimit != 0 && length - 4 > compressionLimit) {
        byte[] compressedBody;
        compressionType = CompressionType.LZ4;
        LZ4Factory factory = LZ4Factory.fastestInstance();
        LZ4Compressor compressor = factory.fastCompressor();
        compressedBody = compressor.compress(encodedBody);
        log.log(LogLevel.DEBUG, "Uncompressed size: " + encodedBody.length + ", Compressed size: " + compressedBody.length);
        if (compressedBody.length + 4 < encodedBody.length) {
            buffer.position(startPosition);
            // +4 for compressed size
            buffer.putInt(compressedBody.length + startOfBody - startPosition + 4 - 4);
            buffer.putInt(getCompressedCode(compressionType));
            buffer.position(startOfBody);
            buffer.putInt(encodedBody.length);
            buffer.put(compressedBody);
            buffer.limit(buffer.position());
            return;
        }
    }
    // Encoded length 4 less than actual length
    buffer.putInt(startPosition, length - 4);
    buffer.limit(buffer.position());
}
Also used : LZ4Compressor(net.jpountz.lz4.LZ4Compressor) LZ4Factory(net.jpountz.lz4.LZ4Factory)
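
The encoder above prefixes the LZ4 payload with the uncompressed body length, so the receiving side can size its output buffer exactly. A minimal sketch of that decode path follows; decodeCompressedBody is a hypothetical helper, not Vespa code, and assumes the buffer is positioned at the start of the compressed body.

import java.nio.ByteBuffer;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;

// Hypothetical counterpart to encodeAndCompressBody, not taken from Vespa
static byte[] decodeCompressedBody(ByteBuffer buffer, int bodyLength) {
    // The encoder wrote the uncompressed length first, then the LZ4 payload
    int uncompressedLength = buffer.getInt();
    byte[] compressed = new byte[bodyLength - 4];
    buffer.get(compressed);
    byte[] body = new byte[uncompressedLength];
    // The exact output size is known, so the fast decompressor applies
    LZ4FastDecompressor decompressor = LZ4Factory.fastestInstance().fastDecompressor();
    decompressor.decompress(compressed, body);
    return body;
}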

Example 8 with Lz4Compressor

use of net.jpountz.lz4.LZ4Compressor in project vespa by vespa-engine.

the class NormalSketch method onSerialize.

/**
 * Serializes the Sketch.
 *
 * Serialization format
 * ==================
 * Original size:     4 bytes
 * Compressed size:   4 bytes
 * Compressed data:   N * 1 bytes
 *
 * Invariant:
 *      compressed size <= original size
 *
 * Special case:
 *      compressed size == original size => data is uncompressed
 *
 * @param buf Serializer
 */
@Override
protected void onSerialize(Serializer buf) {
    super.onSerialize(buf);
    buf.putInt(null, data.length);
    try {
        LZ4Compressor c = LZ4Factory.safeInstance().highCompressor();
        byte[] compressedData = new byte[data.length];
        int compressedSize = c.compress(data, compressedData);
        serializeDataArray(compressedData, compressedSize, buf);
    } catch (LZ4Exception e) {
        // LZ4Compressor.compress will throw this exception if it is unable to compress
        // into compressedData (when compressed size >= original size)
        serializeDataArray(data, data.length, buf);
    }
}
Also used : LZ4Compressor(net.jpountz.lz4.LZ4Compressor) LZ4Exception(net.jpountz.lz4.LZ4Exception)
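
The format described in the javadoc makes the read side mechanical: consume both size fields, then either take the bytes verbatim (equal sizes) or LZ4-decompress them into a buffer of the original size. The sketch below is written against a plain ByteBuffer rather than Vespa's Serializer/Deserializer types, purely to keep it self-contained; it is not the actual NormalSketch deserialization code.

import java.nio.ByteBuffer;
import net.jpountz.lz4.LZ4Factory;

// Illustrative read side of the format documented above (assumption: plain ByteBuffer input)
static byte[] readSketchData(ByteBuffer buf) {
    // "Original size: 4 bytes" followed by "Compressed size: 4 bytes"
    int originalSize = buf.getInt();
    int compressedSize = buf.getInt();
    byte[] stored = new byte[compressedSize];
    buf.get(stored);
    // Special case from the format: equal sizes mean the data was stored uncompressed
    if (compressedSize == originalSize)
        return stored;
    byte[] data = new byte[originalSize];
    LZ4Factory.safeInstance().fastDecompressor().decompress(stored, data);
    return data;
}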

Example 9 with Lz4Compressor

use of io.airlift.compress.lz4.Lz4Compressor in project presto by prestodb.

the class TestParquetCompressionUtils method performTest.

private void performTest(CompressionCodecName codec, int inputLength) throws IOException {
    Compressor compressor = null;
    if (codec.equals(ZSTD)) {
        compressor = new ZstdCompressor();
    } else if (codec.equals(LZ4)) {
        compressor = new Lz4Compressor();
    }
    byte[] input = createArray(inputLength);
    byte[] output = new byte[inputLength + 512];
    int retLength = compress(compressor, input, inputLength, output, 0);
    Slice decompressedSlice = ParquetCompressionUtils.decompress(codec, wrappedBuffer(output, 0, retLength), inputLength);
    assertEquals(decompressedSlice, wrappedBuffer(input));
}
Also used : Lz4Compressor(io.airlift.compress.lz4.Lz4Compressor) Slice(io.airlift.slice.Slice) ZstdCompressor(io.airlift.compress.zstd.ZstdCompressor) Compressor(io.airlift.compress.Compressor)
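
For reference, a standalone round trip through aircompressor's array-based API looks like the sketch below; it is not part of the Presto test, which instead sizes its output with a fixed 512-byte margin and decompresses through ParquetCompressionUtils.

import io.airlift.compress.lz4.Lz4Compressor;
import io.airlift.compress.lz4.Lz4Decompressor;
import java.nio.charset.StandardCharsets;

public class Lz4RoundTrip {
    public static void main(String[] args) {
        byte[] input = "hello hello hello hello".getBytes(StandardCharsets.UTF_8);

        // maxCompressedLength gives a safe upper bound for the output buffer
        Lz4Compressor compressor = new Lz4Compressor();
        byte[] compressed = new byte[compressor.maxCompressedLength(input.length)];
        int compressedLength = compressor.compress(input, 0, input.length, compressed, 0, compressed.length);

        // The decompressor needs the original length to size its output exactly
        Lz4Decompressor decompressor = new Lz4Decompressor();
        byte[] restored = new byte[input.length];
        int restoredLength = decompressor.decompress(compressed, 0, compressedLength, restored, 0, restored.length);

        assert restoredLength == input.length;
    }
}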

Example 10 with Lz4Compressor

use of io.airlift.compress.lz4.Lz4Compressor in project presto by prestodb.

the class PageFileWriterFactory method createPagesSerdeForPageFile.

public static PagesSerde createPagesSerdeForPageFile(BlockEncodingSerde blockEncodingSerde, Optional<HiveCompressionCodec> compressionCodec) {
    if (!compressionCodec.isPresent()) {
        return new PagesSerde(blockEncodingSerde, Optional.empty(), Optional.empty(), Optional.empty());
    }
    PageCompressor pageCompressor = null;
    PageDecompressor pageDecompressor = null;
    switch(compressionCodec.get()) {
        case NONE:
            break;
        case SNAPPY:
            pageCompressor = new AirliftCompressorAdapter(new SnappyCompressor());
            pageDecompressor = new AirliftDecompressorAdapter(new SnappyDecompressor());
            break;
        case LZ4:
            pageCompressor = new AirliftCompressorAdapter(new Lz4Compressor());
            pageDecompressor = new AirliftDecompressorAdapter(new Lz4Decompressor());
            break;
        case GZIP:
            pageCompressor = new AirliftCompressorAdapter(new DeflateCompressor(OptionalInt.empty()));
            pageDecompressor = new AirliftDecompressorAdapter(new InflateDecompressor());
            break;
        case ZSTD:
            pageCompressor = new AirliftCompressorAdapter(new ZstdJniCompressor(OptionalInt.empty()));
            pageDecompressor = new AirliftDecompressorAdapter(new ZstdJniDecompressor());
            break;
        default:
            throw new PrestoException(GENERIC_INTERNAL_ERROR, format("%s compression is not supported for %s", compressionCodec.get().name(), PAGEFILE.getOutputFormat()));
    }
    return new PagesSerde(blockEncodingSerde, Optional.ofNullable(pageCompressor), Optional.ofNullable(pageDecompressor), Optional.empty());
}
Also used : PageCompressor(com.facebook.presto.spi.page.PageCompressor) Lz4Compressor(io.airlift.compress.lz4.Lz4Compressor) DeflateCompressor(com.facebook.presto.orc.zlib.DeflateCompressor) PrestoException(com.facebook.presto.spi.PrestoException) SnappyDecompressor(io.airlift.compress.snappy.SnappyDecompressor) PagesSerde(com.facebook.presto.spi.page.PagesSerde) SnappyCompressor(io.airlift.compress.snappy.SnappyCompressor) Lz4Decompressor(io.airlift.compress.lz4.Lz4Decompressor) InflateDecompressor(com.facebook.presto.orc.zlib.InflateDecompressor) ZstdJniDecompressor(com.facebook.presto.orc.zstd.ZstdJniDecompressor) ZstdJniCompressor(com.facebook.presto.orc.zstd.ZstdJniCompressor) PageDecompressor(com.facebook.presto.spi.page.PageDecompressor)
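
Calling the factory is a one-liner; the snippet below is a hypothetical usage (not from the Presto sources) that asks for an LZ4-compressing PagesSerde, assuming a BlockEncodingSerde instance named blockEncodingSerde is already in scope.

// Hypothetical call site, not from the Presto sources; blockEncodingSerde is assumed to exist
PagesSerde serde = PageFileWriterFactory.createPagesSerdeForPageFile(
        blockEncodingSerde,
        Optional.of(HiveCompressionCodec.LZ4));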

Aggregations

IOException (java.io.IOException) 11
LZ4Compressor (net.jpountz.lz4.LZ4Compressor) 10
Lz4Compressor (org.apache.hadoop.io.compress.lz4.Lz4Compressor) 10
Test (org.junit.Test) 10
Lz4Decompressor (org.apache.hadoop.io.compress.lz4.Lz4Decompressor) 5
ByteBuffer (java.nio.ByteBuffer) 3
Lz4Compressor (io.airlift.compress.lz4.Lz4Compressor) 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 2
DataInputStream (java.io.DataInputStream) 2
DataOutputStream (java.io.DataOutputStream) 2
LZ4Exception (net.jpountz.lz4.LZ4Exception) 2
BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream) 2
BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream) 2
DeflateCompressor (com.facebook.presto.orc.zlib.DeflateCompressor) 1
InflateDecompressor (com.facebook.presto.orc.zlib.InflateDecompressor) 1
ZstdJniCompressor (com.facebook.presto.orc.zstd.ZstdJniCompressor) 1
ZstdJniDecompressor (com.facebook.presto.orc.zstd.ZstdJniDecompressor) 1
PrestoException (com.facebook.presto.spi.PrestoException) 1
PageCompressor (com.facebook.presto.spi.page.PageCompressor) 1
PageDecompressor (com.facebook.presto.spi.page.PageDecompressor) 1