Example 1 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class Page, method write:

/**
 * Store the page and update the position.
 *
 * @param chunk the chunk
 * @param buff the target buffer
 * @return the position of the buffer just after the type
 */
private int write(Chunk chunk, WriteBuffer buff) {
    int start = buff.position();
    int len = keys.length;
    int type = children != null ? DataUtils.PAGE_TYPE_NODE : DataUtils.PAGE_TYPE_LEAF;
    buff.putInt(0).putShort((short) 0).putVarInt(map.getId()).putVarInt(len);
    int typePos = buff.position();
    buff.put((byte) type);
    if (type == DataUtils.PAGE_TYPE_NODE) {
        writeChildren(buff);
        for (int i = 0; i <= len; i++) {
            buff.putVarLong(children[i].count);
        }
    }
    int compressStart = buff.position();
    map.getKeyType().write(buff, keys, len, true);
    if (type == DataUtils.PAGE_TYPE_LEAF) {
        map.getValueType().write(buff, values, len, false);
    }
    MVStore store = map.getStore();
    int expLen = buff.position() - compressStart;
    if (expLen > 16) {
        int compressionLevel = store.getCompressionLevel();
        if (compressionLevel > 0) {
            Compressor compressor;
            int compressType;
            if (compressionLevel == 1) {
                compressor = map.getStore().getCompressorFast();
                compressType = DataUtils.PAGE_COMPRESSED;
            } else {
                compressor = map.getStore().getCompressorHigh();
                compressType = DataUtils.PAGE_COMPRESSED_HIGH;
            }
            byte[] exp = new byte[expLen];
            buff.position(compressStart).get(exp);
            byte[] comp = new byte[expLen * 2];
            int compLen = compressor.compress(exp, expLen, comp, 0);
            // size of the var-int actually stored below (expLen - compLen)
            int plus = DataUtils.getVarIntLen(expLen - compLen);
            if (compLen + plus < expLen) {
                buff.position(typePos).put((byte) (type + compressType));
                buff.position(compressStart).putVarInt(expLen - compLen).put(comp, 0, compLen);
            }
        }
    }
    int pageLength = buff.position() - start;
    int chunkId = chunk.id;
    int check = DataUtils.getCheckValue(chunkId) ^ DataUtils.getCheckValue(start) ^ DataUtils.getCheckValue(pageLength);
    buff.putInt(start, pageLength).putShort(start + 4, (short) check);
    if (pos != 0) {
        throw DataUtils.newIllegalStateException(DataUtils.ERROR_INTERNAL, "Page already stored");
    }
    pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
    store.cachePage(pos, this, getMemory());
    if (type == DataUtils.PAGE_TYPE_NODE) {
        // cache again - this will make sure nodes stay in the cache
        // for a longer time
        store.cachePage(pos, this, getMemory());
    }
    long max = DataUtils.getPageMaxLength(pos);
    chunk.maxLen += max;
    chunk.maxLenLive += max;
    chunk.pageCount++;
    chunk.pageCountLive++;
    if (removedInMemory) {
        // if the page was removed _before_ the position was assigned, we
        // need to mark it removed here, so the fields are updated
        // when the next chunk is stored
        map.removePage(pos, memory);
    }
    return typePos + 1;
}
Also used: Compressor (org.h2.compress.Compressor)
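
Compression in write is speculative: the page is first serialized uncompressed, and the compressed form only replaces it when it is a net win after the var-int header. A minimal standalone sketch of that decision, assuming the same four-argument Compressor.compress signature used above; varIntLen is a hypothetical stand-in for DataUtils.getVarIntLen:

import org.h2.compress.CompressLZF;
import org.h2.compress.Compressor;

public class CompressIfSmaller {

    public static void main(String[] args) {
        // expanded (uncompressed) payload; all zeros compress extremely well
        byte[] exp = new byte[1024];
        Compressor compressor = new CompressLZF();
        // worst-case output can exceed the input, hence the 2x buffer,
        // mirroring Page.write above
        byte[] comp = new byte[exp.length * 2];
        int compLen = compressor.compress(exp, exp.length, comp, 0);
        // the page stores a var-int holding (expLen - compLen); only switch
        // to the compressed form if payload plus that header is still smaller
        int plus = varIntLen(exp.length - compLen);
        boolean useCompressed = compLen + plus < exp.length;
        System.out.println("compressed " + exp.length + " -> " + compLen
                + " bytes, worth storing: " + useCompressed);
    }

    // hypothetical stand-in for DataUtils.getVarIntLen:
    // number of bytes a var-int encoding of x needs
    private static int varIntLen(int x) {
        int len = 1;
        while ((x & ~0x7f) != 0) {
            x >>>= 7;
            len++;
        }
        return len;
    }
}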

Example 2 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class Page, method read:

/**
 * Read the page from the buffer.
 *
 * @param buff the buffer to read from
 */
private void read(ByteBuffer buff) {
    int chunkId = DataUtils.getPageChunkId(pos);
    int offset = DataUtils.getPageOffset(pos);
    int start = buff.position();
    // does not include optional part (pageNo)
    int pageLength = buff.getInt();
    int remaining = buff.remaining() + 4;
    if (pageLength > remaining || pageLength < 4) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_FILE_CORRUPT, "File corrupted in chunk {0}, expected page length 4..{1}, got {2}", chunkId, remaining, pageLength);
    }
    short check = buff.getShort();
    int checkTest = DataUtils.getCheckValue(chunkId) ^ DataUtils.getCheckValue(offset) ^ DataUtils.getCheckValue(pageLength);
    if (check != (short) checkTest) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_FILE_CORRUPT, "File corrupted in chunk {0}, expected check value {1}, got {2}", chunkId, checkTest, check);
    }
    pageNo = DataUtils.readVarInt(buff);
    if (pageNo < 0) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_FILE_CORRUPT, "File corrupted in chunk {0}, got negative page No {1}", chunkId, pageNo);
    }
    int mapId = DataUtils.readVarInt(buff);
    if (mapId != map.getId()) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_FILE_CORRUPT, "File corrupted in chunk {0}, expected map id {1}, got {2}", chunkId, map.getId(), mapId);
    }
    int keyCount = DataUtils.readVarInt(buff);
    keys = createKeyStorage(keyCount);
    int type = buff.get();
    if (isLeaf() != ((type & 1) == PAGE_TYPE_LEAF)) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_FILE_CORRUPT, "File corrupted in chunk {0}, expected node type {1}, got {2}", chunkId, isLeaf() ? "0" : "1", type);
    }
    // to restrain hacky GenericDataType, which grabs the whole remainder of the buffer
    buff.limit(start + pageLength);
    if (!isLeaf()) {
        readPayLoad(buff);
    }
    boolean compressed = (type & DataUtils.PAGE_COMPRESSED) != 0;
    if (compressed) {
        Compressor compressor;
        if ((type & DataUtils.PAGE_COMPRESSED_HIGH) == DataUtils.PAGE_COMPRESSED_HIGH) {
            compressor = map.getStore().getCompressorHigh();
        } else {
            compressor = map.getStore().getCompressorFast();
        }
        int lenAdd = DataUtils.readVarInt(buff);
        int compLen = buff.remaining();
        byte[] comp;
        int pos = 0;
        if (buff.hasArray()) {
            comp = buff.array();
            pos = buff.arrayOffset() + buff.position();
        } else {
            comp = Utils.newBytes(compLen);
            buff.get(comp);
        }
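        // expanded length = remaining compressed bytes + the stored var-int difference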
        int l = compLen + lenAdd;
        buff = ByteBuffer.allocate(l);
        compressor.expand(comp, pos, compLen, buff.array(), buff.arrayOffset(), l);
    }
    map.getKeyType().read(buff, keys, keyCount);
    if (isLeaf()) {
        readPayLoad(buff);
    }
    diskSpaceUsed = pageLength;
    recalculateMemory();
}
Also used: Compressor (org.h2.compress.Compressor)
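
read mirrors write: lenAdd is the var-int difference stored by write, so the remaining compressed bytes plus lenAdd give the size of the buffer to allocate before expanding. A minimal round-trip sketch against the same Compressor interface:

import java.nio.ByteBuffer;

import org.h2.compress.CompressLZF;
import org.h2.compress.Compressor;

public class ExpandRoundTrip {

    public static void main(String[] args) {
        Compressor compressor = new CompressLZF();
        byte[] original = "payload payload payload payload payload".getBytes();
        byte[] comp = new byte[original.length * 2];
        int compLen = compressor.compress(original, original.length, comp, 0);

        // Page.read only knows compLen (bytes left in the buffer) and lenAdd
        // (the stored var-int); their sum is the expanded length to allocate
        int lenAdd = original.length - compLen;
        ByteBuffer buff = ByteBuffer.allocate(compLen + lenAdd);
        compressor.expand(comp, 0, compLen, buff.array(), buff.arrayOffset(), compLen + lenAdd);

        // prints the original payload
        System.out.println(new String(buff.array()));
    }
}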

Example 3 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class CompressTool, method expand:

/**
 * Expands the compressed data.
 *
 * @param in the byte array with the compressed data
 * @return the uncompressed data
 */
public byte[] expand(byte[] in) {
    if (in.length == 0) {
        throw DbException.get(ErrorCode.COMPRESSION_ERROR);
    }
    int algorithm = in[0];
    Compressor compress = getCompressor(algorithm);
    try {
        int len = readVariableInt(in, 1);
        int start = 1 + getVariableIntLength(len);
        byte[] buff = Utils.newBytes(len);
        compress.expand(in, start, in.length - start, buff, 0, len);
        return buff;
    } catch (Exception e) {
        throw DbException.get(ErrorCode.COMPRESSION_ERROR, e);
    }
}
Also used: Compressor (org.h2.compress.Compressor), IOException (java.io.IOException), DbException (org.h2.message.DbException)
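
CompressTool (org.h2.tools.CompressTool) is the public entry point. expand reads the algorithm id from the first byte and the expanded length from the var-int that follows, both written by the matching compress method below. A short usage sketch:

import org.h2.tools.CompressTool;

public class ExpandDemo {

    public static void main(String[] args) {
        CompressTool tool = CompressTool.getInstance();
        byte[] data = "abcabcabcabcabcabcabcabc".getBytes();
        byte[] compressed = tool.compress(data, "LZF");
        // compressed[0] is the algorithm id that expand() hands to getCompressor()
        System.out.println("algorithm byte: " + compressed[0]);
        byte[] restored = tool.expand(compressed);
        System.out.println("round trip ok: " + java.util.Arrays.equals(data, restored));
    }
}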

Example 4 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class CompressTool, method compress:

/**
 * Compresses the data using the specified algorithm. If no algorithm is
 * supplied, LZF is used.
 *
 * @param in the byte array with the original data
 * @param algorithm the algorithm (LZF, DEFLATE)
 * @return the compressed data
 */
public byte[] compress(byte[] in, String algorithm) {
    int len = in.length;
    if (in.length < 5) {
        algorithm = "NO";
    }
    Compressor compress = getCompressor(algorithm);
    byte[] buff = getBuffer((len < 100 ? len + 100 : len) * 2);
    int newLen = compress(in, in.length, compress, buff);
    return Utils.copyBytes(buff, newLen);
}
Also used: Compressor (org.h2.compress.Compressor)
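
Note the guard at the top: inputs shorter than 5 bytes are stored with the "NO" (uncompressed) algorithm, since the one-byte algorithm id plus the length var-int would outweigh any savings. A quick check of that fallback, assuming the Compressor.NO constant defined on the interface:

import org.h2.compress.Compressor;
import org.h2.tools.CompressTool;

public class TinyInputFallback {

    public static void main(String[] args) {
        CompressTool tool = CompressTool.getInstance();
        byte[] tiny = {1, 2, 3}; // fewer than 5 bytes
        byte[] out = tool.compress(tiny, "LZF");
        // first byte is the algorithm id; NO means LZF was silently skipped
        System.out.println("stored as NO: " + (out[0] == Compressor.NO));
        System.out.println("restored length: " + tool.expand(out).length);
    }
}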

Example 5 with Compressor

Use of org.h2.compress.Compressor in project h2database by h2database.

The class CompressTool, method getCompressor:

private static Compressor getCompressor(String algorithm) {
    if (algorithm == null) {
        algorithm = "LZF";
    }
    int idx = algorithm.indexOf(' ');
    String options = null;
    if (idx > 0) {
        options = algorithm.substring(idx + 1);
        algorithm = algorithm.substring(0, idx);
    }
    int a = getCompressAlgorithm(algorithm);
    Compressor compress = getCompressor(a);
    compress.setOptions(options);
    return compress;
}
Also used: Compressor (org.h2.compress.Compressor)
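
Everything after the first space in the algorithm string is treated as options and handed to Compressor.setOptions, so an algorithm can be tuned inline. A hedged sketch: the "level 9" option string is an assumption about what the DEFLATE implementation accepts; valid keywords are implementation-specific, so check the CompressDeflate source for your version ("LZF" with no options is always safe):

import org.h2.tools.CompressTool;

public class AlgorithmOptions {

    public static void main(String[] args) {
        CompressTool tool = CompressTool.getInstance();
        byte[] data = new byte[4096]; // trivially compressible
        // plain algorithm name: no options are passed to the Compressor
        byte[] plain = tool.compress(data, "DEFLATE");
        // everything after the first space goes to Compressor.setOptions();
        // assumed option string, see the lead-in above
        byte[] tuned = tool.compress(data, "DEFLATE level 9");
        System.out.println(plain.length + " / " + tuned.length + " bytes");
    }
}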

Aggregations

Compressor (org.h2.compress.Compressor): 26
IOException (java.io.IOException): 12
Compressor (org.gridgain.internal.h2.compress.Compressor): 8
ByteBuffer (java.nio.ByteBuffer): 7
DbException (org.h2.message.DbException): 6
ByteArrayInputStream (java.io.ByteArrayInputStream): 4
InputStream (java.io.InputStream): 4
PrintWriter (java.io.PrintWriter): 4
FileChannel (java.nio.channels.FileChannel): 4
Connection (java.sql.Connection): 4
ResultSet (java.sql.ResultSet): 4
Statement (java.sql.Statement): 4
ArrayList (java.util.ArrayList): 4
TreeMap (java.util.TreeMap): 4
CompressLZF (org.h2.compress.CompressLZF): 3
DbException (org.gridgain.internal.h2.message.DbException): 2
CompressLZF (org.gridgain.internal.h2.compress.CompressLZF): 1