Use of org.h2.compress.Compressor in the h2database project.
The class Page, method write.
/**
 * Store the page and update the position.
 *
 * @param chunk the chunk
 * @param buff the target buffer
 * @return the position of the buffer just after the type
 */
private int write(Chunk chunk, WriteBuffer buff) {
    int start = buff.position();
    int len = keys.length;
    int type = children != null ? DataUtils.PAGE_TYPE_NODE : DataUtils.PAGE_TYPE_LEAF;
    // Write placeholders for the page length (int) and check value (short);
    // they are patched in at 'start' once the final page length is known.
    buff.putInt(0).putShort((byte) 0).putVarInt(map.getId()).putVarInt(len);
    int typePos = buff.position();
    buff.put((byte) type);
    if (type == DataUtils.PAGE_TYPE_NODE) {
        writeChildren(buff);
        // A node with len keys has len + 1 children; store each child's
        // descendant entry count.
        for (int i = 0; i <= len; i++) {
            buff.putVarLong(children[i].count);
        }
    }
    // Everything from here on (keys, and values for leaves) is the part
    // that may optionally be compressed.
    int compressStart = buff.position();
    map.getKeyType().write(buff, keys, len, true);
    if (type == DataUtils.PAGE_TYPE_LEAF) {
        map.getValueType().write(buff, values, len, false);
    }
    MVStore store = map.getStore();
    int expLen = buff.position() - compressStart;
    // Compressing tiny payloads is not worth the overhead.
    if (expLen > 16) {
        int compressionLevel = store.getCompressionLevel();
        if (compressionLevel > 0) {
            Compressor compressor;
            int compressType;
            if (compressionLevel == 1) {
                compressor = store.getCompressorFast();
                compressType = DataUtils.PAGE_COMPRESSED;
            } else {
                compressor = store.getCompressorHigh();
                compressType = DataUtils.PAGE_COMPRESSED_HIGH;
            }
            byte[] exp = new byte[expLen];
            buff.position(compressStart).get(exp);
            byte[] comp = new byte[expLen * 2];
            int compLen = compressor.compress(exp, expLen, comp, 0);
            // Keep the compressed form only if it is smaller even after
            // adding the var-int that stores (expanded - compressed) length.
            int plus = DataUtils.getVarIntLen(compLen - expLen);
            if (compLen + plus < expLen) {
                buff.position(typePos).put((byte) (type + compressType));
                buff.position(compressStart).putVarInt(expLen - compLen).put(comp, 0, compLen);
            }
        }
    }
    int pageLength = buff.position() - start;
    int chunkId = chunk.id;
    int check = DataUtils.getCheckValue(chunkId) ^ DataUtils.getCheckValue(start) ^ DataUtils.getCheckValue(pageLength);
    // Patch the placeholders written at the start of the page.
    buff.putInt(start, pageLength).putShort(start + 4, (short) check);
    if (pos != 0) {
        throw DataUtils.newIllegalStateException(DataUtils.ERROR_INTERNAL, "Page already stored");
    }
    pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
    store.cachePage(pos, this, getMemory());
    if (type == DataUtils.PAGE_TYPE_NODE) {
        // cache again - this will make sure nodes stays in the cache
        // for a longer time
        store.cachePage(pos, this, getMemory());
    }
    long max = DataUtils.getPageMaxLength(pos);
    chunk.maxLen += max;
    chunk.maxLenLive += max;
    chunk.pageCount++;
    chunk.pageCountLive++;
    if (removedInMemory) {
        // if the page was removed _before_ the position was assigned, we
        // need to mark it removed here, so the fields are updated
        // when the next chunk is stored
        map.removePage(pos, memory);
    }
    return typePos + 1;
}
Use of org.h2.compress.Compressor in the h2database project.
The class CompressTool, method expand.
/**
 * Expands the compressed data.
 *
 * @param in the byte array with the compressed data
 * @return the uncompressed data
 */
public byte[] expand(byte[] in) {
    // The first byte identifies the compression algorithm used.
    Compressor compress = getCompressor(in[0]);
    try {
        // The uncompressed length is stored as a variable-size int right
        // after the algorithm byte; the compressed payload follows it.
        int uncompressedLen = readVariableInt(in, 1);
        int payloadStart = 1 + getVariableIntLength(uncompressedLen);
        byte[] result = Utils.newBytes(uncompressedLen);
        compress.expand(in, payloadStart, in.length - payloadStart, result, 0, uncompressedLen);
        return result;
    } catch (Exception e) {
        throw DbException.get(ErrorCode.COMPRESSION_ERROR, e);
    }
}
Use of org.h2.compress.Compressor in the h2database project.
The class CompressTool, method getCompressor.
// Resolves an algorithm string such as "LZF" or "DEFLATE level 9" into a
// configured Compressor; a null algorithm defaults to LZF.
private static Compressor getCompressor(String algorithm) {
    if (algorithm == null) {
        algorithm = "LZF";
    }
    // Anything after the first space is treated as algorithm options.
    String options = null;
    int spaceIdx = algorithm.indexOf(' ');
    if (spaceIdx > 0) {
        options = algorithm.substring(spaceIdx + 1);
        algorithm = algorithm.substring(0, spaceIdx);
    }
    Compressor result = getCompressor(getCompressAlgorithm(algorithm));
    result.setOptions(options);
    return result;
}
Use of org.h2.compress.Compressor in the h2database project.
The class CompressTool, method expand.
/**
 * INTERNAL
 */
public static void expand(byte[] in, byte[] out, int outPos) {
    // The leading byte selects the decompression algorithm.
    Compressor compress = getCompressor(in[0]);
    try {
        // Variable-size int holding the expanded length, then the payload.
        int expandedLen = readVariableInt(in, 1);
        int dataStart = 1 + getVariableIntLength(expandedLen);
        compress.expand(in, dataStart, in.length - dataStart, out, outPos, expandedLen);
    } catch (Exception e) {
        throw DbException.get(ErrorCode.COMPRESSION_ERROR, e);
    }
}
Use of org.h2.compress.Compressor in the h2database project.
The class CompressTool, method compress.
/**
 * Compresses the data using the specified algorithm. If no algorithm is
 * supplied, LZF is used.
 *
 * @param in the byte array with the original data
 * @param algorithm the algorithm (LZF, DEFLATE)
 * @return the compressed data
 */
public byte[] compress(byte[] in, String algorithm) {
    int len = in.length;
    // Very small inputs are not worth compressing; store them as-is.
    if (len < 5) {
        algorithm = "NO";
    }
    Compressor compress = getCompressor(algorithm);
    // Allow headroom: small inputs can grow, so over-allocate the buffer.
    byte[] buff = getBuffer((len < 100 ? len + 100 : len) * 2);
    int newLen = compress(in, len, compress, buff);
    return Utils.copyBytes(buff, newLen);
}
Aggregations