Use of org.h2.mvstore.WriteBuffer in project 468H2Project by lukeunderwood42.
From class TestDataUtils, method testWriteBuffer:
private static void testWriteBuffer() {
    WriteBuffer buff = new WriteBuffer();
    // write more than the initial capacity: the buffer has to grow
    // its underlying ByteBuffer rather than overflow
    buff.put(new byte[1500000]);
    buff.put(new byte[1900000]);
}
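Judging by the calls visible in the listings on this page, WriteBuffer offers a fluent API over a java.nio.ByteBuffer and grows it on demand, which is what writing 1.5 MB followed by 1.9 MB above exercises. A minimal usage sketch, restricted to methods that appear elsewhere on this page (the exact growth policy is an implementation detail):

WriteBuffer buff = new WriteBuffer();
buff.putInt(42)             // fixed-width int
    .putVarInt(300)         // variable-width int, 1-5 bytes
    .put((byte) 1);         // single byte
ByteBuffer raw = buff.getBuffer();   // view of the backing ByteBuffer
int written = raw.position();        // bytes written so far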
Use of org.h2.mvstore.WriteBuffer in project 468H2Project by lukeunderwood42.
From class Page, method write:
/**
* Store the page and update the position.
*
* @param chunk the chunk
* @param buff the target buffer
* @param toc prospective table of contents
* @return the position of the buffer just after the type
*/
protected final int write(Chunk chunk, WriteBuffer buff, List<Long> toc) {
    pageNo = toc.size();
    int keyCount = getKeyCount();
    int start = buff.position();
    buff.putInt(0)              // placeholder for pageLength
        .putShort((byte) 0)     // placeholder for check
        .putVarInt(pageNo)
        .putVarInt(map.getId())
        .putVarInt(keyCount);
    int typePos = buff.position();
    int type = isLeaf() ? PAGE_TYPE_LEAF : DataUtils.PAGE_TYPE_NODE;
    buff.put((byte) type);
    int childrenPos = buff.position();
    writeChildren(buff, true);
    int compressStart = buff.position();
    map.getKeyType().write(buff, keys, keyCount);
    writeValues(buff);
    MVStore store = map.getStore();
    int expLen = buff.position() - compressStart;
    if (expLen > 16) {
        int compressionLevel = store.getCompressionLevel();
        if (compressionLevel > 0) {
            Compressor compressor;
            int compressType;
            if (compressionLevel == 1) {
                compressor = store.getCompressorFast();
                compressType = DataUtils.PAGE_COMPRESSED;
            } else {
                compressor = store.getCompressorHigh();
                compressType = DataUtils.PAGE_COMPRESSED_HIGH;
            }
            byte[] comp = new byte[expLen * 2];
            ByteBuffer byteBuffer = buff.getBuffer();
            int pos = 0;
            byte[] exp;
            if (byteBuffer.hasArray()) {
                // compress straight from the backing array, no copy needed
                exp = byteBuffer.array();
                pos = byteBuffer.arrayOffset() + compressStart;
            } else {
                exp = Utils.newBytes(expLen);
                buff.position(compressStart).get(exp);
            }
            int compLen = compressor.compress(exp, pos, expLen, comp, 0);
            int plus = DataUtils.getVarIntLen(expLen - compLen);
            // keep the compressed form only if it is a net win, counting
            // the var-int that records the expanded-minus-compressed length
            if (compLen + plus < expLen) {
                buff.position(typePos).put((byte) (type | compressType));
                buff.position(compressStart).putVarInt(expLen - compLen)
                    .put(comp, 0, compLen);
            }
        }
    }
    int pageLength = buff.position() - start;
    long tocElement = DataUtils.getTocElement(getMapId(), start, pageLength, type);
    toc.add(tocElement);
    int chunkId = chunk.id;
    int check = DataUtils.getCheckValue(chunkId)
            ^ DataUtils.getCheckValue(start)
            ^ DataUtils.getCheckValue(pageLength);
    // back-patch the pageLength and check placeholders written above
    buff.putInt(start, pageLength).putShort(start + 4, (short) check);
    if (isSaved()) {
        throw DataUtils.newMVStoreException(DataUtils.ERROR_INTERNAL,
                "Page already stored");
    }
    long pagePos = DataUtils.getPagePos(chunkId, tocElement);
    boolean isDeleted = isRemoved();
    while (!posUpdater.compareAndSet(this, isDeleted ? 1L : 0L, pagePos)) {
        isDeleted = isRemoved();
    }
    store.cachePage(this);
    if (type == DataUtils.PAGE_TYPE_NODE) {
        // cache again - this will make sure nodes stay in the cache
        // for a longer time
        store.cachePage(this);
    }
    // 'pos' here is the page's position field, just published via posUpdater
    int pageLengthEncoded = DataUtils.getPageMaxLength(pos);
    boolean singleWriter = map.isSingleWriter();
    chunk.accountForWrittenPage(pageLengthEncoded, singleWriter);
    if (isDeleted) {
        store.accountForRemovedPage(pagePos, chunk.version + 1, singleWriter, pageNo);
    }
    diskSpaceUsed = pageLengthEncoded != DataUtils.PAGE_LARGE ? pageLengthEncoded : pageLength;
    return childrenPos;
}
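The serialized layout this method produces is: a 4-byte length and a 2-byte check value (both back-patched at the end), var-ints for pageNo, mapId and keyCount, the type byte, child references for non-leaf pages, and finally keys and values, possibly compressed. A hypothetical reader-side sketch of the fixed header follows; DataUtils.readVarInt exists in org.h2.mvstore, but treat this as illustrative rather than the actual reader:

static void readPageHeader(ByteBuffer page) {   // positioned at 'start'
    int pageLength = page.getInt();             // back-patched total length
    short check = page.getShort();              // xor of chunkId/start/length check values
    int pageNo = DataUtils.readVarInt(page);    // index into the chunk's toc
    int mapId = DataUtils.readVarInt(page);
    int keyCount = DataUtils.readVarInt(page);
    int type = page.get();                      // leaf/node bit plus compression flags
}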
Use of org.h2.mvstore.WriteBuffer in project jackrabbit-oak by apache.
From class PersistentCache, method broadcast:
void broadcast(CacheType type, Function<WriteBuffer, Void> writer) {
    Broadcaster b = broadcaster;
    if (b == null) {
        return;
    }
    // reuse a previously allocated buffer instead of allocating per call
    WriteBuffer buff = writeBuffer.get();
    if (buff == null) {
        buff = new WriteBuffer();
        writeBuffer.set(buff);
    }
    buff.clear();
    // space for the length
    buff.putInt(0);
    buff.put(broadcastId);
    buff.put((byte) type.ordinal());
    writer.apply(buff);
    ByteBuffer byteBuff = buff.getBuffer();
    int length = byteBuff.position();
    byteBuff.limit(length);
    // back-patch the total frame length into the placeholder at offset 0
    byteBuff.putInt(0, length);
    byteBuff.position(0);
    b.send(byteBuff);
}
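The frame format is: a 4-byte total length (back-patched), the sender's broadcastId (presumably so a receiver can drop its own messages), one byte for the cache type ordinal, then the writer's payload. A hypothetical receiver mirroring that framing, not taken from the jackrabbit-oak source (assumes CacheType is an enum, which type.ordinal() above suggests, and java.util.Arrays):

void receive(ByteBuffer buff) {
    int length = buff.getInt();                     // total frame length
    byte[] senderId = new byte[broadcastId.length];
    buff.get(senderId);
    if (Arrays.equals(senderId, broadcastId)) {
        return;                                     // skip our own broadcasts
    }
    CacheType type = CacheType.values()[buff.get()];
    // bytes remaining up to 'length' are the payload written by the Function
}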
Use of org.h2.mvstore.WriteBuffer in project h2database by h2database.
From class ValueDataType, method writeValue:
private void writeValue(WriteBuffer buff, Value v) {
    if (v == ValueNull.INSTANCE) {
        buff.put((byte) 0);
        return;
    }
    int type = v.getType();
    switch (type) {
    case Value.BOOLEAN:
        buff.put((byte) (v.getBoolean() ? BOOLEAN_TRUE : BOOLEAN_FALSE));
        break;
    case Value.BYTE:
        buff.put((byte) type).put(v.getByte());
        break;
    case Value.SHORT:
        buff.put((byte) type).putShort(v.getShort());
        break;
    case Value.ENUM:
    case Value.INT: {
        int x = v.getInt();
        if (x < 0) {
            buff.put((byte) INT_NEG).putVarInt(-x);
        } else if (x < 16) {
            // values 0..15 fit into the tag byte itself
            buff.put((byte) (INT_0_15 + x));
        } else {
            buff.put((byte) type).putVarInt(x);
        }
        break;
    }
    case Value.LONG: {
        long x = v.getLong();
        if (x < 0) {
            buff.put((byte) LONG_NEG).putVarLong(-x);
        } else if (x < 8) {
            buff.put((byte) (LONG_0_7 + x));
        } else {
            buff.put((byte) type).putVarLong(x);
        }
        break;
    }
    case Value.DECIMAL: {
        BigDecimal x = v.getBigDecimal();
        if (BigDecimal.ZERO.equals(x)) {
            buff.put((byte) DECIMAL_0_1);
        } else if (BigDecimal.ONE.equals(x)) {
            buff.put((byte) (DECIMAL_0_1 + 1));
        } else {
            int scale = x.scale();
            BigInteger b = x.unscaledValue();
            int bits = b.bitLength();
            if (bits <= 63) {
                if (scale == 0) {
                    buff.put((byte) DECIMAL_SMALL_0).putVarLong(b.longValue());
                } else {
                    buff.put((byte) DECIMAL_SMALL).putVarInt(scale)
                            .putVarLong(b.longValue());
                }
            } else {
                byte[] bytes = b.toByteArray();
                buff.put((byte) type).putVarInt(scale)
                        .putVarInt(bytes.length).put(bytes);
            }
        }
        break;
    }
    case Value.TIME: {
        ValueTime t = (ValueTime) v;
        // split nanos-of-day into millis plus the remaining nanos
        long nanos = t.getNanos();
        long millis = nanos / 1000000;
        nanos -= millis * 1000000;
        buff.put((byte) type).putVarLong(millis).putVarLong(nanos);
        break;
    }
    case Value.DATE: {
        long x = ((ValueDate) v).getDateValue();
        buff.put((byte) type).putVarLong(x);
        break;
    }
    case Value.TIMESTAMP: {
        ValueTimestamp ts = (ValueTimestamp) v;
        long dateValue = ts.getDateValue();
        long nanos = ts.getTimeNanos();
        long millis = nanos / 1000000;
        nanos -= millis * 1000000;
        buff.put((byte) type).putVarLong(dateValue)
                .putVarLong(millis).putVarLong(nanos);
        break;
    }
    case Value.TIMESTAMP_TZ: {
        ValueTimestampTimeZone ts = (ValueTimestampTimeZone) v;
        long dateValue = ts.getDateValue();
        long nanos = ts.getTimeNanos();
        long millis = nanos / 1000000;
        nanos -= millis * 1000000;
        buff.put((byte) type).putVarLong(dateValue).putVarLong(millis)
                .putVarLong(nanos).putVarInt(ts.getTimeZoneOffsetMins());
        break;
    }
    case Value.JAVA_OBJECT: {
        byte[] b = v.getBytesNoCopy();
        buff.put((byte) type).putVarInt(b.length).put(b);
        break;
    }
    case Value.BYTES: {
        byte[] b = v.getBytesNoCopy();
        int len = b.length;
        if (len < 32) {
            // short arrays fold the length into the tag byte
            buff.put((byte) (BYTES_0_31 + len)).put(b);
        } else {
            buff.put((byte) type).putVarInt(len).put(b);
        }
        break;
    }
    case Value.UUID: {
        ValueUuid uuid = (ValueUuid) v;
        buff.put((byte) type).putLong(uuid.getHigh()).putLong(uuid.getLow());
        break;
    }
    case Value.STRING: {
        String s = v.getString();
        int len = s.length();
        if (len < 32) {
            buff.put((byte) (STRING_0_31 + len)).putStringData(s, len);
        } else {
            buff.put((byte) type);
            writeString(buff, s);
        }
        break;
    }
    case Value.STRING_IGNORECASE:
    case Value.STRING_FIXED:
        buff.put((byte) type);
        writeString(buff, v.getString());
        break;
    case Value.DOUBLE: {
        double x = v.getDouble();
        if (x == 1.0d) {
            buff.put((byte) (DOUBLE_0_1 + 1));
        } else {
            long d = Double.doubleToLongBits(x);
            if (d == ValueDouble.ZERO_BITS) {
                buff.put((byte) DOUBLE_0_1);
            } else {
                // bit-reverse so the trailing zero mantissa bits of common
                // doubles become leading zeros, shortening the var-long
                buff.put((byte) type).putVarLong(Long.reverse(d));
            }
        }
        break;
    }
    case Value.FLOAT: {
        float x = v.getFloat();
        if (x == 1.0f) {
            buff.put((byte) (FLOAT_0_1 + 1));
        } else {
            int f = Float.floatToIntBits(x);
            if (f == ValueFloat.ZERO_BITS) {
                buff.put((byte) FLOAT_0_1);
            } else {
                buff.put((byte) type).putVarInt(Integer.reverse(f));
            }
        }
        break;
    }
    case Value.BLOB:
    case Value.CLOB: {
        buff.put((byte) type);
        ValueLobDb lob = (ValueLobDb) v;
        byte[] small = lob.getSmall();
        if (small == null) {
            // -3 marks a LOB stored outside the row, referenced by
            // table id and lob id
            buff.putVarInt(-3).putVarInt(lob.getTableId())
                    .putVarLong(lob.getLobId()).putVarLong(lob.getPrecision());
        } else {
            buff.putVarInt(small.length).put(small);
        }
        break;
    }
    case Value.ARRAY: {
        Value[] list = ((ValueArray) v).getList();
        buff.put((byte) type).putVarInt(list.length);
        for (Value x : list) {
            writeValue(buff, x);
        }
        break;
    }
    case Value.RESULT_SET: {
        buff.put((byte) type);
        try {
            ResultSet rs = ((ValueResultSet) v).getResultSet();
            rs.beforeFirst();
            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();
            buff.putVarInt(columnCount);
            for (int i = 0; i < columnCount; i++) {
                writeString(buff, meta.getColumnName(i + 1));
                buff.putVarInt(meta.getColumnType(i + 1))
                        .putVarInt(meta.getPrecision(i + 1))
                        .putVarInt(meta.getScale(i + 1));
            }
            while (rs.next()) {
                buff.put((byte) 1);   // row marker
                for (int i = 0; i < columnCount; i++) {
                    int t = org.h2.value.DataType.getValueTypeFromResultSet(meta, i + 1);
                    Value val = org.h2.value.DataType.readValue(null, rs, i + 1, t);
                    writeValue(buff, val);
                }
            }
            buff.put((byte) 0);       // end-of-rows marker
            rs.beforeFirst();
        } catch (SQLException e) {
            throw DbException.convert(e);
        }
        break;
    }
    case Value.GEOMETRY: {
        byte[] b = v.getBytes();
        int len = b.length;
        buff.put((byte) type).putVarInt(len).put(b);
        break;
    }
    default:
        if (JdbcUtils.customDataTypesHandler != null) {
            byte[] b = v.getBytesNoCopy();
            buff.put((byte) CUSTOM_DATA_TYPE).putVarInt(type)
                    .putVarInt(b.length).put(b);
            break;
        }
        DbException.throwInternalError("type=" + v.getType());
    }
}
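Two details of this encoding are worth calling out. Small values get one-byte encodings: ints 0-15, longs 0-7, and strings or byte arrays shorter than 32 fold their length into the tag byte. And the DOUBLE/FLOAT cases store the bit-reversed IEEE-754 pattern, because typical doubles carry their information in the high bits with mostly-zero low mantissa bits; reversing moves that information into the low bits, where the var-long encoding is cheapest. A worked example in plain Java, independent of H2:

long d = Double.doubleToLongBits(2.0);  // 0x4000000000000000
long r = Long.reverse(d);               // 0x0000000000000002
// a var-long for r fits in 1 byte; one for d would need 9 bytes,
// since d has 63 significant bits (9 groups of 7)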
Use of org.h2.mvstore.WriteBuffer in project h2database by h2database.
From class Page, method write: this listing is identical to the Page.write method shown above and is not repeated here.