Usage example of herddb.network.KeyValue in the herddb project (by diennea): class TableSpaceRestoreSourceFromFile, method nextTableDataChunk.
/**
 * Reads the next batch of table records from the backup stream.
 * <p>
 * The stream encodes each chunk as a v-int record count followed by
 * {@code count} (key, value) byte-array pairs. A count equal to
 * {@code Integer.MIN_VALUE} is the end-of-table sentinel.
 *
 * @return the decoded records, or {@code null} once the end-of-table
 *         marker has been consumed
 * @throws DataStorageManagerException if the underlying stream fails
 */
@Override
public List<KeyValue> nextTableDataChunk() throws DataStorageManagerException {
    try {
        final int count = in.readVInt();
        if (count == Integer.MIN_VALUE) {
            // EndOfTableMarker
            listener.log("tablefinished", "table finished after " + currentTableSize + " records", Collections.singletonMap("count", count));
            return null;
        }
        listener.log("sendtabledata", "sending " + count + ", total " + currentTableSize, Collections.singletonMap("count", count));
        final List<KeyValue> chunk = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            // Java evaluates arguments left-to-right, so the key array is
            // always read from the stream before the value array.
            chunk.add(new KeyValue(in.readArray(), in.readArray()));
        }
        currentTableSize += count;
        return chunk;
    } catch (IOException err) {
        throw new DataStorageManagerException(err);
    }
}
Usage example of herddb.network.KeyValue in the herddb project (by diennea): class ServerSideConnectionPeer, method handlePushTxLogChunk.
/**
 * Handles a client push of a chunk of transaction-log entries during a
 * tablespace restore: decodes the {@link KeyValue} list into
 * {@code DumpedLogEntry} objects (key = serialized {@code LogSequenceNumber},
 * value = raw log payload), applies them to the target tablespace manager,
 * and replies with ACK on success or an ERROR message on failure.
 *
 * @param message  the incoming request carrying "tableSpace" and "data" parameters
 * @param _channel the channel to send the reply on
 */
private void handlePushTxLogChunk(Message message, Channel _channel) {
    try {
        String tableSpace = (String) message.parameters.get("tableSpace");
        // The wire format guarantees this parameter is a List<KeyValue>;
        // the cast is unavoidably unchecked because of type erasure.
        @SuppressWarnings("unchecked")
        List<KeyValue> data = (List<KeyValue>) message.parameters.get("data");
        // Parameterized form avoids eager string concatenation when the level is disabled.
        LOGGER.log(Level.INFO, "Received {0} records for restore of txlog in tableSpace {1}",
                new Object[]{data.size(), tableSpace});
        List<DumpedLogEntry> entries = new ArrayList<>(data.size());
        for (KeyValue kv : data) {
            entries.add(new DumpedLogEntry(LogSequenceNumber.deserialize(kv.key), kv.value));
        }
        // NOTE(review): getTableSpaceManager may return null if the tablespace is
        // unknown on this node; that would surface as a NPE caught below and be
        // reported to the client as a generic ERROR — confirm this is intended.
        server.getManager().getTableSpaceManager(tableSpace).restoreRawDumpedEntryLogs(entries);
        _channel.sendReplyMessage(message, Message.ACK(null));
    } catch (Exception err) {
        // Boundary handler: any failure is reported back to the client rather
        // than propagated, flagging leadership loss so the client can retry.
        Message error = Message.ERROR(null, err);
        if (err instanceof NotLeaderException) {
            error.setParameter("notLeader", "true");
        }
        _channel.sendReplyMessage(message, error);
    }
}
Usage example of herddb.network.KeyValue in the herddb project (by diennea): class MessageUtils, method readEncodedSimpleValue.
/**
 * Decodes one value from the wire format, given the already-consumed opcode
 * byte that identifies its type. Container opcodes (map, list, set, tuple
 * list, key-value) recurse into this method for their elements; the MAP2
 * variant is terminated by a sentinel opcode rather than a length prefix.
 *
 * @param _opcode the type opcode already read from {@code encoded}
 * @param encoded the buffer positioned just after the opcode
 * @return the decoded value; {@code null} for {@code OPCODE_NULL_VALUE}
 * @throws RuntimeException if the opcode is not recognized
 */
private static Object readEncodedSimpleValue(byte _opcode, ByteBuf encoded) {
    switch(_opcode) {
        case OPCODE_NULL_VALUE: {
            return null;
        }
        case OPCODE_STRING_VALUE: {
            return readUTF8String(encoded);
        }
        case OPCODE_INT_VALUE: {
            return encoded.readInt();
        }
        case OPCODE_V_INT_VALUE: {
            return ByteBufUtils.readVInt(encoded);
        }
        case OPCODE_Z_INT_VALUE: {
            return ByteBufUtils.readZInt(encoded);
        }
        case OPCODE_LONG_VALUE: {
            return encoded.readLong();
        }
        case OPCODE_V_LONG_VALUE: {
            return ByteBufUtils.readVLong(encoded);
        }
        case OPCODE_Z_LONG_VALUE: {
            return ByteBufUtils.readZLong(encoded);
        }
        case OPCODE_BOOLEAN_VALUE: {
            // booleans are encoded as a single byte, 1 meaning true
            return encoded.readByte() == 1;
        }
        case OPCODE_DOUBLE_VALUE: {
            return ByteBufUtils.readDouble(encoded);
        }
        case OPCODE_MAP_VALUE: {
            // length-prefixed map: count, then (key, value) pairs
            int entryCount = ByteBufUtils.readVInt(encoded);
            Map<Object, Object> result = new HashMap<>();
            for (int entry = 0; entry < entryCount; entry++) {
                Object decodedKey = readEncodedSimpleValue(encoded);
                Object decodedValue = readEncodedSimpleValue(encoded);
                result.put(decodedKey, decodedValue);
            }
            return result;
        }
        case OPCODE_TUPLELIST_VALUE: {
            // column names first, then each row as one value per column
            int columnCount = ByteBufUtils.readVInt(encoded);
            String[] columnNames = new String[columnCount];
            for (int col = 0; col < columnCount; col++) {
                columnNames[col] = readUTF8String(encoded);
            }
            int rowCount = ByteBufUtils.readVInt(encoded);
            List<DataAccessor> rows = new ArrayList<>(rowCount);
            for (int row = 0; row < rowCount; row++) {
                Map<String, Object> rowValues = new HashMap<>();
                for (int col = 0; col < columnCount; col++) {
                    rowValues.put(columnNames[col], readEncodedSimpleValue(encoded));
                }
                rows.add(new MapDataAccessor(rowValues, columnNames));
            }
            return new TuplesList(columnNames, rows);
        }
        case OPCODE_MAP2_VALUE: {
            // sentinel-terminated map: entries until OPCODE_MAP2_VALUE_END
            Map<Object, Object> result = new HashMap<>();
            for (;;) {
                byte nextOpcode = encoded.readByte();
                if (nextOpcode == OPCODE_MAP2_VALUE_END) {
                    return result;
                }
                Object decodedKey = readEncodedSimpleValue(nextOpcode, encoded);
                Object decodedValue = readEncodedSimpleValue(encoded);
                result.put(decodedKey, decodedValue);
            }
        }
        case OPCODE_SET_VALUE: {
            int elementCount = ByteBufUtils.readVInt(encoded);
            Set<Object> result = new HashSet<>();
            for (int idx = 0; idx < elementCount; idx++) {
                result.add(readEncodedSimpleValue(encoded));
            }
            return result;
        }
        case OPCODE_LIST_VALUE: {
            int elementCount = ByteBufUtils.readVInt(encoded);
            List<Object> result = new ArrayList<>(elementCount);
            for (int idx = 0; idx < elementCount; idx++) {
                result.add(readEncodedSimpleValue(encoded));
            }
            return result;
        }
        case OPCODE_BYTEARRAY_VALUE: {
            return ByteBufUtils.readArray(encoded);
        }
        case OPCODE_TIMESTAMP_VALUE: {
            return new java.sql.Timestamp(ByteBufUtils.readVLong(encoded));
        }
        case OPCODE_BYTE_VALUE: {
            return encoded.readByte();
        }
        case OPCODE_KEYVALUE_VALUE: {
            // key array is read from the buffer before the value array
            byte[] key = ByteBufUtils.readArray(encoded);
            byte[] value = ByteBufUtils.readArray(encoded);
            return new KeyValue(key, value);
        }
        default:
            throw new RuntimeException("invalid opcode: " + _opcode);
    }
}
Usage example of herddb.network.KeyValue in the herddb project (by diennea): class SingleTableDumper, method acceptRecord.
/**
 * Accumulates one table record into the current batch; once the batch
 * reaches {@code fetchSize} entries it is shipped to the remote peer as a
 * TABLESPACE_DUMP_DATA message and the batch is cleared for reuse.
 * <p>
 * Any failure (including network errors while flushing) is rethrown as an
 * unchecked exception to abort the dump.
 *
 * @param record the record to buffer; its key and value byte arrays are
 *               wrapped into a {@link KeyValue}
 */
@Override
public void acceptRecord(Record record) {
    try {
        batch.add(new KeyValue(record.key.data, record.value.data));
        if (batch.size() != fetchSize) {
            // batch not full yet, nothing to flush
            return;
        }
        Map<String, Object> payload = new HashMap<>();
        payload.put("command", "data");
        payload.put("records", batch);
        _channel.sendMessageWithReply(Message.TABLESPACE_DUMP_DATA(null, tableSpaceName, dumpId, payload), timeout);
        batch.clear();
    } catch (Exception error) {
        throw new RuntimeException(error);
    }
}
Usage example of herddb.network.KeyValue in the herddb project (by diennea): class TableSpaceRestoreSourceFromFile, method nextTransactionLogChunk.
/**
 * Reads the next batch of transaction-log entries from the backup stream.
 * <p>
 * Each chunk is a v-int entry count followed by {@code count} triples of
 * (v-long ledgerId, v-long offset, byte-array payload). The ledger/offset
 * pair is re-serialized as a {@code LogSequenceNumber} to form the key of
 * the resulting {@link KeyValue}.
 *
 * @return the decoded log entries (possibly empty, never {@code null})
 * @throws DataStorageManagerException if the underlying stream fails
 */
@Override
public List<KeyValue> nextTransactionLogChunk() throws DataStorageManagerException {
    try {
        final int count = in.readVInt();
        listener.log("nexttxchunk", "sending " + count + " tx log entries", Collections.singletonMap("count", count));
        final List<KeyValue> entries = new ArrayList<>(count);
        for (int idx = 0; idx < count; idx++) {
            final long ledgerId = in.readVLong();
            final long offset = in.readVLong();
            final byte[] payload = in.readArray();
            entries.add(new KeyValue(new LogSequenceNumber(ledgerId, offset).serialize(), payload));
        }
        return entries;
    } catch (IOException err) {
        throw new DataStorageManagerException(err);
    }
}
Aggregations