Use of herddb.network.Message in project herddb by diennea.
The class DataMessageEncoder, method write:
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
    Message m = (Message) msg;
    ByteBuf encoded = ctx.alloc().buffer();
    MessageUtils.encodeMessage(encoded, m);
    ctx.writeAndFlush(encoded, promise);
}
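The encoder turns each outgoing Message into a ByteBuf via MessageUtils.encodeMessage and flushes it on the channel. As a minimal sketch (not taken from the project), assuming DataMessageEncoder extends Netty's ChannelOutboundHandlerAdapter as its write signature suggests, it could be installed in a channel pipeline like this; the initializer class name, the no-arg construction and the omitted inbound handlers are hypothetical:
// Sketch only: hypothetical pipeline setup, not the actual HerdDB bootstrap code.
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;

public class MessagePipelineInitializer extends ChannelInitializer<SocketChannel> {

    @Override
    protected void initChannel(SocketChannel ch) {
        // outbound path: Message -> ByteBuf, written by the encoder shown above
        ch.pipeline().addLast(new DataMessageEncoder());
        // the inbound decoder and the business handler are omitted from this sketch
    }
}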
The class RoutedClientSideConnection, method executeGet:
GetResult executeGet(String tableSpace, String query, long tx, List<Object> params) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    try {
        Message message = Message.EXECUTE_STATEMENT(clientId, tableSpace, query, tx, false, params);
        Message reply = _channel.sendMessageWithReply(message, timeout);
        if (reply.type == Message.TYPE_ERROR) {
            boolean notLeader = reply.parameters.get("notLeader") != null;
            if (notLeader) {
                this.connection.requestMetadataRefresh();
                throw new RetryRequestException(reply + "");
            }
            throw new HDBException(reply);
        }
        long found = (Long) reply.parameters.get("updateCount");
        long transactionId = (Long) reply.parameters.get("tx");
        if (found <= 0) {
            return new GetResult(null, transactionId);
        } else {
            return new GetResult((Map<String, Object>) reply.parameters.get("data"), transactionId);
        }
    } catch (InterruptedException | TimeoutException err) {
        throw new HDBException(err);
    }
}
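Both executeGet and executeScan (next snippet) repeat the same reply check: a TYPE_ERROR reply carrying a notLeader parameter triggers a metadata refresh and a retry, any other error becomes an HDBException. A hypothetical helper that factors out this check could look like the following; the helper itself is not part of HerdDB, only the reply fields it inspects come from the snippets:
// Hypothetical helper (not in HerdDB): centralizes the TYPE_ERROR / notLeader
// handling shared by executeGet and executeScan.
private void checkReply(Message reply) throws HDBException, ClientSideMetadataProviderException {
    if (reply.type == Message.TYPE_ERROR) {
        boolean notLeader = reply.parameters.get("notLeader") != null;
        if (notLeader) {
            // the tablespace leader changed: refresh metadata and let the caller retry
            this.connection.requestMetadataRefresh();
            throw new RetryRequestException(reply + "");
        }
        throw new HDBException(reply);
    }
}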
The class RoutedClientSideConnection, method executeScan:
ScanResultSet executeScan(String tableSpace, String query, List<Object> params, long tx, int maxRows, int fetchSize) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    try {
        String scannerId = this.clientId + ":" + SCANNERID_GENERATOR.incrementAndGet();
        Message message = Message.OPEN_SCANNER(clientId, tableSpace, query, scannerId, tx, params, fetchSize, maxRows);
        LOGGER.log(Level.FINEST, "open scanner {0} for query {1}, params {2}", new Object[] { scannerId, query, params });
        Message reply = _channel.sendMessageWithReply(message, timeout);
        if (reply.type == Message.TYPE_ERROR) {
            boolean notLeader = reply.parameters.get("notLeader") != null;
            if (notLeader) {
                this.connection.requestMetadataRefresh();
                throw new RetryRequestException(reply + "");
            }
            throw new HDBException(reply);
        }
        TuplesList data = (TuplesList) reply.parameters.get("data");
        List<DataAccessor> initialFetchBuffer = data.tuples;
        String[] columnNames = data.columnNames;
        boolean last = (Boolean) reply.parameters.get("last");
        long transactionId = (Long) reply.parameters.get("tx");
        // LOGGER.log(Level.SEVERE, "received first " + initialFetchBuffer.size() + " records for query " + query);
        ScanResultSetImpl impl = new ScanResultSetImpl(scannerId, columnNames, initialFetchBuffer, fetchSize, last, transactionId);
        return impl;
    } catch (InterruptedException | TimeoutException err) {
        throw new HDBException(err);
    }
}
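The OPEN_SCANNER reply already carries the first page of results ("data"), the "last" flag and the transaction id; further pages are fetched by the returned result set as the caller iterates it. A caller-side sketch, as if invoked from within the connection class, is shown below; it assumes ScanResultSet exposes hasNext()/next()/close() and that DataAccessor exposes get(String), none of which is shown in the snippet above:
// Caller-side sketch; the ScanResultSet and DataAccessor methods used here are
// assumptions, not confirmed by the snippet above.
ScanResultSet scan = executeScan("myTableSpace", "SELECT id FROM mytable", Collections.emptyList(), 0, 0, 100);
try {
    while (scan.hasNext()) {
        DataAccessor row = scan.next();
        System.out.println(row.get("id"));
    }
} finally {
    scan.close();
}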
The class TableSpaceManager, method sendTransactionsDump:
private void sendTransactionsDump(List<Transaction> batch, Channel _channel, String dumpId, final int timeout, Message response_to_start) throws TimeoutException, InterruptedException {
    if (batch.isEmpty()) {
        return;
    }
    Map<String, Object> transactionsData = new HashMap<>();
    transactionsData.put("command", "transactions");
    List<byte[]> encodedTransactions = batch.stream().map(tr -> {
        return tr.serialize();
    }).collect(Collectors.toList());
    transactionsData.put("transactions", encodedTransactions);
    Message response_to_transactionsData = _channel.sendMessageWithReply(Message.TABLESPACE_DUMP_DATA(null, tableSpaceName, dumpId, transactionsData), timeout);
    if (response_to_transactionsData.type != Message.TYPE_ACK) {
        LOGGER.log(Level.SEVERE, "error response at transactionsData command: " + response_to_transactionsData.parameters);
    }
    batch.clear();
}
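Because sendTransactionsDump clears the batch after each send, a caller can keep appending to the same list and flush it whenever it reaches a size threshold. A hypothetical driver loop (the threshold, method name and iteration source are not part of HerdDB) could look like this:
// Hypothetical driver loop, not taken from HerdDB: flushes the batch every
// MAX_BATCH_SIZE transactions and once more at the end.
private static final int MAX_BATCH_SIZE = 1000; // assumed threshold

void dumpTransactions(List<Transaction> allTransactions, Channel channel, String dumpId, int timeout, Message responseToStart) throws TimeoutException, InterruptedException {
    List<Transaction> batch = new ArrayList<>();
    for (Transaction tr : allTransactions) {
        batch.add(tr);
        if (batch.size() >= MAX_BATCH_SIZE) {
            sendTransactionsDump(batch, channel, dumpId, timeout, responseToStart);
        }
    }
    // sendTransactionsDump clears the batch after sending, so only the remainder is flushed here
    sendTransactionsDump(batch, channel, dumpId, timeout, responseToStart);
}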
The class RoutedClientSideConnection, method messageReceived:
@Override
@SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT")
public void messageReceived(Message message, Channel _channel) {
    switch(message.type) {
        case Message.TYPE_TABLESPACE_DUMP_DATA:
            {
                String dumpId = (String) message.parameters.get("dumpId");
                TableSpaceDumpReceiver receiver = dumpReceivers.get(dumpId);
                LOGGER.log(Level.FINE, "receiver for {0}: {1}", new Object[] { dumpId, receiver });
                if (receiver == null) {
                    if (_channel != null) {
                        _channel.sendReplyMessage(message, Message.ERROR(clientId, new Exception("no such dump receiver " + dumpId)));
                    }
                    return;
                }
                try {
                    Map<String, Object> values = (Map<String, Object>) message.parameters.get("values");
                    String command = (String) values.get("command") + "";
                    boolean sendAck = true;
                    switch(command) {
                        case "start":
                            {
                                long ledgerId = (long) values.get("ledgerid");
                                long offset = (long) values.get("offset");
                                receiver.start(new LogSequenceNumber(ledgerId, offset));
                                break;
                            }
                        case "beginTable":
                            {
                                byte[] tableDefinition = (byte[]) values.get("table");
                                Table table = Table.deserialize(tableDefinition);
                                Long estimatedSize = (Long) values.get("estimatedSize");
                                long dumpLedgerId = (Long) values.get("dumpLedgerid");
                                long dumpOffset = (Long) values.get("dumpOffset");
                                List<byte[]> indexesDef = (List<byte[]>) values.get("indexes");
                                List<Index> indexes = indexesDef.stream().map(Index::deserialize).collect(Collectors.toList());
                                Map<String, Object> stats = new HashMap<>();
                                stats.put("estimatedSize", estimatedSize);
                                stats.put("dumpLedgerId", dumpLedgerId);
                                stats.put("dumpOffset", dumpOffset);
                                receiver.beginTable(new DumpedTableMetadata(table, new LogSequenceNumber(dumpLedgerId, dumpOffset), indexes), stats);
                                break;
                            }
                        case "endTable":
                            {
                                receiver.endTable();
                                break;
                            }
                        case "finish":
                            {
                                long ledgerId = (long) values.get("ledgerid");
                                long offset = (long) values.get("offset");
                                receiver.finish(new LogSequenceNumber(ledgerId, offset));
                                sendAck = false;
                                break;
                            }
                        case "data":
                            {
                                List<KeyValue> data = (List<KeyValue>) values.get("records");
                                List<Record> records = new ArrayList<>(data.size());
                                for (KeyValue kv : data) {
                                    records.add(new Record(new Bytes(kv.key), new Bytes(kv.value)));
                                }
                                receiver.receiveTableDataChunk(records);
                                break;
                            }
                        case "txlog":
                            {
                                List<KeyValue> data = (List<KeyValue>) values.get("records");
                                List<DumpedLogEntry> records = new ArrayList<>(data.size());
                                for (KeyValue kv : data) {
                                    records.add(new DumpedLogEntry(LogSequenceNumber.deserialize(kv.key), kv.value));
                                }
                                receiver.receiveTransactionLogChunk(records);
                                break;
                            }
                        case "transactions":
                            {
                                String tableSpace = (String) values.get("tableSpace");
                                List<byte[]> data = (List<byte[]>) values.get("transactions");
                                List<Transaction> transactions = data.stream().map(array -> {
                                    return Transaction.deserialize(tableSpace, array);
                                }).collect(Collectors.toList());
                                receiver.receiveTransactionsAtDump(transactions);
                                break;
                            }
                        default:
                            throw new DataStorageManagerException("invalid dump command:" + command);
                    }
                    if (_channel != null && sendAck) {
                        _channel.sendReplyMessage(message, Message.ACK(clientId));
                    }
                } catch (DataStorageManagerException error) {
                    LOGGER.log(Level.SEVERE, "error while handling dump data", error);
                    if (_channel != null) {
                        _channel.sendReplyMessage(message, Message.ERROR(clientId, error));
                    }
                }
            }
            break;
    }
}
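On the client side, each dump command is dispatched to the TableSpaceDumpReceiver registered for that dumpId. A minimal receiver sketch is shown below; it assumes TableSpaceDumpReceiver can be extended and that the callback signatures (including the DataStorageManagerException throws clause) match the calls made in messageReceived above:
// Sketch only: a receiver that counts dumped records and waits for completion.
// The TableSpaceDumpReceiver callback signatures below are assumed from the
// calls in messageReceived above; verify them against the real class.
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;

public class CountingDumpReceiver extends TableSpaceDumpReceiver {

    private final AtomicLong records = new AtomicLong();
    private final CountDownLatch done = new CountDownLatch(1);

    @Override
    public void receiveTableDataChunk(List<Record> chunk) throws DataStorageManagerException {
        records.addAndGet(chunk.size());
    }

    @Override
    public void finish(LogSequenceNumber logSequenceNumber) throws DataStorageManagerException {
        // "finish" is the last command of the dump (no ACK is sent back for it)
        done.countDown();
    }

    public long awaitCount() throws InterruptedException {
        done.await();
        return records.get();
    }
}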