
Example 76 with DataStorageManagerException

Use of herddb.storage.DataStorageManagerException in project herddb by diennea.

The class BLinkKeyToPageIndex, method scanner.

@Override
public Stream<Entry<Bytes, Long>> scanner(IndexOperation operation, StatementEvaluationContext context, TableContext tableContext, AbstractIndexManager index) throws DataStorageManagerException, StatementExecutionException {
    if (operation instanceof PrimaryIndexSeek) {
        PrimaryIndexSeek seek = (PrimaryIndexSeek) operation;
        byte[] seekValue = seek.value.computeNewValue(null, context, tableContext);
        if (seekValue == null) {
            return Stream.empty();
        }
        Bytes key = Bytes.from_array(seekValue);
        Long pageId = getTree().search(key);
        if (pageId == null) {
            return Stream.empty();
        }
        return Stream.of(new AbstractMap.SimpleImmutableEntry<>(key, pageId));
    }
    if (operation instanceof PrimaryIndexPrefixScan) {
        PrimaryIndexPrefixScan scan = (PrimaryIndexPrefixScan) operation;
        // SQLRecordKeyFunction value = sis.value;
        byte[] refvalue = scan.value.computeNewValue(null, context, tableContext);
        Bytes firstKey = Bytes.from_array(refvalue);
        Bytes lastKey = firstKey.next();
        return getTree().scan(firstKey, lastKey);
    }
    // every predicate (WHEREs...) will always be evaluated anyway on every record, in order to guarantee correctness
    if (index != null) {
        return index.recordSetScanner(operation, context, tableContext, this);
    }
    if (operation == null) {
        Stream<Map.Entry<Bytes, Long>> baseStream = getTree().scan(null, null);
        return baseStream;
    } else if (operation instanceof PrimaryIndexRangeScan) {
        Bytes refminvalue;
        PrimaryIndexRangeScan sis = (PrimaryIndexRangeScan) operation;
        SQLRecordKeyFunction minKey = sis.minValue;
        if (minKey != null) {
            refminvalue = Bytes.from_array(minKey.computeNewValue(null, context, tableContext));
        } else {
            refminvalue = null;
        }
        Bytes refmaxvalue;
        SQLRecordKeyFunction maxKey = sis.maxValue;
        if (maxKey != null) {
            refmaxvalue = Bytes.from_array(maxKey.computeNewValue(null, context, tableContext));
        } else {
            refmaxvalue = null;
        }
        return getTree().scan(refminvalue, refmaxvalue, refmaxvalue != null);
    }
    throw new DataStorageManagerException("operation " + operation + " not implemented on " + this.getClass());
}
Also used : AbstractMap(java.util.AbstractMap) PrimaryIndexSeek(herddb.index.PrimaryIndexSeek) Bytes(herddb.utils.Bytes) Entry(java.util.Map.Entry) DataStorageManagerException(herddb.storage.DataStorageManagerException) AtomicLong(java.util.concurrent.atomic.AtomicLong) PrimaryIndexPrefixScan(herddb.index.PrimaryIndexPrefixScan) PrimaryIndexRangeScan(herddb.index.PrimaryIndexRangeScan) SQLRecordKeyFunction(herddb.sql.SQLRecordKeyFunction)
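
The method dispatches on the concrete IndexOperation: an exact seek returns at most one entry, a prefix scan is turned into the half-open range [firstKey, firstKey.next()), and a range scan passes optional min/max bounds through to the underlying tree. Below is a minimal, JDK-only sketch of the same three access paths; the class KeyToPageSketch, its helper nextAfterPrefix and the use of a TreeMap instead of a B-Link tree are assumptions made only for this illustration, not herddb code.

import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.stream.Stream;

// Hypothetical illustration of the seek / prefix-scan / range-scan access paths
// used by BLinkKeyToPageIndex.scanner, backed by a plain TreeMap.
public class KeyToPageSketch {

    private final NavigableMap<String, Long> keyToPage = new TreeMap<>();

    public void put(String key, long pageId) {
        keyToPage.put(key, pageId);
    }

    /** Exact seek: either a single entry or an empty stream. */
    public Stream<Map.Entry<String, Long>> seek(String key) {
        Long pageId = keyToPage.get(key);
        return pageId == null
                ? Stream.empty()
                : Stream.of(new java.util.AbstractMap.SimpleImmutableEntry<>(key, pageId));
    }

    /** Prefix scan: the half-open range [prefix, smallest key after the prefix). */
    public Stream<Map.Entry<String, Long>> prefixScan(String prefix) {
        return keyToPage.subMap(prefix, true, nextAfterPrefix(prefix), false)
                .entrySet().stream();
    }

    /** Range scan with optional bounds; the max bound is inclusive when present. */
    public Stream<Map.Entry<String, Long>> rangeScan(String min, String max) {
        NavigableMap<String, Long> view = keyToPage;
        if (min != null) {
            view = view.tailMap(min, true);
        }
        if (max != null) {
            view = view.headMap(max, true);
        }
        return view.entrySet().stream();
    }

    /** Smallest string greater than every string starting with the prefix (assumes a non-empty prefix). */
    private static String nextAfterPrefix(String prefix) {
        char last = prefix.charAt(prefix.length() - 1);
        return prefix.substring(0, prefix.length() - 1) + (char) (last + 1);
    }
}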

Example 77 with DataStorageManagerException

Use of herddb.storage.DataStorageManagerException in project herddb by diennea.

The class BRINIndexManager, method start.

@Override
public void start(LogSequenceNumber sequenceNumber) throws DataStorageManagerException {
    LOGGER.log(Level.SEVERE, " start index {0} uuid {1}", new Object[] { index.name, index.uuid });
    bootSequenceNumber = sequenceNumber;
    if (LogSequenceNumber.START_OF_TIME.equals(sequenceNumber)) {
        /* Empty index (booting from the start) */
        this.data.boot(new BlockRangeIndexMetadata<>(Collections.emptyList()));
        LOGGER.log(Level.SEVERE, "loaded empty index {0}", new Object[] { index.name });
    } else {
        IndexStatus status;
        try {
            status = dataStorageManager.getIndexStatus(tableSpaceUUID, index.uuid, sequenceNumber);
        } catch (DataStorageManagerException e) {
            LOGGER.log(Level.SEVERE, "cannot load index {0} due to {1}, it will be rebuilt", new Object[] { index.name, e });
            this.data.boot(new BlockRangeIndexMetadata<>(Collections.emptyList()));
            rebuild();
            return;
        }
        try {
            PageContents metadataBlock = PageContents.deserialize(status.indexData);
            this.data.boot(new BlockRangeIndexMetadata<>(metadataBlock.metadata));
        } catch (IOException e) {
            throw new DataStorageManagerException(e);
        }
        newPageId.set(status.newPageId);
        LOGGER.log(Level.SEVERE, "loaded index {0} {1} blocks", new Object[] { index.name, this.data.getNumBlocks() });
    }
}
Also used : IndexStatus(herddb.storage.IndexStatus) DataStorageManagerException(herddb.storage.DataStorageManagerException) IOException(java.io.IOException)
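
The pattern here is: boot an empty index when recovering from the start of time, otherwise try to load the persisted index status and fall back to a full rebuild if that load fails. A self-contained sketch of that decision flow follows; the MetadataStore and Index interfaces are hypothetical stand-ins (very loosely mirroring herddb's DataStorageManager and BlockRangeIndex), and the deserialization step is folded into load here.

import java.io.IOException;
import java.util.Collections;
import java.util.List;

// Hypothetical illustration of the boot-or-rebuild recovery flow in BRINIndexManager.start.
public class IndexBootSketch {

    interface MetadataStore {
        /** Loads persisted index metadata for the given checkpoint, or throws if unavailable. */
        List<byte[]> load(long checkpoint) throws IOException;
    }

    interface Index {
        void boot(List<byte[]> blocks);
        void rebuild();
    }

    static final long START_OF_TIME = -1L;

    static void start(long checkpoint, MetadataStore store, Index index) {
        if (checkpoint == START_OF_TIME) {
            // Brand new index: nothing persisted yet, boot empty.
            index.boot(Collections.emptyList());
            return;
        }
        List<byte[]> blocks;
        try {
            blocks = store.load(checkpoint);
        } catch (IOException cannotLoad) {
            // Persisted status unavailable: boot empty and rebuild from the table data.
            index.boot(Collections.emptyList());
            index.rebuild();
            return;
        }
        index.boot(blocks);
    }
}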

Example 78 with DataStorageManagerException

Use of herddb.storage.DataStorageManagerException in project herddb by diennea.

The class BRINIndexManager, method checkpoint.

@Override
public List<PostCheckpointAction> checkpoint(LogSequenceNumber sequenceNumber, boolean pin) throws DataStorageManagerException {
    try {
        BlockRangeIndexMetadata<Bytes> metadata = data.checkpoint();
        PageContents page = new PageContents();
        page.type = PageContents.TYPE_METADATA;
        page.metadata = metadata.getBlocksMetadata();
        byte[] contents = page.serialize();
        Set<Long> activePages = new HashSet<>();
        page.metadata.forEach(b -> {
            activePages.add(b.pageId);
        });
        IndexStatus indexStatus = new IndexStatus(index.name, sequenceNumber, newPageId.get(), activePages, contents);
        List<PostCheckpointAction> result = new ArrayList<>();
        result.addAll(dataStorageManager.indexCheckpoint(tableSpaceUUID, index.uuid, indexStatus, pin));
        LOGGER.log(Level.INFO, "checkpoint index {0} finished: logpos {1}, {2} blocks", new Object[] { index.name, sequenceNumber, Integer.toString(page.metadata.size()) });
        LOGGER.log(Level.FINE, "checkpoint index {0} finished: logpos {1}, pages {2}", new Object[] { index.name, sequenceNumber, activePages });
        return result;
    } catch (IOException err) {
        throw new DataStorageManagerException(err);
    }
}
Also used : DataStorageManagerException(herddb.storage.DataStorageManagerException) ArrayList(java.util.ArrayList) IOException(java.io.IOException) PostCheckpointAction(herddb.core.PostCheckpointAction) Bytes(herddb.utils.Bytes) IndexStatus(herddb.storage.IndexStatus) AtomicLong(java.util.concurrent.atomic.AtomicLong) HashSet(java.util.HashSet)
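
A checkpoint serializes the block metadata, records the set of pages the index still references, and hands both to the storage layer, which returns clean-up actions for pages that are no longer active. The short sketch below illustrates just the "collect active pages from metadata" step; the Block class and its fields are hypothetical names used only for this example.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical illustration: a checkpoint keeps only the pages still referenced
// by the block metadata, so the storage layer can later drop everything else.
public class ActivePagesSketch {

    static final class Block {
        final long pageId;
        final byte[] payload;

        Block(long pageId, byte[] payload) {
            this.pageId = pageId;
            this.payload = payload;
        }
    }

    static Set<Long> activePages(List<Block> blocks) {
        Set<Long> active = new HashSet<>();
        for (Block block : blocks) {
            active.add(block.pageId);
        }
        return active;
    }
}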

Example 79 with DataStorageManagerException

Use of herddb.storage.DataStorageManagerException in project herddb by diennea.

The class RoutedClientSideConnection, method messageReceived.

@Override
@SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT")
public void messageReceived(Message message, Channel _channel) {
    switch(message.type) {
        case Message.TYPE_TABLESPACE_DUMP_DATA:
            {
                String dumpId = (String) message.parameters.get("dumpId");
                TableSpaceDumpReceiver receiver = dumpReceivers.get(dumpId);
                LOGGER.log(Level.FINE, "receiver for {0}: {1}", new Object[] { dumpId, receiver });
                if (receiver == null) {
                    if (_channel != null) {
                        _channel.sendReplyMessage(message, Message.ERROR(clientId, new Exception("no such dump receiver " + dumpId)));
                    }
                    return;
                }
                try {
                    Map<String, Object> values = (Map<String, Object>) message.parameters.get("values");
                    String command = (String) values.get("command") + "";
                    boolean sendAck = true;
                    switch(command) {
                        case "start":
                            {
                                long ledgerId = (long) values.get("ledgerid");
                                long offset = (long) values.get("offset");
                                receiver.start(new LogSequenceNumber(ledgerId, offset));
                                break;
                            }
                        case "beginTable":
                            {
                                byte[] tableDefinition = (byte[]) values.get("table");
                                Table table = Table.deserialize(tableDefinition);
                                Long estimatedSize = (Long) values.get("estimatedSize");
                                long dumpLedgerId = (Long) values.get("dumpLedgerid");
                                long dumpOffset = (Long) values.get("dumpOffset");
                                List<byte[]> indexesDef = (List<byte[]>) values.get("indexes");
                                List<Index> indexes = indexesDef.stream().map(Index::deserialize).collect(Collectors.toList());
                                Map<String, Object> stats = new HashMap<>();
                                stats.put("estimatedSize", estimatedSize);
                                stats.put("dumpLedgerId", dumpLedgerId);
                                stats.put("dumpOffset", dumpOffset);
                                receiver.beginTable(new DumpedTableMetadata(table, new LogSequenceNumber(dumpLedgerId, dumpOffset), indexes), stats);
                                break;
                            }
                        case "endTable":
                            {
                                receiver.endTable();
                                break;
                            }
                        case "finish":
                            {
                                long ledgerId = (long) values.get("ledgerid");
                                long offset = (long) values.get("offset");
                                receiver.finish(new LogSequenceNumber(ledgerId, offset));
                                sendAck = false;
                                break;
                            }
                        case "data":
                            {
                                List<KeyValue> data = (List<KeyValue>) values.get("records");
                                List<Record> records = new ArrayList<>(data.size());
                                for (KeyValue kv : data) {
                                    records.add(new Record(new Bytes(kv.key), new Bytes(kv.value)));
                                }
                                receiver.receiveTableDataChunk(records);
                                break;
                            }
                        case "txlog":
                            {
                                List<KeyValue> data = (List<KeyValue>) values.get("records");
                                List<DumpedLogEntry> records = new ArrayList<>(data.size());
                                for (KeyValue kv : data) {
                                    records.add(new DumpedLogEntry(LogSequenceNumber.deserialize(kv.key), kv.value));
                                }
                                receiver.receiveTransactionLogChunk(records);
                                break;
                            }
                        case "transactions":
                            {
                                String tableSpace = (String) values.get("tableSpace");
                                List<byte[]> data = (List<byte[]>) values.get("transactions");
                                List<Transaction> transactions = data.stream().map(array -> {
                                    return Transaction.deserialize(tableSpace, array);
                                }).collect(Collectors.toList());
                                receiver.receiveTransactionsAtDump(transactions);
                                break;
                            }
                        default:
                            throw new DataStorageManagerException("invalid dump command:" + command);
                    }
                    if (_channel != null && sendAck) {
                        _channel.sendReplyMessage(message, Message.ACK(clientId));
                    }
                } catch (DataStorageManagerException error) {
                    LOGGER.log(Level.SEVERE, "error while handling dump data", error);
                    if (_channel != null) {
                        _channel.sendReplyMessage(message, Message.ERROR(clientId, error));
                    }
                }
            }
            break;
    }
}
Also used : Bytes(herddb.utils.Bytes) RetryRequestException(herddb.client.impl.RetryRequestException) TimeoutException(java.util.concurrent.TimeoutException) Table(herddb.model.Table) HashMap(java.util.HashMap) ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock) KeyValue(herddb.network.KeyValue) ArrayList(java.util.ArrayList) Level(java.util.logging.Level) Channel(herddb.network.Channel) ChannelEventListener(herddb.network.ChannelEventListener) Transaction(herddb.model.Transaction) Map(java.util.Map) DataStorageManagerException(herddb.storage.DataStorageManagerException) DumpedLogEntry(herddb.backup.DumpedLogEntry) Index(herddb.model.Index) DataAccessor(herddb.utils.DataAccessor) Record(herddb.model.Record) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) LogSequenceNumber(herddb.log.LogSequenceNumber) Logger(java.util.logging.Logger) Collectors(java.util.stream.Collectors) AtomicLong(java.util.concurrent.atomic.AtomicLong) List(java.util.List) DumpedTableMetadata(herddb.backup.DumpedTableMetadata) Message(herddb.network.Message) SaslNettyClient(herddb.security.sasl.SaslNettyClient) BackupFileConstants(herddb.backup.BackupFileConstants) SaslUtils(herddb.security.sasl.SaslUtils) TuplesList(herddb.utils.TuplesList) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) ServerHostData(herddb.network.ServerHostData)
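
The handler keys every dump chunk off a "command" string carried in the message values and replies with an ACK for every command except the terminal "finish". Below is a reduced, self-contained sketch of that dispatch shape; the Receiver interface and the three-command subset are hypothetical simplifications of herddb's TableSpaceDumpReceiver protocol.

import java.util.List;
import java.util.Map;

// Hypothetical, reduced illustration of the dump-command dispatch in messageReceived.
public class DumpDispatchSketch {

    interface Receiver {
        void start(long ledgerId, long offset);
        void data(List<byte[]> records);
        void finish(long ledgerId, long offset);
    }

    /** Returns true if the caller should send an ACK back for this chunk. */
    @SuppressWarnings("unchecked")
    static boolean dispatch(Map<String, Object> values, Receiver receiver) {
        String command = String.valueOf(values.get("command"));
        switch (command) {
            case "start": {
                receiver.start((long) values.get("ledgerid"), (long) values.get("offset"));
                return true;
            }
            case "data": {
                List<byte[]> records = (List<byte[]>) values.get("records");
                receiver.data(records);
                return true;
            }
            case "finish": {
                receiver.finish((long) values.get("ledgerid"), (long) values.get("offset"));
                // Mirrors sendAck = false above: no ACK after the final chunk.
                return false;
            }
            default:
                throw new IllegalArgumentException("invalid dump command: " + command);
        }
    }
}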

Example 80 with DataStorageManagerException

Use of herddb.storage.DataStorageManagerException in project herddb by diennea.

The class MemoryDataStorageManager, method getLatestTableStatus.

@Override
public TableStatus getLatestTableStatus(String tableSpace, String tableName) throws DataStorageManagerException {
    LogSequenceNumber max = null;
    String prefix = tableSpace + "." + tableName + "_";
    for (String status : tableStatuses.keySet()) {
        if (status.startsWith(prefix)) {
            final LogSequenceNumber log = evaluateLogSequenceNumber(status.substring(prefix.length()));
            if (log != null) {
                if (max == null || log.after(max)) {
                    max = log;
                }
            }
        }
    }
    TableStatus latestStatus;
    if (max == null) {
        latestStatus = new TableStatus(tableName, LogSequenceNumber.START_OF_TIME, Bytes.from_long(1).data, 1, Collections.emptyMap());
    } else {
        byte[] data = tableStatuses.get(checkpointName(tableSpace, tableName, max));
        if (data == null) {
            latestStatus = new TableStatus(tableName, LogSequenceNumber.START_OF_TIME, Bytes.from_long(1).data, 1, Collections.emptyMap());
        } else {
            try {
                try (InputStream input = new SimpleByteArrayInputStream(data);
                    ExtendedDataInputStream dataIn = new ExtendedDataInputStream(input)) {
                    latestStatus = TableStatus.deserialize(dataIn);
                }
            } catch (IOException err) {
                throw new DataStorageManagerException(err);
            }
        }
    }
    return latestStatus;
}
Also used : ExtendedDataInputStream(herddb.utils.ExtendedDataInputStream) DataStorageManagerException(herddb.storage.DataStorageManagerException) SimpleByteArrayInputStream(herddb.utils.SimpleByteArrayInputStream) InputStream(java.io.InputStream) TableStatus(herddb.storage.TableStatus) LogSequenceNumber(herddb.log.LogSequenceNumber) IOException(java.io.IOException)
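
The in-memory implementation finds the latest checkpoint by scanning every status key that starts with the "<tableSpace>.<tableName>_" prefix and keeping the highest sequence number parsed from the key suffix. A small sketch of that "max by parsed suffix" scan follows; the key layout "<prefix><ledgerId>.<offset>" and the helper names are assumptions made for this illustration, not herddb's exact checkpointName format.

import java.util.Comparator;
import java.util.Map;
import java.util.Optional;

// Hypothetical illustration: pick the checkpoint key with the highest sequence
// number among the keys sharing a "<tableSpace>.<tableName>_" prefix.
public class LatestStatusSketch {

    static Optional<String> latestKey(Map<String, byte[]> statuses, String prefix) {
        Comparator<String> bySequenceNumber = Comparator
                .comparingLong((String key) -> ledgerId(key, prefix))
                .thenComparingLong(key -> offset(key, prefix));
        return statuses.keySet().stream()
                .filter(key -> key.startsWith(prefix))
                .max(bySequenceNumber);
    }

    // The sequence number is parsed from the key suffix, i.e. what follows the prefix.
    private static long ledgerId(String key, String prefix) {
        return Long.parseLong(key.substring(prefix.length()).split("\\.")[0]);
    }

    private static long offset(String key, String prefix) {
        return Long.parseLong(key.substring(prefix.length()).split("\\.")[1]);
    }
}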

Aggregations

DataStorageManagerException (herddb.storage.DataStorageManagerException): 80
IOException (java.io.IOException): 46
ArrayList (java.util.ArrayList): 28
LogSequenceNumber (herddb.log.LogSequenceNumber): 23
Path (java.nio.file.Path): 22
Bytes (herddb.utils.Bytes): 19
Record (herddb.model.Record): 15
StatementExecutionException (herddb.model.StatementExecutionException): 15
ExtendedDataInputStream (herddb.utils.ExtendedDataInputStream): 14
SimpleByteArrayInputStream (herddb.utils.SimpleByteArrayInputStream): 14
InputStream (java.io.InputStream): 14
AtomicLong (java.util.concurrent.atomic.AtomicLong): 14
LogEntry (herddb.log.LogEntry): 13
HashMap (java.util.HashMap): 13
LogNotAvailableException (herddb.log.LogNotAvailableException): 12
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 12
BufferedInputStream (java.io.BufferedInputStream): 11
CommitLogResult (herddb.log.CommitLogResult): 10
Table (herddb.model.Table): 10
ExtendedDataOutputStream (herddb.utils.ExtendedDataOutputStream): 10