
Example 21 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class PartitionedFileSetDataset, method postTxCommit.

@Override
public void postTxCommit() {
    // simply delete the quarantine directory for this transaction
    try {
        Location quarantine = getQuarantineLocation();
        if (quarantine.exists()) {
            boolean deleteSuccess = quarantine.delete(true);
            if (!deleteSuccess) {
                throw new DataSetException(String.format("Error deleting quarantine location %s.", quarantine));
            }
        }
    } catch (IOException e) {
        throw new DataSetException(String.format("Error deleting quarantine location for tx %s.", tx.getWritePointer()), e);
    }
    this.tx = null;
    super.postTxCommit();
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Location(org.apache.twill.filesystem.Location)
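
The snippet wraps a filesystem IOException in an unchecked DataSetException so the transaction hook needs no throws clause. A minimal sketch of that same wrap-and-rethrow pattern, using only the DataSetException and Twill Location types shown above; the class and method names are hypothetical:

import io.cdap.cdap.api.dataset.DataSetException;
import org.apache.twill.filesystem.Location;
import java.io.IOException;

final class LocationCleanup {
    // Hypothetical helper mirroring postTxCommit above: a failed or impossible
    // delete surfaces as an unchecked DataSetException instead of an IOException.
    static void deleteIfExists(Location location) {
        try {
            if (location.exists() && !location.delete(true)) {
                throw new DataSetException(String.format("Error deleting location %s.", location));
            }
        } catch (IOException e) {
            throw new DataSetException(String.format("Error deleting location %s.", location), e);
        }
    }
}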

Example 22 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class PartitionedFileSetDataset, method onFailure.

@Override
public void onFailure() throws DataSetException {
    try {
        rollbackPartitionOperations();
        ((FileSetDataset) files).onFailure();
    } catch (Throwable caught) {
        Throwables.propagateIfPossible(caught, DataSetException.class);
        throw new DataSetException("Unable to rollback", caught);
    }
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) FileSetDataset(io.cdap.cdap.data2.dataset2.lib.file.FileSetDataset)
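
The catch block rethrows an existing DataSetException unchanged via Guava's Throwables.propagateIfPossible and wraps anything else. A minimal sketch of that pattern in isolation; the class name and the Runnable stand-in for the rollback work are hypothetical:

import com.google.common.base.Throwables;
import io.cdap.cdap.api.dataset.DataSetException;

final class RollbackSupport {
    // If the rollback itself threw a DataSetException, propagate it as-is;
    // any other Throwable is wrapped so callers see a single exception type.
    static void rollbackOrWrap(Runnable rollbackWork) {
        try {
            rollbackWork.run();
        } catch (Throwable caught) {
            Throwables.propagateIfPossible(caught, DataSetException.class);
            throw new DataSetException("Unable to rollback", caught);
        }
    }
}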

Example 23 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class PartitionedFileSetDataset, method addPartition.

public void addPartition(PartitionKey key, String path, Map<String, String> metadata, boolean filesCreated, boolean allowAppend) {
    byte[] rowKey = generateRowKey(key, partitioning);
    Row row = partitionsTable.get(rowKey);
    boolean appending = !row.isEmpty();
    if (appending && !allowAppend) {
        throw new PartitionAlreadyExistsException(getName(), key);
    }
    if (appending) {
        // this can happen if the user originally created the partition with a custom relative path
        String existingPath = Bytes.toString(row.get(RELATIVE_PATH));
        if (!path.equals(existingPath)) {
            throw new DataSetException(String.format("Attempting to append to Dataset '%s', to partition '%s' with a " + "different path. Original path: '%s'. New path: '%s'", getName(), key.toString(), existingPath, path));
        }
    }
    LOG.debug("{} partition with key {} and path {} to dataset {}", appending ? "Appending to" : "Creating", key, path, getName());
    AddPartitionOperation operation = new AddPartitionOperation(key, path, filesCreated);
    operationsInThisTx.add(operation);
    Put put = new Put(rowKey);
    byte[] nowInMillis = Bytes.toBytes(System.currentTimeMillis());
    if (!appending) {
        put.add(RELATIVE_PATH, Bytes.toBytes(path));
        put.add(CREATION_TIME_COL, nowInMillis);
    }
    put.add(LAST_MODIFICATION_TIME_COL, nowInMillis);
    // we allow updates, because an update will only happen if it's an append
    addMetadataToPut(row, metadata, put, true);
    // index each row by its transaction's write pointer
    put.add(WRITE_PTR_COL, tx.getWritePointer());
    partitionsTable.put(put);
    if (!appending) {
        addPartitionToExplore(key, path);
        operation.setExplorePartitionCreated();
    }
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) Row(io.cdap.cdap.api.dataset.table.Row) PartitionAlreadyExistsException(io.cdap.cdap.api.dataset.lib.PartitionAlreadyExistsException) Put(io.cdap.cdap.api.dataset.table.Put)
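
Callers normally reach this logic through the public PartitionedFileSet API rather than this internal method. A hedged sketch of such a caller, assuming a PartitionedFileSet instance obtained elsewhere; the partition field name, value, and relative path are illustrative only:

import io.cdap.cdap.api.dataset.DataSetException;
import io.cdap.cdap.api.dataset.lib.PartitionAlreadyExistsException;
import io.cdap.cdap.api.dataset.lib.PartitionKey;
import io.cdap.cdap.api.dataset.lib.PartitionedFileSet;

final class PartitionRegistration {
    // Registers a partition for an existing relative path; the key fields must
    // match the dataset's partitioning.
    static void register(PartitionedFileSet pfs) {
        PartitionKey key = PartitionKey.builder().addStringField("year", "2024").build();
        try {
            pfs.addPartition(key, "year=2024");
        } catch (PartitionAlreadyExistsException e) {
            // the partition was created earlier and appending was not requested
        } catch (DataSetException e) {
            // e.g. appending to an existing partition with a different path
            throw e;
        }
    }
}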

Example 24 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class BufferingTable, method get.

@ReadOnly
@Override
public Row get(byte[] row, byte[] startColumn, byte[] stopColumn, int limit) {
    ensureTransactionIsStarted();
    reportRead(1);
    // checking if the row was deleted inside this tx
    NavigableMap<byte[], Update> buffCols = buff.get(row);
    // potential improvement: do not fetch columns available in in-mem buffer (we know them at this point)
    try {
        Map<byte[], byte[]> persistedCols = getPersisted(row, startColumn, stopColumn, limit);
        // adding server cols, and then overriding with buffered values
        NavigableMap<byte[], byte[]> result = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        if (persistedCols != null) {
            result.putAll(persistedCols);
        }
        if (buffCols != null) {
            buffCols = getRange(buffCols, startColumn, stopColumn, limit);
            // null valued columns in in-memory buffer are deletes, so we need to delete them from the result list
            mergeToPersisted(result, buffCols, null);
        }
        // applying limit
        return new Result(row, head(result, limit));
    } catch (Exception e) {
        LOG.debug("get failed for table: " + getTransactionAwareName() + ", row: " + Bytes.toStringBinary(row), e);
        throw new DataSetException("get failed", e);
    }
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Result(io.cdap.cdap.api.dataset.table.Result) ReadOnly(io.cdap.cdap.api.annotation.ReadOnly)
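
From the caller's side, this read goes through the Table interface that BufferingTable implements. A minimal sketch, assuming a Table reference inside a started transaction; the row key, column bounds, limit, and class name are illustrative:

import io.cdap.cdap.api.common.Bytes;
import io.cdap.cdap.api.dataset.DataSetException;
import io.cdap.cdap.api.dataset.table.Row;
import io.cdap.cdap.api.dataset.table.Table;

final class SingleRowRead {
    // Reads up to 10 columns of one row within the current transaction;
    // a storage-level failure surfaces as the DataSetException thrown above.
    static String firstColumnValue(Table table) {
        try {
            Row row = table.get(Bytes.toBytes("rowKey"), Bytes.toBytes("a"), Bytes.toBytes("z"), 10);
            return row.isEmpty() ? null : Bytes.toString(row.get(Bytes.toBytes("a")));
        } catch (DataSetException e) {
            // the read failed; rethrow (or retry in a new transaction)
            throw e;
        }
    }
}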

Example 25 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class BufferingTable, method get (multi-get overload).

@ReadOnly
@Override
public List<Row> get(List<Get> gets) {
    ensureTransactionIsStarted();
    try {
        // get persisted, then overwrite with what's buffered
        List<Map<byte[], byte[]>> persistedRows = getPersisted(gets);
        // gets and rows lists are always of the same size
        Preconditions.checkArgument(gets.size() == persistedRows.size(), "Invalid number of rows fetched when performing multi-get. There must be one row for each get.");
        List<Row> result = Lists.newArrayListWithCapacity(persistedRows.size());
        Iterator<Map<byte[], byte[]>> persistedRowsIter = persistedRows.iterator();
        Iterator<Get> getIter = gets.iterator();
        while (persistedRowsIter.hasNext() && getIter.hasNext()) {
            Get get = getIter.next();
            Map<byte[], byte[]> persistedRow = persistedRowsIter.next();
            // navigable copy of the persisted data. Implementation may return immutable or unmodifiable maps,
            // so we make a copy here.
            NavigableMap<byte[], byte[]> rowColumns = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
            rowColumns.putAll(persistedRow);
            byte[] row = get.getRow();
            NavigableMap<byte[], Update> buffCols = buff.get(row);
            // merge what was in the buffer and what was persisted
            if (buffCols != null) {
                List<byte[]> getColumns = get.getColumns();
                byte[][] columns = getColumns == null ? null : getColumns.toArray(new byte[getColumns.size()][]);
                mergeToPersisted(rowColumns, buffCols, columns);
            }
            result.add(new Result(row, unwrapDeletes(rowColumns)));
        }
        return result;
    } catch (Exception e) {
        LOG.debug("multi-get failed for table: " + getTransactionAwareName(), e);
        throw new DataSetException("multi-get failed", e);
    }
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Result(io.cdap.cdap.api.dataset.table.Result) Get(io.cdap.cdap.api.dataset.table.Get) Row(io.cdap.cdap.api.dataset.table.Row) Map(java.util.Map) NavigableMap(java.util.NavigableMap) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap) ReadOnly(io.cdap.cdap.api.annotation.ReadOnly)
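
A matching caller-side sketch for the multi-get, assuming a Table reference inside a started transaction; the row keys and class name are illustrative. As the precondition in the snippet states, the returned list has one Row for each Get:

import io.cdap.cdap.api.dataset.DataSetException;
import io.cdap.cdap.api.dataset.table.Get;
import io.cdap.cdap.api.dataset.table.Row;
import io.cdap.cdap.api.dataset.table.Table;
import java.util.Arrays;
import java.util.List;

final class MultiRowRead {
    // Issues one Get per row key; the whole multi-get either succeeds or fails,
    // so there are no partial results to clean up on DataSetException.
    static List<Row> readBoth(Table table) {
        List<Get> gets = Arrays.asList(new Get("row1"), new Get("row2"));
        try {
            return table.get(gets);
        } catch (DataSetException e) {
            throw e;
        }
    }
}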

Aggregations

DataSetException (io.cdap.cdap.api.dataset.DataSetException): 74
IOException (java.io.IOException): 54
ReadOnly (io.cdap.cdap.api.annotation.ReadOnly): 14
Map (java.util.Map): 12
TransactionFailureException (org.apache.tephra.TransactionFailureException): 12
Location (org.apache.twill.filesystem.Location): 12
PartitionKey (io.cdap.cdap.api.dataset.lib.PartitionKey): 10
Result (io.cdap.cdap.api.dataset.table.Result): 10
NavigableMap (java.util.NavigableMap): 10
Test (org.junit.Test): 10
PartitionAlreadyExistsException (io.cdap.cdap.api.dataset.lib.PartitionAlreadyExistsException): 8
TimePartitionedFileSet (io.cdap.cdap.api.dataset.lib.TimePartitionedFileSet): 8
Put (org.apache.hadoop.hbase.client.Put): 8
ImmutableMap (com.google.common.collect.ImmutableMap): 6
WriteOnly (io.cdap.cdap.api.annotation.WriteOnly): 6
DatasetManagementException (io.cdap.cdap.api.dataset.DatasetManagementException): 6
PartitionedFileSet (io.cdap.cdap.api.dataset.lib.PartitionedFileSet): 6
Put (io.cdap.cdap.api.dataset.table.Put): 6
Row (io.cdap.cdap.api.dataset.table.Row): 6
UnauthorizedException (io.cdap.cdap.security.spi.authorization.UnauthorizedException): 6