
Example 66 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class HBaseMetricsTable, method get().

@Override
@Nullable
public byte[] get(byte[] row, byte[] column) {
    try {
        // resolve the distributed (salted) row key used for this logical row
        byte[] distributedKey = createDistributedRowKey(row);
        Get get = tableUtil.buildGet(distributedKey)
            .addColumn(columnFamily, column)
            .setMaxVersions(1)
            .build();
        Result getResult = table.get(get);
        if (!getResult.isEmpty()) {
            return getResult.getValue(columnFamily, column);
        }
        return null;
    } catch (IOException e) {
        throw new DataSetException("Get failed on table " + tableId, e);
    }
}
Also used: DataSetException(io.cdap.cdap.api.dataset.DataSetException), Get(org.apache.hadoop.hbase.client.Get), IOException(java.io.IOException), Result(org.apache.hadoop.hbase.client.Result), Nullable(javax.annotation.Nullable)
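
Because DataSetException is unchecked, a caller that wants a soft-failure lookup has to catch it explicitly. A minimal caller sketch, assuming only the get(row, column) contract shown above; CellReader and SafeMetricsLookup are illustrative names, not cdap APIs:

import io.cdap.cdap.api.dataset.DataSetException;

public final class SafeMetricsLookup {

    // Illustrative stand-in for any table exposing the get(row, column) contract above.
    public interface CellReader {
        byte[] get(byte[] row, byte[] column);
    }

    // Returns the cell value, or null when the underlying read fails.
    public static byte[] getOrNull(CellReader table, byte[] row, byte[] column) {
        try {
            return table.get(row, column);
        } catch (DataSetException e) {
            // the HBase implementation above preserves the original IOException as the cause
            return null;
        }
    }
}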

Example 67 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class LevelDBTable, method scanPersisted().

@ReadOnly
@Override
protected Scanner scanPersisted(Scan scan) throws Exception {
    FuzzyRowFilter filter = null;
    if (scan.getFilter() != null) {
        // todo: currently we support only FuzzyRowFilter as an experimental feature
        if (scan.getFilter() instanceof FuzzyRowFilter) {
            filter = (FuzzyRowFilter) scan.getFilter();
        } else {
            throw new DataSetException("Unknown filter type: " + scan.getFilter());
        }
    }
    // delegate the range scan to the LevelDB core, then adapt its scanner to the table Scanner API
    final Scanner scanner = core.scan(scan.getStartRow(), scan.getStopRow(), filter, null, tx);
    return new Scanner() {

        @Nullable
        @Override
        public Row next() {
            return LevelDBTable.this.next(scanner);
        }

        @Override
        public void close() {
            scanner.close();
        }
    };
}
Also used: Scanner(io.cdap.cdap.api.dataset.table.Scanner), DataSetException(io.cdap.cdap.api.dataset.DataSetException), FuzzyRowFilter(io.cdap.cdap.data2.dataset2.lib.table.FuzzyRowFilter), ReadOnly(io.cdap.cdap.api.annotation.ReadOnly)
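
The instanceof check above is a fail-fast pattern: unsupported filter implementations are rejected with a DataSetException up front rather than silently ignored at scan time. A generic sketch of that pattern, assuming nothing beyond DataSetException itself; FilterValidation and requireFilterType are hypothetical helpers, not cdap APIs:

import io.cdap.cdap.api.dataset.DataSetException;

final class FilterValidation {

    // Returns the filter cast to the supported type, or null when no filter was given;
    // any other implementation is rejected before the scan begins.
    static <T> T requireFilterType(Object filter, Class<T> supported) {
        if (filter == null) {
            return null;
        }
        if (supported.isInstance(filter)) {
            return supported.cast(filter);
        }
        throw new DataSetException("Unknown filter type: " + filter);
    }
}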

Example 68 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class ObjectStoreDataset, method encode().

private byte[] encode(T object) {
    // encode T using schema
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryEncoder encoder = new BinaryEncoder(bos);
    try {
        this.datumWriter.encode(object, encoder);
    } catch (IOException e) {
        // should never happen: the destination stream is purely in-memory
        throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
    }
    return bos.toByteArray();
}
Also used: DataSetException(io.cdap.cdap.api.dataset.DataSetException), BinaryEncoder(io.cdap.cdap.common.io.BinaryEncoder), ByteArrayOutputStream(java.io.ByteArrayOutputStream), IOException(java.io.IOException)
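
The method never touches external I/O, which is why the IOException branch is treated as unreachable; it is still wrapped so the method signature stays free of checked exceptions. A generic sketch of the same buffer-and-wrap pattern, with a Writer callback standing in for cdap's datumWriter; EncodeSketch and Writer are hypothetical, not cdap APIs:

import io.cdap.cdap.api.dataset.DataSetException;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

final class EncodeSketch {

    // Stand-in for a schema-aware writer such as the datumWriter above.
    interface Writer<T> {
        void write(T object, ByteArrayOutputStream out) throws IOException;
    }

    static <T> byte[] encode(T object, Writer<T> writer) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            writer.write(object, bos);
        } catch (IOException e) {
            // purely in-memory, so unreachable in practice; wrap it unchecked anyway
            throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
        }
        return bos.toByteArray();
    }
}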

Example 69 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class ObjectStoreDataset, method decode().

private T decode(byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    // decode T using schema
    ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
    BinaryDecoder decoder = new BinaryDecoder(bis);
    try {
        return getReflectionDatumReader().read(decoder, this.schema);
    } catch (IOException e) {
        // should never happen: the source stream is purely in-memory
        throw new DataSetException("Failed to decode read object: " + e.getMessage(), e);
    }
}
Also used: DataSetException(io.cdap.cdap.api.dataset.DataSetException), ByteArrayInputStream(java.io.ByteArrayInputStream), IOException(java.io.IOException), BinaryDecoder(io.cdap.cdap.common.io.BinaryDecoder)
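
decode is the mirror image of encode, including the null short-circuit that maps a missing row to a null object. A round-trip sketch under the same assumptions as EncodeSketch above; DecodeSketch and Reader are hypothetical stand-ins for the reflection-based datum reader, not cdap APIs:

import io.cdap.cdap.api.dataset.DataSetException;
import java.io.ByteArrayInputStream;
import java.io.IOException;

final class DecodeSketch {

    // Stand-in for a schema-aware reader such as getReflectionDatumReader() above.
    interface Reader<T> {
        T read(ByteArrayInputStream in) throws IOException;
    }

    static <T> T decode(byte[] bytes, Reader<T> reader) {
        if (bytes == null) {
            return null; // a missing row decodes to a missing object
        }
        try {
            return reader.read(new ByteArrayInputStream(bytes));
        } catch (IOException e) {
            // in-memory read; unreachable in practice, wrapped unchecked for symmetry with encode
            throw new DataSetException("Failed to decode read object: " + e.getMessage(), e);
        }
    }
}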

Example 70 with DataSetException

Use of io.cdap.cdap.api.dataset.DataSetException in project cdap by cdapio.

The class PartitionedFileSetTest, method testUpdateMetadata().

@Test
public void testUpdateMetadata() throws Exception {
    final PartitionedFileSet dataset = dsFrameworkUtil.getInstance(pfsInstance);
    dsFrameworkUtil.newTransactionExecutor((TransactionAware) dataset).execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            PartitionOutput partitionOutput = dataset.getPartitionOutput(PARTITION_KEY);
            ImmutableMap<String, String> originalEntries = ImmutableMap.of("key1", "value1", "key2", "value2");
            partitionOutput.setMetadata(originalEntries);
            partitionOutput.addPartition();
            ImmutableMap<String, String> updatedMetadata = ImmutableMap.of("key3", "value3");
            dataset.addMetadata(PARTITION_KEY, updatedMetadata);
            PartitionDetail partitionDetail = dataset.getPartition(PARTITION_KEY);
            Assert.assertNotNull(partitionDetail);
            HashMap<String, String> combinedEntries = Maps.newHashMap();
            combinedEntries.putAll(originalEntries);
            combinedEntries.putAll(updatedMetadata);
            Assert.assertEquals(combinedEntries, partitionDetail.getMetadata().asMap());
            // with the setMetadata API, writing a key that already exists overwrites its previous value
            dataset.setMetadata(PARTITION_KEY, Collections.singletonMap("key3", "value4"));
            partitionDetail = dataset.getPartition(PARTITION_KEY);
            Assert.assertNotNull(partitionDetail);
            Assert.assertEquals(ImmutableMap.of("key1", "value1", "key2", "value2", "key3", "value4"), partitionDetail.getMetadata().asMap());
            // adding an entry for a key that already exists throws an exception
            try {
                dataset.addMetadata(PARTITION_KEY, "key2", "value3");
                Assert.fail("Expected not to be able to update an existing metadata entry");
            } catch (DataSetException expected) {
            }
            // multiple metadata entries can be removed at once; keys that don't exist are ignored without error
            dataset.removeMetadata(PARTITION_KEY, ImmutableSet.of("key2", "key3", "key4"));
            // key2 and key3 were removed
            partitionDetail = dataset.getPartition(PARTITION_KEY);
            Assert.assertNotNull(partitionDetail);
            Assert.assertEquals(ImmutableMap.of("key1", "value1"), partitionDetail.getMetadata().asMap());
            try {
                // adding metadata for a nonexistent partition throws an exception
                PartitionKey nonexistentPartitionKey = PartitionKey.builder().addIntField("i", 42).addLongField("l", 17L).addStringField("s", "nonexistent").build();
                dataset.addMetadata(nonexistentPartitionKey, "key2", "value3");
                Assert.fail("Expected not to be able to add metadata for a nonexistent partition");
            } catch (DataSetException expected) {
            }
        }
    });
}
Also used: DataSetException(io.cdap.cdap.api.dataset.DataSetException), PartitionOutput(io.cdap.cdap.api.dataset.lib.PartitionOutput), HashMap(java.util.HashMap), TransactionAware(org.apache.tephra.TransactionAware), PartitionKey(io.cdap.cdap.api.dataset.lib.PartitionKey), PartitionedFileSet(io.cdap.cdap.api.dataset.lib.PartitionedFileSet), TransactionExecutor(org.apache.tephra.TransactionExecutor), PartitionDetail(io.cdap.cdap.api.dataset.lib.PartitionDetail), PartitionNotFoundException(io.cdap.cdap.api.dataset.PartitionNotFoundException), PartitionAlreadyExistsException(io.cdap.cdap.api.dataset.lib.PartitionAlreadyExistsException), IOException(java.io.IOException), ImmutableMap(com.google.common.collect.ImmutableMap), Test(org.junit.Test)
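
Condensed, the metadata contract the test exercises is: setMetadata overwrites, addMetadata is create-only, and removeMetadata ignores absent keys. A minimal sketch of those rules against an existing partition; pfs and key are assumed to be a live PartitionedFileSet and the key of an already-added partition, and in real use these calls run inside a transaction, as the TransactionExecutor wrapper in the test shows:

import com.google.common.collect.ImmutableSet;
import io.cdap.cdap.api.dataset.DataSetException;
import io.cdap.cdap.api.dataset.lib.PartitionKey;
import io.cdap.cdap.api.dataset.lib.PartitionedFileSet;
import java.util.Collections;

final class MetadataContract {

    static void exercise(PartitionedFileSet pfs, PartitionKey key) {
        pfs.addMetadata(key, "color", "blue");                            // ok: key is new
        pfs.setMetadata(key, Collections.singletonMap("color", "red"));   // ok: setMetadata overwrites
        try {
            pfs.addMetadata(key, "color", "green");                       // addMetadata is create-only
        } catch (DataSetException expected) {
            // key already exists
        }
        pfs.removeMetadata(key, ImmutableSet.of("color", "absent"));      // absent keys are ignored
    }
}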

Aggregations

DataSetException (io.cdap.cdap.api.dataset.DataSetException): 74
IOException (java.io.IOException): 54
ReadOnly (io.cdap.cdap.api.annotation.ReadOnly): 14
Map (java.util.Map): 12
TransactionFailureException (org.apache.tephra.TransactionFailureException): 12
Location (org.apache.twill.filesystem.Location): 12
PartitionKey (io.cdap.cdap.api.dataset.lib.PartitionKey): 10
Result (io.cdap.cdap.api.dataset.table.Result): 10
NavigableMap (java.util.NavigableMap): 10
Test (org.junit.Test): 10
PartitionAlreadyExistsException (io.cdap.cdap.api.dataset.lib.PartitionAlreadyExistsException): 8
TimePartitionedFileSet (io.cdap.cdap.api.dataset.lib.TimePartitionedFileSet): 8
Put (org.apache.hadoop.hbase.client.Put): 8
ImmutableMap (com.google.common.collect.ImmutableMap): 6
WriteOnly (io.cdap.cdap.api.annotation.WriteOnly): 6
DatasetManagementException (io.cdap.cdap.api.dataset.DatasetManagementException): 6
PartitionedFileSet (io.cdap.cdap.api.dataset.lib.PartitionedFileSet): 6
Put (io.cdap.cdap.api.dataset.table.Put): 6
Row (io.cdap.cdap.api.dataset.table.Row): 6
UnauthorizedException (io.cdap.cdap.security.spi.authorization.UnauthorizedException): 6