Use of io.cdap.cdap.api.dataset.DataSetException in the project cdap by cdapio.
Class HBaseMetricsTable, method get:
/**
 * Reads the value of a single column for the given row.
 *
 * @param row raw (un-salted) row key; a distributed key is derived from it before the lookup
 * @param column column qualifier to read within the table's single column family
 * @return the latest cell value, or {@code null} if the row/column is absent
 * @throws DataSetException if the underlying HBase read fails
 */
@Override
@Nullable
public byte[] get(byte[] row, byte[] column) {
  try {
    // Salt/distribute the row key the same way writes do, so reads hit the right region.
    byte[] saltedKey = createDistributedRowKey(row);
    Get request = tableUtil.buildGet(saltedKey)
      .addColumn(columnFamily, column)
      .setMaxVersions(1)
      .build();
    Result result = table.get(request);
    return result.isEmpty() ? null : result.getValue(columnFamily, column);
  } catch (IOException e) {
    throw new DataSetException("Get failed on table " + tableId, e);
  }
}
Use of io.cdap.cdap.api.dataset.DataSetException in the project cdap by cdapio.
Class LevelDBTable, method scanPersisted:
/**
 * Opens a scanner over the persisted (LevelDB-backed) rows in the given scan range.
 *
 * <p>Only {@link FuzzyRowFilter} is accepted as a filter; any other filter type is rejected.
 *
 * @param scan scan range and optional filter
 * @return a {@link Scanner} delegating to the core LevelDB scanner
 * @throws DataSetException if the scan carries an unsupported filter type
 */
@ReadOnly
@Override
protected Scanner scanPersisted(Scan scan) throws Exception {
  // todo: currently we support only FuzzyRowFilter as an experimental feature
  FuzzyRowFilter fuzzyFilter = null;
  if (scan.getFilter() instanceof FuzzyRowFilter) {
    fuzzyFilter = (FuzzyRowFilter) scan.getFilter();
  } else if (scan.getFilter() != null) {
    throw new DataSetException("Unknown filter type: " + scan.getFilter());
  }
  final Scanner delegate = core.scan(scan.getStartRow(), scan.getStopRow(), fuzzyFilter, null, tx);
  // Wrap the core scanner so each row goes through this table's next() post-processing.
  return new Scanner() {
    @Nullable
    @Override
    public Row next() {
      return LevelDBTable.this.next(delegate);
    }

    @Override
    public void close() {
      delegate.close();
    }
  };
}
Use of io.cdap.cdap.api.dataset.DataSetException in the project cdap by cdapio.
Class ObjectStoreDataset, method encode:
/**
 * Serializes {@code object} to bytes with this dataset's datum writer.
 *
 * @param object the value to serialize using the dataset's schema
 * @return the binary encoding of {@code object}
 * @throws DataSetException if serialization fails (not expected for in-memory streams)
 */
private byte[] encode(T object) {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  try {
    this.datumWriter.encode(object, new BinaryEncoder(buffer));
  } catch (IOException e) {
    // SHOULD NEVER happen
    throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
  }
  return buffer.toByteArray();
}
Use of io.cdap.cdap.api.dataset.DataSetException in the project cdap by cdapio.
Class ObjectStoreDataset, method decode:
/**
 * Deserializes a value previously written by {@code encode}.
 *
 * @param bytes the serialized form, or {@code null}
 * @return the decoded value, or {@code null} when {@code bytes} is {@code null}
 * @throws DataSetException if deserialization fails (not expected for in-memory streams)
 */
private T decode(byte[] bytes) {
  if (bytes == null) {
    return null;
  }
  BinaryDecoder decoder = new BinaryDecoder(new ByteArrayInputStream(bytes));
  try {
    return getReflectionDatumReader().read(decoder, this.schema);
  } catch (IOException e) {
    // SHOULD NEVER happen
    throw new DataSetException("Failed to decode read object: " + e.getMessage(), e);
  }
}
Use of io.cdap.cdap.api.dataset.DataSetException in the project cdap by cdapio.
Class PartitionedFileSetTest, method testUpdateMetadata:
/**
 * Verifies partition-metadata update semantics: addMetadata merges new keys, setMetadata
 * overwrites existing keys, addMetadata on an existing key fails, removeMetadata silently
 * ignores missing keys, and metadata operations on a nonexistent partition fail.
 */
@Test
public void testUpdateMetadata() throws Exception {
final PartitionedFileSet dataset = dsFrameworkUtil.getInstance(pfsInstance);
// All dataset operations must run inside a transaction.
dsFrameworkUtil.newTransactionExecutor((TransactionAware) dataset).execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
// Create a partition carrying two initial metadata entries.
PartitionOutput partitionOutput = dataset.getPartitionOutput(PARTITION_KEY);
ImmutableMap<String, String> originalEntries = ImmutableMap.of("key1", "value1", "key2", "value2");
partitionOutput.setMetadata(originalEntries);
partitionOutput.addPartition();
// addMetadata with a brand-new key merges with the existing entries.
ImmutableMap<String, String> updatedMetadata = ImmutableMap.of("key3", "value3");
dataset.addMetadata(PARTITION_KEY, updatedMetadata);
PartitionDetail partitionDetail = dataset.getPartition(PARTITION_KEY);
Assert.assertNotNull(partitionDetail);
HashMap<String, String> combinedEntries = Maps.newHashMap();
combinedEntries.putAll(originalEntries);
combinedEntries.putAll(updatedMetadata);
Assert.assertEquals(combinedEntries, partitionDetail.getMetadata().asMap());
// using the setMetadata API, adding an entry, for a key that already exists will overwrite the previous value
dataset.setMetadata(PARTITION_KEY, Collections.singletonMap("key3", "value4"));
partitionDetail = dataset.getPartition(PARTITION_KEY);
Assert.assertNotNull(partitionDetail);
Assert.assertEquals(ImmutableMap.of("key1", "value1", "key2", "value2", "key3", "value4"), partitionDetail.getMetadata().asMap());
// by contrast, addMetadata for a key that already exists will throw an Exception
try {
dataset.addMetadata(PARTITION_KEY, "key2", "value3");
Assert.fail("Expected not to be able to update an existing metadata entry");
} catch (DataSetException expected) {
// expected: addMetadata must not overwrite an existing key
}
// possible to remove multiple metadata entries; if a key doesn't exist, no error is thrown
dataset.removeMetadata(PARTITION_KEY, ImmutableSet.of("key2", "key3", "key4"));
// key2 and key3 were removed
partitionDetail = dataset.getPartition(PARTITION_KEY);
Assert.assertNotNull(partitionDetail);
Assert.assertEquals(ImmutableMap.of("key1", "value1"), partitionDetail.getMetadata().asMap());
try {
// adding metadata for a partition that does not exist will throw an Exception
PartitionKey nonexistentPartitionKey = PartitionKey.builder().addIntField("i", 42).addLongField("l", 17L).addStringField("s", "nonexistent").build();
dataset.addMetadata(nonexistentPartitionKey, "key2", "value3");
Assert.fail("Expected not to be able to add metadata for a nonexistent partition");
} catch (DataSetException expected) {
// expected: the partition was never created
}
}
});
}
Aggregations