Example usage of co.cask.cdap.api.dataset.DataSetException in project cdap by caskdata:
class ObjectStoreDataset, method encode().
private byte[] encode(T object) {
  // Serialize the object into a byte array using the dataset's schema-driven writer.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder binaryEncoder = new BinaryEncoder(out);
  try {
    this.datumWriter.encode(object, binaryEncoder);
  } catch (IOException e) {
    // Writing to an in-memory stream should never fail; surface it as a dataset error if it does.
    throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
  }
  return out.toByteArray();
}
Example usage of co.cask.cdap.api.dataset.DataSetException in project cdap by caskdata:
class LevelDBTable, method scanPersisted().
@ReadOnly
@Override
protected Scanner scanPersisted(Scan scan) throws Exception {
FuzzyRowFilter filter = null;
if (scan.getFilter() != null) {
// todo: currently we support only FuzzyRowFilter as an experimental feature
if (scan.getFilter() instanceof FuzzyRowFilter) {
filter = (FuzzyRowFilter) scan.getFilter();
} else {
throw new DataSetException("Unknown filter type: " + scan.getFilter());
}
}
final Scanner scanner = core.scan(scan.getStartRow(), scan.getStopRow(), filter, null, tx);
return new Scanner() {
@Nullable
@Override
public Row next() {
return LevelDBTable.this.next(scanner);
}
@Override
public void close() {
scanner.close();
}
};
}
Example usage of co.cask.cdap.api.dataset.DataSetException in project cdap by caskdata:
class HBaseMetricsTable, method increment().
@Override
public void increment(NavigableMap<byte[], NavigableMap<byte[], Long>> updates) {
  // Apply a batch of counter increments: one Put per row, then flush in a single round trip.
  List<Put> puts = Lists.newArrayList();
  for (Map.Entry<byte[], NavigableMap<byte[], Long>> update : updates.entrySet()) {
    Put increment = getIncrementalPut(update.getKey(), update.getValue());
    puts.add(increment);
  }
  try {
    hTable.put(puts);
    hTable.flushCommits();
  } catch (IOException e) {
    // Currently there is no other way to detect this condition from the HBase exception than a string match.
    if (e.getMessage() != null && e.getMessage().contains("isn't 64 bits wide")) {
      NumberFormatException nfe =
        new NumberFormatException("Attempted to increment a value that is not convertible to long.");
      // NumberFormatException has no (message, cause) constructor; attach the cause explicitly
      // so the underlying IOException is not lost for diagnostics.
      nfe.initCause(e);
      throw nfe;
    }
    throw new DataSetException("Increment failed on table " + tableId, e);
  }
}
Example usage of co.cask.cdap.api.dataset.DataSetException in project cdap by caskdata:
class HBaseMetricsTable, method incrementAndGet().
@Override
public long incrementAndGet(byte[] row, byte[] column, long delta) {
  // Atomically add delta to the stored counter cell and return the new value.
  Increment increment = new Increment(row);
  increment.addColumn(columnFamily, column, delta);
  try {
    Result result = hTable.increment(increment);
    return Bytes.toLong(result.getValue(columnFamily, column));
  } catch (IOException e) {
    // Currently there is no other way to detect this condition from the HBase exception than a string match.
    if (e.getMessage() != null && e.getMessage().contains("isn't 64 bits wide")) {
      NumberFormatException nfe = new NumberFormatException(
        "Attempted to increment a value that is not convertible to long,"
          + " row: " + Bytes.toStringBinary(row) + " column: " + Bytes.toStringBinary(column));
      // NumberFormatException has no (message, cause) constructor; attach the cause explicitly
      // so the underlying IOException is not lost for diagnostics.
      nfe.initCause(e);
      throw nfe;
    }
    throw new DataSetException("IncrementAndGet failed on table " + tableId, e);
  }
}
Example usage of co.cask.cdap.api.dataset.DataSetException in project cdap by caskdata:
class HBaseMetricsTable, method scan().
@Override
public Scanner scan(@Nullable byte[] startRow, @Nullable byte[] stopRow, @Nullable FuzzyRowFilter filter) {
  // Configure an HBase range scan (with optional fuzzy-row filter) and wrap the result.
  ScanBuilder builder = tableUtil.buildScan();
  configureRangeScan(builder, startRow, stopRow, filter);
  ResultScanner results;
  try {
    results = hTable.getScanner(builder.build());
  } catch (IOException e) {
    throw new DataSetException("Scan failed on table " + tableId, e);
  }
  return new HBaseScanner(results, columnFamily);
}
Aggregations