Example usage of io.cdap.cdap.api.dataset.table.Put from the project cdap by caskdata:
class TableTest, method testBatchWritableKeyIsIgnored.
@Test
public void testBatchWritableKeyIsIgnored() throws Exception {
  // Verifies that the key argument of Table.write() is ignored: only the row
  // embedded in the Put determines where the data lands.
  String tableName = "batchWritableTable";
  getTableAdmin(CONTEXT1, tableName).create();
  try (Table table = getTable(CONTEXT1, tableName)) {
    TransactionAware txTable = (TransactionAware) table;

    // Write three times in one transaction with keys null, 'q' and 'a';
    // every Put targets row "a" regardless of the key.
    Transaction tx = txClient.startShort();
    txTable.startTx(tx);
    table.write(null, new Put("a").add("x", "x"));
    table.write(new byte[] { 'q' }, new Put("a").add("y", "y"));
    table.write(new byte[] { 'a' }, new Put("a").add("z", "z"));
    txClient.canCommitOrThrow(tx, txTable.getTxChanges());
    txTable.commitTx();
    txClient.commitOrThrow(tx);

    // Read back in a fresh transaction: row "q" must be empty and all three
    // columns must have ended up in row "a".
    tx = txClient.startShort();
    txTable.startTx(tx);
    Assert.assertTrue(table.get(new Get("q")).isEmpty());
    Row row = table.get(new Get("a"));
    Assert.assertEquals(3, row.getColumns().size());
    Assert.assertEquals("x", row.getString("x"));
    Assert.assertEquals("y", row.getString("y"));
    Assert.assertEquals("z", row.getString("z"));
    txTable.commitTx();
    txClient.abort(tx);
  } finally {
    getTableAdmin(CONTEXT1, tableName).drop();
  }
}
Example usage of io.cdap.cdap.api.dataset.table.Put from the project cdap by caskdata:
class TableTest, method testEmptyValuePut.
// TODO figure out what to do with this. As long as ObjectMappedTable writes empty values, we cannot
// throw exceptions, and this test is pointless.
@Ignore
@Test
public void testEmptyValuePut() throws Exception {
  // Each write variant (single put, multi-column put, Put object, compareAndSwap)
  // is expected to reject an empty value with IllegalArgumentException.
  // NOTE(review): the transaction is started but startTx() is never called on the
  // table — presumably irrelevant since every operation should fail up front; confirm.
  DatasetAdmin admin = getTableAdmin(CONTEXT1, MY_TABLE);
  admin.create();
  Transaction tx = txClient.startShort();
  try (Table myTable = getTable(CONTEXT1, MY_TABLE)) {
    expectIllegalArgument("Put with empty value should fail.",
                          () -> myTable.put(R1, C1, MT));
    expectIllegalArgument("Put with empty value should fail.",
                          () -> myTable.put(R1, a(C1, C2), a(V1, MT)));
    expectIllegalArgument("Put with empty value should fail.",
                          () -> myTable.put(new Put(R1).add(C1, V1).add(C2, MT)));
    expectIllegalArgument("CompareAndSwap with empty value should fail.",
                          () -> myTable.compareAndSwap(R1, C1, V1, MT));
  } finally {
    txClient.abort(tx);
    admin.drop();
  }
}

/**
 * Runs the given operation and fails the test with {@code failureMessage} unless it
 * throws {@link IllegalArgumentException}.
 */
private static void expectIllegalArgument(String failureMessage, Runnable operation) {
  try {
    operation.run();
    Assert.fail(failureMessage);
  } catch (IllegalArgumentException e) {
    // expected
  }
}
Example usage of io.cdap.cdap.api.dataset.table.Put from the project cdap by caskdata:
class TableTest, method testMetrics.
/**
 * Verifies that read/write dataset metrics are emitted for each table operation.
 *
 * @param readless whether the table is created with readless-increment support; with
 *                 readless increments an increment counts as a write only, otherwise
 *                 it counts as a read plus a write
 */
private void testMetrics(boolean readless) throws Exception {
  final String tableName = "survive";
  DatasetProperties props = TableProperties.builder().setReadlessIncrementSupport(readless).build();
  DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, props);
  admin.create();
  try (Table table = getTable(CONTEXT1, tableName, props)) {
    // Collect emitted metrics into a map so each operation's deltas can be asserted.
    final Map<String, Long> metrics = Maps.newHashMap();
    ((MeteredDataset) table).setMetricsCollector(new MetricsCollector() {
      @Override
      public void increment(String metricName, long value) {
        Long old = metrics.get(metricName);
        metrics.put(metricName, old == null ? value : old + value);
      }
      @Override
      public void gauge(String metricName, long value) {
        metrics.put(metricName, value);
      }
    });
    // Note that we don't need to finish tx for metrics to be reported
    Transaction tx0 = txClient.startShort();
    ((TransactionAware) table).startTx(tx0);
    int writes = 0;
    int reads = 0;
    table.put(new Put(R1, C1, V1));
    verifyDatasetMetrics(metrics, ++writes, reads);
    // compareAndSwap reads the current value, then writes the new one
    table.compareAndSwap(R1, C1, V1, V2);
    verifyDatasetMetrics(metrics, ++writes, ++reads);
    // note: will not write anything as expected value will not match
    table.compareAndSwap(R1, C1, V1, V2);
    verifyDatasetMetrics(metrics, writes, ++reads);
    table.increment(new Increment(R2, C2, 1L));
    if (readless) {
      // readless increment is write-only
      verifyDatasetMetrics(metrics, ++writes, reads);
    } else {
      // regular increment reads the old value before writing
      verifyDatasetMetrics(metrics, ++writes, ++reads);
    }
    table.incrementAndGet(new Increment(R2, C2, 1L));
    verifyDatasetMetrics(metrics, ++writes, ++reads);
    table.get(new Get(R1, C1, V1));
    verifyDatasetMetrics(metrics, writes, ++reads);
    // fix: close the scanner — Scanner is Closeable and was previously leaked
    try (Scanner scanner = table.scan(new Scan(null, null))) {
      while (scanner.next() != null) {
        verifyDatasetMetrics(metrics, writes, ++reads);
      }
    }
    table.delete(new Delete(R1, C1, V1));
    verifyDatasetMetrics(metrics, ++writes, reads);
  } finally {
    // drop table
    admin.drop();
  }
}
Example usage of io.cdap.cdap.api.dataset.table.Put from the project cdap by caskdata:
class TableTest, method testReadOwnWrite.
@Test
public void testReadOwnWrite() throws Exception {
  // A transaction must be able to read back data it has written (and flushed
  // via commitTx) before the transaction itself is committed.
  final String tableName = "readOwnWrite";
  DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName);
  admin.create();
  try (Table table = getTable(CONTEXT1, tableName)) {
    TransactionAware txTable = (TransactionAware) table;
    Transaction tx = txClient.startShort();
    try {
      txTable.startTx(tx);
      // Write some data, then flush it by calling commitTx.
      table.put(new Put(R1, C1, V1));
      txTable.commitTx();
      // Try to read the previous write.
      byte[] readBack = table.get(new Get(R1, C1)).get(C1);
      Assert.assertArrayEquals(V1, readBack);
    } finally {
      txClient.commitOrThrow(tx);
    }
  } finally {
    // drop table
    admin.drop();
  }
}
Example usage of io.cdap.cdap.api.dataset.table.Put from the project cdap by caskdata:
class MockRuntimeDatasetSink, method transform.
/**
 * Converts the input record into a table Put and emits it. The row key is a random
 * UUID, so every record is written to its own row; the record's schema and its JSON
 * representation are stored in separate columns.
 */
@Override
public void transform(StructuredRecord input, Emitter<KeyValue<byte[], Put>> emitter) throws Exception {
  byte[] row = Bytes.toBytes(UUID.randomUUID());
  Put put = new Put(row);
  put.add(SCHEMA_COL, input.getSchema().toString());
  put.add(RECORD_COL, StructuredRecordStringConverter.toJsonString(input));
  emitter.emit(new KeyValue<>(row, put));
}
Aggregations