Use of org.apache.tephra.TransactionExecutor in project cdap by caskdata.
From the class TableConcurrentTest, the method testConcurrentOnSingleTable.
@Test(timeout = 120000)
public void testConcurrentOnSingleTable() throws Exception {
  // Set of clients read and write data concurrently.
  // * n clients increment a value with increasing values (+1, +2, ...) at specific row:column 100 times
  // * n clients append 100 columns to a set of 4 rows which includes the row that gets incremented (2 at a time).
  //   Append is: read all columns, add <last_column+1>
  // todo: improve to use deletes. E.g. in append - remove all existing before appending new
  final int n = 5;
  getTableAdmin(CONTEXT1, MY_TABLE).create();
  try {
    ExecutorService executor = Executors.newFixedThreadPool(n * 2);
    // start threads
    for (int i = 0; i < n; i++) {
      executor.submit(new IncrementingClient(txExecutorFactory));
      executor.submit(new AppendingClient(txExecutorFactory));
    }
    // wait for finish
    executor.shutdown();
    executor.awaitTermination(2, TimeUnit.MINUTES);
    // verify result
    final T table = getTable(CONTEXT1, MY_TABLE);
    TransactionExecutor txExecutor = txExecutorFactory.createExecutor(Lists.newArrayList((TransactionAware) table));
    txExecutor.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        verifyIncrements();
        verifyAppends();
      }

      private void verifyAppends() throws Exception {
        for (byte[] row : ROWS_TO_APPEND_TO) {
          Map<byte[], byte[]> cols = table.get(row).getColumns();
          Assert.assertFalse(cols.isEmpty());
          // +1 because there was one extra column that we incremented
          boolean isIncrementedColumn = Arrays.equals(ROW_TO_INCREMENT, row);
          Assert.assertEquals(n * 100 + (isIncrementedColumn ? 1 : 0), cols.size());
          for (int i = 0; i < n * 100; i++) {
            Assert.assertArrayEquals(Bytes.toBytes("foo" + i), cols.get(Bytes.toBytes("column" + i)));
          }
        }
      }

      private void verifyIncrements() throws Exception {
        Map<byte[], byte[]> result = table.get(ROW_TO_INCREMENT, new byte[][] { COLUMN_TO_INCREMENT }).getColumns();
        Assert.assertFalse(result.isEmpty());
        byte[] val = result.get(COLUMN_TO_INCREMENT);
        long sum1to100 = ((1 + 99) * 99 / 2);
        Assert.assertEquals(n * sum1to100, Bytes.toLong(val));
      }
    });
  } finally {
    getTableAdmin(CONTEXT1, MY_TABLE).drop();
  }
}
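The test above shows the core TransactionExecutor pattern: wrap the TransactionAware dataset in an executor and do all reads and writes inside Subroutine.apply(), which commits if the subroutine returns normally and rolls the transaction back if it throws. Below is a minimal sketch of that pattern in isolation, assuming only a TransactionExecutorFactory and a TransactionAware dataset are in scope; the helper name runInTransaction is illustrative and not part of CDAP.

import com.google.common.collect.Lists;
import org.apache.tephra.TransactionAware;
import org.apache.tephra.TransactionExecutor;
import org.apache.tephra.TransactionExecutorFactory;

// Illustrative helper (not part of CDAP): run a piece of work inside one transaction.
void runInTransaction(TransactionExecutorFactory txExecutorFactory, TransactionAware table) throws Exception {
  TransactionExecutor executor = txExecutorFactory.createExecutor(Lists.newArrayList(table));
  // Subroutine has a single apply() method, so on Java 8+ it can also be written as a lambda.
  executor.execute(() -> {
    // transactional reads/writes against the dataset go here;
    // the transaction commits if this body returns normally and rolls back if it throws
  });
}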
Use of org.apache.tephra.TransactionExecutor in project cdap by caskdata.
From the class ObjectStoreDatasetTest, the method testStringStore.
@Test
public void testStringStore() throws Exception {
  DatasetId strings = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("strings");
  createObjectStoreInstance(strings, String.class);
  final ObjectStoreDataset<String> stringStore = dsFrameworkUtil.getInstance(strings);
  TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(stringStore);
  final String string = "this is a string";
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      stringStore.write(a, string);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      String result = stringStore.read(a);
      Assert.assertEquals(string, result);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      deleteAndVerify(stringStore, a);
    }
  });
  dsFrameworkUtil.deleteInstance(strings);
}
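In this and the following tests, a is a row key (a byte[] constant) defined elsewhere in the test class, and deleteAndVerify is a helper that is not shown in the snippet. A plausible reconstruction of that helper, assuming ObjectStoreDataset exposes delete(byte[]) and read(byte[]):

import org.junit.Assert;

// Hypothetical reconstruction of the deleteAndVerify helper used by these tests
// (a private method of the test class): delete the key inside the current
// transaction and confirm that a subsequent read returns null.
private void deleteAndVerify(ObjectStoreDataset<?> store, byte[] key) {
  store.delete(key);
  Assert.assertNull(store.read(key));
}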
Use of org.apache.tephra.TransactionExecutor in project cdap by caskdata.
From the class ObjectStoreDatasetTest, the method testScanObjectStore.
@Test
public void testScanObjectStore() throws Exception {
  DatasetId scan = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("scan");
  createObjectStoreInstance(scan, String.class);
  final ObjectStoreDataset<String> t = dsFrameworkUtil.getInstance(scan);
  TransactionExecutor txnl = dsFrameworkUtil.newTransactionExecutor(t);
  // write 10 values
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      for (int i = 0; i < 10; i++) {
        byte[] key = Bytes.toBytes(i);
        t.write(key, String.valueOf(i));
      }
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      Iterator<KeyValue<byte[], String>> objectsIterator = t.scan(Bytes.toBytes(0), Bytes.toBytes(10));
      int sum = 0;
      while (objectsIterator.hasNext()) {
        sum += Integer.parseInt(objectsIterator.next().getValue());
      }
      // check that the sum equals the sum of the values (0..9), which are the rows written and scanned for
      Assert.assertEquals(45, sum);
    }
  });
  // start a transaction, scan part of the elements using the scanner, close the scanner,
  // then call next() on the scanner; it should fail
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      CloseableIterator<KeyValue<byte[], String>> objectsIterator = t.scan(Bytes.toBytes(0), Bytes.toBytes(10));
      int rowCount = 0;
      while (objectsIterator.hasNext() && (rowCount < 5)) {
        rowCount++;
      }
      objectsIterator.close();
      try {
        objectsIterator.next();
        Assert.fail("Reading after closing Scanner returned result.");
      } catch (NoSuchElementException e) {
        // expected: the scanner has already been closed
      }
    }
  });
  dsFrameworkUtil.deleteInstance(scan);
}
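The last subroutine above closes the scanner explicitly so that it can assert next() fails afterwards. When a test only needs to consume the scan, try-with-resources keeps the close implicit; a minimal sketch (placed inside a subroutine body, as above), assuming CDAP's CloseableIterator extends AutoCloseable:

// Minimal sketch: consume the whole scan and let try-with-resources close the iterator.
int sum = 0;
try (CloseableIterator<KeyValue<byte[], String>> it = t.scan(Bytes.toBytes(0), Bytes.toBytes(10))) {
  while (it.hasNext()) {
    sum += Integer.parseInt(it.next().getValue());
  }
}
Assert.assertEquals(45, sum);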
Use of org.apache.tephra.TransactionExecutor in project cdap by caskdata.
From the class ObjectStoreDatasetTest, the method testInnerStore.
@Test
public void testInnerStore() throws Exception {
  DatasetId inners = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("inners");
  createObjectStoreInstance(inners, new TypeToken<CustomWithInner.Inner<Integer>>() { }.getType());
  final ObjectStoreDataset<CustomWithInner.Inner<Integer>> innerStore = dsFrameworkUtil.getInstance(inners);
  TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(innerStore);
  final CustomWithInner.Inner<Integer> inner = new CustomWithInner.Inner<>(42, 99);
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      innerStore.write(a, inner);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      CustomWithInner.Inner<Integer> result = innerStore.read(a);
      Assert.assertEquals(inner, result);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      deleteAndVerify(innerStore, a);
    }
  });
  dsFrameworkUtil.deleteInstance(inners);
}
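This test creates the dataset instance with a Guava TypeToken rather than a plain Class, because the anonymous subclass is what captures the full parameterized type (here Inner<Integer>) at runtime. A small stand-alone illustration of that capture:

import com.google.common.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.List;

public class TypeTokenDemo {
  public static void main(String[] args) {
    // The anonymous TypeToken subclass preserves the type argument,
    // which a plain Class<?> cannot express.
    Type listOfString = new TypeToken<List<String>>() { }.getType();
    System.out.println(listOfString); // prints: java.util.List<java.lang.String>
  }
}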
Use of org.apache.tephra.TransactionExecutor in project cdap by caskdata.
From the class ObjectStoreDatasetTest, the method testCustomStore.
@Test
public void testCustomStore() throws Exception {
  DatasetId customs = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("customs");
  createObjectStoreInstance(customs, new TypeToken<Custom>() { }.getType());
  final ObjectStoreDataset<Custom> customStore = dsFrameworkUtil.getInstance(customs);
  TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(customStore);
  final Custom custom = new Custom(42, Lists.newArrayList("one", "two"));
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      customStore.write(a, custom);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      Custom result = customStore.read(a);
      Assert.assertEquals(custom, result);
    }
  });
  final Custom custom2 = new Custom(-1, null);
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      customStore.write(a, custom2);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      Custom result = customStore.read(a);
      Assert.assertEquals(custom2, result);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      deleteAndVerify(customStore, a);
    }
  });
  dsFrameworkUtil.deleteInstance(customs);
}
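The read-then-assert subroutines above discard the value once apply() returns. When the caller needs the value itself, the transaction can instead return it; a hedged sketch of the same read-back, assuming Tephra's TransactionExecutor also offers a value-returning execute(Callable<O>) overload (these lines would sit inside the test method, which already declares throws Exception):

// Sketch: read inside a transaction and hand the result back to the caller,
// assuming a TransactionExecutor.execute(Callable<O>) overload is available.
Custom readBack = txnl.execute(() -> customStore.read(a));
Assert.assertEquals(custom2, readBack);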