Example use of org.apache.tephra.TransactionExecutor in the CDAP project (caskdata).
From the class UsageDatasetTest, method testProgramDatasetMapping.
// Verifies that UsageDataset correctly records and removes program <-> dataset usage mappings,
// at both program and application granularity. Each step runs in its own transaction.
@Test
public void testProgramDatasetMapping() throws Exception {
final UsageDataset usageDataset = getUsageDataset("testProgramDatasetMapping");
// Wrap the dataset in an in-memory transaction executor so each execute() is one transaction.
TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor((TransactionAware) usageDataset);
// Add mappings
txnl.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
// app1 programs: flow11 uses datasets 1 and 3, flow12 uses dataset 2, service11 uses dataset 1.
usageDataset.register(flow11, datasetInstance1);
usageDataset.register(flow11, datasetInstance3);
usageDataset.register(flow12, datasetInstance2);
usageDataset.register(service11, datasetInstance1);
// app2 programs: flow21 uses dataset 2, service21 uses dataset 1.
usageDataset.register(flow21, datasetInstance2);
usageDataset.register(service21, datasetInstance1);
}
});
// Read back in a separate transaction to confirm the writes committed.
txnl.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
// Verify program mappings
Assert.assertEquals(ImmutableSet.of(datasetInstance1, datasetInstance3), usageDataset.getDatasets(flow11));
Assert.assertEquals(ImmutableSet.of(datasetInstance2), usageDataset.getDatasets(flow12));
Assert.assertEquals(ImmutableSet.of(datasetInstance1), usageDataset.getDatasets(service11));
Assert.assertEquals(ImmutableSet.of(datasetInstance2), usageDataset.getDatasets(flow21));
Assert.assertEquals(ImmutableSet.of(datasetInstance1), usageDataset.getDatasets(service21));
// Verify app mappings
// An app's datasets are the union over all of its programs' datasets.
Assert.assertEquals(ImmutableSet.of(datasetInstance1, datasetInstance2, datasetInstance3), usageDataset.getDatasets(flow11.getParent()));
Assert.assertEquals(ImmutableSet.of(datasetInstance1, datasetInstance2), usageDataset.getDatasets(flow21.getParent()));
// Verify dataset mappings
// The reverse index: which programs use a given dataset.
Assert.assertEquals(ImmutableSet.of(flow11, service11, service21), usageDataset.getPrograms(datasetInstance1));
Assert.assertEquals(ImmutableSet.of(flow12, flow21), usageDataset.getPrograms(datasetInstance2));
Assert.assertEquals(ImmutableSet.of(flow11), usageDataset.getPrograms(datasetInstance3));
}
});
// --------- Delete app1 -----------
// Unregistering an app should drop the mappings of all its programs.
txnl.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
usageDataset.unregister(flow11.getParent());
}
});
txnl.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
// There should be no mappings for programs of app1 now
Assert.assertEquals(ImmutableSet.<DatasetId>of(), usageDataset.getDatasets(flow11));
Assert.assertEquals(ImmutableSet.<DatasetId>of(), usageDataset.getDatasets(flow12));
Assert.assertEquals(ImmutableSet.<DatasetId>of(), usageDataset.getDatasets(service11));
// app2 programs must be unaffected by app1's removal.
Assert.assertEquals(ImmutableSet.of(datasetInstance2), usageDataset.getDatasets(flow21));
Assert.assertEquals(ImmutableSet.of(datasetInstance1), usageDataset.getDatasets(service21));
// Verify app mappings
Assert.assertEquals(ImmutableSet.<DatasetId>of(), usageDataset.getDatasets(flow11.getParent()));
Assert.assertEquals(ImmutableSet.of(datasetInstance1, datasetInstance2), usageDataset.getDatasets(flow21.getParent()));
// Verify dataset mappings
// Reverse index now only lists app2 programs.
Assert.assertEquals(ImmutableSet.of(service21), usageDataset.getPrograms(datasetInstance1));
Assert.assertEquals(ImmutableSet.of(flow21), usageDataset.getPrograms(datasetInstance2));
Assert.assertEquals(ImmutableSet.<ProgramId>of(), usageDataset.getPrograms(datasetInstance3));
}
});
}
Example use of org.apache.tephra.TransactionExecutor in the CDAP project (caskdata).
From the class HiveExploreServiceFileSetTestRun, method doTransaction.
/**
 * Executes the given {@link Runnable} within a single transaction on the supplied dataset.
 *
 * @param dataset the dataset participating in the transaction; must implement {@code TransactionAware}
 * @param runnable the work to perform inside the transaction
 * @throws Exception if the transactional execution fails
 */
private void doTransaction(Dataset dataset, final Runnable runnable) throws Exception {
  // Adapt the Runnable to Tephra's Subroutine interface.
  TransactionExecutor.Subroutine body = new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      runnable.run();
    }
  };
  TransactionExecutor txExecutor =
      new DefaultTransactionExecutor(transactionSystemClient, (TransactionAware) dataset);
  txExecutor.execute(body);
}
Example use of org.apache.tephra.TransactionExecutor in the CDAP project (caskdata).
From the class HBaseQueueDebugger, method scanQueue.
/**
 * Scans the given queue and aggregates statistics over its entries, section by section
 * between consumer-group barriers.
 * <p>
 * Only works for {@link co.cask.cdap.data2.transaction.queue.hbase.ShardedHBaseQueueStrategy}.
 *
 * @param queueName name of the queue to scan
 * @param consumerGroupId if non-null, restricts the scan to barriers of this consumer group
 * @return aggregated statistics for the scanned queue
 * @throws NotFoundException if no consumer state store exists for the queue
 */
public QueueStatistics scanQueue(final QueueName queueName, @Nullable Long consumerGroupId) throws Exception {
  HBaseConsumerStateStore stateStore;
  try {
    stateStore = queueAdmin.getConsumerStateStore(queueName);
  } catch (IllegalStateException e) {
    // Missing state store means the queue itself is unknown; surface that to the caller.
    throw new NotFoundException(queueName);
  }
  // Read all barriers (per-group section boundaries) in a single transaction.
  TransactionExecutor txExecutor = Transactions.createTransactionExecutor(txExecutorFactory, stateStore);
  Multimap<Long, QueueBarrier> barriers =
      txExecutor.execute(new TransactionExecutor.Function<HBaseConsumerStateStore, Multimap<Long, QueueBarrier>>() {
        @Override
        public Multimap<Long, QueueBarrier> apply(HBaseConsumerStateStore input) throws Exception {
          return input.getAllBarriers();
        }
      }, stateStore);
  printProgress("Got %d barriers\n", barriers.size());

  QueueStatistics stats = new QueueStatistics();
  if (consumerGroupId != null) {
    barriers = Multimaps.filterKeys(barriers, Predicates.equalTo(consumerGroupId));
  }

  for (Map.Entry<Long, Collection<QueueBarrier>> entry : barriers.asMap().entrySet()) {
    long groupId = entry.getKey();
    Collection<QueueBarrier> groupBarriers = entry.getValue();

    printProgress("Scanning barriers for group %d\n", groupId);

    int currentSection = 1;
    PeekingIterator<QueueBarrier> barrierIterator = Iterators.peekingIterator(groupBarriers.iterator());
    while (barrierIterator.hasNext()) {
      QueueBarrier start = barrierIterator.next();
      // A section spans from one barrier to the next; the last section has no end barrier.
      QueueBarrier end = barrierIterator.hasNext() ? barrierIterator.peek() : null;

      printProgress("Scanning section %d/%d...\n", currentSection, groupBarriers.size());
      scanQueue(txExecutor, stateStore, queueName, start, end, stats);
      printProgress("Current results: %s\n", stats.getReport(showTxTimestampOnly()));
      currentSection++;
    }
    // Bug fix: terminate the progress line with '\n', consistent with every other
    // progress message, so the following output does not run onto the same line.
    printProgress("Scanning complete\n");
  }

  System.out.printf("Results for queue %s: %s\n", queueName.toString(), stats.getReport(showTxTimestampOnly()));
  return stats;
}
Example use of org.apache.tephra.TransactionExecutor in the CDAP project (caskdata).
From the class DatasetInstanceHandlerTest, method testCreateDelete.
// Verifies the full create/write/read/delete lifecycle of dataset instances through the
// dataset instance handler, including that deleting an instance also deletes its data.
@Test
public void testCreateDelete() throws Exception {
try {
// Deploy the modules providing the "table" dataset type.
deployModule("default-table", InMemoryTableModule.class);
deployModule("default-core", CoreDatasetsModule.class);
// cannot create instance with same name again
Assert.assertEquals(HttpStatus.SC_OK, createInstance("myTable1", "table", DatasetProperties.EMPTY).getResponseCode());
Assert.assertEquals(HttpStatus.SC_OK, createInstance("myTable2", "table", DatasetProperties.EMPTY).getResponseCode());
Assert.assertEquals(2, getInstances().getResponseObject().size());
// we want to verify that data is also gone, so we write smth to tables first
final Table table1 = dsFramework.getDataset(NamespaceId.DEFAULT.dataset("myTable1"), DatasetDefinition.NO_ARGUMENTS, null);
final Table table2 = dsFramework.getDataset(NamespaceId.DEFAULT.dataset("myTable2"), DatasetDefinition.NO_ARGUMENTS, null);
Assert.assertNotNull(table1);
Assert.assertNotNull(table2);
// Single executor driving transactions over both tables at once.
TransactionExecutor txExecutor = new DefaultTransactionExecutor(new InMemoryTxSystemClient(txManager), ImmutableList.of((TransactionAware) table1, (TransactionAware) table2));
txExecutor.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
table1.put(new Put("key1", "col1", "val1"));
table2.put(new Put("key2", "col2", "val2"));
}
});
// verify that we can read the data
txExecutor.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
Assert.assertEquals("val1", table1.get(new Get("key1", "col1")).getString("col1"));
Assert.assertEquals("val2", table2.get(new Get("key2", "col2")).getString("col2"));
}
});
// delete table, check that it is deleted, create again and verify that it is empty
Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("myTable1").getResponseCode());
ObjectResponse<List<DatasetSpecificationSummary>> instances = getInstances();
Assert.assertEquals(1, instances.getResponseObject().size());
Assert.assertEquals("myTable2", instances.getResponseObject().get(0).getName());
Assert.assertEquals(HttpStatus.SC_OK, createInstance("myTable1", "table", DatasetProperties.EMPTY).getResponseCode());
Assert.assertEquals(2, getInstances().getResponseObject().size());
// verify that table1 is empty. Note: it is ok for test purpose to re-use the table clients
txExecutor.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
// table1 was deleted and re-created, so its previous data must be gone;
// table2 was untouched and must still hold its value.
Assert.assertTrue(table1.get(new Get("key1", "col1")).isEmpty());
Assert.assertEquals("val2", table2.get(new Get("key2", "col2")).getString("col2"));
// writing smth to table1 for subsequent test
table1.put(new Put("key3", "col3", "val3"));
}
});
// delete all tables, check that they deleted, create again and verify that they are empty
deleteInstances();
Assert.assertEquals(0, getInstances().getResponseObject().size());
Assert.assertEquals(HttpStatus.SC_OK, createInstance("myTable1", "table", DatasetProperties.EMPTY).getResponseCode());
Assert.assertEquals(HttpStatus.SC_OK, createInstance("myTable2", "table", DatasetProperties.EMPTY).getResponseCode());
Assert.assertEquals(2, getInstances().getResponseObject().size());
// verify that tables are empty. Note: it is ok for test purpose to re-use the table clients
txExecutor.execute(new TransactionExecutor.Subroutine() {
@Override
public void apply() throws Exception {
Assert.assertTrue(table1.get(new Get("key3", "col3")).isEmpty());
Assert.assertTrue(table2.get(new Get("key2", "col2")).isEmpty());
}
});
} finally {
// cleanup
deleteInstances();
Assert.assertEquals(HttpStatus.SC_OK, deleteModules().getResponseCode());
}
}
Example use of org.apache.tephra.TransactionExecutor in the CDAP project (caskdata).
From the class IndexedTableTest, method testIncrementIndexing.
/**
 * Tests that increments on indexed columns of an {@link IndexedTable} keep the index
 * consistent: the index entry for the old value is removed and one for the new value is
 * added. Also verifies multi-column increments, rejection of read-less increments on
 * indexed columns, and failure on incrementing a non-long value.
 */
@Test
public void testIncrementIndexing() throws Exception {
  DatasetId incrTabInstance = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("incrtab");
  dsFrameworkUtil.createInstance("indexedTable", incrTabInstance,
      DatasetProperties.builder().add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "idx1,idx2,idx3").build());
  final IndexedTable iTable = dsFrameworkUtil.getInstance(incrTabInstance);
  final byte[] idxCol1 = Bytes.toBytes("idx1");
  final byte[] idxCol2 = Bytes.toBytes("idx2");
  final byte[] idxCol3 = Bytes.toBytes("idx3");
  final byte[] row1 = Bytes.toBytes("row1");
  try {
    TransactionExecutor tx = dsFrameworkUtil.newTransactionExecutor(iTable);
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        long result = iTable.incrementAndGet(row1, idxCol1, 1);
        assertEquals(1L, result);
      }
    });
    final byte[] oneBytes = Bytes.toBytes(1L);
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // The row should now be findable via the index on value 1.
        try (Scanner scanner = iTable.readByIndex(idxCol1, oneBytes)) {
          Row row = scanner.next();
          TableAssert.assertRow(row, row1, new byte[][] { idxCol1 }, new byte[][] { oneBytes });
          assertEmpty(scanner);
        }
      }
    });
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        long result = iTable.incrementAndGet(row1, idxCol1, 1);
        assertEquals(2L, result);
      }
    });
    final byte[] twoBytes = Bytes.toBytes(2L);
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // previous index by value 1 should be gone
        // (use try-with-resources throughout, consistent with the first scanner above)
        try (Scanner scanner = iTable.readByIndex(idxCol1, oneBytes)) {
          assertEmpty(scanner);
        }
        // should now be indexed by value 2
        try (Scanner scanner = iTable.readByIndex(idxCol1, twoBytes)) {
          Row row = scanner.next();
          TableAssert.assertRow(row, row1, new byte[][] { idxCol1 }, new byte[][] { twoBytes });
          assertEmpty(scanner);
        }
      }
    });
    final byte[] threeBytes = Bytes.toBytes(3L);
    final byte[][] idxCols = new byte[][] { idxCol1, idxCol2, idxCol3 };
    final byte[][] expectedValues = new byte[][] { threeBytes, oneBytes, oneBytes };
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // Multi-column increment: idx1 goes to 3, idx2 and idx3 start at 1.
        Row result = iTable.incrementAndGet(row1, idxCols, new long[] { 1, 1, 1 });
        assertNotNull(result);
        TableAssert.assertColumns(result, idxCols, expectedValues);
      }
    });
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // Each of the three indexed columns should now locate the row.
        try (Scanner scanner = iTable.readByIndex(idxCol1, threeBytes)) {
          Row row = scanner.next();
          TableAssert.assertRow(row, row1, idxCols, expectedValues);
          assertEmpty(scanner);
        }
        try (Scanner scanner = iTable.readByIndex(idxCol2, oneBytes)) {
          Row row = scanner.next();
          TableAssert.assertRow(row, row1, idxCols, expectedValues);
          assertEmpty(scanner);
        }
        try (Scanner scanner = iTable.readByIndex(idxCol3, oneBytes)) {
          Row row = scanner.next();
          TableAssert.assertRow(row, row1, idxCols, expectedValues);
          assertEmpty(scanner);
        }
      }
    });
    final byte[] row2 = Bytes.toBytes("row2");
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // read-less increment on an indexed column should throw an exception
        try {
          iTable.increment(row2, idxCol1, 1L);
          fail("Expected IllegalArgumentException performing increment on indexed column");
        } catch (IllegalArgumentException iae) {
          // expected
        }
        // read-less increment on a non-indexed column should succeed
        iTable.increment(row2, valCol, 1L);
        byte[] result = iTable.get(row2, valCol);
        assertArrayEquals(oneBytes, result);
      }
    });
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        iTable.put(row2, valCol, valA);
      }
    });
    // increment against a column with non-long value should fail
    tx.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        try {
          iTable.incrementAndGet(row2, valCol, 1L);
          fail("Expected NumberFormatException from increment on a column with non-long value");
        } catch (NumberFormatException nfe) {
          // expected
        }
      }
    });
  } finally {
    dsFrameworkUtil.deleteInstance(incrTabInstance);
  }
}
End of aggregated TransactionExecutor usage examples.