Use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.
From the class ObjectStoreDatasetTest, method testBatchReads.
@Test
public void testBatchReads() throws Exception {
  DatasetId batch = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("batch");
  createObjectStoreInstance(batch, String.class);
  final ObjectStoreDataset<String> t = dsFrameworkUtil.getInstance(batch);
  TransactionExecutor txnl = dsFrameworkUtil.newTransactionExecutor(t);
  final SortedSet<Long> keysWritten = Sets.newTreeSet();
  // write 1000 random values to the table and remember them in a set
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      Random rand = new Random(451);
      for (int i = 0; i < 1000; i++) {
        long keyLong = rand.nextLong();
        byte[] key = Bytes.toBytes(keyLong);
        t.write(key, Long.toString(keyLong));
        keysWritten.add(keyLong);
      }
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // get the splits for the table
      List<Split> splits = t.getSplits();
      // read each split and verify the keys
      SortedSet<Long> keysToVerify = Sets.newTreeSet(keysWritten);
      verifySplits(t, splits, keysToVerify);
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // get a specific number of splits for a subrange
      TreeSet<Long> keysToVerify = Sets.newTreeSet(keysWritten.subSet(0x10000000L, 0x40000000L));
      List<Split> splits = t.getSplits(5, Bytes.toBytes(0x10000000L), Bytes.toBytes(0x40000000L));
      Assert.assertTrue(splits.size() <= 5);
      // read each split and verify the keys
      verifySplits(t, splits, keysToVerify);
    }
  });
  deleteAndVerifyInBatch(t, txnl, keysWritten);
  dsFrameworkUtil.deleteInstance(batch);
}
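The verifySplits helper that both split checks rely on is defined elsewhere in the test class. A minimal sketch of what it might look like, assuming it simply walks each split with a SplitReader, checks that each value is the string form of its key, and crosses the key off the expected set (the name and call signature come from the test above; the body is an assumption):

// Sketch only: assumed implementation of the verifySplits helper used above.
private void verifySplits(ObjectStoreDataset<String> t, List<Split> splits,
                          SortedSet<Long> keysToVerify) throws Exception {
  for (Split split : splits) {
    SplitReader<byte[], String> reader = t.createSplitReader(split);
    reader.initialize(split);
    while (reader.nextKeyValue()) {
      byte[] key = reader.getCurrentKey();
      String value = reader.getCurrentValue();
      // the test wrote Long.toString(keyLong) under each key, so the value must match
      Assert.assertEquals(Long.toString(Bytes.toLong(key)), value);
      Assert.assertTrue(keysToVerify.remove(Bytes.toLong(key)));
    }
  }
  // every key in the expected set must have been seen exactly once
  Assert.assertTrue(keysToVerify.isEmpty());
}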
Use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.
From the class ObjectStoreDatasetTest, method testInstantiateWrongClass.
@Test
public void testInstantiateWrongClass() throws Exception {
  DatasetId pairs = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("pairs");
  createObjectStoreInstance(pairs, new TypeToken<ImmutablePair<Integer, String>>() { }.getType());
  // note: due to type erasure, this succeeds
  final ObjectStoreDataset<Custom> store = dsFrameworkUtil.getInstance(pairs);
  TransactionExecutor storeTxnl = dsFrameworkUtil.newTransactionExecutor(store);
  // but now it must fail with incompatible type
  try {
    storeTxnl.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        Custom custom = new Custom(42, Lists.newArrayList("one", "two"));
        // 'a' is a byte[] key defined as a field of the test class (not shown in this snippet)
        store.write(a, custom);
      }
    });
    Assert.fail("write should have failed with incompatible type");
  } catch (TransactionFailureException e) {
    // expected
  }
  // write a correct object to the pair store
  final ObjectStoreDataset<ImmutablePair<Integer, String>> pairStore = dsFrameworkUtil.getInstance(pairs);
  TransactionExecutor pairStoreTxnl = dsFrameworkUtil.newTransactionExecutor(pairStore);
  final ImmutablePair<Integer, String> pair = new ImmutablePair<>(1, "second");
  pairStoreTxnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // should succeed
      pairStore.write(a, pair);
    }
  });
  pairStoreTxnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      ImmutablePair<Integer, String> actualPair = pairStore.read(a);
      Assert.assertEquals(pair, actualPair);
    }
  });
  // now try to read that as a custom object, should fail with class cast
  try {
    storeTxnl.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        Custom custom = store.read(a);
        Preconditions.checkNotNull(custom);
      }
    });
    Assert.fail("read should have failed with class cast exception");
  } catch (TransactionFailureException e) {
    // expected
  }
  pairStoreTxnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      deleteAndVerify(pairStore, a);
    }
  });
  dsFrameworkUtil.deleteInstance(pairs);
}
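The deleteAndVerify helper invoked in the last transaction is likewise not part of this snippet. A plausible sketch, assuming the dataset exposes delete(byte[]) alongside read and write (both the delete call and the body below are assumptions):

// Sketch only: assumed implementation of the deleteAndVerify helper used above.
// Deletes the row for the given key and verifies that a subsequent read finds nothing.
private void deleteAndVerify(ObjectStoreDataset<ImmutablePair<Integer, String>> store, byte[] key) {
  store.delete(key);   // assumes ObjectStoreDataset offers delete(byte[])
  Assert.assertNull(store.read(key));
}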
Use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.
From the class ObjectStoreDatasetTest, method testBatchCustomList.
@Test
public void testBatchCustomList() throws Exception {
  DatasetId customlist = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("customlist");
  createObjectStoreInstance(customlist, new TypeToken<List<Custom>>() { }.getType());
  final ObjectStoreDataset<List<Custom>> customStore = dsFrameworkUtil.getInstance(customlist);
  TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(customStore);
  final SortedSet<Long> keysWritten = Sets.newTreeSet();
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      List<Custom> customList1 = Arrays.asList(new Custom(1, Lists.newArrayList("one", "ONE")),
                                               new Custom(2, Lists.newArrayList("two", "TWO")));
      Random rand = new Random(100);
      long key1 = rand.nextLong();
      keysWritten.add(key1);
      customStore.write(Bytes.toBytes(key1), customList1);
      List<Custom> customList2 = Arrays.asList(new Custom(3, Lists.newArrayList("three", "THREE")),
                                               new Custom(4, Lists.newArrayList("four", "FOUR")));
      long key2 = rand.nextLong();
      keysWritten.add(key2);
      customStore.write(Bytes.toBytes(key2), customList2);
    }
  });
  final SortedSet<Long> keysWrittenCopy = ImmutableSortedSet.copyOf(keysWritten);
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      // get the splits for the table
      List<Split> splits = customStore.getSplits();
      for (Split split : splits) {
        SplitReader<byte[], List<Custom>> reader = customStore.createSplitReader(split);
        reader.initialize(split);
        while (reader.nextKeyValue()) {
          byte[] key = reader.getCurrentKey();
          Assert.assertTrue(keysWritten.remove(Bytes.toLong(key)));
        }
      }
      // verify all keys have been read
      if (!keysWritten.isEmpty()) {
        System.out.println("Remaining [" + keysWritten.size() + "]: " + keysWritten);
      }
      Assert.assertTrue(keysWritten.isEmpty());
    }
  });
  deleteAndVerifyInBatch(customStore, txnl, keysWrittenCopy);
  dsFrameworkUtil.deleteInstance(customlist);
}
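The deleteAndVerifyInBatch helper, shared with testBatchReads above, is also defined elsewhere in the test class. A hedged sketch of one way it could be written, assuming it deletes every written key in one transaction and verifies the reads in another (the raw generics let the same helper serve both the String store and the List<Custom> store; the body and the delete(byte[]) call are assumptions):

// Sketch only: assumed implementation of the deleteAndVerifyInBatch helper used above.
private void deleteAndVerifyInBatch(final ObjectStoreDataset store, TransactionExecutor txnl,
                                    final SortedSet<Long> keysWritten) throws Exception {
  // delete every key the test wrote
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      for (long key : keysWritten) {
        store.delete(Bytes.toBytes(key));   // assumes delete(byte[]) exists on the dataset
      }
    }
  });
  // verify that none of the keys can still be read
  txnl.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      for (long key : keysWritten) {
        Assert.assertNull(store.read(Bytes.toBytes(key)));
      }
    }
  });
}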
Use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.
From the class ExploreExecutorHttpHandler, method disableDataset.
/**
 * Disable ad-hoc exploration of a dataset instance.
 */
@POST
@Path("datasets/{dataset}/disable")
public void disableDataset(HttpRequest request, HttpResponder responder,
                           @PathParam("namespace-id") String namespace,
                           @PathParam("dataset") String datasetName) {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  DatasetSpecification datasetSpec = retrieveDatasetSpec(responder, datasetId);
  if (datasetSpec == null) {
    // the spec could not be retrieved and retrieveDatasetSpec() has already responded
    return;
  }
  disableDataset(responder, datasetId, datasetSpec);
}
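The handler only resolves the DatasetId and then delegates to an overloaded disableDataset(responder, datasetId, datasetSpec). For orientation, the sketch below shows how a client might hit this endpoint with a plain HttpURLConnection; the host, port, and URL prefix are assumptions, and only the trailing datasets/{dataset}/disable segment is taken from the @Path annotation above.

import java.net.HttpURLConnection;
import java.net.URL;

public class DisableDatasetClient {
  public static void main(String[] args) throws Exception {
    // Everything before "datasets/myDataset/disable" is an assumed prefix, not taken from the handler.
    URL url = new URL("http://localhost:11015/v3/namespaces/default/data/explore/datasets/myDataset/disable");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.getOutputStream().close();   // empty POST body
    System.out.println("disable returned HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}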
Use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.
From the class ExploreExecutorHttpHandler, method dropPartition.
// this should really be a DELETE request. However, the partition key must be passed in the body
// of the request, and that does not work with many HTTP clients, including Java's URLConnection.
@POST
@Path("datasets/{dataset}/deletePartition")
public void dropPartition(final HttpRequest request, final HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName,
                          @HeaderParam(Constants.Security.Headers.PROGRAM_ID) String programId) throws Exception {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  propagateUserId(request);
  impersonator.doAs(getEntityToImpersonate(datasetId, programId), new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      doDropPartition(request, responder, datasetId);
      return null;
    }
  });
}
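The comment above explains why this is a POST rather than a DELETE: the partition key travels in the request body. A sketch of a client call that posts a partition key as JSON follows; the URL prefix and the exact JSON shape of the partition key are assumptions made for illustration only.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class DropPartitionClient {
  public static void main(String[] args) throws Exception {
    // Everything before "datasets/myDataset/deletePartition" is an assumed prefix; the JSON
    // field name "time" is a hypothetical partition key field, not taken from the handler.
    URL url = new URL("http://localhost:11015/v3/namespaces/default/data/explore/datasets/myDataset/deletePartition");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    byte[] body = "{\"time\": \"1422918600000\"}".getBytes(StandardCharsets.UTF_8);
    try (OutputStream out = conn.getOutputStream()) {
      out.write(body);   // partition key goes in the POST body
    }
    System.out.println("deletePartition returned HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}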