Search in sources :

Example 11 with DatasetId

use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.

From the class ObjectStoreDatasetTest, method testBatchReads.

@Test
public void testBatchReads() throws Exception {
    DatasetId batchId = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("batch");
    createObjectStoreInstance(batchId, String.class);
    final ObjectStoreDataset<String> store = dsFrameworkUtil.getInstance(batchId);
    TransactionExecutor executor = dsFrameworkUtil.newTransactionExecutor(store);
    final SortedSet<Long> written = Sets.newTreeSet();
    // Populate the store with 1000 pseudo-random keys (fixed seed), remembering each one.
    executor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            Random random = new Random(451);
            for (int i = 0; i < 1000; i++) {
                long value = random.nextLong();
                store.write(Bytes.toBytes(value), Long.toString(value));
                written.add(value);
            }
        }
    });
    // Full scan: every written key must come back through the splits.
    executor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            List<Split> splits = store.getSplits();
            SortedSet<Long> expected = Sets.newTreeSet(written);
            verifySplits(store, splits, expected);
        }
    });
    // Bounded scan: at most 5 splits covering the key range [0x10000000, 0x40000000).
    executor.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            TreeSet<Long> expected = Sets.newTreeSet(written.subSet(0x10000000L, 0x40000000L));
            List<Split> splits = store.getSplits(5, Bytes.toBytes(0x10000000L), Bytes.toBytes(0x40000000L));
            Assert.assertTrue(splits.size() <= 5);
            verifySplits(store, splits, expected);
        }
    });
    deleteAndVerifyInBatch(store, executor, written);
    dsFrameworkUtil.deleteInstance(batchId);
}
Also used : TransactionExecutor(org.apache.tephra.TransactionExecutor) SortedSet(java.util.SortedSet) ImmutableSortedSet(com.google.common.collect.ImmutableSortedSet) TransactionFailureException(org.apache.tephra.TransactionFailureException) NoSuchElementException(java.util.NoSuchElementException) DatasetId(co.cask.cdap.proto.id.DatasetId) Random(java.util.Random) TreeSet(java.util.TreeSet) List(java.util.List) Test(org.junit.Test)

Example 12 with DatasetId

use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.

From the class ObjectStoreDatasetTest, method testInstantiateWrongClass.

@Test
public void testInstantiateWrongClass() throws Exception {
    DatasetId pairsId = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("pairs");
    createObjectStoreInstance(pairsId, new TypeToken<ImmutablePair<Integer, String>>() {
    }.getType());
    // Type erasure allows obtaining the store under an incompatible element type.
    final ObjectStoreDataset<Custom> wrongStore = dsFrameworkUtil.getInstance(pairsId);
    TransactionExecutor wrongTxnl = dsFrameworkUtil.newTransactionExecutor(wrongStore);
    // A write through the mismatched type must fail inside the transaction.
    try {
        wrongTxnl.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                wrongStore.write(a, new Custom(42, Lists.newArrayList("one", "two")));
            }
        });
        Assert.fail("write should have failed with incompatible type");
    } catch (TransactionFailureException e) {
    // expected
    }
    // A correctly-typed pair, however, can be written and read back.
    final ObjectStoreDataset<ImmutablePair<Integer, String>> pairStore = dsFrameworkUtil.getInstance(pairsId);
    TransactionExecutor pairTxnl = dsFrameworkUtil.newTransactionExecutor(pairStore);
    final ImmutablePair<Integer, String> expected = new ImmutablePair<>(1, "second");
    pairTxnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            // matching type, so this write succeeds
            pairStore.write(a, expected);
        }
    });
    pairTxnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            Assert.assertEquals(expected, pairStore.read(a));
        }
    });
    // Reading the stored pair back as a Custom must fail with a class cast.
    try {
        wrongTxnl.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                // the assignment forces the erased cast to Custom
                Custom custom = wrongStore.read(a);
                Preconditions.checkNotNull(custom);
            }
        });
        Assert.fail("write should have failed with class cast exception");
    } catch (TransactionFailureException e) {
    // expected
    }
    pairTxnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            deleteAndVerify(pairStore, a);
        }
    });
    dsFrameworkUtil.deleteInstance(pairsId);
}
Also used : TransactionExecutor(org.apache.tephra.TransactionExecutor) TransactionFailureException(org.apache.tephra.TransactionFailureException) NoSuchElementException(java.util.NoSuchElementException) DatasetId(co.cask.cdap.proto.id.DatasetId) TransactionFailureException(org.apache.tephra.TransactionFailureException) ImmutablePair(co.cask.cdap.common.utils.ImmutablePair) TypeToken(com.google.common.reflect.TypeToken) Test(org.junit.Test)

Example 13 with DatasetId

use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.

From the class ObjectStoreDatasetTest, method testBatchCustomList.

@Test
public void testBatchCustomList() throws Exception {
    DatasetId listId = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("customlist");
    createObjectStoreInstance(listId, new TypeToken<List<Custom>>() {
    }.getType());
    final ObjectStoreDataset<List<Custom>> listStore = dsFrameworkUtil.getInstance(listId);
    TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(listStore);
    final SortedSet<Long> written = Sets.newTreeSet();
    // Store two lists of Custom objects under keys drawn from a fixed-seed Random.
    txnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            Random random = new Random(100);
            long firstKey = random.nextLong();
            written.add(firstKey);
            listStore.write(Bytes.toBytes(firstKey), Arrays.asList(new Custom(1, Lists.newArrayList("one", "ONE")), new Custom(2, Lists.newArrayList("two", "TWO"))));
            long secondKey = random.nextLong();
            written.add(secondKey);
            listStore.write(Bytes.toBytes(secondKey), Arrays.asList(new Custom(3, Lists.newArrayList("three", "THREE")), new Custom(4, Lists.newArrayList("four", "FOUR"))));
        }
    });
    final SortedSet<Long> snapshot = ImmutableSortedSet.copyOf(written);
    // Scan every split and tick each key off the written set as it is seen.
    txnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            for (Split split : listStore.getSplits()) {
                SplitReader<byte[], List<Custom>> reader = listStore.createSplitReader(split);
                reader.initialize(split);
                while (reader.nextKeyValue()) {
                    Assert.assertTrue(written.remove(Bytes.toLong(reader.getCurrentKey())));
                }
            }
            // anything left over was written but never returned by a split reader
            if (!written.isEmpty()) {
                System.out.println("Remaining [" + written.size() + "]: " + written);
            }
            Assert.assertTrue(written.isEmpty());
        }
    });
    deleteAndVerifyInBatch(listStore, txnl, snapshot);
    dsFrameworkUtil.deleteInstance(listId);
}
Also used : SplitReader(co.cask.cdap.api.data.batch.SplitReader) TransactionExecutor(org.apache.tephra.TransactionExecutor) TransactionFailureException(org.apache.tephra.TransactionFailureException) NoSuchElementException(java.util.NoSuchElementException) DatasetId(co.cask.cdap.proto.id.DatasetId) Random(java.util.Random) TypeToken(com.google.common.reflect.TypeToken) List(java.util.List) Split(co.cask.cdap.api.data.batch.Split) Test(org.junit.Test)

Example 14 with DatasetId

use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.

From the class ExploreExecutorHttpHandler, method disableDataset.

/**
   * Disable ad-hoc exploration of a dataset instance.
   */
@POST
@Path("datasets/{dataset}/disable")
public void disableDataset(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespace, @PathParam("dataset") String datasetName) {
    final DatasetId datasetId = new DatasetId(namespace, datasetName);
    DatasetSpecification spec = retrieveDatasetSpec(responder, datasetId);
    // a null spec means retrieveDatasetSpec(...) has already sent the error response
    if (spec != null) {
        disableDataset(responder, datasetId, spec);
    }
}
Also used : DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) DatasetId(co.cask.cdap.proto.id.DatasetId) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST)

Example 15 with DatasetId

use of co.cask.cdap.proto.id.DatasetId in project cdap by caskdata.

From the class ExploreExecutorHttpHandler, method dropPartition.

// A DELETE would be the natural verb here, but the partition key must travel in the
// request body, which many HTTP clients (including Java's URLConnection) refuse to
// send on DELETE — hence POST.
@POST
@Path("datasets/{dataset}/deletePartition")
public void dropPartition(final HttpRequest request, final HttpResponder responder, @PathParam("namespace-id") String namespace, @PathParam("dataset") String datasetName, @HeaderParam(Constants.Security.Headers.PROGRAM_ID) String programId) throws Exception {
    propagateUserId(request);
    final DatasetId datasetId = new DatasetId(namespace, datasetName);
    // perform the drop while impersonating the owning entity
    impersonator.doAs(getEntityToImpersonate(datasetId, programId), new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            doDropPartition(request, responder, datasetId);
            return null;
        }
    });
}
Also used : BadRequestException(co.cask.cdap.common.BadRequestException) ExploreException(co.cask.cdap.explore.service.ExploreException) SQLException(java.sql.SQLException) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) JsonSyntaxException(com.google.gson.JsonSyntaxException) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) DatasetId(co.cask.cdap.proto.id.DatasetId) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST)

Aggregations

DatasetId (co.cask.cdap.proto.id.DatasetId)180 Test (org.junit.Test)96 ProgramId (co.cask.cdap.proto.id.ProgramId)34 StreamId (co.cask.cdap.proto.id.StreamId)34 Path (javax.ws.rs.Path)34 TransactionExecutor (org.apache.tephra.TransactionExecutor)31 NamespaceId (co.cask.cdap.proto.id.NamespaceId)25 ApplicationId (co.cask.cdap.proto.id.ApplicationId)23 IOException (java.io.IOException)19 POST (javax.ws.rs.POST)17 TransactionFailureException (org.apache.tephra.TransactionFailureException)17 DatasetSpecification (co.cask.cdap.api.dataset.DatasetSpecification)16 QueryResult (co.cask.cdap.proto.QueryResult)16 DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException)15 ColumnDesc (co.cask.cdap.proto.ColumnDesc)14 Map (java.util.Map)13 NoSuchElementException (java.util.NoSuchElementException)13 Table (co.cask.cdap.api.dataset.table.Table)12 NamespaceMeta (co.cask.cdap.proto.NamespaceMeta)11 ProgramRunId (co.cask.cdap.proto.id.ProgramRunId)11