Search in sources :

Example 56 with DatasetSpecification

use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

The method testBasics of the class DatasetInstanceHandlerTest.

/**
 * Exercises the dataset instance HTTP endpoints end to end: module deployment,
 * instance creation and retrieval (including lookup by properties), conflict and
 * not-found responses, workflow-local dataset visibility, and cleanup of both
 * instances and modules.
 */
@Test
public void testBasics() throws Exception {
    // nothing has been created yet, so the instance list must be empty
    List<DatasetSpecificationSummary> instances = getInstances().getResponseObject();
    // nothing in the beginning
    Assert.assertEquals(0, instances.size());
    try {
        // creating a dataset instance of a type not yet known to the system should fail
        DatasetProperties props = DatasetProperties.builder().add("prop1", "val1").build();
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, createInstance("dataset1", "datasetType2", props).getResponseCode());
        // deploy the modules that provide datasetType1 and datasetType2
        deployModule("module1", TestModule1.class);
        deployModule("module2", TestModule2.class);
        // now creating the dataset instance should succeed
        String description = "test instance description";
        HttpResponse response = createInstance("dataset1", "datasetType2", description, props);
        Assert.assertEquals(HttpStatus.SC_OK, response.getResponseCode());
        // verify that a module whose type is in use by a dataset cannot be deleted
        int modulesBeforeDelete = getModules().getResponseObject().size();
        Assert.assertEquals(HttpStatus.SC_CONFLICT, deleteModule("module2").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_CONFLICT, deleteModules().getResponseCode());
        Assert.assertEquals(modulesBeforeDelete, getModules().getResponseObject().size());
        // verify that the dataset instance can be retrieved using its properties
        Map<String, String> properties = new HashMap<>();
        properties.putAll(props.getProperties());
        instances = getInstancesWithProperties(NamespaceId.DEFAULT.getNamespace(), properties).getResponseObject();
        Assert.assertEquals(1, instances.size());
        // adding a property the dataset does not have must yield no matches
        properties.put("some_prop_not_associated_with_dataset", "somevalue");
        instances = getInstancesWithProperties(NamespaceId.DEFAULT.getNamespace(), properties).getResponseObject();
        Assert.assertEquals(0, instances.size());
        // verify the instance was created
        instances = getInstances().getResponseObject();
        Assert.assertEquals(1, instances.size());
        // verify the returned spec is the same as expected
        DatasetSpecification dataset1Spec = createType2Spec("dataset1", "datasetType2", description, props);
        Assert.assertEquals(spec2Summary(dataset1Spec), instances.get(0));
        // verify the created instance's meta info can be retrieved
        DatasetMeta datasetInfo = getInstanceObject("dataset1").getResponseObject();
        Assert.assertEquals(dataset1Spec, datasetInfo.getSpec());
        Assert.assertEquals(dataset1Spec.getType(), datasetInfo.getType().getName());
        // the type meta should list the 2 modules that must be loaded to create the type's class,
        // in the order they must be loaded (module2 depends on module1, per the verify calls below)
        List<DatasetModuleMeta> modules = datasetInfo.getType().getModules();
        Assert.assertEquals(2, modules.size());
        DatasetTypeHandlerTest.verify(modules.get(0), "module1", TestModule1.class, ImmutableList.of("datasetType1"), Collections.<String>emptyList(), ImmutableList.of("module2"));
        DatasetTypeHandlerTest.verify(modules.get(1), "module2", TestModule2.class, ImmutableList.of("datasetType2"), ImmutableList.of("module1"), Collections.<String>emptyList());
        // retrieving a non-existent instance should return 404
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, getInstance("non-existing-dataset").getResponseCode());
        // creating an instance with the same name again should conflict
        Assert.assertEquals(HttpStatus.SC_CONFLICT, createInstance("dataset1", "datasetType2", props).getResponseCode());
        Assert.assertEquals(1, getInstances().getResponseObject().size());
        // deleting a non-existing dataset instance should return 404
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, deleteInstance("non-existing-dataset").getResponseCode());
        Assert.assertEquals(1, getInstances().getResponseObject().size());
        // verify creation of a dataset instance with null (unspecified) properties
        Assert.assertEquals(HttpStatus.SC_OK, createInstance("nullPropertiesTable", "datasetType2").getResponseCode());
        // since no instance description is provided, the description given by the dataset type is used
        DatasetSpecification nullPropertiesTableSpec = createType2Spec("nullPropertiesTable", "datasetType2", TestModule2.DESCRIPTION, DatasetProperties.EMPTY);
        DatasetSpecificationSummary actualSummary = getSummaryForInstance("nullPropertiesTable", getInstances().getResponseObject());
        Assert.assertEquals(spec2Summary(nullPropertiesTableSpec), actualSummary);
        // delete both dataset instances
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("dataset1").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("nullPropertiesTable").getResponseCode());
        Assert.assertEquals(0, getInstances().getResponseObject().size());
        // create a workflow-local dataset instance
        DatasetProperties localDSProperties = DatasetProperties.builder().add("prop1", "val1").add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true").build();
        Assert.assertEquals(HttpStatus.SC_OK, createInstance("localDSInstance", "datasetType2", localDSProperties).getResponseCode());
        // workflow-local datasets are excluded from listing, so getInstances should still return 0
        Assert.assertEquals(0, getInstances().getResponseObject().size());
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("localDSInstance").getResponseCode());
        // delete the dataset modules (module2 first, since it depends on module1)
        Assert.assertEquals(HttpStatus.SC_OK, deleteModule("module2").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_OK, deleteModule("module1").getResponseCode());
    } finally {
        // best-effort cleanup so a failed assertion does not leak state into other tests
        deleteInstance("dataset1");
        deleteInstance("nullPropertiesTable");
        deleteInstance("localDSInstance");
        deleteModule("module2");
        deleteModule("module1");
    }
}
Also used : DatasetModuleMeta(co.cask.cdap.proto.DatasetModuleMeta) HashMap(java.util.HashMap) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) HttpResponse(co.cask.common.http.HttpResponse) DatasetMeta(co.cask.cdap.proto.DatasetMeta) DatasetSpecificationSummary(co.cask.cdap.proto.DatasetSpecificationSummary) Test(org.junit.Test)

Example 57 with DatasetSpecification

use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

The method testDatasetInstances of the class DatasetServiceAuthorizationTest.

/**
 * Verifies authorization enforcement on dataset instance operations: a user needs
 * ADMIN on a dataset to add, update, truncate, or delete it; the list API is
 * filtered to datasets the caller has privileges on; and deleteAllInstances
 * requires ADMIN on every dataset in the namespace.
 */
@Test
public void testDatasetInstances() throws Exception {
    final DatasetId dsId = NamespaceId.DEFAULT.dataset("myds");
    final DatasetId dsId1 = NamespaceId.DEFAULT.dataset("myds1");
    DatasetId dsId2 = NamespaceId.DEFAULT.dataset("myds2");
    SecurityRequestContext.setUserId(ALICE.getName());
    assertAuthorizationFailure(new DatasetOperationExecutor() {

        @Override
        public void execute() throws Exception {
            dsFramework.addInstance(Table.class.getName(), dsId, DatasetProperties.EMPTY);
        }
    }, "Alice should not be able to add a dataset instance since she does not have ADMIN privileges on the dataset");
    // grant alice ADMIN access on the dataset
    grantAndAssertSuccess(dsId, ALICE, ImmutableSet.of(Action.ADMIN));
    // now adding an instance should succeed
    dsFramework.addInstance(Table.class.getName(), dsId, DatasetProperties.EMPTY);
    // alice should be able to perform all operations on the dataset
    Assert.assertTrue(dsFramework.hasInstance(dsId));
    Assert.assertNotNull(dsFramework.getDataset(dsId, ImmutableMap.<String, String>of(), null));
    dsFramework.updateInstance(dsId, DatasetProperties.builder().add("key", "value").build());
    // operations should fail for bob, who has no privileges yet
    SecurityRequestContext.setUserId(BOB.getName());
    assertAuthorizationFailure(new DatasetOperationExecutor() {

        @Override
        public void execute() throws Exception {
            dsFramework.getDataset(dsId, ImmutableMap.<String, String>of(), null);
        }
    }, String.format("Expected %s to not be have access to %s.", BOB, dsId));
    assertAuthorizationFailure(new DatasetOperationExecutor() {

        @Override
        public void execute() throws Exception {
            dsFramework.updateInstance(dsId, DatasetProperties.builder().add("key", "val").build());
        }
    }, String.format("Expected %s to not be have %s privilege on %s.", BOB, Action.ADMIN, dsId));
    assertAuthorizationFailure(new DatasetOperationExecutor() {

        @Override
        public void execute() throws Exception {
            dsFramework.truncateInstance(dsId);
        }
    }, String.format("Expected %s to not be have %s privilege on %s.", BOB, Action.ADMIN, dsId));
    grantAndAssertSuccess(dsId, BOB, ImmutableSet.of(Action.ADMIN));
    // now update should succeed
    dsFramework.updateInstance(dsId, DatasetProperties.builder().add("key", "val").build());
    // as should truncate
    dsFramework.truncateInstance(dsId);
    DatasetSpecification datasetSpec = dsFramework.getDatasetSpec(dsId);
    Assert.assertNotNull(datasetSpec);
    Assert.assertEquals("val", datasetSpec.getProperty("key"));
    // grant Bob the privileges needed to create the other two datasets
    grantAndAssertSuccess(dsId1, BOB, ImmutableSet.of(Action.ADMIN));
    grantAndAssertSuccess(dsId2, BOB, ImmutableSet.of(Action.ADMIN));
    dsFramework.addInstance(Table.class.getName(), dsId1, DatasetProperties.EMPTY);
    dsFramework.addInstance(Table.class.getName(), dsId2, DatasetProperties.EMPTY);
    // since Bob now has some privileges on all datasets, the list API should return all datasets for him
    Assert.assertEquals(ImmutableSet.of(dsId, dsId1, dsId2), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // Alice should only be able to see dsId, since she only has privilege on this dataset
    SecurityRequestContext.setUserId(ALICE.getName());
    Assert.assertEquals(ImmutableSet.of(dsId), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // grant EXECUTE (but not ADMIN) on the other datasets to user Alice
    grantAndAssertSuccess(dsId1, ALICE, ImmutableSet.of(Action.EXECUTE));
    grantAndAssertSuccess(dsId2, ALICE, ImmutableSet.of(Action.EXECUTE));
    // Alice should not be able to delete all datasets since she does not have ADMIN on all datasets in the namespace
    try {
        dsFramework.deleteAllInstances(NamespaceId.DEFAULT);
        Assert.fail();
    } catch (Exception e) {
        Assert.assertTrue(e.getMessage().contains("is not authorized to perform actions"));
    }
    // alice should still be able to see all dataset instances
    Assert.assertEquals(ImmutableSet.of(dsId1, dsId2, dsId), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // should get an authorization error if alice tries to delete datasets that she does not have permissions on
    assertAuthorizationFailure(new DatasetOperationExecutor() {

        @Override
        public void execute() throws Exception {
            dsFramework.deleteInstance(dsId1);
        }
    }, String.format("Alice should not be able to delete instance %s since she does not have privileges", dsId1));
    grantAndAssertSuccess(dsId1, ALICE, ImmutableSet.of(Action.ADMIN));
    Assert.assertEquals(ImmutableSet.of(dsId1, dsId2, dsId), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // since Alice now is ADMIN for dsId1, she should be able to delete it
    dsFramework.deleteInstance(dsId1);
    // now Alice only sees dsId2 and dsId in the list
    Assert.assertEquals(ImmutableSet.of(dsId2, dsId), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // Bob should be able to see dsId and dsId2
    SecurityRequestContext.setUserId(BOB.getName());
    Assert.assertEquals(ImmutableSet.of(dsId2, dsId), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    dsFramework.deleteInstance(dsId2);
    SecurityRequestContext.setUserId(ALICE.getName());
    dsFramework.deleteInstance(dsId);
    // give Alice ADMIN on dsId2 too, so she holds ADMIN on all three datasets below
    grantAndAssertSuccess(dsId2, ALICE, EnumSet.of(Action.ADMIN));
    // add the instances again
    dsFramework.addInstance(Table.class.getName(), dsId, DatasetProperties.EMPTY);
    dsFramework.addInstance(Table.class.getName(), dsId1, DatasetProperties.EMPTY);
    dsFramework.addInstance(Table.class.getName(), dsId2, DatasetProperties.EMPTY);
    Assert.assertEquals(ImmutableSet.of(dsId, dsId1, dsId2), summaryToDatasetIdSet(dsFramework.getInstances(NamespaceId.DEFAULT)));
    // should be successful since ALICE has ADMIN on all datasets
    dsFramework.deleteAllInstances(NamespaceId.DEFAULT);
    Assert.assertTrue(dsFramework.getInstances(NamespaceId.DEFAULT).isEmpty());
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) InstanceNotFoundException(co.cask.cdap.api.dataset.InstanceNotFoundException) IOException(java.io.IOException) UnauthorizedException(co.cask.cdap.security.spi.authorization.UnauthorizedException) DatasetId(co.cask.cdap.proto.id.DatasetId) Test(org.junit.Test)

Example 58 with DatasetSpecification

use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

The method getDataset of the class PrefixedTable.

/**
 * Creates a {@link PrefixedTable} instance backed by the embedded "table" dataset.
 *
 * @param datasetContext context in which the dataset is instantiated
 * @param spec specification of this dataset; must contain an embedded spec named "table"
 * @param arguments runtime arguments, passed through to the new {@link PrefixedTable}
 * @param classLoader class loader used to load the underlying dataset classes
 * @return a new {@link PrefixedTable} wrapping the underlying key/value table
 * @throws IOException if the underlying table cannot be instantiated
 */
@Override
public PrefixedTable getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException {
    // resolve the embedded specification of the underlying key/value table
    DatasetSpecification underlyingSpec = spec.getSpecification("table");
    KeyValueTable underlying = tableDef.getDataset(datasetContext, underlyingSpec, DatasetDefinition.NO_ARGUMENTS, classLoader);
    return new PrefixedTable(spec.getName(), underlying, arguments);
}
Also used : DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification)

Example 59 with DatasetSpecification

use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

The method testFileSetReconfigure of the class DatasetDefinitionRegistryWithDefaultModules.

/**
 * Drives a FileSet reconfiguration from (wasExternal, path) to (newExternal, newPath)
 * and asserts that it either succeeds or fails with {@link IncompatibleUpdateException},
 * depending on {@code expectSuccess}.
 */
private void testFileSetReconfigure(boolean expectSuccess, DatasetDefinition def, Boolean wasExternal, String path, Boolean newExternal, String newPath, DatasetProperties extraProps) throws IncompatibleUpdateException {
    Assert.assertTrue(def instanceof Reconfigurable);
    DatasetProperties originalProps = buildFileSetProps(extraProps, wasExternal, path);
    DatasetProperties updatedProps = buildFileSetProps(extraProps, newExternal, newPath);
    DatasetSpecification originalSpec = def.configure("fs", originalProps);
    Reconfigurable reconfigurable = (Reconfigurable) def;
    if (expectSuccess) {
        // a compatible update must be accepted without throwing
        reconfigurable.reconfigure("fs", updatedProps, originalSpec);
        return;
    }
    try {
        reconfigurable.reconfigure("fs", updatedProps, originalSpec);
        Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
        // expected: the property change is incompatible
    }
}
Also used : DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) Reconfigurable(co.cask.cdap.api.dataset.Reconfigurable) IncompatibleUpdateException(co.cask.cdap.api.dataset.IncompatibleUpdateException)

Example 60 with DatasetSpecification

use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

The method testTimeSeriesReconfigure of the class DatasetDefinitionRegistryWithDefaultModules.

/**
 * Verifies that a timeseries dataset accepts a compatible reconfiguration (adding a TTL
 * while keeping the same time interval per row) but rejects a change of the time interval
 * stored per row with {@link IncompatibleUpdateException}.
 */
private void testTimeSeriesReconfigure(DatasetDefinition def) throws IncompatibleUpdateException {
    String oneHourMillis = String.valueOf(TimeUnit.HOURS.toMillis(1));
    String twoHoursMillis = String.valueOf(TimeUnit.HOURS.toMillis(2));
    long oneDayTtlSeconds = TimeUnit.DAYS.toSeconds(1);
    DatasetProperties originalProps = DatasetProperties.builder()
        .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, oneHourMillis)
        .build();
    // same interval as the original, plus a TTL: a compatible change
    DatasetProperties compatibleProps = TableProperties.builder()
        .setTTL(oneDayTtlSeconds)
        .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, oneHourMillis)
        .build();
    // a different interval per row: an incompatible change
    DatasetProperties incompatibleProps = TableProperties.builder()
        .setTTL(oneDayTtlSeconds)
        .add(TimeseriesDataset.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, twoHoursMillis)
        .build();
    DatasetSpecification originalSpec = def.configure("tt", originalProps);
    Assert.assertTrue(def instanceof Reconfigurable);
    Reconfigurable reconfigurable = (Reconfigurable) def;
    reconfigurable.reconfigure("tt", compatibleProps, originalSpec);
    try {
        reconfigurable.reconfigure("tt", incompatibleProps, originalSpec);
        Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
        // expected: the interval-per-row change is incompatible
    }
}
Also used : DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) Reconfigurable(co.cask.cdap.api.dataset.Reconfigurable) IncompatibleUpdateException(co.cask.cdap.api.dataset.IncompatibleUpdateException)

Aggregations

DatasetSpecification (co.cask.cdap.api.dataset.DatasetSpecification)72 DatasetId (co.cask.cdap.proto.id.DatasetId)21 DatasetProperties (co.cask.cdap.api.dataset.DatasetProperties)17 IncompatibleUpdateException (co.cask.cdap.api.dataset.IncompatibleUpdateException)15 Test (org.junit.Test)14 DatasetDefinition (co.cask.cdap.api.dataset.DatasetDefinition)11 DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException)10 POST (javax.ws.rs.POST)10 Path (javax.ws.rs.Path)10 DatasetAdmin (co.cask.cdap.api.dataset.DatasetAdmin)9 DatasetTypeMeta (co.cask.cdap.proto.DatasetTypeMeta)9 NotFoundException (co.cask.cdap.common.NotFoundException)8 AbstractDatasetDefinition (co.cask.cdap.api.dataset.lib.AbstractDatasetDefinition)7 BadRequestException (co.cask.cdap.common.BadRequestException)7 IOException (java.io.IOException)7 DatasetSpecificationSummary (co.cask.cdap.proto.DatasetSpecificationSummary)6 Map (java.util.Map)6 DatasetNotFoundException (co.cask.cdap.common.DatasetNotFoundException)5 Reconfigurable (co.cask.cdap.api.dataset.Reconfigurable)4 Updatable (co.cask.cdap.api.dataset.Updatable)4