
Example 16 with DatasetSpecificationSummary

use of co.cask.cdap.proto.DatasetSpecificationSummary in project cdap by caskdata.

From the class DatasetInstanceHandlerTest, method testBasics:

@Test
public void testBasics() throws Exception {
    // nothing has been created yet; the modules, types, and instances lists are all empty
    List<DatasetSpecificationSummary> instances = getInstances().getResponseObject();
    // nothing in the beginning
    Assert.assertEquals(0, instances.size());
    try {
        // creating a dataset instance with a type that is not yet known to the system should fail
        DatasetProperties props = DatasetProperties.builder().add("prop1", "val1").build();
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, createInstance("dataset1", "datasetType2", props).getResponseCode());
        // deploy modules
        deployModule("module1", TestModule1.class);
        deployModule("module2", TestModule2.class);
        // create dataset instance
        String description = "test instance description";
        HttpResponse response = createInstance("dataset1", "datasetType2", description, props);
        Assert.assertEquals(HttpStatus.SC_OK, response.getResponseCode());
        // verify that a module cannot be deleted while its type is in use by a dataset instance
        int modulesBeforeDelete = getModules().getResponseObject().size();
        Assert.assertEquals(HttpStatus.SC_CONFLICT, deleteModule("module2").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_CONFLICT, deleteModules().getResponseCode());
        Assert.assertEquals(modulesBeforeDelete, getModules().getResponseObject().size());
        // verify instance was created
        instances = getInstances().getResponseObject();
        Assert.assertEquals(1, instances.size());
        // verify the spec matches the expected one
        DatasetSpecification dataset1Spec = createType2Spec("dataset1", "datasetType2", description, props);
        Assert.assertEquals(spec2Summary(dataset1Spec), instances.get(0));
        // verify created instance info can be retrieved
        DatasetMeta datasetInfo = getInstanceObject("dataset1").getResponseObject();
        Assert.assertEquals(dataset1Spec, datasetInfo.getSpec());
        Assert.assertEquals(dataset1Spec.getType(), datasetInfo.getType().getName());
        // the type meta should include the 2 modules that have to be loaded to create the type's class,
        // listed in the order in which they must be loaded
        List<DatasetModuleMeta> modules = datasetInfo.getType().getModules();
        Assert.assertEquals(2, modules.size());
        DatasetTypeHandlerTest.verify(modules.get(0), "module1", TestModule1.class, ImmutableList.of("datasetType1"), Collections.<String>emptyList(), ImmutableList.of("module2"));
        DatasetTypeHandlerTest.verify(modules.get(1), "module2", TestModule2.class, ImmutableList.of("datasetType2"), ImmutableList.of("module1"), Collections.<String>emptyList());
        // try to retrieve a non-existent instance
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, getInstance("non-existing-dataset").getResponseCode());
        // cannot create instance with same name again
        Assert.assertEquals(HttpStatus.SC_CONFLICT, createInstance("dataset1", "datasetType2", props).getResponseCode());
        Assert.assertEquals(1, getInstances().getResponseObject().size());
        // cannot delete non-existing dataset instance
        Assert.assertEquals(HttpStatus.SC_NOT_FOUND, deleteInstance("non-existing-dataset").getResponseCode());
        Assert.assertEquals(1, getInstances().getResponseObject().size());
        // verify creation of dataset instance with null properties
        Assert.assertEquals(HttpStatus.SC_OK, createInstance("nullPropertiesTable", "datasetType2").getResponseCode());
        // since dataset instance description is not provided, we are using the description given by the dataset type
        DatasetSpecification nullPropertiesTableSpec = createType2Spec("nullPropertiesTable", "datasetType2", TestModule2.DESCRIPTION, DatasetProperties.EMPTY);
        DatasetSpecificationSummary actualSummary = getSummaryForInstance("nullPropertiesTable", getInstances().getResponseObject());
        Assert.assertEquals(spec2Summary(nullPropertiesTableSpec), actualSummary);
        // delete dataset instance
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("dataset1").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("nullPropertiesTable").getResponseCode());
        Assert.assertEquals(0, getInstances().getResponseObject().size());
        // create workflow local dataset instance
        DatasetProperties localDSProperties = DatasetProperties.builder().add("prop1", "val1").add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true").build();
        Assert.assertEquals(HttpStatus.SC_OK, createInstance("localDSInstance", "datasetType2", localDSProperties).getResponseCode());
        // workflow local datasets are excluded from listing, so getInstances should still return 0
        Assert.assertEquals(0, getInstances().getResponseObject().size());
        Assert.assertEquals(HttpStatus.SC_OK, deleteInstance("localDSInstance").getResponseCode());
        // delete dataset modules
        Assert.assertEquals(HttpStatus.SC_OK, deleteModule("module2").getResponseCode());
        Assert.assertEquals(HttpStatus.SC_OK, deleteModule("module1").getResponseCode());
    } finally {
        deleteInstance("dataset1");
        deleteInstance("nullPropertiesTable");
        deleteInstance("localDSInstance");
        deleteModule("module2");
        deleteModule("module1");
    }
}
Also used : DatasetModuleMeta(co.cask.cdap.proto.DatasetModuleMeta) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) HttpResponse(co.cask.common.http.HttpResponse) DatasetMeta(co.cask.cdap.proto.DatasetMeta) DatasetSpecificationSummary(co.cask.cdap.proto.DatasetSpecificationSummary) Test(org.junit.Test)
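
The comparisons above rely on a spec2Summary(...) helper from the test class that is not shown in this snippet. A minimal sketch of what such a conversion could look like, assuming DatasetSpecificationSummary offers a (name, type, description, properties) constructor mirroring the getters on DatasetSpecification used throughout these examples:

private DatasetSpecificationSummary spec2Summary(DatasetSpecification spec) {
    // Build the lightweight summary from the full specification.
    // The constructor arity here is an assumption; adjust it to the actual proto class.
    return new DatasetSpecificationSummary(spec.getName(), spec.getType(),
                                           spec.getDescription(), spec.getProperties());
}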

Example 17 with DatasetSpecificationSummary

use of co.cask.cdap.proto.DatasetSpecificationSummary in project cdap by caskdata.

From the class AbstractDatasetFrameworkTest, method testNamespaceInstanceIsolation:

@Test
@SuppressWarnings("ConstantConditions")
public void testNamespaceInstanceIsolation() throws Exception {
    DatasetFramework framework = getFramework();
    // create 2 namespaces
    NamespaceId namespace1 = new NamespaceId("ns1");
    NamespaceId namespace2 = new NamespaceId("ns2");
    namespaceAdmin.create(new NamespaceMeta.Builder().setName(namespace1).build());
    namespaceAdmin.create(new NamespaceMeta.Builder().setName(namespace2).build());
    namespacedLocationFactory.get(namespace1).mkdirs();
    namespacedLocationFactory.get(namespace2).mkdirs();
    // create 2 tables, one in each namespace. both tables have the same name.
    DatasetId table1ID = namespace1.dataset("table");
    DatasetId table2ID = namespace2.dataset("table");
    // have slightly different properties so that we can distinguish between them
    framework.addInstance(Table.class.getName(), table1ID, DatasetProperties.builder().add("tag", "table1").build());
    framework.addInstance(Table.class.getName(), table2ID, DatasetProperties.builder().add("tag", "table2").build());
    // perform some data operations to make sure they are not the same underlying table
    final Table table1 = framework.getDataset(table1ID, Maps.<String, String>newHashMap(), null);
    final Table table2 = framework.getDataset(table2ID, Maps.<String, String>newHashMap(), null);
    TransactionExecutor txnl = new DefaultTransactionExecutor(new MinimalTxSystemClient(), (TransactionAware) table1, (TransactionAware) table2);
    txnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            table1.put(Bytes.toBytes("rowkey"), Bytes.toBytes("column"), Bytes.toBytes("val1"));
            table2.put(Bytes.toBytes("rowkey"), Bytes.toBytes("column"), Bytes.toBytes("val2"));
        }
    });
    // check data is different, which means they are different underlying tables
    txnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            Assert.assertEquals("val1", Bytes.toString(table1.get(Bytes.toBytes("rowkey"), Bytes.toBytes("column"))));
            Assert.assertEquals("val2", Bytes.toString(table2.get(Bytes.toBytes("rowkey"), Bytes.toBytes("column"))));
        }
    });
    // check that listing all instances in a namespace only includes datasets from that namespace
    Collection<DatasetSpecificationSummary> specs = framework.getInstances(namespace1);
    Assert.assertEquals(1, specs.size());
    Assert.assertEquals("table1", specs.iterator().next().getProperties().get("tag"));
    specs = framework.getInstances(namespace2);
    Assert.assertEquals(1, specs.size());
    Assert.assertEquals("table2", specs.iterator().next().getProperties().get("tag"));
    // delete one instance and make sure the other still exists
    framework.deleteInstance(table1ID);
    Assert.assertFalse(framework.hasInstance(table1ID));
    Assert.assertTrue(framework.hasInstance(table2ID));
    // delete all instances in one namespace and make sure the other still exists
    framework.addInstance(Table.class.getName(), table1ID, DatasetProperties.EMPTY);
    framework.deleteAllInstances(namespace1);
    Assert.assertTrue(framework.hasInstance(table2ID));
    // delete one namespace and make sure the other still exists
    namespacedLocationFactory.get(namespace1).delete(true);
    Assert.assertTrue(framework.hasInstance(table2ID));
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) TransactionExecutor(org.apache.tephra.TransactionExecutor) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) DatasetSpecificationSummary(co.cask.cdap.proto.DatasetSpecificationSummary) InstanceConflictException(co.cask.cdap.api.dataset.InstanceConflictException) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) IOException(java.io.IOException) DatasetId(co.cask.cdap.proto.id.DatasetId) MinimalTxSystemClient(org.apache.tephra.inmemory.MinimalTxSystemClient) LineageWriterDatasetFramework(co.cask.cdap.data2.metadata.writer.LineageWriterDatasetFramework) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) DefaultTransactionExecutor(org.apache.tephra.DefaultTransactionExecutor) NamespaceId(co.cask.cdap.proto.id.NamespaceId) Test(org.junit.Test)
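
The getInstances(...) call returns a Collection<DatasetSpecificationSummary> with no guaranteed order, so when a namespace holds more than one dataset a caller typically scans the collection for the entry it cares about. A small hypothetical helper (not part of the test above) that uses only the getName() accessor already exercised in these examples:

private static DatasetSpecificationSummary findByName(Collection<DatasetSpecificationSummary> summaries, String datasetName) {
    // Linear scan; the collection holds one summary per dataset instance in the namespace.
    for (DatasetSpecificationSummary summary : summaries) {
        if (datasetName.equals(summary.getName())) {
            return summary;
        }
    }
    // Not found: the instance does not exist in this namespace.
    return null;
}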

Example 18 with DatasetSpecificationSummary

use of co.cask.cdap.proto.DatasetSpecificationSummary in project cdap by caskdata.

From the class HBaseDatasetMetricsReporter, method report:

private void report(Map<TableId, HBaseTableUtil.TableStats> tableStats) throws IOException {
    Map<String, String> reverseNamespaceMap = hBaseTableUtil.getHBaseToCDAPNamespaceMap();
    for (Map.Entry<TableId, HBaseTableUtil.TableStats> statEntry : tableStats.entrySet()) {
        String hbaseNamespace = statEntry.getKey().getNamespace();
        String cdapNamespace = reverseNamespaceMap.get(hbaseNamespace);
        // tables that map to the CDAP system namespace are ignored
        if (NamespaceId.SYSTEM.getNamespace().equals(cdapNamespace)) {
            continue;
        }
        String tableName = statEntry.getKey().getTableName();
        try {
            Collection<DatasetSpecificationSummary> instances = dsFramework.getInstances(new NamespaceId(cdapNamespace));
            for (DatasetSpecificationSummary spec : instances) {
                DatasetSpecification specification = dsFramework.getDatasetSpec(new DatasetId(cdapNamespace, spec.getName()));
                if (specification.isParent(tableName)) {
                    MetricsContext collector = metricsService.getContext(ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, cdapNamespace, Constants.Metrics.Tag.DATASET, spec.getName()));
                    collector.gauge("dataset.size.mb", statEntry.getValue().getTotalSizeMB());
                    break;
                }
            }
        } catch (DatasetManagementException | ServiceUnavailableException e) {
        // No op
        }
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) MetricsContext(co.cask.cdap.api.metrics.MetricsContext) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) ServiceUnavailableException(co.cask.cdap.common.ServiceUnavailableException) DatasetSpecificationSummary(co.cask.cdap.proto.DatasetSpecificationSummary) DatasetId(co.cask.cdap.proto.id.DatasetId) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) NamespaceId(co.cask.cdap.proto.id.NamespaceId) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap)
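
Because DatasetSpecificationSummary only carries lightweight metadata, the reporter resolves each summary to its full DatasetSpecification before checking table ownership with isParent(...). A defensive variant of that lookup, sketched under the assumption that getDatasetSpec(...) may return null for an instance deleted between the listing and the lookup (dsFramework, cdapNamespace, and tableName as in the method above):

for (DatasetSpecificationSummary summary : dsFramework.getInstances(new NamespaceId(cdapNamespace))) {
    DatasetSpecification spec = dsFramework.getDatasetSpec(new DatasetId(cdapNamespace, summary.getName()));
    // Skip instances whose full specification is no longer available.
    if (spec == null) {
        continue;
    }
    if (spec.isParent(tableName)) {
        // Found the dataset that owns this HBase table; emit its metrics and stop scanning.
        break;
    }
}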

Aggregations

DatasetSpecificationSummary (co.cask.cdap.proto.DatasetSpecificationSummary): 18 usages
DatasetId (co.cask.cdap.proto.id.DatasetId): 8 usages
Test (org.junit.Test): 6 usages
DatasetSpecification (co.cask.cdap.api.dataset.DatasetSpecification): 5 usages
IOException (java.io.IOException): 4 usages
Map (java.util.Map): 4 usages
DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException): 3 usages
DatasetProperties (co.cask.cdap.api.dataset.DatasetProperties): 3 usages
DatasetMeta (co.cask.cdap.proto.DatasetMeta): 3 usages
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 3 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 3 usages
File (java.io.File): 3 usages
MetricsContext (co.cask.cdap.api.metrics.MetricsContext): 2 usages
DatasetClient (co.cask.cdap.client.DatasetClient): 2 usages
TableId (co.cask.cdap.data2.util.TableId): 2 usages
DatasetModuleMeta (co.cask.cdap.proto.DatasetModuleMeta): 2 usages
ProgramId (co.cask.cdap.proto.id.ProgramId): 2 usages
HttpResponse (co.cask.common.http.HttpResponse): 2 usages
HashMap (java.util.HashMap): 2 usages
ArtifactSummary (co.cask.cdap.api.artifact.ArtifactSummary): 1 usage