
Example 16 with DatasetSpecification

Use of io.cdap.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class DatasetDefinitionRegistryWithDefaultModules, method testIndexedTableReconfigure.

@Test
public void testIndexedTableReconfigure() throws IncompatibleUpdateException {
    DatasetDefinition indexedTableDef = registry.get(IndexedTable.class.getName());
    Assert.assertTrue(indexedTableDef instanceof Reconfigurable);
    DatasetProperties props = TableProperties.builder()
        .setReadlessIncrementSupport(false)
        .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,b,c")
        .build();
    DatasetSpecification spec = indexedTableDef.configure("idxtb", props);
    DatasetProperties compat = TableProperties.builder()
        // turning readless increments on is ok
        .setReadlessIncrementSupport(true)
        .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "c,b,a")
        .build();
    spec = ((Reconfigurable) indexedTableDef).reconfigure("idxtb", compat, spec);
    DatasetProperties incompat = TableProperties.builder()
        .setReadlessIncrementSupport(true)
        // changing the set of index columns is not compatible
        .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,d")
        .build();
    try {
        ((Reconfigurable) indexedTableDef).reconfigure("idxtb", incompat, spec);
        Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
    // expected
    }
    incompat = TableProperties.builder()
        // turning readless increments off is not ok
        .setReadlessIncrementSupport(false)
        .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,b,c")
        .build();
    try {
        ((Reconfigurable) indexedTableDef).reconfigure("idxtb", incompat, spec);
        Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
    // expected
    }
}
Also used: DatasetProperties(io.cdap.cdap.api.dataset.DatasetProperties) DatasetSpecification(io.cdap.cdap.api.dataset.DatasetSpecification) DatasetDefinition(io.cdap.cdap.api.dataset.DatasetDefinition) Reconfigurable(io.cdap.cdap.api.dataset.Reconfigurable) IncompatibleUpdateException(io.cdap.cdap.api.dataset.IncompatibleUpdateException) Test(org.junit.Test)
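The compatibility rules this test pins down can be written as a small check inside a Reconfigurable definition. The sketch below is illustrative rather than IndexedTable's actual code: READLESS_KEY and INDEX_COLUMNS_KEY are placeholder names for the real property constants defined by the Table and IndexedTable APIs.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import io.cdap.cdap.api.dataset.DatasetProperties;
import io.cdap.cdap.api.dataset.DatasetSpecification;
import io.cdap.cdap.api.dataset.IncompatibleUpdateException;

// Minimal sketch of the check a Reconfigurable definition could run before
// accepting new properties. The two property keys are hypothetical stand-ins.
final class IndexedTableCompatibilityCheck {
    private static final String READLESS_KEY = "readless.increment"; // placeholder
    private static final String INDEX_COLUMNS_KEY = "index.columns"; // placeholder

    static void validate(DatasetSpecification currentSpec, DatasetProperties newProps)
        throws IncompatibleUpdateException {
        Map<String, String> oldP = currentSpec.getProperties();
        Map<String, String> newP = newProps.getProperties();
        // Enabling readless increments is safe; disabling them is not, because
        // existing cells may still hold non-aggregated increment writes.
        boolean wasReadless = Boolean.parseBoolean(oldP.getOrDefault(READLESS_KEY, "false"));
        boolean isReadless = Boolean.parseBoolean(newP.getOrDefault(READLESS_KEY, "false"));
        if (wasReadless && !isReadless) {
            throw new IncompatibleUpdateException("Readless increments cannot be disabled");
        }
        // Index columns may be reordered but not changed as a set, since the
        // existing index rows were built for the old columns.
        Set<String> oldCols = new HashSet<>(Arrays.asList(oldP.get(INDEX_COLUMNS_KEY).split(",")));
        Set<String> newCols = new HashSet<>(Arrays.asList(newP.get(INDEX_COLUMNS_KEY).split(",")));
        if (!oldCols.equals(newCols)) {
            throw new IncompatibleUpdateException("The set of index columns cannot change");
        }
    }
}

Note how reordering ("a,b,c" to "c,b,a") passes the set comparison while "a,d" fails it, matching the compatible and incompatible cases in the test above.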

Example 17 with DatasetSpecification

Use of io.cdap.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class DatasetDefinitionRegistryWithDefaultModules, method testCompositeDatasetReconfigure.

// tests that CompositeDatasetDefinition correctly delegates reconfigure() to its embedded types
@Test
public void testCompositeDatasetReconfigure() throws IncompatibleUpdateException {
    CompositeDatasetDefinition composite = new CompositeDatasetDefinition("composite", "pedantic", new PedanticDatasetDefinition("pedantic")) {

        @Override
        public Dataset getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) {
            return null;
        }
    };
    DatasetSpecification spec = composite.configure("nn", DatasetProperties.EMPTY);
    DatasetSpecification respec = composite.reconfigure("nn", DatasetProperties.EMPTY, spec);
    Assert.assertEquals(spec, respec);
    try {
        composite.reconfigure("nn", DatasetProperties.builder().add("immutable", "x").build(), spec);
        Assert.fail("reconfigure should have thrown exception");
    } catch (IncompatibleUpdateException e) {
    // expected
    }
}
Also used: DatasetSpecification(io.cdap.cdap.api.dataset.DatasetSpecification) DatasetContext(io.cdap.cdap.api.dataset.DatasetContext) Map(java.util.Map) IncompatibleUpdateException(io.cdap.cdap.api.dataset.IncompatibleUpdateException) Test(org.junit.Test)
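The delegation described in the comment above boils down to the following pattern. This is a hedged sketch, not the actual CompositeDatasetDefinition code: each embedded spec is reconfigured by the delegate that created it, and a delegate without its own compatibility rules can only accept an update whose configured spec is identical to the current one.

import io.cdap.cdap.api.dataset.DatasetDefinition;
import io.cdap.cdap.api.dataset.DatasetProperties;
import io.cdap.cdap.api.dataset.DatasetSpecification;
import io.cdap.cdap.api.dataset.IncompatibleUpdateException;
import io.cdap.cdap.api.dataset.Reconfigurable;

// Sketch of per-delegate reconfiguration inside a composite definition.
final class CompositeReconfigureSketch {
    static DatasetSpecification reconfigureEmbedded(DatasetDefinition<?, ?> delegate,
                                                    String name,
                                                    DatasetProperties newProps,
                                                    DatasetSpecification currentSpec)
        throws IncompatibleUpdateException {
        if (delegate instanceof Reconfigurable) {
            // The delegate enforces its own compatibility rules.
            return ((Reconfigurable) delegate).reconfigure(name, newProps, currentSpec);
        }
        // Otherwise the only safe "reconfiguration" is one that changes nothing.
        DatasetSpecification newSpec = delegate.configure(name, newProps);
        if (!newSpec.getProperties().equals(currentSpec.getProperties())) {
            throw new IncompatibleUpdateException(
                "Dataset type '" + currentSpec.getType() + "' does not support reconfiguration");
        }
        return newSpec;
    }
}

In the test above, the pedantic delegate is exactly such a strict case: reconfiguring with unchanged (empty) properties succeeds, while adding the "immutable" property is rejected.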

Example 18 with DatasetSpecification

Use of io.cdap.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class TestDatasetDefinition, method getDataset.

@Override
public TestDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException {
    DatasetSpecification kvTableSpec = spec.getSpecification("kv");
    KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, DatasetDefinition.NO_ARGUMENTS, classLoader);
    return new TestDataset(spec, table, arguments);
}
Also used: KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) DatasetSpecification(io.cdap.cdap.api.dataset.DatasetSpecification)
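For context, the "kv" spec that getDataset() retrieves must be nested when the dataset is configured. Below is a minimal sketch of the matching configure(), assuming the DatasetSpecification builder's properties() and datasets() methods and the same tableDef field used above.

@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
    // Nest the embedded KeyValueTable spec under the name "kv", the key that
    // getDataset() later passes to spec.getSpecification().
    return DatasetSpecification.builder(instanceName, TestDataset.class.getName())
        .properties(properties.getProperties())
        .datasets(tableDef.configure("kv", properties))
        .build();
}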

Example 19 with DatasetSpecification

Use of io.cdap.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class DatasetsUtilTest, method testFix.

private void testFix(String type, DatasetProperties props) {
    DatasetDefinition def = DatasetFrameworkTestUtil.getDatasetDefinition(inMemoryDatasetFramework, NamespaceId.DEFAULT, type);
    Assert.assertNotNull(def);
    DatasetSpecification spec = def.configure("nn", props);
    Map<String, String> originalProperties = DatasetsUtil.fixOriginalProperties(spec).getOriginalProperties();
    Assert.assertEquals(props.getProperties(), originalProperties);
}
Also used: DatasetSpecification(io.cdap.cdap.api.dataset.DatasetSpecification) DatasetDefinition(io.cdap.cdap.api.dataset.DatasetDefinition)
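A typical call of this helper could look as follows; the type names assume the default module registrations, and the property values are made up for illustration.

// Hypothetical invocations: the original properties should survive configure()
// for both a plain table and an indexed table.
testFix("table", DatasetProperties.builder().add("key", "value").build());
testFix("indexedTable",
        DatasetProperties.builder().add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "a,b,c").build());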

Example 20 with DatasetSpecification

Use of io.cdap.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class HBaseTableTest, method testCachedEncodedTransaction.

@Test
public void testCachedEncodedTransaction() throws Exception {
    String tableName = "testEncodedTxTable";
    DatasetProperties props = DatasetProperties.EMPTY;
    getTableAdmin(CONTEXT1, tableName, props).create();
    DatasetSpecification tableSpec = DatasetSpecification.builder(tableName, HBaseTable.class.getName()).build();
    // use a transaction codec that counts the number of times encode() is called
    final AtomicInteger encodeCount = new AtomicInteger();
    final TransactionCodec codec = new TransactionCodec() {

        @Override
        public byte[] encode(Transaction tx) throws IOException {
            encodeCount.incrementAndGet();
            return super.encode(tx);
        }
    };
    // use a table util that creates an HTable that validates the encoded tx on each get
    final AtomicReference<Transaction> txRef = new AtomicReference<>();
    HBaseTableUtil util = new DelegatingHBaseTableUtil(hBaseTableUtil) {

        @Override
        public org.apache.hadoop.hbase.client.Table createTable(Configuration conf, TableId tableId) throws IOException {
            org.apache.hadoop.hbase.client.Table table = super.createTable(conf, tableId);
            return new DelegatingTable(table) {

                @Override
                public Result get(org.apache.hadoop.hbase.client.Get get) throws IOException {
                    Assert.assertEquals(txRef.get().getTransactionId(), codec.decode(get.getAttribute(TxConstants.TX_OPERATION_ATTRIBUTE_KEY)).getTransactionId());
                    return super.get(get);
                }

                @Override
                public Result[] get(List<org.apache.hadoop.hbase.client.Get> gets) throws IOException {
                    for (org.apache.hadoop.hbase.client.Get get : gets) {
                        Assert.assertEquals(txRef.get().getTransactionId(), codec.decode(get.getAttribute(TxConstants.TX_OPERATION_ATTRIBUTE_KEY)).getTransactionId());
                    }
                    return super.get(gets);
                }

                @Override
                public ResultScanner getScanner(org.apache.hadoop.hbase.client.Scan scan) throws IOException {
                    Assert.assertEquals(txRef.get().getTransactionId(), codec.decode(scan.getAttribute(TxConstants.TX_OPERATION_ATTRIBUTE_KEY)).getTransactionId());
                    return super.getScanner(scan);
                }
            };
        }
    };
    HBaseTable table = new HBaseTable(CONTEXT1, tableSpec, Collections.<String, String>emptyMap(), cConf, TEST_HBASE.getConfiguration(), util, codec);
    DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
    // test all operations: only the first one encodes
    Transaction tx = txSystemClient.startShort();
    txRef.set(tx);
    table.startTx(tx);
    table.put(b("row1"), b("col1"), b("val1"));
    Assert.assertEquals(0, encodeCount.get());
    table.get(b("row"));
    Assert.assertEquals(1, encodeCount.get());
    table.get(ImmutableList.of(new Get("a"), new Get("b")));
    Assert.assertEquals(1, encodeCount.get());
    Scanner scanner = table.scan(new Scan(null, null));
    Assert.assertEquals(1, encodeCount.get());
    scanner.close();
    table.increment(b("z"), b("z"), 0L);
    Assert.assertEquals(1, encodeCount.get());
    table.commitTx();
    table.postTxCommit();
    // test that for the next tx, we encode again
    tx = txSystemClient.startShort();
    txRef.set(tx);
    table.startTx(tx);
    table.get(b("row"));
    Assert.assertEquals(2, encodeCount.get());
    table.commitTx();
    // test that we encode again, even if postTxCommit was not called
    tx = txSystemClient.startShort();
    txRef.set(tx);
    table.startTx(tx);
    table.get(b("row"));
    Assert.assertEquals(3, encodeCount.get());
    table.commitTx();
    table.rollbackTx();
    // test that rollback does not encode the tx
    Assert.assertEquals(3, encodeCount.get());
    // test that we encode again if the previous tx rolled back
    tx = txSystemClient.startShort();
    txRef.set(tx);
    table.startTx(tx);
    table.get(b("row"));
    Assert.assertEquals(4, encodeCount.get());
    table.commitTx();
    table.close();
    Assert.assertEquals(4, encodeCount.get());
}
Also used: TableId(io.cdap.cdap.data2.util.TableId) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scanner(io.cdap.cdap.api.dataset.table.Scanner) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Configuration(org.apache.hadoop.conf.Configuration) CConfiguration(io.cdap.cdap.common.conf.CConfiguration) Result(org.apache.hadoop.hbase.client.Result) DelegatingTable(io.cdap.cdap.data2.util.hbase.DelegatingTable) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) DatasetProperties(io.cdap.cdap.api.dataset.DatasetProperties) DatasetSpecification(io.cdap.cdap.api.dataset.DatasetSpecification) AtomicReference(java.util.concurrent.atomic.AtomicReference) HBaseTableUtil(io.cdap.cdap.data2.util.hbase.HBaseTableUtil) Transaction(org.apache.tephra.Transaction) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TransactionCodec(org.apache.tephra.TransactionCodec) Get(io.cdap.cdap.api.dataset.table.Get) Scan(io.cdap.cdap.api.dataset.table.Scan) BufferingTableTest(io.cdap.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)
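The assertions pin down one encode() per transaction, reset by each startTx(): in effect, the encoded bytes are cached lazily. A sketch of that pattern follows, with illustrative field and method names rather than HBaseTable's actual members.

// Inside a table implementation that attaches the transaction to every read:
private Transaction tx;
private byte[] encodedTx; // cached result of codec.encode(tx)

public void startTx(Transaction tx) {
    this.tx = tx;
    this.encodedTx = null; // new transaction: any cached encoding is stale
}

private byte[] getEncodedTx() throws IOException {
    if (encodedTx == null) {
        // the first get or scan of this transaction pays the encoding cost
        encodedTx = codec.encode(tx);
    }
    // every later operation in the same transaction reuses the cached bytes
    return encodedTx;
}

In this sketch, rollback never touches the cache, which is consistent with the count staying at 3 across rollbackTx() and only rising to 4 when the next transaction issues its first get.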

Aggregations

DatasetSpecification (io.cdap.cdap.api.dataset.DatasetSpecification) 62
DatasetId (io.cdap.cdap.proto.id.DatasetId) 15
DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties) 14
Test (org.junit.Test) 14
DatasetDefinition (io.cdap.cdap.api.dataset.DatasetDefinition) 12
IncompatibleUpdateException (io.cdap.cdap.api.dataset.IncompatibleUpdateException) 11
DatasetAdmin (io.cdap.cdap.api.dataset.DatasetAdmin) 10
DatasetManagementException (io.cdap.cdap.api.dataset.DatasetManagementException) 9
IOException (java.io.IOException) 9
AbstractDatasetDefinition (io.cdap.cdap.api.dataset.lib.AbstractDatasetDefinition) 7
DatasetContext (io.cdap.cdap.api.dataset.DatasetContext) 6
DatasetTypeMeta (io.cdap.cdap.proto.DatasetTypeMeta) 6
TableId (io.cdap.cdap.data2.util.TableId) 5
Map (java.util.Map) 5
POST (javax.ws.rs.POST) 5
Path (javax.ws.rs.Path) 5
Reconfigurable (io.cdap.cdap.api.dataset.Reconfigurable) 4
Updatable (io.cdap.cdap.api.dataset.Updatable) 4
KeyValueTable (io.cdap.cdap.api.dataset.lib.KeyValueTable) 4
DatasetModuleMeta (io.cdap.cdap.proto.DatasetModuleMeta) 4