Example 26 with Get

use of co.cask.cdap.api.dataset.table.Get in project cdap by caskdata.

the class KeyValueTable method readAll.

/**
   * Reads the values for an array of given keys.
   *
   * @param keys the keys to be read
   * @return a map of the stored values, keyed by key
   */
@ReadOnly
public Map<byte[], byte[]> readAll(byte[][] keys) {
    List<Get> gets = new ArrayList<>(keys.length);
    for (byte[] key : keys) {
        gets.add(new Get(key).add(KEY_COLUMN));
    }
    List<Row> results = table.get(gets);
    Map<byte[], byte[]> values = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (Row row : results) {
        if (row.get(KEY_COLUMN) != null) {
            values.put(row.getRow(), row.get(KEY_COLUMN));
        }
    }
    return values;
}
Also used : Get(co.cask.cdap.api.dataset.table.Get) ArrayList(java.util.ArrayList) Row(co.cask.cdap.api.dataset.table.Row) TreeMap(java.util.TreeMap) ReadOnly(co.cask.cdap.api.annotation.ReadOnly)
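
For context, a rough sketch of how readAll might be called from program code that has a KeyValueTable instance available. The dataset name, the getContext() lookup, and the keys below are illustrative assumptions, not taken from the example above:

// Illustrative sketch only: assumes a KeyValueTable named "kvStore" has been deployed and can be
// obtained from the program's DatasetContext; the key names are made up for the example.
KeyValueTable kvStore = getContext().getDataset("kvStore");
byte[][] keys = new byte[][] { Bytes.toBytes("user:1"), Bytes.toBytes("user:2") };
// readAll issues a single multi-Get and returns only the keys that were actually found
Map<byte[], byte[]> found = kvStore.readAll(keys);
for (Map.Entry<byte[], byte[]> entry : found.entrySet()) {
    System.out.println(Bytes.toString(entry.getKey()) + " -> " + Bytes.toString(entry.getValue()));
}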

Example 27 with Get

use of co.cask.cdap.api.dataset.table.Get in project cdap by caskdata.

the class TableTest method testMetrics.

private void testMetrics(boolean readless) throws Exception {
    final String tableName = "survive";
    DatasetProperties props = TableProperties.builder().setReadlessIncrementSupport(readless).build();
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, props);
    admin.create();
    Table table = getTable(CONTEXT1, tableName, props);
    final Map<String, Long> metrics = Maps.newHashMap();
    ((MeteredDataset) table).setMetricsCollector(new MetricsCollector() {

        @Override
        public void increment(String metricName, long value) {
            Long old = metrics.get(metricName);
            metrics.put(metricName, old == null ? value : old + value);
        }

        @Override
        public void gauge(String metricName, long value) {
            metrics.put(metricName, value);
        }
    });
    // Note that we don't need to finish tx for metrics to be reported
    Transaction tx0 = txClient.startShort();
    ((TransactionAware) table).startTx(tx0);
    int writes = 0;
    int reads = 0;
    table.put(new Put(R1, C1, V1));
    verifyDatasetMetrics(metrics, ++writes, reads);
    table.compareAndSwap(R1, C1, V1, V2);
    verifyDatasetMetrics(metrics, ++writes, ++reads);
    // note: will not write anything as expected value will not match
    table.compareAndSwap(R1, C1, V1, V2);
    verifyDatasetMetrics(metrics, writes, ++reads);
    table.increment(new Increment(R2, C2, 1L));
    if (readless) {
        verifyDatasetMetrics(metrics, ++writes, reads);
    } else {
        verifyDatasetMetrics(metrics, ++writes, ++reads);
    }
    table.incrementAndGet(new Increment(R2, C2, 1L));
    verifyDatasetMetrics(metrics, ++writes, ++reads);
    table.get(new Get(R1, C1, V1));
    verifyDatasetMetrics(metrics, writes, ++reads);
    Scanner scanner = table.scan(new Scan(null, null));
    while (scanner.next() != null) {
        verifyDatasetMetrics(metrics, writes, ++reads);
    }
    table.delete(new Delete(R1, C1, V1));
    verifyDatasetMetrics(metrics, ++writes, reads);
    // drop table
    admin.drop();
}
Also used : MetricsCollector(co.cask.cdap.api.metrics.MetricsCollector) Delete(co.cask.cdap.api.dataset.table.Delete) Scanner(co.cask.cdap.api.dataset.table.Scanner) Table(co.cask.cdap.api.dataset.table.Table) HBaseTable(co.cask.cdap.data2.dataset2.lib.table.hbase.HBaseTable) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetAdmin(co.cask.cdap.api.dataset.DatasetAdmin) Put(co.cask.cdap.api.dataset.table.Put) Transaction(org.apache.tephra.Transaction) TransactionAware(org.apache.tephra.TransactionAware) Increment(co.cask.cdap.api.dataset.table.Increment) Get(co.cask.cdap.api.dataset.table.Get) MeteredDataset(co.cask.cdap.api.dataset.metrics.MeteredDataset) Scan(co.cask.cdap.api.dataset.table.Scan)
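
The verifyDatasetMetrics helper is not part of the snippet above. A minimal sketch of what it plausibly asserts against the collected map, assuming the collector is keyed by dataset read/write metric names; the metric name strings here are assumptions for illustration only:

// Hypothetical helper, for illustration; the metric key names below are assumed, not taken from the test.
private void verifyDatasetMetrics(Map<String, Long> metrics, long expectedWrites, long expectedReads) {
    Assert.assertEquals(Long.valueOf(expectedWrites), metrics.getOrDefault("dataset.store.writes", 0L));
    Assert.assertEquals(Long.valueOf(expectedReads), metrics.getOrDefault("dataset.store.reads", 0L));
}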

Example 28 with Get

use of co.cask.cdap.api.dataset.table.Get in project cdap by caskdata.

the class TableTest method testBatchWritableKeyIsIgnored.

@Test
public void testBatchWritableKeyIsIgnored() throws Exception {
    String tableName = "batchWritableTable";
    getTableAdmin(CONTEXT1, tableName).create();
    try {
        // write three times in a transaction, with keys null, 'q', and 'a', always with a Put whose row is "a"
        Transaction tx = txClient.startShort();
        Table table = getTable(CONTEXT1, tableName);
        ((TransactionAware) table).startTx(tx);
        table.write(null, new Put("a").add("x", "x"));
        table.write(new byte[] { 'q' }, new Put("a").add("y", "y"));
        table.write(new byte[] { 'a' }, new Put("a").add("z", "z"));
        Assert.assertTrue(txClient.canCommit(tx, ((TransactionAware) table).getTxChanges()));
        ((TransactionAware) table).commitTx();
        Assert.assertTrue(txClient.commit(tx));
        // validate that all writes went to row a, and row q was not written
        tx = txClient.startShort();
        ((TransactionAware) table).startTx(tx);
        Assert.assertTrue(table.get(new Get("q")).isEmpty());
        Row row = table.get(new Get("a"));
        Assert.assertEquals(3, row.getColumns().size());
        Assert.assertEquals("x", row.getString("x"));
        Assert.assertEquals("y", row.getString("y"));
        Assert.assertEquals("z", row.getString("z"));
        ((TransactionAware) table).commitTx();
        txClient.abort(tx);
    } finally {
        getTableAdmin(CONTEXT1, tableName).drop();
    }
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) HBaseTable(co.cask.cdap.data2.dataset2.lib.table.hbase.HBaseTable) Transaction(org.apache.tephra.Transaction) TransactionAware(org.apache.tephra.TransactionAware) Get(co.cask.cdap.api.dataset.table.Get) Row(co.cask.cdap.api.dataset.table.Row) Put(co.cask.cdap.api.dataset.table.Put) Test(org.junit.Test)
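
The behaviour under test is that Table's BatchWritable-style write(key, Put) ignores the supplied key and uses the Put's own row. A minimal sketch of that contract in isolation, inside a transaction as above; the row and column names are made up:

// Sketch only: the first argument to write() is ignored; the row inside the Put wins.
table.write(Bytes.toBytes("ignored-key"), new Put("real-row").add("col", "val"));
// Only "real-row" is written; "ignored-key" never becomes a row.
Assert.assertTrue(table.get(new Get("ignored-key")).isEmpty());
Assert.assertEquals("val", table.get(new Get("real-row")).getString("col"));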

Example 29 with Get

use of co.cask.cdap.api.dataset.table.Get in project cdap by caskdata.

the class TableTest method testReadOwnWrite.

@Test
public void testReadOwnWrite() throws Exception {
    final String tableName = "readOwnWrite";
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName);
    admin.create();
    Table table = getTable(CONTEXT1, tableName);
    Transaction tx = txClient.startShort();
    try {
        ((TransactionAware) table).startTx(tx);
        // Write some data, then flush it by calling commitTx.
        table.put(new Put(R1, C1, V1));
        ((TransactionAware) table).commitTx();
        // Try to read the previous write.
        Assert.assertArrayEquals(V1, table.get(new Get(R1, C1)).get(C1));
    } finally {
        txClient.commit(tx);
    }
    // drop table
    admin.drop();
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) HBaseTable(co.cask.cdap.data2.dataset2.lib.table.hbase.HBaseTable) Transaction(org.apache.tephra.Transaction) TransactionAware(org.apache.tephra.TransactionAware) Get(co.cask.cdap.api.dataset.table.Get) DatasetAdmin(co.cask.cdap.api.dataset.DatasetAdmin) Put(co.cask.cdap.api.dataset.table.Put) Test(org.junit.Test)
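
Outside of low-level tests, the explicit startTx/commitTx calls are normally hidden behind a TransactionExecutor. A rough sketch of reading back one's own write in that style, assuming txExecutor is a TransactionExecutor already wired to the table's TransactionAware; note that, unlike the test above, the write here is still in the client-side buffer rather than flushed by an explicit commitTx:

// Sketch only: txExecutor construction is omitted and assumed to include (TransactionAware) table.
txExecutor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
        table.put(new Put(R1, C1, V1));
        // within the same transaction, the write is immediately visible to reads
        Assert.assertArrayEquals(V1, table.get(new Get(R1, C1)).get(C1));
    }
});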

Example 30 with Get

use of co.cask.cdap.api.dataset.table.Get in project cdap by caskdata.

the class TableTest method testMultiGetWithEmpty.

@Test
public void testMultiGetWithEmpty() throws Exception {
    DatasetAdmin admin = getTableAdmin(CONTEXT1, MY_TABLE);
    admin.create();
    try {
        Transaction tx = txClient.startShort();
        Table myTable = getTable(CONTEXT1, MY_TABLE);
        ((TransactionAware) myTable).startTx(tx);
        myTable.put(R1, C1, V1);
        myTable.put(R1, C2, V2);
        myTable.put(R1, C3, V3);
        myTable.put(R1, C4, V4);
        List<Get> gets = new ArrayList<>();
        // the second and fourth Gets request 0 columns. This tests the correctness of the batch-get
        // logic when empty Gets are mixed in with regular ones.
        gets.add(new Get(R1, C1));
        gets.add(new Get(R1, ImmutableList.<byte[]>of()));
        gets.add(new Get(R1, C2, C3));
        gets.add(new Get(R1, ImmutableList.<byte[]>of()));
        gets.add(new Get(R1, C4));
        List<Row> rows = myTable.get(gets);
        // first off, the second and fourth Gets (indexes 1 and 3) should yield empty rows
        Assert.assertEquals(0, rows.get(1).getColumns().size());
        Assert.assertEquals(0, rows.get(3).getColumns().size());
        // verify the results of the other Gets
        Assert.assertEquals(1, rows.get(0).getColumns().size());
        Assert.assertArrayEquals(V1, rows.get(0).get(C1));
        Assert.assertEquals(2, rows.get(2).getColumns().size());
        Assert.assertArrayEquals(V2, rows.get(2).get(C2));
        Assert.assertArrayEquals(V3, rows.get(2).get(C3));
        Assert.assertEquals(1, rows.get(4).getColumns().size());
        Assert.assertArrayEquals(V4, rows.get(4).get(C4));
        txClient.abort(tx);
    } finally {
        admin.drop();
    }
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) HBaseTable(co.cask.cdap.data2.dataset2.lib.table.hbase.HBaseTable) Transaction(org.apache.tephra.Transaction) TransactionAware(org.apache.tephra.TransactionAware) Get(co.cask.cdap.api.dataset.table.Get) ArrayList(java.util.ArrayList) DatasetAdmin(co.cask.cdap.api.dataset.DatasetAdmin) Row(co.cask.cdap.api.dataset.table.Row) Test(org.junit.Test)
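
The contract exercised here is that Table.get(List&lt;Get&gt;) returns results positionally aligned with the request list, one Row per Get, even when some Gets select zero columns. A minimal sketch of that contract, reusing the constants from the test above:

// Sketch only: results come back in request order, so an empty Get yields an empty Row at its index.
List<Get> positionalGets = ImmutableList.of(
    new Get(R1, C1),                          // one column requested
    new Get(R1, ImmutableList.<byte[]>of()),  // zero columns requested
    new Get(R1));                             // no column filter: whole row
List<Row> positionalRows = myTable.get(positionalGets);
Assert.assertEquals(positionalGets.size(), positionalRows.size());
Assert.assertTrue(positionalRows.get(1).getColumns().isEmpty());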

Aggregations

Get (co.cask.cdap.api.dataset.table.Get) 31
Table (co.cask.cdap.api.dataset.table.Table) 17
Row (co.cask.cdap.api.dataset.table.Row) 16
Test (org.junit.Test) 15
Put (co.cask.cdap.api.dataset.table.Put) 13
TransactionAware (org.apache.tephra.TransactionAware) 9
DatasetAdmin (co.cask.cdap.api.dataset.DatasetAdmin) 8
Transaction (org.apache.tephra.Transaction) 8
HBaseTable (co.cask.cdap.data2.dataset2.lib.table.hbase.HBaseTable) 7
KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable) 6
TransactionExecutor (org.apache.tephra.TransactionExecutor) 6
NotFoundException (co.cask.cdap.common.NotFoundException) 5
IOException (java.io.IOException) 4
ReadOnly (co.cask.cdap.api.annotation.ReadOnly) 3
DatasetProperties (co.cask.cdap.api.dataset.DatasetProperties) 3
Delete (co.cask.cdap.api.dataset.table.Delete) 3
Scanner (co.cask.cdap.api.dataset.table.Scanner) 3
ProgramSchedule (co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) 3
ApplicationManager (co.cask.cdap.test.ApplicationManager) 3
FlowManager (co.cask.cdap.test.FlowManager) 3