Example 1 with Increment

Use of io.cdap.cdap.api.dataset.table.Increment in project cdap by caskdata.

From class TableTest, method testMetrics.

private void testMetrics(boolean readless) throws Exception {
    final String tableName = "survive";
    DatasetProperties props = TableProperties.builder().setReadlessIncrementSupport(readless).build();
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, props);
    admin.create();
    try (Table table = getTable(CONTEXT1, tableName, props)) {
        final Map<String, Long> metrics = Maps.newHashMap();
        ((MeteredDataset) table).setMetricsCollector(new MetricsCollector() {

            @Override
            public void increment(String metricName, long value) {
                Long old = metrics.get(metricName);
                metrics.put(metricName, old == null ? value : old + value);
            }

            @Override
            public void gauge(String metricName, long value) {
                metrics.put(metricName, value);
            }
        });
        // Note that we don't need to finish tx for metrics to be reported
        Transaction tx0 = txClient.startShort();
        ((TransactionAware) table).startTx(tx0);
        int writes = 0;
        int reads = 0;
        table.put(new Put(R1, C1, V1));
        verifyDatasetMetrics(metrics, ++writes, reads);
        table.compareAndSwap(R1, C1, V1, V2);
        verifyDatasetMetrics(metrics, ++writes, ++reads);
        // note: will not write anything as expected value will not match
        table.compareAndSwap(R1, C1, V1, V2);
        verifyDatasetMetrics(metrics, writes, ++reads);
        table.increment(new Increment(R2, C2, 1L));
        if (readless) {
            verifyDatasetMetrics(metrics, ++writes, reads);
        } else {
            verifyDatasetMetrics(metrics, ++writes, ++reads);
        }
        table.incrementAndGet(new Increment(R2, C2, 1L));
        verifyDatasetMetrics(metrics, ++writes, ++reads);
        table.get(new Get(R1, C1, V1));
        verifyDatasetMetrics(metrics, writes, ++reads);
        // each row returned by the scanner counts as one read; close the scanner when done
        try (Scanner scanner = table.scan(new Scan(null, null))) {
            while (scanner.next() != null) {
                verifyDatasetMetrics(metrics, writes, ++reads);
            }
        }
        table.delete(new Delete(R1, C1, V1));
        verifyDatasetMetrics(metrics, ++writes, reads);
    } finally {
        // drop table
        admin.drop();
    }
}
Also used: MetricsCollector (io.cdap.cdap.api.metrics.MetricsCollector), Delete (io.cdap.cdap.api.dataset.table.Delete), Scanner (io.cdap.cdap.api.dataset.table.Scanner), Table (io.cdap.cdap.api.dataset.table.Table), HBaseTable (io.cdap.cdap.data2.dataset2.lib.table.hbase.HBaseTable), DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties), DatasetAdmin (io.cdap.cdap.api.dataset.DatasetAdmin), Put (io.cdap.cdap.api.dataset.table.Put), Transaction (org.apache.tephra.Transaction), TransactionAware (org.apache.tephra.TransactionAware), Increment (io.cdap.cdap.api.dataset.table.Increment), Get (io.cdap.cdap.api.dataset.table.Get), MeteredDataset (io.cdap.cdap.api.dataset.metrics.MeteredDataset), Scan (io.cdap.cdap.api.dataset.table.Scan)
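The snippet calls a verifyDatasetMetrics helper that is defined elsewhere in TableTest. A minimal sketch of what it plausibly asserts, reconstructed from its call sites above; the metric key names here are assumptions, not confirmed CDAP constants:

private void verifyDatasetMetrics(Map<String, Long> metrics, long expectedWrites, long expectedReads) {
    // Hypothetical sketch, not the actual CDAP helper. MeteredDataset reports per-op
    // counters through the MetricsCollector registered above; the key names are assumed.
    Assert.assertEquals(expectedWrites, metrics.getOrDefault("dataset.store.writes", 0L).longValue());
    Assert.assertEquals(expectedReads, metrics.getOrDefault("dataset.store.reads", 0L).longValue());
}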

Example 2 with Increment

Use of io.cdap.cdap.api.dataset.table.Increment in project cdap by caskdata.

From class CharCountProgram, method run.

@Override
public void run(final JavaSparkExecutionContext sec) throws Exception {
    JavaSparkContext sc = new JavaSparkContext();
    // Verify the codec is being set
    Preconditions.checkArgument("org.apache.spark.io.LZFCompressionCodec".equals(sc.getConf().get("spark.io.compression.codec")));
    // read the dataset
    JavaPairRDD<byte[], String> inputData = sec.fromDataset("keys");
    // create a new RDD with the same key but with a new value which is the length of the string
    final JavaPairRDD<byte[], byte[]> stringLengths = inputData.mapToPair(new PairFunction<Tuple2<byte[], String>, byte[], byte[]>() {

        @Override
        public Tuple2<byte[], byte[]> call(Tuple2<byte[], String> stringTuple2) throws Exception {
            return new Tuple2<>(stringTuple2._1(), Bytes.toBytes(stringTuple2._2().length()));
        }
    });
    // write a total count to a table (that emits a metric we can validate in the test case)
    sec.execute(new TxRunnable() {

        @Override
        public void run(DatasetContext context) throws Exception {
            long count = stringLengths.count();
            Table totals = context.getDataset("totals");
            totals.increment(new Increment("total").add("total", count));
            // write the per-key character counts to the "count" dataset
            sec.saveAsDataset(stringLengths, "count");
        }
    });
}
Also used: Table (io.cdap.cdap.api.dataset.table.Table), Tuple2 (scala.Tuple2), TxRunnable (io.cdap.cdap.api.TxRunnable), Increment (io.cdap.cdap.api.dataset.table.Increment), JavaSparkContext (org.apache.spark.api.java.JavaSparkContext), DatasetContext (io.cdap.cdap.api.data.DatasetContext)
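Note that Increment exposes a fluent add method, so several column deltas on one row can be batched into a single mutation, as the "total" update above already hints. A minimal sketch of that pattern; the dataset, row, column, and variable names here are invented for illustration:

// Hypothetical usage inside a TxRunnable: bump two counters on one row with a single mutation.
// context is the DatasetContext passed to TxRunnable.run; "stats" is an assumed dataset name.
long totalChars = 42L;  // placeholder values
long totalLines = 7L;
Table stats = context.getDataset("stats");
stats.increment(new Increment("totals")
    .add("chars", totalChars)
    .add("lines", totalLines));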

Example 3 with Increment

Use of io.cdap.cdap.api.dataset.table.Increment in project cdap by caskdata.

From class TableTest, method testMultiIncrementWithFlush.

private void testMultiIncrementWithFlush(boolean readless) throws Exception {
    final String tableName = "incrFlush";
    DatasetProperties props = TableProperties.builder().setReadlessIncrementSupport(readless).build();
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, props);
    admin.create();
    Map<String, String> args = new HashMap<>();
    if (readless) {
        args.put(HBaseTable.SAFE_INCREMENTS, "true");
    }
    try (Table table = getTable(CONTEXT1, tableName, props, args);
        Table table2 = getTable(CONTEXT1, tableName, props, args)) {
        Transaction tx = txClient.startShort();
        try {
            ((TransactionAware) table).startTx(tx);
            // Write an increment, then flush it by calling commitTx.
            table.increment(new Increment(R1, C1, 10L));
            ((TransactionAware) table).commitTx();
        } finally {
            // invalidate the tx, leaving an excluded write in the table
            txClient.invalidate(tx.getTransactionId());
        }
        // validate the first write is not visible
        tx = txClient.startShort();
        try {
            ((TransactionAware) table).startTx(tx);
            Assert.assertNull(table.get(new Get(R1, C1)).getLong(C1));
        } finally {
            txClient.commitOrThrow(tx);
        }
        tx = txClient.startShort();
        try {
            ((TransactionAware) table).startTx(tx);
            // Write an increment, then flush it by calling commitTx.
            table.increment(new Increment(R1, C1, 1L));
            ((TransactionAware) table).commitTx();
            // Write another increment, from both table instances
            table.increment(new Increment(R1, C1, 1L));
            if (readless) {
                ((TransactionAware) table2).startTx(tx);
                table2.increment(new Increment(R1, C1, 1L));
                ((TransactionAware) table2).commitTx();
            }
            ((TransactionAware) table).commitTx();
        } finally {
            txClient.commitOrThrow(tx);
        }
        // validate all increments are visible to a new tx
        tx = txClient.startShort();
        try {
            ((TransactionAware) table).startTx(tx);
            Assert.assertEquals(Long.valueOf(readless ? 3L : 2L), table.get(new Get(R1, C1)).getLong(C1));
        } finally {
            txClient.commitOrThrow(tx);
        }
    } finally {
        // drop table
        admin.drop();
    }
}
Also used: Table (io.cdap.cdap.api.dataset.table.Table), HBaseTable (io.cdap.cdap.data2.dataset2.lib.table.hbase.HBaseTable), Transaction (org.apache.tephra.Transaction), HashMap (java.util.HashMap), TransactionAware (org.apache.tephra.TransactionAware), DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties), Increment (io.cdap.cdap.api.dataset.table.Increment), Get (io.cdap.cdap.api.dataset.table.Get), DatasetAdmin (io.cdap.cdap.api.dataset.DatasetAdmin)
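For reference, the read-back pattern this test repeats, condensed into one unit and using only calls that already appear in the snippet: start a short transaction, attach the table, read the counter with Get/getLong, then commit.

// Read the current counter value in its own short transaction (sketch of the pattern above).
Transaction readTx = txClient.startShort();
try {
    ((TransactionAware) table).startTx(readTx);
    Long value = table.get(new Get(R1, C1)).getLong(C1);  // null if the cell was never written
    System.out.println("counter = " + value);
} finally {
    txClient.commitOrThrow(readTx);
}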

Example 4 with Increment

Use of io.cdap.cdap.api.dataset.table.Increment in project cdap by caskdata.

From class MetricsTableOnTable, method increment (single-row overload).

@Override
public void increment(byte[] row, Map<byte[], Long> increments) {
    // Fold the column -> delta map into a single Increment mutation for this row,
    // then apply it with one call to the underlying table.
    Increment increment = new Increment(row);
    for (Map.Entry<byte[], Long> columnUpdate : increments.entrySet()) {
        increment.add(columnUpdate.getKey(), columnUpdate.getValue());
    }
    table.increment(increment);
}
Also used: Increment (io.cdap.cdap.api.dataset.table.Increment), Map (java.util.Map), NavigableMap (java.util.NavigableMap), SortedMap (java.util.SortedMap)
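A hypothetical call site for this overload. Since byte[] does not implement value-based equals/hashCode, a TreeMap with a byte-array comparator is the safe way to build the column map; Bytes.BYTES_COMPARATOR is assumed to be available on CDAP's HBase-style Bytes utility:

// Hypothetical call site: increment two columns of one row through the facade above.
NavigableMap<byte[], Long> deltas = new TreeMap<>(Bytes.BYTES_COMPARATOR);
deltas.put(Bytes.toBytes("count"), 1L);
deltas.put(Bytes.toBytes("bytes"), 128L);
metricsTable.increment(Bytes.toBytes("row1"), deltas);  // metricsTable: a MetricsTableOnTable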

Example 5 with Increment

Use of io.cdap.cdap.api.dataset.table.Increment in project cdap by caskdata.

From class MetricsTableOnTable, method increment (multi-row overload).

@Override
public void increment(NavigableMap<byte[], NavigableMap<byte[], Long>> updates) {
    // Build one Increment per row; each inner map holds that row's column -> delta updates.
    for (Map.Entry<byte[], NavigableMap<byte[], Long>> rowUpdate : updates.entrySet()) {
        Increment increment = new Increment(rowUpdate.getKey());
        for (Map.Entry<byte[], Long> columnUpdate : rowUpdate.getValue().entrySet()) {
            increment.add(columnUpdate.getKey(), columnUpdate.getValue());
        }
        table.increment(increment);
    }
}
Also used: NavigableMap (java.util.NavigableMap), Increment (io.cdap.cdap.api.dataset.table.Increment), Map (java.util.Map), SortedMap (java.util.SortedMap)
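And a hypothetical call site for this multi-row overload, building the nested NavigableMap it expects (same assumptions as the sketch above):

// Hypothetical call site: increment columns across two rows in one call.
NavigableMap<byte[], NavigableMap<byte[], Long>> updates = new TreeMap<>(Bytes.BYTES_COMPARATOR);
NavigableMap<byte[], Long> row1Deltas = new TreeMap<>(Bytes.BYTES_COMPARATOR);
row1Deltas.put(Bytes.toBytes("count"), 1L);
updates.put(Bytes.toBytes("row1"), row1Deltas);
NavigableMap<byte[], Long> row2Deltas = new TreeMap<>(Bytes.BYTES_COMPARATOR);
row2Deltas.put(Bytes.toBytes("count"), 5L);
updates.put(Bytes.toBytes("row2"), row2Deltas);
metricsTable.increment(updates);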

Aggregations

Increment (io.cdap.cdap.api.dataset.table.Increment): 5
Table (io.cdap.cdap.api.dataset.table.Table): 3
DatasetAdmin (io.cdap.cdap.api.dataset.DatasetAdmin): 2
DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties): 2
Get (io.cdap.cdap.api.dataset.table.Get): 2
HBaseTable (io.cdap.cdap.data2.dataset2.lib.table.hbase.HBaseTable): 2
Map (java.util.Map): 2
NavigableMap (java.util.NavigableMap): 2
SortedMap (java.util.SortedMap): 2
Transaction (org.apache.tephra.Transaction): 2
TransactionAware (org.apache.tephra.TransactionAware): 2
TxRunnable (io.cdap.cdap.api.TxRunnable): 1
DatasetContext (io.cdap.cdap.api.data.DatasetContext): 1
MeteredDataset (io.cdap.cdap.api.dataset.metrics.MeteredDataset): 1
Delete (io.cdap.cdap.api.dataset.table.Delete): 1
Put (io.cdap.cdap.api.dataset.table.Put): 1
Scan (io.cdap.cdap.api.dataset.table.Scan): 1
Scanner (io.cdap.cdap.api.dataset.table.Scanner): 1
MetricsCollector (io.cdap.cdap.api.metrics.MetricsCollector): 1
HashMap (java.util.HashMap): 1