
Example 11 with Result

use of io.cdap.cdap.api.dataset.table.Result in project cdap by caskdata.

the class BufferingTable method internalIncrementAndGet.

@ReadWrite
protected Row internalIncrementAndGet(byte[] row, byte[][] columns, long[] amounts) {
    // Logic:
    // * fetching current values
    // * updating values
    // * updating in-memory store
    // * returning updated values as result
    // NOTE: there is a more efficient way to do this, but for now we keep the implementation simple rather than over-optimize
    Map<byte[], byte[]> rowMap;
    try {
        rowMap = getRowMap(row, columns);
        reportRead(1);
    } catch (Exception e) {
        LOG.debug("incrementAndGet failed for table: " + getTransactionAwareName() + ", row: " + Bytes.toStringBinary(row), e);
        throw new DataSetException("incrementAndGet failed", e);
    }
    byte[][] updatedValues = new byte[columns.length][];
    NavigableMap<byte[], byte[]> result = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (int i = 0; i < columns.length; i++) {
        byte[] column = columns[i];
        byte[] val = rowMap.get(column);
        // converting to long
        long longVal;
        if (val == null) {
            longVal = 0L;
        } else {
            if (val.length != Bytes.SIZEOF_LONG) {
                throw new NumberFormatException("Attempted to increment a value that is not convertible to long," + " row: " + Bytes.toStringBinary(row) + " column: " + Bytes.toStringBinary(column));
            }
            longVal = Bytes.toLong(val);
        }
        longVal += amounts[i];
        updatedValues[i] = Bytes.toBytes(longVal);
        result.put(column, updatedValues[i]);
    }
    putInternal(row, columns, updatedValues);
    reportWrite(1, getSize(row) + getSize(columns) + getSize(amounts));
    return new Result(row, result);
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Result(io.cdap.cdap.api.dataset.table.Result) ReadWrite(io.cdap.cdap.api.annotation.ReadWrite)
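
For orientation, here is a minimal hedged sketch of how a caller reaches this code through the public Table API; the table instance, row key, and column names are illustrative assumptions, not taken from the CDAP sources above.

import io.cdap.cdap.api.common.Bytes;
import io.cdap.cdap.api.dataset.table.Row;
import io.cdap.cdap.api.dataset.table.Table;

public class CounterSketch {
    // BufferingTable implements Table, so a caller goes through Table#incrementAndGet
    // rather than calling internalIncrementAndGet directly.
    public static long bumpCounters(Table counters) {
        byte[] row = Bytes.toBytes("user-42");                                   // hypothetical row key
        byte[][] columns = { Bytes.toBytes("visits"), Bytes.toBytes("clicks") }; // hypothetical columns
        long[] amounts = { 1L, 5L };
        // The returned Row holds the post-increment values.
        Row updated = counters.incrementAndGet(row, columns, amounts);
        return Bytes.toLong(updated.get(Bytes.toBytes("visits")));
    }
}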

Example 12 with Result

use of io.cdap.cdap.api.dataset.table.Result in project cdap by cdapio.

the class IndexedTable method incrementAndGet.

/**
 * Increments (atomically) the specified row and columns by the specified amounts, and returns the new values.
 * Note that performing this operation on an indexed column will generally have a negative impact on performance,
 * since up to three writes will need to be performed for every increment (one removing the index for the previous,
 * pre-increment value, one adding the index for the incremented value, and one for the increment itself).
 *
 * @see Table#incrementAndGet(byte[], byte[][], long[])
 */
@ReadWrite
@Override
public Row incrementAndGet(byte[] row, byte[][] columns, long[] amounts) {
    if (columns.length != amounts.length) {
        throw new IllegalArgumentException("Size of columns and amounts arguments must match");
    }
    Row existingRow = table.get(row, columns);
    byte[][] updatedValues = new byte[columns.length][];
    NavigableMap<byte[], byte[]> result = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (int i = 0; i < columns.length; i++) {
        long existingValue = 0L;
        byte[] existingBytes = existingRow.get(columns[i]);
        if (existingBytes != null) {
            if (existingBytes.length != Bytes.SIZEOF_LONG) {
                throw new NumberFormatException("Attempted to increment a value that is not convertible to long," + " row: " + Bytes.toStringBinary(row) + " column: " + Bytes.toStringBinary(columns[i]));
            }
            existingValue = Bytes.toLong(existingBytes);
            if (indexedColumns.contains(columns[i])) {
                // remove the index entry that points at the old, pre-increment value
                index.delete(createIndexKey(row, columns[i], existingBytes), IDX_COL);
            }
        }
        updatedValues[i] = Bytes.toBytes(existingValue + amounts[i]);
        result.put(columns[i], updatedValues[i]);
        if (indexedColumns.contains(columns[i])) {
            // add an index entry for the new, post-increment value
            index.put(createIndexKey(row, columns[i], updatedValues[i]), IDX_COL, row);
        }
    }
    table.put(row, columns, updatedValues);
    return new Result(row, result);
}
Also used : Row(io.cdap.cdap.api.dataset.table.Row) TreeMap(java.util.TreeMap) Result(io.cdap.cdap.api.dataset.table.Result) ReadWrite(io.cdap.cdap.api.annotation.ReadWrite)
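
To make the write amplification described in the javadoc concrete, here is a hedged sketch of a caller incrementing an indexed column; the IndexedTable instance, row key, and the choice of "quantity" as an indexed column are illustrative assumptions, and dataset setup is omitted.

import io.cdap.cdap.api.common.Bytes;
import io.cdap.cdap.api.dataset.lib.IndexedTable;
import io.cdap.cdap.api.dataset.table.Row;

public class IndexedIncrementSketch {
    // Incrementing an indexed column costs up to three writes per call:
    // deleting the old index entry, putting the new one, and putting the data row itself.
    public static long bumpIndexedColumn(IndexedTable purchases) {
        byte[] row = Bytes.toBytes("order-7");            // hypothetical row key
        byte[][] columns = { Bytes.toBytes("quantity") }; // assume "quantity" is an indexed column
        Row updated = purchases.incrementAndGet(row, columns, new long[] { 2L });
        return Bytes.toLong(updated.get(Bytes.toBytes("quantity")));
    }
}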

Example 13 with Result

use of io.cdap.cdap.api.dataset.table.Result in project cdap by cdapio.

the class BufferingTable method get.

@ReadOnly
@Override
public Row get(byte[] row, byte[][] columns) {
    ensureTransactionIsStarted();
    reportRead(1);
    try {
        return new Result(row, getRowMap(row, columns));
    } catch (Exception e) {
        LOG.debug("get failed for table: " + getTransactionAwareName() + ", row: " + Bytes.toStringBinary(row), e);
        throw new DataSetException("get failed", e);
    }
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Result(io.cdap.cdap.api.dataset.table.Result) ReadOnly(io.cdap.cdap.api.annotation.ReadOnly)
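
Result implements Row, so callers of get consume the return value through the Row interface; a minimal sketch with made-up row and column names, assuming a Table instance obtained elsewhere.

import io.cdap.cdap.api.common.Bytes;
import io.cdap.cdap.api.dataset.table.Row;
import io.cdap.cdap.api.dataset.table.Table;

public class GetSketch {
    public static String readName(Table users) {
        byte[] row = Bytes.toBytes("user-42");                               // hypothetical row key
        Row result = users.get(row, new byte[][] { Bytes.toBytes("name") }); // returns a Result
        byte[] value = result.get(Bytes.toBytes("name"));
        return value == null ? null : Bytes.toString(value);
    }
}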

Example 14 with Result

use of io.cdap.cdap.api.dataset.table.Result in project cdap by cdapio.

the class BufferingTable method internalIncrementAndGet.

@ReadWrite
protected Row internalIncrementAndGet(byte[] row, byte[][] columns, long[] amounts) {
    // Logic:
    // * fetching current values
    // * updating values
    // * updating in-memory store
    // * returning updated values as result
    // NOTE: there is a more efficient way to do this, but for now we keep the implementation simple rather than over-optimize
    Map<byte[], byte[]> rowMap;
    try {
        rowMap = getRowMap(row, columns);
        reportRead(1);
    } catch (Exception e) {
        LOG.debug("incrementAndGet failed for table: " + getTransactionAwareName() + ", row: " + Bytes.toStringBinary(row), e);
        throw new DataSetException("incrementAndGet failed", e);
    }
    byte[][] updatedValues = new byte[columns.length][];
    NavigableMap<byte[], byte[]> result = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    for (int i = 0; i < columns.length; i++) {
        byte[] column = columns[i];
        byte[] val = rowMap.get(column);
        // converting to long
        long longVal;
        if (val == null) {
            longVal = 0L;
        } else {
            if (val.length != Bytes.SIZEOF_LONG) {
                throw new NumberFormatException("Attempted to increment a value that is not convertible to long," + " row: " + Bytes.toStringBinary(row) + " column: " + Bytes.toStringBinary(column));
            }
            longVal = Bytes.toLong(val);
        }
        longVal += amounts[i];
        updatedValues[i] = Bytes.toBytes(longVal);
        result.put(column, updatedValues[i]);
    }
    putInternal(row, columns, updatedValues);
    reportWrite(1, getSize(row) + getSize(columns) + getSize(amounts));
    return new Result(row, result);
}
Also used : DataSetException(io.cdap.cdap.api.dataset.DataSetException) IOException(java.io.IOException) Result(io.cdap.cdap.api.dataset.table.Result) ReadWrite(io.cdap.cdap.api.annotation.ReadWrite)
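
Both BufferingTable variants rely on the same invariant: Bytes.toBytes(long) always produces an 8-byte (SIZEOF_LONG) value that Bytes.toLong round-trips, so a stored value of any other length cannot be an encoded counter. A small illustrative snippet (not part of the CDAP sources):

import io.cdap.cdap.api.common.Bytes;

public class LongEncodingSketch {
    public static void main(String[] args) {
        byte[] encoded = Bytes.toBytes(42L);
        System.out.println(encoded.length == Bytes.SIZEOF_LONG); // true: longs encode to 8 bytes
        System.out.println(Bytes.toLong(encoded));               // 42

        // A UTF-8 string of digits is not 8 bytes, so it would trigger the
        // NumberFormatException path in internalIncrementAndGet.
        byte[] notALong = Bytes.toBytes("42");
        System.out.println(notALong.length == Bytes.SIZEOF_LONG); // false: 2 bytes
    }
}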

Example 15 with Result

use of io.cdap.cdap.api.dataset.table.Result in project hydrator-plugins by cdapio.

the class RowRecordTransformerTest method testTransform.

@Test
public void testTransform() throws Exception {
    byte[] rowKey = Bytes.toBytes(28);
    // (boolean, int, long, float, double, bytes, string)
    final Schema schema = Schema.recordOf("record",
        Schema.Field.of("boolField", Schema.nullableOf(Schema.of(Schema.Type.BOOLEAN))),
        Schema.Field.of("intField", Schema.of(Schema.Type.INT)),
        Schema.Field.of("longField", Schema.nullableOf(Schema.of(Schema.Type.LONG))),
        Schema.Field.of("floatField", Schema.nullableOf(Schema.of(Schema.Type.FLOAT))),
        Schema.Field.of("doubleField", Schema.nullableOf(Schema.of(Schema.Type.DOUBLE))),
        Schema.Field.of("bytesField", Schema.nullableOf(Schema.of(Schema.Type.BYTES))),
        Schema.Field.of("stringField", Schema.nullableOf(Schema.of(Schema.Type.STRING))));
    // can't use a hash map because we need to look up by byte[]
    Map<byte[], byte[]> inputColumns = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    inputColumns.put(Bytes.toBytes("boolField"), Bytes.toBytes(true));
    inputColumns.put(Bytes.toBytes("longField"), Bytes.toBytes(512L));
    inputColumns.put(Bytes.toBytes("floatField"), Bytes.toBytes(3.14f));
    inputColumns.put(Bytes.toBytes("bytesField"), Bytes.toBytes("foo"));
    inputColumns.put(Bytes.toBytes("stringField"), Bytes.toBytes("rock"));
    // include some extra columns, they shouldn't show up in the result
    inputColumns.put(Bytes.toBytes("extraField"), Bytes.toBytes("bar"));
    Row input = new Result(rowKey, inputColumns);
    RowRecordTransformer transformer = new RowRecordTransformer(schema, "intField");
    StructuredRecord actual = transformer.toRecord(input);
    Assert.assertTrue(actual.get("boolField"));
    Assert.assertEquals(512L, actual.<Long>get("longField").longValue());
    Assert.assertTrue(Math.abs(3.14f - (Float) actual.get("floatField")) < 0.000001);
    Assert.assertEquals("foo", Bytes.toString((byte[]) actual.get("bytesField")));
    Assert.assertEquals("rock", actual.get("stringField"));
    Assert.assertNull(actual.get("extraField"));
    // this was a nullable field and no data was set for it
    Assert.assertNull(actual.get("doubleField"));
}
Also used : Schema(io.cdap.cdap.api.data.schema.Schema) Row(io.cdap.cdap.api.dataset.table.Row) StructuredRecord(io.cdap.cdap.api.data.format.StructuredRecord) Result(io.cdap.cdap.api.dataset.table.Result) Test(org.junit.Test)
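
The test comment about not being able to use a hash map deserves a word: byte[] keys use identity-based equals and hashCode, so only a comparator-based map such as a TreeMap with Bytes.BYTES_COMPARATOR can find a column by content, which is why the column map is built that way before being wrapped in a Result. A small illustrative snippet (not part of the test):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

import io.cdap.cdap.api.common.Bytes;

public class ByteArrayKeySketch {
    public static void main(String[] args) {
        byte[] key = Bytes.toBytes("boolField");

        // HashMap relies on byte[]'s identity hashCode/equals, so a second array
        // with the same contents does not find the entry.
        Map<byte[], byte[]> hashed = new HashMap<>();
        hashed.put(key, Bytes.toBytes(true));
        System.out.println(hashed.get(Bytes.toBytes("boolField"))); // null

        // A TreeMap ordered by Bytes.BYTES_COMPARATOR compares array contents instead.
        Map<byte[], byte[]> sorted = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        sorted.put(key, Bytes.toBytes(true));
        System.out.println(sorted.get(Bytes.toBytes("boolField")) != null); // true
    }
}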

Aggregations

Result (io.cdap.cdap.api.dataset.table.Result) 15
DataSetException (io.cdap.cdap.api.dataset.DataSetException) 10
IOException (java.io.IOException) 10
ReadOnly (io.cdap.cdap.api.annotation.ReadOnly) 8
Row (io.cdap.cdap.api.dataset.table.Row) 7
ReadWrite (io.cdap.cdap.api.annotation.ReadWrite) 4
Get (io.cdap.cdap.api.dataset.table.Get) 4
TreeMap (java.util.TreeMap) 4
Test (org.junit.Test) 3
DatasetAdmin (io.cdap.cdap.api.dataset.DatasetAdmin) 2
Table (io.cdap.cdap.api.dataset.table.Table) 2
HBaseTable (io.cdap.cdap.data2.dataset2.lib.table.hbase.HBaseTable) 2
Map (java.util.Map) 2
NavigableMap (java.util.NavigableMap) 2
ConcurrentSkipListMap (java.util.concurrent.ConcurrentSkipListMap) 2
Transaction (org.apache.tephra.Transaction) 2
TransactionAware (org.apache.tephra.TransactionAware) 2
StructuredRecord (io.cdap.cdap.api.data.format.StructuredRecord) 1
Schema (io.cdap.cdap.api.data.schema.Schema) 1