Search in sources:

Example 1 with RequestContext

use of com.google.cloud.bigtable.data.v2.internal.RequestContext in project java-bigtable-hbase by googleapis.

From class TestAppendAdapter, method testMultipleColumnFamiliesWithSameQualifiers:

@Test
public void testMultipleColumnFamiliesWithSameQualifiers() {
    byte[] rowKey = dataHelper.randomData("rk1-");
    byte[] family1 = Bytes.toBytes("family1");
    byte[] qualifier1 = Bytes.toBytes("qualifier1");
    byte[] value1 = Bytes.toBytes("value1");
    byte[] family2 = Bytes.toBytes("family2");
    byte[] value2 = Bytes.toBytes("value2");

    // Append to the SAME qualifier in two DIFFERENT column families.
    Append append = new Append(rowKey);
    append.add(family1, qualifier1, value1);
    append.add(family2, qualifier1, value2);

    ReadModifyWriteRow readModifyWriteRow = ReadModifyWriteRow.create(TABLE_ID, ByteString.copyFrom(rowKey));
    appendAdapter.adapt(append, readModifyWriteRow);
    ReadModifyWriteRowRequest request = readModifyWriteRow.toProto(requestContext);

    // Same qualifier in different families must NOT be merged: one rule per family.
    List<ReadModifyWriteRule> rules = request.getRulesList();
    Assert.assertEquals(2, rules.size());
    Assert.assertEquals("family1", rules.get(0).getFamilyName());
    Assert.assertEquals("qualifier1", rules.get(0).getColumnQualifier().toStringUtf8());
    Assert.assertEquals("value1", rules.get(0).getAppendValue().toStringUtf8());
    Assert.assertEquals("family2", rules.get(1).getFamilyName());
    Assert.assertEquals("qualifier1", rules.get(1).getColumnQualifier().toStringUtf8());
    // (Previous comment referred to a "value3" that does not exist in this test;
    // the second rule simply carries value2 from family2.)
    Assert.assertEquals("value2", rules.get(1).getAppendValue().toStringUtf8());
}
Also used : Append(org.apache.hadoop.hbase.client.Append) ReadModifyWriteRowRequest(com.google.bigtable.v2.ReadModifyWriteRowRequest) ReadModifyWriteRule(com.google.bigtable.v2.ReadModifyWriteRule) ReadModifyWriteRow(com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow) Test(org.junit.Test)

Example 2 with RequestContext

use of com.google.cloud.bigtable.data.v2.internal.RequestContext in project java-bigtable-hbase by googleapis.

From class TestAppendAdapter, method testMultipleAppendsWithDuplicates:

@Test
public void testMultipleAppendsWithDuplicates() {
    // One append to family1, plus two appends to the same (family2, qualifier2) cell.
    byte[] key = dataHelper.randomData("rk1-");
    byte[] cf1 = Bytes.toBytes("family1");
    byte[] q1 = Bytes.toBytes("qualifier1");
    byte[] v1 = Bytes.toBytes("value1");
    byte[] cf2 = Bytes.toBytes("family2");
    byte[] q2 = Bytes.toBytes("qualifier2");
    byte[] v2 = Bytes.toBytes("value2");
    byte[] v3 = Bytes.toBytes("value3");

    Append append = new Append(key);
    append.add(cf1, q1, v1);
    append.add(cf2, q2, v2);
    append.add(cf2, q2, v3);

    ReadModifyWriteRow readModifyWriteRow =
        ReadModifyWriteRow.create(TABLE_ID, ByteString.copyFrom(key));
    appendAdapter.adapt(append, readModifyWriteRow);
    List<ReadModifyWriteRule> rules =
        readModifyWriteRow.toProto(requestContext).getRulesList();

    // Duplicate (family, qualifier) cells yield only one rule — two rules total.
    Assert.assertEquals(2, rules.size());

    ReadModifyWriteRule firstRule = rules.get(0);
    Assert.assertEquals("family1", firstRule.getFamilyName());
    Assert.assertEquals("qualifier1", firstRule.getColumnQualifier().toStringUtf8());
    Assert.assertEquals("value1", firstRule.getAppendValue().toStringUtf8());

    ReadModifyWriteRule secondRule = rules.get(1);
    Assert.assertEquals("family2", secondRule.getFamilyName());
    Assert.assertEquals("qualifier2", secondRule.getColumnQualifier().toStringUtf8());
    // Value3 as it was added after value2:
    Assert.assertEquals("value3", secondRule.getAppendValue().toStringUtf8());
}
Also used : Append(org.apache.hadoop.hbase.client.Append) ReadModifyWriteRowRequest(com.google.bigtable.v2.ReadModifyWriteRowRequest) ReadModifyWriteRule(com.google.bigtable.v2.ReadModifyWriteRule) ReadModifyWriteRow(com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow) Test(org.junit.Test)

Example 3 with RequestContext

use of com.google.cloud.bigtable.data.v2.internal.RequestContext in project java-bigtable-hbase by googleapis.

From class TestIncrementAdapter, method testMultipleIncrement:

@Test
public void testMultipleIncrement() {
    // Increment two distinct cells in two different families.
    byte[] key = dataHelper.randomData("rk1-");
    byte[] cf1 = Bytes.toBytes("family1");
    byte[] q1 = Bytes.toBytes("qualifier1");
    long delta1 = 1234;
    byte[] cf2 = Bytes.toBytes("family2");
    byte[] q2 = Bytes.toBytes("qualifier2");
    long delta2 = 4321;

    Increment incr = new Increment(key);
    incr.addColumn(cf1, q1, delta1);
    incr.addColumn(cf2, q2, delta2);

    ReadModifyWriteRow readModifyWriteRow =
        ReadModifyWriteRow.create(TABLE_ID, ByteString.copyFrom(key));
    incrementAdapter.adapt(incr, readModifyWriteRow);
    // Note: this is the built request, not a builder.
    ReadModifyWriteRowRequest request = readModifyWriteRow.toProto(requestContext);

    // One increment rule per cell, in insertion order.
    Assert.assertEquals(2, request.getRulesCount());

    ReadModifyWriteRule firstRule = request.getRules(0);
    Assert.assertEquals("family1", firstRule.getFamilyName());
    Assert.assertEquals("qualifier1", firstRule.getColumnQualifier().toStringUtf8());
    Assert.assertEquals(delta1, firstRule.getIncrementAmount());

    ReadModifyWriteRule secondRule = request.getRules(1);
    Assert.assertEquals("family2", secondRule.getFamilyName());
    Assert.assertEquals("qualifier2", secondRule.getColumnQualifier().toStringUtf8());
    Assert.assertEquals(delta2, secondRule.getIncrementAmount());
}
Also used : ReadModifyWriteRowRequest(com.google.bigtable.v2.ReadModifyWriteRowRequest) Increment(org.apache.hadoop.hbase.client.Increment) ReadModifyWriteRule(com.google.bigtable.v2.ReadModifyWriteRule) ReadModifyWriteRow(com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow) Test(org.junit.Test)

Example 4 with RequestContext

use of com.google.cloud.bigtable.data.v2.internal.RequestContext in project java-bigtable-hbase by googleapis.

From class TestIncrementAdapter, method testSingleIncrement:

@Test
public void testSingleIncrement() {
    // A single-cell increment adapts to exactly one rule.
    byte[] key = dataHelper.randomData("rk1-");
    byte[] cf = Bytes.toBytes("family");
    byte[] q = Bytes.toBytes("qualifier");
    long delta = 1234;

    Increment incr = new Increment(key);
    incr.addColumn(cf, q, delta);

    ReadModifyWriteRow readModifyWriteRow =
        ReadModifyWriteRow.create(TABLE_ID, ByteString.copyFrom(key));
    incrementAdapter.adapt(incr, readModifyWriteRow);
    // Note: this is the built request, not a builder.
    ReadModifyWriteRowRequest request = readModifyWriteRow.toProto(requestContext);

    Assert.assertEquals(1, request.getRulesCount());
    ReadModifyWriteRule rule = request.getRules(0);
    Assert.assertEquals("qualifier", rule.getColumnQualifier().toStringUtf8());
    Assert.assertEquals("family", rule.getFamilyName());
    Assert.assertEquals(delta, rule.getIncrementAmount());
}
Also used : ReadModifyWriteRowRequest(com.google.bigtable.v2.ReadModifyWriteRowRequest) Increment(org.apache.hadoop.hbase.client.Increment) ReadModifyWriteRule(com.google.bigtable.v2.ReadModifyWriteRule) ReadModifyWriteRow(com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow) Test(org.junit.Test)

Example 5 with RequestContext

use of com.google.cloud.bigtable.data.v2.internal.RequestContext in project java-bigtable-hbase by googleapis.

From class TestScanAdapter, method testColFamilyTimeRange:

@Test
public void testColFamilyTimeRange() throws IOException {
    // Per-family time ranges should adapt to an interleave of per-family chains.
    String colFamily1 = "cf1";
    String colFamily2 = "cf2";
    long rangeStart = 10000L;
    long rangeEnd = 99999L;
    long secRangeStart = 100L;
    long secRangeEnd = 999L;

    Scan scan = new Scan();
    scan.setColumnFamilyTimeRange(colFamily1.getBytes(), rangeStart, rangeEnd);
    scan.setColumnFamilyTimeRange(colFamily2.getBytes(), secRangeStart, secRangeEnd);
    scanAdapter.adapt(scan, throwingReadHooks, query);

    // Timestamps are scaled by 1000 in the expected filter (presumably ms -> us).
    Filters.Filter firstFamilyFilter =
        FILTERS.chain()
            .filter(FILTERS.family().regex(colFamily1))
            .filter(FILTERS.timestamp().range().of(rangeStart * 1000, rangeEnd * 1000));
    Filters.Filter secondFamilyFilter =
        FILTERS.chain()
            .filter(FILTERS.family().regex(colFamily2))
            .filter(FILTERS.timestamp().range().of(secRangeStart * 1000, secRangeEnd * 1000));
    Filters.Filter expected =
        FILTERS.chain()
            .filter(FILTERS.limit().cellsPerColumn(1))
            .filter(FILTERS.interleave().filter(firstFamilyFilter).filter(secondFamilyFilter));

    Assert.assertEquals(expected.toProto(), query.toProto(requestContext).getFilter());
}
Also used : Filters(com.google.cloud.bigtable.data.v2.models.Filters) BigtableExtendedScan(com.google.cloud.bigtable.hbase.BigtableExtendedScan) Scan(org.apache.hadoop.hbase.client.Scan) ByteString(com.google.protobuf.ByteString) Test(org.junit.Test)

Aggregations

Test (org.junit.Test)12 ReadModifyWriteRowRequest (com.google.bigtable.v2.ReadModifyWriteRowRequest)9 ReadModifyWriteRow (com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow)8 ByteString (com.google.protobuf.ByteString)7 ReadModifyWriteRule (com.google.bigtable.v2.ReadModifyWriteRule)6 HeaderTracerUnaryCallable (com.google.cloud.bigtable.data.v2.stub.metrics.HeaderTracerUnaryCallable)6 StatsHeadersUnaryCallable (com.google.cloud.bigtable.data.v2.stub.metrics.StatsHeadersUnaryCallable)4 ImmutableMap (com.google.common.collect.ImmutableMap)4 Map (java.util.Map)4 Append (org.apache.hadoop.hbase.client.Append)4 Increment (org.apache.hadoop.hbase.client.Increment)4 SpanName (com.google.api.gax.tracing.SpanName)3 Filters (com.google.cloud.bigtable.data.v2.models.Filters)3 Query (com.google.cloud.bigtable.data.v2.models.Query)3 TracedUnaryCallable (com.google.api.gax.tracing.TracedUnaryCallable)2 CheckAndMutateRowRequest (com.google.bigtable.v2.CheckAndMutateRowRequest)2 CheckAndMutateRowResponse (com.google.bigtable.v2.CheckAndMutateRowResponse)2 ReadRowsRequest (com.google.bigtable.v2.ReadRowsRequest)2 BulkMutation (com.google.cloud.bigtable.data.v2.models.BulkMutation)2 TracedBatcherUnaryCallable (com.google.cloud.bigtable.data.v2.stub.metrics.TracedBatcherUnaryCallable)2