Search in sources:

Example 66 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestReversibleScanners, method testReversibleRegionScanner.

@Test
public void testReversibleRegionScanner() throws IOException {
    // Builds a region with two column families, loads deterministic data, and
    // then verifies that reversed scans return the expected cell/row counts in
    // descending row order under progressively narrower scan configurations.
    // NOTE: a single Scan instance is mutated cumulatively from Case2 onward,
    // so each case's restrictions build on the previous ones.
    byte[] FAMILYNAME2 = Bytes.toBytes("testCf2");
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYNAME)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYNAME2)).build();
    HRegion region = TEST_UTIL.createLocalHRegion(htd, null, null);
    // Both families get the same generated rows/qualifiers/values.
    loadDataToRegion(region, FAMILYNAME2);
    // verify row count with forward scan (sanity baseline before reversing):
    // 2 families * QUALSIZE qualifiers per row, ROWSIZE rows, ascending order.
    Scan scan = new Scan();
    InternalScanner scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, ROWSIZE * QUALSIZE * 2, ROWSIZE, true);
    // Case1:Full reversed scan — same counts, descending order.
    scan.setReversed(true);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, ROWSIZE * QUALSIZE * 2, ROWSIZE, false);
    // Case2:Full reversed scan with one family — halves the cell count.
    scan = new Scan();
    scan.setReversed(true);
    scan.addFamily(FAMILYNAME);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, ROWSIZE * QUALSIZE, ROWSIZE, false);
    // Case3:Specify qualifiers + One family — 2 qualifiers per row.
    byte[][] specifiedQualifiers = { QUALS[1], QUALS[2] };
    for (byte[] specifiedQualifier : specifiedQualifiers) scan.addColumn(FAMILYNAME, specifiedQualifier);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, ROWSIZE * 2, ROWSIZE, false);
    // Case4:Specify qualifiers + Two families — 2 qualifiers * 2 families per row.
    for (byte[] specifiedQualifier : specifiedQualifiers) scan.addColumn(FAMILYNAME2, specifiedQualifier);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, ROWSIZE * 2 * 2, ROWSIZE, false);
    // Case5: Case4 + specify start row. Reversed scans start at the HIGHEST
    // row, so starting at startRowNum yields (startRowNum + 1) rows.
    int startRowNum = ROWSIZE * 3 / 4;
    scan.withStartRow(ROWS[startRowNum]);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, (startRowNum + 1) * 2 * 2, (startRowNum + 1), false);
    // Case6: Case4 + specify stop row. Start row is cleared; the stop row is
    // exclusive, leaving (ROWSIZE - stopRowNum - 1) rows.
    int stopRowNum = ROWSIZE / 4;
    scan.withStartRow(HConstants.EMPTY_BYTE_ARRAY);
    scan.withStopRow(ROWS[stopRowNum]);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, (ROWSIZE - stopRowNum - 1) * 2 * 2, (ROWSIZE - stopRowNum - 1), false);
    // Case7: Case4 + specify start row + specify stop row — rows in
    // (stopRowNum, startRowNum], i.e. (startRowNum - stopRowNum) of them.
    scan.withStartRow(ROWS[startRowNum]);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, (startRowNum - stopRowNum) * 2 * 2, (startRowNum - stopRowNum), false);
    // Case8: Case7 + SingleColumnValueFilter — keep only rows whose value in
    // the first specified qualifier equals VALUES[valueNum].
    int valueNum = startRowNum % VALUESIZE;
    Filter filter = new SingleColumnValueFilter(FAMILYNAME, specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[valueNum]);
    scan.setFilter(filter);
    scanner = region.getScanner(scan);
    // Expected matches within (stopRowNum, startRowNum]; the ternary adjusts
    // for whether the boundary row itself matches — NOTE(review): presumably
    // values cycle with period VALUESIZE over the row index; confirm against
    // loadDataToRegion.
    int unfilteredRowNum = (startRowNum - stopRowNum) / VALUESIZE + (stopRowNum / VALUESIZE == valueNum ? 0 : 1);
    verifyCountAndOrder(scanner, unfilteredRowNum * 2 * 2, unfilteredRowNum, false);
    // Case9: Case7 + PageFilter — caps the result at pageSize rows.
    int pageSize = 10;
    filter = new PageFilter(pageSize);
    scan.setFilter(filter);
    scanner = region.getScanner(scan);
    int expectedRowNum = pageSize;
    verifyCountAndOrder(scanner, expectedRowNum * 2 * 2, expectedRowNum, false);
    // Case10: Case7 + FilterList+MUST_PASS_ONE — rows matching VALUES[0] OR VALUES[1].
    SingleColumnValueFilter scvFilter1 = new SingleColumnValueFilter(FAMILYNAME, specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[0]);
    SingleColumnValueFilter scvFilter2 = new SingleColumnValueFilter(FAMILYNAME, specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[1]);
    expectedRowNum = 0;
    // Count rows in (stopRowNum, startRowNum] whose index maps to value 0 or 1.
    for (int i = startRowNum; i > stopRowNum; i--) {
        if (i % VALUESIZE == 0 || i % VALUESIZE == 1) {
            expectedRowNum++;
        }
    }
    filter = new FilterList(Operator.MUST_PASS_ONE, scvFilter1, scvFilter2);
    scan.setFilter(filter);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, expectedRowNum * 2 * 2, expectedRowNum, false);
    // Case11: Case7 + FilterList+MUST_PASS_ALL — a row cannot equal both
    // VALUES[0] and VALUES[1] in the same column, so no row passes.
    filter = new FilterList(Operator.MUST_PASS_ALL, scvFilter1, scvFilter2);
    expectedRowNum = 0;
    scan.setFilter(filter);
    scanner = region.getScanner(scan);
    verifyCountAndOrder(scanner, expectedRowNum * 2 * 2, expectedRowNum, false);
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Scan(org.apache.hadoop.hbase.client.Scan) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 67 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestHRegion, method testCheckAndMutate_WithFilters.

@Test
@Deprecated
public void testCheckAndMutate_WithFilters() throws Throwable {
    // Exercises the deprecated filter-based checkAndMutate / checkAndRowMutate
    // overloads: a FilterList that matches the stored row lets the mutation
    // through, a non-matching one rejects it.
    final byte[] FAMILY = Bytes.toBytes("fam");
    final byte[] qualA = Bytes.toBytes("A");
    final byte[] qualB = Bytes.toBytes("B");
    final byte[] qualC = Bytes.toBytes("C");
    final byte[] qualD = Bytes.toBytes("D");
    final byte[] qualE = Bytes.toBytes("E");
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, FAMILY);
    // Put one row seeded with A="a", B="b", C="c"
    Put seed = new Put(row);
    seed.addColumn(FAMILY, qualA, Bytes.toBytes("a"));
    seed.addColumn(FAMILY, qualB, Bytes.toBytes("b"));
    seed.addColumn(FAMILY, qualC, Bytes.toBytes("c"));
    region.put(seed);
    // Put with success: both conditions (A=="a" AND B=="b") hold, so D is written.
    FilterList passFilters = new FilterList(
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        new SingleColumnValueFilter(FAMILY, qualB, CompareOperator.EQUAL, Bytes.toBytes("b")));
    boolean ok = region.checkAndMutate(row, passFilters,
        new Put(row).addColumn(FAMILY, qualD, Bytes.toBytes("d")));
    assertTrue(ok);
    Result result = region.get(new Get(row).addColumn(FAMILY, qualD));
    assertEquals("d", Bytes.toString(result.getValue(FAMILY, qualD)));
    // Put with failure: B != "c", so E must not be written.
    FilterList failFilters = new FilterList(
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        new SingleColumnValueFilter(FAMILY, qualB, CompareOperator.EQUAL, Bytes.toBytes("c")));
    ok = region.checkAndMutate(row, failFilters,
        new Put(row).addColumn(FAMILY, qualE, Bytes.toBytes("e")));
    assertFalse(ok);
    assertTrue(region.get(new Get(row).addColumn(FAMILY, qualE)).isEmpty());
    // Delete with success: matching conditions allow removing column D.
    FilterList deleteFilters = new FilterList(
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        new SingleColumnValueFilter(FAMILY, qualB, CompareOperator.EQUAL, Bytes.toBytes("b")));
    ok = region.checkAndMutate(row, deleteFilters,
        new Delete(row).addColumns(FAMILY, qualD));
    assertTrue(ok);
    assertTrue(region.get(new Get(row).addColumn(FAMILY, qualD)).isEmpty());
    // Mutate with success: put E and delete A in one guarded row mutation.
    FilterList mutateFilters = new FilterList(
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        new SingleColumnValueFilter(FAMILY, qualB, CompareOperator.EQUAL, Bytes.toBytes("b")));
    RowMutations mutations = new RowMutations(row)
        .add((Mutation) new Put(row).addColumn(FAMILY, qualE, Bytes.toBytes("e")))
        .add((Mutation) new Delete(row).addColumns(FAMILY, qualA));
    ok = region.checkAndRowMutate(row, mutateFilters, mutations);
    assertTrue(ok);
    result = region.get(new Get(row).addColumn(FAMILY, qualE));
    assertEquals("e", Bytes.toString(result.getValue(FAMILY, qualE)));
    assertTrue(region.get(new Get(row).addColumn(FAMILY, qualA)).isEmpty());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Get(org.apache.hadoop.hbase.client.Get) FilterList(org.apache.hadoop.hbase.filter.FilterList) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) Result(org.apache.hadoop.hbase.client.Result) RowMutations(org.apache.hadoop.hbase.client.RowMutations) Test(org.junit.Test)

Example 68 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestHRegion, method testCheckAndAppend.

@Test
public void testCheckAndAppend() throws Throwable {
    // Exercises checkAndMutate with an Append action, both with the simple
    // ifEquals condition and with a FilterList condition, covering the
    // success and failure paths of each.
    final byte[] FAMILY = Bytes.toBytes("fam");
    final byte[] qualA = Bytes.toBytes("A");
    final byte[] qualB = Bytes.toBytes("B");
    final byte[] qualC = Bytes.toBytes("C");
    // Setting up region with a single seeded column A="a"
    this.region = initHRegion(tableName, method, CONF, FAMILY);
    region.put(new Put(row).addColumn(FAMILY, qualA, Bytes.toBytes("a")));
    // CheckAndAppend with correct value: B becomes "b".
    CheckAndMutate appendWhenMatch = CheckAndMutate.newBuilder(row)
        .ifEquals(FAMILY, qualA, Bytes.toBytes("a"))
        .build(new Append(row).addColumn(FAMILY, qualB, Bytes.toBytes("b")));
    CheckAndMutateResult res = region.checkAndMutate(appendWhenMatch);
    assertTrue(res.isSuccess());
    assertEquals("b", Bytes.toString(res.getResult().getValue(FAMILY, qualB)));
    Result result = region.get(new Get(row).addColumn(FAMILY, qualB));
    assertEquals("b", Bytes.toString(result.getValue(FAMILY, qualB)));
    // CheckAndAppend with wrong value: rejected, B stays "b".
    CheckAndMutate appendWhenMismatch = CheckAndMutate.newBuilder(row)
        .ifEquals(FAMILY, qualA, Bytes.toBytes("b"))
        .build(new Append(row).addColumn(FAMILY, qualB, Bytes.toBytes("b")));
    res = region.checkAndMutate(appendWhenMismatch);
    assertFalse(res.isSuccess());
    assertNull(res.getResult());
    result = region.get(new Get(row).addColumn(FAMILY, qualB));
    assertEquals("b", Bytes.toString(result.getValue(FAMILY, qualB)));
    // Add C="c" so the filter-based conditions below have two columns to check.
    region.put(new Put(row).addColumn(FAMILY, qualC, Bytes.toBytes("c")));
    // CheckAndAppend with a filter and correct value: "bb" appended, B becomes "bbb".
    CheckAndMutate appendWithFilters = CheckAndMutate.newBuilder(row)
        .ifMatches(new FilterList(
            new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
            new SingleColumnValueFilter(FAMILY, qualC, CompareOperator.EQUAL, Bytes.toBytes("c"))))
        .build(new Append(row).addColumn(FAMILY, qualB, Bytes.toBytes("bb")));
    res = region.checkAndMutate(appendWithFilters);
    assertTrue(res.isSuccess());
    assertEquals("bbb", Bytes.toString(res.getResult().getValue(FAMILY, qualB)));
    result = region.get(new Get(row).addColumn(FAMILY, qualB));
    assertEquals("bbb", Bytes.toString(result.getValue(FAMILY, qualB)));
    // CheckAndAppend with a filter and wrong value: rejected, B stays "bbb".
    CheckAndMutate appendWithBadFilters = CheckAndMutate.newBuilder(row)
        .ifMatches(new FilterList(
            new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("b")),
            new SingleColumnValueFilter(FAMILY, qualC, CompareOperator.EQUAL, Bytes.toBytes("d"))))
        .build(new Append(row).addColumn(FAMILY, qualB, Bytes.toBytes("bb")));
    res = region.checkAndMutate(appendWithBadFilters);
    assertFalse(res.isSuccess());
    assertNull(res.getResult());
    result = region.get(new Get(row).addColumn(FAMILY, qualB));
    assertEquals("bbb", Bytes.toString(result.getValue(FAMILY, qualB)));
}
Also used : Append(org.apache.hadoop.hbase.client.Append) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) Get(org.apache.hadoop.hbase.client.Get) FilterList(org.apache.hadoop.hbase.filter.FilterList) Put(org.apache.hadoop.hbase.client.Put) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Example 69 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestHRegion, method testCheckAndMutate_WithFiltersAndTimeRange.

@Test
@Deprecated
public void testCheckAndMutate_WithFiltersAndTimeRange() throws Throwable {
    // Verifies the deprecated filter + TimeRange checkAndMutate overloads: the
    // cell is written at timestamp 100, and the test shows the condition
    // matches with TimeRange.between(0, 101) but not with between(0, 100).
    final byte[] FAMILY = Bytes.toBytes("fam");
    final byte[] qualA = Bytes.toBytes("A");
    final byte[] qualB = Bytes.toBytes("B");
    final byte[] qualC = Bytes.toBytes("C");
    final byte[] qualD = Bytes.toBytes("D");
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, FAMILY);
    // Put with specifying the timestamp: A="a" at ts=100.
    region.put(new Put(row).addColumn(FAMILY, qualA, 100, Bytes.toBytes("a")));
    // Put with success: the range covers ts=100, so B is written.
    boolean ok = region.checkAndMutate(row,
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        TimeRange.between(0, 101),
        new Put(row).addColumn(FAMILY, qualB, Bytes.toBytes("b")));
    assertTrue(ok);
    Result result = region.get(new Get(row).addColumn(FAMILY, qualB));
    assertEquals("b", Bytes.toString(result.getValue(FAMILY, qualB)));
    // Put with failure: the range excludes ts=100, so C must not be written.
    ok = region.checkAndMutate(row,
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        TimeRange.between(0, 100),
        new Put(row).addColumn(FAMILY, qualC, Bytes.toBytes("c")));
    assertFalse(ok);
    assertTrue(region.get(new Get(row).addColumn(FAMILY, qualC)).isEmpty());
    // Mutate with success: put D and delete A in one guarded row mutation.
    RowMutations mutations = new RowMutations(row)
        .add((Mutation) new Put(row).addColumn(FAMILY, qualD, Bytes.toBytes("d")))
        .add((Mutation) new Delete(row).addColumns(FAMILY, qualA));
    ok = region.checkAndRowMutate(row,
        new SingleColumnValueFilter(FAMILY, qualA, CompareOperator.EQUAL, Bytes.toBytes("a")),
        TimeRange.between(0, 101), mutations);
    assertTrue(ok);
    result = region.get(new Get(row).addColumn(FAMILY, qualD));
    assertEquals("d", Bytes.toString(result.getValue(FAMILY, qualD)));
    assertTrue(region.get(new Get(row).addColumn(FAMILY, qualA)).isEmpty());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Get(org.apache.hadoop.hbase.client.Get) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) Result(org.apache.hadoop.hbase.client.Result) RowMutations(org.apache.hadoop.hbase.client.RowMutations) Test(org.junit.Test)

Example 70 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestHRegion, method testCheckAndMutate_wrongMutationType.

@Test
@Deprecated
public void testCheckAndMutate_wrongMutationType() throws Throwable {
    // Increments are not accepted by the deprecated checkAndMutate overloads;
    // both the comparator-based and the filter-based forms must throw
    // DoNotRetryIOException with the same message.
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, fam1);
    // Comparator-based overload rejects an Increment.
    try {
        Increment increment = new Increment(row).addColumn(fam1, qual1, 1);
        region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL,
            new BinaryComparator(value1), increment);
        fail("should throw DoNotRetryIOException");
    } catch (DoNotRetryIOException expected) {
        assertEquals("Unsupported mutate type: INCREMENT", expected.getMessage());
    }
    // Filter-based overload rejects an Increment as well.
    try {
        Increment increment = new Increment(row).addColumn(fam1, qual1, 1);
        region.checkAndMutate(row,
            new SingleColumnValueFilter(fam1, qual1, CompareOperator.EQUAL, value1),
            increment);
        fail("should throw DoNotRetryIOException");
    } catch (DoNotRetryIOException expected) {
        assertEquals("Unsupported mutate type: INCREMENT", expected.getMessage());
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Increment(org.apache.hadoop.hbase.client.Increment) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Test(org.junit.Test)

Aggregations

SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)71 Test (org.junit.Test)39 FilterList (org.apache.hadoop.hbase.filter.FilterList)28 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)16 Scan (org.apache.hadoop.hbase.client.Scan)15 Put (org.apache.hadoop.hbase.client.Put)13 Result (org.apache.hadoop.hbase.client.Result)13 Filter (org.apache.hadoop.hbase.filter.Filter)12 Delete (org.apache.hadoop.hbase.client.Delete)8 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)8 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)8 TableName (org.apache.hadoop.hbase.TableName)7 Table (org.apache.hadoop.hbase.client.Table)7 BitComparator (org.apache.hadoop.hbase.filter.BitComparator)7 ArrayList (java.util.ArrayList)6 CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult)6 Get (org.apache.hadoop.hbase.client.Get)6 Mutation (org.apache.hadoop.hbase.client.Mutation)6 ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable)6 CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)6