
Example 81 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by Apache.

From the class TestFilter, method testFilterListWithPrefixFilter.

// HBASE-9747
@Test
public void testFilterListWithPrefixFilter() throws IOException {
    byte[] family = Bytes.toBytes("f1");
    byte[] qualifier = Bytes.toBytes("q1");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    Region testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    for (int i = 0; i < 5; i++) {
        Put p = new Put(Bytes.toBytes((char) ('a' + i) + "row"));
        p.setDurability(Durability.SKIP_WAL);
        p.addColumn(family, qualifier, Bytes.toBytes(String.valueOf(111 + i)));
        testRegion.put(p);
    }
    testRegion.flush(true);
    // rows starting with "b"
    PrefixFilter pf = new PrefixFilter(new byte[] { 'b' });
    // rows with value of column 'q1' set to '113'
    SingleColumnValueFilter scvf = new SingleColumnValueFilter(family, qualifier, CompareOp.EQUAL, Bytes.toBytes("113"));
    // combine these two with OR in a FilterList
    FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, pf, scvf);
    Scan s1 = new Scan();
    s1.setFilter(filterList);
    InternalScanner scanner = testRegion.getScanner(s1);
    List<Cell> results = new ArrayList<>();
    int resultCount = 0;
    // next() loads one filtered row's cells into `results` and returns true
    // while more rows remain to scan after the current one.
    while (scanner.next(results)) {
        resultCount++;
        byte[] row = CellUtil.cloneRow(results.get(0));
        LOG.debug("Found row: " + Bytes.toStringBinary(row));
        assertTrue(Bytes.equals(row, Bytes.toBytes("brow")) || Bytes.equals(row, Bytes.toBytes("crow")));
        results.clear();
    }
    assertEquals(2, resultCount);
    scanner.close();
    WAL wal = ((HRegion) testRegion).getWAL();
    ((HRegion) testRegion).close();
    wal.close();
}
Also used: WAL(org.apache.hadoop.hbase.wal.WAL) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ArrayList(java.util.ArrayList) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
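
As a usage note, the same OR combination can also be exercised through the client API against a live table rather than directly against a Region. Below is a minimal sketch, assuming an open Connection named conn and a table populated with family "f1" and qualifier "q1" as in the test above; the method name scanWithOrFilter is illustrative, not part of the test.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative sketch: prints rows whose key starts with "b" OR whose
// f1:q1 value equals "113", the predicate the test above verifies.
static void scanWithOrFilter(Connection conn, TableName tableName) throws IOException {
    FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
        new PrefixFilter(Bytes.toBytes("b")),
        new SingleColumnValueFilter(Bytes.toBytes("f1"), Bytes.toBytes("q1"),
            CompareOp.EQUAL, Bytes.toBytes("113")));
    Scan scan = new Scan();
    scan.setFilter(filterList);
    try (Table table = conn.getTable(tableName);
         ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
            System.out.println(Bytes.toStringBinary(r.getRow()));
        }
    }
}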

Example 82 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by Apache.

From the class TestWALObserver, method getBasic3FamilyHTableDescriptor.

private HTableDescriptor getBasic3FamilyHTableDescriptor(final TableName tableName) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    for (int i = 0; i < TEST_FAMILY.length; i++) {
        HColumnDescriptor a = new HColumnDescriptor(TEST_FAMILY[i]);
        htd.addFamily(a);
    }
    return htd;
}
Also used: HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 83 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by Apache.

From the class TestWALObserver, method createBasic3FamilyHTD.

private HTableDescriptor createBasic3FamilyHTD(final String tableName) {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
    HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
    htd.addFamily(a);
    HColumnDescriptor b = new HColumnDescriptor(Bytes.toBytes("b"));
    htd.addFamily(b);
    HColumnDescriptor c = new HColumnDescriptor(Bytes.toBytes("c"));
    htd.addFamily(c);
    return htd;
}
Also used: HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
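
Both TestWALObserver helpers above build their descriptors with HColumnDescriptor and HTableDescriptor, which are deprecated as of HBase 2.0. For reference, here is a minimal sketch of the same three-family layout using the replacement builder API (TableDescriptorBuilder and ColumnFamilyDescriptorBuilder); the method name is illustrative:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

// Builds an immutable TableDescriptor with families "a", "b" and "c",
// mirroring createBasic3FamilyHTD without the deprecated classes.
static TableDescriptor basic3FamilyTable(String tableName) {
    return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("a")))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("b")))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("c")))
        .build();
}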

Example 84 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by Apache.

From the class FilterTestingCluster, method createTable.

protected static void createTable(TableName tableName, String columnFamilyName) {
    assertNotNull("HBaseAdmin is not initialized successfully.", admin);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    HColumnDescriptor colDef = new HColumnDescriptor(Bytes.toBytes(columnFamilyName));
    desc.addFamily(colDef);
    try {
        admin.createTable(desc);
        createdTables.add(tableName);
        assertTrue("Fail to create the table", admin.tableExists(tableName));
    } catch (IOException e) {
        // e is non-null here, so assertNull always fails, turning the
        // IOException into a test failure with a readable message.
        assertNull("Exception found while creating table", e);
    }
}
Also used: HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
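
The catch block above leans on the fact that assertNull always fails once an exception has been caught. An equivalent, arguably more direct formulation uses JUnit's fail(); this is a sketch of an alternative, not the project's code, and createTableOrFail is a hypothetical name:

import static org.junit.Assert.fail;
import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.Bytes;

// Same table-creation logic, but a caught IOException fails the test
// explicitly instead of through assertNull(e).
static void createTableOrFail(Admin admin, TableName tableName, String columnFamilyName) {
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(Bytes.toBytes(columnFamilyName)));
    try {
        admin.createTable(desc);
    } catch (IOException e) {
        fail("Exception found while creating table: " + e.getMessage());
    }
}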

Example 85 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by Apache.

From the class TestFilter, method testNestedFilterListWithSCVF.

// TODO: intentionally disabled?
public void testNestedFilterListWithSCVF() throws IOException {
    byte[] columnStatus = Bytes.toBytes("S");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    Region testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    for (int i = 0; i < 10; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.setDurability(Durability.SKIP_WAL);
        p.addColumn(FAMILIES[0], columnStatus, Bytes.toBytes(i % 2));
        testRegion.put(p);
    }
    testRegion.flush(true);
    // 1. get rows > "row4"
    Filter rowFilter = new RowFilter(CompareOp.GREATER, new BinaryComparator(Bytes.toBytes("row4")));
    Scan s1 = new Scan();
    s1.setFilter(rowFilter);
    InternalScanner scanner = testRegion.getScanner(s1);
    List<Cell> results = new ArrayList<>();
    int i = 5;
    // Walk the matching rows 5..9; next() returns false after it hands back
    // the last row, ending the loop once that row has been verified.
    for (boolean done = true; done; i++) {
        done = scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    // 2. get rows <= "row4" with S=0 (the value written for even row indexes)
    FilterList subFilterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    Filter subFilter1 = new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("row4")));
    subFilterList.addFilter(subFilter1);
    Filter subFilter2 = new SingleColumnValueFilter(FAMILIES[0], columnStatus, CompareOp.EQUAL, Bytes.toBytes(0));
    subFilterList.addFilter(subFilter2);
    s1 = new Scan();
    s1.setFilter(subFilterList);
    scanner = testRegion.getScanner(s1);
    results = new ArrayList<>();
    for (i = 0; i <= 4; i += 2) {
        scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    assertFalse(scanner.next(results));
    // 3. let's begin to verify nested filter list
    // 3.1 add rowFilter, then add subFilterList
    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    filterList.addFilter(rowFilter);
    filterList.addFilter(subFilterList);
    s1 = new Scan();
    s1.setFilter(filterList);
    scanner = testRegion.getScanner(s1);
    results = new ArrayList<>();
    for (i = 0; i <= 4; i += 2) {
        scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    for (i = 5; i <= 9; i++) {
        scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    assertFalse(scanner.next(results));
    // 3.2 MAGIC here! add subFilterList first, then add rowFilter
    filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    filterList.addFilter(subFilterList);
    filterList.addFilter(rowFilter);
    s1 = new Scan();
    s1.setFilter(filterList);
    scanner = testRegion.getScanner(s1);
    results = new ArrayList<>();
    for (i = 0; i <= 4; i += 2) {
        scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    for (i = 5; i <= 9; i++) {
        scanner.next(results);
        assertTrue(CellUtil.matchingRow(results.get(0), Bytes.toBytes("row" + i)));
        assertEquals(Bytes.toInt(CellUtil.cloneValue(results.get(0))), i % 2);
        results.clear();
    }
    assertFalse(scanner.next(results));
    WAL wal = ((HRegion) testRegion).getWAL();
    ((HRegion) testRegion).close();
    wal.close();
}
Also used: WAL(org.apache.hadoop.hbase.wal.WAL) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ArrayList(java.util.ArrayList) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)
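
Steps 3.1 and 3.2 check that a MUST_PASS_ONE list returns the union of its branches no matter which branch is added first. For reference, here is a minimal sketch assembling the same nested predicate, (row <= "row4" AND S == 0) OR (row > "row4"), in one expression with the varargs constructors; the method and parameter names are illustrative:

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// OR of (row <= "row4" AND S == 0) and (row > "row4"); with
// MUST_PASS_ONE the branch order should not change the result set.
static Filter nestedOrFilter(byte[] family, byte[] statusQualifier) {
    Filter lowerHalf = new FilterList(FilterList.Operator.MUST_PASS_ALL,
        new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("row4"))),
        new SingleColumnValueFilter(family, statusQualifier, CompareOp.EQUAL, Bytes.toBytes(0)));
    Filter upperHalf = new RowFilter(CompareOp.GREATER, new BinaryComparator(Bytes.toBytes("row4")));
    return new FilterList(FilterList.Operator.MUST_PASS_ONE, lowerHalf, upperHalf);
}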

Aggregations

HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 679
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 561
Test (org.junit.Test): 358
TableName (org.apache.hadoop.hbase.TableName): 200
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 137
Put (org.apache.hadoop.hbase.client.Put): 132
Table (org.apache.hadoop.hbase.client.Table): 118
IOException (java.io.IOException): 112
Admin (org.apache.hadoop.hbase.client.Admin): 112
Path (org.apache.hadoop.fs.Path): 81
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 74
ArrayList (java.util.ArrayList): 66
Configuration (org.apache.hadoop.conf.Configuration): 65
Connection (org.apache.hadoop.hbase.client.Connection): 52
Scan (org.apache.hadoop.hbase.client.Scan): 50
Result (org.apache.hadoop.hbase.client.Result): 45
FileSystem (org.apache.hadoop.fs.FileSystem): 44
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 42
Connection (java.sql.Connection): 41
Properties (java.util.Properties): 38