Search in sources :

Example 91 with ResultScanner

use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

the class TestReplicationSmallTests method testHBase14905.

// VerifyReplication should honor the --versions option (HBASE-14905):
// with differing cell versions between source and peer, verification must
// still succeed when all versions are compared.
@Test(timeout = 300000)
public void testHBase14905() throws Exception {
    // Write three versions of the same cell to the source table.
    byte[] qualifierName = Bytes.toBytes("f1");
    Put put = new Put(Bytes.toBytes("r1"));
    put.addColumn(famName, qualifierName, Bytes.toBytes("v1002"));
    htable1.put(put);
    put.addColumn(famName, qualifierName, Bytes.toBytes("v1001"));
    htable1.put(put);
    put.addColumn(famName, qualifierName, Bytes.toBytes("v1112"));
    htable1.put(put);

    // Sanity-check the source: one row with three cell versions.
    Scan scan = new Scan();
    scan.setMaxVersions(100);
    ResultScanner scanner1 = htable1.getScanner(scan);
    Result[] res1 = scanner1.next(1);
    scanner1.close();
    assertEquals(1, res1.length);
    assertEquals(3, res1[0].getColumnCells(famName, qualifierName).size());

    // Wait (with retries) for replication to deliver all three versions
    // to the peer table.
    for (int i = 0; i < NB_RETRIES; i++) {
        scan = new Scan();
        scan.setMaxVersions(100);
        scanner1 = htable2.getScanner(scan);
        res1 = scanner1.next(1);
        scanner1.close();
        if (res1.length != 1) {
            LOG.info("Only got " + res1.length + " rows");
            Thread.sleep(SLEEP_TIME);
        } else {
            int cellNumber = res1[0].getColumnCells(famName, qualifierName).size();
            if (cellNumber != 3) {
                LOG.info("Only got " + cellNumber + " cells");
                Thread.sleep(SLEEP_TIME);
            } else {
                break;
            }
        }
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for normal batch replication");
        }
    }

    // Add two extra versions directly to the peer so the tables diverge
    // in version count (5 on the peer vs 3 on the source).
    put.addColumn(famName, qualifierName, Bytes.toBytes("v1111"));
    htable2.put(put);
    put.addColumn(famName, qualifierName, Bytes.toBytes("v1112"));
    htable2.put(put);
    scan = new Scan();
    scan.setMaxVersions(100);
    scanner1 = htable2.getScanner(scan);
    res1 = scanner1.next(NB_ROWS_IN_BATCH);
    scanner1.close();
    assertEquals(1, res1.length);
    assertEquals(5, res1[0].getColumnCells(famName, qualifierName).size());

    // VerifyReplication with --versions=100 should report 0 good rows and
    // 1 bad row, since the peer carries versions the source does not.
    String[] args = new String[] { "--versions=100", PEER_ID, tableName.getNameAsString() };
    runVerifyReplication(args, 0, 1);
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Example 92 with ResultScanner

use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

the class OfflineMetaRebuildTestCore method tableRowCount.

/**
   * Returns the number of rows in a given table. HBase must be up and the table
   * should be present (will wait for timeout for a while otherwise)
   *
   * @param conf  unused, retained for caller compatibility
   * @param table name of the table to count rows in
   * @return # of rows in the specified table
   * @throws IOException if the table cannot be reached or scanned
   */
protected int tableRowCount(Configuration conf, TableName table) throws IOException {
    int count = 0;
    // try-with-resources guarantees both the table handle and the scanner
    // are closed even if iteration throws (the original leaked both).
    try (Table t = TEST_UTIL.getConnection().getTable(table);
         ResultScanner rst = t.getScanner(new Scan())) {
        for (@SuppressWarnings("unused") Result rt : rst) {
            count++;
        }
    }
    return count;
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)

Example 93 with ResultScanner

use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

the class TestResultSizeEstimation method testResultSizeEstimation.

// Verifies that the scanner's result-size estimation does not drop rows:
// with a small max result size, both rows must still be returned.
@Test
public void testResultSizeEstimation() throws Exception {
    byte[] ROW1 = Bytes.toBytes("testRow1");
    byte[] ROW2 = Bytes.toBytes("testRow2");
    byte[] FAMILY = Bytes.toBytes("testFamily");
    byte[] QUALIFIER = Bytes.toBytes("testQualifier");
    byte[] VALUE = Bytes.toBytes("testValue");
    final TableName tableName = TableName.valueOf(name.getMethodName());
    byte[][] FAMILIES = new byte[][] { FAMILY };
    // try-with-resources ensures table and scanner are closed even when the
    // assertion fails (the original leaked both on failure).
    try (Table table = TEST_UTIL.createTable(tableName, FAMILIES)) {
        Put p = new Put(ROW1);
        p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE));
        table.put(p);
        p = new Put(ROW2);
        p.add(new KeyValue(ROW2, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE));
        table.put(p);
        Scan s = new Scan();
        // Force a tight per-RPC size limit so estimation logic is exercised.
        s.setMaxResultSize(SCANNER_DATA_LIMIT);
        try (ResultScanner rs = table.getScanner(s)) {
            int count = 0;
            while (rs.next() != null) {
                count++;
            }
            assertEquals("Result size estimation did not work properly", 2, count);
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) KeyValue(org.apache.hadoop.hbase.KeyValue) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)

Example 94 with ResultScanner

use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

the class TestLoadAndSwitchEncodeOnDisk method assertAllOnLine.

/**
 * Asserts that every region of the given table is online and readable by
 * scanning from each region's start key and requiring a non-empty first row.
 *
 * @param t table whose regions are checked
 * @throws IOException if region locations cannot be fetched or a scan fails
 */
private void assertAllOnLine(final Table t) throws IOException {
    List<HRegionLocation> regions;
    try (RegionLocator rl = TEST_UTIL.getConnection().getRegionLocator(t.getName())) {
        regions = rl.getAllRegionLocations();
    }
    for (HRegionLocation e : regions) {
        byte[] startkey = e.getRegionInfo().getStartKey();
        Scan s = new Scan(startkey);
        // try-with-resources closes the scanner even when the assertion
        // fails (the original leaked the scanner on assertion failure).
        try (ResultScanner scanner = t.getScanner(s)) {
            Result r = scanner.next();
            org.junit.Assert.assertTrue(r != null && r.size() > 0);
        }
    }
}
Also used : RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)

Example 95 with ResultScanner

use of org.apache.hadoop.hbase.client.ResultScanner in project javaee7-samples by javaee-samples.

the class PersonSessionBean method getPersons.

/**
 * Scans the persons table and returns one {@code Person} per stored cell.
 *
 * <p>NOTE(review): the field-population lines are commented out below, so
 * each returned {@code Person} is empty — confirm whether they should be
 * re-enabled.
 *
 * @return list of persons read from the persons column family
 * @throws IOException if the HBase scan fails
 */
public List<Person> getPersons() throws IOException {
    List<Person> persons = new ArrayList<>();
    try (HTableInterface table = pool.getTable(personsTable)) {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes(personsColumnFamily));
        try (ResultScanner resultScanner = table.getScanner(scan)) {
            for (Result result : resultScanner) {
                for (KeyValue kv : result.raw()) {
                    Person p = new Person();
                    //                    p.setTitle(Bytes.toString(kv.getQualifier()));
                    //                    p.setBody(Bytes.toString(kv.getValue()));
                    //                    p.setId(Bytes.toString(result.getRow()));
                    // Fixed: original added undeclared variable `person`
                    // (compile error); the constructed object is `p`.
                    persons.add(p);
                }
            }
        }
    }
    return persons;
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)

Aggregations

ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)342 Scan (org.apache.hadoop.hbase.client.Scan)303 Result (org.apache.hadoop.hbase.client.Result)302 Table (org.apache.hadoop.hbase.client.Table)164 Test (org.junit.Test)152 Cell (org.apache.hadoop.hbase.Cell)106 IOException (java.io.IOException)102 TableName (org.apache.hadoop.hbase.TableName)89 Delete (org.apache.hadoop.hbase.client.Delete)79 Connection (org.apache.hadoop.hbase.client.Connection)77 Put (org.apache.hadoop.hbase.client.Put)75 ArrayList (java.util.ArrayList)71 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)47 InterruptedIOException (java.io.InterruptedIOException)46 CellScanner (org.apache.hadoop.hbase.CellScanner)42 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)31 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)29 HTable (org.apache.hadoop.hbase.client.HTable)29 Admin (org.apache.hadoop.hbase.client.Admin)24 Get (org.apache.hadoop.hbase.client.Get)23