
Example 81 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From the class TestRegionMergeTransactionOnCluster, method verifyRowCount.

private void verifyRowCount(Table table, int expectedRegionNum) throws IOException {
    ResultScanner scanner = table.getScanner(new Scan());
    int rowCount = 0;
    while (scanner.next() != null) {
        rowCount++;
    }
    assertEquals(expectedRegionNum, rowCount);
    scanner.close();
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan)
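
The same pattern reads a little more safely with try-with-resources, which closes the scanner even when next() throws. A minimal sketch, not taken from the hbase project; countRows is a hypothetical helper:

private long countRows(Table table) throws IOException {
    // try-with-resources closes the ResultScanner even if next() throws mid-scan
    try (ResultScanner scanner = table.getScanner(new Scan())) {
        long rowCount = 0;
        while (scanner.next() != null) {
            rowCount++;
        }
        return rowCount;
    }
}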

Example 82 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From the class TestRegionServerReadRequestMetrics, method testReadRequestsCountWithTTLExpiration.

@Test
public void testReadRequestsCountWithTTLExpiration() throws Exception {
    putTTLExpiredData();
    Scan scan = new Scan();
    scan.addFamily(CF2);
    try (ResultScanner scanner = table.getScanner(scan)) {
        int resultCount = 0;
        for (Result ignore : scanner) {
            resultCount++;
        }
        testReadRequests(resultCount, 2, 1);
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
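
The helper putTTLExpiredData() is defined elsewhere in the test class and is not shown here. One plausible way to produce expired cells is a per-mutation TTL via Mutation#setTTL; the sketch below is only an illustration under that assumption, not the project's actual helper, and the row key, qualifier and timings are made up:

private void putShortLivedData(Table table, byte[] family) throws Exception {
    Put put = new Put(Bytes.toBytes("expiringRow"));
    put.addColumn(family, Bytes.toBytes("q"), Bytes.toBytes("v"));
    // per-mutation TTL in milliseconds; the cell is filtered out of reads once it expires
    put.setTTL(1000L);
    table.put(put);
    // wait past the TTL so a following scan over this family returns no results
    Thread.sleep(2000L);
}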

Example 83 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From the class TestRegionServerReadRequestMetrics, method testReadRequestsCountWithDeletedRow.

@Test
public void testReadRequestsCountWithDeletedRow() throws Exception {
    try {
        Delete delete = new Delete(ROW3);
        table.delete(delete);
        Scan scan = new Scan();
        try (ResultScanner scanner = table.getScanner(scan)) {
            int resultCount = 0;
            for (Result ignore : scanner) {
                resultCount++;
            }
            testReadRequests(resultCount, 2, 1);
        }
    } finally {
        Put put = new Put(ROW3);
        put.addColumn(CF1, COL1, VAL1);
        put.addColumn(CF1, COL2, VAL2);
        table.put(put);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
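
A Delete does not have to remove the whole row as it does above; it can also be scoped to a column family or to a single column. A short sketch, assuming ROW3, CF1 and COL1 are the same byte[] constants used by the test class:

private void deleteNarrower(Table table) throws IOException {
    // Narrower alternatives to the full-row Delete used in the test above.
    Delete oneFamily = new Delete(ROW3).addFamily(CF1);        // remove one column family from the row
    Delete oneColumn = new Delete(ROW3).addColumns(CF1, COL1); // remove every version of a single column
    table.delete(oneFamily);
    table.delete(oneColumn);
}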

Example 84 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From the class TestScanner, method count.

/*
   * @param countTable Table to run the counting scan against.
   * @param flushIndex At what row we start the flush.
   * @param concurrent if the flush should be concurrent or synchronous.
   * @return Count of rows found.
   * @throws IOException
   */
private int count(final Table countTable, final int flushIndex, boolean concurrent) throws IOException {
    LOG.info("Taking out counting scan");
    Scan scan = new Scan();
    for (byte[] qualifier : EXPLICIT_COLS) {
        scan.addColumn(HConstants.CATALOG_FAMILY, qualifier);
    }
    ResultScanner s = countTable.getScanner(scan);
    int count = 0;
    boolean justFlushed = false;
    while (s.next() != null) {
        if (justFlushed) {
            LOG.info("after next() just after next flush");
            justFlushed = false;
        }
        count++;
        if (flushIndex == count) {
            LOG.info("Starting flush at flush index " + flushIndex);
            Thread t = new Thread() {

                public void run() {
                    try {
                        region.flush(true);
                        LOG.info("Finishing flush");
                    } catch (IOException e) {
                        LOG.info("Failed flush cache");
                    }
                }
            };
            if (concurrent) {
                // concurrently flush.
                t.start();
            } else {
                // sync flush
                t.run();
            }
            LOG.info("Continuing on after kicking off background flush");
            justFlushed = true;
        }
    }
    s.close();
    LOG.info("Found " + count + " items");
    return count;
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException)
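
The flush above is issued from inside the region server process via region.flush(true). From a client, the equivalent request normally goes through Admin; a minimal sketch, assuming an already-open Connection (the test itself does not take this path):

private void flushViaAdmin(Connection connection, TableName tableName) throws IOException {
    try (Admin admin = connection.getAdmin()) {
        // request a flush of every region of the table
        admin.flush(tableName);
    }
}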

Example 85 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache.

From the class TestScannerHeartbeatMessages, method testEquivalenceOfScanWithHeartbeats.

/**
   * Test the equivalence of a scan versus the same scan executed when heartbeat messages are
   * necessary.
   * @param scan The scan configuration being tested
   * @param rowSleepTime The time to sleep between fetches of row cells
   * @param cfSleepTime The time to sleep between fetches of column family cells
   * @param sleepBeforeCf set to true when column family sleeps should occur before the cells for
   *          that column family are fetched
   * @throws Exception
   */
private void testEquivalenceOfScanWithHeartbeats(final Scan scan, int rowSleepTime, int cfSleepTime, boolean sleepBeforeCf) throws Exception {
    disableSleeping();
    final ResultScanner scanner = TABLE.getScanner(scan);
    final ResultScanner scannerWithHeartbeats = TABLE.getScanner(scan);
    Result r1 = null;
    Result r2 = null;
    while ((r1 = scanner.next()) != null) {
        // Enforce the specified sleep conditions during calls to the heartbeat scanner
        configureSleepTime(rowSleepTime, cfSleepTime, sleepBeforeCf);
        r2 = scannerWithHeartbeats.next();
        disableSleeping();
        assertTrue(r2 != null);
        try {
            Result.compareResults(r1, r2);
        } catch (Exception e) {
            fail(e.getMessage());
        }
    }
    assertTrue(scannerWithHeartbeats.next() == null);
    scanner.close();
    scannerWithHeartbeats.close();
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) IOException(java.io.IOException) ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) Result(org.apache.hadoop.hbase.client.Result)
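
Heartbeat messages keep a slow scan from timing out on the client while the server is still working within its own time limit. The client-side knobs that bound how much work a single scanner RPC does live on Scan itself; the values below are illustrative only and are not taken from the test:

private Scan buildBoundedScan() {
    Scan scan = new Scan();
    scan.setCaching(10);               // rows the client asks for per scanner RPC
    scan.setMaxResultSize(2L * 1024);  // approximate byte budget per scanner RPC
    scan.setAllowPartialResults(true); // accept partially-formed rows instead of waiting for full ones
    return scan;
}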

Aggregations

ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 342 usages
Scan (org.apache.hadoop.hbase.client.Scan): 303 usages
Result (org.apache.hadoop.hbase.client.Result): 302 usages
Table (org.apache.hadoop.hbase.client.Table): 164 usages
Test (org.junit.Test): 152 usages
Cell (org.apache.hadoop.hbase.Cell): 106 usages
IOException (java.io.IOException): 102 usages
TableName (org.apache.hadoop.hbase.TableName): 89 usages
Delete (org.apache.hadoop.hbase.client.Delete): 79 usages
Connection (org.apache.hadoop.hbase.client.Connection): 77 usages
Put (org.apache.hadoop.hbase.client.Put): 75 usages
ArrayList (java.util.ArrayList): 71 usages
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 47 usages
InterruptedIOException (java.io.InterruptedIOException): 46 usages
CellScanner (org.apache.hadoop.hbase.CellScanner): 42 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 31 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 29 usages
HTable (org.apache.hadoop.hbase.client.HTable): 29 usages
Admin (org.apache.hadoop.hbase.client.Admin): 24 usages
Get (org.apache.hadoop.hbase.client.Get): 23 usages