Example 56 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache, in the class TestLoadIncrementalHFilesSplitRecovery, method assertExpectedTable.

/**
   * Checks that all columns have the expected value and that there is the
   * expected number of rows.
   * @throws IOException
   */
void assertExpectedTable(final Connection connection, TableName table, int count, int value) throws IOException {
    HTableDescriptor[] htds = util.getAdmin().listTables(table.getNameAsString());
    assertEquals(1, htds.length);
    Table t = null;
    try {
        t = connection.getTable(table);
        Scan s = new Scan();
        ResultScanner sr = t.getScanner(s);
        int i = 0;
        for (Result r : sr) {
            i++;
            for (NavigableMap<byte[], byte[]> nm : r.getNoVersionMap().values()) {
                for (byte[] val : nm.values()) {
                    assertTrue(Bytes.equals(val, value(value)));
                }
            }
        }
        assertEquals(count, i);
    } catch (IOException e) {
        fail("Failed due to exception");
    } finally {
        if (t != null)
            t.close();
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Scan(org.apache.hadoop.hbase.client.Scan), IOException(java.io.IOException), HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor), Result(org.apache.hadoop.hbase.client.Result)

Example 57 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache, in the class TestLoadIncrementalHFilesSplitRecovery, method assertExpectedTable (an overload of the previous example that obtains the Connection from the test utility).

/**
   * Checks that all columns have the expected value and that there is the
   * expected number of rows.
   * @throws IOException
   */
void assertExpectedTable(TableName table, int count, int value) throws IOException {
    HTableDescriptor[] htds = util.getAdmin().listTables(table.getNameAsString());
    assertEquals(1, htds.length);
    Table t = null;
    try {
        t = util.getConnection().getTable(table);
        Scan s = new Scan();
        ResultScanner sr = t.getScanner(s);
        int i = 0;
        for (Result r : sr) {
            i++;
            for (NavigableMap<byte[], byte[]> nm : r.getNoVersionMap().values()) {
                for (byte[] val : nm.values()) {
                    assertTrue(Bytes.equals(val, value(value)));
                }
            }
        }
        assertEquals(count, i);
    } catch (IOException e) {
        fail("Failed due to exception");
    } finally {
        if (t != null)
            t.close();
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Scan(org.apache.hadoop.hbase.client.Scan), IOException(java.io.IOException), HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor), Result(org.apache.hadoop.hbase.client.Result)
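
Neither assertExpectedTable variant closes the ResultScanner it opens, only the Table. Both Table and ResultScanner implement Closeable, so the same check can be written with try-with-resources. A minimal sketch, assuming the same imports and static JUnit asserts as the examples above; the expectedValue parameter stands in for the byte[] produced by the test's value(int) helper and is illustrative, not part of the HBase API:

void assertExpectedTableCompact(final Connection connection, TableName table, int count, byte[] expectedValue) throws IOException {
    // try-with-resources closes both the Table and the ResultScanner,
    // even when an assertion fails partway through the scan.
    try (Table t = connection.getTable(table);
         ResultScanner sr = t.getScanner(new Scan())) {
        int rows = 0;
        for (Result r : sr) {
            rows++;
            for (NavigableMap<byte[], byte[]> nm : r.getNoVersionMap().values()) {
                for (byte[] val : nm.values()) {
                    assertTrue(Bytes.equals(val, expectedValue));
                }
            }
        }
        assertEquals(count, rows);
    }
}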

Example 58 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache, in the class TestImportExport, method testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily.

@Test
public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
    final TableName exportTable = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(exportTable);
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE));
    UTIL.getAdmin().createTable(desc);
    Table exportT = UTIL.getConnection().getTable(exportTable);
    //Add first version of QUAL
    Put p = new Put(ROW1);
    p.addColumn(FAMILYA, QUAL, now, QUAL);
    exportT.put(p);
    //Add Delete family marker
    Delete d = new Delete(ROW1, now + 3);
    exportT.delete(d);
    //Add second version of QUAL
    p = new Put(ROW1);
    // Bytes.toBytes avoids the platform-default charset pitfall of String.getBytes()
    p.addColumn(FAMILYA, QUAL, now + 5, Bytes.toBytes("s"));
    exportT.put(p);
    //Add second Delete family marker
    d = new Delete(ROW1, now + 7);
    exportT.delete(d);
    String[] args = new String[] { "-D" + Export.RAW_SCAN + "=true", exportTable.getNameAsString(), FQ_OUTPUT_DIR, // max number of key versions per key to export
    "1000" };
    assertTrue(runExport(args));
    final String importTable = name.getMethodName() + "import";
    desc = new HTableDescriptor(TableName.valueOf(importTable));
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE));
    UTIL.getAdmin().createTable(desc);
    Table importT = UTIL.getConnection().getTable(TableName.valueOf(importTable));
    args = new String[] { importTable, FQ_OUTPUT_DIR };
    assertTrue(runImport(args));
    Scan s = new Scan();
    s.setMaxVersions();
    s.setRaw(true);
    ResultScanner importedTScanner = importT.getScanner(s);
    Result importedTResult = importedTScanner.next();
    ResultScanner exportedTScanner = exportT.getScanner(s);
    Result exportedTResult = exportedTScanner.next();
    try {
        Result.compareResults(exportedTResult, importedTResult);
    } catch (Exception e) {
        fail("Original and imported tables data comparision failed with error:" + e.getMessage());
    } finally {
        exportT.close();
        importT.close();
    }
}
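
A short note on why this works: the export runs with Export.RAW_SCAN enabled, and the verification Scan sets setRaw(true) together with setMaxVersions(), so the scanners return delete markers and every cell version to the client. That is what lets Result.compareResults confirm that both delete family markers survived the export/import round trip.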
Also used: Delete(org.apache.hadoop.hbase.client.Delete), TableName(org.apache.hadoop.hbase.TableName), Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor), Scan(org.apache.hadoop.hbase.client.Scan), Put(org.apache.hadoop.hbase.client.Put), IOException(java.io.IOException), HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor), Result(org.apache.hadoop.hbase.client.Result), Test(org.junit.Test)

Example 59 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache, in the class TestImportTSVWithVisibilityLabels, method issueDeleteAndVerifyData.

private void issueDeleteAndVerifyData(TableName tableName) throws IOException {
    LOG.debug("Validating table after delete.");
    Table table = util.getConnection().getTable(tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
        try {
            Delete d = new Delete(Bytes.toBytes("KEY"));
            d.addFamily(Bytes.toBytes(FAMILY));
            d.setCellVisibility(new CellVisibility("private&secret"));
            table.delete(d);
            Scan scan = new Scan();
            // Scan entire family.
            scan.addFamily(Bytes.toBytes(FAMILY));
            scan.setAuthorizations(new Authorizations("secret", "private"));
            ResultScanner resScanner = table.getScanner(scan);
            Result[] next = resScanner.next(5);
            assertEquals(0, next.length);
            verified = true;
            break;
        } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
        }
        try {
            Thread.sleep(pause);
        } catch (InterruptedException e) {
        // continue
        }
    }
    table.close();
    assertTrue(verified);
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete), Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations), Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), CellVisibility(org.apache.hadoop.hbase.security.visibility.CellVisibility), Result(org.apache.hadoop.hbase.client.Result), Scan(org.apache.hadoop.hbase.client.Scan)

Example 60 with ResultScanner

Use of org.apache.hadoop.hbase.client.ResultScanner in project hbase by apache, in the class TestImportTsv, method validateTable.

/**
   * Confirm ImportTsv via data in online table.
   */
private static void validateTable(Configuration conf, TableName tableName, String family, int valueMultiplier, boolean isDryRun) throws IOException {
    LOG.debug("Validating table.");
    Connection connection = ConnectionFactory.createConnection(conf);
    Table table = connection.getTable(tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
        try {
            Scan scan = new Scan();
            // Scan entire family.
            scan.addFamily(Bytes.toBytes(family));
            ResultScanner resScanner = table.getScanner(scan);
            int numRows = 0;
            for (Result res : resScanner) {
                numRows++;
                assertEquals(2, res.size());
                List<Cell> kvs = res.listCells();
                assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
                assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
                assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
                assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
            // Only one result set is expected, so let it loop.
            }
            if (isDryRun) {
                assertEquals(0, numRows);
            } else {
                assertEquals(1, numRows);
            }
            verified = true;
            break;
        } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
        }
        try {
            Thread.sleep(pause);
        } catch (InterruptedException e) {
        // continue
        }
    }
    table.close();
    connection.close();
    assertTrue(verified);
}
Also used: Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Connection(org.apache.hadoop.hbase.client.Connection), Scan(org.apache.hadoop.hbase.client.Scan), Cell(org.apache.hadoop.hbase.Cell), Result(org.apache.hadoop.hbase.client.Result)

Aggregations

ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 342
Scan (org.apache.hadoop.hbase.client.Scan): 303
Result (org.apache.hadoop.hbase.client.Result): 302
Table (org.apache.hadoop.hbase.client.Table): 164
Test (org.junit.Test): 152
Cell (org.apache.hadoop.hbase.Cell): 106
IOException (java.io.IOException): 102
TableName (org.apache.hadoop.hbase.TableName): 89
Delete (org.apache.hadoop.hbase.client.Delete): 79
Connection (org.apache.hadoop.hbase.client.Connection): 77
Put (org.apache.hadoop.hbase.client.Put): 75
ArrayList (java.util.ArrayList): 71
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 47
InterruptedIOException (java.io.InterruptedIOException): 46
CellScanner (org.apache.hadoop.hbase.CellScanner): 42
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 31
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 29
HTable (org.apache.hadoop.hbase.client.HTable): 29
Admin (org.apache.hadoop.hbase.client.Admin): 24
Get (org.apache.hadoop.hbase.client.Get): 23
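
Across these aggregated usages the dominant pattern is the same: build a Scan, obtain a ResultScanner from a Table, iterate the Results, and close the resources. A minimal self-contained sketch of that pattern, assuming a default client configuration on the classpath; the table and family names are placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanAllRows {
    public static void main(String[] args) throws IOException {
        // Placeholder names; point these at a real table and column family.
        TableName tableName = TableName.valueOf("my_table");
        byte[] family = Bytes.toBytes("cf");
        try (Connection connection = ConnectionFactory.createConnection();
             Table table = connection.getTable(tableName)) {
            Scan scan = new Scan();
            // Restrict the scan to one family and fetch rows in batches per RPC.
            scan.addFamily(family);
            scan.setCaching(100);
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    System.out.println(Bytes.toString(result.getRow()));
                }
            }
        }
    }
}

Closing the scanner promptly matters: each open ResultScanner holds a server-side scanner lease on the region server, which try-with-resources releases even on an exception.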