Search in sources:

Example 1 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

From the class TestClientScanner, method testNoResultsHint.

@Test
@SuppressWarnings("unchecked")
public void testNoResultsHint() throws IOException {
    final Result[] results = new Result[1];
    KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1, Type.Maximum);
    results[0] = Result.create(new Cell[] { kv1 });
    RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
    Mockito.when(rpcFactory.<Result[]>newCaller()).thenReturn(caller);
    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {

        private int count = 0;

        @Override
        public Result[] answer(InvocationOnMock invocation) throws Throwable {
            ScannerCallableWithReplicas callable = invocation.getArgumentAt(0, ScannerCallableWithReplicas.class);
            switch (count) {
                case 0: // initialize
                    count++;
                    callable.currentScannerCallable.setMoreResultsInRegion(MoreResults.UNKNOWN);
                    return results;
                case 1: // detect no more results
                case 2: // close
                    count++;
                    return new Result[0];
                default:
                    throw new RuntimeException("Expected only 2 invocations");
            }
        }
    });
    // Set a much larger cache and buffer size than we'll provide
    scan.setCaching(100);
    scan.setMaxResultSize(1000 * 1000);
    try (MockClientScanner scanner = new MockClientScanner(conf, scan, TableName.valueOf(name.getMethodName()), clusterConn, rpcFactory, controllerFactory, pool, Integer.MAX_VALUE)) {
        scanner.setRpcFinished(true);
        InOrder inOrder = Mockito.inOrder(caller);
        scanner.loadCache();
        // One for fetching the results
        // One for fetching empty results and quit as we do not have moreResults hint.
        inOrder.verify(caller, Mockito.times(2)).callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt());
        assertEquals(1, scanner.cache.size());
        Result r = scanner.cache.poll();
        assertNotNull(r);
        CellScanner cs = r.cellScanner();
        assertTrue(cs.advance());
        assertEquals(kv1, cs.current());
        assertFalse(cs.advance());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) InOrder(org.mockito.InOrder) CellScanner(org.apache.hadoop.hbase.CellScanner) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
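
The assertions at the end of this test rely on the basic CellScanner contract: advance() must return true before current() is read, and it returns false once the cells of the Result are exhausted. A minimal sketch of that iteration pattern, kept separate from the test above (the class and method names below are hypothetical, not taken from the HBase source):

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CellScannerSketch {

    // Iterates every cell of a Result using the advance()/current() contract
    // exercised by the assertions above: call advance() first, read current()
    // only after it returns true, and stop once advance() returns false.
    static void dumpCells(Result result) throws IOException {
        CellScanner cs = result.cellScanner();
        while (cs.advance()) {            // false when no cells remain
            Cell cell = cs.current();     // valid only after a successful advance()
            System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + "/"
                + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                + Bytes.toString(CellUtil.cloneValue(cell)));
        }
    }
}

The same advance()/current() pairing appears in the remaining examples on this page.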

Example 2 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

From the class TestClientScanner, method testNoMoreResults.

@Test
@SuppressWarnings("unchecked")
public void testNoMoreResults() throws IOException {
    final Result[] results = new Result[1];
    KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1, Type.Maximum);
    results[0] = Result.create(new Cell[] { kv1 });
    RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
    Mockito.when(rpcFactory.<Result[]>newCaller()).thenReturn(caller);
    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {

        private int count = 0;

        @Override
        public Result[] answer(InvocationOnMock invocation) throws Throwable {
            ScannerCallableWithReplicas callable = invocation.getArgumentAt(0, ScannerCallableWithReplicas.class);
            switch (count) {
                case 0: // initialize
                    count++;
                    callable.currentScannerCallable.setMoreResultsInRegion(MoreResults.NO);
                    return results;
                case 1: // close
                    count++;
                    return null;
                default:
                    throw new RuntimeException("Expected only 2 invocations");
            }
        }
    });
    Mockito.when(rpcFactory.<Result[]>newCaller()).thenReturn(caller);
    // Set a much larger cache and buffer size than we'll provide
    scan.setCaching(100);
    scan.setMaxResultSize(1000 * 1000);
    try (MockClientScanner scanner = new MockClientScanner(conf, scan, TableName.valueOf(name.getMethodName()), clusterConn, rpcFactory, controllerFactory, pool, Integer.MAX_VALUE)) {
        scanner.setRpcFinished(true);
        InOrder inOrder = Mockito.inOrder(caller);
        scanner.loadCache();
        inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt());
        assertEquals(1, scanner.cache.size());
        Result r = scanner.cache.poll();
        assertNotNull(r);
        CellScanner cs = r.cellScanner();
        assertTrue(cs.advance());
        assertEquals(kv1, cs.current());
        assertFalse(cs.advance());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) InOrder(org.mockito.InOrder) CellScanner(org.apache.hadoop.hbase.CellScanner) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 3 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

From the class TestClientScanner, method testSizeLimit.

@Test
@SuppressWarnings("unchecked")
public void testSizeLimit() throws IOException {
    final Result[] results = new Result[1];
    KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1, Type.Maximum);
    results[0] = Result.create(new Cell[] { kv1 });
    RpcRetryingCaller<Result[]> caller = Mockito.mock(RpcRetryingCaller.class);
    Mockito.when(rpcFactory.<Result[]>newCaller()).thenReturn(caller);
    Mockito.when(caller.callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt())).thenAnswer(new Answer<Result[]>() {

        private int count = 0;

        @Override
        public Result[] answer(InvocationOnMock invocation) throws Throwable {
            ScannerCallableWithReplicas callable = invocation.getArgumentAt(0, ScannerCallableWithReplicas.class);
            switch (count) {
                case 0: // initialize
                    count++;
                    // if we set no here the implementation will trigger a close
                    callable.currentScannerCallable.setMoreResultsInRegion(MoreResults.YES);
                    return results;
                case 1: // close
                    count++;
                    return null;
                default:
                    throw new RuntimeException("Expected only 2 invocations");
            }
        }
    });
    Mockito.when(rpcFactory.<Result[]>newCaller()).thenReturn(caller);
    // Set a much larger cache
    scan.setCaching(100);
    // The single key-value will exit the loop
    scan.setMaxResultSize(1);
    try (MockClientScanner scanner = new MockClientScanner(conf, scan, TableName.valueOf(name.getMethodName()), clusterConn, rpcFactory, controllerFactory, pool, Integer.MAX_VALUE)) {
        InOrder inOrder = Mockito.inOrder(caller);
        scanner.loadCache();
        inOrder.verify(caller, Mockito.times(1)).callWithoutRetries(Mockito.any(RetryingCallable.class), Mockito.anyInt());
        assertEquals(1, scanner.cache.size());
        Result r = scanner.cache.poll();
        assertNotNull(r);
        CellScanner cs = r.cellScanner();
        assertTrue(cs.advance());
        assertEquals(kv1, cs.current());
        assertFalse(cs.advance());
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) InOrder(org.mockito.InOrder) CellScanner(org.apache.hadoop.hbase.CellScanner) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 4 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

From the class ReplicationProtbufUtil, method replicateWALEntry.

/**
   * A helper to replicate a list of WAL entries using admin protocol.
   * @param admin Admin service
   * @param entries Array of WAL entries to be replicated
   * @param replicationClusterId Id which will uniquely identify source cluster FS client
   *          configurations in the replication configuration directory
   * @param sourceBaseNamespaceDir Path to source cluster base namespace directory
   * @param sourceHFileArchiveDir Path to the source cluster hfile archive directory
   * @throws java.io.IOException
   */
public static void replicateWALEntry(final AdminService.BlockingInterface admin, final Entry[] entries, String replicationClusterId, Path sourceBaseNamespaceDir, Path sourceHFileArchiveDir) throws IOException {
    Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner> p = buildReplicateWALEntryRequest(entries, null, replicationClusterId, sourceBaseNamespaceDir, sourceHFileArchiveDir);
    HBaseRpcController controller = new HBaseRpcControllerImpl(p.getSecond());
    try {
        admin.replicateWALEntry(controller, p.getFirst());
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException e) {
        throw ProtobufUtil.handleRemoteException(e);
    }
}
Also used : HBaseRpcController(org.apache.hadoop.hbase.ipc.HBaseRpcController) HBaseRpcControllerImpl(org.apache.hadoop.hbase.ipc.HBaseRpcControllerImpl) CellScanner(org.apache.hadoop.hbase.CellScanner) SizedCellScanner(org.apache.hadoop.hbase.io.SizedCellScanner)
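
A minimal caller sketch for the helper above, assuming Entry here is org.apache.hadoop.hbase.wal.WAL.Entry and that a blocking AdminService stub to the sink region server has already been obtained. The stub, entry array, cluster id and paths below are hypothetical, and the package imports reflect the 2.x-era layout this snippet comes from:

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.protobuf.ReplicationProtbufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.wal.WAL;

public class ReplicateWALEntrySketch {

    // Ships a batch of WAL entries to a sink region server via the helper
    // shown above. Obtaining the blocking admin stub is normally handled by
    // the replication sink machinery and is out of scope for this sketch.
    static void shipEntries(AdminService.BlockingInterface adminStub, WAL.Entry[] walEntries)
            throws IOException {
        String replicationClusterId = "source-cluster-1";         // hypothetical source cluster id
        Path sourceBaseNamespaceDir = new Path("/hbase/data");     // hypothetical source namespace dir
        Path sourceHFileArchiveDir = new Path("/hbase/archive");   // hypothetical hfile archive dir
        ReplicationProtbufUtil.replicateWALEntry(adminStub, walEntries, replicationClusterId,
            sourceBaseNamespaceDir, sourceHFileArchiveDir);
    }
}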

Example 5 with CellScanner

Use of org.apache.hadoop.hbase.CellScanner in project hbase by apache.

From the class TestVisibilityLabelsWithDeletes, method testVisibilityLabelsWithDeleteFamily.

@Test
public void testVisibilityLabelsWithDeleteFamily() throws Exception {
    setAuths();
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    try (Table table = createTableAndWriteDataWithLabels(tableName, SECRET, CONFIDENTIAL + "|" + TOPSECRET)) {
        PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Delete d = new Delete(row2);
                    d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
                    d.addFamily(fam);
                    table.delete(d);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(actiona);
        TEST_UTIL.getAdmin().flush(tableName);
        Scan s = new Scan();
        s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL));
        ResultScanner scanner = table.getScanner(s);
        Result[] next = scanner.next(3);
        assertTrue(next.length == 1);
        CellScanner cellScanner = next[0].cellScanner();
        cellScanner.advance();
        Cell current = cellScanner.current();
        assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row1, 0, row1.length));
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) CellScanner(org.apache.hadoop.hbase.CellScanner) Result(org.apache.hadoop.hbase.client.Result) TableName(org.apache.hadoop.hbase.TableName) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
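
The write side of this test is hidden inside the createTableAndWriteDataWithLabels helper, which is not shown here. A minimal sketch of what such a labelled write typically looks like, assuming the target table and column family already exist (the table name, family, qualifier and label expression below are hypothetical):

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityWriteSketch {

    // Writes a single cell guarded by a visibility expression. Only scanners
    // whose Authorizations satisfy the expression (as in the Scan above) will
    // see this cell.
    static void putWithVisibility(Connection connection) throws IOException {
        try (Table table = connection.getTable(TableName.valueOf("labels_demo"))) {
            Put put = new Put(Bytes.toBytes("row1"));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("q"), Bytes.toBytes("value"));
            put.setCellVisibility(new CellVisibility("SECRET|(CONFIDENTIAL&TOPSECRET)"));
            table.put(put);
        }
    }
}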

Aggregations

CellScanner (org.apache.hadoop.hbase.CellScanner): 111
Cell (org.apache.hadoop.hbase.Cell): 87
Test (org.junit.Test): 69
Result (org.apache.hadoop.hbase.client.Result): 67
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 60
Scan (org.apache.hadoop.hbase.client.Scan): 58
Table (org.apache.hadoop.hbase.client.Table): 56
TableName (org.apache.hadoop.hbase.TableName): 51
IOException (java.io.IOException): 49
Connection (org.apache.hadoop.hbase.client.Connection): 41
Delete (org.apache.hadoop.hbase.client.Delete): 39
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 38
InterruptedIOException (java.io.InterruptedIOException): 34
KeyValue (org.apache.hadoop.hbase.KeyValue): 15
Put (org.apache.hadoop.hbase.client.Put): 15
ArrayList (java.util.ArrayList): 13
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 10
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 10
RetriesExhaustedWithDetailsException (org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException): 7
Tag (org.apache.hadoop.hbase.Tag): 6