
Example 41 with RegionScanner

Use of org.apache.hadoop.hbase.regionserver.RegionScanner in project hbase by apache, in the class TestMasterRegionOnTwoFileSystems, method testRecovery.

@Test
public void testRecovery() throws IOException {
    int countPerRound = 100;
    for (int round = 0; round < 5; round++) {
        for (int i = 0; i < countPerRound; i++) {
            int row = round * countPerRound + i;
            Put put = new Put(Bytes.toBytes(row)).addColumn(CF, CQ, Bytes.toBytes(row));
            region.update(r -> r.put(put));
        }
        // Close the region and recreate it under a new ServerName so its contents must be
        // recovered (WAL replay) when it is reopened.
        region.close(true);
        region = createMasterRegion(ServerName.valueOf("localhost", 12345, EnvironmentEdgeManager.currentTime() + round + 1));
        // Every row written in this and all earlier rounds must still be readable.
        try (RegionScanner scanner = region.getRegionScanner(new Scan())) {
            List<Cell> cells = new ArrayList<>();
            boolean moreValues = true;
            for (int i = 0; i < (round + 1) * countPerRound; i++) {
                assertTrue(moreValues);
                moreValues = scanner.next(cells);
                assertEquals(1, cells.size());
                Result result = Result.create(cells);
                cells.clear();
                assertEquals(i, Bytes.toInt(result.getRow()));
                assertEquals(i, Bytes.toInt(result.getValue(CF, CQ)));
            }
            assertFalse(moreValues);
        }
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
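
The pattern this test leans on is the InternalScanner contract: RegionScanner.next(List<Cell>) appends the cells of the next row to the list and returns true while more rows remain, so callers drain and clear the list between calls. A minimal sketch of that read loop, assuming an already opened HRegion named region (the MasterRegion above exposes the equivalent scanner through getRegionScanner); the method name is illustrative:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;

static void scanAllRows(HRegion region) throws IOException {
    // RegionScanner extends Closeable, so try-with-resources releases it.
    try (RegionScanner scanner = region.getScanner(new Scan())) {
        List<Cell> cells = new ArrayList<>();
        boolean moreRows;
        do {
            // Fills "cells" with one row's cells; returns true while rows remain.
            moreRows = scanner.next(cells);
            for (Cell cell : cells) {
                // process the current row's cells here
            }
            cells.clear();
        } while (moreRows);
    }
}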

Example 42 with RegionScanner

Use of org.apache.hadoop.hbase.regionserver.RegionScanner in project hbase by apache, in the class TestFilter, method testRegionScannerReseek.

@Test
public void testRegionScannerReseek() throws Exception {
    // Create new rows and column families to show how reseek works.
    for (byte[] ROW : ROWS_THREE) {
        Put p = new Put(ROW);
        p.setDurability(Durability.SKIP_WAL);
        for (byte[] QUALIFIER : QUALIFIERS_THREE) {
            p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
        }
        this.region.put(p);
    }
    for (byte[] ROW : ROWS_FOUR) {
        Put p = new Put(ROW);
        p.setDurability(Durability.SKIP_WAL);
        for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
            p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
        }
        this.region.put(p);
    }
    // Flush
    this.region.flush(true);
    // Insert second half (reverse families)
    for (byte[] ROW : ROWS_THREE) {
        Put p = new Put(ROW);
        p.setDurability(Durability.SKIP_WAL);
        for (byte[] QUALIFIER : QUALIFIERS_THREE) {
            p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
        }
        this.region.put(p);
    }
    for (byte[] ROW : ROWS_FOUR) {
        Put p = new Put(ROW);
        p.setDurability(Durability.SKIP_WAL);
        for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
            p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
        }
        this.region.put(p);
    }
    Scan s = new Scan();
    // set a start row
    s.withStartRow(ROWS_FOUR[1]);
    RegionScanner scanner = region.getScanner(s);
    // reseek to row three.
    scanner.reseek(ROWS_THREE[1]);
    List<Cell> results = new ArrayList<>();
    // the results should belong to ROWS_THREE[1]
    scanner.next(results);
    for (Cell keyValue : results) {
        assertTrue("The rows with ROWS_TWO as row key should be appearing.", CellUtil.matchingRows(keyValue, ROWS_THREE[1]));
    }
    // again try to reseek to a value before ROWS_THREE[1]
    scanner.reseek(ROWS_ONE[1]);
    results = new ArrayList<>();
    // This time no seek would have been done to ROWS_ONE[1]
    scanner.next(results);
    for (Cell keyValue : results) {
        assertFalse("Cannot rewind back to a value less than previous reseek.", Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne"));
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)
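
The behaviour exercised above is that RegionScanner.reseek(byte[] row) only moves the scanner forward in row order: reseeking to a row that sorts before the current position does not rewind it, which is why the second reseek to ROWS_ONE[1] has no effect. A minimal sketch of that contract, where scanner, rowA and rowB are illustrative names and rowA sorts before rowB:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.RegionScanner;

static void forwardOnlyReseek(RegionScanner scanner, byte[] rowA, byte[] rowB) throws IOException {
    List<Cell> cells = new ArrayList<>();
    scanner.reseek(rowB);   // jump forward to rowB (or the first row at/after it)
    scanner.next(cells);    // cells belong to rowB or a later row
    cells.clear();
    scanner.reseek(rowA);   // rowA sorts before the current position, so nothing rewinds
    scanner.next(cells);    // still yields rows at or after the earlier position
}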

Example 43 with RegionScanner

Use of org.apache.hadoop.hbase.regionserver.RegionScanner in project hbase by apache, in the class TestFuzzyRowFilterEndToEnd, method runScanner.

private void runScanner(Table hTable, int expectedSize, Filter filter) throws IOException {
    String cf = "f";
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes(cf));
    scan.setFilter(filter);
    List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(TableName.valueOf(table));
    HRegion first = regions.get(0);
    RegionScanner scanner = first.getScanner(scan);
    List<Cell> results = new ArrayList<>();
    long timeBeforeScan = EnvironmentEdgeManager.currentTime();
    int found = 0;
    while (scanner.next(results)) {
        found += results.size();
        results.clear();
    }
    found += results.size();
    long scanTime = EnvironmentEdgeManager.currentTime() - timeBeforeScan;
    scanner.close();
    LOG.info("\nscan time = " + scanTime + "ms");
    LOG.info("found " + found + " results\n");
    assertEquals(expectedSize, found);
}
Also used : HRegion(org.apache.hadoop.hbase.regionserver.HRegion) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell)

Example 44 with RegionScanner

Use of org.apache.hadoop.hbase.regionserver.RegionScanner in project Gaffer by gchq, in the class GafferCoprocessorTest, method shouldDelegatePostScannerOpenToQueryScanner.

@Test
public void shouldDelegatePostScannerOpenToQueryScanner() throws IOException {
    // Given
    final ObserverContext<RegionCoprocessorEnvironment> e = mock(ObserverContext.class);
    final Scan scan = mock(Scan.class);
    final RegionScanner scanner = mock(RegionScanner.class);
    // When
    final QueryScanner queryScanner = (QueryScanner) coprocessor.postScannerOpen(e, scan, scanner);
    // Then
    assertNotNull(queryScanner);
}
Also used : RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scan(org.apache.hadoop.hbase.client.Scan) QueryScanner(uk.gov.gchq.gaffer.hbasestore.coprocessor.scanner.QueryScanner) Test(org.junit.jupiter.api.Test)
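
The hook being mocked here, RegionObserver.postScannerOpen, is how a coprocessor wraps or replaces the RegionScanner handed back to clients; Gaffer's coprocessor returns its QueryScanner from it. A hedged sketch of the shape of such an observer under the HBase 2.x coprocessor API (this is not Gaffer's implementation; the class name is illustrative and the wrapping logic is left out):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.RegionScanner;

public class ScannerWrappingObserver implements RegionObserver {
    @Override
    public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> ctx,
            final Scan scan, final RegionScanner delegate) throws IOException {
        // A real observer would return a delegating scanner that filters or transforms
        // cells before they reach the client; here the scanner is passed through unchanged.
        return delegate;
    }
}

In HBase 2.x the observer also has to be exposed through a RegionCoprocessor's getRegionObserver() to be loaded on the region server; the test above sidesteps loading entirely by calling the hook directly with mocked arguments.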

Example 45 with RegionScanner

Use of org.apache.hadoop.hbase.regionserver.RegionScanner in project phoenix by apache, in the class TestLocalTableState, method testOnlyLoadsRequestedColumns.

@SuppressWarnings("unchecked")
@Test
public void testOnlyLoadsRequestedColumns() throws Exception {
    // setup mocks
    RegionCoprocessorEnvironment env = Mockito.mock(RegionCoprocessorEnvironment.class);
    Region region = Mockito.mock(Region.class);
    Mockito.when(env.getRegion()).thenReturn(region);
    RegionScanner scanner = Mockito.mock(RegionScanner.class);
    Mockito.when(region.getScanner(Mockito.any(Scan.class))).thenReturn(scanner);
    final KeyValue storedKv = new KeyValue(row, fam, qual, ts, Type.Put, Bytes.toBytes("stored-value"));
    storedKv.setSequenceId(2);
    Mockito.when(scanner.next(Mockito.any(List.class))).thenAnswer(new Answer<Boolean>() {

        @Override
        public Boolean answer(InvocationOnMock invocation) throws Throwable {
            List<KeyValue> list = (List<KeyValue>) invocation.getArguments()[0];
            list.add(storedKv);
            return false;
        }
    });
    LocalHBaseState state = new LocalTable(env);
    Put pendingUpdate = new Put(row);
    pendingUpdate.add(fam, qual, ts, val);
    LocalTableState table = new LocalTableState(env, state, pendingUpdate);
    // do the lookup for the given column
    ColumnReference col = new ColumnReference(fam, qual);
    table.setCurrentTimestamp(ts);
    // check that the value is there
    Pair<Scanner, IndexUpdate> p = table.getIndexedColumnsTableState(Arrays.asList(col), false, false, indexMetaData);
    Scanner s = p.getFirst();
    // make sure it only read the table once
    assertEquals("Didn't get the stored keyvalue!", storedKv, s.next());
    // on the second lookup it shouldn't access the underlying table again - the cached columns
    // should know they are done
    p = table.getIndexedColumnsTableState(Arrays.asList(col), false, false, indexMetaData);
    s = p.getFirst();
    assertEquals("Lost already loaded update!", storedKv, s.next());
    Mockito.verify(env, Mockito.times(1)).getRegion();
    Mockito.verify(region, Mockito.times(1)).getScanner(Mockito.any(Scan.class));
}
Also used : LocalTable(org.apache.phoenix.hbase.index.covered.data.LocalTable) Scanner(org.apache.phoenix.hbase.index.scanner.Scanner) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) KeyValue(org.apache.hadoop.hbase.KeyValue) Put(org.apache.hadoop.hbase.client.Put) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) LocalHBaseState(org.apache.phoenix.hbase.index.covered.data.LocalHBaseState) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan) List(java.util.List) ColumnReference(org.apache.phoenix.hbase.index.covered.update.ColumnReference) Test(org.junit.Test)

Aggregations

RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner): 97
Scan (org.apache.hadoop.hbase.client.Scan): 75
Cell (org.apache.hadoop.hbase.Cell): 59
ArrayList (java.util.ArrayList): 35
Test (org.junit.Test): 35
Put (org.apache.hadoop.hbase.client.Put): 33
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 25
Region (org.apache.hadoop.hbase.regionserver.Region): 20
List (java.util.List): 18
TableId (co.cask.cdap.data2.util.TableId): 17
IOException (java.io.IOException): 14
Delete (org.apache.hadoop.hbase.client.Delete): 14
RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment): 12
ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr): 12
KeyValue (org.apache.hadoop.hbase.KeyValue): 11
Configuration (org.apache.hadoop.conf.Configuration): 9
ColumnReference (org.apache.phoenix.hbase.index.covered.update.ColumnReference): 9
PMetaDataEntity (org.apache.phoenix.schema.PMetaDataEntity): 9
InvocationOnMock (org.mockito.invocation.InvocationOnMock): 8
Result (org.apache.hadoop.hbase.client.Result): 6