Search in sources :

Example 6 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project phoenix by apache.

The class TestNonTxIndexBuilder, method setup.

/**
 * Prepares the mocks required for calling
 * {@link NonTxIndexBuilder#getIndexUpdate(Mutation, IndexMetaData)}: any read issued through
 * {@link LocalTable#getCurrentRowState(Mutation, Collection, boolean)} is answered from the
 * test field 'currentRowCells'.
 */
@Before
public void setup() throws Exception {
    // Coprocessor environment carrying a minimal Configuration with our codec wired in.
    RegionCoprocessorEnvironment mockEnv = Mockito.mock(RegionCoprocessorEnvironment.class);
    Configuration testConf = new Configuration(false);
    testConf.set(NonTxIndexBuilder.CODEC_CLASS_NAME_KEY, PhoenixIndexCodec.class.getName());
    Mockito.when(mockEnv.getConfiguration()).thenReturn(testConf);
    // LocalTable#getCurrentRowState() opens a scanner on the region; route every scan
    // through getMockTimeRangeRegionScanner() so reads come from 'currentRowCells'.
    Region region = Mockito.mock(Region.class);
    Mockito.when(mockEnv.getRegion()).thenReturn(region);
    Mockito.when(region.getScanner(Mockito.any(Scan.class))).thenAnswer(new Answer<RegionScanner>() {

        @Override
        public RegionScanner answer(InvocationOnMock invocation) throws Throwable {
            TimeRange requestedRange = ((Scan) invocation.getArguments()[0]).getTimeRange();
            return getMockTimeRangeRegionScanner(requestedRange);
        }
    });
    // PhoenixIndexCodec#getIndexUpserts()/getIndexDeletes() read the region boundaries.
    HRegionInfo regionInfo = Mockito.mock(HRegionInfo.class);
    Mockito.when(region.getRegionInfo()).thenReturn(regionInfo);
    Mockito.when(regionInfo.getStartKey()).thenReturn(Bytes.toBytes("a"));
    Mockito.when(regionInfo.getEndKey()).thenReturn(Bytes.toBytes("z"));
    // Mutable-row index metadata backed by the maintainer built for this test.
    mockIndexMetaData = Mockito.mock(PhoenixIndexMetaData.class);
    Mockito.when(mockIndexMetaData.isImmutableRows()).thenReturn(false);
    Mockito.when(mockIndexMetaData.getIndexMaintainers()).thenReturn(Collections.singletonList(getTestIndexMaintainer()));
    // Builder under test, initialized against the mocked environment.
    indexBuilder = new NonTxIndexBuilder();
    indexBuilder.setup(mockEnv);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TimeRange(org.apache.hadoop.hbase.io.TimeRange) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) PhoenixIndexMetaData(org.apache.phoenix.index.PhoenixIndexMetaData) BaseRegionScanner(org.apache.phoenix.coprocessor.BaseRegionScanner) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) PhoenixIndexCodec(org.apache.phoenix.index.PhoenixIndexCodec) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan) Before(org.junit.Before)

Example 7 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project ranger by apache.

The class RangerAuthorizationCoprocessor, method preScannerOpen.

/**
 * Delegates preScannerOpen to the real observer implementation, running it under the
 * plugin class loader (activated before the call, restored in a finally block).
 */
@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Scan scan, RegionScanner s) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAuthorizationCoprocessor.preScannerOpen()");
    }
    final RegionScanner delegateScanner;
    try {
        activatePluginClassLoader();
        delegateScanner = implRegionObserver.preScannerOpen(c, scan, s);
    } finally {
        // Always restore the previous class loader, even if the delegate throws.
        deactivatePluginClassLoader();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAuthorizationCoprocessor.preScannerOpen()");
    }
    return delegateScanner;
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner)

Example 8 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project ranger by apache.

The class RangerAuthorizationCoprocessor, method postScannerOpen.

/**
 * Delegates postScannerOpen to the real observer implementation, running it under the
 * plugin class loader (activated before the call, restored in a finally block).
 */
@Override
public RegionScanner postScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Scan scan, RegionScanner s) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerAuthorizationCoprocessor.postScannerOpen()");
    }
    final RegionScanner delegateScanner;
    try {
        activatePluginClassLoader();
        delegateScanner = implRegionObserver.postScannerOpen(c, scan, s);
    } finally {
        // Always restore the previous class loader, even if the delegate throws.
        deactivatePluginClassLoader();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerAuthorizationCoprocessor.postScannerOpen()");
    }
    return delegateScanner;
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner)

Example 9 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

The class IncrementHandler, method preGetOp.

/**
 * Serves a Get by scanning all versions of the row through an IncrementSummingScanner,
 * so stored increment deltas are folded into a single value, then bypasses the
 * default Get handling.
 */
@Override
public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> ctx, Get get, List<Cell> results) throws IOException {
    // Translate the Get into a scan over every version, layering the increment
    // filter on top of any filter the caller supplied.
    Scan versionedScan = new Scan(get);
    versionedScan.setMaxVersions();
    versionedScan.setFilter(Filters.combine(new IncrementFilter(), versionedScan.getFilter()));
    RegionScanner summingScanner = null;
    try {
        summingScanner = new IncrementSummingScanner(region, versionedScan.getBatch(), region.getScanner(versionedScan), ScanType.USER_SCAN);
        summingScanner.next(results);
        // 'results' now holds the summed row; skip the normal Get path.
        ctx.bypass();
    } finally {
        if (summingScanner != null) {
            summingScanner.close();
        }
    }
}
Also used : RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scan(org.apache.hadoop.hbase.client.Scan)

Example 10 with RegionScanner

use of org.apache.hadoop.hbase.regionserver.RegionScanner in project cdap by caskdata.

The class IncrementSummingScannerTest, method testMultiColumnFlushAndCompact.

/**
 * Verifies that a flush-style scan followed by a user-level scan correctly sums
 * increment deltas across multiple columns (row1:c, row1:c2) and rows (row2:c),
 * including cells that carry a plain base value put before the increments.
 */
@Test
public void testMultiColumnFlushAndCompact() throws Exception {
    TableId tableId = TableId.from(NamespaceId.DEFAULT.getNamespace(), "testMultiColumnFlushAndCompact");
    byte[] familyBytes = Bytes.toBytes("f");
    byte[] columnBytes = Bytes.toBytes("c");
    byte[] columnBytes2 = Bytes.toBytes("c2");
    HRegion region = createRegion(tableId, familyBytes);
    try {
        region.initialize();
        long now = 1;
        byte[] row1 = Bytes.toBytes("row1");
        byte[] row2 = Bytes.toBytes("row2");
        // Initial put to row1,c2: base value 5 at timestamp now-1 (before any increments).
        Put row1P = new Put(row1);
        row1P.add(familyBytes, columnBytes2, now - 1, Bytes.toBytes(5L));
        region.put(row1P);
        // Initial put to row2,c: base value 10 at timestamp now-1.
        Put row2P = new Put(row2);
        row2P.add(familyBytes, columnBytes, now - 1, Bytes.toBytes(10L));
        region.put(row2P);
        // Generate some increments: 50 per cell at timestamps now .. now+49 —
        // presumably a delta of 1 each (see generateIncrementPut); TODO confirm,
        // the expected sums below (50/55/60) rely on it.
        long ts = now;
        for (int i = 0; i < 50; i++) {
            region.put(generateIncrementPut(familyBytes, columnBytes, row1, ts));
            region.put(generateIncrementPut(familyBytes, columnBytes, row2, ts));
            region.put(generateIncrementPut(familyBytes, columnBytes2, row1, ts));
            ts++;
        }
        // First scanner represents flush scanner; COMPACT_RETAIN_DELETES with
        // compaction upper bound now+15 mimics a flush partway through the data.
        RegionScanner scanner = new IncrementSummingScanner(region, -1, region.getScanner(new Scan().setMaxVersions()), ScanType.COMPACT_RETAIN_DELETES, now + 15, -1);
        // Second scanner is a user scan, this is to help in easy asserts
        scanner = new IncrementSummingScanner(region, -1, scanner, ScanType.USER_SCAN);
        List<Cell> results = Lists.newArrayList();
        // next() == true: more rows remain after row1; row1 yields its two columns.
        assertTrue(scanner.next(results, 10));
        assertEquals(2, results.size());
        // row1,c: increments only, no base put -> sum of 50 deltas.
        Cell cell = results.get(0);
        assertNotNull(cell);
        assertEquals("row1", Bytes.toString(cell.getRow()));
        assertEquals("c", Bytes.toString(cell.getQualifier()));
        assertEquals(50, Bytes.toLong(cell.getValue()));
        // row1,c2: base 5 plus 50 deltas -> 55.
        cell = results.get(1);
        assertNotNull(cell);
        assertEquals("row1", Bytes.toString(cell.getRow()));
        assertEquals("c2", Bytes.toString(cell.getQualifier()));
        assertEquals(55, Bytes.toLong(cell.getValue()));
        results.clear();
        // next() == false: row2 is the final row; row2,c: base 10 plus 50 deltas -> 60.
        assertFalse(scanner.next(results, 10));
        assertEquals(1, results.size());
        cell = results.get(0);
        assertNotNull(cell);
        assertEquals("row2", Bytes.toString(cell.getRow()));
        assertEquals(60, Bytes.toLong(cell.getValue()));
    } finally {
        region.close();
    }
}
Also used : TableId(co.cask.cdap.data2.util.TableId) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Put(org.apache.hadoop.hbase.client.Put) HBase10CDHTest(co.cask.cdap.data.hbase.HBase10CDHTest) Test(org.junit.Test)

Aggregations

RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)97 Scan (org.apache.hadoop.hbase.client.Scan)75 Cell (org.apache.hadoop.hbase.Cell)59 ArrayList (java.util.ArrayList)35 Test (org.junit.Test)35 Put (org.apache.hadoop.hbase.client.Put)33 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)25 Region (org.apache.hadoop.hbase.regionserver.Region)20 List (java.util.List)18 TableId (co.cask.cdap.data2.util.TableId)17 IOException (java.io.IOException)14 Delete (org.apache.hadoop.hbase.client.Delete)14 RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment)12 ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr)12 KeyValue (org.apache.hadoop.hbase.KeyValue)11 Configuration (org.apache.hadoop.conf.Configuration)9 ColumnReference (org.apache.phoenix.hbase.index.covered.update.ColumnReference)9 PMetaDataEntity (org.apache.phoenix.schema.PMetaDataEntity)9 InvocationOnMock (org.mockito.invocation.InvocationOnMock)8 Result (org.apache.hadoop.hbase.client.Result)6