
Example 6 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class ZooKeeperScanPolicyObserver, the method preCompactScannerOpen:

@Override
public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
        Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
        InternalScanner s) throws IOException {
    ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
    if (scanInfo == null) {
        // Take the default action: returning null tells HBase to open its own
        // scanner with the store's default ScanInfo.
        return null;
    }
    Scan scan = new Scan();
    scan.setMaxVersions(scanInfo.getMaxVersions());
    // Open the compaction scanner with the ZooKeeper-derived ScanInfo instead of the default.
    return new StoreScanner(store, scanInfo, scan, scanners, scanType,
        store.getSmallestReadPoint(), earliestPutTs);
}
Also used: ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), Scan (org.apache.hadoop.hbase.client.Scan), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
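
An observer like this only runs once it is registered on a table or region server. A minimal, hypothetical registration sketch using the standard admin API; the table name "my_table" and the connection setup are placeholders, not part of the original example:

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class RegisterObserver {
    public static void main(String[] args) throws IOException {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            TableName table = TableName.valueOf("my_table"); // placeholder table name
            HTableDescriptor desc = admin.getTableDescriptor(table);
            // The observer class must be on the region server's classpath.
            desc.addCoprocessor("org.apache.hadoop.hbase.coprocessor.example.ZooKeeperScanPolicyObserver");
            admin.modifyTable(table, desc);
        }
    }
}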

Example 7 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class ZooKeeperScanPolicyObserver, the method getScanInfo:

protected ScanInfo getScanInfo(Store store, RegionCoprocessorEnvironment e) {
    byte[] data = ((ZKWatcher) e.getSharedData().get(zkkey)).getData();
    if (data == null) {
        return null;
    }
    ScanInfo oldSI = store.getScanInfo();
    if (oldSI.getTtl() == Long.MAX_VALUE) {
        // The column family has no TTL ("forever"), so there is nothing to adjust.
        return null;
    }
    // The znode holds a cut-off timestamp: the effective TTL is the age of that
    // cut-off, but never shorter than the TTL configured on the column family.
    long ttl = Math.max(EnvironmentEdgeManager.currentTime() - Bytes.toLong(data), oldSI.getTtl());
    return new ScanInfo(oldSI.getConfiguration(), store.getFamily(), ttl,
        oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
}
Also used: ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo)
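
In the full example class, the same helper can also back the flush hook. A sketch of that companion override, assuming the getScanInfo method above and the same RegionObserver API as Example 6:

@Override
public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
        Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
    ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
    if (scanInfo == null) {
        // Take the default action.
        return null;
    }
    Scan scan = new Scan();
    scan.setMaxVersions(scanInfo.getMaxVersions());
    // A flush must retain delete markers; only a major compaction may drop them.
    return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
        ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
}

(Collections, HConstants, and KeyValueScanner come from java.util, org.apache.hadoop.hbase, and org.apache.hadoop.hbase.regionserver, respectively.)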

Example 8 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestDateTieredCompactor, the method createCompactor:

private DateTieredCompactor createCompactor(StoreFileWritersCapture writers, final KeyValue[] input,
        List<StoreFile> storefiles) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner(input);
    // Create a store mock that is satisfactory for the compactor.
    HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
    ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
    final Store store = mock(Store.class);
    when(store.getStorefiles()).thenReturn(storefiles);
    when(store.getFamily()).thenReturn(col);
    when(store.getScanInfo()).thenReturn(si);
    when(store.areWritesEnabled()).thenReturn(true);
    when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
    when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(),
        anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
    when(store.getComparator()).thenReturn(CellComparator.COMPARATOR);
    long maxSequenceId = StoreFile.getMaxSequenceIdInList(storefiles);
    when(store.getMaxSequenceId()).thenReturn(maxSequenceId);
    // Both createScanner overrides return the canned scanner over the input KeyValues,
    // so the test exercises only the compactor's own partitioning logic.
    return new DateTieredCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
                long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
                byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
                ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used: HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner), StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner), ScanType (org.apache.hadoop.hbase.regionserver.ScanType), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), FileSystem (org.apache.hadoop.fs.FileSystem), Store (org.apache.hadoop.hbase.regionserver.Store), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), ArrayList (java.util.ArrayList), List (java.util.List)
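
A sketch of how a test might drive this helper end to end. The boundary values and the compact(...) overload taking a List<Long> of lower tier boundaries are assumptions about the date-tiered compactor's API; input, storefiles, and StoreFileWritersCapture are fixtures presumed to exist in the test class:

// Hypothetical driver for the mocked compactor.
StoreFileWritersCapture writers = new StoreFileWritersCapture();
DateTieredCompactor dtc = createCompactor(writers, input, storefiles);
List<Path> paths = dtc.compact(new CompactionRequest(storefiles),
    Arrays.asList(Long.MIN_VALUE, 100L), // assumed tier boundaries
    NoLimitThroughputController.INSTANCE, null);
// writers now holds the captured output writers for assertions on the produced files.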

Example 9 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestUserScanQueryMatcher, the method testMatch_ExpiredWildcard:

/**
   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue instances and does not exit
   * early from the row (skipping later non-expired KeyValues). This version mimics a Get with
   * wildcard-inferred column qualifiers.
   * @throws IOException
   */
@Test
public void testMatch_ExpiredWildcard() throws IOException {
    long testTTL = 1000;
    MatchCode[] expected = new MatchCode[] {
        ScanQueryMatcher.MatchCode.INCLUDE,       // col1: within TTL
        ScanQueryMatcher.MatchCode.INCLUDE,       // col2: within TTL
        ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, // col3: expired, skip to next column
        ScanQueryMatcher.MatchCode.INCLUDE,       // col4: within TTL
        ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, // col5: expired, skip to next column
        ScanQueryMatcher.MatchCode.DONE           // row2: new row ends the Get
    };
    long now = EnvironmentEdgeManager.currentTime();
    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan,
        new ScanInfo(this.conf, fam2, 0, 1, testTTL, KeepDeletedCells.FALSE, 0, rowComparator),
        null, now - testTTL, now, null);
    KeyValue[] kvs = new KeyValue[] {
        new KeyValue(row1, fam2, col1, now - 100, data),   // within TTL
        new KeyValue(row1, fam2, col2, now - 50, data),    // within TTL
        new KeyValue(row1, fam2, col3, now - 5000, data),  // expired (TTL is 1000 ms)
        new KeyValue(row1, fam2, col4, now - 500, data),   // within TTL
        new KeyValue(row1, fam2, col5, now - 10000, data), // expired
        new KeyValue(row2, fam1, col1, now - 10, data)     // next row
    };
    KeyValue k = kvs[0];
    qm.setToNewRow(k);
    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<>(kvs.length);
    for (KeyValue kv : kvs) {
        actual.add(qm.match(kv));
    }
    assertEquals(expected.length, actual.size());
    for (int i = 0; i < expected.length; i++) {
        LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
        assertEquals(expected[i], actual.get(i));
    }
}
Also used: MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), Test (org.junit.Test)
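
For reference, the ScanInfo constructor call used above, with each positional argument labeled; conf, fam2, testTTL, and rowComparator are fixtures defined elsewhere in the test class:

ScanInfo info = new ScanInfo(
    conf,                   // Configuration
    fam2,                   // column family name (byte[])
    0,                      // minVersions: no floor of versions kept past the TTL
    1,                      // maxVersions: at most one version per column
    testTTL,                // TTL in milliseconds (1000 in this test)
    KeepDeletedCells.FALSE, // deleted cells are not retained
    0,                      // timeToPurgeDeletes in milliseconds
    rowComparator);         // CellComparator used to order cells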

Example 10 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestCompactionScanQueryMatcher, the method testDropDeletes:

private void testDropDeletes(byte[] from, byte[] to, byte[][] rows, MatchCode... expected) throws IOException {
    long now = EnvironmentEdgeManager.currentTime();
    // Set time to purge deletes to negative value to avoid it ever happening.
    ScanInfo scanInfo = new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE,
        -1L, rowComparator);
    CompactionScanQueryMatcher qm = CompactionScanQueryMatcher.create(scanInfo,
        ScanType.COMPACT_RETAIN_DELETES, Long.MAX_VALUE, HConstants.OLDEST_TIMESTAMP,
        HConstants.OLDEST_TIMESTAMP, now, from, to, null);
    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<>(rows.length);
    byte[] prevRow = null;
    for (byte[] row : rows) {
        if (prevRow == null || !Bytes.equals(prevRow, row)) {
            qm.setToNewRow(KeyValueUtil.createFirstOnRow(row));
            prevRow = row;
        }
        actual.add(qm.match(new KeyValue(row, fam2, null, now, Type.Delete)));
    }
    assertEquals(expected.length, actual.size());
    for (int i = 0; i < expected.length; i++) {
        LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
        assertEquals(expected[i], actual.get(i));
    }
}
Also used: MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo)
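
A sketch of how this helper might be invoked. With a drop-deletes range of [row2, row3), the delete marker on row2 is dropped during compaction while the markers on row1 and row3 are retained; the row fixtures are assumed to be defined in the test class:

// Delete markers inside [row2, row3) are dropped (SKIP); outside the range they
// are kept (INCLUDE), because the scan type is COMPACT_RETAIN_DELETES.
testDropDeletes(row2, row3, new byte[][] { row1, row2, row3 },
    MatchCode.INCLUDE, MatchCode.SKIP, MatchCode.INCLUDE);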

Aggregations

Types co-occurring with ScanInfo across all examples, with usage counts:

ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo): 12
ArrayList (java.util.ArrayList): 8
KeyValue (org.apache.hadoop.hbase.KeyValue): 6
MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode): 6
Test (org.junit.Test): 5
List (java.util.List): 3
Scan (org.apache.hadoop.hbase.client.Scan): 3
StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner): 3
Configuration (org.apache.hadoop.conf.Configuration): 2
FileSystem (org.apache.hadoop.fs.FileSystem): 2
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 2
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 2
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 2
InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner): 2
ScanType (org.apache.hadoop.hbase.regionserver.ScanType): 2
Store (org.apache.hadoop.hbase.regionserver.Store): 2
StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner): 2
Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner): 2
Cell (org.apache.hadoop.hbase.Cell): 1