
Example 1 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class ZooKeeperScanPolicyObserver, method preFlushScannerOpen:

@Override
public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c, Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
    ScanInfo scanInfo = getScanInfo(store, c.getEnvironment());
    if (scanInfo == null) {
        // No custom scan info is available; take the default action.
        return null;
    }
    Scan scan = new Scan();
    scan.setMaxVersions(scanInfo.getMaxVersions());
    // Scan only the memstore, retaining delete markers so that later
    // compactions can still honor them.
    return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner), ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
}
Also used: ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), Scan (org.apache.hadoop.hbase.client.Scan), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
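For context, the getScanInfo helper invoked above is not shown on this page; it is where the observer decides whether a custom policy applies. A minimal sketch of such a helper, assuming a hypothetical readExpireBefore method that fetches a serialized expiry timestamp from ZooKeeper (an illustration only, not the actual ZooKeeperScanPolicyObserver code):

private ScanInfo getScanInfo(Store store, RegionCoprocessorEnvironment env) {
    // Hypothetical helper: returns the serialized "expire before" timestamp,
    // or null when no policy has been published.
    byte[] data = readExpireBefore(env);
    if (data == null) {
        // No custom policy; the caller then returns null and HBase takes the default action.
        return null;
    }
    // Convert the absolute cutoff into a TTL relative to the current time.
    long ttl = Math.max(EnvironmentEdgeManager.currentTime() - Bytes.toLong(data), 0);
    // Mirrors the five-argument ScanInfo constructor used in Example 2.
    return new ScanInfo(env.getConfiguration(), store.getFamily(), ttl, 0, CellComparator.COMPARATOR);
}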

Example 2 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class PartitionedMobCompactor, method createScanner:

/**
   * Creates a store scanner.
   * @param filesToCompact The files to be compacted.
   * @param scanType The scan type.
   * @return The store scanner.
   * @throws IOException if an IO failure is encountered
   */
private StoreScanner createScanner(List<StoreFile> filesToCompact, ScanType scanType) throws IOException {
    // Open one scanner per mob file selected for compaction.
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact, false, true, false, false, HConstants.LATEST_TIMESTAMP);
    Scan scan = new Scan();
    scan.setMaxVersions(column.getMaxVersions());
    long ttl = HStore.determineTTLFromFamily(column);
    ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparator.COMPARATOR);
    return new StoreScanner(scan, scanInfo, scanType, null, scanners, 0L, HConstants.LATEST_TIMESTAMP);
}
Also used: List (java.util.List), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
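Two ScanInfo constructor shapes appear on this page: the five-argument form above passes the HColumnDescriptor, from which ScanInfo presumably derives the remaining settings such as version bounds and KeepDeletedCells, while Examples 3 and 4 below use the expanded form that spells those values out. For comparison, a hypothetical explicit construction in the expanded style (the family name and minVersions value here are made up):

// Arguments: conf, family name, minVersions, maxVersions, ttl,
// keepDeletedCells, timeToPurgeDeletes, comparator.
ScanInfo scanInfo = new ScanInfo(conf, Bytes.toBytes("cf"), 0, column.getMaxVersions(), ttl, KeepDeletedCells.FALSE, 0, CellComparator.COMPARATOR);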

Example 3 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestUserScanQueryMatcher, method testMatch_Wildcard:

@Test
public void testMatch_Wildcard() throws IOException {
    // Moving up from the Tracker by using Gets and List<KeyValue> instead
    // of just byte []
    // Expected result
    List<MatchCode> expected = new ArrayList<>(6);
    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
    expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
    expected.add(ScanQueryMatcher.MatchCode.DONE);
    long now = EnvironmentEdgeManager.currentTime();
    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan, new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE, 0, rowComparator), null, now - ttl, now, null);
    List<KeyValue> memstore = new ArrayList<>(6);
    memstore.add(new KeyValue(row1, fam2, col1, 1, data));
    memstore.add(new KeyValue(row1, fam2, col2, 1, data));
    memstore.add(new KeyValue(row1, fam2, col3, 1, data));
    memstore.add(new KeyValue(row1, fam2, col4, 1, data));
    memstore.add(new KeyValue(row1, fam2, col5, 1, data));
    memstore.add(new KeyValue(row2, fam1, col1, 1, data));
    List<ScanQueryMatcher.MatchCode> actual = new ArrayList<>(memstore.size());
    KeyValue k = memstore.get(0);
    qm.setToNewRow(k);
    for (KeyValue kv : memstore) {
        actual.add(qm.match(kv));
    }
    assertEquals(expected.size(), actual.size());
    for (int i = 0; i < expected.size(); i++) {
        LOG.debug("expected " + expected.get(i) + ", actual " + actual.get(i));
        assertEquals(expected.get(i), actual.get(i));
    }
}
Also used: MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), Test (org.junit.Test)
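Since the matcher was created without an explicit column set (a wildcard match), every cell of row1 is included. The first cell of row2 then falls outside the row the matcher was positioned on via setToNewRow, which is why the expected sequence ends with DONE rather than another INCLUDE.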

Example 4 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestUserScanQueryMatcher, method testMatch_ExpiredExplicit:

/**
   * Verify that {@link ScanQueryMatcher} only skips expired KeyValue instances and does not exit
   * early from the row (skipping later non-expired KeyValues). This version mimics a Get with
   * explicitly specified column qualifiers.
   * @throws IOException if an IO failure is encountered
   */
@Test
public void testMatch_ExpiredExplicit() throws IOException {
    long testTTL = 1000;
    MatchCode[] expected = new MatchCode[] { ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW, ScanQueryMatcher.MatchCode.DONE };
    long now = EnvironmentEdgeManager.currentTime();
    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan, new ScanInfo(this.conf, fam2, 0, 1, testTTL, KeepDeletedCells.FALSE, 0, rowComparator), get.getFamilyMap().get(fam2), now - testTTL, now, null);
    KeyValue[] kvs = new KeyValue[] { new KeyValue(row1, fam2, col1, now - 100, data), new KeyValue(row1, fam2, col2, now - 50, data), new KeyValue(row1, fam2, col3, now - 5000, data), new KeyValue(row1, fam2, col4, now - 500, data), new KeyValue(row1, fam2, col5, now - 10000, data), new KeyValue(row2, fam1, col1, now - 10, data) };
    KeyValue k = kvs[0];
    qm.setToNewRow(k);
    List<MatchCode> actual = new ArrayList<>(kvs.length);
    for (KeyValue kv : kvs) {
        actual.add(qm.match(kv));
    }
    assertEquals(expected.length, actual.size());
    for (int i = 0; i < expected.length; i++) {
        LOG.debug("expected " + expected[i] + ", actual " + actual.get(i));
        assertEquals(expected[i], actual.get(i));
    }
}
Also used: MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode), KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), Test (org.junit.Test)
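The key inputs here are oldestUnexpiredTs, passed as now - testTTL, and the cell timestamps: with testTTL at 1000 ms, col3 (now - 5000) and col5 (now - 10000) are already expired while the other cells are fresh. The expected codes capture the point of the test: an expired cell is skipped with a seek code (SEEK_NEXT_COL, or SEEK_NEXT_ROW once nothing further is wanted from the row) instead of ending the row early, so non-expired cells that come after an expired one are still matched.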

Example 5 with ScanInfo

Use of org.apache.hadoop.hbase.regionserver.ScanInfo in project hbase by apache.

From the class TestStripeCompactor, method createCompactor:

private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner(input);
    // Create a Store mock that is sufficient for the compactor.
    HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS);
    ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
    Store store = mock(Store.class);
    when(store.getFamily()).thenReturn(col);
    when(store.getScanInfo()).thenReturn(si);
    when(store.areWritesEnabled()).thenReturn(true);
    when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
    when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
    when(store.getComparator()).thenReturn(CellComparator.COMPARATOR);
    return new StripeCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used: HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner), StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner), ScanType (org.apache.hadoop.hbase.regionserver.ScanType), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), FileSystem (org.apache.hadoop.fs.FileSystem), Store (org.apache.hadoop.hbase.regionserver.Store), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), ArrayList (java.util.ArrayList), List (java.util.List)
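The interesting part is the anonymous subclass at the end: both createScanner overrides return the canned Scanner built from the input KeyValues, so the compactor under test consumes in-memory cells instead of opening real store files, while the mocked Store supplies the configuration, family descriptor, ScanInfo, and the writers via createWriterInTmp.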

Aggregations

ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo): 12 uses
ArrayList (java.util.ArrayList): 8 uses
KeyValue (org.apache.hadoop.hbase.KeyValue): 6 uses
MatchCode (org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode): 6 uses
Test (org.junit.Test): 5 uses
List (java.util.List): 3 uses
Scan (org.apache.hadoop.hbase.client.Scan): 3 uses
StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner): 3 uses
Configuration (org.apache.hadoop.conf.Configuration): 2 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 2 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 2 uses
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 2 uses
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 2 uses
InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner): 2 uses
ScanType (org.apache.hadoop.hbase.regionserver.ScanType): 2 uses
Store (org.apache.hadoop.hbase.regionserver.Store): 2 uses
StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner): 2 uses
Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner): 2 uses
Cell (org.apache.hadoop.hbase.Cell): 1 use