Example 11 with StoreScanner

Use of org.apache.hadoop.hbase.regionserver.StoreScanner in project cdap by caskdata.

From the class PayloadTableRegionObserver, method preFlushScannerOpen:

@Override
public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
    LOG.info("preFlush, filter using PayloadDataFilter");
    Scan scan = new Scan();
    scan.setFilter(new PayloadDataFilter(c.getEnvironment(), System.currentTimeMillis(), prefixLength, topicMetadataCache));
    return new StoreScanner(store, store.getScanInfo(), scan, Collections.singletonList(memstoreScanner), ScanType.COMPACT_DROP_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
}
Also used: Scan (org.apache.hadoop.hbase.client.Scan), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
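
For context, a hook like the one above only runs once the observer is installed on the table. The snippet below is a minimal sketch, not taken from the CDAP source, of how a RegionObserver coprocessor can be registered on an HBase 1.x table through the Admin API; the table name and the coprocessor class name are placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class RegisterObserverSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
            // Placeholder table name; the real tables are created and managed by CDAP itself.
            TableName tableName = TableName.valueOf("payload_table");
            HTableDescriptor descriptor = admin.getTableDescriptor(tableName);
            // Placeholder class name; the observer class must be on the region server classpath.
            descriptor.addCoprocessor("com.example.PayloadTableRegionObserver");
            admin.modifyTable(tableName, descriptor);
        }
    }
}

Once the table is modified, the region server invokes preFlushScannerOpen and preCompactScannerOpen automatically on every flush and compaction of that table's regions.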

Example 12 with StoreScanner

Use of org.apache.hadoop.hbase.regionserver.StoreScanner in project cdap by caskdata.

From the class PayloadTableRegionObserver, method preCompactScannerOpen:

@Override
public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs, InternalScanner s, CompactionRequest request) throws IOException {
    LOG.info("preCompact, filter using PayloadDataFilter");
    Scan scan = new Scan();
    scan.setFilter(new PayloadDataFilter(c.getEnvironment(), System.currentTimeMillis(), prefixLength, topicMetadataCache));
    return new StoreScanner(store, store.getScanInfo(), scan, scanners, scanType, store.getSmallestReadPoint(), earliestPutTs);
}
Also used: Scan (org.apache.hadoop.hbase.client.Scan), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
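
The filter installed by these hooks does the actual pruning during flushes and compactions. PayloadDataFilter itself is not shown on this page; as a rough illustration only, the sketch below shows the general shape such a filter can take in HBase 1.x, dropping cells older than a fixed retention period. The class name, constructor parameters, and the fixed-retention logic are assumptions; the real filter consults the coprocessor environment and the topic metadata cache instead.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.filter.FilterBase;

// Hypothetical sketch, not the CDAP implementation: a flush/compaction-time filter
// that drops cells older than a fixed retention period.
public class DataRetentionFilterSketch extends FilterBase {

    private final long currentTimeMillis; // "now", as passed in by the observer
    private final long retentionMillis;   // assumed fixed retention period

    public DataRetentionFilterSketch(long currentTimeMillis, long retentionMillis) {
        this.currentTimeMillis = currentTimeMillis;
        this.retentionMillis = retentionMillis;
    }

    @Override
    public ReturnCode filterKeyValue(Cell cell) throws IOException {
        // Skip (i.e. do not rewrite) cells whose timestamp falls outside the retention window.
        if (currentTimeMillis - cell.getTimestamp() > retentionMillis) {
            return ReturnCode.SKIP;
        }
        return ReturnCode.INCLUDE;
    }
}

Because the filter is instantiated directly inside the coprocessor and handed to the StoreScanner via the Scan, it never crosses the wire, so no filter serialization support is needed.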

Example 13 with StoreScanner

Use of org.apache.hadoop.hbase.regionserver.StoreScanner in project cdap by caskdata.

From the class MessageTableRegionObserver, method preFlushScannerOpen:

@Override
public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
    LOG.info("preFlush, filter using MessageDataFilter");
    TransactionVisibilityState txVisibilityState = txStateCache.getLatestState();
    Scan scan = new Scan();
    scan.setFilter(new MessageDataFilter(c.getEnvironment(), System.currentTimeMillis(), prefixLength, topicMetadataCache, txVisibilityState));
    return new LoggingInternalScanner("MessageDataFilter", "preFlush", new StoreScanner(store, store.getScanInfo(), scan, Collections.singletonList(memstoreScanner), ScanType.COMPACT_DROP_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP), txVisibilityState);
}
Also used: TransactionVisibilityState (org.apache.tephra.persist.TransactionVisibilityState), Scan (org.apache.hadoop.hbase.client.Scan), StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner)
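
Here the StoreScanner is additionally wrapped in a LoggingInternalScanner so that the operation and the transaction visibility snapshot in use can be traced. Its source is not shown on this page; the sketch below is a hypothetical delegating InternalScanner in the same spirit, assuming the HBase 1.1+ InternalScanner interface (next with and without ScannerContext, plus close) and slf4j logging.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.tephra.persist.TransactionVisibilityState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch in the spirit of LoggingInternalScanner: forwards every call
// to the wrapped scanner and records which filter and operation produced it,
// along with the transaction visibility snapshot that was in effect.
public class LoggingInternalScannerSketch implements InternalScanner {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingInternalScannerSketch.class);

    private final String filterName;
    private final String operation;
    private final InternalScanner delegate;

    public LoggingInternalScannerSketch(String filterName, String operation, InternalScanner delegate,
                                        TransactionVisibilityState txVisibilityState) {
        this.filterName = filterName;
        this.operation = operation;
        this.delegate = delegate;
        if (txVisibilityState != null) {
            LOG.info("{} during {}, using tx visibility snapshot: {}", filterName, operation, txVisibilityState);
        }
    }

    @Override
    public boolean next(List<Cell> results) throws IOException {
        return delegate.next(results);
    }

    @Override
    public boolean next(List<Cell> results, ScannerContext scannerContext) throws IOException {
        return delegate.next(results, scannerContext);
    }

    @Override
    public void close() throws IOException {
        LOG.info("Closing {} scanner for {}", operation, filterName);
        delegate.close();
    }
}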

Aggregations

StoreScanner (org.apache.hadoop.hbase.regionserver.StoreScanner) 36
Scan (org.apache.hadoop.hbase.client.Scan) 33
TransactionVisibilityState (org.apache.tephra.persist.TransactionVisibilityState) 14
ArrayList (java.util.ArrayList) 3
ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo) 3
IOException (java.io.IOException) 2
Path (org.apache.hadoop.fs.Path) 2
Cell (org.apache.hadoop.hbase.Cell) 2
ScannerContext (org.apache.hadoop.hbase.regionserver.ScannerContext) 2
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter) 2
Date (java.util.Date) 1
List (java.util.List) 1
WriterFactory (org.apache.hadoop.hbase.regionserver.AbstractMultiFileWriter.WriterFactory) 1
StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile) 1