Search in sources :

Example 1 with Scanner

Usage of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner in the Apache HBase project.

From the class TestStripeCompactor, method createCompactor:

/**
 * Builds a {@link StripeCompactor} over a mocked {@link HStore} that is wired just
 * well enough for compaction to run: created writers are routed to {@code writers}
 * and the compaction scan is replaced by a canned scanner over {@code input}.
 *
 * @param writers capture that answers writer-creation calls and records the writers
 * @param input   the KeyValues the stubbed compaction scanner will emit
 * @return a StripeCompactor whose scanner-creation hooks return the canned scanner
 * @throws Exception if mock or configuration setup fails
 */
private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    // Canned scanner; must be (effectively) final so the anonymous subclass can capture it.
    final Scanner cannedScanner = new Scanner(input);
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
    ScanInfo scanInfo = new ScanInfo(conf, cfd, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
    // Engine mock: every writer-creation request is answered by the capture.
    StoreEngine engine = mock(StoreEngine.class);
    when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    // Store mock that is satisfactory for the compactor.
    HStore mockStore = mock(HStore.class);
    when(mockStore.getColumnFamilyDescriptor()).thenReturn(cfd);
    when(mockStore.getScanInfo()).thenReturn(scanInfo);
    when(mockStore.areWritesEnabled()).thenReturn(true);
    when(mockStore.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(mockStore.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
    when(mockStore.getStoreEngine()).thenReturn(engine);
    when(mockStore.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
    // Override both scanner factories so compaction reads from the canned input.
    return new StripeCompactor(conf, mockStore) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo si,
                List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
                byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return cannedScanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo si,
                List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
                long earliestPutTs) throws IOException {
            return cannedScanner;
        }
    };
}
Also used : Scanner(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) ScanType(org.apache.hadoop.hbase.regionserver.ScanType) StoreEngine(org.apache.hadoop.hbase.regionserver.StoreEngine) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) ScanInfo(org.apache.hadoop.hbase.regionserver.ScanInfo) ArrayList(java.util.ArrayList) List(java.util.List) CreateStoreFileWriterParams(org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) HStore(org.apache.hadoop.hbase.regionserver.HStore)

Example 2 with Scanner

Usage of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner in the Apache HBase project.

From the class TestDateTieredCompactor, method createCompactor:

/**
 * Builds a {@link DateTieredCompactor} over a mocked legacy {@link Store}: writer
 * creation is answered by {@code writers}, the compaction scan is replaced by a
 * canned scanner over {@code input}, and {@code storefiles} back the store's
 * file list and max sequence id.
 *
 * @param writers    capture that answers writer-creation calls and records the writers
 * @param input      the KeyValues the stubbed compaction scanner will emit
 * @param storefiles files the mocked store reports as its current store files
 * @return a DateTieredCompactor whose scanner-creation hooks return the canned scanner
 * @throws Exception if mock or configuration setup fails
 */
private DateTieredCompactor createCompactor(StoreFileWritersCapture writers, final KeyValue[] input, List<StoreFile> storefiles) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    // Canned scanner; captured by the anonymous subclass below.
    final Scanner cannedScanner = new Scanner(input);
    HColumnDescriptor family = new HColumnDescriptor(NAME_OF_THINGS);
    ScanInfo scanInfo = new ScanInfo(conf, family, Long.MAX_VALUE, 0, CellComparator.COMPARATOR);
    long maxSequenceId = StoreFile.getMaxSequenceIdInList(storefiles);
    // Store mock that is satisfactory for the compactor.
    final Store mockStore = mock(Store.class);
    when(mockStore.getStorefiles()).thenReturn(storefiles);
    when(mockStore.getFamily()).thenReturn(family);
    when(mockStore.getScanInfo()).thenReturn(scanInfo);
    when(mockStore.areWritesEnabled()).thenReturn(true);
    when(mockStore.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(mockStore.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
    // Any tmp-writer request is answered by the capture.
    when(mockStore.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
    when(mockStore.getComparator()).thenReturn(CellComparator.COMPARATOR);
    when(mockStore.getMaxSequenceId()).thenReturn(maxSequenceId);
    // Override both scanner factories so compaction reads from the canned input.
    return new DateTieredCompactor(conf, mockStore) {

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
                long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
                byte[] dropDeletesToRow) throws IOException {
            return cannedScanner;
        }

        @Override
        protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
                ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return cannedScanner;
        }
    };
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) Scanner(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) ScanType(org.apache.hadoop.hbase.regionserver.ScanType) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) FileSystem(org.apache.hadoop.fs.FileSystem) Store(org.apache.hadoop.hbase.regionserver.Store) ScanInfo(org.apache.hadoop.hbase.regionserver.ScanInfo) ArrayList(java.util.ArrayList) List(java.util.List)

Example 3 with Scanner

Usage of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner in the Apache HBase project.

From the class TestDateTieredCompactor, method createCompactor:

/**
 * Builds a {@link DateTieredCompactor} over a mocked {@link HStore}: writer
 * creation is answered by {@code writers}, the compaction scan is replaced by a
 * canned scanner over {@code input}, and {@code storefiles} back the store's
 * file list and max sequence id.
 *
 * @param writers    capture that answers writer-creation calls and records the writers
 * @param input      the KeyValues the stubbed compaction scanner will emit
 * @param storefiles files the mocked store reports as its current store files
 * @return a DateTieredCompactor whose scanner-creation hooks return the canned scanner
 * @throws Exception if mock or configuration setup fails
 */
private DateTieredCompactor createCompactor(StoreFileWritersCapture writers, final KeyValue[] input, List<HStoreFile> storefiles) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    // Canned scanner; captured by the anonymous subclass below.
    final Scanner cannedScanner = new Scanner(input);
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
    ScanInfo scanInfo = new ScanInfo(conf, cfd, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
    OptionalLong maxSequenceId = StoreUtils.getMaxSequenceIdInList(storefiles);
    // Engine mock: every writer-creation request is answered by the capture.
    StoreEngine engine = mock(StoreEngine.class);
    when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    // Store mock that is satisfactory for the compactor.
    HStore mockStore = mock(HStore.class);
    when(mockStore.getStorefiles()).thenReturn(storefiles);
    when(mockStore.getColumnFamilyDescriptor()).thenReturn(cfd);
    when(mockStore.getScanInfo()).thenReturn(scanInfo);
    when(mockStore.areWritesEnabled()).thenReturn(true);
    when(mockStore.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(mockStore.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
    when(mockStore.getStoreEngine()).thenReturn(engine);
    when(mockStore.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
    when(mockStore.getMaxSequenceId()).thenReturn(maxSequenceId);
    // Override both scanner factories so compaction reads from the canned input.
    return new DateTieredCompactor(conf, mockStore) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo si,
                List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
                byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return cannedScanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo si,
                List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
                long earliestPutTs) throws IOException {
            return cannedScanner;
        }
    };
}
Also used : Scanner(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) ScanType(org.apache.hadoop.hbase.regionserver.ScanType) StoreEngine(org.apache.hadoop.hbase.regionserver.StoreEngine) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) OptionalLong(java.util.OptionalLong) ScanInfo(org.apache.hadoop.hbase.regionserver.ScanInfo) ArrayList(java.util.ArrayList) List(java.util.List) CreateStoreFileWriterParams(org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) HStore(org.apache.hadoop.hbase.regionserver.HStore)

Aggregations

ArrayList (java.util.ArrayList)3 List (java.util.List)3 Configuration (org.apache.hadoop.conf.Configuration)3 FileSystem (org.apache.hadoop.fs.FileSystem)3 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)3 InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner)3 ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo)3 ScanType (org.apache.hadoop.hbase.regionserver.ScanType)3 StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner)3 Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner)3 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)2 CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams)2 HStore (org.apache.hadoop.hbase.regionserver.HStore)2 StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine)2 OptionalLong (java.util.OptionalLong)1 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)1 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)1 Store (org.apache.hadoop.hbase.regionserver.Store)1