Example 1 with StoreEngine

Use of org.apache.hadoop.hbase.regionserver.StoreEngine in project hbase by apache, from the class TestStripeCompactor, method createCompactor.

private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner(input);
    // Create store mock that is satisfactory for compactor.
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
    ScanInfo si = new ScanInfo(conf, familyDescriptor, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
    HStore store = mock(HStore.class);
    when(store.getColumnFamilyDescriptor()).thenReturn(familyDescriptor);
    when(store.getScanInfo()).thenReturn(si);
    when(store.areWritesEnabled()).thenReturn(true);
    when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(store.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
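    // Stub the store engine so each writer request is answered by the capturing writer factory.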
    StoreEngine storeEngine = mock(StoreEngine.class);
    when(storeEngine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    when(store.getStoreEngine()).thenReturn(storeEngine);
    when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
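    // Subclass the compactor to bypass real file scanners: both createScanner overrides return the canned scanner above.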
    return new StripeCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used: Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner), StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner), ScanType (org.apache.hadoop.hbase.regionserver.ScanType), StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), FileSystem (org.apache.hadoop.fs.FileSystem), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), ArrayList (java.util.ArrayList), List (java.util.List), CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), HStore (org.apache.hadoop.hbase.regionserver.HStore)
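
The writers argument handed to thenAnswer(...) above works because StoreFileWritersCapture implements Mockito's Answer: every call to the mocked StoreEngine.createWriter(...) is delegated to it. The sketch below shows the core of such a capture under two assumptions: that StoreEngine.createWriter returns a StoreFileWriter (as the snippet's imports suggest) and that a plain Mockito mock of StoreFileWriter suffices for the test at hand. The class name SimpleWritersCapture is hypothetical; the real TestCompactor.StoreFileWritersCapture in the HBase test sources captures considerably more state.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import static org.mockito.Mockito.mock;

public class SimpleWritersCapture implements Answer<StoreFileWriter> {

    private final List<StoreFileWriter> writers = new ArrayList<>();

    @Override
    public StoreFileWriter answer(InvocationOnMock invocation) {
        // Hand the compactor a mock writer and remember it for later assertions.
        StoreFileWriter writer = mock(StoreFileWriter.class);
        writers.add(writer);
        return writer;
    }

    // Writers created so far, for the test to inspect after compaction runs.
    public List<StoreFileWriter> getWriters() {
        return writers;
    }
}

A test can then drive the compactor and assert against getWriters() to check how many output files the compaction opened.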

Example 2 with StoreEngine

Use of org.apache.hadoop.hbase.regionserver.StoreEngine in project hbase by apache, from the class TestStripeCompactionPolicy, method createCompactor.

private StripeCompactor createCompactor() throws Exception {
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("foo"));
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    HStore store = mock(HStore.class);
    RegionInfo info = mock(RegionInfo.class);
    when(info.getRegionNameAsString()).thenReturn("testRegion");
    when(store.getColumnFamilyDescriptor()).thenReturn(familyDescriptor);
    when(store.getRegionInfo()).thenReturn(info);
    StoreEngine storeEngine = mock(StoreEngine.class);
    when(storeEngine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    when(store.getStoreEngine()).thenReturn(storeEngine);
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner();
    return new StripeCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used: StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner), ScanType (org.apache.hadoop.hbase.regionserver.ScanType), StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), List (java.util.List), ArrayList (java.util.ArrayList), ImmutableList (org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList), CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), HStore (org.apache.hadoop.hbase.regionserver.HStore), StoreFileWritersCapture (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture)
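
Both createScanner overrides return the same canned scanner, so the compactor's read path never touches real store files. The TestCompactor.Scanner used here plays back a fixed set of cells; a rough, hypothetical sketch of that idea follows, assuming the HBase 2.x InternalScanner contract (a two-argument next(List<Cell>, ScannerContext) plus close(), with the one-argument next provided as a default method):

import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScannerContext;

public class FixedCellScanner implements InternalScanner {

    private final Deque<Cell> remaining;

    public FixedCellScanner(KeyValue... input) {
        this.remaining = new ArrayDeque<>(Arrays.asList(input));
    }

    @Override
    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
        // Emit one cell per call; report whether more cells remain.
        if (!remaining.isEmpty()) {
            result.add(remaining.poll());
        }
        return !remaining.isEmpty();
    }

    @Override
    public void close() {
        remaining.clear();
    }
}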

Example 3 with StoreEngine

Use of org.apache.hadoop.hbase.regionserver.StoreEngine in project hbase by apache, from the class TestDateTieredCompactor, method createCompactor.

private DateTieredCompactor createCompactor(StoreFileWritersCapture writers, final KeyValue[] input, List<HStoreFile> storefiles) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner(input);
    // Create store mock that is satisfactory for compactor.
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
    ScanInfo si = new ScanInfo(conf, familyDescriptor, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
    HStore store = mock(HStore.class);
    when(store.getStorefiles()).thenReturn(storefiles);
    when(store.getColumnFamilyDescriptor()).thenReturn(familyDescriptor);
    when(store.getScanInfo()).thenReturn(si);
    when(store.areWritesEnabled()).thenReturn(true);
    when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(store.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
    StoreEngine storeEngine = mock(StoreEngine.class);
    when(storeEngine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    when(store.getStoreEngine()).thenReturn(storeEngine);
    when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
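    // Derive the max sequence id from the supplied store files, mirroring what a real store reports.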
    OptionalLong maxSequenceId = StoreUtils.getMaxSequenceIdInList(storefiles);
    when(store.getMaxSequenceId()).thenReturn(maxSequenceId);
    return new DateTieredCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used: Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner), StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner), ScanType (org.apache.hadoop.hbase.regionserver.ScanType), StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), FileSystem (org.apache.hadoop.fs.FileSystem), OptionalLong (java.util.OptionalLong), ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo), ArrayList (java.util.ArrayList), List (java.util.List), CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), HStore (org.apache.hadoop.hbase.regionserver.HStore)
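
The one addition relative to the stripe compactor tests is the sequence-id plumbing: StoreUtils.getMaxSequenceIdInList reduces the supplied store files to an OptionalLong, which the mock then serves from store.getMaxSequenceId() just as a real store would. Conceptually the reduction amounts to the stream below (a sketch only, assuming HStoreFile exposes its highest sequence id via getMaxSequenceId()):

import java.util.List;
import java.util.OptionalLong;
import org.apache.hadoop.hbase.regionserver.HStoreFile;

final class MaxSeqIdSketch {

    // Fold per-file sequence ids into the store-wide maximum; an empty
    // list of store files yields OptionalLong.empty().
    static OptionalLong maxSequenceId(List<HStoreFile> storefiles) {
        return storefiles.stream()
            .mapToLong(HStoreFile::getMaxSequenceId)
            .max();
    }
}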

Aggregations

ArrayList (java.util.ArrayList): 3
List (java.util.List): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 3
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 3
CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams): 3
HStore (org.apache.hadoop.hbase.regionserver.HStore): 3
InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner): 3
ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo): 3
ScanType (org.apache.hadoop.hbase.regionserver.ScanType): 3
StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine): 3
StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner): 3
FileSystem (org.apache.hadoop.fs.FileSystem): 2
Scanner (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner): 2
OptionalLong (java.util.OptionalLong): 1
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 1
StoreFileWritersCapture (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture): 1
ImmutableList (org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList): 1