Usage of org.apache.hadoop.hbase.regionserver.StoreEngine in the Apache HBase project.
From the class TestStripeCompactor, method createCompactor:
/**
 * Builds a {@link StripeCompactor} whose store is a Mockito stub and whose two
 * scanner-creation hooks are overridden to replay the supplied {@code input} cells.
 *
 * @param writers captures every store-file writer the compactor opens
 * @param input   cells the stubbed scanner will emit during compaction
 * @return a compactor suitable for exercising stripe-compaction logic in isolation
 */
private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
  final Scanner scanner = new Scanner(input);
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  // Stub only the store surface the compactor actually touches.
  ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
  ScanInfo scanInfo = new ScanInfo(conf, family, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
  StoreEngine engine = mock(StoreEngine.class);
  // Route writer creation through the capture so the test can inspect output files.
  when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
  HStore store = mock(HStore.class);
  when(store.getColumnFamilyDescriptor()).thenReturn(family);
  when(store.getScanInfo()).thenReturn(scanInfo);
  when(store.areWritesEnabled()).thenReturn(true);
  when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
  when(store.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
  when(store.getStoreEngine()).thenReturn(engine);
  when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
  // Both createScanner variants short-circuit to the canned scanner above.
  return new StripeCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
        byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
        long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
Usage of org.apache.hadoop.hbase.regionserver.StoreEngine in the Apache HBase project.
From the class TestStripeCompactionPolicy, method createCompactor:
/**
 * Builds a {@link StripeCompactor} over a fully mocked store; both
 * scanner-creation hooks are overridden to return an empty canned scanner,
 * and every writer the compactor opens is captured for inspection.
 *
 * @return a compactor suitable for driving compaction-policy tests
 */
private StripeCompactor createCompactor() throws Exception {
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("foo"));
  // Minimal region identity so log/diagnostic paths have a region name.
  RegionInfo regionInfo = mock(RegionInfo.class);
  when(regionInfo.getRegionNameAsString()).thenReturn("testRegion");
  StoreEngine engine = mock(StoreEngine.class);
  // Route writer creation through the capture so the test can inspect output files.
  when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
  HStore store = mock(HStore.class);
  when(store.getColumnFamilyDescriptor()).thenReturn(family);
  when(store.getRegionInfo()).thenReturn(regionInfo);
  when(store.getStoreEngine()).thenReturn(engine);
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  final Scanner scanner = new Scanner();
  // Both createScanner variants short-circuit to the canned scanner above.
  return new StripeCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
        byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
        long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
Usage of org.apache.hadoop.hbase.regionserver.StoreEngine in the Apache HBase project.
From the class TestDateTieredCompactor, method createCompactor:
/**
 * Builds a {@link DateTieredCompactor} whose store is a Mockito stub and whose
 * scanner-creation hooks are overridden to replay the supplied {@code input} cells.
 *
 * @param writers    captures every store-file writer the compactor opens
 * @param input      cells the stubbed scanner will emit during compaction
 * @param storefiles files the mocked store reports; also drives the max sequence id
 * @return a compactor suitable for exercising date-tiered compaction in isolation
 */
private DateTieredCompactor createCompactor(StoreFileWritersCapture writers, final KeyValue[] input, List<HStoreFile> storefiles) throws Exception {
  final Scanner scanner = new Scanner(input);
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  // Stub only the store surface the compactor actually touches.
  ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
  ScanInfo scanInfo = new ScanInfo(conf, family, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
  StoreEngine engine = mock(StoreEngine.class);
  // Route writer creation through the capture so the test can inspect output files.
  when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
  HStore store = mock(HStore.class);
  when(store.getStorefiles()).thenReturn(storefiles);
  when(store.getColumnFamilyDescriptor()).thenReturn(family);
  when(store.getScanInfo()).thenReturn(scanInfo);
  when(store.areWritesEnabled()).thenReturn(true);
  when(store.getFileSystem()).thenReturn(mock(FileSystem.class));
  when(store.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
  when(store.getStoreEngine()).thenReturn(engine);
  when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
  // Max sequence id is derived from the provided store files, as production code would.
  when(store.getMaxSequenceId()).thenReturn(StoreUtils.getMaxSequenceIdInList(storefiles));
  // Both createScanner variants short-circuit to the canned scanner above.
  return new DateTieredCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
        byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
        long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
Aggregations