use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.
Example from the class TestRSStatusServlet, method setupBasicMocks.
// Wires up a fully mocked HRegionServer graph (RPC services, RPC server, ZK
// watcher, cache config, master tracker, metrics) so the RS status servlet
// under test can render without a live cluster.
@Before
public void setupBasicMocks() throws IOException, ServiceException {
rs = Mockito.mock(HRegionServer.class);
rpcServices = Mockito.mock(RSRpcServices.class);
rpcServer = Mockito.mock(RpcServerInterface.class);
// A real (default) configuration, so the servlet can read config values.
Mockito.doReturn(HBaseConfiguration.create()).when(rs).getConfiguration();
Mockito.doReturn(rpcServices).when(rs).getRSRpcServices();
Mockito.doReturn(rpcServer).when(rs).getRpcServer();
// Canned protobuf response for any GetServerInfo RPC.
Mockito.doReturn(fakeResponse).when(rpcServices).getServerInfo((RpcController) Mockito.any(), (GetServerInfoRequest) Mockito.any());
// Fake ZKW
ZooKeeperWatcher zkw = Mockito.mock(ZooKeeperWatcher.class);
Mockito.doReturn("fakequorum").when(zkw).getQuorum();
Mockito.doReturn(zkw).when(rs).getZooKeeper();
// Fake CacheConfig
// NOTE(review): this warn claims the block-cache size is 0, but nothing in
// this method sets that key — looks copied from elsewhere; confirm intent.
LOG.warn("The " + HConstants.HFILE_BLOCK_CACHE_SIZE_KEY + " is set to 0");
CacheConfig cacheConf = Mockito.mock(CacheConfig.class);
// Null block cache: the servlet must tolerate a server with caching disabled.
Mockito.doReturn(null).when(cacheConf).getBlockCache();
Mockito.doReturn(cacheConf).when(rs).getCacheConfig();
// Fake MasterAddressTracker
MasterAddressTracker mat = Mockito.mock(MasterAddressTracker.class);
Mockito.doReturn(fakeMasterAddress).when(mat).getMasterAddress();
Mockito.doReturn(mat).when(rs).getMasterAddressTracker();
// Region-server and RPC-server metrics backed by deterministic stub wrappers.
MetricsRegionServer rms = Mockito.mock(MetricsRegionServer.class);
Mockito.doReturn(new MetricsRegionServerWrapperStub()).when(rms).getRegionServerWrapper();
Mockito.doReturn(rms).when(rs).getRegionServerMetrics();
MetricsHBaseServer ms = Mockito.mock(MetricsHBaseServer.class);
Mockito.doReturn(new MetricsHBaseServerWrapperStub()).when(ms).getHBaseServerWrapper();
Mockito.doReturn(ms).when(rpcServer).getMetrics();
}
use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.
Example from the class TestMobStoreCompaction, method createHFile.
/**
 * Writes a single HFile at {@code path} containing exactly one cell whose row
 * is STARTROW + {@code rowIdx} and whose value is {@code dummyData}.
 *
 * @param path      destination for the new HFile
 * @param rowIdx    suffix appended to STARTROW to form the row key
 * @param dummyData cell value payload
 * @throws IOException on any filesystem or writer failure
 */
private void createHFile(Path path, int rowIdx, byte[] dummyData) throws IOException {
  HFileContext fileContext = new HFileContextBuilder().build();
  HFile.Writer hfileWriter = HFile.getWriterFactory(conf, new CacheConfig(conf))
      .withPath(fs, path)
      .withFileContext(fileContext)
      .create();
  long timestamp = System.currentTimeMillis();
  try {
    byte[] row = Bytes.add(STARTROW, Bytes.toBytes(rowIdx));
    hfileWriter.append(new KeyValue(row, COLUMN_FAMILY, Bytes.toBytes("colX"), timestamp, dummyData));
  } finally {
    // Stamp the bulk-load time into file info before closing, as a real bulk load would.
    hfileWriter.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
    hfileWriter.close();
  }
}
use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.
Example from the class TestMobStoreCompaction, method countMobCellsInMobFiles.
/**
 * Scans every mob file under the mob family directory and returns the total
 * number of live cells, asserting along the way that the expected number of
 * delete-marker (del) files is present on disk.
 *
 * @param expectedNumDelfiles number of del files expected in the mob directory
 * @return total live cell count across all mob files (0 if the directory is absent)
 * @throws IOException on any filesystem or scanner failure
 */
private int countMobCellsInMobFiles(int expectedNumDelfiles) throws IOException {
  // Use a copy of the config with the block cache disabled for this
  // read-only accounting pass.
  Configuration copyOfConf = new Configuration(conf);
  copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
  CacheConfig cacheConfig = new CacheConfig(copyOfConf);
  Path mobDirPath = MobUtils.getMobFamilyPath(conf, htd.getTableName(), hcd.getNameAsString());
  List<StoreFile> sfs = new ArrayList<>();
  int numDelfiles = 0;
  int size = 0;
  if (fs.exists(mobDirPath)) {
    for (FileStatus f : fs.listStatus(mobDirPath)) {
      StoreFile sf = new StoreFile(fs, f.getPath(), conf, cacheConfig, BloomType.NONE);
      sfs.add(sf);
      if (StoreFileInfo.isDelFile(sf.getPath())) {
        numDelfiles++;
      }
    }
    // Fix: was a raw List, which forced an unchecked conversion at the
    // StoreScanner constructor below.
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(sfs, false, true,
        false, false, HConstants.LATEST_TIMESTAMP);
    Scan scan = new Scan();
    scan.setMaxVersions(hcd.getMaxVersions());
    long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
    long ttl = HStore.determineTTLFromFamily(hcd);
    ScanInfo scanInfo = new ScanInfo(copyOfConf, hcd, ttl, timeToPurgeDeletes, CellComparator.COMPARATOR);
    // COMPACT_DROP_DELETES so the count reflects what a major compaction would keep.
    StoreScanner scanner = new StoreScanner(scan, scanInfo, ScanType.COMPACT_DROP_DELETES, null, scanners, 0L, HConstants.LATEST_TIMESTAMP);
    try {
      size += UTIL.countRows(scanner);
    } finally {
      scanner.close();
    }
  }
  // assert the number of the existing del files
  assertEquals(expectedNumDelfiles, numDelfiles);
  return size;
}
use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.
Example from the class TestReversibleScanners, method testReversibleStoreScanner.
// End-to-end reversed-scan test over one memstore plus two store files:
// full scan, bounded start row, column selection, then MVCC read points.
@Test
public void testReversibleStoreScanner() throws IOException {
// write data to one memstore and two store files
FileSystem fs = TEST_UTIL.getTestFileSystem();
Path hfilePath = new Path(new Path(TEST_UTIL.getDataTestDir("testReversibleStoreScanner"), "regionname"), "familyname");
CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
HFileContextBuilder hcBuilder = new HFileContextBuilder();
// Small block size so the fixture spans multiple blocks.
hcBuilder.withBlockSize(2 * 1024);
HFileContext hFileContext = hcBuilder.build();
StoreFileWriter writer1 = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(hfilePath).withFileContext(hFileContext).build();
StoreFileWriter writer2 = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(hfilePath).withFileContext(hFileContext).build();
MemStore memstore = new DefaultMemStore();
writeMemstoreAndStoreFiles(memstore, new StoreFileWriter[] { writer1, writer2 });
StoreFile sf1 = new StoreFile(fs, writer1.getPath(), TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
StoreFile sf2 = new StoreFile(fs, writer2.getPath(), TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
ScanType scanType = ScanType.USER_SCAN;
ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), FAMILYNAME, 0, Integer.MAX_VALUE, Long.MAX_VALUE, KeepDeletedCells.FALSE, 0, CellComparator.COMPARATOR);
// Case 1.Test a full reversed scan
Scan scan = new Scan();
scan.setReversed(true);
StoreScanner storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
verifyCountAndOrder(storeScanner, QUALSIZE * ROWSIZE, ROWSIZE, false);
// Case 2.Test reversed scan with a specified start row
int startRowNum = ROWSIZE / 2;
byte[] startRow = ROWS[startRowNum];
scan.setStartRow(startRow);
storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
// Reversed scan from row N covers rows N..0, i.e. startRowNum + 1 rows.
verifyCountAndOrder(storeScanner, QUALSIZE * (startRowNum + 1), startRowNum + 1, false);
// Case 3.Test reversed scan with a specified start row and specified
// qualifiers
assertTrue(QUALSIZE > 2);
scan.addColumn(FAMILYNAME, QUALS[0]);
scan.addColumn(FAMILYNAME, QUALS[2]);
storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
// Two selected qualifiers per row.
verifyCountAndOrder(storeScanner, 2 * (startRowNum + 1), startRowNum + 1, false);
// Case 4.Test reversed scan with mvcc based on case 3
for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
LOG.info("Setting read point to " + readPoint);
storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, readPoint);
int expectedRowCount = 0;
int expectedKVCount = 0;
// Recompute expectations per read point: a KV is visible iff its
// makeMVCC(row, qualIndex) sequence id is <= the read point; a row counts
// only if at least one of its two selected qualifiers is visible.
for (int i = startRowNum; i >= 0; i--) {
int kvCount = 0;
if (makeMVCC(i, 0) <= readPoint) {
kvCount++;
}
if (makeMVCC(i, 2) <= readPoint) {
kvCount++;
}
if (kvCount > 0) {
expectedRowCount++;
expectedKVCount += kvCount;
}
}
verifyCountAndOrder(storeScanner, expectedKVCount, expectedRowCount, false);
}
}
use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.
Example from the class TestReversibleScanners, method testReversibleStoreFileScanner.
/**
 * Verifies reversed seeking on a StoreFileScanner for every data block
 * encoding, both unrestricted and under each MVCC read point below MAXMVCC.
 */
@Test
public void testReversibleStoreFileScanner() throws IOException {
  FileSystem fileSystem = TEST_UTIL.getTestFileSystem();
  Path familyDir = new Path(
      new Path(TEST_UTIL.getDataTestDir("testReversibleStoreFileScanner"), "regionname"),
      "familyname");
  CacheConfig cacheConfig = new CacheConfig(TEST_UTIL.getConfiguration());
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    // Small block size so the file spans multiple blocks for this encoding.
    HFileContext fileContext = new HFileContextBuilder()
        .withBlockSize(2 * 1024)
        .withDataBlockEncoding(encoding)
        .build();
    StoreFileWriter writer = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(),
        cacheConfig, fileSystem)
        .withOutputDir(familyDir)
        .withFileContext(fileContext)
        .build();
    writeStoreFile(writer);
    StoreFile storeFile = new StoreFile(fileSystem, writer.getPath(),
        TEST_UTIL.getConfiguration(), cacheConfig, BloomType.NONE);
    // Unrestricted read point first.
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
        Collections.singletonList(storeFile), false, true, false, false, Long.MAX_VALUE);
    seekTestOfReversibleKeyValueScanner(scanners.get(0));
    // Then every bounded MVCC read point.
    for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
      LOG.info("Setting read point to " + readPoint);
      scanners = StoreFileScanner.getScannersForStoreFiles(
          Collections.singletonList(storeFile), false, true, false, false, readPoint);
      seekTestOfReversibleKeyValueScannerWithMVCC(scanners.get(0), readPoint);
    }
  }
}
Aggregations