Search in sources:

Example 16 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

In class TestRSStatusServlet, method setupBasicMocks:

// Wires up a fully mocked HRegionServer graph (RPC services, ZooKeeper,
// cache config, master tracker, metrics) so the RS status servlet can be
// exercised without a live cluster.
@Before
public void setupBasicMocks() throws IOException, ServiceException {
    rs = Mockito.mock(HRegionServer.class);
    rpcServices = Mockito.mock(RSRpcServices.class);
    rpcServer = Mockito.mock(RpcServerInterface.class);
    Mockito.doReturn(HBaseConfiguration.create()).when(rs).getConfiguration();
    Mockito.doReturn(rpcServices).when(rs).getRSRpcServices();
    Mockito.doReturn(rpcServer).when(rs).getRpcServer();
    // Any GetServerInfo RPC answers with the canned fakeResponse.
    Mockito.doReturn(fakeResponse).when(rpcServices).getServerInfo((RpcController) Mockito.any(), (GetServerInfoRequest) Mockito.any());
    // Fake ZKW
    ZooKeeperWatcher zkw = Mockito.mock(ZooKeeperWatcher.class);
    Mockito.doReturn("fakequorum").when(zkw).getQuorum();
    Mockito.doReturn(zkw).when(rs).getZooKeeper();
    // Fake CacheConfig
    // NOTE(review): this warning fires unconditionally during setup —
    // presumably mirroring the no-block-cache configuration being mocked
    // below; confirm it is intentional rather than leftover debug output.
    LOG.warn("The " + HConstants.HFILE_BLOCK_CACHE_SIZE_KEY + " is set to 0");
    CacheConfig cacheConf = Mockito.mock(CacheConfig.class);
    // getBlockCache() returns null, so the servlet must handle the
    // "no block cache configured" rendering path.
    Mockito.doReturn(null).when(cacheConf).getBlockCache();
    Mockito.doReturn(cacheConf).when(rs).getCacheConfig();
    // Fake MasterAddressTracker
    MasterAddressTracker mat = Mockito.mock(MasterAddressTracker.class);
    Mockito.doReturn(fakeMasterAddress).when(mat).getMasterAddress();
    Mockito.doReturn(mat).when(rs).getMasterAddressTracker();
    // Region-server metrics backed by a deterministic stub wrapper.
    MetricsRegionServer rms = Mockito.mock(MetricsRegionServer.class);
    Mockito.doReturn(new MetricsRegionServerWrapperStub()).when(rms).getRegionServerWrapper();
    Mockito.doReturn(rms).when(rs).getRegionServerMetrics();
    // RPC-server metrics backed by a deterministic stub wrapper.
    MetricsHBaseServer ms = Mockito.mock(MetricsHBaseServer.class);
    Mockito.doReturn(new MetricsHBaseServerWrapperStub()).when(ms).getHBaseServerWrapper();
    Mockito.doReturn(ms).when(rpcServer).getMetrics();
}
Also used : MasterAddressTracker(org.apache.hadoop.hbase.zookeeper.MasterAddressTracker) ZooKeeperWatcher(org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher) RpcServerInterface(org.apache.hadoop.hbase.ipc.RpcServerInterface) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) MetricsHBaseServer(org.apache.hadoop.hbase.ipc.MetricsHBaseServer) MetricsHBaseServerWrapperStub(org.apache.hadoop.hbase.ipc.MetricsHBaseServerWrapperStub) Before(org.junit.Before)

Example 17 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

In class TestMobStoreCompaction, method createHFile:

/**
 * Writes a single HFile at {@code path} containing one KeyValue whose row is
 * STARTROW plus the given row index and whose value is {@code dummyData}.
 * A bulkload timestamp is recorded in the file info before closing.
 */
private void createHFile(Path path, int rowIdx, byte[] dummyData) throws IOException {
    HFileContext fileContext = new HFileContextBuilder().build();
    HFile.Writer hfileWriter = HFile.getWriterFactory(conf, new CacheConfig(conf))
        .withPath(fs, path)
        .withFileContext(fileContext)
        .create();
    long timestamp = System.currentTimeMillis();
    try {
        byte[] rowKey = Bytes.add(STARTROW, Bytes.toBytes(rowIdx));
        hfileWriter.append(new KeyValue(rowKey, COLUMN_FAMILY, Bytes.toBytes("colX"), timestamp, dummyData));
    } finally {
        // Stamp the bulkload time and close even if the append failed.
        hfileWriter.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
        hfileWriter.close();
    }
}
Also used : HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)

Example 18 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

In class TestMobStoreCompaction, method countMobCellsInMobFiles:

/**
 * Opens every file under the mob family directory, counts how many of them
 * are del files, asserts that count equals {@code expectedNumDelfiles}, and
 * returns the number of mob cells visible through a compaction-style scan
 * (deletes dropped, block cache disabled).
 *
 * @param expectedNumDelfiles expected number of del files in the mob directory
 * @return total number of mob cells counted across all mob files
 * @throws IOException if listing the directory or scanning the files fails
 */
private int countMobCellsInMobFiles(int expectedNumDelfiles) throws IOException {
    Configuration copyOfConf = new Configuration(conf);
    // Disable the block cache so this read-only pass does not pollute it.
    copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
    CacheConfig cacheConfig = new CacheConfig(copyOfConf);
    Path mobDirPath = MobUtils.getMobFamilyPath(conf, htd.getTableName(), hcd.getNameAsString());
    List<StoreFile> sfs = new ArrayList<>();
    int numDelfiles = 0;
    int size = 0;
    if (fs.exists(mobDirPath)) {
        for (FileStatus f : fs.listStatus(mobDirPath)) {
            StoreFile sf = new StoreFile(fs, f.getPath(), conf, cacheConfig, BloomType.NONE);
            sfs.add(sf);
            if (StoreFileInfo.isDelFile(sf.getPath())) {
                numDelfiles++;
            }
        }
        // Fixed: was a raw List, which generated unchecked warnings and lost
        // type safety; parameterize with the actual element type.
        List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(sfs, false, true, false, false, HConstants.LATEST_TIMESTAMP);
        Scan scan = new Scan();
        scan.setMaxVersions(hcd.getMaxVersions());
        long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0);
        long ttl = HStore.determineTTLFromFamily(hcd);
        ScanInfo scanInfo = new ScanInfo(copyOfConf, hcd, ttl, timeToPurgeDeletes, CellComparator.COMPARATOR);
        // COMPACT_DROP_DELETES mimics a major compaction's view of the cells.
        StoreScanner scanner = new StoreScanner(scan, scanInfo, ScanType.COMPACT_DROP_DELETES, null, scanners, 0L, HConstants.LATEST_TIMESTAMP);
        try {
            size += UTIL.countRows(scanner);
        } finally {
            scanner.close();
        }
    }
    // assert the number of the existing del files
    assertEquals(expectedNumDelfiles, numDelfiles);
    return size;
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) ArrayList(java.util.ArrayList) List(java.util.List) Scan(org.apache.hadoop.hbase.client.Scan) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig)

Example 19 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

In class TestReversibleScanners, method testReversibleStoreScanner:

// Verifies a reversed StoreScanner over one memstore and two store files:
// full scan, bounded start row, column-restricted scan, and MVCC read points.
@Test
public void testReversibleStoreScanner() throws IOException {
    // write data to one memstore and two store files
    FileSystem fs = TEST_UTIL.getTestFileSystem();
    Path hfilePath = new Path(new Path(TEST_UTIL.getDataTestDir("testReversibleStoreScanner"), "regionname"), "familyname");
    CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
    HFileContextBuilder hcBuilder = new HFileContextBuilder();
    // Small block size forces multiple blocks per file, exercising
    // reversed seeks across block boundaries.
    hcBuilder.withBlockSize(2 * 1024);
    HFileContext hFileContext = hcBuilder.build();
    StoreFileWriter writer1 = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(hfilePath).withFileContext(hFileContext).build();
    StoreFileWriter writer2 = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(hfilePath).withFileContext(hFileContext).build();
    MemStore memstore = new DefaultMemStore();
    writeMemstoreAndStoreFiles(memstore, new StoreFileWriter[] { writer1, writer2 });
    StoreFile sf1 = new StoreFile(fs, writer1.getPath(), TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
    StoreFile sf2 = new StoreFile(fs, writer2.getPath(), TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE);
    ScanType scanType = ScanType.USER_SCAN;
    ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), FAMILYNAME, 0, Integer.MAX_VALUE, Long.MAX_VALUE, KeepDeletedCells.FALSE, 0, CellComparator.COMPARATOR);
    // Case 1.Test a full reversed scan
    Scan scan = new Scan();
    scan.setReversed(true);
    StoreScanner storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
    verifyCountAndOrder(storeScanner, QUALSIZE * ROWSIZE, ROWSIZE, false);
    // Case 2.Test reversed scan with a specified start row
    int startRowNum = ROWSIZE / 2;
    byte[] startRow = ROWS[startRowNum];
    scan.setStartRow(startRow);
    storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
    // Reversed scan from the midpoint sees rows [0, startRowNum] inclusive.
    verifyCountAndOrder(storeScanner, QUALSIZE * (startRowNum + 1), startRowNum + 1, false);
    // Case 3.Test reversed scan with a specified start row and specified
    // qualifiers
    assertTrue(QUALSIZE > 2);
    scan.addColumn(FAMILYNAME, QUALS[0]);
    scan.addColumn(FAMILYNAME, QUALS[2]);
    storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, MAXMVCC);
    // Only 2 of the qualifiers are selected, so 2 KVs per surviving row.
    verifyCountAndOrder(storeScanner, 2 * (startRowNum + 1), startRowNum + 1, false);
    // Case 4.Test reversed scan with mvcc based on case 3
    for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
        LOG.info("Setting read point to " + readPoint);
        storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanType, scanInfo, readPoint);
        int expectedRowCount = 0;
        int expectedKVCount = 0;
        // Recompute the expectation: a KV is visible only when its MVCC
        // (as assigned by makeMVCC) does not exceed the read point, and a
        // row counts only if at least one of its selected KVs is visible.
        for (int i = startRowNum; i >= 0; i--) {
            int kvCount = 0;
            if (makeMVCC(i, 0) <= readPoint) {
                kvCount++;
            }
            if (makeMVCC(i, 2) <= readPoint) {
                kvCount++;
            }
            if (kvCount > 0) {
                expectedRowCount++;
                expectedKVCount += kvCount;
            }
        }
        verifyCountAndOrder(storeScanner, expectedKVCount, expectedRowCount, false);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) FileSystem(org.apache.hadoop.fs.FileSystem) Scan(org.apache.hadoop.hbase.client.Scan) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Test(org.junit.Test)

Example 20 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

In class TestReversibleScanners, method testReversibleStoreFileScanner:

/**
 * Exercises reversed seeking on a single store file for every data block
 * encoding, both at the latest timestamp and at each MVCC read point.
 */
@Test
public void testReversibleStoreFileScanner() throws IOException {
    FileSystem fileSystem = TEST_UTIL.getTestFileSystem();
    Path familyDir = new Path(
        new Path(TEST_UTIL.getDataTestDir("testReversibleStoreFileScanner"), "regionname"),
        "familyname");
    CacheConfig cacheConfig = new CacheConfig(TEST_UTIL.getConfiguration());
    for (DataBlockEncoding blockEncoding : DataBlockEncoding.values()) {
        // Small blocks plus the encoding under test stress reversed seeks.
        HFileContext fileContext = new HFileContextBuilder()
            .withBlockSize(2 * 1024)
            .withDataBlockEncoding(blockEncoding)
            .build();
        StoreFileWriter storeFileWriter =
            new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConfig, fileSystem)
                .withOutputDir(familyDir)
                .withFileContext(fileContext)
                .build();
        writeStoreFile(storeFileWriter);
        StoreFile storeFile = new StoreFile(fileSystem, storeFileWriter.getPath(),
            TEST_UTIL.getConfiguration(), cacheConfig, BloomType.NONE);
        List<StoreFileScanner> fileScanners = StoreFileScanner.getScannersForStoreFiles(
            Collections.singletonList(storeFile), false, true, false, false, Long.MAX_VALUE);
        // Seek behavior at the latest timestamp.
        seekTestOfReversibleKeyValueScanner(fileScanners.get(0));
        // Seek behavior at every MVCC read point below MAXMVCC.
        for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
            LOG.info("Setting read point to " + readPoint);
            fileScanners = StoreFileScanner.getScannersForStoreFiles(
                Collections.singletonList(storeFile), false, true, false, false, readPoint);
            seekTestOfReversibleKeyValueScannerWithMVCC(fileScanners.get(0), readPoint);
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) FileSystem(org.apache.hadoop.fs.FileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Aggregations

CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)63 Path (org.apache.hadoop.fs.Path)28 Test (org.junit.Test)26 Configuration (org.apache.hadoop.conf.Configuration)21 HFile (org.apache.hadoop.hbase.io.hfile.HFile)21 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)21 FileSystem (org.apache.hadoop.fs.FileSystem)20 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)20 BlockCache (org.apache.hadoop.hbase.io.hfile.BlockCache)15 KeyValue (org.apache.hadoop.hbase.KeyValue)14 TableName (org.apache.hadoop.hbase.TableName)14 Region (org.apache.hadoop.hbase.regionserver.Region)13 Store (org.apache.hadoop.hbase.regionserver.Store)13 Cell (org.apache.hadoop.hbase.Cell)10 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)10 CombinedBlockCache (org.apache.hadoop.hbase.io.hfile.CombinedBlockCache)10 IOException (java.io.IOException)9 CountDownLatch (java.util.concurrent.CountDownLatch)8 FileStatus (org.apache.hadoop.fs.FileStatus)8 HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)8