Search in sources :

Example 31 with HStoreFile

use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

The class TestHFileArchiving, method testArchiveStoreFilesDifferentFileSystems.

/**
 * Drives the given archiving function against a fully mocked {@link FileSystem} and verifies
 * that store files are renamed into the expected archive location. The optional WAL directory
 * lets callers exercise the case where WALs live on a different filesystem.
 *
 * @param walDir WAL directory to set via {@link CommonFSUtils#HBASE_WAL_DIR}, or {@code null}
 *          to leave the configuration untouched
 * @param expectedBase expected prefix of the archive destination path
 * @param archivingFunction the archiving operation under test
 * @throws IOException if the archiving function fails
 */
private void testArchiveStoreFilesDifferentFileSystems(String walDir, String expectedBase,
    ArchivingFunction<Configuration, FileSystem, RegionInfo, Path, byte[],
      Collection<HStoreFile>> archivingFunction) throws IOException {
    FileSystem mockedFileSystem = mock(FileSystem.class);
    Configuration conf = new Configuration(UTIL.getConfiguration());
    if (walDir != null) {
        conf.set(CommonFSUtils.HBASE_WAL_DIR, walDir);
    }
    Path filePath = new Path("/mockDir/wals/mockFile");
    // All filesystem operations succeed so the archiving path runs to completion.
    when(mockedFileSystem.getScheme()).thenReturn("mockFS");
    when(mockedFileSystem.mkdirs(any())).thenReturn(true);
    when(mockedFileSystem.exists(any())).thenReturn(true);
    RegionInfo mockedRegion = mock(RegionInfo.class);
    TableName tableName = TableName.valueOf("mockTable");
    when(mockedRegion.getTable()).thenReturn(tableName);
    when(mockedRegion.getEncodedName()).thenReturn("mocked-region-encoded-name");
    Path tableDir = new Path("mockFS://mockDir/tabledir");
    byte[] family = Bytes.toBytes("testfamily");
    HStoreFile mockedFile = mock(HStoreFile.class);
    List<HStoreFile> list = new ArrayList<>();
    list.add(mockedFile);
    when(mockedFile.getPath()).thenReturn(filePath);
    when(mockedFileSystem.rename(any(), any())).thenReturn(true);
    archivingFunction.apply(conf, mockedFileSystem, mockedRegion, tableDir, family, list);
    // Capture the source path of each rename; exactly two renames are expected.
    ArgumentCaptor<Path> pathCaptor = ArgumentCaptor.forClass(Path.class);
    verify(mockedFileSystem, times(2)).rename(pathCaptor.capture(), any());
    String expectedDir = expectedBase
        + "archive/data/default/mockTable/mocked-region-encoded-name/testfamily/mockFile";
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
    assertEquals(expectedDir, pathCaptor.getAllValues().get(0).toString());
}
Also used : Path(org.apache.hadoop.fs.Path) TableName(org.apache.hadoop.hbase.TableName) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayList(java.util.ArrayList) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo)

Example 32 with HStoreFile

use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

The class HFileArchiver, method archive.

/**
 * Archives the given compacted store files into {@code storeArchiveDir}. If no filesystem is
 * available (as can happen in tests), the files are deleted outright instead of archived.
 *
 * @param fs filesystem to archive on; may be {@code null}, in which case files are just deleted
 * @param regionInfo region the files belong to; must be non-null when archiving
 * @param family column family the files belong to; must be non-null when archiving
 * @param compactedFiles files to dispose of; an empty collection is a no-op
 * @param storeArchiveDir destination directory for the archived files
 * @throws IOException if validation fails, the archive directory cannot be created, or any
 *           file could not be archived/deleted
 */
private static void archive(FileSystem fs, RegionInfo regionInfo, byte[] family, Collection<HStoreFile> compactedFiles, Path storeArchiveDir) throws IOException {
    // sometimes in testing, we don't have rss, so we need to check for that
    if (fs == null) {
        // Guard regionInfo here: the null-validation below runs only after this branch, so an
        // unconditional regionInfo.getRegionName() could NPE while merely logging a warning.
        LOG.warn("Passed filesystem is null, so just deleting files without archiving for {}," + "family={}",
            regionInfo == null ? null : Bytes.toString(regionInfo.getRegionName()),
            family == null ? null : Bytes.toString(family));
        deleteStoreFilesWithoutArchiving(compactedFiles);
        return;
    }
    // short circuit if we don't have any files to delete
    if (compactedFiles.isEmpty()) {
        LOG.debug("No files to dispose of, done!");
        return;
    }
    // build the archive path; both pieces are required to compute a valid destination
    if (regionInfo == null || family == null) {
        throw new IOException("Need to have a region and a family to archive from.");
    }
    // make sure we don't archive if we can't and that the archive dir exists
    if (!fs.mkdirs(storeArchiveDir)) {
        throw new IOException("Could not make archive directory (" + storeArchiveDir + ") for store:" + Bytes.toString(family) + ", deleting compacted files instead.");
    }
    // otherwise we attempt to archive the store files
    LOG.debug("Archiving compacted files.");
    // Wrap each store file into the File abstraction the archiver operates on
    StoreToFile getStorePath = new StoreToFile(fs);
    Collection<File> storeFiles = compactedFiles.stream().map(getStorePath).collect(Collectors.toList());
    // do the actual archive; files that could not be moved/deleted are returned
    List<File> failedArchive = resolveAndArchive(fs, storeArchiveDir, storeFiles, EnvironmentEdgeManager.currentTime());
    if (!failedArchive.isEmpty()) {
        throw new FailedArchiveException("Failed to archive/delete all the files for region:" + Bytes.toString(regionInfo.getRegionName()) + ", family:" + Bytes.toString(family) + " into " + storeArchiveDir + ". Something is probably awry on the filesystem.", failedArchive.stream().map(FUNC_FILE_TO_PATH).collect(Collectors.toList()));
    }
}
Also used : InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) MultipleIOException(org.apache.hadoop.io.MultipleIOException) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile)

Example 33 with HStoreFile

use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

The class TestMobFile, method testReadKeyValue.

@Test
public void testReadKeyValue() throws Exception {
    // Write a small store file and wrap it in a MobFile for read-back verification.
    Path dir = TEST_UTIL.getDataTestDir();
    FileSystem fileSystem = dir.getFileSystem(conf);
    HFileContext fileContext = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter sfWriter = new StoreFileWriter.Builder(conf, cacheConf, fileSystem).withOutputDir(dir).withFileContext(fileContext).build();
    String caseName = testName.getMethodName();
    MobTestUtil.writeStoreFile(sfWriter, caseName);
    MobFile mobFile = new MobFile(new HStoreFile(fileSystem, sfWriter.getPath(), conf, cacheConf, BloomType.NONE, true));
    byte[] fam = Bytes.toBytes(caseName);
    byte[] qual = Bytes.toBytes(caseName);

    // Seeking to the start key ("aa") returns it.
    byte[] firstKey = Bytes.toBytes("aa");
    KeyValue expected = new KeyValue(firstKey, fam, qual, Long.MAX_VALUE, Type.Put, firstKey);
    Cell found = mobFile.readCell(expected.createKeyOnly(false), false).getCell();
    MobTestUtil.assertCellEquals(expected, found);

    // Seeking to the end key ("zz") returns it.
    byte[] lastKey = Bytes.toBytes("zz");
    expected = new KeyValue(lastKey, fam, qual, Long.MAX_VALUE, Type.Put, lastKey);
    found = mobFile.readCell(expected.createKeyOnly(false), false).getCell();
    MobTestUtil.assertCellEquals(expected, found);

    // Seeking to a random two-character key returns it.
    byte[] someKey = Bytes.toBytes(MobTestUtil.generateRandomString(2));
    expected = new KeyValue(someKey, fam, qual, Long.MAX_VALUE, Type.Put, someKey);
    found = mobFile.readCell(expected.createKeyOnly(false), false).getCell();
    MobTestUtil.assertCellEquals(expected, found);

    // A seek key smaller than the start key ("a1" < "aa") resolves to the start key.
    byte[] beforeFirst = Bytes.toBytes("a1");
    expected = new KeyValue(firstKey, fam, qual, Long.MAX_VALUE, Type.Put, firstKey);
    KeyValue seek = new KeyValue(beforeFirst, fam, qual, Long.MAX_VALUE, Type.Put, beforeFirst);
    found = mobFile.readCell(seek, false).getCell();
    MobTestUtil.assertCellEquals(expected, found);

    // A seek key bigger than the end key ("z{" > "zz") finds nothing.
    byte[] afterLast = Bytes.toBytes("z{");
    seek = new KeyValue(afterLast, fam, qual, Long.MAX_VALUE, Type.Put, afterLast);
    assertNull(mobFile.readCell(seek, false));
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) KeyValue(org.apache.hadoop.hbase.KeyValue) FileSystem(org.apache.hadoop.fs.FileSystem) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) Cell(org.apache.hadoop.hbase.Cell) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 34 with HStoreFile

use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

The class TestMobStoreCompaction, method countMobCellsInMetadata.

private long countMobCellsInMetadata() throws IOException {
    // Sum the MOB_CELLS_COUNT metadata entry over every store file in the mob family
    // directory; returns 0 when the directory does not exist yet.
    Path mobFamilyDir = MobUtils.getMobFamilyPath(conf, tableDescriptor.getTableName(), familyDescriptor.getNameAsString());
    // Open readers with the block cache size forced to 0.
    Configuration noCacheConf = new Configuration(conf);
    noCacheConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
    CacheConfig noBlockCache = new CacheConfig(noCacheConf);
    long total = 0;
    if (!fs.exists(mobFamilyDir)) {
        return total;
    }
    // NOTE(review): existence is checked on `fs` but the listing uses the test
    // filesystem — presumably the same FS; confirm against the test setup.
    for (FileStatus status : UTIL.getTestFileSystem().listStatus(mobFamilyDir)) {
        HStoreFile storeFile = new HStoreFile(fs, status.getPath(), conf, noBlockCache, BloomType.NONE, true);
        storeFile.initReader();
        Map<byte[], byte[]> fileInfo = storeFile.getReader().loadFileInfo();
        byte[] countBytes = fileInfo.get(MOB_CELLS_COUNT);
        assertTrue(countBytes != null);
        total += Bytes.toLong(countBytes);
    }
    return total;
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig)

Example 35 with HStoreFile

use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

The class TestMobFile, method testGetScanner.

/**
 * Verifies that a MobFile backed by a freshly written store file hands out a non-null
 * scanner of the expected concrete type.
 *
 * @throws Exception on any test failure
 */
@Test
public void testGetScanner() throws Exception {
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(testDir).withFileContext(meta).build();
    MobTestUtil.writeStoreFile(writer, testName.getMethodName());
    MobFile mobFile = new MobFile(new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
    // Open one scanner and assert on that single instance; the original called
    // getScanner() twice, creating a second scanner and asserting each property
    // on a different object.
    Object scanner = mobFile.getScanner();
    assertNotNull(scanner);
    assertTrue(scanner instanceof StoreFileScanner);
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) FileSystem(org.apache.hadoop.fs.FileSystem) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) Test(org.junit.Test)

Aggregations

HStoreFile (org.apache.hadoop.hbase.regionserver.HStoreFile)44 ArrayList (java.util.ArrayList)18 Test (org.junit.Test)16 Path (org.apache.hadoop.fs.Path)11 Configuration (org.apache.hadoop.conf.Configuration)8 HStore (org.apache.hadoop.hbase.regionserver.HStore)8 StripeInformationProvider (org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider)8 IOException (java.io.IOException)6 OptionalLong (java.util.OptionalLong)6 TableName (org.apache.hadoop.hbase.TableName)5 Put (org.apache.hadoop.hbase.client.Put)5 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)5 FileSystem (org.apache.hadoop.fs.FileSystem)4 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)4 StoreFileReader (org.apache.hadoop.hbase.regionserver.StoreFileReader)4 ImmutableList (org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList)4 InterruptedIOException (java.io.InterruptedIOException)3 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)3 ManualEnvironmentEdge (org.apache.hadoop.hbase.util.ManualEnvironmentEdge)3 FileNotFoundException (java.io.FileNotFoundException)2