
Example 6 with HFileCorruptionChecker

Use of org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker in project hbase by apache.

The class TestHBaseFsckOneRS, method testQuarantineMissingHFile.

/**
   * This creates a table and simulates the race situation where a concurrent compaction or split
   * has removed an hfile after the corruption checker learned about it.
   */
@Test(timeout = 180000)
public void testQuarantineMissingHFile() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // Inject a fault into the HFileCorruptionChecker (hfcc) created below.
    final FileSystem fs = FileSystem.get(conf);
    HBaseFsck hbck = new HBaseFsck(conf, hbfsckExecutorService) {

        @Override
        public HFileCorruptionChecker createHFileCorruptionChecker(boolean sidelineCorruptHFiles) throws IOException {
            return new HFileCorruptionChecker(conf, executor, sidelineCorruptHFiles) {

                AtomicBoolean attemptedFirstHFile = new AtomicBoolean(false);

                @Override
                protected void checkHFile(Path p) throws IOException {
                    if (attemptedFirstHFile.compareAndSet(false, true)) {
                        // make sure delete happened.
                        assertTrue(fs.delete(p, true));
                    }
                    super.checkHFile(p);
                }
            };
        }
    };
    // 4 attempted, but 1 missing.
    doQuarantineTest(tableName, hbck, 4, 0, 0, 0, 1);
    hbck.close();
}
Also used: Path (org.apache.hadoop.fs.Path), TableName (org.apache.hadoop.hbase.TableName), AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), FileSystem (org.apache.hadoop.fs.FileSystem), HFileCorruptionChecker (org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker), Test (org.junit.Test)
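The fault-injection hook above generalizes beyond deleting only the first hfile. Below is a minimal, hedged sketch (the subclass name, its fields, and its constructor arguments are illustrative, not HBase API) that deletes the n-th hfile the checker visits, reusing only the HBaseFsck and HFileCorruptionChecker constructors and the checkHFile override shown in this example.

import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;

// Illustrative fault injector: deletes the n-th hfile the corruption checker visits,
// simulating a concurrent compaction or split removing the file mid-check.
class MissingNthHFileFsck extends HBaseFsck {

    private final Configuration conf;
    private final ExecutorService exec;
    private final FileSystem fs;
    // 0-based index of the hfile to remove
    private final int target;
    private final AtomicInteger seen = new AtomicInteger(0);

    MissingNthHFileFsck(Configuration conf, ExecutorService exec, FileSystem fs, int target) throws Exception {
        super(conf, exec);
        this.conf = conf;
        this.exec = exec;
        this.fs = fs;
        this.target = target;
    }

    @Override
    public HFileCorruptionChecker createHFileCorruptionChecker(boolean sidelineCorruptHFiles) throws IOException {
        return new HFileCorruptionChecker(conf, exec, sidelineCorruptHFiles) {

            @Override
            protected void checkHFile(Path p) throws IOException {
                // delete the chosen hfile just before it is checked
                if (seen.getAndIncrement() == target) {
                    fs.delete(p, true);
                }
                super.checkHFile(p);
            }
        };
    }
}

With such a helper, the call above would read doQuarantineTest(tableName, new MissingNthHFileFsck(conf, hbfsckExecutorService, fs, 0), 4, 0, 0, 0, 1), with the same expected counts of 4 checked and 1 missing.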

Example 7 with HFileCorruptionChecker

Use of org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker in project hbase by apache.

The class TestHBaseFsckOneRS, method testQuarantineCorruptHFile.

/**
   * This creates a table and then corrupts an hfile.  Hbck should quarantine the file.
   */
@Test(timeout = 180000)
public void testQuarantineCorruptHFile() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    try {
        setupTable(tableName);
        assertEquals(ROWKEYS.length, countRows());
        // flush is async.
        admin.flush(tableName);
        FileSystem fs = FileSystem.get(conf);
        Path hfile = getFlushedHFile(fs, tableName);
        // Disable the table so we can mess with its hfiles directly on HDFS.
        admin.disableTable(tableName);
        // create new corrupt file called deadbeef (valid hfile name)
        Path corrupt = new Path(hfile.getParent(), "deadbeef");
        TestHFile.truncateFile(fs, hfile, corrupt);
        LOG.info("Created corrupted file " + corrupt);
        HBaseFsck.debugLsr(conf, FSUtils.getRootDir(conf));
        // We cannot enable the table here because the enable never finishes due to the corrupt region.
        HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, tableName);
        assertEquals(res.getRetCode(), 0);
        HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
        assertEquals(hfcc.getHFilesChecked(), 5);
        assertEquals(hfcc.getCorrupted().size(), 1);
        assertEquals(hfcc.getFailures().size(), 0);
        assertEquals(hfcc.getQuarantined().size(), 1);
        assertEquals(hfcc.getMissing().size(), 0);
        // It's been fixed; verify that we can enable.
        admin.enableTable(tableName);
    } finally {
        cleanupTable(tableName);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), TableName (org.apache.hadoop.hbase.TableName), FileSystem (org.apache.hadoop.fs.FileSystem), HFileCorruptionChecker (org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker), Test (org.junit.Test)
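The call to HbckTestingUtil.doHFileQuarantine above wraps a quarantine-only hbck run. For illustration, a hedged sketch of a roughly equivalent direct invocation via HBaseFsck.exec follows, reusing the -sidelineCorruptHFiles and -ignorePreCheckPermission flags that appear in Example 8 (whether the helper uses exactly this flag set is an assumption); it would stand in for the doHFileQuarantine call inside the try block above.

// Hedged sketch: run hbck's hfile quarantine directly instead of through HbckTestingUtil.
HBaseFsck hbck = new HBaseFsck(conf, hbfsckExecutorService);
String[] args = { "-sidelineCorruptHFiles", "-ignorePreCheckPermission", tableName.getNameAsString() };
HBaseFsck res = hbck.exec(hbfsckExecutorService, args);
HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
// With the truncated "deadbeef" file in place, one hfile should be reported corrupted and quarantined.
assertEquals(1, hfcc.getCorrupted().size());
assertEquals(1, hfcc.getQuarantined().size());
hbck.close();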

Example 8 with HFileCorruptionChecker

Use of org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker in project hbase by apache.

The class BaseTestHBaseFsck, method doQuarantineTest.

/**
   * Tests that use this should have a timeout, because this method could potentially wait forever.
   */
protected void doQuarantineTest(TableName table, HBaseFsck hbck, int check, int corrupt, int fail, int quar, int missing) throws Exception {
    try {
        setupTable(table);
        assertEquals(ROWKEYS.length, countRows());
        // flush is async.
        admin.flush(table);
        // Disable the table so the fault-injecting hbck can mess with its hfiles on HDFS.
        admin.disableTable(table);
        String[] args = { "-sidelineCorruptHFiles", "-repairHoles", "-ignorePreCheckPermission", table.getNameAsString() };
        HBaseFsck res = hbck.exec(hbfsckExecutorService, args);
        HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
        assertEquals(hfcc.getHFilesChecked(), check);
        assertEquals(hfcc.getCorrupted().size(), corrupt);
        assertEquals(hfcc.getFailures().size(), fail);
        assertEquals(hfcc.getQuarantined().size(), quar);
        assertEquals(hfcc.getMissing().size(), missing);
        // It's been fixed; verify that we can enable.
        admin.enableTableAsync(table);
        while (!admin.isTableEnabled(table)) {
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                e.printStackTrace();
                fail("Interrupted when trying to enable table " + table);
            }
        }
    } finally {
        cleanupTable(table);
    }
}
Also used: HFileCorruptionChecker (org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker)
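As a point of comparison with Example 6, a baseline run with a plain HBaseFsck and no fault injection would expect every flushed hfile to be checked and nothing corrupted, failed, quarantined, or missing. A hedged sketch follows (the test method name is illustrative, and the count of four checked hfiles assumes the same table layout produced by setupTable in the examples above).

@Test(timeout = 180000)
public void testQuarantineBaseline() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // plain hbck, no overridden corruption checker
    HBaseFsck hbck = new HBaseFsck(conf, hbfsckExecutorService);
    // 4 hfiles checked; nothing corrupted, failed, quarantined, or missing
    doQuarantineTest(tableName, hbck, 4, 0, 0, 0, 0);
    hbck.close();
}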

Aggregations

HFileCorruptionChecker (org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker): 8
Path (org.apache.hadoop.fs.Path): 7
TableName (org.apache.hadoop.hbase.TableName): 6
Test (org.junit.Test): 6
FileSystem (org.apache.hadoop.fs.FileSystem): 5
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 3
IOException (java.io.IOException): 1
InterruptedIOException (java.io.InterruptedIOException): 1
ArrayList (java.util.ArrayList): 1
Put (org.apache.hadoop.hbase.client.Put): 1
Table (org.apache.hadoop.hbase.client.Table): 1
AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException): 1
Ignore (org.junit.Ignore): 1