Example 51 with MasterProcedureEnv

Use of org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv in project hbase by apache.

From the class TestMergeTableRegionsProcedure, method testMergeWithoutPONR.

@Test
public void testMergeWithoutPONR() throws Exception {
    final TableName tableName = TableName.valueOf("testMergeWithoutPONR");
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
    List<RegionInfo> tableRegions = createTable(tableName);
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
    RegionInfo[] regionsToMerge = new RegionInfo[2];
    regionsToMerge[0] = tableRegions.get(0);
    regionsToMerge[1] = tableRegions.get(1);
    long procId = procExec.submitProcedure(new MergeTableRegionsProcedure(procExec.getEnvironment(), regionsToMerge, true));
    // Execute until step 9 of the merge procedure
    // NOTE: step 9 is after step MERGE_TABLE_REGIONS_UPDATE_META
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId, 9, false);
    // Unset the kill toggle so the procedure executor can finish normally
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
    MasterProcedureTestingUtility.restartMasterProcedureExecutor(procExec);
    ProcedureTestingUtility.waitProcedure(procExec, procId);
    assertRegionCount(tableName, initialRegionCount - 1);
}
Also used: TableName (org.apache.hadoop.hbase.TableName), MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), Test (org.junit.Test)
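The kill-and-recover flow above is a recurring pattern in these procedure tests. As a minimal sketch built only from the utilities already shown in this example (the helper name runKillAndRecover and its parameters are illustrative, not part of the HBase test), the same steps could be factored into a reusable method:

// Hypothetical helper: submit a procedure with the store-update kill toggle on,
// replay the steps up to lastStep, then clear the toggle, restart the executor
// and wait for the procedure to complete.
static long runKillAndRecover(ProcedureExecutor<MasterProcedureEnv> procExec,
    Procedure<MasterProcedureEnv> proc, int lastStep) throws Exception {
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
    long procId = procExec.submitProcedure(proc);
    // Each step is killed and re-executed once, up to lastStep.
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId, lastStep, false);
    // Turn the kill toggle off and let the remaining steps run normally.
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
    MasterProcedureTestingUtility.restartMasterProcedureExecutor(procExec);
    ProcedureTestingUtility.waitProcedure(procExec, procId);
    return procId;
}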

Example 52 with MasterProcedureEnv

Use of org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv in project hbase by apache.

From the class MockMasterServices, method startProcedureExecutor.

private void startProcedureExecutor(final RSProcedureDispatcher remoteDispatcher) throws IOException {
    final Configuration conf = getConfiguration();
    this.procedureStore = new NoopProcedureStore();
    this.procedureStore.registerListener(new ProcedureStoreListener() {

        @Override
        public void abortProcess() {
            abort("The Procedure Store lost the lease", null);
        }
    });
    this.procedureEnv = new MasterProcedureEnv(this, remoteDispatcher != null ? remoteDispatcher : new RSProcedureDispatcher(this));
    this.procedureExecutor = new ProcedureExecutor<>(conf, procedureEnv, procedureStore, procedureEnv.getProcedureScheduler());
    final int numThreads = conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
        Math.max(Runtime.getRuntime().availableProcessors(),
            MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
    final boolean abortOnCorruption = conf.getBoolean(
        MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
        MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
    this.procedureStore.start(numThreads);
    ProcedureTestingUtility.initAndStartWorkers(procedureExecutor, numThreads, abortOnCorruption);
    this.procedureEnv.getRemoteDispatcher().start();
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv), NoopProcedureStore (org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore), ProcedureStoreListener (org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureStoreListener), RSProcedureDispatcher (org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher)
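The matching teardown is not part of this snippet. A minimal sketch, assuming the stop() methods on the executor, the store, and the remote dispatcher that mirror the start() calls above (the method name stopProcedureExecutor is illustrative), could look like:

private void stopProcedureExecutor() {
    if (this.procedureEnv != null) {
        // Stop dispatching remote procedure calls before stopping the workers.
        this.procedureEnv.getRemoteDispatcher().stop();
    }
    if (this.procedureExecutor != null) {
        // Stop the executor's worker threads; no further procedures will run.
        this.procedureExecutor.stop();
    }
    if (this.procedureStore != null) {
        // Shut down the no-op store; passing true would mark an aborted stop.
        this.procedureStore.stop(false);
    }
}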

Example 53 with MasterProcedureEnv

Use of org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv in project hbase by apache.

From the class TestMergesSplitsAddToTracker, method testCommitMergedRegion.

@Test
public void testCommitMergedRegion() throws Exception {
    TableName table = createTable(null);
    // splitting the table first
    TEST_UTIL.getAdmin().split(table, Bytes.toBytes("002"));
    // Add data and flush to create files in the two different regions
    putThreeRowsAndFlush(table);
    List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(table);
    HRegion first = regions.get(0);
    HRegion second = regions.get(1);
    HRegionFileSystem regionFS = first.getRegionFileSystem();
    RegionInfo mergeResult = RegionInfoBuilder.newBuilder(table)
        .setStartKey(first.getRegionInfo().getStartKey())
        .setEndKey(second.getRegionInfo().getEndKey())
        .setSplit(false)
        .setRegionId(first.getRegionInfo().getRegionId() + EnvironmentEdgeManager.currentTime())
        .build();
    HRegionFileSystem mergeFS = HRegionFileSystem.createRegionOnFileSystem(
        TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(),
        regionFS.getFileSystem(), regionFS.getTableDir(), mergeResult);
    List<Path> mergedFiles = new ArrayList<>();
    // merge file from first region
    mergedFiles.add(mergeFileFromRegion(first, mergeFS));
    // merge file from second region
    mergedFiles.add(mergeFileFromRegion(second, mergeFS));
    MasterProcedureEnv env = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getEnvironment();
    mergeFS.commitMergedRegion(mergedFiles, env);
    // validate
    FileSystem fs = first.getRegionFileSystem().getFileSystem();
    Path finalMergeDir = new Path(first.getRegionFileSystem().getTableDir(), mergeResult.getEncodedName());
    verifyFilesAreTracked(finalMergeDir, fs);
}
Also used: Path (org.apache.hadoop.fs.Path), TableName (org.apache.hadoop.hbase.TableName), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayList (java.util.ArrayList), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv), StoreFileTrackerForTest (org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerForTest), Test (org.junit.Test)
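The verifyFilesAreTracked helper is referenced but not included in this snippet; in the real test it checks tracking through StoreFileTrackerForTest. As a rough, hypothetical stand-in that only asserts the committed files exist on the filesystem (the method name and assertions are assumptions), one could write:

// Requires org.apache.hadoop.fs.FileStatus and org.junit.Assert.assertTrue.
private void verifyStoreFilesExist(Path regionDir, FileSystem fs) throws IOException {
    FileStatus[] families = fs.listStatus(regionDir);
    assertTrue("expected at least one entry under " + regionDir, families.length > 0);
    for (FileStatus family : families) {
        // Skip non-directories such as the .regioninfo file.
        if (!family.isDirectory()) {
            continue;
        }
        // Each column family directory should hold the committed store file(s).
        assertTrue("no store files under " + family.getPath(),
            fs.listStatus(family.getPath()).length > 0);
    }
}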

Example 54 with MasterProcedureEnv

Use of org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv in project hbase by apache.

From the class TestMergesSplitsAddToTracker, method testCommitDaughterRegion.

@Test
public void testCommitDaughterRegion() throws Exception {
    TableName table = createTable(null);
    // first put some data in order to have a store file created
    putThreeRowsAndFlush(table);
    HRegion region = TEST_UTIL.getHBaseCluster().getRegions(table).get(0);
    HRegionFileSystem regionFS = region.getStores().get(0).getRegionFileSystem();
    RegionInfo daughterA = RegionInfoBuilder.newBuilder(table)
        .setStartKey(region.getRegionInfo().getStartKey())
        .setEndKey(Bytes.toBytes("002"))
        .setSplit(false)
        .setRegionId(region.getRegionInfo().getRegionId() + EnvironmentEdgeManager.currentTime())
        .build();
    RegionInfo daughterB = RegionInfoBuilder.newBuilder(table)
        .setStartKey(Bytes.toBytes("002"))
        .setEndKey(region.getRegionInfo().getEndKey())
        .setSplit(false)
        .setRegionId(region.getRegionInfo().getRegionId())
        .build();
    HStoreFile file = (HStoreFile) region.getStore(FAMILY_NAME).getStorefiles().toArray()[0];
    List<Path> splitFilesA = new ArrayList<>();
    splitFilesA.add(regionFS.splitStoreFile(daughterA, Bytes.toString(FAMILY_NAME), file,
        Bytes.toBytes("002"), false, region.getSplitPolicy()));
    List<Path> splitFilesB = new ArrayList<>();
    splitFilesB.add(regionFS.splitStoreFile(daughterB, Bytes.toString(FAMILY_NAME), file,
        Bytes.toBytes("002"), true, region.getSplitPolicy()));
    MasterProcedureEnv env = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getEnvironment();
    Path resultA = regionFS.commitDaughterRegion(daughterA, splitFilesA, env);
    Path resultB = regionFS.commitDaughterRegion(daughterB, splitFilesB, env);
    FileSystem fs = regionFS.getFileSystem();
    verifyFilesAreTracked(resultA, fs);
    verifyFilesAreTracked(resultB, fs);
}
Also used: Path (org.apache.hadoop.fs.Path), TableName (org.apache.hadoop.hbase.TableName), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayList (java.util.ArrayList), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv), StoreFileTrackerForTest (org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerForTest), Test (org.junit.Test)
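putThreeRowsAndFlush is referenced but not included in this snippet. A hypothetical sketch of such a helper (the row keys, the qualifier "q", and the use of TEST_UTIL.flush are assumptions chosen to match the "002" split key used above):

// Requires org.apache.hadoop.hbase.client.Table and org.apache.hadoop.hbase.client.Put.
private void putThreeRowsAndFlush(TableName table) throws IOException {
    try (Table tbl = TEST_UTIL.getConnection().getTable(table)) {
        for (String row : new String[] { "001", "002", "003" }) {
            Put put = new Put(Bytes.toBytes(row));
            put.addColumn(FAMILY_NAME, Bytes.toBytes("q"), Bytes.toBytes(row));
            tbl.put(put);
        }
    }
    // Flush so the rows end up in store files rather than only in the memstore.
    TEST_UTIL.flush(table);
}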

Example 55 with MasterProcedureEnv

Use of org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv in project hbase by apache.

From the class TestSplitTableRegionProcedure, method testRecoveryAndDoubleExecution.

@Test
public void testRecoveryAndDoubleExecution() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
    RegionInfo[] regions = MasterProcedureTestingUtility.createTable(procExec, tableName, null, columnFamilyName1, columnFamilyName2);
    insertData(UTIL, tableName, rowCount, startRowNum, columnFamilyName1, columnFamilyName2);
    int splitRowNum = startRowNum + rowCount / 2;
    byte[] splitKey = Bytes.toBytes("" + splitRowNum);
    assertTrue("not able to find a splittable region", regions != null);
    assertTrue("not able to find a splittable region", regions.length == 1);
    ProcedureTestingUtility.waitNoProcedureRunning(procExec);
    ProcedureTestingUtility.setKillIfHasParent(procExec, false);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
    // collect AM metrics before test
    collectAssignmentManagerMetrics();
    // Split region of the table
    long procId = procExec.submitProcedure(new SplitTableRegionProcedure(procExec.getEnvironment(), regions[0], splitKey));
    // Restart the executor and execute the step twice
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId);
    ProcedureTestingUtility.assertProcNotFailed(procExec, procId);
    verify(tableName, splitRowNum);
    assertEquals(splitSubmittedCount + 1, splitProcMetrics.getSubmittedCounter().getCount());
    assertEquals(splitFailedCount, splitProcMetrics.getFailedCounter().getCount());
}
Also used: TableName (org.apache.hadoop.hbase.TableName), MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), Test (org.junit.Test)
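The verify helper called above is not part of this snippet. A simpler, hypothetical row-count check in the same spirit (the method name and the plain Scan are assumptions; the real helper also inspects the daughter regions) might be:

// Requires org.apache.hadoop.hbase.client.Table, Scan, Result and ResultScanner.
private void assertRowCount(TableName tableName, int expectedRows) throws IOException {
    int rows = 0;
    try (Table table = UTIL.getConnection().getTable(tableName);
        ResultScanner scanner = table.getScanner(new Scan())) {
        // Count every row returned by a full table scan.
        for (Result unused : scanner) {
            rows++;
        }
    }
    assertEquals(expectedRows, rows);
}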

Aggregations

MasterProcedureEnv (org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv): 65
Test (org.junit.Test): 48
TableName (org.apache.hadoop.hbase.TableName): 42
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 39
IOException (java.io.IOException): 16
ProcedureExecutor (org.apache.hadoop.hbase.procedure2.ProcedureExecutor): 16
ArrayList (java.util.ArrayList): 13
Path (org.apache.hadoop.fs.Path): 13
HBaseClassTestRule (org.apache.hadoop.hbase.HBaseClassTestRule): 13
MasterTests (org.apache.hadoop.hbase.testclassification.MasterTests): 13
Bytes (org.apache.hadoop.hbase.util.Bytes): 13
ClassRule (org.junit.ClassRule): 13
Category (org.junit.experimental.categories.Category): 13
BeforeClass (org.junit.BeforeClass): 12
HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil): 11
ServerName (org.apache.hadoop.hbase.ServerName): 10
Procedure (org.apache.hadoop.hbase.procedure2.Procedure): 10
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 10
MediumTests (org.apache.hadoop.hbase.testclassification.MediumTests): 10
AfterClass (org.junit.AfterClass): 10