Use of org.apache.hadoop.hbase.procedure2.ProcedureExecutor in project hbase by apache.
The class TestAccessController, method testListProcedures.
@Test
public void testListProcedures() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  final ProcedureExecutor<MasterProcedureEnv> procExec =
    TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
  Procedure proc = new TestTableDDLProcedure(procExec.getEnvironment(), tableName);
  proc.setOwner(USER_OWNER);
  final long procId = procExec.submitProcedure(proc);
  final List<ProcedureInfo> procInfoList = procExec.listProcedures();

  AccessTestAction listProceduresAction = new AccessTestAction() {
    @Override
    public Object run() throws Exception {
      List<ProcedureInfo> procInfoListClone = new ArrayList<>(procInfoList.size());
      for (ProcedureInfo pi : procInfoList) {
        procInfoListClone.add(pi.clone());
      }
      ACCESS_CONTROLLER.postListProcedures(
        ObserverContext.createAndPrepare(CP_ENV, null), procInfoListClone);
      return null;
    }
  };

  verifyAllowed(listProceduresAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
  verifyAllowed(listProceduresAction, USER_OWNER);
  verifyIfNull(listProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ,
    USER_GROUP_WRITE);
}
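A test like this often also drains the procedure it submitted so later tests start from a clean executor. A minimal sketch, assuming the waitProcedure and assertProcNotFailed helpers exist in this branch's ProcedureTestingUtility:

// Block until the TestTableDDLProcedure submitted above completes,
// then assert it did not fail (helper names assumed, not taken from this page).
ProcedureTestingUtility.waitProcedure(procExec, procId);
ProcedureTestingUtility.assertProcNotFailed(procExec, procId);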
Use of org.apache.hadoop.hbase.procedure2.ProcedureExecutor in project hbase by apache.
The class HMaster, method startProcedureExecutor.
private void startProcedureExecutor() throws IOException {
  final MasterProcedureEnv procEnv = new MasterProcedureEnv(this);
  final Path walDir = new Path(FSUtils.getWALRootDir(this.conf),
    MasterProcedureConstants.MASTER_PROCEDURE_LOGDIR);

  procedureStore = new WALProcedureStore(conf, walDir.getFileSystem(conf), walDir,
    new MasterProcedureEnv.WALStoreLeaseRecovery(this));
  procedureStore.registerListener(new MasterProcedureEnv.MasterProcedureStoreListener(this));
  procedureExecutor = new ProcedureExecutor(conf, procEnv, procedureStore,
    procEnv.getProcedureScheduler());
  configurationManager.registerObserver(procEnv);

  final int numThreads = conf.getInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS,
    Math.max(Runtime.getRuntime().availableProcessors(),
      MasterProcedureConstants.DEFAULT_MIN_MASTER_PROCEDURE_THREADS));
  final boolean abortOnCorruption = conf.getBoolean(
    MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION,
    MasterProcedureConstants.DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION);
  procedureStore.start(numThreads);
  procedureExecutor.start(numThreads, abortOnCorruption);
}
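The same lifecycle can be reproduced outside HMaster against an in-memory store. The sketch below is only an illustration of the pattern above: it swaps WALProcedureStore for the NoopProcedureStore used by the test utilities on this page, and MyEnv is a hypothetical environment type, not an HBase class.

// Minimal executor lifecycle mirroring startProcedureExecutor.
Configuration conf = HBaseConfiguration.create();
NoopProcedureStore store = new NoopProcedureStore();
ProcedureExecutor<MyEnv> executor = new ProcedureExecutor<>(conf, new MyEnv(), store);

int numThreads = 4;               // arbitrary for this sketch
boolean abortOnCorruption = true; // fail fast if the store is corrupted

store.start(numThreads);                        // start the store first
executor.start(numThreads, abortOnCorruption);  // then the executor and its workers

// ... submit work with executor.submitProcedure(...) ...

// Shut down: stop the executor, wait for its threads, then stop the store.
executor.stop();
executor.join();
store.stop(false);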
Use of org.apache.hadoop.hbase.procedure2.ProcedureExecutor in project hbase by apache.
The class ProcedureTestingUtility, method restart.
public static <TEnv> void restart(ProcedureExecutor<TEnv> procExecutor,
    boolean avoidTestKillDuringRestart, boolean failOnCorrupted, Callable<Void> stopAction,
    Callable<Void> actionBeforeStartWorker, Callable<Void> startAction, boolean abort,
    boolean startWorkers) throws Exception {
  final ProcedureStore procStore = procExecutor.getStore();
  final int storeThreads = procExecutor.getCorePoolSize();
  final int execThreads = procExecutor.getCorePoolSize();

  final ProcedureExecutor.Testing testing = procExecutor.testing;
  if (avoidTestKillDuringRestart) {
    procExecutor.testing = null;
  }

  // stop
  LOG.info("RESTART - Stop");
  procExecutor.stop();
  procStore.stop(abort);
  if (stopAction != null) {
    stopAction.call();
  }
  procExecutor.join();
  procExecutor.getScheduler().clear();

  // nothing running...

  // re-start
  LOG.info("RESTART - Start");
  procStore.start(storeThreads);
  procExecutor.init(execThreads, failOnCorrupted);
  if (actionBeforeStartWorker != null) {
    actionBeforeStartWorker.call();
  }
  if (avoidTestKillDuringRestart) {
    procExecutor.testing = testing;
  }
  if (startWorkers) {
    procExecutor.startWorkers();
  }
  if (startAction != null) {
    startAction.call();
  }
}
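A typical call site passes null for the optional hooks; a usage sketch against the signature shown above:

// Restart the executor in place: leave any test-kill settings active,
// fail on a corrupted store, run no extra hooks, stop the store
// non-abortively, and start the workers again immediately.
ProcedureTestingUtility.restart(
  procExec,
  false,  // avoidTestKillDuringRestart
  true,   // failOnCorrupted
  null,   // stopAction
  null,   // actionBeforeStartWorker
  null,   // startAction
  false,  // abort
  true);  // startWorkers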
Use of org.apache.hadoop.hbase.procedure2.ProcedureExecutor in project hbase by apache.
The class ProcedureTestingUtility, method submitAndWait.
public static <TEnv> long submitAndWait(Configuration conf, TEnv env, Procedure<TEnv> proc)
    throws IOException {
  NoopProcedureStore procStore = new NoopProcedureStore();
  ProcedureExecutor<TEnv> procExecutor = new ProcedureExecutor<>(conf, env, procStore);
  procStore.start(1);
  initAndStartWorkers(procExecutor, 1, false, true);
  try {
    return submitAndWait(procExecutor, proc, HConstants.NO_NONCE, HConstants.NO_NONCE);
  } finally {
    procStore.stop(false);
    procExecutor.stop();
  }
}
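Because submitAndWait creates and tears down its own ProcedureExecutor, the caller only supplies a Configuration, an environment instance, and the procedure. A sketch; DummyProcedure is a hypothetical Procedure<TestProcEnv> implementation, not a class from this page:

// The call returns only after the procedure has finished running on the
// throwaway executor built inside submitAndWait.
Configuration conf = HBaseConfiguration.create();
TestProcEnv env = new TestProcEnv();
long procId = ProcedureTestingUtility.submitAndWait(conf, env, new DummyProcedure());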
Use of org.apache.hadoop.hbase.procedure2.ProcedureExecutor in project hbase by apache.
The class TestProcedureInMemoryChore, method setUp.
@Before
public void setUp() throws IOException {
  htu = new HBaseCommonTestingUtil();
  procEnv = new TestProcEnv();
  procStore = new NoopProcedureStore();
  procExecutor = new ProcedureExecutor<>(htu.getConfiguration(), procEnv, procStore);
  procExecutor.testing = new ProcedureExecutor.Testing();
  procStore.start(PROCEDURE_EXECUTOR_SLOTS);
  ProcedureTestingUtility.initAndStartWorkers(procExecutor, PROCEDURE_EXECUTOR_SLOTS, true);
}
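A matching tearDown usually reverses this setup; a minimal sketch, assuming the same fields and the stop/join calls used elsewhere on this page:

@After
public void tearDown() {
  // Stop the executor and its store; join waits for the worker threads to exit.
  procExecutor.stop();
  procStore.stop(false);
  procExecutor.join();
}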