Use of org.apache.hadoop.hbase.ProcedureInfo in project hbase by apache.
The class HBaseAdmin, method convert.
private static ProcedureInfo convert(final ProcedureProtos.Procedure procProto) {
  NonceKey nonceKey = null;
  if (procProto.getNonce() != HConstants.NO_NONCE) {
    nonceKey = new NonceKey(procProto.getNonceGroup(), procProto.getNonce());
  }
  org.apache.hadoop.hbase.ProcedureState procedureState =
      org.apache.hadoop.hbase.ProcedureState.valueOf(procProto.getState().name());
  return new ProcedureInfo(procProto.getProcId(), procProto.getClassName(), procProto.getOwner(),
      procedureState, procProto.hasParentId() ? procProto.getParentId() : -1, nonceKey,
      procProto.hasException() ? ForeignExceptionUtil.toIOException(procProto.getException()) : null,
      procProto.getLastUpdate(), procProto.getStartTime(),
      procProto.hasResult() ? procProto.getResult().toByteArray() : null);
}
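A minimal usage sketch of the converter above, assuming the caller already holds a list of ProcedureProtos.Procedure messages; the helper name convertAll and the protoProcs parameter are hypothetical, not part of HBaseAdmin:

// Hypothetical helper: convert every protobuf procedure into a client-side ProcedureInfo.
// convert(...) is the private HBaseAdmin method shown above.
private static List<ProcedureInfo> convertAll(final List<ProcedureProtos.Procedure> protoProcs) {
  final List<ProcedureInfo> infos = new ArrayList<>(protoProcs.size());
  for (ProcedureProtos.Procedure procProto : protoProcs) {
    infos.add(convert(procProto));
  }
  return infos;
}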
Use of org.apache.hadoop.hbase.ProcedureInfo in project hbase by apache.
The class TestAccessController, method testListProcedures.
@Test
public void testListProcedures() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  final ProcedureExecutor<MasterProcedureEnv> procExec =
      TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
  Procedure proc = new TestTableDDLProcedure(procExec.getEnvironment(), tableName);
  proc.setOwner(USER_OWNER);
  final long procId = procExec.submitProcedure(proc);
  final List<ProcedureInfo> procInfoList = procExec.listProcedures();

  AccessTestAction listProceduresAction = new AccessTestAction() {
    @Override
    public Object run() throws Exception {
      List<ProcedureInfo> procInfoListClone = new ArrayList<>(procInfoList.size());
      for (ProcedureInfo pi : procInfoList) {
        procInfoListClone.add(pi.clone());
      }
      ACCESS_CONTROLLER.postListProcedures(ObserverContext.createAndPrepare(CP_ENV, null),
          procInfoListClone);
      return null;
    }
  };

  verifyAllowed(listProceduresAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
  verifyAllowed(listProceduresAction, USER_OWNER);
  verifyIfNull(listProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ,
      USER_GROUP_WRITE);
}
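The expectations above hinge on how the coprocessor hook restricts the list to what the caller may see. The sketch below only illustrates that idea and is not the actual AccessController.postListProcedures implementation; the method name and the caller/callerIsAdmin parameters are assumptions:

// Illustrative sketch only: keep a procedure visible to admins and to its owner,
// remove it from the list for everyone else.
void filterProceduresForCaller(List<ProcedureInfo> procInfoList, String caller,
    boolean callerIsAdmin) {
  Iterator<ProcedureInfo> it = procInfoList.iterator();
  while (it.hasNext()) {
    ProcedureInfo pi = it.next();
    if (!callerIsAdmin && !caller.equals(pi.getProcOwner())) {
      it.remove();
    }
  }
}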
Use of org.apache.hadoop.hbase.ProcedureInfo in project hbase by apache.
The class ProcedureExecutor, method removeResult.
/**
 * Mark the specified completed procedure as ready to remove.
 * @param procId the ID of the procedure to remove
 */
public void removeResult(final long procId) {
  final ProcedureInfo result = completed.get(procId);
  if (result == null) {
    assert !procedures.containsKey(procId) : "procId=" + procId + " is still running";
    if (LOG.isDebugEnabled()) {
      LOG.debug("procId=" + procId + " already removed by the cleaner.");
    }
    return;
  }
  // The CompletedProcedureCleaner will take care of deletion, once the TTL has expired.
  result.setClientAckTime(EnvironmentEdgeManager.currentTime());
}
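A hedged sketch of the intended call pattern: once a caller has read the outcome of a completed procedure, it acknowledges it via removeResult so the cleaner can evict the entry early. The helper name fetchAndAckResult is hypothetical, and the ProcedureInfo accessors used are assumptions apart from removeResult itself:

// Sketch only: read a completed procedure's outcome, then acknowledge it so the
// CompletedProcedureCleaner can drop the entry before the longer un-acked TTL expires.
static byte[] fetchAndAckResult(ProcedureExecutor<?> procExec, long procId) throws IOException {
  ProcedureInfo info = procExec.getResult(procId);  // assumed lookup of the completed entry
  if (info == null) {
    return null;                                    // still running, or already evicted
  }
  if (info.isFailed()) {                            // assumes ProcedureInfo#isFailed
    throw info.getException();                      // assumes the stored IOException
  }
  procExec.removeResult(procId);                    // ack: the cleaner may now evict early
  return info.getResult();                          // assumes ProcedureInfo#getResult
}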
Use of org.apache.hadoop.hbase.ProcedureInfo in project hbase by apache.
The class ProcedureExecutor, method procedureFinished.
private void procedureFinished(final Procedure proc) {
  // call the procedure completion cleanup handler
  execCompletionCleanup(proc);

  // update the executor internal state maps
  final ProcedureInfo procInfo = ProcedureUtil.convertToProcedureInfo(proc, proc.getNonceKey());
  if (!proc.shouldWaitClientAck(getEnvironment())) {
    procInfo.setClientAckTime(0);
  }

  completed.put(procInfo.getProcId(), procInfo);
  rollbackStack.remove(proc.getProcId());
  procedures.remove(proc.getProcId());

  // call the runnableSet completion cleanup handler
  try {
    scheduler.completionCleanup(proc);
  } catch (Throwable e) {
    // Catch NullPointerExceptions or similar errors...
    LOG.error("CODE-BUG: uncaught runtime exception for completion cleanup: " + proc, e);
  }

  // Notify the listeners
  sendProcedureFinishedNotification(proc.getProcId());
}
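Setting clientAckTime to 0 when no client acknowledgement is expected presumably makes the entry immediately eligible for the shorter, acknowledged eviction path. A rough sketch of the kind of decision the CompletedProcedureCleaner could make over the completed map; the TTL parameters, accessors, and exact conditions are assumptions, not the actual cleaner code:

// Illustrative only: evict acknowledged entries after a short TTL, un-acked ones after
// a longer one. evictAckTtl/evictTtl and the accessors used here are assumptions.
boolean shouldEvict(ProcedureInfo info, long now, long evictAckTtl, long evictTtl) {
  if (info.hasClientAckTime()) {                      // assumes ProcedureInfo#hasClientAckTime
    return (now - info.getClientAckTime()) >= evictAckTtl;
  }
  return (now - info.getLastUpdate()) >= evictTtl;    // fall back to the last update time
}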
Use of org.apache.hadoop.hbase.ProcedureInfo in project hbase by apache.
The class ProcedureExecutor, method load.
private void load(final boolean abortOnCorruption) throws IOException {
  Preconditions.checkArgument(completed.isEmpty(), "completed not empty");
  Preconditions.checkArgument(rollbackStack.isEmpty(), "rollback state not empty");
  Preconditions.checkArgument(procedures.isEmpty(), "procedure map not empty");
  Preconditions.checkArgument(scheduler.size() == 0, "run queue not empty");

  store.load(new ProcedureStore.ProcedureLoader() {
    @Override
    public void setMaxProcId(long maxProcId) {
      assert lastProcId.get() < 0 : "expected only one call to setMaxProcId()";
      LOG.debug("Load maxProcId=" + maxProcId);
      lastProcId.set(maxProcId);
    }

    @Override
    public void load(ProcedureIterator procIter) throws IOException {
      loadProcedures(procIter, abortOnCorruption);
    }

    @Override
    public void handleCorrupted(ProcedureIterator procIter) throws IOException {
      int corruptedCount = 0;
      while (procIter.hasNext()) {
        ProcedureInfo proc = procIter.nextAsProcedureInfo();
        LOG.error("Corrupt " + proc);
        corruptedCount++;
      }
      if (abortOnCorruption && corruptedCount > 0) {
        throw new IOException("found " + corruptedCount + " corrupted procedure(s) on replay");
      }
    }
  });
}
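A hedged sketch of a typical call site: load() runs as part of executor startup, so recovery from the ProcedureStore completes before any worker picks up runnable procedures. The wiring below is illustrative; conf, env, store, numThreads and abortOnCorruption are assumed to be provided by the caller, and the constructor shown is an assumption about this ProcedureExecutor version:

// Sketch only: start(...) is assumed to replay the store (the load above) before
// dispatching worker threads.
ProcedureExecutor<MasterProcedureEnv> procExec = new ProcedureExecutor<>(conf, env, store);
store.start(numThreads);                          // open the backing ProcedureStore first
procExec.start(numThreads, abortOnCorruption);    // recovers state, then runs procedures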