Use of org.apache.hadoop.util.Shell.ExitCodeException in project hadoop by apache.
The class HardLink, method getLinkCount.
/**
 * Retrieves the number of links to the specified file.
 */
public static int getLinkCount(File fileName) throws IOException {
  if (fileName == null) {
    throw new IOException("invalid argument to getLinkCount: file name is null");
  }
  if (!fileName.exists()) {
    throw new FileNotFoundException(fileName + " not found.");
  }
  // construct and execute shell command
  String[] cmd = getHardLinkCommand.linkCount(fileName);
  String inpMsg = null;
  String errMsg = null;
  int exitValue = -1;
  BufferedReader in = null;
  ShellCommandExecutor shexec = new ShellCommandExecutor(cmd);
  try {
    shexec.execute();
    in = new BufferedReader(new StringReader(shexec.getOutput()));
    inpMsg = in.readLine();
    exitValue = shexec.getExitCode();
    if (inpMsg == null || exitValue != 0) {
      throw createIOException(fileName, inpMsg, errMsg, exitValue, null);
    }
    if (Shell.SOLARIS) {
      // on Solaris the link count is the second whitespace-separated field
      String[] result = inpMsg.split("\\s+");
      return Integer.parseInt(result[1]);
    } else {
      return Integer.parseInt(inpMsg);
    }
  } catch (ExitCodeException e) {
    inpMsg = shexec.getOutput();
    errMsg = e.getMessage();
    exitValue = e.getExitCode();
    throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
  } catch (NumberFormatException e) {
    throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
  } finally {
    IOUtils.closeStream(in);
  }
}
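For context, a minimal caller sketch follows. It assumes HardLink.createHardLink(File, File) from the same org.apache.hadoop.fs.HardLink class; the file paths and command-line handling are illustrative only.

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.HardLink;

public class LinkCountDemo {
  public static void main(String[] args) throws IOException {
    // any existing local file passed on the command line will do
    File original = new File(args[0]);
    File link = new File(original.getParentFile(), original.getName() + ".lnk");
    // createHardLink is assumed to live alongside getLinkCount in HardLink
    HardLink.createHardLink(original, link);
    // with one extra hard link, the reported count should be at least 2
    System.out.println("link count: " + HardLink.getLinkCount(original));
  }
}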
Use of org.apache.hadoop.util.Shell.ExitCodeException in project hadoop by apache.
The class ProcessTree, method isProcessGroupAlive.
/**
 * Is the process group with the given pgrpId still alive?
 *
 * This method assumes that isAlive is called on a pid that was alive not
 * too long ago, and hence assumes no chance of pid-wrapping-around.
 *
 * @param pgrpId process group id
 * @return true if any process in the group is alive.
 */
public static boolean isProcessGroupAlive(String pgrpId) {
  ShellCommandExecutor shexec = null;
  try {
    // "kill -0 -PGRP" sends no signal; it only checks whether the
    // process group exists and can be signalled
    String[] args = { "kill", "-0", "-" + pgrpId };
    shexec = new ShellCommandExecutor(args);
    shexec.execute();
  } catch (ExitCodeException ee) {
    // non-zero exit code: no live process in the group
    return false;
  } catch (IOException ioe) {
    LOG.warn("Error executing shell command " + shexec.toString(), ioe);
    return false;
  }
  return shexec.getExitCode() == 0;
}
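A hedged usage sketch: polling isProcessGroupAlive until the whole group has exited. The one-second interval, the source of pgrpId, and the import path for ProcessTree are illustrative assumptions, not part of the Hadoop API.

import org.apache.hadoop.util.ProcessTree; // assumed location of ProcessTree

public class WaitForGroupExit {
  public static void main(String[] args) throws InterruptedException {
    // process group id, e.g. recorded when the task was launched
    String pgrpId = args[0];
    // kill -0 -PGRP succeeds while at least one member is alive,
    // so poll until the whole group is gone
    while (ProcessTree.isProcessGroupAlive(pgrpId)) {
      Thread.sleep(1000L);
    }
    System.out.println("process group " + pgrpId + " has exited");
  }
}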
Use of org.apache.hadoop.util.Shell.ExitCodeException in project hadoop by apache.
The class PrivilegedOperationExecutor, method executePrivilegedOperation.
/**
 * Executes a privileged operation. It is up to the callers to ensure that
 * each privileged operation's parameters are constructed correctly. The
 * parameters are passed verbatim to the container-executor binary.
 *
 * @param prefixCommands in some cases (e.g., setting priorities using nice),
 *          prefix commands are necessary
 * @param operation the type and arguments for the operation to be executed
 * @param workingDir (optional) working directory for execution
 * @param env (optional) environment of the command; will include specified vars
 * @param grabOutput return (possibly large) shell command output
 * @param inheritParentEnv inherit the env vars from the parent process
 * @return stdout contents from shell executor - useful for some privileged
 *         operations - e.g --tc_read
 * @throws org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationException
 */
public String executePrivilegedOperation(List<String> prefixCommands,
    PrivilegedOperation operation, File workingDir, Map<String, String> env,
    boolean grabOutput, boolean inheritParentEnv)
    throws PrivilegedOperationException {
  String[] fullCommandArray =
      getPrivilegedOperationExecutionCommand(prefixCommands, operation);
  ShellCommandExecutor exec = new ShellCommandExecutor(fullCommandArray,
      workingDir, env, 0L, inheritParentEnv);
  try {
    exec.execute();
    if (LOG.isDebugEnabled()) {
      LOG.debug("command array:");
      LOG.debug(Arrays.toString(fullCommandArray));
      LOG.debug("Privileged Execution Operation Output:");
      LOG.debug(exec.getOutput());
    }
  } catch (ExitCodeException e) {
    if (operation.isFailureLoggingEnabled()) {
      StringBuilder logBuilder =
          new StringBuilder("Shell execution returned exit code: ")
              .append(exec.getExitCode())
              .append(". Privileged Execution Operation Stderr: ")
              .append(System.lineSeparator())
              .append(e.getMessage())
              .append(System.lineSeparator())
              .append("Stdout: ")
              .append(exec.getOutput())
              .append(System.lineSeparator());
      logBuilder.append("Full command array for failed execution: ")
          .append(System.lineSeparator())
          .append(Arrays.toString(fullCommandArray));
      LOG.warn(logBuilder.toString());
    }
    // stderr from the shell executor is stuffed into the exception
    // 'message' - so, we have to extract it and set it as the error out
    throw new PrivilegedOperationException(e, e.getExitCode(),
        exec.getOutput(), e.getMessage());
  } catch (IOException e) {
    LOG.warn("IOException executing command: ", e);
    throw new PrivilegedOperationException(e);
  }
  if (grabOutput) {
    return exec.getOutput();
  }
  return null;
}
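A caller-side sketch of the error path may help. It assumes PrivilegedOperationException exposes getExitCode() and getErrorOutput() accessors matching the constructor arguments used above; the operation object and executor wiring are placeholders the caller must supply.

import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperation;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationExecutor;

public class PrivilegedOpCaller {
  /**
   * Runs the given operation and returns its stdout, printing diagnostics
   * on failure. getErrorOutput() is assumed to mirror the errorOutput
   * argument passed to PrivilegedOperationException in the catch block above.
   */
  static String runAndCapture(PrivilegedOperationExecutor executor,
      PrivilegedOperation op) throws PrivilegedOperationException {
    try {
      return executor.executePrivilegedOperation(
          null,   // no prefix commands (e.g., no nice wrapper)
          op,     // type and arguments, constructed by the caller
          null,   // default working directory
          null,   // no extra environment
          true,   // grabOutput: we want the stdout contents back
          false); // do not inherit the parent process environment
    } catch (PrivilegedOperationException e) {
      // exit code and stderr were extracted from ExitCodeException above
      System.err.println("container-executor failed, exit code "
          + e.getExitCode() + ", stderr: " + e.getErrorOutput());
      throw e;
    }
  }
}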
Use of org.apache.hadoop.util.Shell.ExitCodeException in project hadoop by apache.
The class TestContainerLaunch, method testContainerLaunchStdoutAndStderrDiagnostics.
@Test(timeout = 20000)
public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    // echo "hello" to stdout, echo "error" to stderr, and exit with code 2
    String command = Shell.WINDOWS
        ? "@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2"
        : "echo \"hello\"; echo \"error\" 1>&2; exit 2;";
    PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
    FileUtil.setExecutable(shellFile, true);
    writer.println(command);
    writer.close();
    Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
    FileOutputStream fos = new FileOutputStream(shellFile, true);
    Map<String, String> env = new HashMap<String, String>();
    List<String> commands = new ArrayList<String>();
    commands.add(command);
    ContainerExecutor exec = new DefaultContainerExecutor();
    exec.setConf(new YarnConfiguration());
    exec.writeLaunchEnv(fos, env, resources, commands,
        new Path(localLogDir.getAbsolutePath()), "user");
    fos.flush();
    fos.close();
    Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
        new String[] { shellFile.getAbsolutePath() }, tmpDir);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch (ExitCodeException e) {
      diagnostics = e.getMessage();
    }
    // stderr is surfaced through the exception message
    Assert.assertTrue(diagnostics.contains("error"));
    // stdout is still available from the executor
    Assert.assertTrue(shexc.getOutput().contains("hello"));
    Assert.assertTrue(shexc.getExitCode() == 2);
  } finally {
    // cleanup
    if (shellFile != null && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
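The behavior this test asserts can be reduced to a standalone sketch: ShellCommandExecutor throws ExitCodeException on a non-zero exit, stderr rides along in the exception message, and stdout remains available via getOutput(). The bash -c invocation is an illustrative, Unix-only stand-in for the script file the test generates.

import java.io.IOException;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class DiagnosticsDemo {
  public static void main(String[] args) throws IOException {
    ShellCommandExecutor shexc = new ShellCommandExecutor(
        new String[] { "bash", "-c", "echo hello; echo error 1>&2; exit 2" });
    try {
      shexc.execute();
    } catch (ExitCodeException e) {
      System.out.println("exit code: " + e.getExitCode());   // 2
      System.out.println("stderr:    " + e.getMessage());    // error
      System.out.println("stdout:    " + shexc.getOutput()); // hello
    }
  }
}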
Use of org.apache.hadoop.util.Shell.ExitCodeException in project hadoop by apache.
The class TestContainerLaunch, method testInvalidEnvSyntaxDiagnostics.
@Test(timeout = 20000)
public void testInvalidEnvSyntaxDiagnostics() throws IOException {
  File shellFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
    FileOutputStream fos = new FileOutputStream(shellFile);
    FileUtil.setExecutable(shellFile, true);
    Map<String, String> env = new HashMap<String, String>();
    // invalid env: the value contains raw newlines, which break the
    // generated launch script
    env.put("APPLICATION_WORKFLOW_CONTEXT",
        "{\"workflowId\":\"609f91c5cd83\","
            + "\"workflowName\":\"\n\ninsert table "
            + "\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, ");
    List<String> commands = new ArrayList<String>();
    DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
    defaultContainerExecutor.setConf(new YarnConfiguration());
    defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands,
        new Path(localLogDir.getAbsolutePath()), "user");
    fos.flush();
    fos.close();
    // force LANG=C so the shell's error message is in English and the
    // assertion below can match it
    Map<String, String> cmdEnv = new HashMap<String, String>();
    cmdEnv.put("LANG", "C");
    Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
        new String[] { shellFile.getAbsolutePath() }, tmpDir, cmdEnv);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch (ExitCodeException e) {
      diagnostics = e.getMessage();
    }
    Assert.assertTrue(diagnostics.contains(Shell.WINDOWS
        ? "is not recognized as an internal or external command"
        : "command not found"));
    Assert.assertTrue(shexc.getExitCode() != 0);
  } finally {
    // cleanup
    if (shellFile != null && shellFile.exists()) {
      shellFile.delete();
    }
  }
}
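To make the failure mode concrete, the sketch below mimics (but does not reproduce) the export statement writeLaunchEnv emits for that entry; the exact rendering is an assumption for illustration.

import java.util.HashMap;
import java.util.Map;

public class BrokenExportDemo {
  public static void main(String[] args) {
    Map<String, String> env = new HashMap<String, String>();
    env.put("APPLICATION_WORKFLOW_CONTEXT",
        "{\"workflowId\":\"609f91c5cd83\",\"workflowName\":\"\n\ninsert table ");
    for (Map.Entry<String, String> e : env.entrySet()) {
      // each raw '\n' in the value starts a new line in the launch script,
      // so the shell later tries to run fragments like "insert table" as
      // commands and fails with "command not found" - the message the
      // test asserts on
      System.out.println("export " + e.getKey() + "=\"" + e.getValue() + "\"");
    }
  }
}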