Search in sources :

Example 1 with LogCLIHelpers

use of org.apache.hadoop.yarn.logaggregation.LogCLIHelpers in project hadoop by apache.

the class CLI method run.

public int run(String[] argv) throws Exception {
    int exitCode = -1;
    if (argv.length < 1) {
        displayUsage("");
        return exitCode;
    }
    // process arguments
    String cmd = argv[0];
    String submitJobFile = null;
    String jobid = null;
    String taskid = null;
    String historyFileOrJobId = null;
    String historyOutFile = null;
    String historyOutFormat = HistoryViewer.HUMAN_FORMAT;
    String counterGroupName = null;
    String counterName = null;
    JobPriority jp = null;
    String taskType = null;
    String taskState = null;
    int fromEvent = 0;
    int nEvents = 0;
    int jpvalue = 0;
    String configOutFile = null;
    boolean getStatus = false;
    boolean getCounter = false;
    boolean killJob = false;
    boolean listEvents = false;
    boolean viewHistory = false;
    boolean viewAllHistory = false;
    boolean listJobs = false;
    boolean listAllJobs = false;
    boolean listActiveTrackers = false;
    boolean listBlacklistedTrackers = false;
    boolean displayTasks = false;
    boolean killTask = false;
    boolean failTask = false;
    boolean setJobPriority = false;
    boolean logs = false;
    boolean downloadConfig = false;
    if ("-submit".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        submitJobFile = argv[1];
    } else if ("-status".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        getStatus = true;
    } else if ("-counter".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        getCounter = true;
        jobid = argv[1];
        counterGroupName = argv[2];
        counterName = argv[3];
    } else if ("-kill".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        killJob = true;
    } else if ("-set-priority".equals(cmd)) {
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        try {
            jp = JobPriority.valueOf(argv[2]);
        } catch (IllegalArgumentException iae) {
            try {
                jpvalue = Integer.parseInt(argv[2]);
            } catch (NumberFormatException ne) {
                LOG.info(ne);
                displayUsage(cmd);
                return exitCode;
            }
        }
        setJobPriority = true;
    } else if ("-events".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        fromEvent = Integer.parseInt(argv[2]);
        nEvents = Integer.parseInt(argv[3]);
        listEvents = true;
    } else if ("-history".equals(cmd)) {
        viewHistory = true;
        if (argv.length < 2 || argv.length > 7) {
            displayUsage(cmd);
            return exitCode;
        }
        // Some arguments are optional while others are not, and some require
        // second arguments.  Due to this, the indexing can vary depending on
        // what's specified and what's left out, as summarized in the below table:
        // [all] <jobHistoryFile|jobId> [-outfile <file>] [-format <human|json>]
        //   1                  2            3       4         5         6
        //   1                  2            3       4
        //   1                  2                              3         4
        //   1                  2
        //                      1            2       3         4         5
        //                      1            2       3
        //                      1                              2         3
        //                      1
        // "all" is optional, but comes first if specified
        int index = 1;
        if ("all".equals(argv[index])) {
            index++;
            viewAllHistory = true;
            if (argv.length == 2) {
                displayUsage(cmd);
                return exitCode;
            }
        }
        // Get the job history file or job id argument
        historyFileOrJobId = argv[index++];
        // "-outfile" is optional, but if specified requires a second argument
        if (argv.length > index + 1 && "-outfile".equals(argv[index])) {
            index++;
            historyOutFile = argv[index++];
        }
        // "-format" is optional, but if specified required a second argument
        if (argv.length > index + 1 && "-format".equals(argv[index])) {
            index++;
            historyOutFormat = argv[index++];
        }
        // Check for any extra arguments that don't belong here
        if (argv.length > index) {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-list".equals(cmd)) {
        if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
            displayUsage(cmd);
            return exitCode;
        }
        if (argv.length == 2 && "all".equals(argv[1])) {
            listAllJobs = true;
        } else {
            listJobs = true;
        }
    } else if ("-kill-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        killTask = true;
        taskid = argv[1];
    } else if ("-fail-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        failTask = true;
        taskid = argv[1];
    } else if ("-list-active-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listActiveTrackers = true;
    } else if ("-list-blacklisted-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listBlacklistedTrackers = true;
    } else if ("-list-attempt-ids".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        taskType = argv[2];
        taskState = argv[3];
        displayTasks = true;
        if (!taskTypes.contains(org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
            System.out.println("Error: Invalid task-type: " + taskType);
            displayUsage(cmd);
            return exitCode;
        }
        if (!taskStates.contains(org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
            System.out.println("Error: Invalid task-state: " + taskState);
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-logs".equals(cmd)) {
        if (argv.length == 2 || argv.length == 3) {
            logs = true;
            jobid = argv[1];
            if (argv.length == 3) {
                taskid = argv[2];
            } else {
                taskid = null;
            }
        } else {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-config".equals(cmd)) {
        downloadConfig = true;
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        configOutFile = argv[2];
    } else {
        displayUsage(cmd);
        return exitCode;
    }
    // initialize cluster
    cluster = createCluster();
    // Submit the request
    try {
        if (submitJobFile != null) {
            Job job = Job.getInstance(new JobConf(submitJobFile));
            job.submit();
            System.out.println("Created job " + job.getJobID());
            exitCode = 0;
        } else if (getStatus) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                System.out.println();
                System.out.println(job);
                if (counters != null) {
                    System.out.println(counters);
                } else {
                    System.out.println("Counters not available. Job is retired.");
                }
                exitCode = 0;
            }
        } else if (getCounter) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                if (counters == null) {
                    System.out.println("Counters not available for retired job " + jobid);
                    exitCode = -1;
                } else {
                    System.out.println(getCounter(counters, counterGroupName, counterName));
                    exitCode = 0;
                }
            }
        } else if (killJob) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                JobStatus jobStatus = job.getStatus();
                if (jobStatus.getState() == JobStatus.State.FAILED) {
                    System.out.println("Could not mark the job " + jobid + " as killed, as it has already failed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.KILLED) {
                    System.out.println("The job " + jobid + " has already been killed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.SUCCEEDED) {
                    System.out.println("Could not kill the job " + jobid + ", as it has already succeeded.");
                    exitCode = -1;
                } else {
                    job.killJob();
                    System.out.println("Killed job " + jobid);
                    exitCode = 0;
                }
            }
        } else if (setJobPriority) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                if (jp != null) {
                    job.setPriority(jp);
                } else {
                    job.setPriorityAsInteger(jpvalue);
                }
                System.out.println("Changed job priority.");
                exitCode = 0;
            }
        } else if (viewHistory) {
            // If it ends with .jhist, treat it as a history file; otherwise treat it as a job ID
            if (historyFileOrJobId.endsWith(".jhist")) {
                viewHistory(historyFileOrJobId, viewAllHistory, historyOutFile, historyOutFormat);
                exitCode = 0;
            } else {
                Job job = getJob(JobID.forName(historyFileOrJobId));
                if (job == null) {
                    System.out.println("Could not find job " + jobid);
                } else {
                    String historyUrl = job.getHistoryUrl();
                    if (historyUrl == null || historyUrl.isEmpty()) {
                        System.out.println("History file for job " + historyFileOrJobId + " is currently unavailable.");
                    } else {
                        viewHistory(historyUrl, viewAllHistory, historyOutFile, historyOutFormat);
                        exitCode = 0;
                    }
                }
            }
        } else if (listEvents) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                listEvents(job, fromEvent, nEvents);
                exitCode = 0;
            }
        } else if (listJobs) {
            listJobs(cluster);
            exitCode = 0;
        } else if (listAllJobs) {
            listAllJobs(cluster);
            exitCode = 0;
        } else if (listActiveTrackers) {
            listActiveTrackers(cluster);
            exitCode = 0;
        } else if (listBlacklistedTrackers) {
            listBlacklistedTrackers(cluster);
            exitCode = 0;
        } else if (displayTasks) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                displayTasks(job, taskType, taskState);
                exitCode = 0;
            }
        } else if (killTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, false)) {
                System.out.println("Killed task " + taskid);
                exitCode = 0;
            } else {
                System.out.println("Could not kill task " + taskid);
                exitCode = -1;
            }
        } else if (failTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, true)) {
                System.out.println("Killed task " + taskID + " by failing it");
                exitCode = 0;
            } else {
                System.out.println("Could not fail task " + taskid);
                exitCode = -1;
            }
        } else if (logs) {
            JobID jobID = JobID.forName(jobid);
            if (getJob(jobID) == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                try {
                    TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskid);
                    LogParams logParams = cluster.getLogParams(jobID, taskAttemptID);
                    LogCLIHelpers logDumper = new LogCLIHelpers();
                    logDumper.setConf(getConf());
                    exitCode = logDumper.dumpAContainersLogs(logParams.getApplicationId(), logParams.getContainerId(), logParams.getNodeId(), logParams.getOwner());
                } catch (IOException e) {
                    if (e instanceof RemoteException) {
                        throw e;
                    }
                    System.out.println(e.getMessage());
                }
            }
        } else if (downloadConfig) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                String jobFile = job.getJobFile();
                if (jobFile == null || jobFile.isEmpty()) {
                    System.out.println("Config file for job " + jobFile + " could not be found.");
                } else {
                    Path configPath = new Path(jobFile);
                    FileSystem fs = FileSystem.get(getConf());
                    fs.copyToLocalFile(configPath, new Path(configOutFile));
                    exitCode = 0;
                }
            }
        }
    } catch (RemoteException re) {
        IOException unwrappedException = re.unwrapRemoteException();
        if (unwrappedException instanceof AccessControlException) {
            System.out.println(unwrappedException.getMessage());
        } else {
            throw re;
        }
    } finally {
        cluster.close();
    }
    return exitCode;
}
Also used : Path(org.apache.hadoop.fs.Path) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) JobPriority(org.apache.hadoop.mapreduce.JobPriority) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) LogParams(org.apache.hadoop.mapreduce.v2.LogParams) JobStatus(org.apache.hadoop.mapreduce.JobStatus) FileSystem(org.apache.hadoop.fs.FileSystem) LogCLIHelpers(org.apache.hadoop.yarn.logaggregation.LogCLIHelpers) Counters(org.apache.hadoop.mapreduce.Counters) Job(org.apache.hadoop.mapreduce.Job) RemoteException(org.apache.hadoop.ipc.RemoteException) JobConf(org.apache.hadoop.mapred.JobConf) JobID(org.apache.hadoop.mapreduce.JobID)
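
A minimal sketch of the LogCLIHelpers calls exercised in the -logs branch above, shown in isolation. It assumes the application id, container id, node address, and owner are already known; every literal value below is a placeholder, not a real cluster id.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;

public class DumpContainerLogsSketch {
    public static void main(String[] args) throws Exception {
        // Configuration carrying the cluster's log-aggregation settings.
        Configuration conf = new YarnConfiguration();
        LogCLIHelpers logDumper = new LogCLIHelpers();
        logDumper.setConf(conf);
        // Same call CLI.run() makes: dump the aggregated logs of a single container.
        // All four arguments are plain strings; a non-zero return value signals failure.
        int exitCode = logDumper.dumpAContainersLogs(
            "application_1490000000000_0001",           // placeholder application id
            "container_1490000000000_0001_01_000001",   // placeholder container id
            "node1.example.com:8041",                    // placeholder node address
            "hadoopuser");                               // placeholder application owner
        System.exit(exitCode);
    }
}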

Example 2 with LogCLIHelpers

use of org.apache.hadoop.yarn.logaggregation.LogCLIHelpers in project hadoop by apache.

the class TestLogsCLI method testFailResultCodes.

@Test(timeout = 5000L)
public void testFailResultCodes() throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setClass("fs.file.impl", LocalFileSystem.class, FileSystem.class);
    LogCLIHelpers cliHelper = new LogCLIHelpers();
    cliHelper.setConf(conf);
    YarnClient mockYarnClient = createMockYarnClient(YarnApplicationState.FINISHED, UserGroupInformation.getCurrentUser().getShortUserName());
    LogsCLI dumper = new LogsCLIForTest(mockYarnClient);
    dumper.setConf(conf);
    // verify dumping a non-existent application's logs returns a failure code
    int exitCode = dumper.run(new String[] { "-applicationId", "application_0_0" });
    assertTrue("Should return an error code", exitCode != 0);
    // verify dumping a non-existent container's logs returns a failure code
    exitCode = cliHelper.dumpAContainersLogs("application_0_0", "container_0_0", "nonexistentnode:1234", "nobody");
    assertTrue("Should return an error code", exitCode != 0);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) LogCLIHelpers(org.apache.hadoop.yarn.logaggregation.LogCLIHelpers) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) Test(org.junit.Test)
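
The same negative path can be reproduced outside the test harness; a minimal sketch, assuming the yarn-client module (which provides org.apache.hadoop.yarn.client.cli.LogsCLI) is on the classpath, with a placeholder application id:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.cli.LogsCLI;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class RunLogsCliSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        LogsCLI cli = new LogsCLI();
        cli.setConf(conf);
        // Mirrors the test above: asking for a non-existent application
        // is expected to produce a non-zero exit code.
        int exitCode = cli.run(new String[] { "-applicationId", "application_0_0" });
        System.exit(exitCode);
    }
}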

Example 3 with LogCLIHelpers

use of org.apache.hadoop.yarn.logaggregation.LogCLIHelpers in project hadoop by apache.

the class LogsCLI method runCommand.

private int runCommand(String[] args) throws Exception {
    Options opts = createCommandOpts();
    Options printOpts = createPrintOpts(opts);
    if (args.length < 1) {
        printHelpMessage(printOpts);
        return -1;
    }
    if (args[0].equals("-help")) {
        printHelpMessage(printOpts);
        return 0;
    }
    CommandLineParser parser = new GnuParser();
    String appIdStr = null;
    String containerIdStr = null;
    String nodeAddress = null;
    String appOwner = null;
    boolean getAMContainerLogs = false;
    boolean nodesList = false;
    boolean showApplicationLogInfo = false;
    boolean showContainerLogInfo = false;
    boolean useRegex = false;
    String[] logFiles = null;
    String[] logFilesRegex = null;
    List<String> amContainersList = new ArrayList<String>();
    String localDir = null;
    long bytes = Long.MAX_VALUE;
    try {
        CommandLine commandLine = parser.parse(opts, args, false);
        appIdStr = commandLine.getOptionValue(APPLICATION_ID_OPTION);
        containerIdStr = commandLine.getOptionValue(CONTAINER_ID_OPTION);
        nodeAddress = commandLine.getOptionValue(NODE_ADDRESS_OPTION);
        appOwner = commandLine.getOptionValue(APP_OWNER_OPTION);
        getAMContainerLogs = commandLine.hasOption(AM_CONTAINER_OPTION);
        nodesList = commandLine.hasOption(LIST_NODES_OPTION);
        localDir = commandLine.getOptionValue(OUT_OPTION);
        showApplicationLogInfo = commandLine.hasOption(SHOW_APPLICATION_LOG_INFO);
        showContainerLogInfo = commandLine.hasOption(SHOW_CONTAINER_LOG_INFO);
        if (getAMContainerLogs) {
            try {
                amContainersList = parseAMContainer(commandLine, printOpts);
            } catch (NumberFormatException ex) {
                System.err.println(ex.getMessage());
                return -1;
            }
        }
        if (commandLine.hasOption(PER_CONTAINER_LOG_FILES_OPTION)) {
            logFiles = commandLine.getOptionValues(PER_CONTAINER_LOG_FILES_OPTION);
        }
        if (commandLine.hasOption(PER_CONTAINER_LOG_FILES_REGEX_OPTION)) {
            logFilesRegex = commandLine.getOptionValues(PER_CONTAINER_LOG_FILES_REGEX_OPTION);
            useRegex = true;
        }
        if (commandLine.hasOption(SIZE_OPTION)) {
            bytes = Long.parseLong(commandLine.getOptionValue(SIZE_OPTION));
        }
    } catch (ParseException e) {
        System.err.println("options parsing failed: " + e.getMessage());
        printHelpMessage(printOpts);
        return -1;
    }
    if (appIdStr == null && containerIdStr == null) {
        System.err.println("Both applicationId and containerId are missing, " + " one of them must be specified.");
        printHelpMessage(printOpts);
        return -1;
    }
    ApplicationId appId = null;
    if (appIdStr != null) {
        try {
            appId = ApplicationId.fromString(appIdStr);
        } catch (Exception e) {
            System.err.println("Invalid ApplicationId specified");
            return -1;
        }
    }
    if (containerIdStr != null) {
        try {
            ContainerId containerId = ContainerId.fromString(containerIdStr);
            if (appId == null) {
                appId = containerId.getApplicationAttemptId().getApplicationId();
            } else if (!containerId.getApplicationAttemptId().getApplicationId().equals(appId)) {
                System.err.println("The Application:" + appId + " does not have the container:" + containerId);
                return -1;
            }
        } catch (Exception e) {
            System.err.println("Invalid ContainerId specified");
            return -1;
        }
    }
    if (showApplicationLogInfo && showContainerLogInfo) {
        System.err.println("Invalid options. Can only accept one of " + "show_application_log_info/show_container_log_info.");
        return -1;
    }
    if (logFiles != null && logFiles.length > 0 && logFilesRegex != null && logFilesRegex.length > 0) {
        System.err.println("Invalid options. Can only accept one of " + "log_files/log_files_pattern.");
        return -1;
    }
    if (localDir != null) {
        File file = new File(localDir);
        if (file.exists() && file.isFile()) {
            System.err.println("Invalid value for -out option. " + "Please provide a directory.");
            return -1;
        }
    }
    LogCLIHelpers logCliHelper = new LogCLIHelpers();
    logCliHelper.setConf(getConf());
    YarnApplicationState appState = YarnApplicationState.NEW;
    ApplicationReport appReport = null;
    try {
        appReport = getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
        if (appState == YarnApplicationState.NEW || appState == YarnApplicationState.NEW_SAVING || appState == YarnApplicationState.SUBMITTED) {
            System.err.println("Logs are not available right now.");
            return -1;
        }
    } catch (IOException | YarnException e) {
        // If we can not get appReport from either RM or ATS
        // We will assume that this app has already finished.
        appState = YarnApplicationState.FINISHED;
        System.err.println("Unable to get ApplicationState." + " Attempting to fetch logs directly from the filesystem.");
    }
    if (appOwner == null || appOwner.isEmpty()) {
        appOwner = guessAppOwner(appReport, appId);
        if (appOwner == null) {
            System.err.println("Can not find the appOwner. " + "Please specify the correct appOwner");
            System.err.println("Could not locate application logs for " + appId);
            return -1;
        }
    }
    Set<String> logs = new HashSet<String>();
    if (fetchAllLogFiles(logFiles, logFilesRegex)) {
        logs.add("ALL");
    } else if (logFiles != null && logFiles.length > 0) {
        logs.addAll(Arrays.asList(logFiles));
    } else if (logFilesRegex != null && logFilesRegex.length > 0) {
        logs.addAll(Arrays.asList(logFilesRegex));
    }
    ContainerLogsRequest request = new ContainerLogsRequest(appId, isApplicationFinished(appState), appOwner, nodeAddress, null, containerIdStr, localDir, logs, bytes, null);
    if (showContainerLogInfo) {
        return showContainerLogInfo(request, logCliHelper);
    }
    if (nodesList) {
        return showNodeLists(request, logCliHelper);
    }
    if (showApplicationLogInfo) {
        return showApplicationLogInfo(request, logCliHelper);
    }
    // To get am logs
    if (getAMContainerLogs) {
        return fetchAMContainerLogs(request, amContainersList, logCliHelper, useRegex);
    }
    int resultCode = 0;
    if (containerIdStr != null) {
        return fetchContainerLogs(request, logCliHelper, useRegex);
    } else {
        if (nodeAddress == null) {
            resultCode = fetchApplicationLogs(request, logCliHelper, useRegex);
        } else {
            System.err.println("Should at least provide ContainerId!");
            printHelpMessage(printOpts);
            resultCode = -1;
        }
    }
    return resultCode;
}
Also used : Options(org.apache.commons.cli.Options) GnuParser(org.apache.commons.cli.GnuParser) ArrayList(java.util.ArrayList) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) ContainerLogsRequest(org.apache.hadoop.yarn.logaggregation.ContainerLogsRequest) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) CommandLineParser(org.apache.commons.cli.CommandLineParser) HashSet(java.util.HashSet) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException) UniformInterfaceException(com.sun.jersey.api.client.UniformInterfaceException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException) JSONException(org.codehaus.jettison.json.JSONException) ClientHandlerException(com.sun.jersey.api.client.ClientHandlerException) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) CommandLine(org.apache.commons.cli.CommandLine) LogCLIHelpers(org.apache.hadoop.yarn.logaggregation.LogCLIHelpers) ParseException(org.apache.commons.cli.ParseException) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) File(java.io.File)
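
The ApplicationId/ContainerId cross-check near the top of runCommand can be exercised on its own; a minimal sketch, using placeholder id strings:

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class IdConsistencySketch {
    public static void main(String[] args) {
        // Placeholder ids in the standard YARN formats.
        String appIdStr = "application_1490000000000_0001";
        String containerIdStr = "container_1490000000000_0001_01_000001";
        ApplicationId appId = ApplicationId.fromString(appIdStr);
        ContainerId containerId = ContainerId.fromString(containerIdStr);
        // A container id embeds its application attempt, and through it the application id,
        // so the two command-line options can be cross-checked without contacting the RM.
        if (!containerId.getApplicationAttemptId().getApplicationId().equals(appId)) {
            System.err.println("The Application:" + appId + " does not have the container:" + containerId);
        } else {
            System.out.println("Container " + containerId + " belongs to " + appId);
        }
    }
}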

Aggregations

LogCLIHelpers (org.apache.hadoop.yarn.logaggregation.LogCLIHelpers)3 IOException (java.io.IOException)2 ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException)1 UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException)1 File (java.io.File)1 ArrayList (java.util.ArrayList)1 HashSet (java.util.HashSet)1 CommandLine (org.apache.commons.cli.CommandLine)1 CommandLineParser (org.apache.commons.cli.CommandLineParser)1 GnuParser (org.apache.commons.cli.GnuParser)1 Options (org.apache.commons.cli.Options)1 ParseException (org.apache.commons.cli.ParseException)1 Configuration (org.apache.hadoop.conf.Configuration)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1 RemoteException (org.apache.hadoop.ipc.RemoteException)1 JobConf (org.apache.hadoop.mapred.JobConf)1 Counters (org.apache.hadoop.mapreduce.Counters)1 Job (org.apache.hadoop.mapreduce.Job)1 JobID (org.apache.hadoop.mapreduce.JobID)1