
Example 31 with Private

Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by Apache.

The class RMContainerAllocator, method getResourceLimit:

@Private
public Resource getResourceLimit() {
    // Limit = current scheduler headroom plus the resources already assigned
    // to running map and reduce tasks.
    Resource headRoom = getAvailableResources();
    Resource assignedMapResource = Resources.multiply(mapResourceRequest, assignedRequests.maps.size());
    Resource assignedReduceResource = Resources.multiply(reduceResourceRequest, assignedRequests.reduces.size());
    return Resources.add(headRoom, Resources.add(assignedMapResource, assignedReduceResource));
}
Also used : Resource(org.apache.hadoop.yarn.api.records.Resource) Private(org.apache.hadoop.classification.InterfaceAudience.Private)
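
The limit here is plain YARN Resource arithmetic: scheduler headroom plus everything already handed to running map and reduce tasks. A minimal sketch of the same arithmetic with the Resources utility class; the request sizes and task counts below are made up for illustration:

import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.resource.Resources;

public static Resource resourceLimitSketch() {
    // Illustrative values, not taken from a real cluster
    Resource headRoom = Resource.newInstance(4096, 4);      // 4 GB, 4 vcores of headroom
    Resource mapRequest = Resource.newInstance(1024, 1);    // per-map request
    Resource reduceRequest = Resource.newInstance(2048, 1); // per-reduce request
    // 3 running maps, 1 running reduce
    Resource assignedMaps = Resources.multiply(mapRequest, 3);
    Resource assignedReduces = Resources.multiply(reduceRequest, 1);
    // limit = headroom + already-assigned resources = <9216 MB, 8 vcores> here
    return Resources.add(headRoom, Resources.add(assignedMaps, assignedReduces));
}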

Example 32 with Private

Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by Apache.

The class FileOutputFormat, method setWorkOutputPath:

/**
   * Set the {@link Path} of the task's temporary output directory 
   * for the map-reduce job.
   * 
   * <p><i>Note</i>: Task output path is set by the framework.
   * </p>
   * @param conf The configuration of the job.
   * @param outputDir the {@link Path} of the output directory 
   * for the map-reduce job.
   */
@Private
public static void setWorkOutputPath(JobConf conf, Path outputDir) {
    outputDir = new Path(conf.getWorkingDirectory(), outputDir);
    conf.set(JobContext.TASK_OUTPUT_DIR, outputDir.toString());
}
Also used : Path(org.apache.hadoop.fs.Path) Private(org.apache.hadoop.classification.InterfaceAudience.Private)
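
The helper resolves the given directory against the job's working directory and records the result in the configuration under JobContext.TASK_OUTPUT_DIR. A hedged sketch of calling it directly; the relative path is made up, and in a real job the framework sets this value rather than user code:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;

public static void workOutputPathSketch() {
    JobConf conf = new JobConf();
    // Relative path: setWorkOutputPath resolves it against conf.getWorkingDirectory()
    FileOutputFormat.setWorkOutputPath(conf, new Path("out/_temporary/attempt_0"));
    // The resolved, absolute path is now stored under the task-output-dir key
    System.out.println(conf.get(JobContext.TASK_OUTPUT_DIR));
}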

Example 33 with Private

Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by Apache.

The class JobConf, method parseMaximumHeapSizeMB:

/**
   * Parse the maximum heap size from the Java opts, as specified by the -Xmx option.
   * Format: -Xmx&lt;size&gt;[g|G|m|M|k|K]
   * @param javaOpts String to parse to read maximum heap size
   * @return Maximum heap size in MB or -1 if not specified
   */
@Private
@VisibleForTesting
public static int parseMaximumHeapSizeMB(String javaOpts) {
    // Find the last matching -Xmx following word boundaries
    Matcher m = JAVA_OPTS_XMX_PATTERN.matcher(javaOpts);
    if (m.matches()) {
        long size = Long.parseLong(m.group(1));
        if (size <= 0) {
            return -1;
        }
        if (m.group(2).isEmpty()) {
            // -Xmx specified in bytes
            return (int) (size / (1024 * 1024));
        }
        char unit = m.group(2).charAt(0);
        switch(unit) {
            case 'g':
            case 'G':
                // -Xmx specified in GB
                return (int) (size * 1024);
            case 'm':
            case 'M':
                // -Xmx specified in MB
                return (int) size;
            case 'k':
            case 'K':
                // -Xmx specified in KB
                return (int) (size / 1024);
        }
    }
    // -Xmx not specified
    return -1;
}
Also used : Matcher(java.util.regex.Matcher) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Private(org.apache.hadoop.classification.InterfaceAudience.Private)
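
Given the unit handling above, the same heap size comes back in megabytes regardless of how it is written in the opts string. A short sketch of the expected conversions; the results follow directly from the code above, not from running a job:

import org.apache.hadoop.mapred.JobConf;

public static void heapSizeParsingSketch() {
    // 2 GB written in different units; each call should return megabytes
    int fromG = JobConf.parseMaximumHeapSizeMB("-Xmx2g");            // 2 * 1024        -> 2048
    int fromM = JobConf.parseMaximumHeapSizeMB("-server -Xmx2048m"); // as-is           -> 2048
    int fromK = JobConf.parseMaximumHeapSizeMB("-Xmx2097152k");      // 2097152 / 1024  -> 2048
    int fromB = JobConf.parseMaximumHeapSizeMB("-Xmx2147483648");    // bytes / 1024^2  -> 2048
    int none  = JobConf.parseMaximumHeapSizeMB("-XX:+UseG1GC");      // no -Xmx present -> -1
}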

Example 34 with Private

Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by Apache.

The class FileOutputCommitter, method commitTask:

@Private
public void commitTask(TaskAttemptContext context, Path taskAttemptPath) throws IOException {
    TaskAttemptID attemptId = context.getTaskAttemptID();
    if (hasOutputPath()) {
        context.progress();
        if (taskAttemptPath == null) {
            taskAttemptPath = getTaskAttemptPath(context);
        }
        FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
        FileStatus taskAttemptDirStatus;
        try {
            taskAttemptDirStatus = fs.getFileStatus(taskAttemptPath);
        } catch (FileNotFoundException e) {
            taskAttemptDirStatus = null;
        }
        if (taskAttemptDirStatus != null) {
            if (algorithmVersion == 1) {
                // v1: rename the task-attempt directory to the committed-task path;
                // commitJob later merges committed task directories into the output directory
                Path committedTaskPath = getCommittedTaskPath(context);
                if (fs.exists(committedTaskPath)) {
                    if (!fs.delete(committedTaskPath, true)) {
                        throw new IOException("Could not delete " + committedTaskPath);
                    }
                }
                if (!fs.rename(taskAttemptPath, committedTaskPath)) {
                    throw new IOException("Could not rename " + taskAttemptPath + " to " + committedTaskPath);
                }
                LOG.info("Saved output of task '" + attemptId + "' to " + committedTaskPath);
            } else {
                // directly merge everything from taskAttemptPath to output directory
                mergePaths(fs, taskAttemptDirStatus, outputPath);
                LOG.info("Saved output of task '" + attemptId + "' to " + outputPath);
            }
        } else {
            LOG.warn("No Output found for " + attemptId);
        }
    } else {
        LOG.warn("Output Path is null in commitTask()");
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) FileSystem(org.apache.hadoop.fs.FileSystem) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) Private(org.apache.hadoop.classification.InterfaceAudience.Private)
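
The branch on algorithmVersion corresponds to the committer's two commit algorithms: version 1 renames the task-attempt directory to a committed-task path and defers the merge to commitJob, while version 2 merges task output straight into the final output directory during commitTask. A hedged sketch of selecting the version through configuration; the constant is the committer's own config key, and the value 2 is only an example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;

public static Configuration chooseCommitAlgorithmSketch() {
    Configuration conf = new Configuration();
    // 1 = rename the attempt dir to a committed-task dir; commitJob merges it later
    // 2 = merge attempt output directly into the output directory at task commit
    conf.setInt(FileOutputCommitter.FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, 2);
    return conf;
}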

Example 35 with Private

Use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by Apache.

The class FileOutputCommitter, method abortTask:

@Private
public void abortTask(TaskAttemptContext context, Path taskAttemptPath) throws IOException {
    if (hasOutputPath()) {
        context.progress();
        if (taskAttemptPath == null) {
            taskAttemptPath = getTaskAttemptPath(context);
        }
        FileSystem fs = taskAttemptPath.getFileSystem(context.getConfiguration());
        if (!fs.delete(taskAttemptPath, true)) {
            LOG.warn("Could not delete " + taskAttemptPath);
        }
    } else {
        LOG.warn("Output Path is null in abortTask()");
    }
}
Also used : FileSystem(org.apache.hadoop.fs.FileSystem) Private(org.apache.hadoop.classification.InterfaceAudience.Private)
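
abortTask is the failure-path counterpart of commitTask: it deletes the task-attempt directory so a retried attempt starts from a clean slate. In a real job the framework drives these calls, but a hedged sketch of the ordering around a single attempt looks roughly like this; the committer and context are assumed to be set up elsewhere:

import java.io.IOException;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

static void runAttemptSketch(OutputCommitter committer, TaskAttemptContext context) throws IOException {
    committer.setupTask(context);
    try {
        // ... task writes its output under the task-attempt path ...
        if (committer.needsTaskCommit(context)) {
            committer.commitTask(context);  // v1: rename to committed path; v2: merge into output dir
        }
    } catch (IOException e) {
        committer.abortTask(context);       // drop the attempt directory before the attempt is retried
        throw e;
    }
}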

Aggregations

Private (org.apache.hadoop.classification.InterfaceAudience.Private): 52
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 15
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 12
IOException (java.io.IOException): 9
FileStatus (org.apache.hadoop.fs.FileStatus): 8
ArrayList (java.util.ArrayList): 6
Path (org.apache.hadoop.fs.Path): 6
DataInputStream (java.io.DataInputStream): 5
EOFException (java.io.EOFException): 5
PrintStream (java.io.PrintStream): 5
LogReader (org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader): 5
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 4
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 4
Resource (org.apache.hadoop.yarn.api.records.Resource): 4
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException): 4
ByteString (com.google.protobuf.ByteString): 2
FileNotFoundException (java.io.FileNotFoundException): 2
AccessDeniedException (java.nio.file.AccessDeniedException): 2
HashSet (java.util.HashSet): 2
FileSystem (org.apache.hadoop.fs.FileSystem): 2