Search in sources :

Example 16 with Private

use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by apache.

From the class LogCLIHelpers, the method dumpAllContainersLogs.

/**
 * Dumps the aggregated logs of all containers of the given application to
 * per-container files created under the requested local output directory.
 *
 * <p>Every file in the application's remote aggregated-log directory is
 * scanned. If an archive named "&lt;appId&gt;.har" is found, iteration
 * switches to the contents of that archive. Files ending in the temporary
 * aggregation suffix are skipped (presumably still being written —
 * TODO confirm against the aggregation writer).
 *
 * @param options request carrying the application id, app owner, local
 *        output directory, requested log types, and the byte count passed
 *        through to the log readers (options.getBytes())
 * @return 0 if any container logs were found and dumped; -1 if the remote
 *         log directory could not be read or no logs were found
 * @throws IOException if reading the aggregated log files fails
 */
@Private
public int dumpAllContainersLogs(ContainerLogsRequest options) throws IOException {
    ApplicationId appId = options.getAppId();
    String appOwner = options.getAppOwner();
    String localDir = options.getOutputLocalDir();
    List<String> logTypes = new ArrayList<String>(options.getLogTypes());
    RemoteIterator<FileStatus> nodeFiles = getRemoteNodeFileDir(appId, appOwner);
    if (nodeFiles == null) {
        // Remote log dir missing or unreadable.
        return -1;
    }
    boolean foundAnyLogs = false;
    while (nodeFiles.hasNext()) {
        FileStatus thisNodeFile = nodeFiles.next();
        if (thisNodeFile.getPath().getName().equals(appId + ".har")) {
            // Logs were archived into a har file: restart iteration over
            // the archive's contents instead of the plain directory.
            Path p = new Path("har:///" + thisNodeFile.getPath().toUri().getRawPath());
            nodeFiles = HarFs.get(p.toUri(), conf).listStatusIterator(p);
            continue;
        }
        if (!thisNodeFile.getPath().getName().endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
            AggregatedLogFormat.LogReader reader = new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
            try {
                DataInputStream valueStream;
                LogKey key = new LogKey();
                // reader.next(key) fills key and returns the value stream
                // for the next container, or null when the file is done.
                valueStream = reader.next(key);
                while (valueStream != null) {
                    // One output file per container in this node file.
                    PrintStream out = createPrintStream(localDir, thisNodeFile.getPath().getName(), key.toString());
                    try {
                        String containerString = String.format(CONTAINER_ON_NODE_PATTERN, key, thisNodeFile.getPath().getName());
                        out.println(containerString);
                        out.println("LogAggregationType: AGGREGATED");
                        out.println(StringUtils.repeat("=", containerString.length()));
                        while (true) {
                            try {
                                if (logTypes == null || logTypes.isEmpty()) {
                                    // No filter: dump every log type.
                                    LogReader.readAContainerLogsForALogType(valueStream, out, thisNodeFile.getModificationTime(), options.getBytes());
                                    foundAnyLogs = true;
                                } else {
                                    int result = LogReader.readContainerLogsForALogType(valueStream, out, thisNodeFile.getModificationTime(), logTypes, options.getBytes());
                                    // result == 0 appears to mean a requested
                                    // type was printed — NOTE(review): confirm
                                    // against LogReader's contract.
                                    if (result == 0) {
                                        foundAnyLogs = true;
                                    }
                                }
                            } catch (EOFException eof) {
                                // End of this container's log data.
                                break;
                            }
                        }
                    } finally {
                        closePrintStream(out);
                    }
                    // Next container
                    key = new LogKey();
                    valueStream = reader.next(key);
                }
            } finally {
                reader.close();
            }
        }
    }
    if (!foundAnyLogs) {
        emptyLogDir(LogAggregationUtils.getRemoteAppLogDir(conf, appId, appOwner).toString());
        return -1;
    }
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) LogReader(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader) PrintStream(java.io.PrintStream) FileStatus(org.apache.hadoop.fs.FileStatus) LogKey(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey) ArrayList(java.util.ArrayList) DataInputStream(java.io.DataInputStream) LogReader(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader) EOFException(java.io.EOFException) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Private(org.apache.hadoop.classification.InterfaceAudience.Private)

Example 17 with Private

use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by apache.

From the class LogCLIHelpers, the method printContainersList.

@Private
public void printContainersList(ContainerLogsRequest options, PrintStream out, PrintStream err) throws IOException {
    ApplicationId appId = options.getAppId();
    String appOwner = options.getAppOwner();
    String nodeId = options.getNodeId();
    String nodeIdStr = (nodeId == null) ? null : LogAggregationUtils.getNodeString(nodeId);
    RemoteIterator<FileStatus> nodeFiles = getRemoteNodeFileDir(appId, appOwner);
    if (nodeFiles == null) {
        return;
    }
    boolean foundAnyLogs = false;
    while (nodeFiles.hasNext()) {
        FileStatus thisNodeFile = nodeFiles.next();
        if (nodeIdStr != null) {
            if (!thisNodeFile.getPath().getName().contains(nodeIdStr)) {
                continue;
            }
        }
        if (!thisNodeFile.getPath().getName().endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
            AggregatedLogFormat.LogReader reader = new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
            try {
                DataInputStream valueStream;
                LogKey key = new LogKey();
                valueStream = reader.next(key);
                while (valueStream != null) {
                    out.println(String.format(CONTAINER_ON_NODE_PATTERN, key, thisNodeFile.getPath().getName()));
                    foundAnyLogs = true;
                    // Next container
                    key = new LogKey();
                    valueStream = reader.next(key);
                }
            } finally {
                reader.close();
            }
        }
    }
    if (!foundAnyLogs) {
        if (nodeId != null) {
            err.println("Can not find information for any containers on " + nodeId);
        } else {
            err.println("Can not find any container information for " + "the application: " + appId);
        }
    }
}
Also used : LogReader(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader) FileStatus(org.apache.hadoop.fs.FileStatus) LogKey(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey) LogReader(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) DataInputStream(java.io.DataInputStream) Private(org.apache.hadoop.classification.InterfaceAudience.Private)

Example 18 with Private

use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by apache.

From the class LogCLIHelpers, the method printNodesList.

@Private
public void printNodesList(ContainerLogsRequest options, PrintStream out, PrintStream err) throws IOException {
    ApplicationId appId = options.getAppId();
    String appOwner = options.getAppOwner();
    RemoteIterator<FileStatus> nodeFiles = getRemoteNodeFileDir(appId, appOwner);
    if (nodeFiles == null) {
        return;
    }
    boolean foundNode = false;
    StringBuilder sb = new StringBuilder();
    while (nodeFiles.hasNext()) {
        FileStatus thisNodeFile = nodeFiles.next();
        sb.append(thisNodeFile.getPath().getName() + "\n");
        foundNode = true;
    }
    if (!foundNode) {
        err.println("No nodes found that aggregated logs for " + "the application: " + appId);
    } else {
        out.println(sb.toString());
    }
}
Also used : FileStatus(org.apache.hadoop.fs.FileStatus) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Private(org.apache.hadoop.classification.InterfaceAudience.Private)

Example 19 with Private

use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by apache.

From the class LogCLIHelpers, the method dumpAContainerLogsForLogTypeWithoutNodeId.

@Private
public int dumpAContainerLogsForLogTypeWithoutNodeId(ContainerLogsRequest options) throws IOException {
    ApplicationId applicationId = options.getAppId();
    String jobOwner = options.getAppOwner();
    String containerId = options.getContainerId();
    String localDir = options.getOutputLocalDir();
    List<String> logType = new ArrayList<String>(options.getLogTypes());
    RemoteIterator<FileStatus> nodeFiles = getRemoteNodeFileDir(applicationId, jobOwner);
    if (nodeFiles == null) {
        return -1;
    }
    boolean foundContainerLogs = false;
    while (nodeFiles.hasNext()) {
        FileStatus thisNodeFile = nodeFiles.next();
        if (!thisNodeFile.getPath().getName().endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
            AggregatedLogFormat.LogReader reader = null;
            PrintStream out = System.out;
            try {
                reader = new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
                if (getContainerLogsStream(containerId, reader) == null) {
                    continue;
                }
                // We have to re-create reader object to reset the stream index
                // after calling getContainerLogsStream which would move the stream
                // index to the end of the log file.
                reader = new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
                out = createPrintStream(localDir, thisNodeFile.getPath().getName(), containerId);
                String containerString = String.format(CONTAINER_ON_NODE_PATTERN, containerId, thisNodeFile.getPath().getName());
                out.println(containerString);
                out.println("LogAggregationType: AGGREGATED");
                out.println(StringUtils.repeat("=", containerString.length()));
                if (logType == null || logType.isEmpty()) {
                    if (dumpAContainerLogs(containerId, reader, out, thisNodeFile.getModificationTime(), options.getBytes()) > -1) {
                        foundContainerLogs = true;
                    }
                } else {
                    if (dumpAContainerLogsForALogType(containerId, reader, out, thisNodeFile.getModificationTime(), logType, options.getBytes()) > -1) {
                        foundContainerLogs = true;
                    }
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
                closePrintStream(out);
            }
        }
    }
    if (!foundContainerLogs) {
        containerLogNotFound(containerId);
        return -1;
    }
    return 0;
}
Also used : LogReader(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader) PrintStream(java.io.PrintStream) FileStatus(org.apache.hadoop.fs.FileStatus) ArrayList(java.util.ArrayList) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Private(org.apache.hadoop.classification.InterfaceAudience.Private)

Example 20 with Private

use of org.apache.hadoop.classification.InterfaceAudience.Private in project hadoop by apache.

From the class TimelineV2ClientImpl, the method putObjects.

@Private
protected void putObjects(String path, MultivaluedMap<String, String> params, Object obj) throws IOException, YarnException {
    int retries = verifyRestEndPointAvailable();
    // timelineServiceAddress could be stale, add retry logic here.
    boolean needRetry = true;
    while (needRetry) {
        try {
            URI uri = TimelineConnector.constructResURI(getConfig(), timelineServiceAddress, RESOURCE_URI_STR_V2);
            putObjects(uri, path, params, obj);
            needRetry = false;
        } catch (IOException e) {
            // handle exception for timelineServiceAddress being updated.
            checkRetryWithSleep(retries, e);
            retries--;
        }
    }
}
Also used : IOException(java.io.IOException) URI(java.net.URI) Private(org.apache.hadoop.classification.InterfaceAudience.Private)

Aggregations

Private (org.apache.hadoop.classification.InterfaceAudience.Private)52 VisibleForTesting (com.google.common.annotations.VisibleForTesting)15 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)12 IOException (java.io.IOException)9 FileStatus (org.apache.hadoop.fs.FileStatus)8 ArrayList (java.util.ArrayList)6 Path (org.apache.hadoop.fs.Path)6 DataInputStream (java.io.DataInputStream)5 EOFException (java.io.EOFException)5 PrintStream (java.io.PrintStream)5 LogReader (org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader)5 ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId)4 ContainerId (org.apache.hadoop.yarn.api.records.ContainerId)4 Resource (org.apache.hadoop.yarn.api.records.Resource)4 YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException)4 ByteString (com.google.protobuf.ByteString)2 FileNotFoundException (java.io.FileNotFoundException)2 AccessDeniedException (java.nio.file.AccessDeniedException)2 HashSet (java.util.HashSet)2 FileSystem (org.apache.hadoop.fs.FileSystem)2