
Example 71 with YarnException

use of org.apache.hadoop.yarn.exceptions.YarnException in project hadoop by apache.

the class RMServerUtils method validateISO8601AndConvertToLocalTimeEpoch.

/**
   * Validate ISO8601-formatted timeouts and convert them to local epoch time.
   * @param timeoutsInISO8601 map of timeout type to expiry time in ISO8601 format
   * @return expiry time in local epoch milliseconds per timeout type
   * @throws YarnException if a given application timeout value is less than
   *           the current time.
   */
public static Map<ApplicationTimeoutType, Long> validateISO8601AndConvertToLocalTimeEpoch(Map<ApplicationTimeoutType, String> timeoutsInISO8601) throws YarnException {
    long currentTimeMillis = clock.getTime();
    Map<ApplicationTimeoutType, Long> newApplicationTimeout = new HashMap<ApplicationTimeoutType, Long>();
    if (timeoutsInISO8601 != null) {
        for (Map.Entry<ApplicationTimeoutType, String> timeout : timeoutsInISO8601.entrySet()) {
            long expireTime = 0L;
            try {
                expireTime = Times.parseISO8601ToLocalTimeInMillis(timeout.getValue());
            } catch (ParseException ex) {
                String message = "Expire time is not in ISO8601 format. ISO8601 supported " + "format is yyyy-MM-dd'T'HH:mm:ss.SSSZ. Configured " + "timeout value is " + timeout.getValue();
                throw new YarnException(message, ex);
            }
            if (expireTime < currentTimeMillis) {
                String message = "Expire time is less than current time, current-time=" + Times.formatISO8601(currentTimeMillis) + " expire-time=" + Times.formatISO8601(expireTime);
                throw new YarnException(message);
            }
            newApplicationTimeout.put(timeout.getKey(), expireTime);
        }
    }
    return newApplicationTimeout;
}
Also used : HashMap(java.util.HashMap) ApplicationTimeoutType(org.apache.hadoop.yarn.api.records.ApplicationTimeoutType) ParseException(java.text.ParseException) Map(java.util.Map) YarnException(org.apache.hadoop.yarn.exceptions.YarnException)
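
A minimal caller-side sketch of how these timeouts might be fed through the validator. The class name, main method, and the chosen LIFETIME value are illustrative only; the sketch uses just the APIs already visible in the example above.

import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils;
import org.apache.hadoop.yarn.util.Times;

public class TimeoutValidationSketch {
    public static void main(String[] args) {
        // Build a LIFETIME timeout one hour in the future, formatted as ISO8601.
        String expiry = Times.formatISO8601(System.currentTimeMillis() + 3600_000L);
        Map<ApplicationTimeoutType, String> timeouts =
                Collections.singletonMap(ApplicationTimeoutType.LIFETIME, expiry);
        try {
            Map<ApplicationTimeoutType, Long> epochTimeouts =
                    RMServerUtils.validateISO8601AndConvertToLocalTimeEpoch(timeouts);
            System.out.println("Validated timeouts: " + epochTimeouts);
        } catch (YarnException e) {
            // Thrown when a value is not ISO8601 or is already in the past.
            System.err.println("Rejected timeout: " + e.getMessage());
        }
    }
}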

Example 72 with YarnException

use of org.apache.hadoop.yarn.exceptions.YarnException in project hadoop by apache.

the class NodesListManager method disableHostsFileReader.

private void disableHostsFileReader(Exception ex) {
    LOG.warn("Failed to init hostsReader, disabling", ex);
    try {
        this.includesFile = conf.get(YarnConfiguration.DEFAULT_RM_NODES_INCLUDE_FILE_PATH);
        this.excludesFile = conf.get(YarnConfiguration.DEFAULT_RM_NODES_EXCLUDE_FILE_PATH);
        this.hostsReader = createHostsFileReader(this.includesFile, this.excludesFile);
        setDecomissionedNMs();
    } catch (IOException ioe2) {
        // Should *never* happen
        this.hostsReader = null;
        throw new YarnRuntimeException(ioe2);
    } catch (YarnException e) {
        // Should *never* happen
        this.hostsReader = null;
        throw new YarnRuntimeException(e);
    }
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException)
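
The method above follows a fallback pattern: retry initialization with built-in defaults and wrap any checked failure in an unchecked YarnRuntimeException. A small standalone sketch of that idiom follows; the Initializer interface and helper class are hypothetical and not part of Hadoop.

import java.io.IOException;

import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;

class HostsReaderFallbackSketch {

    // Hypothetical stand-in for the re-initialization work done above.
    interface Initializer {
        void init() throws IOException, YarnException;
    }

    static void initOrFailFast(Initializer fallback) {
        try {
            fallback.init();
        } catch (IOException | YarnException e) {
            // The fallback runs with defaults, so a failure here is unexpected
            // and is surfaced as an unchecked exception.
            throw new YarnRuntimeException(e);
        }
    }
}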

Example 73 with YarnException

use of org.apache.hadoop.yarn.exceptions.YarnException in project hadoop by apache.

the class NodesListManager method serviceInit.

@Override
protected void serviceInit(Configuration conf) throws Exception {
    this.conf = conf;
    int nodeIpCacheTimeout = conf.getInt(YarnConfiguration.RM_NODE_IP_CACHE_EXPIRY_INTERVAL_SECS, YarnConfiguration.DEFAULT_RM_NODE_IP_CACHE_EXPIRY_INTERVAL_SECS);
    if (nodeIpCacheTimeout <= 0) {
        resolver = new DirectResolver();
    } else {
        resolver = new CachedResolver(SystemClock.getInstance(), nodeIpCacheTimeout);
        addIfService(resolver);
    }
    // Read the hosts/exclude files to restrict access to the RM
    try {
        this.includesFile = conf.get(YarnConfiguration.RM_NODES_INCLUDE_FILE_PATH, YarnConfiguration.DEFAULT_RM_NODES_INCLUDE_FILE_PATH);
        this.excludesFile = conf.get(YarnConfiguration.RM_NODES_EXCLUDE_FILE_PATH, YarnConfiguration.DEFAULT_RM_NODES_EXCLUDE_FILE_PATH);
        this.hostsReader = createHostsFileReader(this.includesFile, this.excludesFile);
        setDecomissionedNMs();
        printConfiguredHosts();
    } catch (YarnException ex) {
        disableHostsFileReader(ex);
    } catch (IOException ioe) {
        disableHostsFileReader(ioe);
    }
    final int nodeRemovalTimeout = conf.getInt(YarnConfiguration.RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC, YarnConfiguration.DEFAULT_RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC);
    nodeRemovalCheckInterval = (Math.min(nodeRemovalTimeout / 2, 600000));
    removalTimer = new Timer("Node Removal Timer");
    removalTimer.schedule(new TimerTask() {

        @Override
        public void run() {
            long now = Time.monotonicNow();
            for (Map.Entry<NodeId, RMNode> entry : rmContext.getInactiveRMNodes().entrySet()) {
                NodeId nodeId = entry.getKey();
                RMNode rmNode = entry.getValue();
                if (isUntrackedNode(rmNode.getHostName())) {
                    if (rmNode.getUntrackedTimeStamp() == 0) {
                        rmNode.setUntrackedTimeStamp(now);
                    } else if (now - rmNode.getUntrackedTimeStamp() > nodeRemovalTimeout) {
                        RMNode result = rmContext.getInactiveRMNodes().remove(nodeId);
                        if (result != null) {
                            decrInactiveNMMetrics(rmNode);
                            LOG.info("Removed " + result.getState().toString() + " node " + result.getHostName() + " from inactive nodes list");
                        }
                    }
                } else {
                    rmNode.setUntrackedTimeStamp(0);
                }
            }
        }
    }, nodeRemovalCheckInterval, nodeRemovalCheckInterval);
    super.serviceInit(conf);
}
Also used : Entry(java.util.Map.Entry) RMNode(org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode) Timer(java.util.Timer) TimerTask(java.util.TimerTask) NodeId(org.apache.hadoop.yarn.api.records.NodeId) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException)
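
The keys read in serviceInit() can be set ahead of time by a cluster operator. A short sketch follows; the file paths and timeout values are chosen purely for illustration.

import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class NodesListConfigSketch {
    public static YarnConfiguration build() {
        YarnConfiguration conf = new YarnConfiguration();
        // Hosts allowed to register with the RM.
        conf.set(YarnConfiguration.RM_NODES_INCLUDE_FILE_PATH, "/etc/hadoop/yarn.include");
        // Hosts to be decommissioned.
        conf.set(YarnConfiguration.RM_NODES_EXCLUDE_FILE_PATH, "/etc/hadoop/yarn.exclude");
        // Cache resolved node IPs for ten minutes; a value <= 0 disables the cache.
        conf.setInt(YarnConfiguration.RM_NODE_IP_CACHE_EXPIRY_INTERVAL_SECS, 600);
        // Drop untracked nodes from the inactive list after one hour.
        conf.setInt(YarnConfiguration.RM_NODEMANAGER_UNTRACKED_REMOVAL_TIMEOUT_MSEC,
                60 * 60 * 1000);
        return conf;
    }
}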

Example 74 with YarnException

use of org.apache.hadoop.yarn.exceptions.YarnException in project hadoop by apache.

the class ClientRMService method getApplicationAttemptReport.

@Override
public GetApplicationAttemptReportResponse getApplicationAttemptReport(GetApplicationAttemptReportRequest request) throws YarnException, IOException {
    ApplicationAttemptId appAttemptId = request.getApplicationAttemptId();
    UserGroupInformation callerUGI;
    try {
        callerUGI = UserGroupInformation.getCurrentUser();
    } catch (IOException ie) {
        LOG.info("Error getting UGI ", ie);
        throw RPCUtil.getRemoteException(ie);
    }
    RMApp application = this.rmContext.getRMApps().get(appAttemptId.getApplicationId());
    if (application == null) {
        // If the RM doesn't have the application, throw
        // ApplicationNotFoundException and let the client handle it.
        throw new ApplicationNotFoundException("Application with id '" + request.getApplicationAttemptId().getApplicationId() + "' doesn't exist in RM. Please check that the job " + "submission was successful.");
    }
    boolean allowAccess = checkAccess(callerUGI, application.getUser(), ApplicationAccessType.VIEW_APP, application);
    GetApplicationAttemptReportResponse response = null;
    if (allowAccess) {
        RMAppAttempt appAttempt = application.getAppAttempts().get(appAttemptId);
        if (appAttempt == null) {
            throw new ApplicationAttemptNotFoundException("ApplicationAttempt with id '" + appAttemptId + "' doesn't exist in RM.");
        }
        ApplicationAttemptReport attemptReport = appAttempt.createApplicationAttemptReport();
        response = GetApplicationAttemptReportResponse.newInstance(attemptReport);
    } else {
        throw new YarnException("User " + callerUGI.getShortUserName() + " does not have privilege to see this attempt " + appAttemptId);
    }
    return response;
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) RMAppAttempt(org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt) ApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptReport) ApplicationNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException) GetApplicationAttemptReportResponse(org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) ApplicationAttemptNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException)
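
From the client side, the same checks surface through YarnClient. The sketch below uses a hypothetical attempt ID and only illustrates how the YarnException raised by ClientRMService reaches the caller.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class AttemptReportSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        // Hypothetical IDs for illustration only.
        ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);

        YarnClient client = YarnClient.createYarnClient();
        client.init(conf);
        client.start();
        try {
            ApplicationAttemptReport report = client.getApplicationAttemptReport(attemptId);
            System.out.println("Attempt state: " + report.getYarnApplicationAttemptState());
        } catch (YarnException e) {
            // Covers the not-found and access-denied cases thrown by the RM above.
            System.err.println("RM rejected the request: " + e.getMessage());
        } finally {
            client.stop();
        }
    }
}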

Example 75 with YarnException

use of org.apache.hadoop.yarn.exceptions.YarnException in project hadoop by apache.

the class AdminService method refreshClusterMaxPriority.

@Override
public RefreshClusterMaxPriorityResponse refreshClusterMaxPriority(RefreshClusterMaxPriorityRequest request) throws YarnException, IOException {
    final String operation = "refreshClusterMaxPriority";
    final String msg = "refresh cluster max priority";
    UserGroupInformation user = checkAcls(operation);
    checkRMStatus(user.getShortUserName(), operation, msg);
    try {
        refreshClusterMaxPriority();
        RMAuditLogger.logSuccess(user.getShortUserName(), operation, "AdminService");
        return recordFactory.newRecordInstance(RefreshClusterMaxPriorityResponse.class);
    } catch (YarnException e) {
        throw logAndWrapException(e, user.getShortUserName(), operation, msg);
    }
}
Also used : YarnException(org.apache.hadoop.yarn.exceptions.YarnException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
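
Operators typically reach this handler through the ResourceManager admin protocol (the yarn rmadmin CLI offers the same refresh). The sketch below assumes the usual newInstance() factory on RefreshClusterMaxPriorityRequest and only shows where the YarnException ends up on the caller's side.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshClusterMaxPriorityRequest;

public class RefreshMaxPrioritySketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new YarnConfiguration();
        // Obtain an admin-protocol proxy to the ResourceManager.
        ResourceManagerAdministrationProtocol admin =
                ClientRMProxy.createRMProxy(conf, ResourceManagerAdministrationProtocol.class);
        try {
            admin.refreshClusterMaxPriority(RefreshClusterMaxPriorityRequest.newInstance());
        } catch (YarnException e) {
            // Raised when the caller fails the ACL check or the RM is not active.
            System.err.println("Refresh failed: " + e.getMessage());
        }
    }
}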

Aggregations

YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 287
IOException (java.io.IOException): 149
Test (org.junit.Test): 107
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 61
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 44
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 31
Configuration (org.apache.hadoop.conf.Configuration): 26
ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport): 26
ArrayList (java.util.ArrayList): 25
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 25
ApplicationNotFoundException (org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException): 25
AccessControlException (org.apache.hadoop.security.AccessControlException): 22
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 21
UndeclaredThrowableException (java.lang.reflect.UndeclaredThrowableException): 17
Path (org.apache.hadoop.fs.Path): 17
ReservationSubmissionRequest (org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest): 15
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 15
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 14
HashMap (java.util.HashMap): 13
ApplicationSubmissionContext (org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext): 13