
Example 56 with YarnClient

Use of org.apache.hadoop.yarn.client.api.YarnClient in project hadoop by apache.

The class ProtocolHATestBase, method createAndStartYarnClient.

protected YarnClient createAndStartYarnClient(Configuration conf) {
    Configuration configuration = new YarnConfiguration(conf);
    YarnClient client = YarnClient.createYarnClient();
    client.init(configuration);
    client.start();
    return client;
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient)
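
The helper above captures the standard YarnClient lifecycle: create the client, init it with a YarnConfiguration, start it, use it, and stop it when done. A minimal standalone sketch of the same pattern is shown below; the class name and the metrics call are illustrative, and it assumes a yarn-site.xml on the classpath so the client can find the ResourceManager.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class YarnClientLifecycleSketch {
    public static void main(String[] args) throws Exception {
        // YarnConfiguration layers yarn-default.xml / yarn-site.xml on top of the core config.
        Configuration conf = new YarnConfiguration();
        YarnClient client = YarnClient.createYarnClient();
        client.init(conf);
        client.start();
        try {
            // Any RPC call confirms the connection to the ResourceManager.
            YarnClusterMetrics metrics = client.getYarnClusterMetrics();
            System.out.println("NodeManagers in cluster: " + metrics.getNumNodeManagers());
        } finally {
            // stop() releases the RPC proxy and background threads.
            client.stop();
        }
    }
}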

Example 57 with YarnClient

Use of org.apache.hadoop.yarn.client.api.YarnClient in project hadoop by apache.

The class ProtocolHATestBase, method verifyClientConnection.

protected void verifyClientConnection() {
    int numRetries = 3;
    while (numRetries-- > 0) {
        Configuration conf = new YarnConfiguration(this.conf);
        YarnClient client = createAndStartYarnClient(conf);
        try {
            Thread.sleep(100);
            client.getApplications();
            return;
        } catch (Exception e) {
            LOG.error(e.getMessage());
        } finally {
            client.stop();
        }
    }
    fail("Client couldn't connect to the Active RM");
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException)

Example 58 with YarnClient

Use of org.apache.hadoop.yarn.client.api.YarnClient in project hadoop by apache.

The class TestRMFailover, method verifyClientConnection.

private void verifyClientConnection() {
    int numRetries = 3;
    while (numRetries-- > 0) {
        Configuration conf = new YarnConfiguration(this.conf);
        YarnClient client = YarnClient.createYarnClient();
        client.init(conf);
        client.start();
        try {
            client.getApplications();
            return;
        } catch (Exception e) {
            LOG.error(e);
        } finally {
            client.stop();
        }
    }
    fail("Client couldn't connect to the Active RM");
}
Also used : YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException)
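
Both verifyClientConnection variants treat a successful getApplications() call as proof that the client reached the active ResourceManager. The sketch below shows what such a call returns once the connection succeeds; the class and method names are hypothetical, and the client is assumed to be already started, as in the examples above.

import java.util.List;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;

public final class ApplicationListingSketch {
    // "client" is assumed to be an already started YarnClient, as in the examples above.
    static void printApplications(YarnClient client) throws Exception {
        // getApplications() round-trips to the active ResourceManager, so it fails
        // fast when no RM is reachable, which is exactly what the tests rely on.
        List<ApplicationReport> reports = client.getApplications();
        for (ApplicationReport report : reports) {
            System.out.println(report.getApplicationId()
                + " name=" + report.getName()
                + " user=" + report.getUser()
                + " state=" + report.getYarnApplicationState());
        }
    }
}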

Example 59 with YarnClient

Use of org.apache.hadoop.yarn.client.api.YarnClient in project apex-core by apache.

The class StramAgent, method retrieveWebServicesInfo.

private StramWebServicesInfo retrieveWebServicesInfo(String appId) {
    YarnClient yarnClient = YarnClient.createYarnClient();
    String url;
    try {
        yarnClient.init(conf);
        yarnClient.start();
        ApplicationReport ar = yarnClient.getApplicationReport(ConverterUtils.toApplicationId(appId));
        if (ar == null) {
            LOG.warn("YARN does not have record for this application {}", appId);
            return null;
        } else if (ar.getYarnApplicationState() != YarnApplicationState.RUNNING) {
            LOG.debug("Application {} is not running (state: {})", appId, ar.getYarnApplicationState());
            return null;
        }
        String trackingUrl = ar.getTrackingUrl();
        if (!trackingUrl.startsWith("http://") && !trackingUrl.startsWith("https://")) {
            url = "http://" + trackingUrl;
        } else {
            url = trackingUrl;
        }
        if (StringUtils.isBlank(url)) {
            LOG.error("Cannot get tracking url from YARN");
            return null;
        }
        if (url.endsWith("/")) {
            url = url.substring(0, url.length() - 1);
        }
        url += WebServices.PATH;
    } catch (Exception ex) {
        LOG.error("Cannot retrieve web services info", ex);
        return null;
    } finally {
        yarnClient.stop();
    }
    WebServicesClient webServicesClient = new WebServicesClient();
    try {
        JSONObject response;
        String secToken = null;
        ClientResponse clientResponse;
        int i = 0;
        while (true) {
            LOG.debug("Accessing url {}", url);
            clientResponse = webServicesClient.process(url, ClientResponse.class, new WebServicesClient.GetWebServicesHandler<ClientResponse>());
            String val = clientResponse.getHeaders().getFirst("Refresh");
            if (val == null) {
                break;
            }
            int index = val.indexOf("url=");
            if (index < 0) {
                break;
            }
            url = val.substring(index + 4);
            if (i++ > MAX_REDIRECTS) {
                LOG.error("Cannot get web service info -- exceeded the max number of redirects");
                return null;
            }
        }
        if (!UserGroupInformation.isSecurityEnabled()) {
            response = new JSONObject(clientResponse.getEntity(String.class));
        } else {
            if (UserGroupInformation.isSecurityEnabled()) {
                for (NewCookie nc : clientResponse.getCookies()) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Cookie " + nc.getName() + " " + nc.getValue());
                    }
                    if (nc.getName().equals(StramWSFilter.CLIENT_COOKIE)) {
                        secToken = nc.getValue();
                    }
                }
            }
            response = new JSONObject(clientResponse.getEntity(String.class));
        }
        String version = response.getString("version");
        response = webServicesClient.process(url + "/" + version + "/stram/info", JSONObject.class, new WebServicesClient.GetWebServicesHandler<JSONObject>());
        String appMasterUrl = response.getString("appMasterTrackingUrl");
        String appPath = response.getString("appPath");
        String user = response.getString("user");
        JSONObject permissionsInfo = null;
        try (FSDataInputStream is = fileSystem.open(new Path(appPath, "permissions.json"))) {
            permissionsInfo = new JSONObject(IOUtils.toString(is));
        } catch (FileNotFoundException ex) {
            // ignore if file is not found
        }
        return new StramWebServicesInfo(appMasterUrl, version, appPath, user, secToken, permissionsInfo);
    } catch (Exception ex) {
        LOG.warn("Cannot retrieve web service info for app {}", appId, ex);
        return null;
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) Path(org.apache.hadoop.fs.Path) FileNotFoundException(java.io.FileNotFoundException) WebServicesClient(com.datatorrent.stram.util.WebServicesClient) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) IncompatibleVersionException(com.datatorrent.stram.client.WebServicesVersionConversion.IncompatibleVersionException) IOException(java.io.IOException) JSONException(org.codehaus.jettison.json.JSONException) ClientHandlerException(com.sun.jersey.api.client.ClientHandlerException) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) JSONObject(org.codehaus.jettison.json.JSONObject) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) NewCookie(javax.ws.rs.core.NewCookie)
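
Much of the first half of retrieveWebServicesInfo normalizes the tracking URL that YARN reports before appending WebServices.PATH. A standalone sketch of that normalization is below; the helper and its servicePath parameter are hypothetical, not part of StramAgent, and unlike the snippet above it rejects a blank tracking URL before prepending the scheme (prepending first would let an empty URL slip past the blank check).

// Hypothetical helper (not part of StramAgent) mirroring the URL handling above.
public final class TrackingUrlSketch {
    static String normalizeTrackingUrl(String trackingUrl, String servicePath) {
        if (trackingUrl == null || trackingUrl.trim().isEmpty()) {
            return null; // YARN returned no usable tracking URL
        }
        String url = trackingUrl;
        // The RM may report a bare host:port, so default to http.
        if (!url.startsWith("http://") && !url.startsWith("https://")) {
            url = "http://" + url;
        }
        // Strip a trailing slash to avoid a double slash when appending the service path.
        if (url.endsWith("/")) {
            url = url.substring(0, url.length() - 1);
        }
        return url + servicePath; // e.g. WebServices.PATH in the snippet above
    }
}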

Example 60 with YarnClient

Use of org.apache.hadoop.yarn.client.api.YarnClient in project flink by apache.

The class AbstractYarnClusterDescriptor, method retrieve.

@Override
public YarnClusterClient retrieve(String applicationID) {
    try {
        // check if required Hadoop environment variables are set. If not, warn user
        if (System.getenv("HADOOP_CONF_DIR") == null && System.getenv("YARN_CONF_DIR") == null) {
            LOG.warn("Neither the HADOOP_CONF_DIR nor the YARN_CONF_DIR environment variable is set." + "The Flink YARN Client needs one of these to be set to properly load the Hadoop " + "configuration for accessing YARN.");
        }
        final ApplicationId yarnAppId = ConverterUtils.toApplicationId(applicationID);
        final YarnClient yarnClient = getYarnClient();
        final ApplicationReport appReport = yarnClient.getApplicationReport(yarnAppId);
        if (appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
            // Flink cluster is not running anymore
            LOG.error("The application {} doesn't run anymore. It has previously completed with final status: {}", applicationID, appReport.getFinalApplicationStatus());
            throw new RuntimeException("The Yarn application " + applicationID + " doesn't run anymore.");
        }
        LOG.info("Found application JobManager host name '{}' and port '{}' from supplied application id '{}'", appReport.getHost(), appReport.getRpcPort(), applicationID);
        flinkConfiguration.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, appReport.getHost());
        flinkConfiguration.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, appReport.getRpcPort());
        return createYarnClusterClient(this, yarnClient, appReport, flinkConfiguration, sessionFilesDir, false);
    } catch (Exception e) {
        throw new RuntimeException("Couldn't retrieve Yarn cluster", e);
    }
}
Also used : ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) URISyntaxException(java.net.URISyntaxException) InvocationTargetException(java.lang.reflect.InvocationTargetException) IllegalConfigurationException(org.apache.flink.configuration.IllegalConfigurationException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException)
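
The retrieve method decides whether the cluster is still attachable by checking that the application's final status is still UNDEFINED, i.e. the application has not completed. A minimal sketch of that check using only the YARN client API is below; the class and method names are hypothetical, and it assumes a YarnConfiguration that can locate the ResourceManager.

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ConverterUtils;

public final class YarnAppLivenessSketch {
    // Returns true while the application has not reached a terminal state.
    static boolean isStillRunning(String applicationID) throws Exception {
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();
        try {
            ApplicationId appId = ConverterUtils.toApplicationId(applicationID);
            ApplicationReport report = yarnClient.getApplicationReport(appId);
            // UNDEFINED means the application has not completed yet.
            return report.getFinalApplicationStatus() == FinalApplicationStatus.UNDEFINED;
        } finally {
            yarnClient.stop();
        }
    }
}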

Aggregations

YarnClient (org.apache.hadoop.yarn.client.api.YarnClient): 89
Test (org.junit.Test): 51
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 50
Configuration (org.apache.hadoop.conf.Configuration): 45
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 37
ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport): 21
CapacitySchedulerConfiguration (org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration): 18
IOException (java.io.IOException): 17
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 15
MiniYARNCluster (org.apache.hadoop.yarn.server.MiniYARNCluster): 15
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 14
Path (org.apache.hadoop.fs.Path): 13
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 13
FileSystem (org.apache.hadoop.fs.FileSystem): 11
Matchers.anyString (org.mockito.Matchers.anyString): 11
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 9
NodeId (org.apache.hadoop.yarn.api.records.NodeId): 9
ArrayList (java.util.ArrayList): 8
LocalFileSystem (org.apache.hadoop.fs.LocalFileSystem): 8
ReservationSubmissionRequest (org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest): 7