Search in sources:

Example 1 with ClusterResource

Use of com.dtstack.taier.pluginapi.pojo.ClusterResource in the Taier project by DTStack.

From the class HadoopClient, method getClusterResource:

/**
 * Collects cluster resource information from YARN: per-node capacity and
 * usage, aggregated resource metrics, scheduler info and queue resources.
 * All YARN calls run inside a Kerberos login context.
 *
 * @return the populated {@link ClusterResource}; may be only partially filled
 *         if an error occurred while querying YARN (such errors are logged,
 *         not rethrown from the inner block)
 * @throws PluginDefineException if the Kerberos login itself fails
 */
@Override
public ClusterResource getClusterResource() {
    ClusterResource clusterResource = new ClusterResource();
    try {
        KerberosUtils.login(config, () -> {
            YarnClient resourceClient = null;
            try {
                resourceClient = YarnClient.createYarnClient();
                resourceClient.init(conf);
                resourceClient.start();
                List<NodeReport> nodes = resourceClient.getNodeReports(NodeState.RUNNING);
                List<ClusterResource.NodeDescription> clusterNodes = new ArrayList<>();
                // Primitive accumulators: never null here, avoids autoboxing on +=.
                int totalMem = 0;
                int totalCores = 0;
                int usedMem = 0;
                int usedCores = 0;
                for (NodeReport rep : nodes) {
                    ClusterResource.NodeDescription node = new ClusterResource.NodeDescription();
                    // Node name is the host part of the "host:port" HTTP address.
                    String nodeName = rep.getHttpAddress().split(":")[0];
                    node.setNodeName(nodeName);
                    node.setMemory(rep.getCapability().getMemory());
                    node.setUsedMemory(rep.getUsed().getMemory());
                    node.setUsedVirtualCores(rep.getUsed().getVirtualCores());
                    node.setVirtualCores(rep.getCapability().getVirtualCores());
                    clusterNodes.add(node);
                    // Accumulate cluster-wide totals and usage.
                    Resource capability = rep.getCapability();
                    Resource used = rep.getUsed();
                    totalMem += capability.getMemory();
                    totalCores += capability.getVirtualCores();
                    usedMem += used.getMemory();
                    usedCores += used.getVirtualCores();
                }
                ClusterResource.ResourceMetrics metrics = createResourceMetrics(totalMem, usedMem, totalCores, usedCores);
                clusterResource.setNodes(clusterNodes);
                // BUGFIX: use the freshly created & started resourceClient here, not the
                // yarnClient field — mirrors DtYarnClient#getClusterResource, which uses
                // resourceClient for both calls.
                String webAddress = getYarnWebAddress(resourceClient);
                String schedulerUrl = String.format(YARN_SCHEDULER_FORMAT, webAddress);
                String schedulerInfoMsg = PoolHttpClient.get(schedulerUrl, null);
                JSONObject schedulerInfo = JSONObject.parseObject(schedulerInfoMsg);
                if (schedulerInfo.containsKey("scheduler")) {
                    clusterResource.setScheduleInfo(schedulerInfo.getJSONObject("scheduler").getJSONObject("schedulerInfo"));
                }
                clusterResource.setQueues(getQueueResource(resourceClient));
                clusterResource.setResourceMetrics(metrics);
            } catch (Exception e) {
                // Deliberate best-effort: log and fall through so the caller still
                // receives whatever was gathered before the failure.
                LOG.error("get cluster resource error ", e);
            } finally {
                if (null != resourceClient) {
                    try {
                        resourceClient.close();
                    } catch (IOException e) {
                        LOG.error("close resource client error ", e);
                    }
                }
            }
            return clusterResource;
        }, conf);
    } catch (Exception e) {
        throw new PluginDefineException(e.getMessage());
    }
    return clusterResource;
}
Also used : ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource) JSONObject(com.alibaba.fastjson.JSONObject) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException)

Example 2 with ClusterResource

Use of com.dtstack.taier.pluginapi.pojo.ClusterResource in the Taier project by DTStack.

From the class HiveClient, method main:

/**
 * Standalone smoke test: reads a ParamAction JSON request from the first line
 * of the file given as {@code args[0]}, initializes a {@link HiveClient} from
 * the request's plugin info, and logs the resulting cluster resource.
 *
 * @param args args[0] = path to a file whose first line is the request JSON
 * @throws IOException declared for interface stability; in practice all
 *                     failures (including close failures) are caught and logged
 */
public static void main(String[] args) throws IOException {
    try {
        System.setProperty("HADOOP_USER_NAME", "admin");
        // input params json file path
        String filePath = args[0];
        File paramsFile = new File(filePath);
        // try-with-resources closes every stream even when an intermediate
        // constructor throws (the old finally leaked fileInputStream in that case).
        try (FileInputStream fileInputStream = new FileInputStream(paramsFile);
             InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream);
             BufferedReader reader = new BufferedReader(inputStreamReader)) {
            String request = reader.readLine();
            Map params = PublicUtil.jsonStrToObject(request, Map.class);
            ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
            JobClient jobClient = new JobClient(paramAction);
            String pluginInfo = jobClient.getPluginInfo();
            Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
            // md5 of the plugin info is stored alongside it for cache keying.
            String md5plugin = MD5Util.getMd5String(pluginInfo);
            properties.setProperty("md5sum", md5plugin);
            HiveClient client = new HiveClient();
            client.init(properties);
            ClusterResource clusterResource = client.getClusterResource();
            LOG.info("submit success!");
            LOG.info(clusterResource.toString());
            System.exit(0);
        }
    } catch (Exception e) {
        LOG.error("submit error!", e);
    }
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) Properties(java.util.Properties) Map(java.util.Map) JobClient(com.dtstack.taier.pluginapi.JobClient) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource)

Example 3 with ClusterResource

Use of com.dtstack.taier.pluginapi.pojo.ClusterResource in the Taier project by DTStack.

From the class HadoopClient, method main:

/**
 * Standalone smoke test: reads a ParamAction JSON request from the first line
 * of the file given as {@code args[0]}, initializes a {@link HadoopClient}
 * from the request's plugin info, and logs the resulting cluster resource.
 *
 * @param args args[0] = path to a file whose first line is the request JSON
 * @throws Exception declared for interface stability; in practice all
 *                   failures (including close failures) are caught and logged
 */
public static void main(String[] args) throws Exception {
    try {
        System.setProperty("HADOOP_USER_NAME", "admin");
        // input params json file path
        String filePath = args[0];
        File paramsFile = new File(filePath);
        // try-with-resources closes every stream even when an intermediate
        // constructor throws (the old finally leaked fileInputStream in that case).
        try (FileInputStream fileInputStream = new FileInputStream(paramsFile);
             InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream);
             BufferedReader reader = new BufferedReader(inputStreamReader)) {
            String request = reader.readLine();
            Map params = PublicUtil.jsonStrToObject(request, Map.class);
            ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
            JobClient jobClient = new JobClient(paramAction);
            String pluginInfo = jobClient.getPluginInfo();
            Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
            // md5 of the plugin info is stored alongside it for cache keying.
            String md5plugin = MD5Util.getMd5String(pluginInfo);
            properties.setProperty("md5sum", md5plugin);
            HadoopClient client = new HadoopClient();
            client.init(properties);
            ClusterResource clusterResource = client.getClusterResource();
            LOG.info("submit success!");
            LOG.info(clusterResource.toString());
            System.exit(0);
        }
    } catch (Exception e) {
        LOG.error("submit error!", e);
    }
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) JobClient(com.dtstack.taier.pluginapi.JobClient) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource)

Example 4 with ClusterResource

Use of com.dtstack.taier.pluginapi.pojo.ClusterResource in the Taier project by DTStack.

From the class DtYarnClient, method main:

/**
 * Standalone smoke test: reads a ParamAction JSON request from the first line
 * of the file given as {@code args[0]}, initializes a {@link DtYarnClient}
 * from the request's plugin info, and logs the resulting cluster resource.
 * Unlike the sibling main methods, errors propagate to the JVM (no catch).
 *
 * @param args args[0] = path to a file whose first line is the request JSON
 * @throws Exception on any read, parse, init or resource-query failure
 */
public static void main(String[] args) throws Exception {
    System.setProperty("HADOOP_USER_NAME", "admin");
    // input params json file path
    String filePath = args[0];
    File paramsFile = new File(filePath);
    // try-with-resources: the original never closed the reader (and with it
    // the underlying FileInputStream), leaking the file handle.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(paramsFile)))) {
        String request = reader.readLine();
        Map params = PublicUtil.jsonStrToObject(request, Map.class);
        ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
        JobClient jobClient = new JobClient(paramAction);
        String pluginInfo = jobClient.getPluginInfo();
        Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
        // md5 of the plugin info is stored alongside it for cache keying.
        String md5plugin = MD5Util.getMd5String(pluginInfo);
        properties.setProperty("md5sum", md5plugin);
        DtYarnClient client = new DtYarnClient();
        client.init(properties);
        ClusterResource clusterResource = client.getClusterResource();
        LOG.info("submit success!");
        LOG.info(clusterResource.toString());
        System.exit(0);
    }
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) Properties(java.util.Properties) File(java.io.File) Map(java.util.Map) JobClient(com.dtstack.taier.pluginapi.JobClient) FileInputStream(java.io.FileInputStream) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource)

Example 5 with ClusterResource

Use of com.dtstack.taier.pluginapi.pojo.ClusterResource in the Taier project by DTStack.

From the class DtYarnClient, method getClusterResource:

/**
 * Collects cluster resource information from YARN: per-node capacity and
 * usage, aggregated resource metrics, scheduler info and queue resources.
 * All YARN calls run inside a Kerberos login context.
 *
 * @return the populated {@link ClusterResource}; may be only partially filled
 *         if an error occurred while querying YARN (such errors are logged,
 *         not rethrown from the inner block)
 * @throws PluginDefineException if the Kerberos login itself fails
 */
@Override
public ClusterResource getClusterResource() {
    ClusterResource clusterResource = new ClusterResource();
    try {
        KerberosUtils.login(config, () -> {
            YarnClient resourceClient = null;
            try {
                resourceClient = YarnClient.createYarnClient();
                resourceClient.init(configuration);
                resourceClient.start();
                List<NodeReport> nodes = resourceClient.getNodeReports(NodeState.RUNNING);
                List<ClusterResource.NodeDescription> clusterNodes = new ArrayList<>();
                // Primitive accumulators: never null here, avoids autoboxing on +=.
                int totalMem = 0;
                int totalCores = 0;
                int usedMem = 0;
                int usedCores = 0;
                for (NodeReport rep : nodes) {
                    ClusterResource.NodeDescription node = new ClusterResource.NodeDescription();
                    // Node name is the host part of the "host:port" HTTP address.
                    String nodeName = rep.getHttpAddress().split(":")[0];
                    node.setNodeName(nodeName);
                    node.setMemory(rep.getCapability().getMemory());
                    node.setUsedMemory(rep.getUsed().getMemory());
                    node.setUsedVirtualCores(rep.getUsed().getVirtualCores());
                    node.setVirtualCores(rep.getCapability().getVirtualCores());
                    clusterNodes.add(node);
                    // Accumulate cluster-wide totals and usage.
                    Resource capability = rep.getCapability();
                    Resource used = rep.getUsed();
                    totalMem += capability.getMemory();
                    totalCores += capability.getVirtualCores();
                    usedMem += used.getMemory();
                    usedCores += used.getVirtualCores();
                }
                ClusterResource.ResourceMetrics metrics = createResourceMetrics(totalMem, usedMem, totalCores, usedCores);
                clusterResource.setNodes(clusterNodes);
                String webAddress = getYarnWebAddress(resourceClient);
                String schedulerUrl = String.format(YARN_SCHEDULER_FORMAT, webAddress);
                String schedulerInfoMsg = PoolHttpClient.get(schedulerUrl, null);
                JSONObject schedulerInfo = JSONObject.parseObject(schedulerInfoMsg);
                if (schedulerInfo.containsKey("scheduler")) {
                    clusterResource.setScheduleInfo(schedulerInfo.getJSONObject("scheduler").getJSONObject("schedulerInfo"));
                }
                clusterResource.setQueues(getQueueResource(resourceClient));
                clusterResource.setResourceMetrics(metrics);
            } catch (Exception e) {
                // Deliberate best-effort: log and fall through so the caller still
                // receives whatever was gathered before the failure. (Old message
                // "close reource error" was a copy-paste from the close handler.)
                LOG.error("get cluster resource error ", e);
            } finally {
                if (null != resourceClient) {
                    try {
                        resourceClient.close();
                    } catch (IOException e) {
                        LOG.error("close resource client error ", e);
                    }
                }
            }
            return clusterResource;
        }, configuration);
    } catch (Exception e) {
        throw new PluginDefineException(e.getMessage());
    }
    return clusterResource;
}
Also used : ArrayList(java.util.ArrayList) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource) Resource(org.apache.hadoop.yarn.api.records.Resource) IOException(java.io.IOException) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) IOException(java.io.IOException) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource) JSONObject(com.alibaba.fastjson.JSONObject) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) NodeReport(org.apache.hadoop.yarn.api.records.NodeReport)

Aggregations

ClusterResource (com.dtstack.taier.pluginapi.pojo.ClusterResource)6 JSONObject (com.alibaba.fastjson.JSONObject)3 JobClient (com.dtstack.taier.pluginapi.JobClient)3 PluginDefineException (com.dtstack.taier.pluginapi.exception.PluginDefineException)3 ParamAction (com.dtstack.taier.pluginapi.pojo.ParamAction)3 Map (java.util.Map)2 Properties (java.util.Properties)2 YarnClient (org.apache.hadoop.yarn.client.api.YarnClient)2 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)2 RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException)1 BufferedReader (java.io.BufferedReader)1 File (java.io.File)1 FileInputStream (java.io.FileInputStream)1 IOException (java.io.IOException)1 InputStreamReader (java.io.InputStreamReader)1 ArrayList (java.util.ArrayList)1 NodeReport (org.apache.hadoop.yarn.api.records.NodeReport)1 Resource (org.apache.hadoop.yarn.api.records.Resource)1