Use of com.dtstack.taier.pluginapi.pojo.ComponentTestResult in project Taier by DTStack.
The class ConsoleComponentService, method refresh.
/**
 * Refresh component information.
 *
 * @param clusterName cluster name
 * @return connectivity test results for the refreshed components
 */
public List<ComponentTestResult> refresh(String clusterName) {
    List<ComponentTestResult> refreshResults = new ArrayList<>();
    // currently only the YARN component is re-tested on refresh
    ComponentTestResult componentTestResult = testConnect(clusterName, EComponentType.YARN.getTypeCode(), null);
    refreshResults.add(componentTestResult);
    return refreshResults;
}
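A minimal sketch of how a caller might consume these results, assuming a ConsoleComponentService instance named consoleComponentService, an SLF4J logger, and a placeholder cluster name; only getters that appear in the snippets on this page (getResult, getErrorMsg) are used.

List<ComponentTestResult> results = consoleComponentService.refresh("default_cluster");
for (ComponentTestResult result : results) {
    if (!result.getResult()) {
        LOG.warn("YARN connectivity check failed: {}", result.getErrorMsg());
    }
}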
Use of com.dtstack.taier.pluginapi.pojo.ComponentTestResult in project Taier by DTStack.
The class ConsoleComponentService, method testConnects.
/**
 * Test the connectivity of all components in a cluster.
 *
 * @param clusterName cluster name
 * @return one aggregated test result per component type
 */
public List<ComponentMultiTestResult> testConnects(String clusterName) {
    Cluster cluster = clusterMapper.getByClusterName(clusterName);
    List<Component> components = getComponents(cluster);
    if (CollectionUtils.isEmpty(components)) {
        return new ArrayList<>();
    }
    Map sftpMap = componentService.getComponentByClusterId(cluster.getId(), EComponentType.SFTP.getTypeCode(), false, Map.class, null);
    // run every component test asynchronously on the connect pool
    Map<Component, CompletableFuture<ComponentTestResult>> completableFutureMap = components.stream()
            .collect(Collectors.toMap(component -> component,
                    c -> CompletableFuture.supplyAsync(() -> testComponentWithResult(clusterName, cluster, sftpMap, c), connectPool)));
    // wait for all tests; a timeout or failure is converted into a failed ComponentTestResult
    CompletableFuture<List<ComponentTestResult>> completableFuture = CompletableFuture
            .allOf(completableFutureMap.values().toArray(new CompletableFuture[0]))
            .thenApply((f) -> completableFutureMap.keySet().stream().map(component -> {
                try {
                    return completableFutureMap.get(component).get(env.getTestConnectTimeout(), TimeUnit.SECONDS);
                } catch (Exception e) {
                    ComponentTestResult testResult = new ComponentTestResult();
                    testResult.setResult(false);
                    testResult.setErrorMsg(ExceptionUtil.getErrorMessage(e));
                    testResult.setComponentVersion(component.getVersionValue());
                    testResult.setComponentTypeCode(component.getComponentTypeCode());
                    return testResult;
                }
            }).collect(Collectors.toList()));
    try {
        List<ComponentTestResult> componentTestResults = completableFuture.get();
        // group results by component type so multi-version components are reported together
        Map<Integer, List<ComponentTestResult>> componentCodeResultMap = componentTestResults.stream()
                .collect(Collectors.groupingBy(ComponentTestResult::getComponentTypeCode,
                        Collectors.collectingAndThen(Collectors.toList(), c -> c)));
        return componentCodeResultMap.keySet().stream().map(componentCode -> {
            ComponentMultiTestResult multiTestResult = new ComponentMultiTestResult(componentCode);
            multiTestResult.setMultiVersion(componentCodeResultMap.get(componentCode));
            List<ComponentTestResult> testResults = componentCodeResultMap.get(componentCode);
            // the aggregated result passes only if every version passed
            multiTestResult.setResult(testResults.stream().allMatch(ComponentTestResult::getResult));
            // surface the first non-blank error message, if any
            testResults.stream()
                    .filter(componentTestResult -> StringUtils.isNotBlank(componentTestResult.getErrorMsg()))
                    .findFirst()
                    .ifPresent(errorResult -> multiTestResult.setErrorMsg(errorResult.getErrorMsg()));
            return multiTestResult;
        }).collect(Collectors.toList());
    } catch (Exception e) {
        throw new RdosDefineException(e);
    }
}
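A standalone sketch of the fan-out pattern above, with the Taier types replaced by plain strings: CompletableFuture.allOf waits for every check, and the per-future get with a timeout turns a hung or failed check into an error entry instead of blocking the whole batch. The pool size, timeout, and checkOne helper are illustrative, not Taier code.

import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;

public class FanOutSketch {
    private static final ExecutorService POOL = Executors.newFixedThreadPool(4);

    public static void main(String[] args) throws Exception {
        List<String> components = Arrays.asList("YARN", "HDFS", "SFTP");
        // one future per component, all submitted to the same pool
        Map<String, CompletableFuture<String>> futures = components.stream()
                .collect(Collectors.toMap(c -> c,
                        c -> CompletableFuture.supplyAsync(() -> checkOne(c), POOL)));

        List<String> results = CompletableFuture
                .allOf(futures.values().toArray(new CompletableFuture[0]))
                .thenApply(ignored -> futures.entrySet().stream().map(e -> {
                    try {
                        // per-future timeout: a stuck check becomes an error entry
                        return e.getValue().get(5, TimeUnit.SECONDS);
                    } catch (Exception ex) {
                        return e.getKey() + ": failed (" + ex.getMessage() + ")";
                    }
                }).collect(Collectors.toList()))
                .get();

        results.forEach(System.out::println);
        POOL.shutdown();
    }

    private static String checkOne(String component) {
        return component + ": ok";
    }
}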
Use of com.dtstack.taier.pluginapi.pojo.ComponentTestResult in project Taier by DTStack.
The class DtHdfsClient, method checkHdfsConnect.
private ComponentTestResult checkHdfsConnect(Config testConnectConf) {
    // test HDFS connectivity
    ComponentTestResult componentTestResult = new ComponentTestResult();
    try {
        if (null == testConnectConf) {
            componentTestResult.setResult(false);
            componentTestResult.setErrorMsg("配置信息不能为空");
            return componentTestResult;
        }
        // run the probe inside a Kerberos login context when security is enabled
        KerberosUtils.login(testConnectConf, () -> {
            FileSystem fs = null;
            try {
                Configuration configuration = this.initYarnConf(testConnectConf.getHadoopConf());
                fs = FileSystem.get(configuration);
                // probing the remote application log dir forces a round trip to the NameNode
                Path path = new Path(configuration.get("yarn.nodemanager.remote-app-log-dir"));
                fs.exists(path);
            } catch (Exception e) {
                componentTestResult.setResult(false);
                componentTestResult.setErrorMsg(ExceptionUtil.getErrorMessage(e));
                return componentTestResult;
            } finally {
                if (null != fs) {
                    try {
                        fs.close();
                    } catch (IOException e) {
                        LOG.error("close file system error ", e);
                    }
                }
            }
            componentTestResult.setResult(true);
            return componentTestResult;
        }, KerberosUtils.convertMapConfToConfiguration(testConnectConf.getHadoopConf()));
    } catch (Exception e) {
        LOG.error("close hdfs connect error ", e);
        componentTestResult.setResult(false);
        componentTestResult.setErrorMsg(ExceptionUtil.getErrorMessage(e));
    }
    return componentTestResult;
}
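A minimal, non-Kerberos sketch of the probe performed inside the login callback: build a Configuration, open a FileSystem, and call exists() on a path to force a NameNode round trip. The fs.defaultFS value and probed path below are placeholders, not Taier configuration.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsProbe {
    public static boolean canReach(String defaultFs, String probePath) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", defaultFs);
        try (FileSystem fs = FileSystem.get(conf)) {
            // exists() performs an RPC to the NameNode, which is what proves connectivity
            fs.exists(new Path(probePath));
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(canReach("hdfs://localhost:9000", "/tmp"));
    }
}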
Use of com.dtstack.taier.pluginapi.pojo.ComponentTestResult in project Taier by DTStack.
The class HadoopClient, method testYarnConnect.
private ComponentTestResult testYarnConnect(ComponentTestResult testResult, Config allConfig) {
    HadoopConf hadoopConf = new HadoopConf();
    hadoopConf.initYarnConf(allConfig.getYarnConf());
    List<NodeReport> nodes = new ArrayList<>();
    try (YarnClient testYarnClient = YarnClient.createYarnClient()) {
        testYarnClient.init(hadoopConf.getYarnConfiguration());
        testYarnClient.start();
        // sum memory and vcores over all running NodeManagers
        nodes = testYarnClient.getNodeReports(NodeState.RUNNING);
        int totalMemory = 0;
        int totalCores = 0;
        for (NodeReport rep : nodes) {
            totalMemory += rep.getCapability().getMemory();
            totalCores += rep.getCapability().getVirtualCores();
        }
        // capture the queue tree alongside the cluster resource totals
        List<ComponentTestResult.QueueDescription> descriptions = getQueueDescription(null, testYarnClient.getRootQueueInfos());
        testResult.setClusterResourceDescription(new ComponentTestResult.ClusterResourceDescription(nodes.size(), totalMemory, totalCores, descriptions));
    } catch (Exception e) {
        LOG.error("test yarn connect error", e);
        testResult.setErrorMsg(ExceptionUtil.getErrorMessage(e));
        return testResult;
    }
    testResult.setResult(true);
    return testResult;
}
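A standalone sketch of the same YARN aggregation using only standard YarnClient APIs: list RUNNING NodeManagers and sum their memory and vcores. It reads yarn-site.xml from the classpath via YarnConfiguration rather than Taier's HadoopConf.

import java.util.List;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class YarnResourceProbe {
    public static void main(String[] args) throws Exception {
        YarnClient yarnClient = YarnClient.createYarnClient();
        try {
            yarnClient.init(new YarnConfiguration());
            yarnClient.start();
            List<NodeReport> nodes = yarnClient.getNodeReports(NodeState.RUNNING);
            int totalMemory = 0;
            int totalCores = 0;
            for (NodeReport node : nodes) {
                totalMemory += node.getCapability().getMemory();
                totalCores += node.getCapability().getVirtualCores();
            }
            System.out.printf("nodes=%d memoryMB=%d vcores=%d%n",
                    nodes.size(), totalMemory, totalCores);
        } finally {
            // release the RPC connection once the totals are collected
            yarnClient.stop();
        }
    }
}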
Use of com.dtstack.taier.pluginapi.pojo.ComponentTestResult in project Taier by DTStack.
The class DtYarnClient, method testYarnConnect.
private ComponentTestResult testYarnConnect(ComponentTestResult testResult, Config allConfig) {
    try {
        HadoopConf hadoopConf = new HadoopConf();
        hadoopConf.initYarnConf(allConfig.getYarnConf());
        YarnClient testYarnClient = YarnClient.createYarnClient();
        testYarnClient.init(hadoopConf.getYarnConfiguration());
        testYarnClient.start();
        // sum memory and vcores over all running NodeManagers
        List<NodeReport> nodes = testYarnClient.getNodeReports(NodeState.RUNNING);
        int totalMemory = 0;
        int totalCores = 0;
        for (NodeReport rep : nodes) {
            totalMemory += rep.getCapability().getMemory();
            totalCores += rep.getCapability().getVirtualCores();
        }
        // optionally prefix queue names with the root queue when full paths are requested
        boolean isFullPath = hadoopConf.getYarnConfiguration().getBoolean(ConfigConstrant.IS_FULL_PATH_KEY, false);
        String rootQueueName = isFullPath ? getRootQueueName(testYarnClient) : "";
        List<ComponentTestResult.QueueDescription> descriptions = getQueueDescription(rootQueueName, testYarnClient.getRootQueueInfos(), isFullPath);
        testResult.setClusterResourceDescription(new ComponentTestResult.ClusterResourceDescription(nodes.size(), totalMemory, totalCores, descriptions));
    } catch (Exception e) {
        LOG.error("test yarn connect error", e);
        throw new PluginDefineException(e);
    }
    testResult.setResult(true);
    return testResult;
}
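Note that, unlike the HadoopClient variant above, this method never stops testYarnClient after the check. Wrapping the client in try-with-resources as HadoopClient does, or calling stop() in a finally block, would release the underlying RPC connection once the resource description has been captured.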