Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
The class CommonResource, method newInstance.
public ComputeResourceType newInstance(JobClient jobClient) {
    EScheduleJobType taskType = EScheduleJobType.getByTaskType(jobClient.getTaskType());
    // Build and cache one resource handler per task type.
    CommonResource resource = resources.computeIfAbsent(taskType, k -> {
        CommonResource commonResource = null;
        switch (taskType) {
            case SYNC:
                commonResource = new FlinkResource();
                commonResource.setClusterMapper(clusterMapper);
                commonResource.setClusterService(clusterService);
                commonResource.setComponentService(componentService);
                commonResource.setClusterTenantMapper(clusterTenantMapper);
                break;
            case SPARK_SQL:
            case SPARK:
                commonResource = this;
                break;
            default:
                throw new RdosDefineException("taskType:" + taskType + " is not supported.");
        }
        return commonResource;
    });
    return resource.getComputeResourceType(jobClient);
}
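The resources map acts as a lazily populated per-task-type cache: the handler for a task type is built on first use and reused afterwards. Below is a minimal, self-contained sketch of the same computeIfAbsent pattern; the Handler interface and the task-type strings are illustrative stand-ins, not Taier types.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class HandlerCache {

    // Hypothetical stand-in for CommonResource.
    interface Handler {
        String describe();
    }

    private final Map<String, Handler> handlers = new ConcurrentHashMap<>();

    public Handler forTaskType(String taskType) {
        // The mapping function runs at most once per key; later calls reuse the cached instance.
        return handlers.computeIfAbsent(taskType, k -> {
            switch (k) {
                case "SYNC":
                    return () -> "flink-backed handler";
                case "SPARK":
                    return () -> "spark-backed handler";
                default:
                    throw new IllegalArgumentException("taskType:" + k + " is not supported.");
            }
        });
    }

    public static void main(String[] args) {
        HandlerCache cache = new HandlerCache();
        System.out.println(cache.forTaskType("SYNC").describe()); // built on first call
        System.out.println(cache.forTaskType("SYNC").describe()); // served from the cache
    }
}

One caveat worth noting: throwing from inside computeIfAbsent leaves no entry in the map, so an unsupported task type fails on every call rather than caching a partial result.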
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
The class FlinkResource, method getComputeResourceType.
@Override
public ComputeResourceType getComputeResourceType(JobClient jobClient) {
    long tenantId = jobClient.getTenantId();
    EComponentType componentType = resourceComponents.computeIfAbsent(tenantId, e -> getResourceEComponentType(jobClient));
    Properties properties = jobClient.getConfProperties();
    ComputeType computeType = jobClient.getComputeType();
    String modeStr = properties.getProperty(FLINK_TASK_RUN_MODE_KEY);
    if (EComponentType.FLINK.getTypeCode().equals(componentType.getTypeCode())) {
        // flink on standalone
        return ComputeResourceType.FlinkOnStandalone;
    } else if (EComponentType.YARN.getTypeCode().equals(componentType.getTypeCode())) {
        if (StringUtils.isEmpty(modeStr)) {
            // No explicit run mode: stream jobs default to per-job on YARN, batch jobs to a shared session.
            if (ComputeType.STREAM == computeType) {
                return ComputeResourceType.Yarn;
            } else {
                return ComputeResourceType.FlinkYarnSession;
            }
        }
        if (SESSION.equalsIgnoreCase(modeStr)) {
            return ComputeResourceType.FlinkYarnSession;
        } else if (PER_JOB.equalsIgnoreCase(modeStr)) {
            return ComputeResourceType.Yarn;
        } else if (STANDALONE.equalsIgnoreCase(modeStr)) {
            return ComputeResourceType.FlinkOnStandalone;
        }
    }
    throw new RdosDefineException("unsupported mode: " + modeStr);
}
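The branching above is effectively a small decision table: a standalone Flink component short-circuits everything, an explicit run mode wins next, and otherwise stream jobs go per-job while batch jobs share a session. A self-contained sketch of those rules, with plain enums standing in for Taier's types (all names and mode strings here are illustrative):

public class RunModeResolver {

    enum Resource { FLINK_ON_STANDALONE, YARN, FLINK_YARN_SESSION }

    enum Compute { STREAM, BATCH }

    static Resource resolve(boolean standaloneCluster, Compute computeType, String mode) {
        // A standalone Flink deployment overrides any configured run mode.
        if (standaloneCluster) {
            return Resource.FLINK_ON_STANDALONE;
        }
        // No explicit mode: stream jobs run per-job on YARN, batch jobs share a session.
        if (mode == null || mode.isEmpty()) {
            return computeType == Compute.STREAM ? Resource.YARN : Resource.FLINK_YARN_SESSION;
        }
        switch (mode.toLowerCase()) {
            case "session":
                return Resource.FLINK_YARN_SESSION;
            case "per_job":
                return Resource.YARN;
            case "standalone":
                return Resource.FLINK_ON_STANDALONE;
            default:
                throw new IllegalArgumentException("unsupported mode: " + mode);
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve(false, Compute.STREAM, null));     // YARN
        System.out.println(resolve(false, Compute.BATCH, null));      // FLINK_YARN_SESSION
        System.out.println(resolve(false, Compute.BATCH, "SESSION")); // FLINK_YARN_SESSION
    }
}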
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
The class FlinkResource, method getResourceEComponentType.
public EComponentType getResourceEComponentType(JobClient jobClient) {
    long tenantId = jobClient.getTenantId();
    Long clusterId = clusterTenantMapper.getClusterIdByTenantId(tenantId);
    Component flinkComponent = componentService.getComponentByClusterId(clusterId, EComponentType.FLINK.getTypeCode(), null);
    // Infer the resource component from how the tenant's Flink component is deployed.
    if (null != flinkComponent) {
        if (EDeployType.STANDALONE.getType() == flinkComponent.getDeployType()) {
            return EComponentType.FLINK;
        } else if (EDeployType.YARN.getType() == flinkComponent.getDeployType()) {
            return EComponentType.YARN;
        }
    }
    throw new RdosDefineException("resource EComponentType not found");
}
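This lookup is just a two-way mapping from a cluster's deploy type to the component that supplies compute resources, with a failure path when nothing matches. The same shape as a tiny table-driven helper (the enum types below are stand-ins, not Taier's):

import java.util.EnumMap;
import java.util.Map;

public class DeployTypeMapping {

    enum DeployType { STANDALONE, YARN }

    enum ComponentType { FLINK, YARN }

    private static final Map<DeployType, ComponentType> MAPPING = new EnumMap<>(DeployType.class);

    static {
        MAPPING.put(DeployType.STANDALONE, ComponentType.FLINK); // standalone cluster: Flink itself holds the resources
        MAPPING.put(DeployType.YARN, ComponentType.YARN);        // YARN deployment: YARN is the resource provider
    }

    static ComponentType resolve(DeployType deployType) {
        ComponentType component = MAPPING.get(deployType);
        if (component == null) {
            // Mirrors the original's exception path for unmapped deploy types.
            throw new IllegalStateException("resource component not found for " + deployType);
        }
        return component;
    }

    public static void main(String[] args) {
        System.out.println(resolve(DeployType.YARN)); // YARN
    }
}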
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
The class AbstractJobBuilder, method getTriggerRange.
/**
 * Validate that the given times are well-formed and build the trigger time range.
 *
 * @param triggerDay the target day
 * @param beginTime  start time of day (HH:mm); defaults to 00:00
 * @param endTime    end time of day (HH:mm); defaults to 23:59
 * @return the resulting time range
 */
private Pair<Date, Date> getTriggerRange(String triggerDay, String beginTime, String endTime) {
    if (StringUtils.isBlank(triggerDay)) {
        throw new RdosDefineException("triggerDay must not be null");
    }
    // Defaults are HH:mm because seconds are appended below.
    if (StringUtils.isBlank(beginTime)) {
        beginTime = "00:00";
    }
    if (StringUtils.isBlank(endTime)) {
        endTime = "23:59";
    }
    String start = triggerDay + " " + beginTime + ":00";
    String end = triggerDay + " " + endTime + ":59";
    Date startDate = DateUtil.parseDate(start, DateUtil.STANDARD_DATETIME_FORMAT, Locale.CHINA);
    Date endDate = DateUtil.parseDate(end, DateUtil.STANDARD_DATETIME_FORMAT, Locale.CHINA);
    if (startDate == null || endDate == null) {
        throw new RdosDefineException("triggerDay or beginTime or endTime invalid");
    }
    return new ImmutablePair<>(startDate, endDate);
}
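Concretely, the method expands a day string plus optional begin/end times of day into a [start, end] window, pinning seconds to :00 and :59. A self-contained sketch of the same expansion using java.time instead of Taier's DateUtil (names and formats here are illustrative):

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;

public class TriggerRange {

    private static final DateTimeFormatter DAY = DateTimeFormatter.ofPattern("yyyy-MM-dd");
    private static final DateTimeFormatter TIME = DateTimeFormatter.ofPattern("HH:mm");

    static LocalDateTime[] range(String triggerDay, String beginTime, String endTime) {
        if (triggerDay == null || triggerDay.isBlank()) {
            throw new IllegalArgumentException("triggerDay must not be null");
        }
        LocalDate day = LocalDate.parse(triggerDay, DAY);
        // Default to the whole day when the bounds are omitted.
        LocalTime begin = LocalTime.parse(isBlank(beginTime) ? "00:00" : beginTime, TIME);
        LocalTime end = LocalTime.parse(isBlank(endTime) ? "23:59" : endTime, TIME);
        // Pin seconds to :00 for the start and :59 for the end, as the snippet does.
        return new LocalDateTime[] { day.atTime(begin), day.atTime(end.withSecond(59)) };
    }

    private static boolean isBlank(String s) {
        return s == null || s.isBlank();
    }

    public static void main(String[] args) {
        LocalDateTime[] r = range("2024-01-01", null, null);
        System.out.println(r[0] + " .. " + r[1]); // 2024-01-01T00:00 .. 2024-01-01T23:59:59
    }
}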
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
The class BatchResourceService, method uploadHDFSFileWithResource.
/**
 * Upload a resource file to HDFS.
 *
 * @param tenantId         id of the tenant the resource belongs to
 * @param resourceName     name of the resource
 * @param originalFilename original file name of the uploaded resource
 * @param tmpPath          local temporary path of the uploaded file
 * @return the HDFS path the file was written to
 */
private String uploadHDFSFileWithResource(Long tenantId, String resourceName, String originalFilename, String tmpPath) {
    if (StringUtils.isBlank(originalFilename) || StringUtils.isBlank(tmpPath)) {
        throw new RdosDefineException(ErrorCode.DATA_NOT_FIND);
    }
    String hdfsFileName = String.format("%s_%s_%s", tenantId, resourceName, originalFilename);
    String hdfsPath = this.getBatchHdfsPath(tenantId, hdfsFileName);
    try {
        HdfsOperator.uploadLocalFileToHdfs(HadoopConf.getConfiguration(tenantId), HadoopConf.getHadoopKerberosConf(tenantId), tmpPath, hdfsPath);
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.SERVER_EXCEPTION, e);
    } finally {
        // Always remove the local temp file, whether or not the upload succeeded.
        File tmpFile = new File(tmpPath);
        if (tmpFile.exists()) {
            tmpFile.delete();
        }
    }
    return hdfsPath;
}
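The shape here is a common upload pattern: validate inputs, build a deterministic target name, attempt the transfer, wrap low-level failures in the service exception, and always clean up the local temp file in finally. A minimal sketch of that shape using java.nio.file in place of Taier's HdfsOperator (copyToStore is a hypothetical stand-in for the HDFS call):

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class UploadWithCleanup {

    // Hypothetical stand-in for HdfsOperator.uploadLocalFileToHdfs: copies into a local "store".
    static void copyToStore(Path src, Path dest) throws IOException {
        Files.createDirectories(dest.getParent());
        Files.copy(src, dest);
    }

    static Path upload(Path tmpFile, Path storeDir, String targetName) {
        Path target = storeDir.resolve(targetName);
        try {
            copyToStore(tmpFile, target);
        } catch (IOException e) {
            // Mirrors wrapping into RdosDefineException(SERVER_EXCEPTION, e).
            throw new UncheckedIOException("upload failed", e);
        } finally {
            // The temp file is removed whether or not the upload succeeded.
            try {
                Files.deleteIfExists(tmpFile);
            } catch (IOException ignored) {
                // best-effort cleanup
            }
        }
        return target;
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("resource", ".jar");
        Path stored = upload(tmp, Path.of("build", "store"), "1_myResource_demo.jar");
        System.out.println("stored at " + stored + ", tmp still exists: " + Files.exists(tmp));
    }
}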