use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class UserController method login.
/**
 * Authenticates a user by username/password and establishes a session on success.
 *
 * @param userName login name (request parameter "username"); must not be blank
 * @param password plain-text password (request parameter "password"); must not be blank
 * @param request  current HTTP request, forwarded to the login service
 * @param response current HTTP response, forwarded to the login service
 * @return the authenticated user's name wrapped in {@code R.ok}
 * @throws RdosDefineException if either credential is blank, the user does not
 *                             exist, or the password does not match
 */
@PostMapping(value = "/login")
public R<String> login(@RequestParam(value = "username") String userName, @RequestParam(value = "password") String password, HttpServletRequest request, HttpServletResponse response) {
    // Reject blank credentials up front, before any datastore access.
    if (StringUtils.isBlank(userName)) {
        throw new RdosDefineException("userName can not be null");
    }
    if (StringUtils.isBlank(password)) {
        throw new RdosDefineException("password can not be null");
    }
    // Lookup is done on the trimmed name so accidental surrounding whitespace
    // in the form field does not cause a spurious "user not found".
    User user = userService.getByUserName(userName.trim());
    if (null == user) {
        throw new RdosDefineException(ErrorCode.USER_IS_NULL);
    }
    // SECURITY NOTE(review): passwords are stored as unsalted MD5 and compared
    // directly; MD5 is broken for password hashing — migrating to bcrypt/argon2
    // would require a coordinated change to MD5Util and the stored hashes.
    String md5Password = MD5Util.getMd5String(password);
    // equalsIgnoreCase tolerates upper/lower-case hex digests in the store.
    if (!md5Password.equalsIgnoreCase(user.getPassword())) {
        throw new RdosDefineException("password not correct");
    }
    // Build the session principal from the persisted user record.
    DtUser dtUser = new DtUser();
    dtUser.setUserId(user.getId());
    dtUser.setUserName(user.getUserName());
    dtUser.setEmail(user.getEmail());
    dtUser.setPhone(user.getPhoneNumber());
    // Delegates cookie/session creation to the login service.
    loginService.onAuthenticationSuccess(request, response, dtUser);
    return R.ok(dtUser.getUserName());
}
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class DependComputePart method getPluginName.
// Resolves the unique plugin name for this compute part from the configured
// resource component (e.g. YARN) and its version. Reads several fields of the
// enclosing class: deployType, storageType, componentScheduleGroup, context,
// type, versionName.
@Override
public String getPluginName() {
// Fail fast if the deploy type is invalid for this part.
validDeployType(deployType);
// A storage component must be configured before a plugin can be chosen.
if (null == storageType) {
throw new RdosDefineException(ErrorCode.STORE_COMPONENT_NOT_CONFIG);
}
// At least one RESOURCE-scheduled component (e.g. YARN) must exist.
List<Component> components = componentScheduleGroup.get(EComponentScheduleType.RESOURCE);
if (CollectionUtils.isEmpty(components)) {
throw new RdosDefineException(ErrorCode.RESOURCE_COMPONENT_NOT_CONFIG);
}
// Only the first resource component is consulted; presumably at most one is
// configured per cluster — TODO confirm.
Component resourceComponent = components.get(0);
String resourceVersion = resourceComponent.getVersionName();
EComponentType resourceType = EComponentType.getByCode(resourceComponent.getComponentTypeCode());
Optional<JSONObject> resourceModelConfig = context.getModelConfig(resourceType, resourceVersion);
if (!resourceModelConfig.isPresent()) {
// NOTE(review): the format arguments are (resourceType, type, versionName) —
// the latter two are fields of this part, not the resource component's
// resourceVersion; verify the message placeholders match this intent.
throw new RdosDefineException(Strings.format(ErrorCode.RESOURCE_NOT_SUPPORT_COMPONENT_VERSION.getMsg(), resourceType, type, versionName));
}
// 唯一的pluginName (the unique pluginName)
return getValueInConfigWithResourceStore(resourceModelConfig.get(), resourceComponent, this::getPluginNameInModelOrByConfigVersion);
}
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class DatasourceService method executeOnSpecifySourceWithOutResult.
/**
 * Executes a SQL statement against the given data source, discarding any result set.
 *
 * @param sourceId     id of the data source to run against
 * @param sql          the SQL statement to execute
 * @param targetSchema only meaningful for Doris sources; other types ignore it
 *                     (currently unused in this method — TODO confirm callers)
 * @throws RdosDefineException if the source type does not support table creation,
 *                             or if execution fails (original cause preserved)
 */
private void executeOnSpecifySourceWithOutResult(Long sourceId, String sql, String targetSchema) {
    BatchDataSource source = getOne(sourceId);
    DataSourceType dataSourceType = DataSourceType.getSourceType(source.getType());
    if (!SUPPORT_CREATE_TABLE_DATASOURCES.contains(dataSourceType)) {
        throw new RdosDefineException(String.format("只支持创建%s数据源表", SUPPORT_CREATE_TABLE_DATASOURCES_NAMES));
    }
    JSONObject json = JSON.parseObject(source.getDataJson());
    try {
        Map<String, Object> kerberosConfig = fillKerberosConfig(sourceId);
        Map<String, Object> expandConfigPrepare = expandConfigPrepare(sourceId);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, expandConfigPrepare);
        IClient iClient = ClientCache.getClient(dataSourceType.getVal());
        // try-with-resources: the connection was previously obtained and never
        // closed, leaking one JDBC connection per call.
        try (Connection con = iClient.getCon(sourceDTO)) {
            DBUtil.executeSqlWithoutResultSet(con, sql, false);
        }
    } catch (Exception e) {
        // Wrap with the failing SQL for diagnosis; original cause is preserved.
        throw new RdosDefineException(e.getMessage() + "。 执行sql = " + sql, e);
    }
}
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class DatasourceService method syncReaderBuild.
/**
 * Builds the data-sync Reader implementation matching the given source type.
 *
 * @param sourceType data source type code (see {@link DataSourceType})
 * @param sourceMap  reader configuration, deserialized into the concrete Reader class
 * @param sourceIds  ids of the underlying data sources (RDB readers take all,
 *                   ODPS takes only the first)
 * @return the configured Reader
 * @throws IOException         if deserializing {@code sourceMap} into the Reader fails
 * @throws RdosDefineException if the source type is not supported as a sync source
 */
private Reader syncReaderBuild(final Integer sourceType, final Map<String, Object> sourceMap, final List<Long> sourceIds) throws IOException {
    Reader reader = null;
    // Generic RDBMS path: any RDB type that is NOT handled by a dedicated
    // branch below (Hive/Impala/SparkThrift/CarbonData have their own readers).
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType)) && !DataSourceType.HIVE.getVal().equals(sourceType) && !DataSourceType.HIVE1X.getVal().equals(sourceType) && !DataSourceType.HIVE3X.getVal().equals(sourceType) && !DataSourceType.CarbonData.getVal().equals(sourceType) && !DataSourceType.IMPALA.getVal().equals(sourceType) && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, RDBReader.class);
        ((RDBBase) reader).setSourceIds(sourceIds);
        return reader;
    }
    if (DataSourceType.HDFS.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HDFSReader.class);
    }
    // All Hive flavors and SparkThrift share the HiveReader.
    if (DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType) || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HiveReader.class);
    }
    if (DataSourceType.HBASE.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HBaseReader.class);
    }
    if (DataSourceType.FTP.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, FtpReader.class);
        // Collapsed the if/else pair: header flag is true only when the key is
        // present AND its value unboxes to true (same short-circuit as before).
        ((FtpReader) reader).setFirstLineHeader(sourceMap.containsKey("isFirstLineHeader") && (Boolean) sourceMap.get("isFirstLineHeader"));
        return reader;
    }
    if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, OdpsReader.class);
        // ODPS readers are single-source: only the first id is used.
        ((OdpsBase) reader).setSourceId(sourceIds.get(0));
        return reader;
    }
    if (DataSourceType.ES.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, EsReader.class);
    }
    if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, MongoDbReader.class);
    }
    if (DataSourceType.CarbonData.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, CarbonDataReader.class);
    }
    if (DataSourceType.Kudu.getVal().equals(sourceType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.Kudu.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, InfluxDBReader.class);
    }
    if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
        // setSftpConf时,设置的hdfsConfig和sftpConf
        // (hdfsConfig and sftpConf are populated when setSftpConf runs)
        if (sourceMap.containsKey(HADOOP_CONFIG)) {
            Object impalaConfig = sourceMap.get(HADOOP_CONFIG);
            if (impalaConfig instanceof Map) {
                // Removed a no-op re-put of HADOOP_CONFIG with its own value;
                // only the nested sftpConf needs to be hoisted to the top level.
                sourceMap.put("sftpConf", ((Map) impalaConfig).get("sftpConf"));
            }
        }
        return syncBuilderFactory.getSyncBuilder(DataSourceType.IMPALA.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.AWS_S3.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, AwsS3Reader.class);
    }
    throw new RdosDefineException("暂不支持" + DataSourceType.getSourceType(sourceType).name() + "作为数据同步的源");
}
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class DatasourceService method getTableColumnIncludePart.
/**
 * Fetches the column metadata of a table, optionally including partition columns.
 *
 * @param source    the data source record; must not be null
 * @param tableName table to inspect
 * @param part      whether partition columns should be kept (null is treated as false)
 * @param schema    schema the table belongs to
 * @return one JSON object per column, each carrying an extra "isPart" flag
 * @throws RdosDefineException wrapping any lookup failure as GET_COLUMN_ERROR
 */
private List<JSONObject> getTableColumnIncludePart(BatchDataSource source, String tableName, Boolean part, String schema) {
    try {
        // Thrown inside the try so the existing catch chain decides how it surfaces.
        if (source == null) {
            throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_DATA_SOURCE);
        }
        // A null flag defaults to "exclude partition columns".
        boolean includePart = part != null && part;
        JSONObject connInfo = JSONObject.parseObject(source.getDataJson());
        Map<String, Object> kerberos = fillKerberosConfig(source.getId());
        IClient client = ClientCache.getClient(source.getType());
        SqlQueryDTO query = SqlQueryDTO.builder()
                .tableName(tableName)
                .schema(schema)
                .filterPartitionColumns(includePart)
                .build();
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(connInfo, source.getType(), kerberos, Maps.newHashMap());
        List<ColumnMetaDTO> columns = client.getColumnMetaData(sourceDTO, query);
        List<JSONObject> result = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columns)) {
            for (ColumnMetaDTO column : columns) {
                // Round-trip through JSON to flatten the DTO, then tag partition info.
                JSONObject item = JSON.parseObject(JSON.toJSONString(column));
                item.put("isPart", column.getPart());
                result.add(item);
            }
        }
        return result;
    } catch (DtCenterDefException e) {
        // Already a domain exception — propagate untouched.
        throw e;
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.GET_COLUMN_ERROR, e);
    }
}
Aggregations