Example usage of com.dtstack.taier.common.exception.DtCenterDefException in the Taier project by DTStack:
class DatasourceService, method getTableColumnIncludePart.
/**
 * Queries the columns of a table, optionally including partition columns.
 *
 * @param source    the data source to query; must not be null
 * @param tableName name of the table whose columns are listed
 * @param part      whether partition columns are required; null is treated as false
 * @param schema    schema the table belongs to
 * @return one JSON object per column, with an extra "isPart" flag marking partition columns
 * @throws RdosDefineException with CAN_NOT_FIND_DATA_SOURCE if {@code source} is null,
 *                             or GET_COLUMN_ERROR if fetching column metadata fails
 */
private List<JSONObject> getTableColumnIncludePart(BatchDataSource source, String tableName, Boolean part, String schema) {
    // Validate outside the try block: previously this throw was caught by the
    // generic catch below and re-wrapped as GET_COLUMN_ERROR, masking the
    // CAN_NOT_FIND_DATA_SOURCE error code.
    if (source == null) {
        throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_DATA_SOURCE);
    }
    if (part == null) {
        part = false;
    }
    try {
        JSONObject dataJson = JSONObject.parseObject(source.getDataJson());
        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
        IClient iClient = ClientCache.getClient(source.getType());
        // filterPartitionColumns controls whether partition columns come back
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableName(tableName).schema(schema).filterPartitionColumns(part).build();
        ISourceDTO iSourceDTO = SourceDTOType.getSourceDTO(dataJson, source.getType(), kerberosConfig, Maps.newHashMap());
        List<ColumnMetaDTO> columnMetaData = iClient.getColumnMetaData(iSourceDTO, sqlQueryDTO);
        List<JSONObject> list = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnMetaData)) {
            for (ColumnMetaDTO columnMetaDTO : columnMetaData) {
                // round-trip through JSON to flatten the DTO, then attach the partition flag
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                list.add(jsonObject);
            }
        }
        return list;
    } catch (DtCenterDefException e) {
        // already classified by the datasource plugin layer — propagate as-is
        throw e;
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.GET_COLUMN_ERROR, e);
    }
}
Example usage of com.dtstack.taier.common.exception.DtCenterDefException in the Taier project by DTStack:
class DatasourceService, method kerberosConnectPrepare.
/**
 * Pre-processing before a kerberos-authenticated connection: rewrites the
 * kerberos parameters (e.g. replaces relative file paths with absolute ones)
 * so they can be used directly by the connection attempt.
 *
 * @param sourceId id of the data source whose kerberos config is prepared
 * @return a copy of the kerberos configuration, adjusted for connecting;
 *         returned unchanged (empty) when the source has no kerberos config
 */
public Map<String, Object> kerberosConnectPrepare(Long sourceId) {
    DsInfo dsInfo = dsInfoService.getOneById(sourceId);
    DataSourceTypeEnum sourceTypeEnum = DataSourceTypeEnum.typeVersionOf(dsInfo.getDataType(), dsInfo.getDataVersion());
    if (Objects.isNull(sourceTypeEnum)) {
        throw new PubSvcDefineException(ErrorCode.CAN_NOT_FITABLE_SOURCE_TYPE);
    }
    // Work on a copy so the cached/original kerberos config is never mutated.
    Map<String, Object> originKerberosConf = fillKerberosConfig(dsInfo.getId());
    HashMap<String, Object> preparedConf = new HashMap<>(originKerberosConf);
    // No kerberos configured — nothing to prepare.
    if (!MapUtils.isNotEmpty(originKerberosConf)) {
        return preparedConf;
    }
    String localKerberosPath = kerberosService.getLocalKerberosPath(sourceId);
    IKerberos kerberos = ClientCache.getKerberos(sourceTypeEnum.getVal());
    try {
        // Mutates preparedConf in place (path rewriting etc.).
        kerberos.prepareKerberosForConnect(preparedConf, localKerberosPath);
    } catch (Exception e) {
        LOGGER.error("kerberos连接预处理失败!{}", e.getMessage(), e);
        throw new DtCenterDefException(String.format("kerberos连接预处理失败,Caused by: %s", e.getMessage()), e);
    }
    return preparedConf;
}
Example usage of com.dtstack.taier.common.exception.DtCenterDefException in the Taier project by DTStack:
class HdfsOperator, method getFileStatus.
/**
 * Returns the HDFS {@link FileStatus} for {@code dir}.
 *
 * @param conf     HDFS client configuration
 * @param kerberos kerberos configuration (may be empty for non-secure clusters)
 * @param dir      path to inspect
 * @throws DtCenterDefException if the path does not exist
 */
public static FileStatus getFileStatus(Map<String, Object> conf, Map<String, Object> kerberos, String dir) {
    HdfsSourceDTO hdfsSource = getSourceDTO(conf, kerberos);
    IHdfsFile hdfsFileClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    // Guard clause: fail fast when the path is missing.
    if (!isFileExist(conf, kerberos, dir)) {
        throw new DtCenterDefException(dir + " 文件不存在");
    }
    return hdfsFileClient.getStatus(hdfsSource, uri(dir));
}
Example usage of com.dtstack.taier.common.exception.DtCenterDefException in the Taier project by DTStack:
class BatchTaskService, method getTableColumnIncludePart.
/**
 * Queries the columns of a table, optionally including partition columns.
 *
 * @param source    the data source to query; must not be null
 * @param tableName name of the table whose columns are listed
 * @param part      whether partition columns are required; null is treated as false
 * @param schema    schema the table belongs to
 * @return one JSON object per column, with an extra "isPart" flag marking partition columns
 * @throws RdosDefineException with CAN_NOT_FIND_DATA_SOURCE if {@code source} is null,
 *                             or GET_COLUMN_ERROR if fetching column metadata fails
 */
private List<JSONObject> getTableColumnIncludePart(BatchDataSource source, String tableName, Boolean part, String schema) {
    // Validate outside the try block: previously this throw was caught by the
    // generic catch below and re-wrapped as GET_COLUMN_ERROR, masking the
    // CAN_NOT_FIND_DATA_SOURCE error code.
    if (source == null) {
        throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_DATA_SOURCE);
    }
    if (part == null) {
        part = false;
    }
    try {
        JSONObject dataJson = JSONObject.parseObject(source.getDataJson());
        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
        IClient iClient = ClientCache.getClient(source.getType());
        // filterPartitionColumns controls whether partition columns come back
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableName(tableName).schema(schema).filterPartitionColumns(part).build();
        ISourceDTO iSourceDTO = SourceDTOType.getSourceDTO(dataJson, source.getType(), kerberosConfig, Maps.newHashMap());
        List<ColumnMetaDTO> columnMetaData = iClient.getColumnMetaData(iSourceDTO, sqlQueryDTO);
        List<JSONObject> list = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnMetaData)) {
            for (ColumnMetaDTO columnMetaDTO : columnMetaData) {
                // round-trip through JSON to flatten the DTO, then attach the partition flag
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                list.add(jsonObject);
            }
        }
        return list;
    } catch (DtCenterDefException e) {
        // already classified by the datasource plugin layer — propagate as-is
        throw e;
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.GET_COLUMN_ERROR, e);
    }
}
Example usage of com.dtstack.taier.common.exception.DtCenterDefException in the Taier project by DTStack:
class HiveSelectDownload, method readNext.
/**
 * Reads the next row from the underlying plugin downloader, normalizing Hive
 * text NULL markers per cell and appending a NO_PERMISSION placeholder for
 * every excluded (unauthorized) column.
 *
 * @return the processed row as a list of strings
 * @throws DtCenterDefException if the underlying reader fails
 */
@Override
public Object readNext() {
    try {
        List<String> rawRow = (List<String>) pluginDownloader.readNext();
        // Prefer the explicitly requested field list; fall back to all columns.
        List<String> selectedColumns = CollectionUtils.isNotEmpty(queryFieldNames) ? queryFieldNames : columnNames;
        List<String> processedRow = new ArrayList<>();
        for (int i = 0; i < selectedColumns.size(); i++) {
            processedRow.add(dealHiveTextNull(rawRow.get(i)));
        }
        // Mask every column the caller is not permitted to read.
        excludeCol.forEach(idx -> processedRow.add(NO_PERMISSION));
        return processedRow;
    } catch (Exception e) {
        throw new DtCenterDefException(String.format("下载器readNext失败:%s", e.getMessage()), e);
    }
}
Aggregations