use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
the class LogPluginDownload method init.
private void init() throws Exception {
    // kerberosConfig may arrive either as a JSON string or as an already-parsed Map
    Object kerberosConfig = hdfsConf.get("kerberosConfig");
    Map<String, Object> kerberosConfMap = null;
    if (Objects.nonNull(kerberosConfig)) {
        if (kerberosConfig instanceof String) {
            kerberosConfMap = PublicUtil.objectToMap(kerberosConfig);
        } else if (kerberosConfig instanceof Map) {
            kerberosConfMap = (Map<String, Object>) kerberosConfig;
        }
    }
    HdfsSourceDTO sourceDTO = HdfsSourceDTO.builder()
            .config(PublicUtil.objectToStr(hdfsConf))
            .defaultFS(hdfsConf.getOrDefault("fs.defaultFS", "").toString())
            .kerberosConfig(kerberosConfMap)
            .yarnConf(yarnConf)
            .appIdStr(applicationStr)
            .readLimit(readLimit)
            .user(user)
            .build();
    IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().build();
    hdfsLogDownloader = hdfsClient.getLogDownloader(sourceDTO, sqlQueryDTO);
}
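The hdfsLogDownloader built here is the loader's downloader handle. A minimal consumption sketch follows; apart from getMetaInfo(), which appears in the HiveSelectDownload example further down, the reachedEnd()/readNext()/close() calls are assumptions about the downloader contract rather than confirmed API:

// Hypothetical consumer of hdfsLogDownloader; reachedEnd()/readNext()/close()
// are assumed downloader methods, not taken from this snippet.
StringBuilder logText = new StringBuilder();
while (!hdfsLogDownloader.reachedEnd()) {       // assumed: true once every log block was read
    Object next = hdfsLogDownloader.readNext(); // assumed: returns the next chunk of log content
    if (next != null) {
        logText.append(next);
    }
}
hdfsLogDownloader.close();                      // assumed: releases the underlying HDFS resources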
use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
the class DatasourceService method getTableColumnIncludePart.
/**
 * Queries the columns of a table, optionally including partition columns.
 * @param source data source
 * @param tableName table name
 * @param part whether partition columns are needed
 * @param schema schema name (may be null)
 * @return column metadata as JSON objects
 */
private List<JSONObject> getTableColumnIncludePart(BatchDataSource source, String tableName, Boolean part, String schema) {
    try {
        if (source == null) {
            throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_DATA_SOURCE);
        }
        if (part == null) {
            part = false;
        }
        JSONObject dataJson = JSONObject.parseObject(source.getDataJson());
        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
        IClient iClient = ClientCache.getClient(source.getType());
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder()
                .tableName(tableName)
                .schema(schema)
                .filterPartitionColumns(part)
                .build();
        ISourceDTO iSourceDTO = SourceDTOType.getSourceDTO(dataJson, source.getType(), kerberosConfig, Maps.newHashMap());
        List<ColumnMetaDTO> columnMetaData = iClient.getColumnMetaData(iSourceDTO, sqlQueryDTO);
        List<JSONObject> list = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnMetaData)) {
            for (ColumnMetaDTO columnMetaDTO : columnMetaData) {
                // serialize the DTO and flag partition columns explicitly
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                list.add(jsonObject);
            }
        }
        return list;
    } catch (DtCenterDefException e) {
        throw e;
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.GET_COLUMN_ERROR, e);
    }
}
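A hypothetical call site for this helper might look like the sketch below; the printColumns method, the "orders" table and the "dwd" schema are made up for illustration, while the "key" and "isPart" fields come from the conversion above:

// Hypothetical caller; table and schema names are illustrative only.
private void printColumns(BatchDataSource hiveSource) {
    // part = true asks for partition columns as well; they are flagged via "isPart"
    List<JSONObject> columns = getTableColumnIncludePart(hiveSource, "orders", true, "dwd");
    for (JSONObject column : columns) {
        LOGGER.info("column {} isPart={}", column.getString("key"), column.getBoolean("isPart"));
    }
}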
use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
the class BatchTaskService method getTableColumnIncludePart.
/**
 * Queries the columns of a table, optionally including partition columns.
 * @param source data source
 * @param tableName table name
 * @param part whether partition columns are needed
 * @param schema schema name (may be null)
 * @return column metadata as JSON objects
 */
private List<JSONObject> getTableColumnIncludePart(BatchDataSource source, String tableName, Boolean part, String schema) {
    try {
        if (source == null) {
            throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_DATA_SOURCE);
        }
        if (part == null) {
            part = false;
        }
        JSONObject dataJson = JSONObject.parseObject(source.getDataJson());
        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
        IClient iClient = ClientCache.getClient(source.getType());
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder()
                .tableName(tableName)
                .schema(schema)
                .filterPartitionColumns(part)
                .build();
        ISourceDTO iSourceDTO = SourceDTOType.getSourceDTO(dataJson, source.getType(), kerberosConfig, Maps.newHashMap());
        List<ColumnMetaDTO> columnMetaData = iClient.getColumnMetaData(iSourceDTO, sqlQueryDTO);
        List<JSONObject> list = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnMetaData)) {
            for (ColumnMetaDTO columnMetaDTO : columnMetaData) {
                // serialize the DTO and flag partition columns explicitly
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                list.add(jsonObject);
            }
        }
        return list;
    } catch (DtCenterDefException e) {
        throw e;
    } catch (Exception e) {
        throw new RdosDefineException(ErrorCode.GET_COLUMN_ERROR, e);
    }
}
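Both copies translate failures the same way: a DtCenterDefException from the loader is re-thrown as-is, and anything else is wrapped into an RdosDefineException with ErrorCode.GET_COLUMN_ERROR. A minimal sketch of what a caller observes (the table name is hypothetical):

// Hypothetical failure path; "no_such_table" is illustrative only.
try {
    getTableColumnIncludePart(source, "no_such_table", false, null);
} catch (RdosDefineException e) {
    // non-loader failures arrive here wrapped with ErrorCode.GET_COLUMN_ERROR;
    // a DtCenterDefException thrown by the loader would have passed through unchanged
}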
use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
the class DatasourceService method preview.
/**
 * Data sync - fetch preview data; shows 3 rows by default.
 *
 * @param sourceId data source id
 * @param tableName table name
 * @return preview columns and rows
 * @author toutian
 */
public JSONObject preview(Long sourceId, String tableName, String schema) {
    BatchDataSource source = getOne(sourceId);
    StringBuffer newTableName = new StringBuffer();
    if (DataSourceType.SQLServer.getVal().equals(source.getType()) && StringUtils.isNotBlank(tableName)) {
        // SQLServer: wrap each part of the table name in brackets, e.g. dbo.orders -> [dbo].[orders]
        if (tableName.indexOf("[") == -1) {
            final String[] tableNames = tableName.split("\\.");
            for (final String name : tableNames) {
                newTableName.append("[").append(name).append("]").append(".");
            }
            tableName = newTableName.substring(0, newTableName.length() - 1);
        }
    }
    String dataJson = source.getDataJson();
    JSONObject json = JSON.parseObject(dataJson);
    // column names
    List<String> columnList = new ArrayList<String>();
    // preview rows
    List<List<String>> dataList = new ArrayList<List<String>>();
    try {
        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
        List<JSONObject> columnJson = getTableColumn(source, tableName, schema);
        if (CollectionUtils.isNotEmpty(columnJson)) {
            for (JSONObject columnMetaDTO : columnJson) {
                columnList.add(columnMetaDTO.getString("key"));
            }
        }
        IClient iClient = ClientCache.getClient(source.getType());
        ISourceDTO iSourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, Maps.newHashMap());
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().schema(schema).tableName(tableName).previewNum(3).build();
        dataList = iClient.getPreview(iSourceDTO, sqlQueryDTO);
        if (DataSourceType.getRDBMS().contains(source.getType())) {
            // RDBMS sources also return the column names as the first row, so drop it
            dataList = dataList.subList(1, dataList.size());
        }
    } catch (Exception e) {
        LOGGER.error("datasource preview end with error.", e);
        // message: "<dataName> failed to fetch preview data"
        throw new RdosDefineException(String.format("%s获取预览数据失败", source.getDataName()), e);
    }
    JSONObject preview = new JSONObject(2);
    preview.put("columnList", columnList);
    preview.put("dataList", dataList);
    return preview;
}
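For RDBMS sources, getPreview() also returns the column names as the first row, which is why the result is trimmed with subList(1, size). A tiny sketch of that trimming, with made-up data:

// Illustrative only: simulate a getPreview() result where the first row is the header.
List<List<String>> raw = new ArrayList<>();
raw.add(Arrays.asList("id", "name"));  // header row
raw.add(Arrays.asList("1", "foo"));
raw.add(Arrays.asList("2", "bar"));
List<List<String>> rows = raw.subList(1, raw.size());  // -> [[1, foo], [2, bar]]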
use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
the class HiveSelectDownload method init.
// Initialize the downloader and related parameters
private void init() throws Exception {
    Map<String, String> partitionMap = Maps.newHashMap();
    if (StringUtils.isNotEmpty(partition)) {
        List<Pair<String, String>> stringStringPair = splitPartition(partition);
        if (CollectionUtils.isNotEmpty(stringStringPair)) {
            for (int i = 0; i < stringStringPair.size(); i++) {
                partitionMap.put(stringStringPair.get(i).getKey(), stringStringPair.get(i).getValue());
            }
        }
    }
    // get the client and build the downloader
    ISourceDTO sourceDTO = Engine2DTOService.get(tenantId, null, dataSourceType, db, jdbcInfo);
    IClient client = ClientCache.getClient(dataSourceType);
    SqlQueryDTO queryDTO = SqlQueryDTO.builder()
            .tableName(tableName)
            .columns(queryFieldNames)
            .partitionColumns(partitionMap)
            .build();
    pluginDownloader = client.getDownloader(sourceDTO, queryDTO);
    this.columnNames = pluginDownloader.getMetaInfo();
    if (CollectionUtils.isNotEmpty(queryFieldNames)) {
        // when permissionStyle is true, columns without permission are still displayed
        if (permissionStyle) {
            // collect the columns that were not queried and append them to the display list
            excludeCol = columnNames.stream().filter(column -> {
                Boolean flag = false;
                for (String columnName : queryFieldNames) {
                    flag = columnName.equalsIgnoreCase(column);
                    if (flag) {
                        break;
                    }
                }
                return !flag;
            }).collect(Collectors.toList());
            fieldNamesShow.addAll(excludeCol);
        }
    } else {
        fieldNamesShow = new ArrayList<>(columnNames);
        queryFieldNames = columnNames;
    }
}
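splitPartition is referenced above but not shown; a hypothetical implementation, assuming a Hive-style "key=value/key=value" partition spec and commons-lang3's Pair (which matches the getKey()/getValue() calls above), could be:

// Hypothetical helper; assumes a Hive-style "pt=20220101/region=cn" partition string.
private List<Pair<String, String>> splitPartition(String partition) {
    List<Pair<String, String>> pairs = new ArrayList<>();
    for (String kv : partition.split("/")) {
        String[] parts = kv.split("=", 2);
        if (parts.length == 2) {
            pairs.add(Pair.of(parts[0], parts[1]));
        }
    }
    return pairs;
}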