Use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
The class HiveBase, method inferHdfsParams.
protected void inferHdfsParams() {
    if (inferred.compareAndSet(false, true) && StringUtils.isNotBlank(table)) {
        DataSourceType sourceType = DataSourceType.getSourceType(dataSourceType);
        JSONObject dataJson = new JSONObject();
        dataJson.put(SourceDTOType.JDBC_URL, jdbcUrl);
        dataJson.put(SourceDTOType.JDBC_USERNAME, username);
        dataJson.put(SourceDTOType.JDBC_PASSWORD, password);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(dataJson, sourceType.getVal(), kerberosConfig, Maps.newHashMap());
        IClient client = ClientCache.getClient(sourceType.getVal());
        Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(this.table).build());
        List<ColumnMetaDTO> columnMetaData = tableInfo.getColumns();
        // Separate regular columns from partition columns.
        for (ColumnMetaDTO dto : columnMetaData) {
            if (!dto.getPart()) {
                fullColumnNames.add(dto.getKey());
                fullColumnTypes.add(dto.getType());
            } else {
                isPartitioned = true;
                partitionedBy.add(dto.getKey());
            }
        }
        if (isPartitioned) {
            ITable tableClient = ClientCache.getTable(sourceType.getVal());
            List<String> partitions = tableClient.showPartitions(sourceDTO, table);
            partitions.forEach(bean -> {
                partitionList.add(bean);
            });
        }
        this.dbName = tableInfo.getDb();
        this.path = tableInfo.getPath();
        this.fileType = tableInfo.getStoreType();
        this.fieldDelimiter = tableInfo.getDelim();
        this.isTransaction = tableInfo.getIsTransTable();
    }
    // Record each configured column's position within the table's column list.
    for (int i = 0; i < fullColumnNames.size(); i++) {
        for (Object col : column) {
            if (fullColumnNames.get(i).equals(((Map<String, Object>) col).get("key"))) {
                ((Map<String, Object>) col).put("index", i);
                break;
            }
        }
    }
}
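The final loop above aligns each user-configured column with its position in the table's full (non-partition) column list. A minimal, dependency-free sketch of that index-mapping step; the class name and sample data below are illustrative, not Taier's:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ColumnIndexDemo {
    public static void main(String[] args) {
        // Full (non-partition) column list as returned by the metadata client, in table order.
        List<String> fullColumnNames = Arrays.asList("id", "name", "age");

        // User-configured columns, each represented as a map with at least a "key" entry.
        List<Map<String, Object>> column = new ArrayList<>();
        column.add(new HashMap<>(Collections.singletonMap("key", "age")));
        column.add(new HashMap<>(Collections.singletonMap("key", "id")));

        // Same idea as the final loop in inferHdfsParams: record each configured
        // column's position within the table's column list.
        for (int i = 0; i < fullColumnNames.size(); i++) {
            for (Map<String, Object> col : column) {
                if (fullColumnNames.get(i).equals(col.get("key"))) {
                    col.put("index", i);
                    break;
                }
            }
        }
        System.out.println(column); // e.g. [{key=age, index=2}, {key=id, index=0}]
    }
}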
Use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
The class RDBReader, method toReaderJson.
@Override
public JSONObject toReaderJson() {
    JSONObject parameter = new JSONObject(true);
    parameter.put("username", this.getUsername());
    parameter.put("password", this.getPassword());
    parameter.put("splitPk", this.getSplitPK());
    parameter.put("where", this.getWhere());
    parameter.put("column", ColumnUtil.getColumns(this.getColumn(), PluginName.MySQLD_R));
    if (this.getJdbcUrl() != null) {
        JSONObject conn = new JSONObject(true);
        conn.put("jdbcUrl", Arrays.asList(this.getJdbcUrl()));
        conn.put("password", this.getPassword());
        conn.put("username", this.getUsername());
        conn.put("table", this.getTable());
        conn.put("sourceId", this.getSourceId());
        conn.put("schema", isNeedSchema());
        connections = new JSONArray();
        connections.add(conn);
    }
    if (connections != null && connections.size() > 0) {
        Map<String, Object> conn = (Map) connections.get(0);
        this.setJdbcUrl(((List<String>) conn.get("jdbcUrl")).get(0));
        String pass = Objects.isNull(conn.get("password")) ? "" : conn.get("password").toString();
        this.setPassword(pass);
        this.setUsername(MapUtils.getString(conn, "username"));
        if (conn.get("table") instanceof String) {
            this.setTable(Arrays.asList((String) conn.get("table")));
        } else {
            this.setTable((List<String>) conn.get("table"));
        }
    }
    boolean isMultiTable = (CollectionUtils.isNotEmpty(this.getConnections()) && this.getConnections().size() > 1) || (CollectionUtils.isNotEmpty(this.getTable()) && this.getTable().size() > 1);
    // Incremental sync configuration
    parameter.put("increColumn", Optional.ofNullable(this.getIncreColumn()).orElse(""));
    parameter.put("startLocation", "");
    parameter.put("connection", connections);
    JSONObject reader = new JSONObject(true);
    DataSourceType dataSourceType = DataSourceType.getSourceType(getType());
    switch (dataSourceType) {
        case MySQL:
        case MySQL8:
        case MySQLPXC:
        case TiDB:
            if (isMultiTable) {
                reader.put("name", PluginName.MySQLD_R);
            } else {
                reader.put("name", PluginName.MySQL_R);
            }
            break;
        case Clickhouse:
            reader.put("name", PluginName.Clickhouse_R);
            break;
        case Polardb_For_MySQL:
            reader.put("name", PluginName.Polardb_for_MySQL_R);
            break;
        case Oracle:
            reader.put("name", PluginName.Oracle_R);
            break;
        case SQLServer:
            reader.put("name", PluginName.SQLServer_R);
            break;
        case HIVE:
        case HIVE3X:
        case HIVE1X:
            reader.put("name", PluginName.Hive_R);
            break;
        case PostgreSQL:
        case LIBRA:
            reader.put("name", PluginName.PostgreSQL_R);
            break;
        case DB2:
            reader.put("name", PluginName.DB2_R);
            break;
        case GBase_8a:
            reader.put("name", PluginName.GBase_R);
            break;
        case Phoenix:
            reader.put("name", PluginName.Phoenix_R);
            break;
        case PHOENIX5:
            reader.put("name", PluginName.Phoenix5_R);
            break;
        case DMDB:
            reader.put("name", PluginName.DM_R);
            break;
        case GREENPLUM6:
            reader.put("name", PluginName.GREENPLUM_R);
            break;
        case KINGBASE8:
            reader.put("name", PluginName.KINGBASE_R);
            break;
        case ADB_FOR_PG:
            reader.put("name", PluginName.ADB_FOR_PG_R);
            break;
        default:
            throw new RdosDefineException(ErrorCode.CAN_NOT_FITABLE_SOURCE_TYPE);
    }
    parameter.put("customSql", Optional.ofNullable(getCustomSql()).orElse(""));
    parameter.put("sourceIds", getSourceIds());
    parameter.putAll(super.getExtralConfigMap());
    reader.put("parameter", parameter);
    return reader;
}
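For the MySQL family, the reader plugin switches to a distributed reader when more than one connection or more than one table is configured. A simplified, self-contained sketch of that selection rule; the plugin-name strings are illustrative placeholders, not the project's actual PluginName constants:

import java.util.List;

public class ReaderPluginChoice {

    // Illustrative placeholder values; the real names live in Taier's PluginName class.
    static final String MYSQL_READER = "mysqlreader";
    static final String MYSQL_DISTRIBUTED_READER = "mysqldreader";

    static String chooseMySqlReader(List<?> connections, List<String> tables) {
        // Mirrors the isMultiTable decision in toReaderJson: more than one
        // connection or more than one table switches to the distributed reader.
        boolean isMultiTable = (connections != null && connections.size() > 1)
                || (tables != null && tables.size() > 1);
        return isMultiTable ? MYSQL_DISTRIBUTED_READER : MYSQL_READER;
    }

    public static void main(String[] args) {
        System.out.println(chooseMySqlReader(null, List.of("orders")));          // mysqlreader
        System.out.println(chooseMySqlReader(null, List.of("orders", "users"))); // mysqldreader
    }
}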
Use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
The class RDBWriter, method toWriterJson.
@Override
public JSONObject toWriterJson() {
    JSONObject connection = new JSONObject(true);
    connection.put("jdbcUrl", this.getJdbcUrl());
    connection.put("table", this.getTable());
    connection.put("schema", isNeedSchema());
    JSONObject parameter = new JSONObject(true);
    parameter.put("username", this.getUsername());
    parameter.put("password", this.getPassword());
    parameter.put("connection", Lists.newArrayList(connection));
    parameter.put("session", StringUtils.isNotBlank(this.getSession()) ? Lists.newArrayList(this.getSession()) : Lists.newArrayList());
    parameter.put("preSql", StringUtils.isNotBlank(this.getPreSql()) ? Lists.newArrayList(this.getPreSql().trim().split(";")) : Lists.newArrayList());
    parameter.put("postSql", StringUtils.isNotBlank(this.getPostSql()) ? Lists.newArrayList(this.getPostSql().trim().split(";")) : Lists.newArrayList());
    parameter.put("writeMode", this.getWriteMode());
    parameter.put("column", ColumnUtil.getColumns(this.getColumn(), PluginName.MySQLD_R));
    parameter.put("sourceIds", getSourceIds());
    parameter.putAll(super.getExtralConfigMap());
    JSONObject writer = new JSONObject(true);
    DataSourceType dataSourceType = DataSourceType.getSourceType(getType());
    switch (dataSourceType) {
        case MySQL:
        case MySQL8:
        case MySQLPXC:
        case TiDB:
            writer.put("name", PluginName.MySQL_W);
            break;
        case Clickhouse:
writer.put("name", PluginName.Clichhouse_W);
            break;
        case Polardb_For_MySQL:
            writer.put("name", PluginName.Polardb_for_MySQL_W);
            break;
        case Oracle:
            writer.put("name", PluginName.Oracle_W);
            break;
        case SQLServer:
            writer.put("name", PluginName.SQLServer_W);
            break;
        case HIVE:
        case HIVE1X:
            writer.put("name", PluginName.Hive_W);
            break;
        case PostgreSQL:
        case LIBRA:
            writer.put("name", PluginName.PostgreSQL_W);
            break;
        case DB2:
            writer.put("name", PluginName.DB2_W);
            break;
        case GBase_8a:
            writer.put("name", PluginName.GBase_W);
            break;
        case Phoenix:
            writer.put("name", PluginName.Phoenix_W);
            // Special handling of the write mode: 200302_3.10_beta2 only supports upsert
            parameter.put("writeMode", StringUtils.isBlank(this.getWriteMode()) ? UPSERT : this.getWriteMode());
            break;
        case PHOENIX5:
            writer.put("name", PluginName.Phoenix5_W);
            parameter.put("writeMode", StringUtils.isBlank(this.getWriteMode()) ? UPSERT : this.getWriteMode());
            break;
        case DMDB:
            writer.put("name", PluginName.DM_W);
            break;
        case GREENPLUM6:
            writer.put("name", PluginName.GREENPLUM_W);
            break;
        case KINGBASE8:
            writer.put("name", PluginName.KINGBASE_W);
            break;
        case INCEPTOR:
            writer.put("name", PluginName.INCEPTOR_W);
            break;
        case ADB_FOR_PG:
            writer.put("name", PluginName.ADB_FOR_PG_W);
            break;
        default:
            throw new RdosDefineException(ErrorCode.CAN_NOT_FITABLE_SOURCE_TYPE);
    }
    writer.put("parameter", parameter);
    return writer;
}
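preSql and postSql arrive as single strings and are split on ';' so each statement becomes one element of the resulting list. A minimal plain-Java sketch of that behavior, with an assumed example input and no fastjson/Guava dependency:

import java.util.Arrays;
import java.util.List;

public class PrePostSqlSplitDemo {
    public static void main(String[] args) {
        String preSql = "truncate table t_stage; insert into t_stage select * from t_src";
        // Mirrors RDBWriter.toWriterJson: trim the whole string, then split on ';'
        // so each statement becomes one list element.
        List<String> statements = Arrays.asList(preSql.trim().split(";"));
        for (String s : statements) {
            System.out.println("[" + s.trim() + "]"); // trim here only for readable output
        }
    }
}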
Use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
The class DatasourceService, method executeOnSpecifySourceWithOutResult.
/**
 * @param sourceId     data source id
 * @param sql          the assembled SQL to execute
 * @param targetSchema only used as an input for Doris; other source types need not pass it
 */
private void executeOnSpecifySourceWithOutResult(Long sourceId, String sql, String targetSchema) {
    BatchDataSource source = getOne(sourceId);
    DataSourceType dataSourceType = DataSourceType.getSourceType(source.getType());
    if (!SUPPORT_CREATE_TABLE_DATASOURCES.contains(dataSourceType)) {
        throw new RdosDefineException(String.format("Only %s data source tables can be created", SUPPORT_CREATE_TABLE_DATASOURCES_NAMES));
    }
    JSONObject json = JSON.parseObject(source.getDataJson());
    try {
        Map<String, Object> kerberosConfig = fillKerberosConfig(sourceId);
        Map<String, Object> expandConfigPrepare = expandConfigPrepare(sourceId);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, expandConfigPrepare);
        IClient iClient = ClientCache.getClient(dataSourceType.getVal());
        Connection con = iClient.getCon(sourceDTO);
        DBUtil.executeSqlWithoutResultSet(con, sql, false);
    } catch (Exception e) {
        throw new RdosDefineException(e.getMessage() + ". Executed sql = " + sql, e);
    }
}
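Before any connection is opened, the method rejects source types outside a whitelist of types that support table creation. A simplified, dependency-free sketch of that guard; the enum and set below are illustrative stand-ins for DataSourceType and SUPPORT_CREATE_TABLE_DATASOURCES, not the real classes:

import java.util.EnumSet;
import java.util.Set;

public class CreateTableGuardDemo {

    // Stand-in for com.dtstack.dtcenter.loader.source.DataSourceType.
    enum SourceType { MySQL, Oracle, HIVE, DORIS }

    // Stand-in for SUPPORT_CREATE_TABLE_DATASOURCES; membership is illustrative only.
    static final Set<SourceType> SUPPORTED = EnumSet.of(SourceType.MySQL, SourceType.HIVE, SourceType.DORIS);

    static void checkSupported(SourceType type) {
        if (!SUPPORTED.contains(type)) {
            throw new IllegalArgumentException(
                    String.format("Only %s data source tables can be created", SUPPORTED));
        }
    }

    public static void main(String[] args) {
        checkSupported(SourceType.HIVE);   // passes
        checkSupported(SourceType.Oracle); // throws IllegalArgumentException
    }
}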
Use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
The class HadoopDataDownloadService, method queryDataFromTempTable.
/**
 * Query table data from a temporary table.
 *
 * @param tenantId  tenant id
 * @param tableName temporary table name
 * @param db        database the temporary table belongs to
 * @return the column-name row followed by the data rows
 * @throws Exception
 */
public List<Object> queryDataFromTempTable(Long tenantId, String tableName, String db) throws Exception {
    DataSourceType dataSourceType = datasourceService.getHadoopDefaultDataSourceByTenantId(tenantId);
    IDownload downloader = getDownloader(tenantId, tableName, db, dataSourceType.getVal());
    List<Object> result = new ArrayList<>();
    List<String> alias = downloader.getMetaInfo();
    result.add(alias);
    JdbcInfo jdbcInfo = Engine2DTOService.getJdbcInfo(tenantId, null, DataSourceTypeJobTypeMapping.getTaskTypeByDataSourceType(dataSourceType.getVal()));
    int readCounter = 0;
    // Read rows until the downloader is exhausted or the configured row limit is hit.
    while (!downloader.reachedEnd() && readCounter < jdbcInfo.getMaxRows()) {
        List<String> row = (List<String>) downloader.readNext();
        result.add(row);
        readCounter++;
    }
    return result;
}
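The read loop stops as soon as the downloader is exhausted or the per-query row limit (jdbcInfo.getMaxRows()) is reached. A dependency-free sketch of that bounded-read pattern; the RowSource interface below is a simplified assumption, not the project's actual IDownload API:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class BoundedReadDemo {

    /** Simplified stand-in for the project's IDownload reader; not the real interface. */
    interface RowSource {
        boolean reachedEnd();
        List<String> readNext();
    }

    static List<Object> readRows(RowSource source, List<String> header, int maxRows) {
        List<Object> result = new ArrayList<>();
        result.add(header);                 // first element is the column-name row
        int readCounter = 0;
        // Stop when either the source is exhausted or the row limit is reached,
        // mirroring the loop in queryDataFromTempTable.
        while (!source.reachedEnd() && readCounter < maxRows) {
            result.add(source.readNext());
            readCounter++;
        }
        return result;
    }

    public static void main(String[] args) {
        Iterator<List<String>> rows = List.of(
                List.of("1", "a"), List.of("2", "b"), List.of("3", "c")).iterator();
        RowSource source = new RowSource() {
            public boolean reachedEnd() { return !rows.hasNext(); }
            public List<String> readNext() { return rows.next(); }
        };
        System.out.println(readRows(source, List.of("id", "name"), 2)); // header plus the first 2 rows
    }
}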