Example 1 with DataBaseType

Use of com.dtstack.dtcenter.loader.source.DataBaseType in project Taier by DTStack.

From class DatasourceService, the method replaceJdbcInfoByDataJsonToMap:

/**
 * Replaces the JDBC connection info in the parameter map according to dataJson.
 *
 * @param map        target parameter map to fill
 * @param sourceId   id of the data source
 * @param source     the data source record
 * @param tenantId   id of the tenant
 * @param json       dataJson carrying the connection configuration
 * @param sourceType data source type value, see DataSourceType
 * @throws Exception
 */
private void replaceJdbcInfoByDataJsonToMap(Map<String, Object> map, Long sourceId, BatchDataSource source, Long tenantId, JSONObject json, Integer sourceType) throws Exception {
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
            && !DataSourceType.HIVE.getVal().equals(sourceType)
            && !DataSourceType.HIVE3X.getVal().equals(sourceType)
            && !DataSourceType.HIVE1X.getVal().equals(sourceType)
            && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)
            && !DataSourceType.IMPALA.getVal().equals(sourceType)
            && !DataSourceType.CarbonData.getVal().equals(sourceType)
            && !DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
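        // Plain RDBMS source (anything relational except the Hive family,
        // Impala, CarbonData and Inceptor): only JDBC url and credentials.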
        map.put("type", sourceType);
        map.put("password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD));
        map.put("username", JsonUtils.getStrFromJson(json, JDBC_USERNAME));
        map.put("jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL));
        processTable(map);
    } else if (DataSourceType.HIVE.getVal().equals(sourceType)
            || DataSourceType.HIVE3X.getVal().equals(sourceType)
            || DataSourceType.HIVE1X.getVal().equals(sourceType)
            || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
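        // Hive family / SparkThrift: JDBC info plus partition, defaultFS and
        // Hadoop config; default sources take the tenant-level SFTP config.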
        map.put("isDefaultSource", 1 == source.getIsDefault());
        map.put("type", sourceType);
        map.put("password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD));
        map.put("username", JsonUtils.getStrFromJson(json, JDBC_USERNAME));
        map.put("jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL));
        map.put("partition", map.get(HIVE_PARTITION));
        map.put("defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS));
        this.checkLastHadoopConfig(map, json);
        if (1 == source.getIsDefault()) {
            setDefaultHadoopSftpConfig(json, tenantId, map);
        } else {
            setSftpConfig(sourceId, json, tenantId, map, HADOOP_CONFIG);
        }
    } else if (DataSourceType.HDFS.getVal().equals(sourceType)) {
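        // HDFS: no JDBC info, just defaultFS and the Hadoop config over SFTP.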
        map.put("defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS));
        this.checkLastHadoopConfig(map, json);
        setSftpConfig(sourceId, json, tenantId, map, HADOOP_CONFIG);
    } else if (DataSourceType.HBASE.getVal().equals(sourceType)) {
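        // HBase: parse the hbaseConfig JSON string into a map, then attach
        // the SFTP config under the same key.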
        String jsonStr = json.getString(HBASE_CONFIG);
        Map<String, Object> jsonMap = new HashMap<>();
        if (StringUtils.isNotEmpty(jsonStr)) {
            jsonMap = objectMapper.readValue(jsonStr, Map.class);
        }
        map.put("hbaseConfig", jsonMap);
        setSftpConfig(sourceId, json, tenantId, map, "hbaseConfig");
    } else if (DataSourceType.FTP.getVal().equals(sourceType)) {
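        // FTP: every field in dataJson is a connection parameter, copy them all.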
        map.putAll(json);
    } else if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
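        // MaxCompute: access key pair plus project and endpoint.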
        map.put("accessId", json.get("accessId"));
        map.put("accessKey", json.get("accessKey"));
        map.put("project", json.get("project"));
        map.put("endPoint", json.get("endPoint"));
    } else if (DataSourceType.ES.getVal().equals(sourceType)) {
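        // Elasticsearch: cluster address and optional credentials.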
        map.put("address", json.get("address"));
        map.put("username", JsonUtils.getStrFromJson(json, "username"));
        map.put("password", JsonUtils.getStrFromJson(json, "password"));
    } else if (DataSourceType.REDIS.getVal().equals(sourceType)) {
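        // Redis: value type fixed to "string"; database index is numeric.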
        map.put("type", "string");
        map.put("hostPort", JsonUtils.getStrFromJson(json, "hostPort"));
        map.put("database", json.getIntValue("database"));
        map.put("password", JsonUtils.getStrFromJson(json, "password"));
    } else if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
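        // MongoDB: host:port list, database and credentials.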
        map.put(JDBC_HOSTPORTS, JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS));
        map.put("username", JsonUtils.getStrFromJson(json, "username"));
        map.put("database", JsonUtils.getStrFromJson(json, "database"));
        map.put("password", JsonUtils.getStrFromJson(json, "password"));
    } else if (DataSourceType.AWS_S3.getVal().equals(sourceType)) {
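        // AWS S3: key pair and region.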
        map.put("accessKey", JsonUtils.getStrFromJson(json, "accessKey"));
        map.put("secretKey", JsonUtils.getStrFromJson(json, "secretKey"));
        map.put("region", JsonUtils.getStrFromJson(json, "region"));
    } else if (DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
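        // Inceptor: "type" here is a DataBaseType enum value (the usage this
        // example demonstrates), plus Hive metastore uris and Hadoop config.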
        DataBaseType dataBaseType = DataSourceDataBaseType.getBaseTypeBySourceType(sourceType);
        map.put("type", dataBaseType);
        map.put("password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD));
        map.put("username", JsonUtils.getStrFromJson(json, JDBC_USERNAME));
        map.put("jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL));
        map.put("partition", map.get(HIVE_PARTITION));
        map.put("defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS));
        map.put("hiveMetastoreUris", JsonUtils.getStrFromJson(json, HIVE_METASTORE_URIS));
        checkLastHadoopConfig(map, json);
        setSftpConfig(sourceId, json, tenantId, map, "hadoopConfig");
    } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
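        // InfluxDB: url and credentials.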
        map.put("username", JsonUtils.getStrFromJson(json, "username"));
        map.put("password", JsonUtils.getStrFromJson(json, "password"));
        map.put("url", JsonUtils.getStrFromJson(json, "url"));
    }
}
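The method above is one long else-if dispatch on sourceType. A minimal, self-contained sketch of a table-driven alternative, not taken from the Taier codebase (the handler map, the plain-Map stand-in for the JSONObject, and the hard-coded type value 11 are illustrative assumptions; the real key would come from DataSourceType):

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

public class SourceParamDispatch {

    // Hypothetical handler table: each entry copies fields from the parsed
    // dataJson (modeled here as a Map) into the target parameter map.
    private static final Map<Integer, BiConsumer<Map<String, Object>, Map<String, Object>>> WRITERS = new HashMap<>();

    static {
        // Example entry for Elasticsearch; 11 is a placeholder for
        // DataSourceType.ES.getVal(), hard-coded to keep the sketch standalone.
        WRITERS.put(11, (map, json) -> {
            map.put("address", json.get("address"));
            map.put("username", json.get("username"));
            map.put("password", json.get("password"));
        });
    }

    static void replaceJdbcInfo(Map<String, Object> map, Map<String, Object> json, Integer sourceType) {
        BiConsumer<Map<String, Object>, Map<String, Object>> writer = WRITERS.get(sourceType);
        if (writer != null) {
            writer.accept(map, json); // unknown types fall through silently, like the original
        }
    }

    public static void main(String[] args) {
        Map<String, Object> json = new HashMap<>();
        json.put("address", "localhost:9200");
        json.put("username", "elastic");
        json.put("password", "secret");
        Map<String, Object> map = new HashMap<>();
        replaceJdbcInfo(map, json, 11);
        System.out.println(map); // prints the three copied entries (iteration order unspecified)
    }
}

New source types then become table entries rather than extra branches; whether that is worth it depends on how much per-branch logic (like the SFTP handling above) resists being expressed as a simple handler.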
Also used : LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) Map(java.util.Map) DataBaseType(com.dtstack.dtcenter.loader.source.DataBaseType) DataSourceDataBaseType(com.dtstack.taier.develop.enums.develop.DataSourceDataBaseType)

Aggregations

DataBaseType (com.dtstack.dtcenter.loader.source.DataBaseType): 1 usage
DataSourceDataBaseType (com.dtstack.taier.develop.enums.develop.DataSourceDataBaseType): 1 usage
HashMap (java.util.HashMap): 1 usage
LinkedHashMap (java.util.LinkedHashMap): 1 usage
Map (java.util.Map): 1 usage