Search in sources :

Example 1 with RDBBase

use of com.dtstack.taier.develop.utils.develop.sync.template.RDBBase in project Taier by DTStack.

From the class DatasourceService, method syncReaderBuild:

/**
 * Builds the data-sync Reader template that matches the given source type.
 * <p>
 * Generic RDBMS sources share {@link RDBReader}; the Hive family, CarbonData and
 * Impala are RDBMS-typed as well but need dedicated readers, so they are excluded
 * from the generic branch and handled individually below.
 *
 * @param sourceType data source type code (see {@code DataSourceType})
 * @param sourceMap  raw reader configuration; converted into the concrete reader template
 * @param sourceIds  ids of the backing data sources (attached to RDB/ODPS readers)
 * @return the reader template for the source type
 * @throws IOException           if the map-to-template conversion fails
 * @throws RdosDefineException   if the source type is not supported as a sync source
 */
private Reader syncReaderBuild(final Integer sourceType, final Map<String, Object> sourceMap, final List<Long> sourceIds) throws IOException {
    Reader reader = null;
    // Generic RDBMS branch: exclude the Hive family, CarbonData and Impala,
    // which are RDBMS-typed but use dedicated readers below.
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
            && !DataSourceType.HIVE.getVal().equals(sourceType)
            && !DataSourceType.HIVE1X.getVal().equals(sourceType)
            && !DataSourceType.HIVE3X.getVal().equals(sourceType)
            && !DataSourceType.CarbonData.getVal().equals(sourceType)
            && !DataSourceType.IMPALA.getVal().equals(sourceType)
            && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, RDBReader.class);
        ((RDBBase) reader).setSourceIds(sourceIds);
        return reader;
    }
    if (DataSourceType.HDFS.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HDFSReader.class);
    }
    // All Hive-protocol variants (including SparkThrift) share HiveReader.
    if (DataSourceType.HIVE.getVal().equals(sourceType)
            || DataSourceType.HIVE3X.getVal().equals(sourceType)
            || DataSourceType.HIVE1X.getVal().equals(sourceType)
            || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HiveReader.class);
    }
    if (DataSourceType.HBASE.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HBaseReader.class);
    }
    if (DataSourceType.FTP.getVal().equals(sourceType)) {
        FtpReader ftpReader = PublicUtil.objectToObject(sourceMap, FtpReader.class);
        // Boolean.TRUE.equals(...) is null-safe: a missing or null flag means "no header row".
        ftpReader.setFirstLineHeader(Boolean.TRUE.equals(sourceMap.get("isFirstLineHeader")));
        return ftpReader;
    }
    if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, OdpsReader.class);
        // ODPS readers carry a single source id; callers pass exactly one for MaxCompute.
        ((OdpsBase) reader).setSourceId(sourceIds.get(0));
        return reader;
    }
    if (DataSourceType.ES.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, EsReader.class);
    }
    if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, MongoDbReader.class);
    }
    if (DataSourceType.CarbonData.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, CarbonDataReader.class);
    }
    if (DataSourceType.Kudu.getVal().equals(sourceType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.Kudu.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, InfluxDBReader.class);
    }
    if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
        // setSftpConf folds sftpConf into hadoopConfig; lift it back to the top
        // level so the Impala sync builder can read it directly.
        // (The original also re-put hadoopConfig with its own value — a no-op, removed.)
        Object impalaConfig = sourceMap.get(HADOOP_CONFIG);
        if (impalaConfig instanceof Map) {
            sourceMap.put("sftpConf", ((Map) impalaConfig).get("sftpConf"));
        }
        return syncBuilderFactory.getSyncBuilder(DataSourceType.IMPALA.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.AWS_S3.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, AwsS3Reader.class);
    }
    throw new RdosDefineException("暂不支持" + DataSourceType.getSourceType(sourceType).name() + "作为数据同步的源");
}
Also used : RDBReader(com.dtstack.taier.develop.utils.develop.sync.template.RDBReader) OdpsReader(com.dtstack.taier.develop.utils.develop.sync.template.OdpsReader) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) FtpReader(com.dtstack.taier.develop.utils.develop.sync.template.FtpReader) OdpsBase(com.dtstack.taier.develop.utils.develop.sync.template.OdpsBase) HBaseReader(com.dtstack.taier.develop.utils.develop.sync.template.HBaseReader) AwsS3Reader(com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Reader) MongoDbReader(com.dtstack.taier.develop.utils.develop.sync.template.MongoDbReader) OdpsReader(com.dtstack.taier.develop.utils.develop.sync.template.OdpsReader) FtpReader(com.dtstack.taier.develop.utils.develop.sync.template.FtpReader) EsReader(com.dtstack.taier.develop.utils.develop.sync.template.EsReader) HiveReader(com.dtstack.taier.develop.utils.develop.sync.template.HiveReader) HDFSReader(com.dtstack.taier.develop.utils.develop.sync.template.HDFSReader) CarbonDataReader(com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataReader) Reader(com.dtstack.taier.develop.common.template.Reader) InfluxDBReader(com.dtstack.taier.develop.utils.develop.sync.template.InfluxDBReader) RDBReader(com.dtstack.taier.develop.utils.develop.sync.template.RDBReader) JSONObject(com.alibaba.fastjson.JSONObject) RDBBase(com.dtstack.taier.develop.utils.develop.sync.template.RDBBase) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap)

Example 2 with RDBBase

use of com.dtstack.taier.develop.utils.develop.sync.template.RDBBase in project Taier by DTStack.

From the class DatasourceService, method syncWriterBuild:

/**
 * Builds the data-sync Writer template that matches the given target type.
 * <p>
 * Generic RDBMS targets share {@link RDBWriter}; the Hive family, Impala,
 * CarbonData and Inceptor are RDBMS-typed as well but need dedicated writers,
 * so they are excluded from the generic branch and handled individually below.
 *
 * @param targetType data source type code of the sync target (see {@code DataSourceType})
 * @param targetIds  ids of the backing data sources (attached to RDB/ODPS writers)
 * @param targetMap  raw writer configuration; converted into the concrete writer template
 * @param reader     the already-built reader, forwarded to builders that need it (Kudu, Impala)
 * @return the writer template for the target type
 * @throws IOException         if the map-to-template conversion fails
 * @throws RdosDefineException if the target type is not supported as a sync target
 */
private Writer syncWriterBuild(final Integer targetType, final List<Long> targetIds, final Map<String, Object> targetMap, final Reader reader) throws IOException {
    Writer writer = null;
    // Generic RDBMS branch: exclude the Hive family, Impala, CarbonData and
    // Inceptor, which are RDBMS-typed but use dedicated writers below.
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(targetType))
            && !DataSourceType.HIVE.getVal().equals(targetType)
            && !DataSourceType.HIVE1X.getVal().equals(targetType)
            && !DataSourceType.HIVE3X.getVal().equals(targetType)
            && !DataSourceType.IMPALA.getVal().equals(targetType)
            && !DataSourceType.CarbonData.getVal().equals(targetType)
            && !DataSourceType.SparkThrift2_1.getVal().equals(targetType)
            && !DataSourceType.INCEPTOR.getVal().equals(targetType)) {
        writer = PublicUtil.objectToObject(targetMap, RDBWriter.class);
        ((RDBBase) writer).setSourceIds(targetIds);
        return writer;
    }
    if (DataSourceType.HDFS.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, HDFSWriter.class);
    }
    // All Hive-protocol variants (including SparkThrift) share HiveWriter.
    if (DataSourceType.HIVE.getVal().equals(targetType)
            || DataSourceType.HIVE3X.getVal().equals(targetType)
            || DataSourceType.HIVE1X.getVal().equals(targetType)
            || DataSourceType.SparkThrift2_1.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, HiveWriter.class);
    }
    if (DataSourceType.FTP.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, FtpWriter.class);
    }
    if (DataSourceType.ES.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, EsWriter.class);
    }
    if (DataSourceType.HBASE.getVal().equals(targetType)) {
        // (The original re-put "hbaseConfig" with its own value — a no-op, removed.)
        HBaseWriter hbaseWriter = PublicUtil.objectToObject(targetMap, HBaseWriter.class);
        // Collect the source-side column names ("key" entries) so the writer
        // knows which upstream columns feed the HBase cells.
        List<String> sourceColNames = new ArrayList<>();
        List<Map<String, String>> columnList = (List<Map<String, String>>) targetMap.get("column");
        // Guard against a missing "column" entry instead of throwing an NPE.
        if (columnList != null) {
            for (Map<String, String> column : columnList) {
                if (column.containsKey("key")) {
                    sourceColNames.add(column.get("key"));
                }
            }
        }
        hbaseWriter.setSrcColumns(sourceColNames);
        return hbaseWriter;
    }
    if (DataSourceType.MAXCOMPUTE.getVal().equals(targetType)) {
        writer = PublicUtil.objectToObject(targetMap, OdpsWriter.class);
        // ODPS writers carry a single source id; callers pass exactly one for MaxCompute.
        ((OdpsBase) writer).setSourceId(targetIds.get(0));
        return writer;
    }
    if (DataSourceType.REDIS.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, RedisWriter.class);
    }
    if (DataSourceType.MONGODB.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, MongoDbWriter.class);
    }
    if (DataSourceType.CarbonData.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, CarbonDataWriter.class);
    }
    if (DataSourceType.Kudu.getVal().equals(targetType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.Kudu.getVal()).syncWriterBuild(targetIds, targetMap, reader);
    }
    if (DataSourceType.IMPALA.getVal().equals(targetType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.IMPALA.getVal()).syncWriterBuild(targetIds, targetMap, reader);
    }
    if (DataSourceType.AWS_S3.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, AwsS3Writer.class);
    }
    if (DataSourceType.INCEPTOR.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, InceptorWriter.class);
    }
    throw new RdosDefineException("暂不支持" + DataSourceType.getSourceType(targetType).name() + "作为数据同步的目标");
}
Also used : RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) RDBWriter(com.dtstack.taier.develop.utils.develop.sync.template.RDBWriter) ArrayList(java.util.ArrayList) OdpsWriter(com.dtstack.taier.develop.utils.develop.sync.template.OdpsWriter) RDBBase(com.dtstack.taier.develop.utils.develop.sync.template.RDBBase) HBaseWriter(com.dtstack.taier.develop.utils.develop.sync.template.HBaseWriter) OdpsBase(com.dtstack.taier.develop.utils.develop.sync.template.OdpsBase) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) HDFSWriter(com.dtstack.taier.develop.utils.develop.sync.template.HDFSWriter) CarbonDataWriter(com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataWriter) EsWriter(com.dtstack.taier.develop.utils.develop.sync.template.EsWriter) OdpsWriter(com.dtstack.taier.develop.utils.develop.sync.template.OdpsWriter) HiveWriter(com.dtstack.taier.develop.utils.develop.sync.template.HiveWriter) RedisWriter(com.dtstack.taier.develop.utils.develop.sync.template.RedisWriter) AwsS3Writer(com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Writer) HBaseWriter(com.dtstack.taier.develop.utils.develop.sync.template.HBaseWriter) RDBWriter(com.dtstack.taier.develop.utils.develop.sync.template.RDBWriter) InceptorWriter(com.dtstack.taier.develop.utils.develop.sync.template.InceptorWriter) MongoDbWriter(com.dtstack.taier.develop.utils.develop.sync.template.MongoDbWriter) FtpWriter(com.dtstack.taier.develop.utils.develop.sync.template.FtpWriter) Writer(com.dtstack.taier.develop.common.template.Writer)

Aggregations

RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException)2 OdpsBase (com.dtstack.taier.develop.utils.develop.sync.template.OdpsBase)2 RDBBase (com.dtstack.taier.develop.utils.develop.sync.template.RDBBase)2 HashMap (java.util.HashMap)2 LinkedHashMap (java.util.LinkedHashMap)2 Map (java.util.Map)2 JSONObject (com.alibaba.fastjson.JSONObject)1 Reader (com.dtstack.taier.develop.common.template.Reader)1 Writer (com.dtstack.taier.develop.common.template.Writer)1 AwsS3Reader (com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Reader)1 AwsS3Writer (com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Writer)1 CarbonDataReader (com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataReader)1 CarbonDataWriter (com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataWriter)1 EsReader (com.dtstack.taier.develop.utils.develop.sync.template.EsReader)1 EsWriter (com.dtstack.taier.develop.utils.develop.sync.template.EsWriter)1 FtpReader (com.dtstack.taier.develop.utils.develop.sync.template.FtpReader)1 FtpWriter (com.dtstack.taier.develop.utils.develop.sync.template.FtpWriter)1 HBaseReader (com.dtstack.taier.develop.utils.develop.sync.template.HBaseReader)1 HBaseWriter (com.dtstack.taier.develop.utils.develop.sync.template.HBaseWriter)1 HDFSReader (com.dtstack.taier.develop.utils.develop.sync.template.HDFSReader)1