Example usage of com.dtstack.taier.develop.utils.develop.sync.template.HBaseWriter in the Taier project by DTStack: the syncWriterBuild method of the DatasourceService class.
/**
 * Builds the sync-job {@code Writer} for the given target data source type.
 * <p>
 * Dispatches on {@code targetType}: generic RDBMS targets get an {@link RDBWriter};
 * engines with specialized templates (HDFS, Hive, FTP, ES, HBase, ODPS, Redis,
 * MongoDB, CarbonData, Kudu, Impala, AWS S3, Inceptor) get their dedicated writer.
 *
 * @param targetType data source type code (see {@code DataSourceType})
 * @param targetIds  ids of the target data sources; first id is used for ODPS
 * @param targetMap  raw writer configuration; converted into the concrete writer bean
 * @param reader     already-built reader, forwarded to builders that need it (Kudu, Impala)
 * @return the concrete writer for the target type
 * @throws IOException         if bean conversion fails
 * @throws RdosDefineException if the target type is not supported as a sync target
 */
private Writer syncWriterBuild(final Integer targetType, final List<Long> targetIds, final Map<String, Object> targetMap, final Reader reader) throws IOException {
    Writer writer = null;
    // Generic RDBMS path: any RDBMS-family source EXCEPT the engines that have a
    // dedicated writer template handled by the branches below.
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(targetType))
            && !DataSourceType.HIVE.getVal().equals(targetType)
            && !DataSourceType.HIVE1X.getVal().equals(targetType)
            && !DataSourceType.HIVE3X.getVal().equals(targetType)
            && !DataSourceType.IMPALA.getVal().equals(targetType)
            && !DataSourceType.CarbonData.getVal().equals(targetType)
            && !DataSourceType.SparkThrift2_1.getVal().equals(targetType)
            && !DataSourceType.INCEPTOR.getVal().equals(targetType)) {
        writer = PublicUtil.objectToObject(targetMap, RDBWriter.class);
        ((RDBBase) writer).setSourceIds(targetIds);
        return writer;
    }
    if (DataSourceType.HDFS.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, HDFSWriter.class);
    }
    if (DataSourceType.HIVE.getVal().equals(targetType)
            || DataSourceType.HIVE3X.getVal().equals(targetType)
            || DataSourceType.HIVE1X.getVal().equals(targetType)
            || DataSourceType.SparkThrift2_1.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, HiveWriter.class);
    }
    if (DataSourceType.FTP.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, FtpWriter.class);
    }
    if (DataSourceType.ES.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, EsWriter.class);
    }
    if (DataSourceType.HBASE.getVal().equals(targetType)) {
        // NOTE: the original code re-inserted targetMap's own "hbaseConfig" value under the
        // same key — a no-op — so that statement has been removed.
        writer = PublicUtil.objectToObject(targetMap, HBaseWriter.class);
        HBaseWriter hbaseWriter = (HBaseWriter) writer;
        // Collect the source column names ("key" entries) for the HBase writer.
        // Guard against a missing "column" entry instead of throwing an NPE; an absent
        // list simply yields empty srcColumns.
        List<String> sourceColNames = new ArrayList<>();
        @SuppressWarnings("unchecked")
        List<Map<String, String>> columnList = (List<Map<String, String>>) targetMap.get("column");
        if (columnList != null) {
            for (Map<String, String> column : columnList) {
                if (column.containsKey("key")) {
                    sourceColNames.add(column.get("key"));
                }
            }
        }
        hbaseWriter.setSrcColumns(sourceColNames);
        return writer;
    }
    if (DataSourceType.MAXCOMPUTE.getVal().equals(targetType)) {
        writer = PublicUtil.objectToObject(targetMap, OdpsWriter.class);
        // ODPS writers bind to a single source — use the first target id.
        ((OdpsBase) writer).setSourceId(targetIds.get(0));
        return writer;
    }
    if (DataSourceType.REDIS.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, RedisWriter.class);
    }
    if (DataSourceType.MONGODB.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, MongoDbWriter.class);
    }
    if (DataSourceType.CarbonData.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, CarbonDataWriter.class);
    }
    // Kudu and Impala delegate to their dedicated sync builders, which also need the reader.
    if (DataSourceType.Kudu.getVal().equals(targetType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.Kudu.getVal()).syncWriterBuild(targetIds, targetMap, reader);
    }
    if (DataSourceType.IMPALA.getVal().equals(targetType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.IMPALA.getVal()).syncWriterBuild(targetIds, targetMap, reader);
    }
    if (DataSourceType.AWS_S3.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, AwsS3Writer.class);
    }
    if (DataSourceType.INCEPTOR.getVal().equals(targetType)) {
        return PublicUtil.objectToObject(targetMap, InceptorWriter.class);
    }
    throw new RdosDefineException("暂不支持" + DataSourceType.getSourceType(targetType).name() + "作为数据同步的目标");
}
Aggregations