Use of com.dtstack.taier.develop.utils.develop.sync.template.FtpReader in project Taier by DTStack.
From the class DatasourceService, method syncReaderBuild.
private Reader syncReaderBuild(final Integer sourceType, final Map<String, Object> sourceMap, final List<Long> sourceIds) throws IOException {
    Reader reader = null;
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
            && !DataSourceType.HIVE.getVal().equals(sourceType)
            && !DataSourceType.HIVE1X.getVal().equals(sourceType)
            && !DataSourceType.HIVE3X.getVal().equals(sourceType)
            && !DataSourceType.CarbonData.getVal().equals(sourceType)
            && !DataSourceType.IMPALA.getVal().equals(sourceType)
            && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, RDBReader.class);
        ((RDBBase) reader).setSourceIds(sourceIds);
        return reader;
    }
    if (DataSourceType.HDFS.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HDFSReader.class);
    }
    if (DataSourceType.HIVE.getVal().equals(sourceType)
            || DataSourceType.HIVE3X.getVal().equals(sourceType)
            || DataSourceType.HIVE1X.getVal().equals(sourceType)
            || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HiveReader.class);
    }
    if (DataSourceType.HBASE.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, HBaseReader.class);
    }
    if (DataSourceType.FTP.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, FtpReader.class);
        if (sourceMap.containsKey("isFirstLineHeader") && (Boolean) sourceMap.get("isFirstLineHeader")) {
            ((FtpReader) reader).setFirstLineHeader(true);
        } else {
            ((FtpReader) reader).setFirstLineHeader(false);
        }
        return reader;
    }
    if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
        reader = PublicUtil.objectToObject(sourceMap, OdpsReader.class);
        ((OdpsBase) reader).setSourceId(sourceIds.get(0));
        return reader;
    }
    if (DataSourceType.ES.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, EsReader.class);
    }
    if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, MongoDbReader.class);
    }
    if (DataSourceType.CarbonData.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, CarbonDataReader.class);
    }
    if (DataSourceType.Kudu.getVal().equals(sourceType)) {
        return syncBuilderFactory.getSyncBuilder(DataSourceType.Kudu.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, InfluxDBReader.class);
    }
    if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
        // hdfsConfig and sftpConf are both set when setSftpConf is called; copy sftpConf out of the hadoop config map
        if (sourceMap.containsKey(HADOOP_CONFIG)) {
            Object impalaConfig = sourceMap.get(HADOOP_CONFIG);
            if (impalaConfig instanceof Map) {
                sourceMap.put(HADOOP_CONFIG, impalaConfig);
                sourceMap.put("sftpConf", ((Map) impalaConfig).get("sftpConf"));
            }
        }
        return syncBuilderFactory.getSyncBuilder(DataSourceType.IMPALA.getVal()).syncReaderBuild(sourceMap, sourceIds);
    }
    if (DataSourceType.AWS_S3.getVal().equals(sourceType)) {
        return PublicUtil.objectToObject(sourceMap, AwsS3Reader.class);
    }
    // Message: "暂不支持<type>作为数据同步的源" — "<type> is not yet supported as a source for data sync"
    throw new RdosDefineException("暂不支持" + DataSourceType.getSourceType(sourceType).name() + "作为数据同步的源");
}
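For orientation, below is a minimal sketch of what the FTP branch above amounts to when fed a wizard-mode sourceMap. The key names ("path", "isFirstLineHeader"), the sample values, the wrapper class, and the package path used for PublicUtil are illustrative assumptions; only FtpReader, PublicUtil.objectToObject, and setFirstLineHeader come from the snippet itself.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import com.dtstack.taier.common.util.PublicUtil; // package path assumed for this sketch
import com.dtstack.taier.develop.utils.develop.sync.template.FtpReader;

public class FtpReaderBranchSketch {

    public static FtpReader buildFtpReader() throws IOException {
        // A wizard-mode sourceMap of the shape the FTP branch expects (key names assumed).
        Map<String, Object> sourceMap = new HashMap<>();
        sourceMap.put("path", "/upload/orders.csv");   // hypothetical FTP path
        sourceMap.put("isFirstLineHeader", true);      // first row holds column names

        // Same two steps as the branch above: convert the map into an FtpReader,
        // then copy the header flag onto the reader explicitly.
        FtpReader reader = (FtpReader) PublicUtil.objectToObject(sourceMap, FtpReader.class);
        reader.setFirstLineHeader(Boolean.TRUE.equals(sourceMap.get("isFirstLineHeader")));
        return reader;
    }
}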
Use of com.dtstack.taier.develop.utils.develop.sync.template.FtpReader in project Taier by DTStack.
From the class DatasourceService, method putDefaultEmptyValueForReader.
/**
 * Wizard mode: fill in the reader's default empty values.
 * @param sourceType data source type value (see DataSourceType#getVal)
 * @param reader the reader built for that source type
 * @return the reader with default empty values applied
 */
private Reader putDefaultEmptyValueForReader(int sourceType, Reader reader) {
    if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
            && DataSourceType.HIVE.getVal() != sourceType
            && DataSourceType.HIVE1X.getVal() != sourceType
            && DataSourceType.HIVE3X.getVal() != sourceType
            && DataSourceType.SparkThrift2_1.getVal() != sourceType
            && DataSourceType.CarbonData.getVal() != sourceType) {
        RDBReader rdbReader = (RDBReader) reader;
        rdbReader.setWhere("");
        rdbReader.setSplitPK("");
        return rdbReader;
    } else if (DataSourceType.ES.getVal() == sourceType) {
        EsReader esReader = (EsReader) reader;
        JSONObject obj = new JSONObject();
        obj.put("col", "");
        JSONObject query = new JSONObject();
        query.put("match", obj);
        esReader.setQuery(query);
        JSONObject column = new JSONObject();
        column.put("key", "col1");
        column.put("type", "string");
        esReader.getColumn().add(column);
        return esReader;
    } else if (DataSourceType.FTP.getVal() == sourceType) {
        FtpReader ftpReader = (FtpReader) reader;
        ftpReader.setPath("/");
        return ftpReader;
    } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
        InfluxDBReader influxDBReader = (InfluxDBReader) reader;
        influxDBReader.setWhere("");
        influxDBReader.setSplitPK("");
        return influxDBReader;
    }
    return reader;
}
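And a brief, hedged sketch of how the two private helpers above could compose when the wizard builds a reader; the wrapper name buildWizardReader is invented for this example and is not part of DatasourceService.

// Hypothetical composition of the two helpers shown above; both are private members
// of DatasourceService, and buildWizardReader is an invented name for illustration.
private Reader buildWizardReader(Integer sourceType, Map<String, Object> sourceMap, List<Long> sourceIds) throws IOException {
    // 1. Pick and populate the concrete Reader (an FtpReader for DataSourceType.FTP).
    Reader reader = syncReaderBuild(sourceType, sourceMap, sourceIds);
    // 2. Fill wizard-mode defaults: an FtpReader gets path "/", an RDBReader gets
    //    empty where/splitPK strings, an EsReader gets a placeholder query and column.
    return putDefaultEmptyValueForReader(sourceType, reader);
}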