use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class ImpalaSyncBuilder method syncWriterBuild.
@Override
public Writer syncWriterBuild(List<Long> targetIds, Map<String, Object> targetMap, Reader reader) {
    TableLocationType tableLocationType = TableLocationType.getTableLocationType((String) targetMap.get(TableLocationType.key()));
    if (tableLocationType == null) {
        throw new RdosDefineException("unsupported table storage type");
    }
    if (tableLocationType == TableLocationType.HIVE) {
        Map<String, Object> clone = new HashMap<>(targetMap);
        String writeMode = (String) clone.get("writeMode");
        writeMode = writeMode != null && writeMode.trim().length() != 0 ? SyncWriteMode.tranferHiveMode(writeMode) : SyncWriteMode.HIVE_OVERWRITE.getMode();
        clone.put("writeMode", writeMode);
        // set the hdfs index field on each column
        List column = (List) clone.get("column");
        List<Column> allColumns = (List<Column>) clone.get("allColumns");
        List<Column> partitionColumns = (List<Column>) clone.get("partitionColumns");
        Map<String, Column> allColumnsMap = allColumns.stream().collect(Collectors.toMap(Column::getName, item -> item));
        for (Object col : column) {
            String name = (String) ((Map<String, Object>) col).get("key");
            ((Map<String, Object>) col).put("index", allColumnsMap.get(name).getIndex());
        }
        // set fullColumnNames and fullColumnTypes, which are needed when recording dirty data;
        // partition columns must be excluded
        Set<String> partitionColumnNameSet = CollectionUtils.isEmpty(partitionColumns) ? new HashSet<>() : partitionColumns.stream().map(pColumn -> pColumn.getName()).collect(Collectors.toSet());
        List<String> fullColumnNames = new ArrayList<>();
        List<String> fullColumnTypes = new ArrayList<>();
        for (Column allColumn : allColumns) {
            if (!partitionColumnNameSet.contains(allColumn.getName())) {
                fullColumnNames.add(allColumn.getName());
                fullColumnTypes.add(allColumn.getType());
            }
        }
        clone.put("fullColumnNames", fullColumnNames);
        clone.put("fullColumnTypes", fullColumnTypes);
        String partition = (String) clone.get("partition");
        // the fileName logic follows HiveWriter
        String fileName = StringUtils.isNotEmpty(partition) ? partition : "";
        clone.put("fileName", fileName);
        return objToObject(clone, ImpalaHdfsWriter.class);
    } else if (tableLocationType == TableLocationType.KUDU) {
        KuduWriter kuduWriter = objToObject(targetMap, KuduWriter.class);
        String kuduTableName = (String) targetMap.get("kuduTableName");
        LOGGER.info("syncWriterBuild format impala kuduTableName :{} ", kuduTableName);
        kuduWriter.setTable(kuduTableName);
        return kuduWriter;
    }
    return null;
}
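The HIVE branch above mainly enriches a copy of targetMap before handing it to objToObject for conversion into an ImpalaHdfsWriter. The standalone sketch below replays the index assignment and the fullColumnNames/fullColumnTypes filtering on plain collections so the transformation is easier to follow; ToyColumn, the class name, and the sample data are illustrative stand-ins, not Taier's real Column model.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class ImpalaHiveTargetMapSketch {

    // Simplified stand-in for Taier's Column model (name/type/index only); not the real class.
    static class ToyColumn {
        final String name;
        final String type;
        final int index;
        ToyColumn(String name, String type, int index) {
            this.name = name;
            this.type = type;
            this.index = index;
        }
    }

    public static void main(String[] args) {
        // allColumns as the builder would see them, including one partition column ("pt").
        List<ToyColumn> allColumns = Arrays.asList(
                new ToyColumn("id", "int", 0),
                new ToyColumn("name", "string", 1),
                new ToyColumn("pt", "string", 2));
        Set<String> partitionNames = Collections.singleton("pt");

        // "column" entries arrive with only a key; the builder fills in the hdfs index.
        List<Map<String, Object>> column = new ArrayList<>();
        Map<String, Object> mapped = new LinkedHashMap<>();
        mapped.put("key", "name");
        column.add(mapped);

        Map<String, ToyColumn> byName = allColumns.stream()
                .collect(Collectors.toMap(c -> c.name, c -> c));
        for (Map<String, Object> col : column) {
            col.put("index", byName.get((String) col.get("key")).index);
        }

        // fullColumnNames / fullColumnTypes exclude partition columns, as in the HIVE branch above.
        List<String> fullColumnNames = new ArrayList<>();
        List<String> fullColumnTypes = new ArrayList<>();
        for (ToyColumn c : allColumns) {
            if (!partitionNames.contains(c.name)) {
                fullColumnNames.add(c.name);
                fullColumnTypes.add(c.type);
            }
        }
        System.out.println(column);          // [{key=name, index=1}]
        System.out.println(fullColumnNames); // [id, name]
        System.out.println(fullColumnTypes); // [int, string]
    }
}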
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class CarbonDataWriter method checkFormat.
@Override
public void checkFormat(JSONObject data) {
    JSONObject parameter = data.getJSONObject("parameter");
    if (parameter == null) {
        throw new RdosDefineException("parameter must not be empty");
    } else {
        String path = parameter.getString("path");
        if (StringUtils.isEmpty(path)) {
            throw new RdosDefineException("the table path of the target source must not be empty");
        }
        String table = parameter.getString("table");
        if (StringUtils.isEmpty(table)) {
            throw new RdosDefineException("the table name of the target source must not be empty");
        }
        String database = parameter.getString("database");
        if (StringUtils.isEmpty(database)) {
            throw new RdosDefineException("the database name of the target source must not be empty");
        }
        String writeMode = parameter.getString("writeMode");
        if (StringUtils.isEmpty(writeMode)) {
            throw new RdosDefineException("the write mode of the target source must not be empty");
        }
        JSONArray columnArray = parameter.getJSONArray("column");
        if (columnArray == null || columnArray.size() == 0) {
            throw new RdosDefineException("the column list of the target source must not be empty");
        }
        for (int i = 0; i < columnArray.size(); i++) {
            String obj = columnArray.getString(i);
            if (StringUtils.isEmpty(obj)) {
                throw new RdosDefineException("the column name format of the target source is invalid");
            }
        }
    }
}
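For reference, a minimal sketch of a configuration that passes every branch of checkFormat above, assuming fastjson's JSONObject and JSONArray as used in the method; the path, table, database, writeMode, and column values are invented for illustration.

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

public class CarbonDataWriterConfigSketch {
    public static void main(String[] args) {
        // Minimal "parameter" block that satisfies every check in checkFormat above.
        // All values are made up for illustration.
        JSONObject parameter = new JSONObject();
        parameter.put("path", "/user/hive/warehouse/demo_db.db/demo_table");
        parameter.put("table", "demo_table");
        parameter.put("database", "demo_db");
        parameter.put("writeMode", "append");

        JSONArray column = new JSONArray();
        column.add("id");
        column.add("name");
        parameter.put("column", column);

        JSONObject data = new JSONObject();
        data.put("parameter", parameter);
        // A CarbonDataWriter.checkFormat(data) call would accept this payload.
        System.out.println(data.toJSONString());
    }
}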
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class DefaultSetting method toSettingJson.
@Override
public JSONObject toSettingJson() {
    JSONObject setting = new JSONObject(true);
    if (this.isSaveDirty == 1) {
        JSONObject dirty = new JSONObject(true);
        if (StringUtils.isNotEmpty(this.path)) {
            dirty.put("path", this.path);
        }
        if (StringUtils.isNotEmpty(this.hadoopConfig)) {
            JSONObject hadoopConfig = new JSONObject();
            JSONObject otherConfig = JSONObject.parseObject(this.hadoopConfig);
            for (String key : otherConfig.keySet()) {
                hadoopConfig.put(key, otherConfig.getString(key));
            }
            dirty.put("hadoopConfig", hadoopConfig);
        }
        setting.put("dirty", dirty);
    }
    JSONObject speed = new JSONObject(true);
    if (this.getChannel() != null) {
        speed.put("channel", this.getChannel());
    }
    if (this.getSpeed() != null) {
        if (this.getSpeed() < 0) {
            speed.put("bytes", 0);
        } else {
            speed.put("bytes", (long) (this.getSpeed() * 1024 * 1024));
        }
    }
    JSONObject errorLimit = new JSONObject(true);
    if (this.getRecord() != null) {
        errorLimit.put("record", this.getRecord());
    }
    if (this.getPercentage() != null) {
        errorLimit.put("percentage", this.getPercentage());
    }
    JSONObject restore = new JSONObject();
    restore.put("isRestore", isRestore);
    restore.put("restoreColumnName", restoreColumnName == null ? "" : restoreColumnName);
    restore.put("maxRowNumForCheckpoint", maxRowNumForCheckpoint);
    restore.put("restoreColumnIndex", restoreColumnIndex);
    if (isRestore && StringUtils.isEmpty(restoreColumnName)) {
        throw new RdosDefineException("the restore column must be specified when resume-from-breakpoint is enabled");
    }
    setting.put("speed", speed);
    setting.put("errorLimit", errorLimit);
    setting.put("restore", restore);
    return setting;
}
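For orientation, the sketch below hand-builds roughly the JSON that toSettingJson() returns when dirty-record saving is enabled, speed and error limits are set, and resume is off. It is not the method itself; the concrete values, including the maxRowNumForCheckpoint and restoreColumnIndex defaults, are assumptions for illustration only.

import com.alibaba.fastjson.JSONObject;

public class DefaultSettingOutputSketch {
    public static void main(String[] args) {
        // Equivalent of the "dirty" block when isSaveDirty == 1 and a path is configured.
        JSONObject dirty = new JSONObject(true);
        dirty.put("path", "/dirty/records");

        // "speed": one channel, 1 MB/s converted to bytes as in the method above.
        JSONObject speed = new JSONObject(true);
        speed.put("channel", 1);
        speed.put("bytes", 1L * 1024 * 1024);

        // "errorLimit": only the record threshold set in this example.
        JSONObject errorLimit = new JSONObject(true);
        errorLimit.put("record", 100);

        // "restore" with resume disabled; the numeric defaults here are assumed values.
        JSONObject restore = new JSONObject();
        restore.put("isRestore", false);
        restore.put("restoreColumnName", "");
        restore.put("maxRowNumForCheckpoint", 0);
        restore.put("restoreColumnIndex", -1);

        JSONObject setting = new JSONObject(true);
        setting.put("dirty", dirty);
        setting.put("speed", speed);
        setting.put("errorLimit", errorLimit);
        setting.put("restore", restore);
        System.out.println(setting.toJSONString());
    }
}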
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class EsReader method checkFormat.
@Override
public void checkFormat(JSONObject data) {
    if (StringUtils.isBlank(data.getString("name"))) {
        throw new RdosDefineException("name must not be empty");
    }
    JSONObject parameter = data.getJSONObject("parameter");
    if (StringUtils.isBlank(parameter.getString("address"))) {
        throw new RdosDefineException("address must not be empty");
    }
    checkArray(parameter, "column");
}
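A minimal sketch of a reader entry that would pass these checks, assuming checkArray simply requires a non-empty "column" array; the plugin name "esreader" and the shape of the column entry are illustrative assumptions, not values taken from the project.

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

public class EsReaderConfigSketch {
    public static void main(String[] args) {
        JSONObject parameter = new JSONObject();
        parameter.put("address", "127.0.0.1:9200");

        // A single column entry; its exact shape is an assumption for illustration.
        JSONArray column = new JSONArray();
        column.add(new JSONObject().fluentPut("name", "id").fluentPut("type", "integer"));
        parameter.put("column", column);

        JSONObject data = new JSONObject();
        data.put("name", "esreader");
        data.put("parameter", parameter);
        System.out.println(data.toJSONString());
    }
}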
use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.
the class FtpReader method toReaderJson.
@Override
public JSONObject toReaderJson() {
    JSONObject parameter = new JSONObject(true);
    parameter.put("protocol", this.getProtocol());
    if (path != null) {
        if (path instanceof String) {
            parameter.put("path", path.toString());
        } else if (path instanceof List) {
            try {
                parameter.put("path", StringUtils.join(PublicUtil.objectToObject(path, List.class), ","));
            } catch (Exception e) {
                throw new RdosDefineException(String.format("failed to parse the ftp path, cause: %s", e.getMessage()));
            }
        }
    } else {
        throw new RdosDefineException("the FTP path must not be empty");
    }
    if (auth != null && Integer.valueOf(auth).equals(SftpAuthType.RSA.getType())) {
        // key-based (password-free) login: private key path
        parameter.put("privateKeyPath", rsaPath);
    } else {
        parameter.put("password", this.getPassword());
    }
    parameter.put("host", this.getHost());
    parameter.put("port", this.getPort());
    parameter.put("ftpFileName", this.getFtpFileName());
    parameter.put("username", this.getUsername());
    parameter.put("fieldDelimiter", this.fieldDelimiter);
    parameter.put("connectPattern", this.getConnectPattern());
    parameter.put("isFirstLineHeader", isFirstLineHeader);
    parameter.put("column", ColumnUtil.getColumns(this.getColumn(), PluginName.FTP_R));
    parameter.put("encoding", this.getEncoding());
    parameter.put("sourceIds", getSourceIds());
    parameter.putAll(super.getExtralConfigMap());
    JSONObject reader = new JSONObject(true);
    reader.put("name", PluginName.FTP_R);
    reader.put("parameter", parameter);
    return reader;
}
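The sketch below hand-builds a trimmed version of the JSON shape toReaderJson() returns for a password-authenticated FTP source. The plugin name string is an assumption (the literal value of PluginName.FTP_R is not shown here), the field values are invented, and keys such as column, connectPattern, ftpFileName, and sourceIds are omitted for brevity.

import com.alibaba.fastjson.JSONObject;

public class FtpReaderJsonSketch {
    public static void main(String[] args) {
        // "parameter" block for a password-authenticated source; all values are illustrative.
        JSONObject parameter = new JSONObject(true);
        parameter.put("protocol", "sftp");
        parameter.put("path", "/data/in/a.csv,/data/in/b.csv");
        parameter.put("password", "***");
        parameter.put("host", "192.168.0.1");
        parameter.put("port", 22);
        parameter.put("username", "sync");
        parameter.put("fieldDelimiter", ",");
        parameter.put("isFirstLineHeader", true);
        parameter.put("encoding", "utf-8");

        JSONObject reader = new JSONObject(true);
        // Assumed plugin name; the real value comes from PluginName.FTP_R.
        reader.put("name", "ftpreader");
        reader.put("parameter", parameter);
        System.out.println(reader.toJSONString());
    }
}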