Use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkDTO in the project incubator-inlong by Apache.
From the class HiveStreamSinkOperation, method saveOpt:
/**
 * Save a Hive sink and its field list, returning the generated sink id.
 *
 * @param request  sink request, must have sink type {@code SINK_HIVE}
 * @param operator name of the user performing the save; recorded as creator and modifier
 * @return id of the newly inserted sink record
 * @throws BusinessException if the Hive ext params cannot be serialized to JSON
 */
@Override
public Integer saveOpt(SinkRequest request, String operator) {
    String sinkType = request.getSinkType();
    Preconditions.checkTrue(Constant.SINK_HIVE.equals(sinkType),
            ErrorCodeEnum.SINK_TYPE_NOT_SUPPORT.getMessage() + ": " + sinkType);

    HiveSinkRequest hiveRequest = (HiveSinkRequest) request;
    StreamSinkEntity entity = CommonBeanUtils.copyProperties(hiveRequest, StreamSinkEntity::new);
    entity.setStatus(EntityStatus.SINK_NEW.getCode());
    entity.setIsDeleted(EntityStatus.UN_DELETED.getCode());
    entity.setCreator(operator);
    entity.setModifier(operator);
    Date now = new Date();
    entity.setCreateTime(now);
    entity.setModifyTime(now);

    // Serialize the Hive-specific config into the ext params column
    HiveSinkDTO dto = HiveSinkDTO.getFromRequest(hiveRequest);
    try {
        entity.setExtParams(objectMapper.writeValueAsString(dto));
    } catch (Exception e) {
        // Log the cause before rethrowing: BusinessException(ErrorCodeEnum) does not carry it,
        // and silently dropping the serialization failure made this undiagnosable.
        LOGGER.error("failed to serialize hive sink ext params of sink type={}", sinkType, e);
        throw new BusinessException(ErrorCodeEnum.SINK_SAVE_FAILED);
    }

    sinkMapper.insert(entity);
    // The generated primary key is needed to attach the field records to this sink
    Integer sinkId = entity.getId();
    request.setId(sinkId);
    this.saveFieldOpt(request);
    return sinkId;
}
Use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkDTO in the project incubator-inlong by Apache.
From the class HiveStreamSinkOperation, method updateOpt:
/**
 * Update an existing Hive sink and its field list.
 *
 * @param request  sink request carrying the id of the record to update; must be Hive type
 * @param operator name of the user performing the update; recorded as modifier
 * @throws BusinessException if the sink does not exist or the ext params cannot be serialized
 */
@Override
public void updateOpt(SinkRequest request, String operator) {
    String sinkType = request.getSinkType();
    Preconditions.checkTrue(Constant.SINK_HIVE.equals(sinkType),
            String.format(Constant.SINK_TYPE_NOT_SAME, Constant.SINK_HIVE, sinkType));

    StreamSinkEntity entity = sinkMapper.selectByPrimaryKey(request.getId());
    Preconditions.checkNotNull(entity, ErrorCodeEnum.SINK_INFO_NOT_FOUND.getMessage());

    // Copy non-null request fields onto the existing entity, then refresh ext params
    HiveSinkRequest hiveRequest = (HiveSinkRequest) request;
    CommonBeanUtils.copyProperties(hiveRequest, entity, true);
    try {
        HiveSinkDTO dto = HiveSinkDTO.getFromRequest(hiveRequest);
        entity.setExtParams(objectMapper.writeValueAsString(dto));
    } catch (Exception e) {
        // Log the cause before rethrowing: the BusinessException below carries only the
        // error message, so without this log the root cause would be lost.
        LOGGER.error("failed to serialize hive sink ext params of sink type={}", sinkType, e);
        throw new BusinessException(ErrorCodeEnum.SINK_INFO_INCORRECT.getMessage());
    }

    // Remember the previous status so the field update below can tell whether the
    // sink config had already completed successfully
    entity.setPreviousStatus(entity.getStatus());
    entity.setStatus(EntityStatus.GROUP_CONFIG_ING.getCode());
    entity.setModifier(operator);
    entity.setModifyTime(new Date());
    sinkMapper.updateByPrimaryKeySelective(entity);

    // If the sink was already configured successfully, only append new fields
    boolean onlyAdd = EntityStatus.SINK_CONFIG_SUCCESSFUL.getCode().equals(entity.getPreviousStatus());
    this.updateFieldOpt(onlyAdd, hiveRequest);
    LOGGER.info("success to update sink of type={}", sinkType);
}
Use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkDTO in the project incubator-inlong by Apache.
From the class HiveStreamSinkOperation, method getFromEntity:
/**
 * Build a target object from a stream sink entity, merging in the Hive-specific
 * ext params stored as JSON on the entity.
 *
 * @param entity stream sink entity; when {@code null}, an empty target is returned
 * @param target supplier that creates the result instance
 * @param <T>    type of the result
 * @return the populated target instance
 */
@Override
public <T> T getFromEntity(StreamSinkEntity entity, Supplier<T> target) {
    T result = target.get();
    if (entity == null) {
        return result;
    }

    String sinkType = entity.getSinkType();
    Preconditions.checkTrue(Constant.SINK_HIVE.equals(sinkType),
            String.format(Constant.SINK_TYPE_NOT_SAME, Constant.SINK_HIVE, sinkType));

    // Copy the common entity columns first, then overlay the Hive-specific fields
    // parsed from the ext params JSON
    HiveSinkDTO hiveDto = HiveSinkDTO.getFromJson(entity.getExtParams());
    CommonBeanUtils.copyProperties(entity, result, true);
    CommonBeanUtils.copyProperties(hiveDto, result, true);
    return result;
}
Use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkDTO in the project incubator-inlong by Apache.
From the class DefaultHiveTableOperator, method createTable:
/**
 * Create the Hive database and table for the given sink config, or append any
 * missing columns when the table already exists, then record the resulting
 * sink status (success or failure).
 *
 * @param groupId inlong group id, used for logging only
 * @param config  sink config whose ext params hold the Hive table definition
 * @throws WorkflowException if any step of the table creation fails
 */
private void createTable(String groupId, SinkForSortDTO config) {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("begin create hive table for inlong group={}, config={}", groupId, config);
    }

    // Get all info from config
    HiveSinkDTO hiveInfo = HiveSinkDTO.getFromJson(config.getExtParams());
    HiveTableQueryBean tableBean = getTableQueryBean(config, hiveInfo);
    try {
        // create database if not exists
        dataSourceService.createDb(tableBean);

        // check if the table exists: an empty column list means no table
        List<ColumnInfoBean> columns = dataSourceService.queryColumns(tableBean);
        if (columns.isEmpty()) {
            // no such table, create one
            dataSourceService.createTable(tableBean);
        } else {
            // table exists: skip the leading columns already present in Hive and
            // only add the remaining new ones
            List<HiveColumnQueryBean> columnsSkipHistory = tableBean.getColumns().stream()
                    .skip(columns.size()).collect(toList());
            if (!columnsSkipHistory.isEmpty()) {
                tableBean.setColumns(columnsSkipHistory);
                dataSourceService.createColumn(tableBean);
            }
        }
        sinkService.updateStatus(config.getId(), EntityStatus.SINK_CONFIG_SUCCESSFUL.getCode(),
                "create hive table success");
    } catch (Throwable e) {
        // Failure is logged with its cause and persisted on the sink before rethrowing
        LOGGER.error("create hive table error, ", e);
        sinkService.updateStatus(config.getId(), EntityStatus.SINK_CONFIG_FAILED.getCode(), e.getMessage());
        throw new WorkflowException("create hive table failed, reason: " + e.getMessage());
    }
    // Parameterized logging instead of string concatenation (SLF4J idiom)
    LOGGER.info("success create hive table for data group [{}]", groupId);
}
Aggregations