use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkResponse in project incubator-inlong by apache.
the class HiveStreamSinkOperation method getById.
@Override
public SinkResponse getById(@NotNull String sinkType, @NotNull Integer id) {
    StreamSinkEntity entity = sinkMapper.selectByPrimaryKey(id);
    Preconditions.checkNotNull(entity, ErrorCodeEnum.SINK_INFO_NOT_FOUND.getMessage());
    // Reject the request if the stored sink type is not HIVE
    String existType = entity.getSinkType();
    Preconditions.checkTrue(Constant.SINK_HIVE.equals(existType),
            String.format(Constant.SINK_TYPE_NOT_SAME, Constant.SINK_HIVE, existType));
    // Copy the entity into a HiveSinkResponse, then attach its field list
    SinkResponse response = this.getFromEntity(entity, HiveSinkResponse::new);
    List<StreamSinkFieldEntity> entities = sinkFieldMapper.selectBySinkId(id);
    List<SinkFieldResponse> infos = CommonBeanUtils.copyListProperties(entities, SinkFieldResponse::new);
    response.setFieldList(infos);
    return response;
}
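For context, callers receive the generic SinkResponse and downcast to HiveSinkResponse to reach Hive-specific attributes. A minimal caller sketch, assuming an injected HiveStreamSinkOperation named sinkOperation (the variable names are illustrative; getPrimaryPartition comes from the project's HiveSinkResponse POJO):

    // Illustrative caller: fetch by id, then downcast for Hive-specific fields
    SinkResponse response = sinkOperation.getById(Constant.SINK_HIVE, sinkId);
    if (response instanceof HiveSinkResponse) {
        HiveSinkResponse hive = (HiveSinkResponse) response;
        String partition = hive.getPrimaryPartition(); // Hive-only attribute
    }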
use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkResponse in project incubator-inlong by apache.
the class CommonOperateService method createDataFlow.
/**
 * Create dataflow info for sort.
 */
public DataFlowInfo createDataFlow(InlongGroupInfo groupInfo, SinkResponse sinkResponse) {
    String groupId = sinkResponse.getInlongGroupId();
    String streamId = sinkResponse.getInlongStreamId();
    // TODO Support all source types, including AUTO_PUSH.
    List<SourceResponse> sourceList = streamSourceService.listSource(groupId, streamId);
    if (CollectionUtils.isEmpty(sourceList)) {
        throw new WorkflowListenerException(
                String.format("Source not found by groupId=%s and streamId=%s", groupId, streamId));
    }

    // Get all field info; a Hive sink additionally contributes its primary partition
    List<FieldInfo> sourceFields = new ArrayList<>();
    List<FieldInfo> sinkFields = new ArrayList<>();
    String partition = null;
    if (SinkType.forType(sinkResponse.getSinkType()) == SinkType.HIVE) {
        HiveSinkResponse hiveSink = (HiveSinkResponse) sinkResponse;
        partition = hiveSink.getPrimaryPartition();
    }

    // TODO Support more than one source and one sink
    final SourceResponse sourceResponse = sourceList.get(0);
    boolean isAllMigration = SourceInfoUtils.isBinlogAllMigration(sourceResponse);
    FieldMappingRule fieldMappingRule = FieldInfoUtils.createFieldInfo(
            isAllMigration, sinkResponse.getFieldList(), sourceFields, sinkFields, partition);

    // Get source info
    String masterAddress = getSpecifiedParam(Constant.TUBE_MASTER_URL);
    PulsarClusterInfo pulsarCluster = getPulsarClusterInfo(groupInfo.getMiddlewareType());
    InlongStreamInfo streamInfo = streamService.get(groupId, streamId);
    SourceInfo sourceInfo = SourceInfoUtils.createSourceInfo(pulsarCluster, masterAddress, clusterBean,
            groupInfo, streamInfo, sourceResponse, sourceFields);

    // Get sink info
    SinkInfo sinkInfo = SinkInfoUtils.createSinkInfo(sourceResponse, sinkResponse, sinkFields);

    // Get transformation info
    TransformationInfo transInfo = new TransformationInfo(fieldMappingRule);

    // Get properties
    Map<String, Object> properties = new HashMap<>();
    if (MapUtils.isNotEmpty(sinkResponse.getProperties())) {
        properties.putAll(sinkResponse.getProperties());
    }
    properties.put(Constant.DATA_FLOW_GROUP_ID_KEY, groupId);

    return new DataFlowInfo(sinkResponse.getId(), sourceInfo, transInfo, sinkInfo, properties);
}
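The only Hive-specific step in this method is reading the primary partition, so it can be factored out. A sketch of that extraction as a private helper, using only getters that appear in the method above (the helper name extractPartition is hypothetical):

    // Hypothetical helper: returns the Hive primary partition, or null for other sink types
    private static String extractPartition(SinkResponse sinkResponse) {
        if (SinkType.forType(sinkResponse.getSinkType()) == SinkType.HIVE) {
            return ((HiveSinkResponse) sinkResponse).getPrimaryPartition();
        }
        return null;
    }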
use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkResponse in project incubator-inlong by apache.
the class HiveStreamSinkServiceTest method testGetAndUpdate.
@Test
public void testGetAndUpdate() {
    Integer id = this.saveSink();
    SinkResponse response = sinkService.get(id, Constant.SINK_HIVE);
    Assert.assertEquals(globalGroupId, response.getInlongGroupId());

    // Downcast to reach the Hive-specific setter, then persist the change via an update request
    HiveSinkResponse hiveResponse = (HiveSinkResponse) response;
    hiveResponse.setEnableCreateResource(Constant.DISABLE_CREATE_RESOURCE);
    HiveSinkRequest request = CommonBeanUtils.copyProperties(hiveResponse, HiveSinkRequest::new);
    boolean result = sinkService.update(request, globalOperator);
    Assert.assertTrue(result);
}
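The test exercises a get-cast-modify-update round trip. A sketch of the same flow as a reusable helper, built only from the calls the test itself makes (the helper name disableResourceCreation is hypothetical):

    // Hypothetical helper: fetch a Hive sink, disable resource creation, and persist the change
    private boolean disableResourceCreation(Integer sinkId, String operator) {
        HiveSinkResponse hive = (HiveSinkResponse) sinkService.get(sinkId, Constant.SINK_HIVE);
        hive.setEnableCreateResource(Constant.DISABLE_CREATE_RESOURCE);
        HiveSinkRequest request = CommonBeanUtils.copyProperties(hive, HiveSinkRequest::new);
        return sinkService.update(request, operator);
    }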
use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkResponse in project incubator-inlong by apache.
the class SinkInfoUtils method createSinkInfo.
/**
 * Create sink info for DataFlowInfo.
 */
public static SinkInfo createSinkInfo(SourceResponse sourceResponse, SinkResponse sinkResponse,
        List<FieldInfo> sinkFields) {
    String sinkType = sinkResponse.getSinkType();
    SinkInfo sinkInfo;
    if (SinkType.forType(sinkType) == SinkType.HIVE) {
        sinkInfo = createHiveSinkInfo((HiveSinkResponse) sinkResponse, sinkFields);
    } else if (SinkType.forType(sinkType) == SinkType.KAFKA) {
        sinkInfo = createKafkaSinkInfo(sourceResponse, (KafkaSinkResponse) sinkResponse, sinkFields);
    } else if (SinkType.forType(sinkType) == SinkType.CLICKHOUSE) {
        sinkInfo = createClickhouseSinkInfo((ClickHouseSinkResponse) sinkResponse, sinkFields);
    } else {
        throw new RuntimeException(String.format("Unsupported SinkType {%s}", sinkType));
    }
    return sinkInfo;
}
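Since the dispatch is on a single enum, a switch reads more directly than the if-else chain. A behavior-preserving sketch using only identifiers from the method above:

    // Sketch: the same dispatch expressed as a switch over SinkType
    public static SinkInfo createSinkInfo(SourceResponse sourceResponse, SinkResponse sinkResponse,
            List<FieldInfo> sinkFields) {
        String sinkType = sinkResponse.getSinkType();
        switch (SinkType.forType(sinkType)) {
            case HIVE:
                return createHiveSinkInfo((HiveSinkResponse) sinkResponse, sinkFields);
            case KAFKA:
                return createKafkaSinkInfo(sourceResponse, (KafkaSinkResponse) sinkResponse, sinkFields);
            case CLICKHOUSE:
                return createClickhouseSinkInfo((ClickHouseSinkResponse) sinkResponse, sinkFields);
            default:
                throw new RuntimeException(String.format("Unsupported SinkType {%s}", sinkType));
        }
    }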
use of org.apache.inlong.manager.common.pojo.sink.hive.HiveSinkResponse in project incubator-inlong by apache.
the class InlongParser method parseStreamList.
public static List<FullStreamResponse> parseStreamList(Response response) {
    Object data = response.getData();
    JsonObject pageInfoJson = GsonUtil.fromJson(GsonUtil.toJson(data), JsonObject.class);
    JsonArray fullStreamArray = pageInfoJson.getAsJsonArray("list");
    List<FullStreamResponse> list = Lists.newArrayList();
    for (int i = 0; i < fullStreamArray.size(); i++) {
        JsonObject fullStreamJson = (JsonObject) fullStreamArray.get(i);
        FullStreamResponse fullStreamResponse = GsonUtil.fromJson(fullStreamJson.toString(), FullStreamResponse.class);
        list.add(fullStreamResponse);

        // Parse sourceResponse in each stream, dispatching on the type discriminator
        JsonArray sourceJsonArr = fullStreamJson.getAsJsonArray(SOURCE_INFO);
        List<SourceResponse> sourceResponses = Lists.newArrayList();
        fullStreamResponse.setSourceInfo(sourceResponses);
        for (int j = 0; j < sourceJsonArr.size(); j++) {
            // Index with the inner counter j, not the outer counter i
            JsonObject sourceJson = (JsonObject) sourceJsonArr.get(j);
            String type = sourceJson.get(SOURCE_TYPE).getAsString();
            SourceType sourceType = SourceType.forType(type);
            switch (sourceType) {
                case BINLOG:
                    BinlogSourceResponse binlogSourceResponse =
                            GsonUtil.fromJson(sourceJson.toString(), BinlogSourceResponse.class);
                    sourceResponses.add(binlogSourceResponse);
                    break;
                case KAFKA:
                    KafkaSourceResponse kafkaSourceResponse =
                            GsonUtil.fromJson(sourceJson.toString(), KafkaSourceResponse.class);
                    sourceResponses.add(kafkaSourceResponse);
                    break;
                default:
                    throw new RuntimeException(String.format("Unsupported sourceType=%s for Inlong", sourceType));
            }
        }

        // Parse sinkResponse in each stream the same way
        JsonArray sinkJsonArr = fullStreamJson.getAsJsonArray(SINK_INFO);
        List<SinkResponse> sinkResponses = Lists.newArrayList();
        fullStreamResponse.setSinkInfo(sinkResponses);
        for (int j = 0; j < sinkJsonArr.size(); j++) {
            JsonObject sinkJson = (JsonObject) sinkJsonArr.get(j);
            String type = sinkJson.get(SINK_TYPE).getAsString();
            SinkType sinkType = SinkType.forType(type);
            switch (sinkType) {
                case HIVE:
                    HiveSinkResponse hiveSinkResponse =
                            GsonUtil.fromJson(sinkJson.toString(), HiveSinkResponse.class);
                    sinkResponses.add(hiveSinkResponse);
                    break;
                case KAFKA:
                    KafkaSinkResponse kafkaSinkResponse =
                            GsonUtil.fromJson(sinkJson.toString(), KafkaSinkResponse.class);
                    sinkResponses.add(kafkaSinkResponse);
                    break;
                case ICEBERG:
                    IcebergSinkResponse icebergSinkResponse =
                            GsonUtil.fromJson(sinkJson.toString(), IcebergSinkResponse.class);
                    sinkResponses.add(icebergSinkResponse);
                    break;
                case CLICKHOUSE:
                    ClickHouseSinkResponse clickHouseSinkResponse =
                            GsonUtil.fromJson(sinkJson.toString(), ClickHouseSinkResponse.class);
                    sinkResponses.add(clickHouseSinkResponse);
                    break;
                default:
                    throw new RuntimeException(String.format("Unsupported sinkType=%s for Inlong", sinkType));
            }
        }
    }
    return list;
}
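Both inner loops follow the same pattern: read the type discriminator, then deserialize into the matching concrete class. A sketch of the sink half factored into a helper, using only identifiers from the method above (the helper name parseSink is hypothetical):

    // Hypothetical helper: deserialize one sink JSON object into its concrete SinkResponse subclass
    private static SinkResponse parseSink(JsonObject sinkJson) {
        SinkType sinkType = SinkType.forType(sinkJson.get(SINK_TYPE).getAsString());
        switch (sinkType) {
            case HIVE:
                return GsonUtil.fromJson(sinkJson.toString(), HiveSinkResponse.class);
            case KAFKA:
                return GsonUtil.fromJson(sinkJson.toString(), KafkaSinkResponse.class);
            case ICEBERG:
                return GsonUtil.fromJson(sinkJson.toString(), IcebergSinkResponse.class);
            case CLICKHOUSE:
                return GsonUtil.fromJson(sinkJson.toString(), ClickHouseSinkResponse.class);
            default:
                throw new RuntimeException(String.format("Unsupported sinkType=%s for Inlong", sinkType));
        }
    }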