use of org.apache.inlong.manager.common.pojo.stream.InlongStreamInfo in project incubator-inlong by apache.
the class DisableZkForSortTest method createHiveSink.
public HiveSinkRequest createHiveSink(InlongStreamInfo streamInfo) {
    HiveSinkRequest hiveSinkRequest = new HiveSinkRequest();
    hiveSinkRequest.setInlongGroupId(streamInfo.getInlongGroupId());
    hiveSinkRequest.setSinkType("HIVE");
    hiveSinkRequest.setSinkName("HIVE");
    hiveSinkRequest.setInlongStreamId(streamInfo.getInlongStreamId());
    // Map the stream fields to sink field requests
    List<SinkFieldRequest> sinkFieldRequests = createStreamFields(streamInfo.getInlongGroupId(),
            streamInfo.getInlongStreamId()).stream().map(streamFieldInfo -> {
                SinkFieldRequest fieldInfo = new SinkFieldRequest();
                fieldInfo.setFieldName(streamFieldInfo.getFieldName());
                fieldInfo.setFieldType(streamFieldInfo.getFieldType());
                fieldInfo.setFieldComment(streamFieldInfo.getFieldComment());
                return fieldInfo;
            }).collect(Collectors.toList());
    hiveSinkRequest.setFieldList(sinkFieldRequests);
    hiveSinkRequest.setEnableCreateTable(0);
    hiveSinkRequest.setUsername(OPERATOR);
    hiveSinkRequest.setPassword("password");
    hiveSinkRequest.setDbName("default");
    hiveSinkRequest.setTableName("kip_test");
    hiveSinkRequest.setJdbcUrl("jdbc:hive2://172.17.12.135:7001");
    hiveSinkRequest.setFileFormat("TextFile");
    hiveSinkRequest.setHdfsDefaultFs("hdfs://172.17.12.235:4007");
    hiveSinkRequest.setWarehouseDir("/user/hive/warehouse");
    // The original snippet called setFileFormat a second time with the charset name,
    // overwriting "TextFile"; the charset presumably belongs to the data encoding instead
    hiveSinkRequest.setDataEncoding(StandardCharsets.UTF_8.name());
    hiveSinkRequest.setDataSeparator("124");
    streamSinkService.save(hiveSinkRequest, OPERATOR);
    return hiveSinkRequest;
}
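The createStreamFields helper used above is not part of this listing. A minimal sketch of what it might look like, assuming the fields are InlongStreamFieldInfo objects; the concrete type, field names, and sample values are assumptions here, only the getFieldName/getFieldType/getFieldComment accessors appear in the mapping above.
// Hypothetical sketch of the createStreamFields helper referenced above; the field
// values are illustrative and not taken from the project sources
public List<InlongStreamFieldInfo> createStreamFields(String groupId, String streamId) {
    List<InlongStreamFieldInfo> fields = new ArrayList<>();
    InlongStreamFieldInfo fieldInfo = new InlongStreamFieldInfo();
    fieldInfo.setInlongGroupId(groupId);
    fieldInfo.setInlongStreamId(streamId);
    fieldInfo.setFieldName("id");
    fieldInfo.setFieldType("int");
    fieldInfo.setFieldComment("primary key");
    fields.add(fieldInfo);
    return fields;
}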
use of org.apache.inlong.manager.common.pojo.stream.InlongStreamInfo in project incubator-inlong by apache.
the class DisableZkForSortTest method testCreateSortConfigInUpdateWorkflow.
@Test
public void testCreateSortConfigInUpdateWorkflow() {
    // Prepare a Pulsar group with ZooKeeper disabled and mark it as configured successfully
    InlongGroupInfo groupInfo = initGroupForm("PULSAR");
    groupInfo.setZookeeperEnabled(0);
    groupInfo.setStatus(GroupState.CONFIG_SUCCESSFUL.getCode());
    groupService.update(groupInfo.genRequest(), OPERATOR);
    InlongStreamInfo streamInfo = createStreamInfo(groupInfo);
    createHiveSink(streamInfo);
    createKafkaSource(streamInfo);

    // Start the suspend workflow for the group
    UpdateGroupProcessForm form = new UpdateGroupProcessForm();
    form.setGroupInfo(groupInfo);
    form.setOperateType(OperateType.SUSPEND);
    taskListenerFactory.acceptPlugin(new MockPlugin());
    WorkflowContext context = workflowEngine.processService()
            .start(ProcessName.SUSPEND_GROUP_PROCESS.name(), applicant, form);
    WorkflowResult result = WorkflowBeanUtils.result(context);
    ProcessResponse response = result.getProcessInfo();
    Assert.assertSame(response.getStatus(), ProcessStatus.COMPLETED);

    // The stopSort service task should carry the CreateSortConfigListener
    WorkflowProcess process = context.getProcess();
    WorkflowTask task = process.getTaskByName("stopSort");
    Assert.assertTrue(task instanceof ServiceTask);
    Assert.assertEquals(2, task.getNameToListenerMap().size());
    List<TaskEventListener> listeners = Lists.newArrayList(task.getNameToListenerMap().values());
    Assert.assertTrue(listeners.get(1) instanceof CreateSortConfigListener);

    // The listener should have written one entry into the group's extension list
    ProcessForm currentProcessForm = context.getProcessForm();
    InlongGroupInfo curGroupRequest = ((UpdateGroupProcessForm) currentProcessForm).getGroupInfo();
    Assert.assertEquals(1, curGroupRequest.getExtList().size());
}
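The restart path could be checked in the same way. A hedged sketch follows; OperateType.RESTART and ProcessName.RESTART_GROUP_PROCESS are assumed to exist as mirror constants of the suspend path shown above.
// Hedged sketch of the restart counterpart; OperateType.RESTART and
// ProcessName.RESTART_GROUP_PROCESS are assumptions mirroring the suspend constants
UpdateGroupProcessForm restartForm = new UpdateGroupProcessForm();
restartForm.setGroupInfo(groupInfo);
restartForm.setOperateType(OperateType.RESTART);
WorkflowContext restartContext = workflowEngine.processService()
        .start(ProcessName.RESTART_GROUP_PROCESS.name(), applicant, restartForm);
Assert.assertSame(ProcessStatus.COMPLETED,
        WorkflowBeanUtils.result(restartContext).getProcessInfo().getStatus());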
use of org.apache.inlong.manager.common.pojo.stream.InlongStreamInfo in project incubator-inlong by apache.
the class WorkflowServiceImplTest method createStreamInfo.
/**
 * Create inlong stream
 */
public InlongStreamInfo createStreamInfo(InlongGroupInfo inlongGroupInfo) {
    // delete first
    try {
        streamService.delete(GROUP_ID, OPERATOR, OPERATOR);
    } catch (Exception e) {
        // ignore
    }
    InlongStreamInfo streamInfo = new InlongStreamInfo();
    streamInfo.setInlongGroupId(inlongGroupInfo.getInlongGroupId());
    streamInfo.setInlongStreamId(STREAM_ID);
    streamInfo.setMqResourceObj(STREAM_ID);
    streamInfo.setDataSeparator("124");
    streamInfo.setDataEncoding(DATA_ENCODING);
    streamInfo.setInCharges(OPERATOR);
    streamInfo.setCreator(OPERATOR);
    streamInfo.setFieldList(createStreamFields(inlongGroupInfo.getInlongGroupId(), STREAM_ID));
    streamService.save(streamInfo, OPERATOR);
    return streamInfo;
}
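These helpers compose naturally inside a test. A minimal sketch, reusing only initGroupForm, createStreamInfo, and createHiveSink from the listings above:
// Composition sketch using only helpers shown in this listing
InlongGroupInfo groupInfo = initGroupForm("PULSAR");
InlongStreamInfo streamInfo = createStreamInfo(groupInfo);
HiveSinkRequest hiveSink = createHiveSink(streamInfo);
Assert.assertEquals(streamInfo.getInlongStreamId(), hiveSink.getInlongStreamId());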
use of org.apache.inlong.manager.common.pojo.stream.InlongStreamInfo in project incubator-inlong by apache.
the class DataSourceListenerTest method createBinlogSource.
public Integer createBinlogSource(InlongGroupInfo groupInfo) {
    final InlongStreamInfo streamInfo = createStreamInfo(groupInfo);
    BinlogSourceRequest sourceRequest = new BinlogSourceRequest();
    sourceRequest.setInlongGroupId(streamInfo.getInlongGroupId());
    sourceRequest.setInlongStreamId(streamInfo.getInlongStreamId());
    sourceRequest.setSourceName("binlog-collect");
    return streamSourceService.save(sourceRequest, OPERATOR);
}
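A caller can verify the saved source through the same streamSourceService.listSource call used in createDataFlow below. A hedged sketch, assuming groupInfo comes from initGroupForm as in the tests above:
// Sketch: create the binlog source and confirm it is listed for the stream
Integer sourceId = createBinlogSource(groupInfo);
Assert.assertNotNull(sourceId);
List<SourceResponse> sources = streamSourceService.listSource(
        groupInfo.getInlongGroupId(), STREAM_ID);
Assert.assertFalse(sources.isEmpty());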
use of org.apache.inlong.manager.common.pojo.stream.InlongStreamInfo in project incubator-inlong by apache.
the class CommonOperateService method createDataFlow.
/**
 * Create dataflow info for sort.
 */
public DataFlowInfo createDataFlow(InlongGroupInfo groupInfo, SinkResponse sinkResponse) {
    String groupId = sinkResponse.getInlongGroupId();
    String streamId = sinkResponse.getInlongStreamId();
    // TODO Support all source type, include AUTO_PUSH.
    List<SourceResponse> sourceList = streamSourceService.listSource(groupId, streamId);
    if (CollectionUtils.isEmpty(sourceList)) {
        throw new WorkflowListenerException(
                String.format("Source not found by groupId=%s and streamId=%s", groupId, streamId));
    }

    // Get all field info
    List<FieldInfo> sourceFields = new ArrayList<>();
    List<FieldInfo> sinkFields = new ArrayList<>();
    String partition = null;
    if (SinkType.forType(sinkResponse.getSinkType()) == SinkType.HIVE) {
        HiveSinkResponse hiveSink = (HiveSinkResponse) sinkResponse;
        partition = hiveSink.getPrimaryPartition();
    }

    // TODO Support more than one source and one sink
    final SourceResponse sourceResponse = sourceList.get(0);
    boolean isAllMigration = SourceInfoUtils.isBinlogAllMigration(sourceResponse);
    FieldMappingRule fieldMappingRule = FieldInfoUtils.createFieldInfo(isAllMigration,
            sinkResponse.getFieldList(), sourceFields, sinkFields, partition);

    // Get source info
    String masterAddress = getSpecifiedParam(Constant.TUBE_MASTER_URL);
    PulsarClusterInfo pulsarCluster = getPulsarClusterInfo(groupInfo.getMiddlewareType());
    InlongStreamInfo streamInfo = streamService.get(groupId, streamId);
    SourceInfo sourceInfo = SourceInfoUtils.createSourceInfo(pulsarCluster, masterAddress,
            clusterBean, groupInfo, streamInfo, sourceResponse, sourceFields);

    // Get sink info
    SinkInfo sinkInfo = SinkInfoUtils.createSinkInfo(sourceResponse, sinkResponse, sinkFields);

    // Get transformation info
    TransformationInfo transInfo = new TransformationInfo(fieldMappingRule);

    // Get properties
    Map<String, Object> properties = new HashMap<>();
    if (MapUtils.isNotEmpty(sinkResponse.getProperties())) {
        properties.putAll(sinkResponse.getProperties());
    }
    properties.put(Constant.DATA_FLOW_GROUP_ID_KEY, groupId);

    return new DataFlowInfo(sinkResponse.getId(), sourceInfo, transInfo, sinkInfo, properties);
}
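A hedged caller-side sketch of assembling one DataFlowInfo per sink; the sinkResponses collection and the commonOperateService reference are illustrative assumptions, only createDataFlow itself comes from the listing above.
// Illustrative caller sketch: build one DataFlowInfo per sink, keyed by sink id;
// "sinkResponses" and "commonOperateService" are assumed to exist in the caller
Map<Integer, DataFlowInfo> dataFlows = new HashMap<>();
for (SinkResponse sinkResponse : sinkResponses) {
    DataFlowInfo flowInfo = commonOperateService.createDataFlow(groupInfo, sinkResponse);
    dataFlows.put(sinkResponse.getId(), flowInfo);
}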