Use of org.apache.inlong.sort.protocol.source.SourceInfo in project incubator-inlong by apache.
The class SourceInfoUtils, method createSourceInfo.
/**
 * Create source info for DataFlowInfo.
 */
public static SourceInfo createSourceInfo(PulsarClusterInfo pulsarCluster, String masterAddress,
        ClusterBean clusterBean, InlongGroupInfo groupInfo, InlongStreamInfo streamInfo,
        SourceResponse sourceResponse, List<FieldInfo> sourceFields) {
    String mqType = groupInfo.getMiddlewareType();
    DeserializationInfo deserializationInfo = SerializationUtils.createDeserialInfo(sourceResponse, streamInfo);
    SourceInfo sourceInfo;
    if (Constant.MIDDLEWARE_PULSAR.equals(mqType) || Constant.MIDDLEWARE_TDMQ_PULSAR.equals(mqType)) {
        sourceInfo = createPulsarSourceInfo(pulsarCluster, clusterBean, groupInfo, streamInfo,
                deserializationInfo, sourceFields);
    } else if (Constant.MIDDLEWARE_TUBE.equals(mqType)) {
        // InlongGroupInfo groupInfo, String masterAddress,
        sourceInfo = createTubeSourceInfo(groupInfo, masterAddress, clusterBean, deserializationInfo, sourceFields);
    } else {
        throw new WorkflowListenerException(String.format("Unsupported middleware {%s}", mqType));
    }
    return sourceInfo;
}
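The branch taken is driven entirely by groupInfo.getMiddlewareType(). A hedged usage sketch follows; pulsarCluster, clusterBean, groupInfo, streamInfo, sourceResponse and sourceFields are assumed to be already-populated objects from the caller (as in the createDataFlow method shown next), and the master address value is purely illustrative.

SourceInfo sourceInfo = SourceInfoUtils.createSourceInfo(
        pulsarCluster,       // consulted only for PULSAR / TDMQ_PULSAR groups
        "127.0.0.1:8715",    // illustrative TubeMQ master address, used only for TUBE groups
        clusterBean, groupInfo, streamInfo, sourceResponse, sourceFields);
// Constant.MIDDLEWARE_PULSAR or MIDDLEWARE_TDMQ_PULSAR -> Pulsar source info
// Constant.MIDDLEWARE_TUBE                             -> Tube source info
// any other middleware type                            -> WorkflowListenerException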
Use of org.apache.inlong.sort.protocol.source.SourceInfo in project incubator-inlong by apache.
The class CommonOperateService, method createDataFlow.
/**
 * Create dataflow info for sort.
 */
public DataFlowInfo createDataFlow(InlongGroupInfo groupInfo, SinkResponse sinkResponse) {
    String groupId = sinkResponse.getInlongGroupId();
    String streamId = sinkResponse.getInlongStreamId();
    // TODO Support all source types, including AUTO_PUSH.
    List<SourceResponse> sourceList = streamSourceService.listSource(groupId, streamId);
    if (CollectionUtils.isEmpty(sourceList)) {
        throw new WorkflowListenerException(
                String.format("Source not found by groupId=%s and streamId=%s", groupId, streamId));
    }

    // Get all field info
    List<FieldInfo> sourceFields = new ArrayList<>();
    List<FieldInfo> sinkFields = new ArrayList<>();
    String partition = null;
    if (SinkType.forType(sinkResponse.getSinkType()) == SinkType.HIVE) {
        HiveSinkResponse hiveSink = (HiveSinkResponse) sinkResponse;
        partition = hiveSink.getPrimaryPartition();
    }

    // TODO Support more than one source and one sink
    final SourceResponse sourceResponse = sourceList.get(0);
    boolean isAllMigration = SourceInfoUtils.isBinlogAllMigration(sourceResponse);
    FieldMappingRule fieldMappingRule = FieldInfoUtils.createFieldInfo(
            isAllMigration, sinkResponse.getFieldList(), sourceFields, sinkFields, partition);

    // Get source info
    String masterAddress = getSpecifiedParam(Constant.TUBE_MASTER_URL);
    PulsarClusterInfo pulsarCluster = getPulsarClusterInfo(groupInfo.getMiddlewareType());
    InlongStreamInfo streamInfo = streamService.get(groupId, streamId);
    SourceInfo sourceInfo = SourceInfoUtils.createSourceInfo(pulsarCluster, masterAddress, clusterBean,
            groupInfo, streamInfo, sourceResponse, sourceFields);

    // Get sink info
    SinkInfo sinkInfo = SinkInfoUtils.createSinkInfo(sourceResponse, sinkResponse, sinkFields);

    // Get transformation info
    TransformationInfo transInfo = new TransformationInfo(fieldMappingRule);

    // Get properties
    Map<String, Object> properties = new HashMap<>();
    if (MapUtils.isNotEmpty(sinkResponse.getProperties())) {
        properties.putAll(sinkResponse.getProperties());
    }
    properties.put(Constant.DATA_FLOW_GROUP_ID_KEY, groupId);

    return new DataFlowInfo(sinkResponse.getId(), sourceInfo, transInfo, sinkInfo, properties);
}
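Taken together, createDataFlow wires exactly one source, one field-mapping transformation and one sink into a DataFlowInfo keyed by the sink id, with the inlong group id carried along in the properties map. A hedged usage sketch follows; commonOperateService, groupInfo and hiveSinkResponse are assumed to come from the surrounding workflow listener and are not defined in the snippet above.

// Build the sort configuration for a Hive sink of this group/stream.
DataFlowInfo flowInfo = commonOperateService.createDataFlow(groupInfo, hiveSinkResponse);
// The returned DataFlowInfo is keyed by hiveSinkResponse.getId(), and its properties
// map carries the group id under Constant.DATA_FLOW_GROUP_ID_KEY, so the sort engine
// can tie the flow back to its inlong group.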
Use of org.apache.inlong.sort.protocol.source.SourceInfo in project incubator-inlong by apache.
The class FieldMappingTransformerTest, method testTransform.
@Test
public void testTransform() throws Exception {
    final FieldInfo fieldInfo = new FieldInfo("id", new LongFormatInfo());
    final FieldInfo extraFieldInfo = new FieldInfo("not_important", new StringFormatInfo());
    final SourceInfo sourceInfo = new TestingSourceInfo(new FieldInfo[] { extraFieldInfo, fieldInfo });
    final SinkInfo sinkInfo = new TestingSinkInfo(new FieldInfo[] { extraFieldInfo, fieldInfo });
    final long dataFlowId = 1L;
    final DataFlowInfo dataFlowInfo = new DataFlowInfo(dataFlowId, sourceInfo, sinkInfo);

    final FieldMappingTransformer transformer = new FieldMappingTransformer(new Configuration());
    transformer.addDataFlow(dataFlowInfo);

    // should be 4 fields (2 origin fields + time + attr)
    final Row sourceRow = new Row(2 + SOURCE_FIELD_SKIP_STEP);
    sourceRow.setField(0, System.currentTimeMillis());
    sourceRow.setField(1, "attr");
    sourceRow.setField(2, "not important");
    sourceRow.setField(3, 9527L);

    final Record sourceRecord = new Record(dataFlowId, System.currentTimeMillis(), sourceRow);
    final Record sinkRecord = transformer.transform(sourceRecord);
    assertEquals(dataFlowId, sinkRecord.getDataflowId());
    assertEquals(2, sinkRecord.getRow().getArity());
    assertEquals("not important", sinkRecord.getRow().getField(0));
    assertEquals(9527L, sinkRecord.getRow().getField(1));
}
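The test relies on a fixed source-row layout: the first SOURCE_FIELD_SKIP_STEP positions hold built-in data (event time, then attributes) and the origin fields follow. Below is a small helper sketch of that layout, assuming SOURCE_FIELD_SKIP_STEP equals 2 as the arities above imply; it is not part of the test class.

// Hypothetical helper that builds the [time, attr, origin fields...] layout used above.
private static Row buildSourceRow(Object... originFields) {
    Row row = new Row(originFields.length + SOURCE_FIELD_SKIP_STEP);
    row.setField(0, System.currentTimeMillis()); // built-in event-time slot
    row.setField(1, "attr");                     // built-in attribute slot
    for (int i = 0; i < originFields.length; i++) {
        row.setField(i + SOURCE_FIELD_SKIP_STEP, originFields[i]);
    }
    return row;
}
// e.g. buildSourceRow("not important", 9527L) reproduces the sourceRow above.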
Use of org.apache.inlong.sort.protocol.source.SourceInfo in project incubator-inlong by apache.
The class FieldMappingTransformerTest, method testTransformWithDt.
@Test
public void testTransformWithDt() throws Exception {
    final FieldInfo fieldInfo = new FieldInfo("id", new LongFormatInfo());
    final FieldInfo dtFieldInfo = new BuiltInFieldInfo("dt", new TimestampFormatInfo(), BuiltInField.DATA_TIME);
    final SourceInfo sourceInfo = new TestingSourceInfo(new FieldInfo[] { fieldInfo, dtFieldInfo });
    final SinkInfo sinkInfo = new TestingSinkInfo(new FieldInfo[] { fieldInfo, dtFieldInfo });
    final long dataFlowId = 1L;
    final DataFlowInfo dataFlowInfo = new DataFlowInfo(dataFlowId, sourceInfo, sinkInfo);

    final FieldMappingTransformer transformer = new FieldMappingTransformer(new Configuration());
    transformer.addDataFlow(dataFlowInfo);

    // should be 3 fields (1 origin field + time + attr)
    final Row sourceRow = new Row(1 + SOURCE_FIELD_SKIP_STEP);
    final long dt = System.currentTimeMillis();
    sourceRow.setField(0, dt);
    sourceRow.setField(1, "attr");
    sourceRow.setField(2, 9527L);

    final Record sourceRecord = new Record(dataFlowId, dt, sourceRow);
    final Record sinkRecord = transformer.transform(sourceRecord);
    assertEquals(dataFlowId, sinkRecord.getDataflowId());
    assertEquals(2, sinkRecord.getRow().getArity());
    assertEquals(9527L, sinkRecord.getRow().getField(0));
    assertEquals(new Timestamp(dt), sinkRecord.getRow().getField(1));
}
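The difference from testTransform is the BuiltInFieldInfo mapped to BuiltInField.DATA_TIME: the sink's dt column is not read from any source-row position but is filled from the record time passed to new Record(dataFlowId, dt, sourceRow), and the assertion shows it arrives as a java.sql.Timestamp. A minimal sketch of that expectation, with an illustrative timestamp value:

long recordTime = 1_700_000_000_000L;             // illustrative epoch millis, stands in for dt
Timestamp expectedDt = new Timestamp(recordTime); // expected value at the DATA_TIME sink field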