Usage of com.alibaba.otter.shared.common.model.config.pipeline.Pipeline in the alibaba/otter project — class FileLoadActionTest, method testLoadWithLocal:
@Test
public void testLoadWithLocal() throws IOException {
    // Parameters used to build the fileData fixtures. The start index drives the
    // mapping between the pipeline and the fileData entries (through the
    // dataMediaPair id), as well as each dataMediaPair's pushWeight.
    final int startIndex = 0;
    final int count = 50;
    final Pipeline pipeline = buildPipeline(startIndex, count);
    final Channel channel = new Channel();
    // Stub the config lookups so the load action resolves our fixtures.
    new NonStrictExpectations() {
        {
            configClientService.findChannel(anyLong);
            returns(channel);
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    FileBatch fileBatch = buildFileBatch(buildIdentity(1L, 2L, 3L));
    fileBatch.getFiles().addAll(buildFileDatas(null, EventType.INSERT, startIndex, count, true));
    FileLoadContext context = fileLoadAction.load(fileBatch, ROOT_DIR, new WeightController(1));
    // The context must carry the stubbed config objects and report every
    // submitted file as both prepared and processed.
    want.object(context.getChannel()).isEqualTo(channel);
    want.object(context.getPipeline()).isEqualTo(pipeline);
    want.object(context.getPrepareDatas()).isEqualTo(fileBatch.getFiles());
    want.number(context.getProcessedDatas().size()).isEqualTo(fileBatch.getFiles().size());
}
Usage of com.alibaba.otter.shared.common.model.config.pipeline.Pipeline in the alibaba/otter project — class FileLoadActionTest, method buildPipeline:
/**
 * Builds a Pipeline fixture whose dataMediaPairs match the fileData fixtures:
 * one pair per NUMBER_OF_FILE_DATA_COPIES file entries, with pair ids running
 * from {@code fileDataStartIndex} upward so the id-based pairing with the
 * generated fileDatas holds.
 *
 * @param fileDataStartIndex id of the first dataMediaPair (offset only; it must
 *                           not reduce the number of pairs created)
 * @param fileDataCount      total number of fileData entries the pairs must cover
 * @return a Pipeline with exactly fileDataCount / NUMBER_OF_FILE_DATA_COPIES pairs
 */
protected Pipeline buildPipeline(final int fileDataStartIndex, int fileDataCount) {
    final Pipeline pipeline = new Pipeline();
    pipeline.setParameters(new PipelineParameter());
    int dataMediaPairCount = fileDataCount / NUMBER_OF_FILE_DATA_COPIES;
    pipeline.setPairs(new ArrayList<DataMediaPair>(dataMediaPairCount));
    // FIX: the loop previously ran while i < dataMediaPairCount, so any non-zero
    // fileDataStartIndex produced fewer than dataMediaPairCount pairs (or none at
    // all once the start index reached the count). Offsetting the upper bound by
    // the start index always yields exactly dataMediaPairCount pairs, starting at
    // id fileDataStartIndex. Behavior is unchanged for the existing caller, which
    // passes fileDataStartIndex = 0.
    for (int i = fileDataStartIndex; i < fileDataStartIndex + dataMediaPairCount; i++) {
        DataMediaPair dataMediaPair = buildDataMediaPair(i, i);
        pipeline.getPairs().add(dataMediaPair);
    }
    return pipeline;
}
Usage of com.alibaba.otter.shared.common.model.config.pipeline.Pipeline in the alibaba/otter project — class OtterTransformerTest, method test_fileData:
@Test
public void test_fileData() {
    // Pipeline fixture with a single oracle -> mysql media pair (id = 1).
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    DbDataMedia source = getOracleMedia();
    source.setId(1L);
    DataMediaPair pair = new DataMediaPair();
    pair.setId(1L);
    pair.setPipelineId(pipeline.getId());
    pair.setPullWeight(1L);
    pair.setPushWeight(1L);
    pair.setSource(source);
    pair.setTarget(getMysqlMedia());
    List<DataMediaPair> mediaPairs = new ArrayList<DataMediaPair>();
    mediaPairs.add(pair);
    pipeline.setPairs(mediaPairs);
    // Any pipeline lookup resolves to the fixture above.
    new NonStrictExpectations() {
        {
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    // One file entry bound to table id 1 / pair id 1.
    FileData fileData = new FileData();
    fileData.setTableId(1L);
    fileData.setPairId(1L);
    fileData.setPath(new File("/tmp", "httpPipeTest.jpg").getPath());
    FileBatch fileBatch = new FileBatch();
    fileBatch.setIdentity(identity);
    fileBatch.getFiles().add(fileData);
    Map<Class, BatchObject> batchs = otterTransformFactory.transform(fileBatch);
    FileBatch result = (FileBatch) batchs.get(FileData.class);
    // The single file must survive the transform.
    want.number(result.getFiles().size()).isEqualTo(1);
}
Usage of com.alibaba.otter.shared.common.model.config.pipeline.Pipeline in the alibaba/otter project — class OtterTransformerTest, method test_rowData_mysql_oracle:
@Test
public void test_rowData_mysql_oracle() {
    // Pipeline fixture: a single mysql -> oracle media pair, ROW sync mode.
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    DbDataMedia source = getMysqlMedia();
    source.setId(1L);
    DataMediaPair pair = new DataMediaPair();
    pair.setId(1L);
    pair.setPipelineId(pipeline.getId());
    pair.setPullWeight(1L);
    pair.setPushWeight(1L);
    pair.setSource(source);
    pair.setTarget(getOracleMedia());
    List<DataMediaPair> mediaPairs = new ArrayList<DataMediaPair>();
    mediaPairs.add(pair);
    pipeline.setPairs(mediaPairs);
    PipelineParameter parameter = new PipelineParameter();
    parameter.setSyncMode(SyncMode.ROW);
    pipeline.setParameters(parameter);
    // Any pipeline lookup resolves to the fixture above.
    new NonStrictExpectations() {
        {
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    // One UPDATE event exercising the common JDBC column types.
    EventData event = new EventData();
    event.setTableId(1L);
    event.setSchemaName("srf");
    event.setTableName("columns");
    event.setEventType(EventType.UPDATE);
    event.setExecuteTime(100L);
    event.getKeys().add(buildColumn("id", Types.INTEGER, "1", true, false));
    event.getKeys().add(buildColumn("name", Types.VARCHAR, "ljh", true, false));
    event.getColumns().add(buildColumn("alias_name", Types.CHAR, "hello", false, false));
    event.getColumns().add(buildColumn("amount", Types.DECIMAL, "100.01", false, false));
    event.getColumns().add(buildColumn("text_b", Types.BLOB, "[116,101,120,116,95,98]", false, false));
    event.getColumns().add(buildColumn("text_c", Types.CLOB, "text_c", false, false));
    event.getColumns().add(buildColumn("curr_date", Types.DATE, "2011-01-01", false, false));
    event.getColumns().add(buildColumn("gmt_create", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    event.getColumns().add(buildColumn("gmt_modify", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    rowBatch.merge(event);
    Map<Class, BatchObject> batchs = otterTransformFactory.transform(rowBatch);
    RowBatch result = (RowBatch) batchs.get(EventData.class);
    // The single merged event must come out of the transform.
    want.number(result.getDatas().size()).isEqualTo(1);
}
Usage of com.alibaba.otter.shared.common.model.config.pipeline.Pipeline in the alibaba/otter project — class DbLoadActionTest, method test_db_load_mysql:
@Test
public void test_db_load_mysql() {
    ArbitrateConfigRegistry.regist(configClientService);
    dbLoadAction = (DbLoadAction) TestedObject.getSpringBeanFactory().getBean("dbLoadAction");
    // Channel/pipeline fixtures: 20 mysql media pairs with default parameters.
    final Channel channel = new Channel();
    channel.setId(1L);
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    pipeline.setPairs(generatorDataMediaPairForMysql(20));
    pipeline.getParameters().merge(new SystemParameter());
    pipeline.getParameters().merge(new ChannelParameter());
    channel.setPipelines(Arrays.asList(pipeline));
    final Node currentNode = new Node();
    currentNode.setId(1L);
    // Stub the config lookups used by the load action.
    new NonStrictExpectations() {
        {
            configClientService.findChannel(anyLong);
            returns(channel);
            configClientService.findPipeline(anyLong);
            returns(pipeline);
            configClientService.currentNode();
            returns(currentNode);
        }
    };
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    // 20 inserts, then 10 overlapping inserts, then a single delete; merging
    // folds the overlapping events into the batch.
    for (EventData data : generatorEventDataForMysql(0, 20, EventType.INSERT)) {
        rowBatch.merge(data);
    }
    for (EventData data : generatorEventDataForMysql(10, 10, EventType.INSERT)) {
        rowBatch.merge(data);
    }
    for (EventData data : generatorEventDataForMysql(19, 1, EventType.DELETE)) {
        rowBatch.merge(data);
    }
    dbLoadAction.load(rowBatch, new WeightController(1));
}
Aggregations