Use of com.alibaba.otter.shared.etl.model.RowBatch in project otter by alibaba.
The class OtterTransformerTest, method test_rowData_mysql_oracle.
@Test
public void test_rowData_mysql_oracle() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);

    List<DataMediaPair> pairs = new ArrayList<DataMediaPair>();
    DataMediaPair pair1 = new DataMediaPair();
    pair1.setId(1L);
    pair1.setPipelineId(pipeline.getId());
    pair1.setPullWeight(1L);
    pair1.setPushWeight(1L);

    DbDataMedia mysqlMedia = getMysqlMedia();
    mysqlMedia.setId(1L);
    pair1.setSource(mysqlMedia);

    DbDataMedia oracleMedia = getOracleMedia();
    pair1.setTarget(oracleMedia);
    pairs.add(pair1);
    pipeline.setPairs(pairs);

    PipelineParameter param = new PipelineParameter();
    param.setSyncMode(SyncMode.ROW);
    pipeline.setParameters(param);

    new NonStrictExpectations() {
        {
            configClientService.findPipeline(anyLong);
            returns(pipeline);
        }
    };

    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);

    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);

    EventData eventData = new EventData();
    eventData.setTableId(1L);
    eventData.setSchemaName("srf");
    eventData.setTableName("columns");
    eventData.setEventType(EventType.UPDATE);
    eventData.setExecuteTime(100L);
    eventData.getKeys().add(buildColumn("id", Types.INTEGER, "1", true, false));
    eventData.getKeys().add(buildColumn("name", Types.VARCHAR, "ljh", true, false));
    eventData.getColumns().add(buildColumn("alias_name", Types.CHAR, "hello", false, false));
    eventData.getColumns().add(buildColumn("amount", Types.DECIMAL, "100.01", false, false));
    eventData.getColumns().add(buildColumn("text_b", Types.BLOB, "[116,101,120,116,95,98]", false, false));
    eventData.getColumns().add(buildColumn("text_c", Types.CLOB, "text_c", false, false));
    eventData.getColumns().add(buildColumn("curr_date", Types.DATE, "2011-01-01", false, false));
    eventData.getColumns().add(buildColumn("gmt_create", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    eventData.getColumns().add(buildColumn("gmt_modify", Types.TIMESTAMP, "2011-01-01 11:11:11", false, false));
    rowBatch.merge(eventData);

    Map<Class, BatchObject> batchs = otterTransformFactory.transform(rowBatch);
    RowBatch result = (RowBatch) batchs.get(EventData.class);
    want.number(result.getDatas().size()).isEqualTo(1);
}
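The buildColumn helper used above is defined elsewhere in the test class and is not part of this snippet. A minimal sketch of what such a helper presumably does, assuming the usual setters on com.alibaba.otter.shared.etl.model.EventColumn (column name, JDBC type, value, key and null flags); the exact signature in the otter test source may differ:

// Sketch only: build an EventColumn from name / JDBC type / value / key flag / null flag.
private EventColumn buildColumn(String name, int type, String value, boolean isKey, boolean isNull) {
    EventColumn column = new EventColumn();
    column.setColumnName(name);
    column.setColumnType(type);   // a java.sql.Types constant
    column.setColumnValue(value);
    column.setKey(isKey);
    column.setNull(isNull);
    return column;
}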
Use of com.alibaba.otter.shared.etl.model.RowBatch in project otter by alibaba.
The class OtterTransformerFactory, method initBatchObject.
// Initialize the batchObject according to the given data class
private BatchObject initBatchObject(Identity identity, Class clazz) {
    if (EventData.class.equals(clazz)) {
        RowBatch rowbatch = new RowBatch();
        rowbatch.setIdentity(identity);
        return rowbatch;
    } else if (FileData.class.equals(clazz)) {
        FileBatch fileBatch = new FileBatch();
        fileBatch.setIdentity(identity);
        return fileBatch;
    } else {
        throw new TransformException("no support Data[" + clazz.getName() + "]");
    }
}
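Since initBatchObject is private, it is only reachable through OtterTransformerFactory.transform. A minimal sketch of the calling pattern such a factory method typically supports, lazily creating one batch container per data class; the method name groupByClass and the grouping logic are assumptions for illustration, not the actual otter transform code:

// Sketch only (assumed grouping logic): route every EventData of a RowBatch
// into a per-class batch container created by initBatchObject.
private Map<Class, BatchObject> groupByClass(RowBatch rowBatch) {
    Map<Class, BatchObject> batches = new HashMap<Class, BatchObject>();
    for (EventData data : rowBatch.getDatas()) {
        BatchObject batch = batches.get(EventData.class);
        if (batch == null) {
            batch = initBatchObject(rowBatch.getIdentity(), EventData.class); // yields a RowBatch
            batches.put(EventData.class, batch);
        }
        ((RowBatch) batch).merge(data);
    }
    return batches;
}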
Use of com.alibaba.otter.shared.etl.model.RowBatch in project otter by alibaba.
The class MemoryPipeTest, method test_timeout.
@Test
public void test_timeout() {
    RowDataMemoryPipe pipe = new RowDataMemoryPipe();
    // times out after 1s
    pipe.setTimeout(1 * 1000L);
    pipe.setDownloadDir(tmp);
    try {
        pipe.afterPropertiesSet();
    } catch (Exception e) {
        want.fail();
    }

    DbBatch source = new DbBatch();
    RowBatch rowBatch = new RowBatch();
    Identity identity = new Identity();
    identity.setChannelId(100L);
    identity.setPipelineId(100L);
    identity.setProcessId(100L);
    rowBatch.setIdentity(identity);
    source.setRowBatch(rowBatch);

    MemoryPipeKey key = pipe.put(source);
    try {
        Thread.sleep(1500L);
    } catch (InterruptedException e) {
        want.fail();
    }

    DbBatch target = pipe.get(key);
    // the returned result is null (the entry has timed out)
    want.bool(target == null).is(true);
}
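For contrast, a sketch of the non-expired case using the same pipe and source as above; the pre-timeout behaviour (get returning the stored batch) is inferred from the test's intent and is not asserted in the original test:

// Sketch only: reading back before the 1s timeout elapses should still return the batch.
MemoryPipeKey freshKey = pipe.put(source);
DbBatch loaded = pipe.get(freshKey);   // no sleep in between
want.bool(loaded != null).is(true);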
Use of com.alibaba.otter.shared.etl.model.RowBatch in project otter by alibaba.
The class DatabaseExtractorTest, method test_global_row.
@Test
public void test_global_row() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);
    pipeline.getParameters().setSyncMode(SyncMode.ROW);
    // set consistency to global (media)
    pipeline.getParameters().setSyncConsistency(SyncConsistency.MEDIA);

    int start = RandomUtils.nextInt();
    int count = 10;
    List<DataMediaPair> pairs = getDataMediaPairForMysql(start, count);
    pipeline.setPairs(pairs);

    new NonStrictExpectations() {
        {
            configClientService.findPipeline(100L);
            returns(pipeline);
        }
    };

    // build the test data
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    for (int tableId = start; tableId < start + count; tableId++) {
        for (int i = start; i < start + count; i++) {
            EventData eventData = getEventData(tableId, i);
            eventData.setSchemaName("srf");
            eventData.setTableName("columns");
            rowBatch.merge(eventData);
        }
    }

    databaseExtractor.extract(new DbBatch(rowBatch));
    want.number(rowBatch.getDatas().size()).isEqualTo(count);
}
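The getEventData(tableId, i) helper comes from the extractor test's base class and is not shown in this listing. A minimal sketch of what it plausibly builds, reusing the buildColumn pattern from the transformer test above; the concrete columns and the INSERT event type are assumptions:

// Sketch only: one row-change event per (tableId, i) pair.
private EventData getEventData(int tableId, int i) {
    EventData eventData = new EventData();
    eventData.setTableId(tableId);
    eventData.setEventType(EventType.INSERT);
    eventData.setExecuteTime(100L);
    eventData.getKeys().add(buildColumn("id", Types.INTEGER, String.valueOf(i), true, false));
    return eventData;
}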
Use of com.alibaba.otter.shared.etl.model.RowBatch in project otter by alibaba.
The class FreedomExtractorTest, method test_mysql.
@Test
public void test_mysql() {
    final Pipeline pipeline = new Pipeline();
    pipeline.setId(100L);

    int start = RandomUtils.nextInt();
    int count = 10;
    List<DataMediaPair> pairs = getDataMediaPairForMysql(start, count);
    pipeline.setPairs(pairs);

    new NonStrictExpectations() {
        {
            configClientService.findPipeline(100L);
            returns(pipeline);
        }
    };

    // build the test data
    RowBatch rowBatch = new RowBatch();
    rowBatch.setIdentity(identity);
    for (int tableId = start; tableId < start + count; tableId++) {
        for (int i = start; i < start + count; i++) {
            EventData eventData = getEventData(tableId, i);
            eventData.setSchemaName("retl");
            eventData.setTableName("retl_buffer");
            rowBatch.merge(eventData);
        }
    }

    DbBatch dbBatch = new DbBatch(rowBatch);
    freedomExtractor.extract(dbBatch);
    want.collection(dbBatch.getRowBatch().getDatas()).sizeEq(count * count);
}
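Both extractor tests reference an identity field that is initialized outside the snippets shown here. Based on the identity construction used by the other tests in this listing, it is presumably prepared along these lines (the concrete ids are assumptions):

// Sketch only: the identity shared by the extractor tests.
Identity identity = new Identity();
identity.setChannelId(100L);
identity.setPipelineId(100L);
identity.setProcessId(100L);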